/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015-2018 Vladimir Zapolskiy <vz@mleia.com>
 * Copyright (c) 2015 Tyco Fire Protection Products.
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 */

#include <common.h>
#include <nand.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>

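/* SLC NAND controller register map, accessed through SLC_NAND_BASE below */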
struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};

/* CFG register */
#define CFG_CE_LOW		(1 << 5)
#define CFG_DMA_ECC		(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN		(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST		(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR		(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET		(1 << 2)
#define CTRL_ECC_CLEAR		(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START		(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO		(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY		(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC		(1 << 1)
#define INT_STAT_RDY		(1 << 0)

/* TAC register bits, be aware of overflows */
#define TAC_W_RDY(n)		(max_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)		(max_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 0)

/* NAND ECC Layout for small page NAND devices
 * Note: For large page devices, the default layouts are used. */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = { 10, 11, 12, 13, 14, 15, },
	.oobfree = {
		{ .offset = 0, .length = 4, },
		{ .offset = 6, .length = 4, },
	}
};

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8]; /* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/arm/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif

static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;

static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}

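/* Drive nCE and latch command or address bytes via the SLC CMD/ADDR registers */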
static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}

static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/*
 * Prepare DMA descriptors for a NAND read or write operation.
 * If the size is smaller than 256 bytes, the transfer is assumed
 * to be an OOB-only transfer.
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC:
	 * the low bits of the CTRL word hold the transfer size (0x5), i.e.
	 * the ECC register is copied multiple times to keep the DMA in sync
	 * with the flash controller.
	 */
	ecc_ctrl = 0x5 |
			DMAC_CHAN_SRC_BURST_1 |
			DMAC_CHAN_DEST_BURST_1 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
			DMAC_CHAN_SRC_BURST_4 |
			DMAC_CHAN_DEST_BURST_4 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
			DMAC_CHAN_SRC_BURST_4 |
			DMAC_CHAN_DEST_BURST_4 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare Area from Memory to Flash.
	 *
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare Area from Flash to Memory.
	 *
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps 1-4 of the write sequence are repeated four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Memory to Flash.
	 *
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps 1-4 of the read sequence are repeated four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Flash to Memory.
	 */

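	/*
	 * Build the descriptor chain: one data descriptor plus one ECC
	 * read-back descriptor per 256-byte ECC step of the transfer.
	 */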
	for (i = 0; i < size/CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data transfer: terminate the chain at the last ECC entry */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}

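/*
 * Run a single DMA transfer between memory and the SLC data register
 * (data area or OOB) and wait for both the NAND device and the DMA
 * channel to complete.
 */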
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		 (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		 (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		 DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/*
	 * Write the transfer length only when starting a new transfer;
	 * for a data-area transfer the OOB size is added, since the OOB
	 * is moved by a subsequent call within the same SLC transfer.
	 */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}

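/*
 * Convert the ECC words captured from the SLC ECC register into the
 * inverted 3-byte-per-256-byte-step layout used by the software
 * Hamming code, so they can be compared by nand_correct_data().
 */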
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;
	for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
	     i += CONFIG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i+2] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i+1] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i] = (u8)(ce & 0xFF);
	}
	return 0;
}

static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}

/*
 * Enables and prepares SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}

/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd: MTD block structure
 * dat: raw data read from the chip
 * read_ecc: ECC from the chip
 * calc_ecc: the ECC calculated from raw data
 *
 * Detect and correct a 1 bit error for 256 byte block
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0 ; i < ECCSTEPS ; i++) {
		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;
		else
			ret2 += ret1;

		r += CONFIG_SYS_NAND_ECCBYTES;
		c += CONFIG_SYS_NAND_ECCBYTES;
		data_offset += CONFIG_SYS_NAND_ECCSIZE;
	}

	return ret2;
}

static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}

static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}

/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a whole
	 * page to be moved in a single DMA transaction using the DMA
	 * controller's scatter/gather mode through a linked list; the ECC
	 * is read without any software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0)
		mtd->ecc_stats.failed++;
	else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a whole
	 * page to be moved in a single DMA transaction using the DMA
	 * controller's scatter/gather mode through a linked list; the ECC
	 * is read without any software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#else
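/* Programmed I/O accessors used when the LPC32xx DMA driver is not enabled */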
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif

static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}

/*
 * The LPC32xx has only one SLC NAND controller; CONFIG_SYS_NAND_SELF_INIT
 * is deliberately not used so that this function can be shared by both
 * the SPL NAND and U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	/* Hardware ECC calculation is supported when the DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see LPC32x0 User Manual,
	 * note after SLC_ECC register description (UM10326, p.198)
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined
	 * for both small and large page NAND flash devices.
	 */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}