1*4882a593Smuzhiyun /*
2*4882a593Smuzhiyun * LPC32xx SLC NAND flash controller driver
3*4882a593Smuzhiyun *
4*4882a593Smuzhiyun * (C) Copyright 2015-2018 Vladimir Zapolskiy <vz@mleia.com>
5*4882a593Smuzhiyun * Copyright (c) 2015 Tyco Fire Protection Products.
6*4882a593Smuzhiyun *
7*4882a593Smuzhiyun * Hardware ECC support original source code
8*4882a593Smuzhiyun * Copyright (C) 2008 by NXP Semiconductors
9*4882a593Smuzhiyun * Author: Kevin Wells
10*4882a593Smuzhiyun */
11*4882a593Smuzhiyun
12*4882a593Smuzhiyun #include <common.h>
13*4882a593Smuzhiyun #include <nand.h>
14*4882a593Smuzhiyun #include <linux/mtd/nand_ecc.h>
15*4882a593Smuzhiyun #include <linux/errno.h>
16*4882a593Smuzhiyun #include <asm/io.h>
17*4882a593Smuzhiyun #include <asm/arch/config.h>
18*4882a593Smuzhiyun #include <asm/arch/clk.h>
19*4882a593Smuzhiyun #include <asm/arch/sys_proto.h>
20*4882a593Smuzhiyun #include <asm/arch/dma.h>
21*4882a593Smuzhiyun #include <asm/arch/cpu.h>
22*4882a593Smuzhiyun
/*
 * SLC NAND controller register file; one u32 per register, so the
 * comment on each field is its byte offset from SLC_NAND_BASE.
 */
struct lpc32xx_nand_slc_regs {
	u32 data;	/* 0x00: data FIFO window (32-bit access only) */
	u32 addr;	/* 0x04: address latch */
	u32 cmd;	/* 0x08: command latch */
	u32 stop;	/* 0x0C */
	u32 ctrl;	/* 0x10: reset / ECC clear / DMA start */
	u32 cfg;	/* 0x14: CE, ECC and DMA configuration */
	u32 stat;	/* 0x18: FIFO and ready status */
	u32 int_stat;	/* 0x1C */
	u32 ien;	/* 0x20: interrupt enable */
	u32 isr;	/* 0x24: interrupt set (software trigger) */
	u32 icr;	/* 0x28: interrupt clear */
	u32 tac;	/* 0x2C: read/write timing arcs */
	u32 tc;		/* 0x30: transfer count */
	u32 ecc;	/* 0x34: hardware ECC parity readout */
	u32 dma_data;	/* 0x38: DMA data port */
};
40*4882a593Smuzhiyun
/* CFG register */
#define CFG_CE_LOW	(1 << 5)	/* Keep nCE asserted */
#define CFG_DMA_ECC	(1 << 4)	/* Enable DMA ECC bit */
#define CFG_ECC_EN	(1 << 3)	/* ECC enable bit */
#define CFG_DMA_BURST	(1 << 2)	/* DMA burst bit */
#define CFG_DMA_DIR	(1 << 1)	/* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET	(1 << 2)
#define CTRL_ECC_CLEAR	(1 << 1)	/* Reset ECC bit */
#define CTRL_DMA_START	(1 << 0)	/* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO	(1 << 2)	/* DMA FIFO has data bit */
#define STAT_NAND_READY	(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC	(1 << 1)
#define INT_STAT_RDY	(1 << 0)

/*
 * TAC register bits: each timing arc is a 4-bit clock count.
 *
 * Clamp the computed count to the field maximum (0xF) with min_t() so
 * an over-range value cannot spill into the neighbouring field.  The
 * previous max_t() returned the *larger* of (n, 0xF): counts below 15
 * were silently forced to the slowest timing (ignoring the configured
 * values) and counts above 15 overflowed into adjacent fields.
 */
#define TAC_W_RDY(n)	(min_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)	(min_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)	(min_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)	(min_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)	(min_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)	(min_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)	(min_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)	(min_t(uint32_t, (n), 0xF) << 0)
70*4882a593Smuzhiyun
/*
 * NAND ECC layout for small page (16-byte OOB) NAND devices.
 * Note: for large page devices, the default layouts are used.
 *
 * ECC occupies OOB bytes 10..15; bytes 0-3 and 6-9 are free.
 * Bytes 4-5 are excluded from oobfree — presumably reserved for the
 * factory bad-block marker; confirm against UM10326 / nand_bbt usage.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = { 10, 11, 12, 13, 14, 15, },
	.oobfree = {
		{ .offset = 0, .length = 4, },
		{ .offset = 6, .length = 4, },
	}
};
81*4882a593Smuzhiyun
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/* Number of 256-byte ECC sub-page steps in one NAND page */
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8];	/* MAX ECC size: one parity word per step */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif
105*4882a593Smuzhiyun
/* Fixed MMIO mapping of the single SLC NAND controller instance */
static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;
108*4882a593Smuzhiyun
lpc32xx_nand_init(void)109*4882a593Smuzhiyun static void lpc32xx_nand_init(void)
110*4882a593Smuzhiyun {
111*4882a593Smuzhiyun uint32_t hclk = get_hclk_clk_rate();
112*4882a593Smuzhiyun
113*4882a593Smuzhiyun /* Reset SLC NAND controller */
114*4882a593Smuzhiyun writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);
115*4882a593Smuzhiyun
116*4882a593Smuzhiyun /* 8-bit bus, no DMA, no ECC, ordinary CE signal */
117*4882a593Smuzhiyun writel(0, &lpc32xx_nand_slc_regs->cfg);
118*4882a593Smuzhiyun
119*4882a593Smuzhiyun /* Interrupts disabled and cleared */
120*4882a593Smuzhiyun writel(0, &lpc32xx_nand_slc_regs->ien);
121*4882a593Smuzhiyun writel(INT_STAT_TC | INT_STAT_RDY,
122*4882a593Smuzhiyun &lpc32xx_nand_slc_regs->icr);
123*4882a593Smuzhiyun
124*4882a593Smuzhiyun /* Configure NAND flash timings */
125*4882a593Smuzhiyun writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
126*4882a593Smuzhiyun TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
127*4882a593Smuzhiyun TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
128*4882a593Smuzhiyun TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
129*4882a593Smuzhiyun TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
130*4882a593Smuzhiyun TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
131*4882a593Smuzhiyun TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
132*4882a593Smuzhiyun TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
133*4882a593Smuzhiyun &lpc32xx_nand_slc_regs->tac);
134*4882a593Smuzhiyun }
135*4882a593Smuzhiyun
lpc32xx_nand_cmd_ctrl(struct mtd_info * mtd,int cmd,unsigned int ctrl)136*4882a593Smuzhiyun static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
137*4882a593Smuzhiyun int cmd, unsigned int ctrl)
138*4882a593Smuzhiyun {
139*4882a593Smuzhiyun debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);
140*4882a593Smuzhiyun
141*4882a593Smuzhiyun if (ctrl & NAND_NCE)
142*4882a593Smuzhiyun setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
143*4882a593Smuzhiyun else
144*4882a593Smuzhiyun clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
145*4882a593Smuzhiyun
146*4882a593Smuzhiyun if (cmd == NAND_CMD_NONE)
147*4882a593Smuzhiyun return;
148*4882a593Smuzhiyun
149*4882a593Smuzhiyun if (ctrl & NAND_CLE)
150*4882a593Smuzhiyun writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
151*4882a593Smuzhiyun else if (ctrl & NAND_ALE)
152*4882a593Smuzhiyun writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
153*4882a593Smuzhiyun }
154*4882a593Smuzhiyun
lpc32xx_nand_dev_ready(struct mtd_info * mtd)155*4882a593Smuzhiyun static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
156*4882a593Smuzhiyun {
157*4882a593Smuzhiyun return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
158*4882a593Smuzhiyun }
159*4882a593Smuzhiyun
160*4882a593Smuzhiyun #if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/*
 * Prepares the DMA linked-list descriptors for NAND RD/WR operations.
 * If the size is < 256 bytes (i.e. less than one ECC step) it is
 * assumed to be an OOB-only transfer.
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC
	 * Copy Multiple times (transfer size 0x5) to sync DMA with the
	 * Flash Controller
	 */
	ecc_ctrl = 0x5 |
		DMAC_CHAN_SRC_BURST_1 |
		DMAC_CHAN_DEST_BURST_1 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data (one ECC step) */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
		DMAC_CHAN_SRC_BURST_4 |
		DMAC_CHAN_DEST_BURST_4 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		DMAC_CHAN_SRC_BURST_4 |
		DMAC_CHAN_DEST_BURST_4 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/*
	 * The fixed endpoint is the SLC DMA data register; only the memory
	 * side auto-increments.
	 */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare area from Flash to Memory.
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps(1-4) of Write Operations repeat for four times
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps(1-4) of Read Operations repeat for four times
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Flash to Memory.
	 */

	/* One (data, ecc) descriptor pair per 256-byte ECC step */
	for (i = 0; i < size/CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		/* Only the memory-side address advances per step */
		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		/* Snapshot the hardware ECC word for this step */
		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data only transfer: terminate the list at the last ECC */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash as a single descriptor */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}
281*4882a593Smuzhiyun
/*
 * Run one scatter/gather DMA transfer of "len" bytes between "buf" and
 * the SLC controller (read != 0 means flash-to-memory), then wait for
 * both the flash device and the DMA channel to finish.
 */
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		(read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		(read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/*
	 * Write length for new transfers only: skip when the FIFO still
	 * holds data or a previous byte count remains.  A data transfer
	 * pre-programs the OOB bytes that follow it; an OOB-only transfer
	 * (len == oobsize) programs just its own length.
	 */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}
332*4882a593Smuzhiyun
/*
 * Convert "count" raw 32-bit SLC ECC parity words into their 3-byte
 * on-flash representation: shift left by 2, invert, keep 24 bits, and
 * store most-significant byte first.  Always returns 0.
 */
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int step;

	for (step = 0; step < count; step++) {
		u8 *out = &spare[step * CONFIG_SYS_NAND_ECCBYTES];
		u32 ce = ~(ecc[step] << 2) & 0xFFFFFF;

		out[0] = (u8)(ce >> 16);
		out[1] = (u8)((ce >> 8) & 0xFF);
		out[2] = (u8)(ce & 0xFF);
	}

	return 0;
}
346*4882a593Smuzhiyun
/*
 * NAND framework ecc.calculate hook.  The raw parity words were already
 * captured from the SLC ECC register into ecc_buffer by the DMA linked
 * list; only the conversion to the on-flash byte layout happens here,
 * so "dat" is unused.
 */
static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}
352*4882a593Smuzhiyun
/*
 * Enables and prepares the SLC NAND controller for doing data transfers
 * with H/W ECC enabled.  "mode" (NAND_ECC_READ/WRITE) is unused: the
 * setup is identical for both directions.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC accumulator before a new page transfer */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations (ECC + DMA ECC) */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}
365*4882a593Smuzhiyun
366*4882a593Smuzhiyun /*
367*4882a593Smuzhiyun * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
368*4882a593Smuzhiyun * mtd: MTD block structure
369*4882a593Smuzhiyun * dat: raw data read from the chip
370*4882a593Smuzhiyun * read_ecc: ECC from the chip
371*4882a593Smuzhiyun * calc_ecc: the ECC calculated from raw data
372*4882a593Smuzhiyun *
373*4882a593Smuzhiyun * Detect and correct a 1 bit error for 256 byte block
374*4882a593Smuzhiyun */
lpc32xx_correct_data(struct mtd_info * mtd,u_char * dat,u_char * read_ecc,u_char * calc_ecc)375*4882a593Smuzhiyun int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
376*4882a593Smuzhiyun u_char *read_ecc, u_char *calc_ecc)
377*4882a593Smuzhiyun {
378*4882a593Smuzhiyun unsigned int i;
379*4882a593Smuzhiyun int ret1, ret2 = 0;
380*4882a593Smuzhiyun u_char *r = read_ecc;
381*4882a593Smuzhiyun u_char *c = calc_ecc;
382*4882a593Smuzhiyun u16 data_offset = 0;
383*4882a593Smuzhiyun
384*4882a593Smuzhiyun for (i = 0 ; i < ECCSTEPS ; i++) {
385*4882a593Smuzhiyun r += CONFIG_SYS_NAND_ECCBYTES;
386*4882a593Smuzhiyun c += CONFIG_SYS_NAND_ECCBYTES;
387*4882a593Smuzhiyun data_offset += CONFIG_SYS_NAND_ECCSIZE;
388*4882a593Smuzhiyun
389*4882a593Smuzhiyun ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
390*4882a593Smuzhiyun if (ret1 < 0)
391*4882a593Smuzhiyun return -EBADMSG;
392*4882a593Smuzhiyun else
393*4882a593Smuzhiyun ret2 += ret1;
394*4882a593Smuzhiyun }
395*4882a593Smuzhiyun
396*4882a593Smuzhiyun return ret2;
397*4882a593Smuzhiyun }
398*4882a593Smuzhiyun
/* DMA read of "len" bytes from flash into "buf" */
static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}
403*4882a593Smuzhiyun
/* DMA write of "len" bytes from "buf" to flash */
static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}
409*4882a593Smuzhiyun
/*
 * Reuse the logic from "nand_read_page_hwecc()": DMA the full data area
 * and OOB in, then compare the on-flash ECC against the hardware-
 * computed one and fix single-bit errors.  Returns the maximum number
 * of bitflips seen (0 on a clean read).
 */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allowed single DMA transaction
	 * of a page size using DMA controller scatter/gather mode through
	 * linked list; the ECC read is done without any software intervention.
	 */

	/* Enable H/W ECC, DMA the data area, snapshot ECC, then the OOB */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	/* Gather the on-flash ECC bytes out of the OOB per the layout */
	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0)
		mtd->ecc_stats.failed++;
	else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}
447*4882a593Smuzhiyun
/*
 * Reuse the logic from "nand_write_page_hwecc()": DMA the data area out
 * (which makes the hardware accumulate ECC), place the ECC bytes into
 * the OOB buffer per the layout, then DMA the OOB out.
 */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allowed single DMA transaction
	 * of a page size using DMA controller scatter/gather mode through
	 * linked list; the ECC read is done without any software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	/* Scatter the ECC bytes into the OOB buffer per the layout */
	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
477*4882a593Smuzhiyun #else
/* PIO read: one byte per (mandatory 32-bit) data register access */
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	int i;

	for (i = 0; i < len; i++)
		buf[i] = readl(&lpc32xx_nand_slc_regs->data);
}
483*4882a593Smuzhiyun
lpc32xx_write_buf(struct mtd_info * mtd,const uint8_t * buf,int len)484*4882a593Smuzhiyun static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
485*4882a593Smuzhiyun {
486*4882a593Smuzhiyun while (len-- > 0)
487*4882a593Smuzhiyun writel(*buf++, &lpc32xx_nand_slc_regs->data);
488*4882a593Smuzhiyun }
489*4882a593Smuzhiyun #endif
490*4882a593Smuzhiyun
/* Single-byte read; the SLC data register must be accessed 32-bit wide */
static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}
495*4882a593Smuzhiyun
/* Single-byte write; the SLC data register must be accessed 32-bit wide */
static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}
500*4882a593Smuzhiyun
/*
 * LPC32xx has only one SLC NAND controller, don't utilize
 * CONFIG_SYS_NAND_SELF_INIT to be able to reuse this function
 * both in SPL NAND and U-Boot images.
 *
 * Wires the nand_chip callbacks (DMA + hardware ECC when the DMA driver
 * is available, PIO + software ECC otherwise) and initializes the
 * controller.  Returns 0 on success, -1 if no DMA channel is free.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	/* Hardware ECC calculation is supported when DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see LPC32x0 User Manual,
	 * note after SLC_ECC register description (UM10326, p.198)
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined
	 * for both small and large page NAND flash devices.
	 */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	/* Small-page devices get the custom 16-byte OOB layout */
	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}
583