/*
 * Copyright (c) 2018 Fuzhou Rockchip Electronics Co., Ltd
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <common.h>
#include <linux/compat.h>
#include <linux/delay.h>

#include "flash.h"
#include "flash_com.h"
#include "nandc.h"
#include "rk_sftl.h"

#define CPU_DELAY_NS(n)		ndelay(n)
#define usleep_range(a, b)	udelay((b))

#define NANDC_MASTER_EN

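/*
 * Module state: nandc_base is the controller MMIO base set by nandc_init(),
 * g_nandc_ver records the detected controller generation (6 or 9), and
 * g_nandc_ecc_bits caches the ECC strength programmed by nandc_bch_sel().
 * With NANDC_MASTER_EN, the master structure and g_master_temp_buf hold the
 * page/spare bounce buffers used for AHB master (DMA) transfers.
 */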
void __iomem *nandc_base;
static u8 g_nandc_ver;

static u32 g_nandc_ecc_bits;
#ifdef NANDC_MASTER_EN
static struct MASTER_INFO_T master;
static u32 *g_master_temp_buf;
#endif

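/* Report the controller version detected by nandc_init() (6 or 9). */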
u8 nandc_get_version(void)
{
	return g_nandc_ver;
}

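/*
 * Probe the controller version, set up the flash write-protect control and
 * default interface timing, clear the randomizer configuration and, when
 * NANDC_MASTER_EN is defined, allocate the page/spare bounce buffers used
 * for AHB master transfers.
 */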
void nandc_init(void __iomem *nandc_addr)
{
	union FM_CTL_T ctl_reg;

	nandc_base = nandc_addr;

	ctl_reg.d32 = 0;
	g_nandc_ver = 6;
	if (nandc_readl(NANDC_V9_NANDC_VER) == RK3326_NANDC_VER)
		g_nandc_ver = 9;
	if (g_nandc_ver == 9) {
		ctl_reg.V9.wp = 1;
		ctl_reg.V9.sif_read_delay = 2;
		nandc_writel(ctl_reg.d32, NANDC_V9_FMCTL);
		nandc_writel(0, NANDC_V9_RANDMZ_CFG);
		nandc_writel(0x1041, NANDC_V9_FMWAIT);
	} else {
		ctl_reg.V6.wp = 1;
		nandc_writel(ctl_reg.d32, NANDC_FMCTL);
		nandc_writel(0, NANDC_RANDMZ_CFG);
		nandc_writel(0x1061, NANDC_FMWAIT);
	}
	nandc_time_cfg(40);

#ifdef NANDC_MASTER_EN
	if (!g_master_temp_buf)
		g_master_temp_buf = (u32 *)ftl_malloc(MAX_FLASH_PAGE_SIZE +
						      MAX_FLASH_PAGE_SIZE / 8);
	master.page_buf = &g_master_temp_buf[0];
	master.spare_buf = &g_master_temp_buf[MAX_FLASH_PAGE_SIZE / 4];
	master.mapped = 0;
#endif
}

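/* Assert the chip-select line for @chip_sel in FMCTL. */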
void nandc_flash_cs(u8 chip_sel)
{
	union FM_CTL_T tmp;

	tmp.d32 = nandc_readl(NANDC_FMCTL);
	tmp.V6.cs = 0x01 << chip_sel;
	nandc_writel(tmp.d32, NANDC_FMCTL);
}

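/* De-select all flash chips and clear the flash_abort_clear field in FMCTL. */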
void nandc_flash_de_cs(u8 chip_sel)
{
	union FM_CTL_T tmp;

	tmp.d32 = nandc_readl(NANDC_FMCTL);
	tmp.V6.cs = 0;
	tmp.V6.flash_abort_clear = 0;
	nandc_writel(tmp.d32, NANDC_FMCTL);
}

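/* Busy-wait for roughly @count nanoseconds; always returns 0. */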
u32 nandc_delayns(u32 count)
{
	CPU_DELAY_NS(count);
	return 0;
}

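/*
 * Poll the FMCTL ready bit, delaying 100 ns between reads, for up to
 * 100000 iterations. Returns 0 once the flash reports ready and
 * (u32)-1 on timeout.
 */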
u32 nandc_wait_flash_ready(u8 chip_sel)
{
	union FM_CTL_T tmp;
	u32 status;
	u32 i;

	status = 0;
	for (i = 0; i < 100000; i++) {
		nandc_delayns(100);
		tmp.d32 = nandc_readl(NANDC_FMCTL);
		if (tmp.V6.rdy != 0)
			break;
	}

	if (i >= 100000)
		status = -1;
	return status;
}

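/* Program the randomizer seed; @chip_sel is unused on this controller. */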
void nandc_randmz_sel(u8 chip_sel, u32 randmz_seed)
{
	nandc_writel(randmz_seed, NANDC_RANDMZ_CFG);
}

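/*
 * Select one of the preset FMWAIT interface timings from the flash cycle
 * time @ns (in nanoseconds); the register values differ between the v6
 * and v9 controllers.
 */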
void nandc_time_cfg(u32 ns)
{
	if (g_nandc_ver == 9) {
		if (ns < 36)
			nandc_writel(0x1041, NANDC_V9_FMWAIT);
		else if (ns >= 100)
			nandc_writel(0x2082, NANDC_V9_FMWAIT);
		else
			nandc_writel(0x1061, NANDC_V9_FMWAIT);
	} else {
		if (ns < 36)
			nandc_writel(0x1061, NANDC_FMWAIT);
		else if (ns >= 100)
			nandc_writel(0x2082, NANDC_FMWAIT);
		else
			nandc_writel(0x1081, NANDC_FMWAIT);
	}
}

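/*
 * Configure the BCH ECC strength (@bits of correction per ECC sector) and
 * reset the flash transfer logic; the encoding of the mode field differs
 * between the v6 and v9 controllers.
 */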
void nandc_bch_sel(u8 bits)
{
	union BCH_CTL_T tmp;
	union FL_CTL_T fl_reg;
	u8 bch_config;

	fl_reg.d32 = 0;
	fl_reg.V6.rst = 1;
	g_nandc_ecc_bits = bits;
	if (g_nandc_ver == 9) {
		nandc_writel(fl_reg.d32, NANDC_V9_FLCTL);
		if (bits == 70)
			bch_config = 0;
		else if (bits == 60)
			bch_config = 3;
		else if (bits == 40)
			bch_config = 2;
		else
			bch_config = 1;
		tmp.d32 = 0;
		tmp.V9.bchmode = bch_config;
		tmp.V9.bchrst = 1;
		nandc_writel(tmp.d32, NANDC_V9_BCHCTL);
	} else {
		nandc_writel(fl_reg.d32, NANDC_FLCTL);
		tmp.d32 = 0;
		tmp.V6.addr = 0x10;
		tmp.V6.bch_mode1 = 0;
		if (bits == 16) {
			tmp.V6.bch_mode = 0;
		} else if (bits == 24) {
			tmp.V6.bch_mode = 1;
		} else {
			tmp.V6.bch_mode1 = 1;
			tmp.V6.bch_mode = 1;
			if (bits == 40)
				tmp.V6.bch_mode = 0;
		}
		tmp.V6.rst = 1;
		nandc_writel(tmp.d32, NANDC_BCHCTL);
	}
}

/*
 * NANDC transfer setup:
 * 1. program the BCH control register (skipped when the NANDC version is 9)
 * 2. program the internal transfer control register
 * 3. set up the bus transfer
 *    a. target memory data address
 *    b. AHB settings
 * 4. write the registers in order and start the transfer
 */
static void nandc_xfer_start(u8 dir, u8 n_sec, u32 *data, u32 *spare)
{
	union BCH_CTL_T bch_reg;
	union FL_CTL_T fl_reg;
	u32 i;
	union MTRANS_CFG_T master_reg;
	u16 *p_spare_tmp = (u16 *)spare;
	unsigned long vir_addr;

	fl_reg.d32 = 0;
	if (g_nandc_ver == 9) {
		fl_reg.V9.flash_rdn = dir;
		fl_reg.V9.bypass = 1;
		fl_reg.V9.tr_count = 1;
		fl_reg.V9.async_tog_mix = 1;
		fl_reg.V9.cor_able = 1;
		fl_reg.V9.st_addr = 0;
		fl_reg.V9.page_num = (n_sec + 1) / 2;
		/* start mode 1: the DMA transfer is gated by the flash ready signal */
		fl_reg.V9.flash_st_mod = 1;

		if (dir != 0) {
			for (i = 0; i < n_sec / 2; i++) {
				if (spare) {
					master.spare_buf[i] =
						(p_spare_tmp[0]) |
						((u32)p_spare_tmp[1] << 16);
					p_spare_tmp += 2;
				} else {
					master.spare_buf[i] = 0xffffffff;
				}
			}
		} else {
			master.spare_buf[0] = 1;
		}
		master.page_vir = (u32 *)((data == (u32 *)NULL) ?
					  master.page_buf :
					  (u32 *)data);
		master.spare_vir = (u32 *)master.spare_buf;

		master.page_phy = (u32)((unsigned long)master.page_vir);
		master.spare_phy = (u32)((unsigned long)master.spare_vir);
		vir_addr = ((unsigned long)master.page_phy);
		flush_dcache_range(vir_addr & (~0x3FuL),
				   ((vir_addr + 63) & (~0x3FuL)) +
				   fl_reg.V9.page_num * 1024);
		vir_addr = ((unsigned long)master.spare_phy);
		flush_dcache_range(vir_addr & (~0x3FuL),
				   ((vir_addr + 63) & (~0x3FuL)) +
				   fl_reg.V9.page_num * 128);
		master.mapped = 1;
		nandc_writel(master.page_phy, NANDC_V9_MTRANS_SADDR0);
		nandc_writel(master.spare_phy, NANDC_V9_MTRANS_SADDR1);

		master_reg.d32 = nandc_readl(NANDC_V9_MTRANS_CFG);
		master_reg.V9.incr_num = 16;
		master_reg.V9.burst = 7;
		master_reg.V9.hsize = 2;
		master_reg.V9.bus_mode = 1;
		master_reg.V9.ahb_wr = !dir;
		master_reg.V9.ahb_wr_st = 1;
		master_reg.V9.redundance_size = 0;

		nandc_writel(master_reg.d32, NANDC_V9_MTRANS_CFG);
		nandc_writel(fl_reg.d32, NANDC_V9_FLCTL);
		fl_reg.V9.flash_st = 1;
		nandc_writel(fl_reg.d32, NANDC_V9_FLCTL);
	} else {
		bch_reg.d32 = nandc_readl(NANDC_BCHCTL);
		bch_reg.V6.addr = 0x10;
		bch_reg.V6.power_down = 0;
		bch_reg.V6.region = 0;

		fl_reg.V6.rdn = dir;
		fl_reg.V6.dma = 1;
		fl_reg.V6.tr_count = 1;
		fl_reg.V6.async_tog_mix = 1;
		fl_reg.V6.cor_en = 1;
		fl_reg.V6.st_addr = 0;

		master_reg.d32 = nandc_readl(NANDC_MTRANS_CFG);
		master_reg.V6.bus_mode = 0;
		if (dir != 0) {
			u32 spare_sz = 64;

			for (i = 0; i < n_sec / 2; i++) {
				if (spare) {
					master.spare_buf[i * spare_sz / 4] =
						(p_spare_tmp[0]) |
						((u32)p_spare_tmp[1] << 16);
					p_spare_tmp += 2;
				} else {
					master.spare_buf[i * spare_sz / 4] =
						0xffffffff;
				}
			}
		}
		fl_reg.V6.page_num = (n_sec + 1) / 2;
		master.page_vir = (u32 *)((data == (u32 *)NULL) ?
					  master.page_buf :
					  (u32 *)data);
		master.spare_vir = (u32 *)master.spare_buf;

		master.page_phy = (u32)((unsigned long)master.page_vir);
		master.spare_phy = (u32)((unsigned long)master.spare_vir);
		vir_addr = ((unsigned long)master.page_phy);
		flush_dcache_range(vir_addr & (~0x3FuL),
				   ((vir_addr + 63) & (~0x3FuL)) +
				   fl_reg.V6.page_num * 1024);
		vir_addr = ((unsigned long)master.spare_phy);
		flush_dcache_range(vir_addr & (~0x3FuL),
				   ((vir_addr + 63) & (~0x3FuL)) +
				   fl_reg.V6.page_num * 128);
		master.mapped = 1;
		nandc_writel(master.page_phy, NANDC_MTRANS_SADDR0);
		nandc_writel(master.spare_phy, NANDC_MTRANS_SADDR1);
		master_reg.d32 = 0;
		master_reg.V6.incr_num = 16;
		master_reg.V6.burst = 7;
		master_reg.V6.hsize = 2;
		master_reg.V6.bus_mode = 1;
		master_reg.V6.ahb_wr = !dir;
		master_reg.V6.ahb_wr_st = 1;

		nandc_writel(master_reg.d32, NANDC_MTRANS_CFG);
		nandc_writel(bch_reg.d32, NANDC_BCHCTL);
		nandc_writel(fl_reg.d32, NANDC_FLCTL);
		fl_reg.V6.start = 1;
		nandc_writel(fl_reg.d32, NANDC_FLCTL);
	}
}

/*
 * Wait for the end of data transmission
 */
static void nandc_xfer_done(void)
{
	union FL_CTL_T fl_reg;
	union MTRANS_CFG_T master_reg;

	if (g_nandc_ver == 9) {
		union MTRANS_STAT_T stat_reg;

		master_reg.d32 = nandc_readl(NANDC_V9_MTRANS_CFG);
		if (master_reg.V9.ahb_wr != 0) {
			do {
				fl_reg.d32 = nandc_readl(NANDC_V9_FLCTL);
				stat_reg.d32 = nandc_readl(NANDC_V9_MTRANS_STAT);
				usleep_range(20, 30);
			} while (stat_reg.V9.mtrans_cnt < fl_reg.V9.page_num ||
				 fl_reg.V9.tr_rdy == 0);
			udelay(5);
		} else {
			do {
				fl_reg.d32 = nandc_readl(NANDC_V9_FLCTL);
				usleep_range(20, 30);
			} while (fl_reg.V9.tr_rdy == 0);
		}
	} else {
		master_reg.d32 = nandc_readl(NANDC_MTRANS_CFG);
		if (master_reg.V6.bus_mode != 0) {
			union MTRANS_STAT_T stat_reg;

			if (master_reg.V6.ahb_wr != 0) {
				do {
					fl_reg.d32 = nandc_readl(NANDC_FLCTL);
					stat_reg.d32 = nandc_readl(NANDC_MTRANS_STAT);
					usleep_range(20, 30);
				} while (stat_reg.V6.mtrans_cnt < fl_reg.V6.page_num ||
					 fl_reg.V6.tr_rdy == 0);
			} else {
				do {
					fl_reg.d32 = nandc_readl(NANDC_FLCTL);
					usleep_range(20, 30);
				} while (fl_reg.V6.tr_rdy == 0);
			}
		} else {
			do {
				fl_reg.d32 = nandc_readl(NANDC_FLCTL);
			} while (fl_reg.V6.tr_rdy == 0);
		}
	}
}

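/*
 * Transfer @n_sec 512-byte sectors (handled by the controller in 1 KB pairs)
 * between the flash page register and memory. For writes, a missing @p_spare
 * is replaced with an all-0xFF dummy spare buffer. For reads, returns
 * NAND_STS_ECC_ERR on an uncorrectable ECC error, otherwise the maximum
 * number of corrected bit errors seen across the transferred sectors; the
 * spare bytes are copied back into @p_spare when it is supplied.
 */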
u32 nandc_xfer_data(u8 chip_sel, u8 dir, u8 n_sec,
		    u32 *p_data, u32 *p_spare)
{
	u32 status = NAND_STS_OK;
	u32 i;
	u32 spare[16];
	union BCH_ST_T bch_st_reg;

	if (dir == NANDC_WRITE && !p_spare) {
		p_spare = (u32 *)spare;
		memset(spare, 0xFF, sizeof(spare));
	}
	nandc_xfer_start(dir, n_sec, p_data, p_spare);
	nandc_xfer_done();
	if (dir == NANDC_READ) {
		if (g_nandc_ver == 9) {
			for (i = 0; i < n_sec / 4; i++) {
				bch_st_reg.d32 = nandc_readl(NANDC_V9_BCHST(i));
				if (n_sec > 2) {
					if (bch_st_reg.V9.fail0 || bch_st_reg.V9.fail1) {
						status = NAND_STS_ECC_ERR;
					} else {
						u32 tmp = max((u32)bch_st_reg.V9.err_bits0,
							      (u32)bch_st_reg.V9.err_bits1);
						status = max(tmp, status);
					}
				} else {
					if (bch_st_reg.V9.fail0)
						status = NAND_STS_ECC_ERR;
					else
						status = bch_st_reg.V9.err_bits0;
				}
			}
			if (p_spare) {
				for (i = 0; i < n_sec / 2; i++)
					p_spare[i] = master.spare_buf[i];
			}
		} else {
			for (i = 0; i < n_sec / 4; i++) {
				bch_st_reg.d32 = nandc_readl(NANDC_BCHST(i));
				if (bch_st_reg.V6.fail0 || bch_st_reg.V6.fail1) {
					status = NAND_STS_ECC_ERR;
				} else {
					u32 tmp = 0;

					tmp = max(bch_st_reg.V6.err_bits0 |
						  ((u32)bch_st_reg.V6.err_bits0_5 << 5),
						  bch_st_reg.V6.err_bits1 |
						  ((u32)bch_st_reg.V6.err_bits1_5 << 5));
					status = max(tmp, status);
				}
			}
			if (p_spare) {
				u32 spare_sz = 64;
				u32 temp_data;
				u8 *p_spare_temp = (u8 *)p_spare;

				for (i = 0; i < n_sec / 2; i++) {
					temp_data = master.spare_buf[i * spare_sz / 4];
					*p_spare_temp++ = (u8)temp_data;
					*p_spare_temp++ = (u8)(temp_data >> 8);
					*p_spare_temp++ = (u8)(temp_data >> 16);
					*p_spare_temp++ = (u8)(temp_data >> 24);
				}
			}
			nandc_writel(0, NANDC_MTRANS_CFG);
		}
	}
	return status;
}
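
/*
 * Usage sketch (illustrative only, not compiled): a caller that has already
 * issued the NAND read command and address cycles through the flash layer
 * might collect one 2 KB page (four 512-byte sectors) like this. The buffer
 * sizes and the sector count are assumptions for the example, not values
 * required by this file.
 *
 *	u32 page_buf[2048 / 4];
 *	u32 spare_buf[8];
 *	u32 ret = NAND_STS_OK;
 *
 *	nandc_flash_cs(0);
 *	if (nandc_wait_flash_ready(0) == 0)
 *		ret = nandc_xfer_data(0, NANDC_READ, 4, page_buf, spare_buf);
 *	nandc_flash_de_cs(0);
 *
 * Here ret is NAND_STS_ECC_ERR on an uncorrectable error and otherwise the
 * maximum number of corrected bit errors over the four sectors.
 */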
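/* Nothing to do: interrupts are not used by this polled driver. */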
void nandc_clean_irq(void)
{
}