// SPDX-License-Identifier: GPL-2.0

/* Copyright (c) 2018 Rockchip Electronics Co. Ltd. */

#define pr_fmt(fmt) "sfc_nand: " fmt

#include <linux/bug.h>
#include <linux/delay.h>
#include <linux/kernel.h>
#include <linux/slab.h>

#include "rkflash_debug.h"
#include "rk_sftl.h"
#include "sfc_nand.h"

static u32 sfc_nand_get_ecc_status0(void);
static u32 sfc_nand_get_ecc_status1(void);
static u32 sfc_nand_get_ecc_status2(void);
static u32 sfc_nand_get_ecc_status3(void);
static u32 sfc_nand_get_ecc_status4(void);
static u32 sfc_nand_get_ecc_status5(void);
static u32 sfc_nand_get_ecc_status6(void);
static u32 sfc_nand_get_ecc_status7(void);
static u32 sfc_nand_get_ecc_status8(void);
static u32 sfc_nand_get_ecc_status9(void);

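/*
 * Editor's summary of how each spi_nand_tbl[] entry below is consumed
 * elsewhere in this file (struct nand_info itself lives in sfc_nand.h,
 * so treat this as a reading of the columns, not a spec):
 *   id0, id1, id2            - ID bytes matched in sfc_nand_get_info()
 *   sec_per_page             - 512-byte sectors per page (4 or 8)
 *   page_per_blk             - pages per block (0x40 or 0x80)
 *   plane_per_die            - planes per die (1 or 2)
 *   blk_per_plane            - blocks per plane
 *   feature                  - FEA_* flag bits (4-bit read/prog, soft QOP)
 *   density                  - log2 of total sectors, e.g. 18 for 1 Gbit
 *   max_ecc_bits, has_qe_bits
 *   meta { off0..off3 }      - spare-area offsets used for FTL metadata
 *   ecc_status()             - per-vendor ECC status decoder
 */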
static struct nand_info spi_nand_tbl[] = {
	/* TC58CVG0S0HxAIx */
	{ 0x98, 0xC2, 0x00, 4, 0x40, 1, 1024, 0x00, 18, 0x8, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* TC58CVG1S0HxAIx */
	{ 0x98, 0xCB, 0x00, 4, 0x40, 2, 1024, 0x00, 19, 0x8, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* TC58CVG2S0HRAIJ */
	{ 0x98, 0xED, 0x00, 8, 0x40, 1, 2048, 0x0C, 20, 0x8, 0, { 0x04, 0x0C, 0x08, 0x10 }, &sfc_nand_get_ecc_status0 },
	/* TC58CVG1S3HRAIJ */
	{ 0x98, 0xEB, 0x00, 4, 0x40, 1, 2048, 0x0C, 19, 0x8, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* TC58CVG0S3HRAIJ */
	{ 0x98, 0xE2, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },

	/* MX35LF1GE4AB */
	{ 0xC2, 0x12, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* MX35LF2GE4AB */
	{ 0xC2, 0x22, 0x00, 4, 0x40, 2, 1024, 0x0C, 19, 0x4, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* MX35LF2GE4AD */
	{ 0xC2, 0x26, 0x00, 4, 0x40, 1, 2048, 0x0C, 19, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* MX35LF4GE4AD */
	{ 0xC2, 0x37, 0x00, 8, 0x40, 1, 2048, 0x0C, 20, 0x8, 1, { 0x04, 0x08, 0x14, 0x18 }, &sfc_nand_get_ecc_status0 },
	/* MX35UF1GE4AC */
	{ 0xC2, 0x92, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* MX35UF2GE4AC */
	{ 0xC2, 0xA2, 0x00, 4, 0x40, 1, 2048, 0x0C, 19, 0x4, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* MX35UF1GE4AD */
	{ 0xC2, 0x96, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* MX35UF2GE4AD */
	{ 0xC2, 0xA6, 0x00, 4, 0x40, 1, 2048, 0x0C, 19, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* MX35UF4GE4AD */
	{ 0xC2, 0xB7, 0x00, 8, 0x40, 1, 2048, 0x0C, 20, 0x8, 1, { 0x04, 0x08, 0x14, 0x18 }, &sfc_nand_get_ecc_status0 },

	/* GD5F1GQ4UAYIG */
	{ 0xC8, 0xF1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* GD5F1GQ4RB9IGR */
	{ 0xC8, 0xD1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status3 },
	/* GD5F2GQ40BY2GR */
	{ 0xC8, 0xD2, 0x00, 4, 0x40, 2, 1024, 0x0C, 19, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status3 },
	/* GD5F1GQ5UEYIG */
	{ 0xC8, 0x51, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status2 },
	/* GD5F2GQ5UEYIG */
	{ 0xC8, 0x52, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status2 },
	/* GD5F1GQ4R */
	{ 0xC8, 0xC1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status3 },
	/* GD5F4GQ6RExxG 1*4096 */
	{ 0xC8, 0x45, 0x00, 4, 0x40, 2, 2048, 0x4C, 20, 0x4, 1, { 0x04, 0x08, 0x14, 0x18 }, &sfc_nand_get_ecc_status2 },
	/* GD5F4GQ6UExxG 1*4096 */
	{ 0xC8, 0x55, 0x00, 4, 0x40, 2, 2048, 0x4C, 20, 0x4, 1, { 0x04, 0x08, 0x14, 0x18 }, &sfc_nand_get_ecc_status2 },
	/* GD5F1GQ4UExxH */
	{ 0xC8, 0xD9, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status3 },
	/* GD5F1GQ5REYIG */
	{ 0xC8, 0x41, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status2 },
	/* GD5F2GQ5REYIG */
	{ 0xC8, 0x42, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status2 },
	/* GD5F2GM7RxG */
	{ 0xC8, 0x82, 0x00, 4, 0x40, 1, 2048, 0x0C, 19, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status2 },
	/* GD5F2GM7UxG */
	{ 0xC8, 0x92, 0x00, 4, 0x40, 1, 2048, 0x0C, 19, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status2 },
	/* GD5F1GM7UxG */
	{ 0xC8, 0x91, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status2 },
	/* GD5F4GQ4UAYIG 1*4096 */
	{ 0xC8, 0xF4, 0x00, 4, 0x40, 2, 2048, 0x0C, 20, 0x8, 1, { 0x04, 0x08, 0x14, 0x18 }, &sfc_nand_get_ecc_status0 },

	/* W25N01GV */
	{ 0xEF, 0xAA, 0x21, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 0, { 0x04, 0x14, 0x24, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* W25N02KVZEIR */
	{ 0xEF, 0xAA, 0x22, 4, 0x40, 1, 2048, 0x4C, 19, 0x8, 0, { 0x04, 0x14, 0x24, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* W25N04KVZEIR */
	{ 0xEF, 0xAA, 0x23, 4, 0x40, 1, 4096, 0x4C, 20, 0x8, 0, { 0x04, 0x14, 0x24, 0x34 }, &sfc_nand_get_ecc_status0 },
	/* W25N01GW */
	{ 0xEF, 0xBA, 0x21, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 0, { 0x04, 0x14, 0x24, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* W25N02KW */
	{ 0xEF, 0xBA, 0x22, 4, 0x40, 1, 2048, 0x4C, 19, 0x8, 0, { 0x04, 0x14, 0x24, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* W25N512GVEIG */
	{ 0xEF, 0xAA, 0x20, 4, 0x40, 1, 512, 0x4C, 17, 0x1, 0, { 0x04, 0x14, 0x24, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* W25N01KV */
	{ 0xEF, 0xAE, 0x21, 4, 0x40, 1, 1024, 0x4C, 18, 0x4, 0, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },

	/* HYF2GQ4UAACAE */
	{ 0xC9, 0x52, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0xE, 1, { 0x04, 0x24, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* HYF1GQ4UDACAE */
	{ 0xC9, 0x21, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* HYF1GQ4UPACAE */
	{ 0xC9, 0xA1, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* HYF2GQ4UDACAE */
	{ 0xC9, 0x22, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* HYF2GQ4UHCCAE */
	{ 0xC9, 0x5A, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0xE, 1, { 0x04, 0x24, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* HYF4GQ4UAACBE */
	{ 0xC9, 0xD4, 0x00, 8, 0x40, 1, 2048, 0x4C, 20, 0x4, 1, { 0x20, 0x40, 0x24, 0x44 }, &sfc_nand_get_ecc_status0 },

	/* FS35ND01G-S1 */
	{ 0xCD, 0xB1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x10, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status5 },
	/* FS35ND02G-S2 */
	{ 0xCD, 0xA2, 0x00, 4, 0x40, 1, 2048, 0x00, 19, 0x4, 0, { 0x10, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status5 },
	/* FS35ND01G-S1Y2 */
	{ 0xCD, 0xEA, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x4, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* FS35ND02G-S3Y2 */
	{ 0xCD, 0xEB, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x4, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* FS35ND04G-S2Y2 1*4096 */
	{ 0xCD, 0xEC, 0x00, 4, 0x40, 2, 2048, 0x4C, 20, 0x4, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* F35SQA001G */
	{ 0xCD, 0x71, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* F35SQA002G */
	{ 0xCD, 0x72, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x1, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* F35SQA512M */
	{ 0xCD, 0x70, 0x00, 4, 0x40, 1, 512, 0x4C, 17, 0x1, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* F35UQA512M */
	{ 0xCD, 0x60, 0x00, 4, 0x40, 1, 512, 0x4C, 17, 0x1, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },

	/* DS35Q1GA-IB */
	{ 0xE5, 0x71, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* DS35Q2GA-IB */
	{ 0xE5, 0x72, 0x00, 4, 0x40, 2, 1024, 0x0C, 19, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* DS35M1GA-1B */
	{ 0xE5, 0x21, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* DS35M2GA-IB */
	{ 0xE5, 0x22, 0x00, 4, 0x40, 2, 1024, 0x0C, 19, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* DS35Q1GB-IB */
	{ 0xE5, 0xF1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status6 },
	/* DS35Q2GB-IB */
	{ 0xE5, 0xF2, 0x00, 4, 0x40, 2, 1024, 0x0C, 19, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status6 },
	/* DS35Q4GM */
	{ 0xE5, 0xF4, 0x00, 4, 0x40, 2, 2048, 0x0C, 20, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status6 },
	/* DS35M1GB-IB */
	{ 0xE5, 0xA1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status6 },

	/* EM73C044VCC-H */
	{ 0xD5, 0x22, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* EM73D044VCE-H */
	{ 0xD5, 0x20, 0x00, 4, 0x40, 1, 2048, 0x0C, 19, 0x8, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* EM73E044SNA-G */
	{ 0xD5, 0x03, 0x00, 8, 0x40, 1, 2048, 0x4C, 20, 0x8, 1, { 0x04, 0x28, 0x08, 0x2C }, &sfc_nand_get_ecc_status0 },
	/* EM73C044VCF-H */
	{ 0xD5, 0x25, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },

	/* XT26G02A */
	{ 0x0B, 0xE2, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x8, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status4 },
	/* XT26G01A */
	{ 0x0B, 0xE1, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x8, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status4 },
	/* XT26G04A */
	{ 0x0B, 0xE3, 0x00, 4, 0x80, 1, 2048, 0x4C, 20, 0x8, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status4 },
	/* XT26G01B */
	{ 0x0B, 0xF1, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x8, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status4 },
	/* XT26G02B */
	{ 0x0B, 0xF2, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x4, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status5 },
	/* XT26G01C */
	{ 0x0B, 0x11, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x8, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status7 },
	/* XT26G02C */
	{ 0x0B, 0x12, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x8, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status7 },
	/* XT26G04C */
	{ 0x0B, 0x13, 0x00, 8, 0x40, 1, 2048, 0x4C, 20, 0x8, 1, { 0x04, 0x08, 0x0C, 0x10 }, &sfc_nand_get_ecc_status7 },
	/* XT26G11C */
	{ 0x0B, 0x15, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x8, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },

	/* MT29F2G01ABA, XT26G02E, F50L2G41XA */
	{ 0x2C, 0x24, 0x00, 4, 0x40, 2, 1024, 0x4C, 19, 0x8, 0, { 0x20, 0x24, 0xFF, 0xFF }, &sfc_nand_get_ecc_status6 },
	/* MT29F1G01ABA, F50L1G41XA */
	{ 0x2C, 0x14, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x8, 0, { 0x20, 0x24, 0xFF, 0xFF }, &sfc_nand_get_ecc_status6 },

	/* FM25S01 */
	{ 0xA1, 0xA1, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 0, { 0x00, 0x04, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* FM25S01A */
	{ 0xA1, 0xE4, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* FM25S02A */
	{ 0xA1, 0xE5, 0x00, 4, 0x40, 2, 1024, 0x4C, 19, 0x1, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* FM25LS01 */
	{ 0xA1, 0xA5, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 0, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },

	/* IS37SML01G1 */
	{ 0xC8, 0x21, 0x00, 4, 0x40, 1, 1024, 0x00, 18, 0x1, 0, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* F50L1G41LB */
	{ 0xC8, 0x01, 0x00, 4, 0x40, 1, 1024, 0x4C, 18, 0x1, 0, { 0x14, 0x24, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* ATO25D1GA */
	{ 0x9B, 0x12, 0x00, 4, 0x40, 1, 1024, 0x40, 18, 0x1, 1, { 0x14, 0x24, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* BWJX08K-2Gb */
	{ 0xBC, 0xB3, 0x00, 4, 0x40, 1, 2048, 0x4C, 19, 0x8, 1, { 0x04, 0x10, 0xFF, 0xFF }, &sfc_nand_get_ecc_status0 },
	/* JS28U1GQSCAHG-83 */
	{ 0xBF, 0x21, 0x00, 4, 0x40, 1, 1024, 0x40, 18, 0x4, 1, { 0x08, 0x0C, 0xFF, 0xFF }, &sfc_nand_get_ecc_status8 },
	/* SGM7000I-S24W1GH */
	{ 0xEA, 0xC1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x04, 0x08, 0xFF, 0xFF }, &sfc_nand_get_ecc_status1 },
	/* TX25G01 */
	{ 0xA1, 0xF1, 0x00, 4, 0x40, 1, 1024, 0x0C, 18, 0x4, 1, { 0x04, 0x14, 0xFF, 0xFF }, &sfc_nand_get_ecc_status8 },
	/* S35ML02G3 */
	{ 0x01, 0x25, 0x00, 4, 0x40, 2, 1024, 0x4C, 19, 0x4, 1, { 0x04, 0x08, 0x0C, 0x10 }, &sfc_nand_get_ecc_status9 },
	/* S35ML04G3 */
	{ 0x01, 0x35, 0x00, 4, 0x40, 2, 2048, 0x4C, 20, 0x4, 1, { 0x04, 0x08, 0x0C, 0x10 }, &sfc_nand_get_ecc_status9 },
};

static struct nand_info *p_nand_info;
static u32 *gp_page_buf;
static struct SFNAND_DEV sfc_nand_dev;

static struct nand_info *sfc_nand_get_info(u8 *nand_id)
{
	u32 i;

	for (i = 0; i < ARRAY_SIZE(spi_nand_tbl); i++) {
		if (spi_nand_tbl[i].id0 == nand_id[0] &&
		    spi_nand_tbl[i].id1 == nand_id[1]) {
			if (spi_nand_tbl[i].id2 &&
			    spi_nand_tbl[i].id2 != nand_id[2])
				continue;

			return &spi_nand_tbl[i];
		}
	}

	return NULL;
}

static int sfc_nand_write_en(void)
{
	int ret;
	struct rk_sfc_op op;

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = CMD_WRITE_EN;

	op.sfctrl.d32 = 0;

	ret = sfc_request(&op, 0, NULL, 0);
	return ret;
}
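/*
 * Editor's note: sfc_nand_write_en() above issues the WRITE ENABLE command
 * that precedes every feature write, block erase and page program in this
 * file (see sfc_nand_write_feature(), sfc_nand_erase_block() and
 * sfc_nand_prog_page_raw()).
 */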

static int sfc_nand_rw_preset(void)
{
	int ret;
	struct rk_sfc_op op;

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = 0xff;
	op.sfcmd.b.cs = 2;

	op.sfctrl.d32 = 0;
	op.sfctrl.b.datalines = 2;
	op.sfctrl.b.cmdlines = 2;
	op.sfctrl.b.addrlines = 2;

	ret = sfc_request(&op, 0, NULL, 0);
	return ret;
}

static int sfc_nand_read_feature(u8 addr, u8 *data)
{
	int ret;
	struct rk_sfc_op op;

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = 0x0F;
	op.sfcmd.b.addrbits = SFC_ADDR_XBITS;

	op.sfctrl.d32 = 0;
	op.sfctrl.b.addrbits = 8;

	*data = 0;

	ret = sfc_request(&op, addr, data, 1);

	if (ret != SFC_OK)
		return ret;

	return SFC_OK;
}

static int sfc_nand_write_feature(u32 addr, u8 status)
{
	int ret;
	struct rk_sfc_op op;

	sfc_nand_write_en();

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = 0x1F;
	op.sfcmd.b.addrbits = SFC_ADDR_XBITS;
	op.sfcmd.b.rw = SFC_WRITE;

	op.sfctrl.d32 = 0;
	op.sfctrl.b.addrbits = 8;

	ret = sfc_request(&op, addr, &status, 1);

	if (ret != SFC_OK)
		return ret;

	return ret;
}

static int sfc_nand_wait_busy(u8 *data, int timeout)
{
	int ret;
	int i;
	u8 status;

	*data = 0;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return ret;

		*data = status;

		if (!(status & (1 << 0)))
			return SFC_OK;

		sfc_delay(1);
	}

	return SFC_NAND_WAIT_TIME_OUT;
}

/*
 * ecc default:
 * ecc bits: 0xC0[4,5]
 * 0b00, No bit errors were detected;
 * 0b01, Bit errors were detected and corrected;
 * 0b10, Multiple bit errors were detected and not corrected;
 * 0b11, Bit errors were detected and corrected, and the bit error count
 *       reached the bit flip detection threshold.
 */
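/*
 * Worked example (editor's illustration of the decode below): if the 0xC0
 * feature read returns 0x10, bits [5:4] are 0b01 and the page is reported
 * as SFC_NAND_ECC_OK; 0x20 (0b10) becomes SFC_NAND_ECC_ERROR and 0x30
 * (0b11) becomes SFC_NAND_ECC_REFRESH.
 */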
static u32 sfc_nand_get_ecc_status0(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x03;

	if (ecc <= 1)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 2)
		ret = (u32)SFC_NAND_ECC_ERROR;
	else
		ret = SFC_NAND_ECC_REFRESH;

	return ret;
}

/*
 * ecc special type1:
 * ecc bits: 0xC0[4,5]
 * 0b00, No bit errors were detected;
 * 0b01, Bit errors were detected and corrected; the bit error count
 *       may have reached the bit flip detection threshold;
 * 0b10, Multiple bit errors were detected and not corrected;
 * 0b11, Reserved.
 */
static u32 sfc_nand_get_ecc_status1(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x03;

	if (ecc == 0)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 1)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type2:
 * ecc bits: 0xC0[4,5] 0xF0[4,5]
 * [0b0000, 0b0011], No bit errors were detected;
 * [0b0100, 0b0111], Bit errors were detected and corrected; the bit flip
 *                   detection threshold was not reached;
 * [0b1000, 0b1011], Multiple bit errors were detected and not corrected;
 * [0b1100, 0b1111], Reserved.
 */
static u32 sfc_nand_get_ecc_status2(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status, status1;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		ret = sfc_nand_read_feature(0xF0, &status1);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x03;
	ecc = (ecc << 2) | ((status1 >> 4) & 0x03);

	if (ecc < 7)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 7)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type3:
 * ecc bits: 0xC0[4,5] 0xF0[4,5]
 * [0b0000, 0b0011], No bit errors were detected;
 * [0b0100, 0b0111], Bit errors were detected and corrected; the bit flip
 *                   detection threshold was not reached;
 * [0b1000, 0b1011], Multiple bit errors were detected and not corrected;
 * [0b1100, 0b1111], Bit error count equals the bit flip detection
 *                   threshold.
 */
static u32 sfc_nand_get_ecc_status3(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status, status1;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		ret = sfc_nand_read_feature(0xF0, &status1);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x03;
	ecc = (ecc << 2) | ((status1 >> 4) & 0x03);

	if (ecc < 7)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 7 || ecc >= 12)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type4:
 * ecc bits: 0xC0[2,5]
 * [0b0000], No bit errors were detected;
 * [0b0001, 0b0111], Bit errors were detected and corrected; the bit flip
 *                   detection threshold was not reached;
 * [0b1000], Multiple bit errors were detected and not corrected;
 * [0b1100], Bit error count equals the bit flip detection threshold;
 * else, Reserved.
 */
static u32 sfc_nand_get_ecc_status4(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 2) & 0x0f;

	if (ecc < 7)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 7 || ecc == 12)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type5:
 * ecc bits: 0xC0[4,6]
 * [0b000], No bit errors were detected;
 * [0b001, 0b011], Bit errors were detected and corrected; the bit flip
 *                 detection threshold was not reached;
 * [0b100], Bit error count equals the bit flip detection threshold;
 * [0b101, 0b110], Reserved;
 * [0b111], Multiple bit errors were detected and not corrected.
 */
static u32 sfc_nand_get_ecc_status5(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x07;

	if (ecc < 4)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 4)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type6:
 * ecc bits: 0xC0[4,6]
 * [0b000], No bit errors were detected;
 * [0b001], 1-3 bit errors were detected and corrected; the bit flip
 *          detection threshold was not reached;
 * [0b010], Multiple bit errors were detected and not corrected;
 * [0b011], 4-6 bit errors were detected and corrected; the bit flip
 *          detection threshold was not reached;
 * [0b101], Bit error count equals the bit flip detection threshold;
 * others, Reserved.
 */
static u32 sfc_nand_get_ecc_status6(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x07;

	if (ecc == 0 || ecc == 1 || ecc == 3)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 5)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type7:
 * ecc bits: 0xC0[4,7]
 * [0b0000], No bit errors were detected;
 * [0b0001, 0b0111], 1-7 bit errors were detected and corrected; the bit
 *                   flip detection threshold was not reached;
 * [0b1000], 8 bit errors were detected and corrected; the bit error count
 *           equals the bit flip detection threshold;
 * [0b1111], Bit errors exceeded the ECC capability (8 bits) and were not
 *           corrected;
 * others, Reserved.
 */
static u32 sfc_nand_get_ecc_status7(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0xf;

	if (ecc < 7)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 7 || ecc == 8)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type8:
 * ecc bits: 0xC0[4,6]
 * [0b000], No bit errors were detected;
 * [0b001, 0b011], 1-3 bit errors were detected and corrected; the bit
 *                 flip detection threshold was not reached;
 * [0b100], Bit error count equals the bit flip detection threshold;
 * others, Reserved.
 */
static u32 sfc_nand_get_ecc_status8(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x07;

	if (ecc < 4)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 4)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

/*
 * ecc special type9:
 * ecc bits: 0xC0[4,5]
 * 0b00, No bit errors were detected;
 * 0b01, 1-2 bit errors were detected and corrected;
 * 0b10, 3-4 bit errors were detected and corrected;
 * 0b11, treated as uncorrectable.
 */
static u32 sfc_nand_get_ecc_status9(void)
{
	u32 ret;
	u32 i;
	u8 ecc;
	u8 status;
	u32 timeout = 1000 * 1000;

	for (i = 0; i < timeout; i++) {
		ret = sfc_nand_read_feature(0xC0, &status);

		if (ret != SFC_OK)
			return SFC_NAND_ECC_ERROR;

		if (!(status & (1 << 0)))
			break;

		sfc_delay(1);
	}

	ecc = (status >> 4) & 0x03;

	if (ecc <= 1)
		ret = SFC_NAND_ECC_OK;
	else if (ecc == 2)
		ret = SFC_NAND_ECC_REFRESH;
	else
		ret = (u32)SFC_NAND_ECC_ERROR;

	return ret;
}

u32 sfc_nand_erase_block(u8 cs, u32 addr)
{
	int ret;
	struct rk_sfc_op op;
	u8 status;

	rkflash_print_dio("%s %x\n", __func__, addr);
	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = 0xd8;
	op.sfcmd.b.addrbits = SFC_ADDR_24BITS;
	op.sfcmd.b.rw = SFC_WRITE;

	op.sfctrl.d32 = 0;

	sfc_nand_write_en();
	ret = sfc_request(&op, addr, NULL, 0);

	if (ret != SFC_OK)
		return ret;

	ret = sfc_nand_wait_busy(&status, 1000 * 1000);

	if (status & (1 << 2))
		return SFC_NAND_PROG_ERASE_ERROR;

	return ret;
}

static u32 sfc_nand_read_cache(u32 row, u32 *p_page_buf, u32 column, u32 len)
{
	int ret;
	u32 plane;
	struct rk_sfc_op op;

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = sfc_nand_dev.page_read_cmd;
	op.sfcmd.b.addrbits = SFC_ADDR_XBITS;
	op.sfcmd.b.dummybits = 8;

	op.sfctrl.d32 = 0;
	op.sfctrl.b.datalines = sfc_nand_dev.read_lines;
	op.sfctrl.b.addrbits = 16;

	plane = p_nand_info->plane_per_die == 2 ? ((row >> 6) & 0x1) << 12 : 0;

	ret = sfc_request(&op, plane | column, p_page_buf, len);
	if (ret != SFC_OK)
		return SFC_NAND_HW_ERROR;

	return ret;
}

u32 sfc_nand_prog_page_raw(u8 cs, u32 addr, u32 *p_page_buf)
{
	int ret;
	u32 plane;
	struct rk_sfc_op op;
	u8 status;
	u32 page_size = SFC_NAND_SECTOR_FULL_SIZE * p_nand_info->sec_per_page;
	u32 data_area_size = SFC_NAND_SECTOR_SIZE * p_nand_info->sec_per_page;

	rkflash_print_dio("%s %x %x\n", __func__, addr, p_page_buf[0]);
	sfc_nand_write_en();

	if (sfc_nand_dev.prog_lines == DATA_LINES_X4 &&
	    p_nand_info->feature & FEA_SOFT_QOP_BIT &&
	    sfc_get_version() < SFC_VER_3)
		sfc_nand_rw_preset();

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = sfc_nand_dev.page_prog_cmd;
	op.sfcmd.b.addrbits = SFC_ADDR_XBITS;
	op.sfcmd.b.rw = SFC_WRITE;

	op.sfctrl.d32 = 0;
	op.sfctrl.b.datalines = sfc_nand_dev.prog_lines;
	op.sfctrl.b.addrbits = 16;
	plane = p_nand_info->plane_per_die == 2 ? ((addr >> 6) & 0x1) << 12 : 0;
	sfc_request(&op, plane, p_page_buf, page_size);

	/*
	 * If power is lost or the device is running in a harsh environment,
	 * the flash may be in an unknown state and bit flips can occur in the
	 * cache. When the cache recheck detects this, wait a second for a
	 * stable hardware environment and re-load the cache, to avoid writing
	 * corrupted data to the flash array.
	 */
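	/*
	 * Editor's note: the recheck below compares only the data area
	 * (data_area_size bytes); the spare bytes loaded into the cache above
	 * are not re-verified before the program-execute (0x10) command is
	 * issued.
	 */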
	if (p_nand_info->id0 == MID_GIGADEV) {
		sfc_nand_read_cache(addr, (u32 *)sfc_nand_dev.recheck_buffer, 0, data_area_size);
		if (memcmp(sfc_nand_dev.recheck_buffer, p_page_buf, data_area_size)) {
			rkflash_print_error("%s cache bitflip1\n", __func__);
			msleep(1000);
			sfc_request(&op, plane, p_page_buf, page_size);
		}
	}

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = 0x10;
	op.sfcmd.b.addrbits = SFC_ADDR_24BITS;
	op.sfcmd.b.rw = SFC_WRITE;

	op.sfctrl.d32 = 0;
	ret = sfc_request(&op, addr, p_page_buf, 0);

	if (ret != SFC_OK)
		return ret;

	ret = sfc_nand_wait_busy(&status, 1000 * 1000);
	if (status & (1 << 3))
		return SFC_NAND_PROG_ERASE_ERROR;

	return ret;
}

u32 sfc_nand_prog_page(u8 cs, u32 addr, u32 *p_data, u32 *p_spare)
{
	int ret;
	u32 sec_per_page = p_nand_info->sec_per_page;
	u32 data_size = sec_per_page * SFC_NAND_SECTOR_SIZE;
	struct nand_mega_area *meta = &p_nand_info->meta;

	memcpy(gp_page_buf, p_data, data_size);
	memset(&gp_page_buf[data_size / 4], 0xff, sec_per_page * 16);
	gp_page_buf[(data_size + meta->off0) / 4] = p_spare[0];
	gp_page_buf[(data_size + meta->off1) / 4] = p_spare[1];

	if (sec_per_page == 8) {
		gp_page_buf[(data_size + meta->off2) / 4] = p_spare[2];
		gp_page_buf[(data_size + meta->off3) / 4] = p_spare[3];
	}

	ret = sfc_nand_prog_page_raw(cs, addr, gp_page_buf);

	return ret;
}

u32 sfc_nand_read(u32 row, u32 *p_page_buf, u32 column, u32 len)
{
	int ret;
	u32 plane;
	struct rk_sfc_op op;
	u32 ecc_result;
	u8 status;

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = 0x13;
	op.sfcmd.b.rw = SFC_WRITE;
	op.sfcmd.b.addrbits = SFC_ADDR_24BITS;

	op.sfctrl.d32 = 0;

	sfc_request(&op, row, p_page_buf, 0);

	if (sfc_nand_dev.read_lines == DATA_LINES_X4 &&
	    p_nand_info->feature & FEA_SOFT_QOP_BIT &&
	    sfc_get_version() < SFC_VER_3)
		sfc_nand_rw_preset();

	sfc_nand_wait_busy(&status, 1000 * 1000);
	ecc_result = p_nand_info->ecc_status();

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = sfc_nand_dev.page_read_cmd;
	op.sfcmd.b.addrbits = SFC_ADDR_XBITS;
	op.sfcmd.b.dummybits = 8;

	op.sfctrl.d32 = 0;
	op.sfctrl.b.datalines = sfc_nand_dev.read_lines;
	op.sfctrl.b.addrbits = 16;

	plane = p_nand_info->plane_per_die == 2 ? ((row >> 6) & 0x1) << 12 : 0;
	ret = sfc_request(&op, plane | column, p_page_buf, len);
	rkflash_print_dio("%s %x %x\n", __func__, row, p_page_buf[0]);

	if (ret != SFC_OK)
		return SFC_NAND_HW_ERROR;

	return ecc_result;
}

u32 sfc_nand_read_page_raw(u8 cs, u32 addr, u32 *p_page_buf)
{
	u32 page_size = SFC_NAND_SECTOR_FULL_SIZE * p_nand_info->sec_per_page;

	return sfc_nand_read(addr, p_page_buf, 0, page_size);
}

u32 sfc_nand_read_page(u8 cs, u32 addr, u32 *p_data, u32 *p_spare)
{
	u32 ret;
	u32 sec_per_page = p_nand_info->sec_per_page;
	u32 data_size = sec_per_page * SFC_NAND_SECTOR_SIZE;
	struct nand_mega_area *meta = &p_nand_info->meta;
	int retries = 0;

retry:
	ret = sfc_nand_read_page_raw(cs, addr, gp_page_buf);
	memcpy(p_data, gp_page_buf, data_size);
	p_spare[0] = gp_page_buf[(data_size + meta->off0) / 4];
	p_spare[1] = gp_page_buf[(data_size + meta->off1) / 4];

	if (p_nand_info->sec_per_page == 8) {
		p_spare[2] = gp_page_buf[(data_size + meta->off2) / 4];
		p_spare[3] = gp_page_buf[(data_size + meta->off3) / 4];
	}

	if (ret == SFC_NAND_HW_ERROR)
		ret = SFC_NAND_ECC_ERROR;

	if (ret != SFC_NAND_ECC_OK) {
		rkflash_print_error("%s[0x%x], ret=0x%x\n", __func__, addr, ret);

		if (p_data)
			rkflash_print_hex("data:", p_data, 4, 8);

		if (p_spare)
			rkflash_print_hex("spare:", p_spare, 4, 2);
		if (ret == SFC_NAND_ECC_ERROR && retries < 1) {
			retries++;
			goto retry;
		}
	}

	return ret;
}

u32 sfc_nand_check_bad_block(u8 cs, u32 addr)
{
	u32 ret;
	u32 data_size = p_nand_info->sec_per_page * SFC_NAND_SECTOR_SIZE;
	u32 marker = 0;

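	/*
	 * Editor's note: this reads the two bytes directly after the data
	 * area, i.e. the first spare bytes of the page, which carry the
	 * factory bad-block marker; anything other than 0xFFFF marks the
	 * block bad below.
	 */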
	ret = sfc_nand_read(addr, &marker, data_size, 2);

	/* unify with mtd framework */
	if (ret == SFC_NAND_ECC_ERROR || (u16)marker != 0xffff)
		rkflash_print_error("%s page= %x ret= %x spare= %x\n",
				    __func__, addr, ret, marker);

	/* Original bad block */
	if ((u16)marker != 0xffff)
		return true;

	return false;
}

u32 sfc_nand_mark_bad_block(u8 cs, u32 addr)
{
	u32 ret;
	u32 data_size = p_nand_info->sec_per_page * SFC_NAND_SECTOR_SIZE;

	ret = sfc_nand_read_page_raw(cs, addr, gp_page_buf);

	if (ret)
		return SFC_NAND_HW_ERROR;

	gp_page_buf[data_size / 4] = 0x0;
	ret = sfc_nand_prog_page_raw(cs, addr, gp_page_buf);

	if (ret)
		return SFC_NAND_HW_ERROR;

	return ret;
}

int sfc_nand_read_id(u8 *data)
{
	int ret;
	struct rk_sfc_op op;

	op.sfcmd.d32 = 0;
	op.sfcmd.b.cmd = CMD_READ_JEDECID;
	op.sfcmd.b.addrbits = SFC_ADDR_XBITS;

	op.sfctrl.d32 = 0;
	op.sfctrl.b.addrbits = 8;

	ret = sfc_request(&op, 0, data, 3);

	return ret;
}

#if defined(CONFIG_RK_SFTL)
/*
 * Scan the first page of each physical block; if the bad-block marker
 * checked by sfc_nand_check_bad_block() is not all 0xFF, record the
 * block as bad.
 */
static int sfc_nand_get_bad_block_list(u16 *table, u32 die)
{
	u32 bad_cnt, page;
	u32 blk_per_die;
	u16 blk;

	rkflash_print_info("%s\n", __func__);

	bad_cnt = 0;
	blk_per_die = p_nand_info->plane_per_die *
		      p_nand_info->blk_per_plane;

	for (blk = 0; blk < blk_per_die; blk++) {
		page = (blk + blk_per_die * die) *
		       p_nand_info->page_per_blk;

		if (sfc_nand_check_bad_block(die, page)) {
			table[bad_cnt++] = blk;
			rkflash_print_error("die[%d], bad_blk[%d]\n", die, blk);
		}
	}

	return (int)bad_cnt;
}

void sfc_nand_ftl_ops_init(void)
{
	/* para init */
	g_nand_phy_info.nand_type = 1;
	g_nand_phy_info.die_num = 1;
	g_nand_phy_info.plane_per_die = p_nand_info->plane_per_die;
	g_nand_phy_info.blk_per_plane = p_nand_info->blk_per_plane;
	g_nand_phy_info.page_per_blk = p_nand_info->page_per_blk;
	g_nand_phy_info.page_per_slc_blk = p_nand_info->page_per_blk;
	g_nand_phy_info.byte_per_sec = SFC_NAND_SECTOR_SIZE;
	g_nand_phy_info.sec_per_page = p_nand_info->sec_per_page;
	g_nand_phy_info.sec_per_blk = p_nand_info->sec_per_page *
				      p_nand_info->page_per_blk;
	g_nand_phy_info.reserved_blk = 8;
	g_nand_phy_info.blk_per_die = p_nand_info->plane_per_die *
				      p_nand_info->blk_per_plane;
	g_nand_phy_info.ecc_bits = p_nand_info->max_ecc_bits;

	/* driver register */
	g_nand_ops.get_bad_blk_list = sfc_nand_get_bad_block_list;
	g_nand_ops.erase_blk = sfc_nand_erase_block;
	g_nand_ops.prog_page = sfc_nand_prog_page;
	g_nand_ops.read_page = sfc_nand_read_page;
	g_nand_ops.bch_sel = NULL;
}
#endif

static int sfc_nand_enable_QE(void)
{
	int ret = SFC_OK;
	u8 status;

	ret = sfc_nand_read_feature(0xB0, &status);

	if (ret != SFC_OK)
		return ret;

	if (status & 1) /* is QE bit set */
		return SFC_OK;

	status |= 1;

	return sfc_nand_write_feature(0xB0, status);
}

u32 sfc_nand_init(void)
{
	u8 status, id_byte[8];

	sfc_nand_read_id(id_byte);
	rkflash_print_error("sfc_nand id: %x %x %x\n",
			    id_byte[0], id_byte[1], id_byte[2]);

	if (id_byte[0] == 0xFF || id_byte[0] == 0x00)
		return (u32)FTL_NO_FLASH;

	p_nand_info = sfc_nand_get_info(id_byte);

	if (!p_nand_info) {
		pr_err("The device is not supported yet!\n");

		return (u32)FTL_UNSUPPORTED_FLASH;
	}

	if (!gp_page_buf)
		gp_page_buf = (u32 *)__get_free_pages(GFP_KERNEL | GFP_DMA32, get_order(SFC_NAND_PAGE_MAX_SIZE));
	if (!gp_page_buf)
		return -ENOMEM;

	sfc_nand_dev.manufacturer = id_byte[0];
	sfc_nand_dev.mem_type = id_byte[1];
	sfc_nand_dev.capacity = p_nand_info->density;
	sfc_nand_dev.block_size = p_nand_info->page_per_blk * p_nand_info->sec_per_page;
	sfc_nand_dev.page_size = p_nand_info->sec_per_page;

	/* disable block lock */
	sfc_nand_write_feature(0xA0, 0);
	sfc_nand_dev.read_lines = DATA_LINES_X1;
	sfc_nand_dev.prog_lines = DATA_LINES_X1;
	sfc_nand_dev.page_read_cmd = 0x03;
	sfc_nand_dev.page_prog_cmd = 0x02;
	if (!sfc_nand_dev.recheck_buffer)
		sfc_nand_dev.recheck_buffer = (u8 *)__get_free_pages(GFP_KERNEL | GFP_DMA32, get_order(SFC_NAND_PAGE_MAX_SIZE));
	if (!sfc_nand_dev.recheck_buffer) {
		pr_err("%s recheck_buffer alloc failed\n", __func__);
		return -ENOMEM;
	}

	if (p_nand_info->feature & FEA_4BIT_READ) {
		if ((p_nand_info->has_qe_bits && sfc_nand_enable_QE() == SFC_OK) ||
		    !p_nand_info->has_qe_bits) {
			sfc_nand_dev.read_lines = DATA_LINES_X4;
			sfc_nand_dev.page_read_cmd = 0x6b;
		}
	}

	if (p_nand_info->feature & FEA_4BIT_PROG &&
	    sfc_nand_dev.read_lines == DATA_LINES_X4) {
		sfc_nand_dev.prog_lines = DATA_LINES_X4;
		sfc_nand_dev.page_prog_cmd = 0x32;
	}

	sfc_nand_read_feature(0xA0, &status);
	rkflash_print_info("sfc_nand A0 = 0x%x\n", status);
	sfc_nand_read_feature(0xB0, &status);
	rkflash_print_info("sfc_nand B0 = 0x%x\n", status);
	rkflash_print_info("read_lines = %x\n", sfc_nand_dev.read_lines);
	rkflash_print_info("prog_lines = %x\n", sfc_nand_dev.prog_lines);
	rkflash_print_info("page_read_cmd = %x\n", sfc_nand_dev.page_read_cmd);
	rkflash_print_info("page_prog_cmd = %x\n", sfc_nand_dev.page_prog_cmd);

	return SFC_OK;
}

void sfc_nand_deinit(void)
{
	/* to-do */
	free_pages((unsigned long)sfc_nand_dev.recheck_buffer, get_order(SFC_NAND_PAGE_MAX_SIZE));
	free_pages((unsigned long)gp_page_buf, get_order(SFC_NAND_PAGE_MAX_SIZE));
}

struct SFNAND_DEV *sfc_nand_get_private_dev(void)
{
	return &sfc_nand_dev;
}

struct nand_info *sfc_nand_get_nand_info(void)
{
	return p_nand_info;
}