xref: /rk3399_ARM-atf/drivers/nxp/ifc/nand/ifc_nand.c (revision 28279cf2c141caf5e4e7156f874cde6f5a0d271b)
/*
 * Copyright 2022 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <string.h>

#include <common/debug.h>
#include <drivers/io/io_block.h>
#include "ifc.h"
#include <lib/xlat_tables/xlat_tables_v2.h>
#include <nxp_timer.h>

/* Private structure for NAND driver data */
static struct nand_info nand_drv_data;

static int update_bbt(uint32_t idx, uint32_t blk, uint32_t *updated,
		struct nand_info *nand);

static int nand_wait(struct nand_info *nand)
{
	int timeout = 1;
	uint32_t neesr;
	unsigned long start_time;

	start_time = get_timer_val(0);

	while (get_timer_val(start_time) < NAND_TIMEOUT_MS) {
		/* clear the OPC event */
		neesr = read_reg(nand, NAND_EVTER_STAT);
		if (neesr & NAND_EVTER_STAT_OPC_DN) {
			write_reg(nand, NAND_EVTER_STAT, neesr);
			timeout = 0;

			/* check for other errors */
			if (neesr & NAND_EVTER_STAT_FTOER) {
				ERROR("%s NAND_EVTER_STAT_FTOER occurs\n",
						__func__);
				return -1;
			} else if (neesr & NAND_EVTER_STAT_ECCER) {
				ERROR("%s NAND_EVTER_STAT_ECCER occurs\n",
						__func__);
				return -1;
			} else if (neesr & NAND_EVTER_STAT_DQSER) {
				ERROR("%s NAND_EVTER_STAT_DQSER occurs\n",
						__func__);
				return -1;
			}

			break;
		}
	}

	if (timeout) {
		ERROR("%s ERROR_NAND_TIMEOUT occurs\n", __func__);
		return -1;
	}

	return 0;
}

static uint32_t nand_get_port_size(struct nand_info *nand)
{
	uint32_t port_size = U(0);
	uint32_t cs_reg;
	uint32_t cur_cs;

	cur_cs = U(0);
	cs_reg = CSPR(cur_cs);
	port_size = (read_reg(nand, cs_reg) & CSPR_PS) >> CSPR_PS_SHIFT;
	switch (port_size) {
	case CSPR_PS_8:
		port_size = U(8);
		break;
	case CSPR_PS_16:
		port_size = U(16);
		break;
	case CSPR_PS_32:
		port_size = U(32);
		break;
	default:
		port_size = U(8);
	}

	return port_size;
}

static uint32_t nand_get_page_size(struct nand_info *nand)
{
	uint32_t pg_size;
	uint32_t cs_reg;
	uint32_t cur_cs;

	cur_cs = 0;
	cs_reg = CSOR(cur_cs);
	pg_size = read_reg(nand, cs_reg) & CSOR_NAND_PGS;
	switch (pg_size) {
	case CSOR_NAND_PGS_2K:
		pg_size = U(2048);
		break;
	case CSOR_NAND_PGS_4K:
		pg_size = U(4096);
		break;
	case CSOR_NAND_PGS_8K:
		pg_size = U(8192);
		break;
	case CSOR_NAND_PGS_16K:
		pg_size = U(16384);
		break;
	default:
		pg_size = U(512);
	}

	return pg_size;
}

static uint32_t nand_get_pages_per_blk(struct nand_info *nand)
{
	uint32_t pages_per_blk;
	uint32_t cs_reg;
	uint32_t cur_cs;

	cur_cs = 0;
	cs_reg = CSOR(cur_cs);
	pages_per_blk = (read_reg(nand, cs_reg) & CSOR_NAND_PB);
	switch (pages_per_blk) {
	case CSOR_NAND_PB_32:
		pages_per_blk = U(32);
		break;
	case CSOR_NAND_PB_64:
		pages_per_blk = U(64);
		break;
	case CSOR_NAND_PB_128:
		pages_per_blk = U(128);
		break;
	case CSOR_NAND_PB_256:
		pages_per_blk = U(256);
		break;
	case CSOR_NAND_PB_512:
		pages_per_blk = U(512);
		break;
	case CSOR_NAND_PB_1024:
		pages_per_blk = U(1024);
		break;
	case CSOR_NAND_PB_2048:
		pages_per_blk = U(2048);
		break;
	default:
		pages_per_blk = U(0);
	}

	return pages_per_blk;
}

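/*
 * Width, in bits, of the page index within a block (i.e. log2 of the
 * pages-per-block setting). Row addresses are later formed as
 * (block_index << pi_width) | page_index.
 */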
static uint32_t get_page_index_width(uint32_t ppb)
{
	switch (ppb) {
	case CSOR_NAND_PPB_32:
		return U(5);
	case CSOR_NAND_PPB_64:
		return U(6);
	case CSOR_NAND_PPB_128:
		return U(7);
	case CSOR_NAND_PPB_256:
		return U(8);
	case CSOR_NAND_PPB_512:
		return U(9);
	case CSOR_NAND_PPB_1024:
		return U(10);
	case CSOR_NAND_PPB_2048:
		return U(11);
	default:
		return U(5);
	}
}

static void nand_get_params(struct nand_info *nand)
{
	nand->port_size = nand_get_port_size(nand);

	nand->page_size = nand_get_page_size(nand);

	/*
	 * Set bad marker location for LP / SP
	 * Small Page : 8 Bit  : 0x5
	 * Small Page : 16 Bit : 0xa
	 * Large Page : 8/16 Bit : 0x0
	 */
	nand->bad_marker_loc = (nand->page_size == 512) ?
				((nand->port_size == 8) ? 0x5 : 0xa) : 0;

	/* check whether the device is ONFI compliant */
	nand->onfi_dev_flag =
	   (read_reg(nand, NAND_EVTER_STAT) & NAND_EVTER_STAT_BBI_SRCH_SEL)
	   ? 1 : 0;

	/* NAND blocks searched so far (incremental bad block search) */
	nand->bbs = 0;

	/* pages per block */
	nand->ppb = nand_get_pages_per_blk(nand);

	/* block size */
	nand->blk_size = nand->page_size * nand->ppb;

	/* width of the page index within a block */
	nand->pi_width = get_page_index_width(nand->ppb);

	/* bad block table init */
	nand->lgb = 0;
	nand->bbt_max = 0;
	nand->bzero_good = 0;
	memset(nand->bbt, EMPTY_VAL, BBT_SIZE * sizeof(nand->bbt[0]));
}

static int nand_init(struct nand_info *nand)
{
	uint32_t ncfgr = 0;

	/* Get NAND parameters from IFC */
	nand_get_params(nand);

	/* Clear all errors */
	write_reg(nand, NAND_EVTER_STAT, U(0xffffffff));

	/*
	 * Disable autoboot in NCFGR. Mapping will change from
	 * physical to logical for the SRAM buffer.
	 */
	ncfgr = read_reg(nand, NCFGR);
	write_reg(nand, NCFGR, (ncfgr & ~NCFGR_BOOT));

	return 0;
}

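/*
 * Read byte_cnt bytes (or a full page when byte_cnt is 0) from the page
 * selected by row_add/col_add into 'data', using the IFC NAND machine:
 * program the byte count, row and column, the flash command (FCR) and
 * instruction (FIR) registers, start the sequence, wait for completion,
 * then copy the result out of the IFC internal SRAM buffer mapped at
 * ifc_region_addr. main_spare selects the main or spare area.
 */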
static int nand_read_data(
		uintptr_t ifc_region_addr,
		uint32_t row_add,
		uint32_t col_add,
		uint32_t byte_cnt,
		uint8_t *data,
		uint32_t main_spare,
		struct nand_info *nand)
{
	uint32_t page_size_add_bits = U(0);
	uint32_t page_add_in_actual, page_add;
	uintptr_t sram_addr_calc;
	int ret;
	uint32_t col_val;

	/* Program the MS bit to select the main or spare area. */
	col_val = (main_spare << NAND_COL_MS_SHIFT) | col_add;

	write_reg(nand, NAND_BC, byte_cnt);

	write_reg(nand, ROW0, row_add);
	write_reg(nand, COL0, col_val);

	/* Program FCR for small page */
	if (nand->page_size == U(512)) {
		if (byte_cnt == 0 ||
			(byte_cnt != 0 && main_spare == 0 && col_add <= 255)) {
			write_reg(nand, NAND_FCR0,
				  (NAND_CMD_READ0 << FCR_CMD0_SHIFT));
		} else if (main_spare == 0) {
			write_reg(nand, NAND_FCR0,
				  (NAND_CMD_READ1 << FCR_CMD0_SHIFT));
		} else {
			write_reg(nand, NAND_FCR0,
				  (NAND_CMD_READOOB << FCR_CMD0_SHIFT));
		}

	} else {
		/* Program FCR for large page */
		write_reg(nand, NAND_FCR0, (NAND_CMD_READ0 << FCR_CMD0_SHIFT) |
			  (NAND_CMD_READSTART << FCR_CMD1_SHIFT));
	}
	if (nand->page_size == U(512)) {
		write_reg(nand, NAND_FIR0, ((FIR_OP_CW0 << FIR_OP0_SHIFT) |
					  (FIR_OP_CA0 << FIR_OP1_SHIFT) |
					  (FIR_OP_RA0 << FIR_OP2_SHIFT) |
					  (FIR_OP_BTRD << FIR_OP3_SHIFT) |
					  (FIR_OP_NOP << FIR_OP4_SHIFT)));
		write_reg(nand, NAND_FIR1, U(0x00000000));
	} else {
		write_reg(nand, NAND_FIR0, ((FIR_OP_CW0 << FIR_OP0_SHIFT) |
					 (FIR_OP_CA0 << FIR_OP1_SHIFT) |
					 (FIR_OP_RA0 << FIR_OP2_SHIFT) |
					 (FIR_OP_CMD1 << FIR_OP3_SHIFT) |
					 (FIR_OP_BTRD << FIR_OP4_SHIFT)));

		write_reg(nand, NAND_FIR1, (FIR_OP_NOP << FIR_OP5_SHIFT));
	}
	write_reg(nand, NANDSEQ_STRT, NAND_SEQ_STRT_FIR_STRT);

	ret = nand_wait(nand);
	if (ret != 0)
		return ret;

	/*
	 * Calculate page_size_add_bits, i.e. the number of bits in the
	 * SRAM address that correspond to the area within a page.
	 */
	if (nand->page_size == U(512))
		page_size_add_bits = U(10);
	else if (nand->page_size == U(2048))
		page_size_add_bits = U(12);
	else if (nand->page_size == U(4096))
		page_size_add_bits = U(13);
	else if (nand->page_size == U(8192))
		page_size_add_bits = U(14);
	else if (nand->page_size == U(16384))
		page_size_add_bits = U(15);

	page_add = row_add;

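	/*
	 * Only the low 16 bits of the shifted page address are used to
	 * index the IFC SRAM buffer.
	 */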
	page_add_in_actual = (page_add << page_size_add_bits) & U(0x0000FFFF);

	if (byte_cnt == 0)
		col_add = U(0);

	/* Calculate SRAM address for main and spare area */
	if (main_spare == 0)
		sram_addr_calc = ifc_region_addr | page_add_in_actual | col_add;
	else
		sram_addr_calc = ifc_region_addr | page_add_in_actual |
				 (col_add + nand->page_size);

	/* Depending on byte_cnt, copy a full or partial page from SRAM */
	if (byte_cnt == 0)
		memcpy(data, (void *)sram_addr_calc,
			nand->page_size);
	else
		memcpy(data, (void *)sram_addr_calc, byte_cnt);

	return 0;
}

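/*
 * Read 'size' bytes starting at NAND byte offset 'src_addr' into 'dst'.
 * The logical block is remapped to a physical block by skipping entries
 * in the bad block table, and the data is then read page by page
 * (handling a partial first and last page).
 */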
static int nand_read(struct nand_info *nand, int32_t src_addr,
		uintptr_t dst, uint32_t size)
{
	uint32_t log_blk = U(0);
	uint32_t pg_no = U(0);
	uint32_t col_off = U(0);
	uint32_t row_off = U(0);
	uint32_t byte_cnt = U(0);
	uint32_t read_cnt = U(0);
	uint32_t i = U(0);
	uint32_t updated = U(0);

	int ret = 0;
	uint8_t *out = (uint8_t *)dst;

	uint32_t pblk;

	/* loop until all requested bytes have been read */
	while (size) {
		log_blk = (src_addr / nand->blk_size);
		pg_no = ((src_addr - (log_blk * nand->blk_size)) /
					 nand->page_size);
		pblk = log_blk;

		/* iterate over the bbt to find the physical block */
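		/*
		 * bbt[] holds bad physical block numbers in ascending order;
		 * every bad block at or below the current candidate pushes
		 * the physical block index (pblk) up by one.
		 */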
		for (i = 0; i <= nand->bbt_max; i++) {
			if (nand->bbt[i] == EMPTY_VAL_CHECK) {
				ret = update_bbt(i, pblk, &updated, nand);

				if (ret != 0)
					return ret;
				/*
				 * if the table was not updated, there are no
				 * more bad blocks to account for
				 */
				if (!updated)
					break;
			}

			if (pblk < nand->bbt[i])
				break;
			else if (pblk >= nand->bbt[i])
				pblk++;
		}

		col_off = (src_addr % nand->page_size);
		if (col_off) {
			if ((col_off + size) < nand->page_size)
				byte_cnt = size;
			else
				byte_cnt = nand->page_size - col_off;

			row_off = (pblk << nand->pi_width) | pg_no;

			ret = nand_read_data(
					nand->ifc_region_addr,
					row_off,
					col_off,
					byte_cnt, out, MAIN, nand);

			if (ret != 0)
				return ret;
		} else {
			/*
			 * Full page / partial page:
			 * read_cnt == 0 requests a full page from
			 * nand_read_data, otherwise a partial page.
			 */
			if (size < nand->page_size) {
				byte_cnt = size;
				read_cnt = size;
			} else {
				byte_cnt = nand->page_size;
				read_cnt = 0;
			}
			row_off = (pblk << nand->pi_width) | pg_no;

			ret = nand_read_data(
					nand->ifc_region_addr,
					row_off,
					0,
					read_cnt, out, MAIN, nand);

			if (ret != 0) {
				ERROR("Error from nand_read_data %d\n", ret);
				return ret;
			}
		}
		src_addr += byte_cnt;
		out += byte_cnt;
		size -= byte_cnt;
	}
	return 0;
}

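/*
 * Check whether 'blk' is a good block by reading the factory bad block
 * marker from the spare area: page 0 first, then either the last page of
 * the block (ONFI) or page 1 (non-ONFI). *gb is set to GOOD_BLK or
 * BAD_BLK.
 */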
static int isgoodblock(uint32_t blk, uint32_t *gb, struct nand_info *nand)
{
	uint8_t buf[2];
	int ret;
	uint32_t row_add;

	*gb = 0;

	/* read the bad block marker from the spare area of page 0 of blk */
	ret = nand_read_data(
			nand->ifc_region_addr,
			blk << nand->pi_width,
			nand->bad_marker_loc,
			0x2, buf, SPARE, nand);

	if (ret != 0)
		return ret;

	/*
	 * For ONFI devices check page 0 and the last page of the block for
	 * the bad marker; for non-ONFI devices check pages 0 and 1.
	 */
	row_add = (blk << nand->pi_width);
	if (nand->port_size == 8) {
		/* port size is 8-bit */
		/* check if page 0 has 0xff */
		if (buf[0] == 0xff) {
			/* check second page (non-ONFI) or last page (ONFI) */
			if (nand->onfi_dev_flag)
				ret = nand_read_data(
						nand->ifc_region_addr,
						row_add | (nand->ppb - 1),
						nand->bad_marker_loc,
						0x2, buf, SPARE, nand);
			else
				ret = nand_read_data(
						nand->ifc_region_addr,
						row_add | 1,
						nand->bad_marker_loc,
						0x2, buf, SPARE, nand);

			if (ret != 0)
				return ret;

			if (buf[0] == 0xff)
				*gb = GOOD_BLK;
			else
				*gb = BAD_BLK;
		} else {
			/* no, so it is a bad blk */
			*gb = BAD_BLK;
		}
	} else {
		/* port size is 16-bit */
		/* check if page 0 has 0xffff */
		if ((buf[0] == 0xff) &&
			(buf[1] == 0xff)) {
			/* check second page (non-ONFI) or last page (ONFI) */
			if (nand->onfi_dev_flag) {
				ret = nand_read_data(
						nand->ifc_region_addr,
						row_add | (nand->ppb - 1),
						nand->bad_marker_loc,
						0x2, buf, SPARE, nand);
			} else {
				ret = nand_read_data(
						nand->ifc_region_addr,
						row_add | 1,
						nand->bad_marker_loc,
						0x2, buf, SPARE, nand);
			}

			if (ret != 0)
				return ret;

			if ((buf[0] == 0xff) &&
				(buf[1] == 0xff)) {
				*gb = GOOD_BLK;
			} else {
				*gb = BAD_BLK;
			}
		} else {
			/* no, so it is a bad blk */
			*gb = BAD_BLK;
		}
	}
	return 0;
}

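/*
 * Incrementally extend the bad block table: scan physical blocks from the
 * last scanned good block (nand->lgb) up to 'blk', appending any bad
 * blocks found at bbt[idx] onwards. *updated tells the caller whether new
 * entries were added.
 */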
static int update_bbt(uint32_t idx, uint32_t blk,
			   uint32_t *updated, struct nand_info *nand)
{
	uint32_t sblk;
	uint32_t lgb;
	int ret;

	if (nand->bzero_good && blk == 0)
		return 0;

	/* special case for lgb == 0 */
	/* if blk <= lgb, return */
	if (nand->lgb != 0 && blk <= nand->lgb)
		return 0;

	*updated = 0;

	/*
	 * if blk is more than lgb, iterate from lgb until a good block
	 * is found for blk
	 */
	if (nand->lgb < blk)
		sblk = nand->lgb;
	else
		/* this is when lgb = 0 */
		sblk = blk;

	lgb = nand->lgb;

	/* scan blocks until the requested block maps to a good one */
	while (1) {
		while (lgb <= sblk) {
			uint32_t gb = 0;

			ret = isgoodblock(lgb, &gb, nand);
			if (ret != 0)
				return ret;

			/* special case: if block 0 is good, set this flag */
			if (lgb == 0 && gb == GOOD_BLK)
				nand->bzero_good = 1;

			if (gb == BAD_BLK) {
				if (idx >= BBT_SIZE) {
					ERROR("NAND BBT Table full\n");
					return -1;
				}
				*updated = 1;
				nand->bbt[idx] = lgb;
				idx++;
				blk++;
				sblk++;
				if (idx > nand->bbt_max)
					nand->bbt_max = idx;
			}
			lgb++;
		}
		/* the requested block has been reached */
		if (sblk == blk) {
			/* good block found, update lgb */
			nand->lgb = blk;
			break;
		}
		sblk++;
	}

	return 0;
}

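/*
 * io_block read hook: 'lba' is in units of the NAND page size, since
 * ifc_nand_init() sets the io_block block_size to the page size.
 * Returns the number of bytes read, or 0 on error.
 */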
static size_t ifc_nand_read(int lba, uintptr_t buf, size_t size)
{
	int ret;
	uint32_t page_size;
	uint32_t src_addr;
	struct nand_info *nand = &nand_drv_data;

	page_size = nand_get_page_size(nand);
	src_addr = lba * page_size;
	ret = nand_read(nand, src_addr, buf, size);
	return ret ? 0 : size;
}

static struct io_block_dev_spec ifc_nand_spec = {
	.buffer = {
		.offset = 0,
		.length = 0,
	},
	.ops = {
		.read = ifc_nand_read,
	},
	/*
	 * Default block size assumed as 2K.
	 * Updated to the actual page size in ifc_nand_init().
	 */
	.block_size = UL(2048),
};

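/*
 * Initialise the NAND driver state from the IFC configuration and return,
 * via block_dev_spec, a pointer to the io_block device specification
 * describing it. The returned spec is intended to be registered with the
 * io_block layer by the platform's IO setup code; the exact registration
 * flow is platform-specific.
 */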
int ifc_nand_init(uintptr_t *block_dev_spec,
			uintptr_t ifc_region_addr,
			uintptr_t ifc_register_addr,
			size_t ifc_sram_size,
			uintptr_t ifc_nand_blk_offset,
			size_t ifc_nand_blk_size)
{
	struct nand_info *nand = NULL;
	int ret;

	nand = &nand_drv_data;
	memset(nand, 0, sizeof(struct nand_info));

	nand->ifc_region_addr = ifc_region_addr;
	nand->ifc_register_addr = ifc_register_addr;

	VERBOSE("nand_init\n");
	ret = nand_init(nand);
	if (ret) {
		ERROR("nand init failed\n");
		return ret;
	}

	ifc_nand_spec.buffer.offset = ifc_nand_blk_offset;
	ifc_nand_spec.buffer.length = ifc_nand_blk_size;

	ifc_nand_spec.block_size = nand_get_page_size(nand);

	VERBOSE("Page size is %ld\n", ifc_nand_spec.block_size);

	*block_dev_spec = (uintptr_t)&ifc_nand_spec;

	/* Add the NAND SRAM buffer to the XLAT table */
	mmap_add_region(ifc_region_addr, ifc_region_addr,
			ifc_sram_size, MT_DEVICE | MT_RW);

	return 0;
}
659