xref: /OK3568_Linux_fs/kernel/drivers/spi/spi-tegra20-sflash.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * SPI driver for Nvidia's Tegra20 Serial Flash Controller.
 *
 * Copyright (c) 2012, NVIDIA CORPORATION.  All rights reserved.
 *
 * Author: Laxman Dewangan <ldewangan@nvidia.com>
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/reset.h>
#include <linux/spi/spi.h>

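/* Register offsets and bit definitions of the SFLASH controller */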
#define SPI_COMMAND				0x000
#define SPI_GO					BIT(30)
#define SPI_M_S					BIT(28)
#define SPI_ACTIVE_SCLK_MASK			(0x3 << 26)
#define SPI_ACTIVE_SCLK_DRIVE_LOW		(0 << 26)
#define SPI_ACTIVE_SCLK_DRIVE_HIGH		(1 << 26)
#define SPI_ACTIVE_SCLK_PULL_LOW		(2 << 26)
#define SPI_ACTIVE_SCLK_PULL_HIGH		(3 << 26)

#define SPI_CK_SDA_FALLING			(1 << 21)
#define SPI_CK_SDA_RISING			(0 << 21)
#define SPI_CK_SDA_MASK				(1 << 21)
#define SPI_ACTIVE_SDA				(0x3 << 18)
#define SPI_ACTIVE_SDA_DRIVE_LOW		(0 << 18)
#define SPI_ACTIVE_SDA_DRIVE_HIGH		(1 << 18)
#define SPI_ACTIVE_SDA_PULL_LOW			(2 << 18)
#define SPI_ACTIVE_SDA_PULL_HIGH		(3 << 18)

#define SPI_CS_POL_INVERT			BIT(16)
#define SPI_TX_EN				BIT(15)
#define SPI_RX_EN				BIT(14)
#define SPI_CS_VAL_HIGH				BIT(13)
#define SPI_CS_VAL_LOW				0x0
#define SPI_CS_SW				BIT(12)
#define SPI_CS_HW				0x0
#define SPI_CS_DELAY_MASK			(7 << 9)
#define SPI_CS3_EN				BIT(8)
#define SPI_CS2_EN				BIT(7)
#define SPI_CS1_EN				BIT(6)
#define SPI_CS0_EN				BIT(5)

#define SPI_CS_MASK			(SPI_CS3_EN | SPI_CS2_EN |	\
					SPI_CS1_EN | SPI_CS0_EN)
#define SPI_BIT_LENGTH(x)		(((x) & 0x1f) << 0)

#define SPI_MODES			(SPI_ACTIVE_SCLK_MASK | SPI_CK_SDA_MASK)

#define SPI_STATUS			0x004
#define SPI_BSY				BIT(31)
#define SPI_RDY				BIT(30)
#define SPI_TXF_FLUSH			BIT(29)
#define SPI_RXF_FLUSH			BIT(28)
#define SPI_RX_UNF			BIT(27)
#define SPI_TX_OVF			BIT(26)
#define SPI_RXF_EMPTY			BIT(25)
#define SPI_RXF_FULL			BIT(24)
#define SPI_TXF_EMPTY			BIT(23)
#define SPI_TXF_FULL			BIT(22)
#define SPI_BLK_CNT(count)		(((count) & 0xffff) + 1)

#define SPI_FIFO_ERROR			(SPI_RX_UNF | SPI_TX_OVF)
#define SPI_FIFO_EMPTY			(SPI_TXF_EMPTY | SPI_RXF_EMPTY)

#define SPI_RX_CMP			0x8
#define SPI_DMA_CTL			0x0C
#define SPI_DMA_EN			BIT(31)
#define SPI_IE_RXC			BIT(27)
#define SPI_IE_TXC			BIT(26)
#define SPI_PACKED			BIT(20)
#define SPI_RX_TRIG_MASK		(0x3 << 18)
#define SPI_RX_TRIG_1W			(0x0 << 18)
#define SPI_RX_TRIG_4W			(0x1 << 18)
#define SPI_TX_TRIG_MASK		(0x3 << 16)
#define SPI_TX_TRIG_1W			(0x0 << 16)
#define SPI_TX_TRIG_4W			(0x1 << 16)
#define SPI_DMA_BLK_COUNT(count)	(((count) - 1) & 0xFFFF)

#define SPI_TX_FIFO			0x10
#define SPI_RX_FIFO			0x20

#define DATA_DIR_TX			(1 << 0)
#define DATA_DIR_RX			(1 << 1)

#define MAX_CHIP_SELECT			4
#define SPI_FIFO_DEPTH			4
#define SPI_DMA_TIMEOUT			(msecs_to_jiffies(1000))

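/* Per-controller driver state, allocated along with the SPI master */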
struct tegra_sflash_data {
	struct device				*dev;
	struct spi_master			*master;
	spinlock_t				lock;

	struct clk				*clk;
	struct reset_control			*rst;
	void __iomem				*base;
	unsigned				irq;
	u32					cur_speed;

	struct spi_device			*cur_spi;
	unsigned				cur_pos;
	unsigned				cur_len;
	unsigned				bytes_per_word;
	unsigned				cur_direction;
	unsigned				curr_xfer_words;

	unsigned				cur_rx_pos;
	unsigned				cur_tx_pos;

	u32					tx_status;
	u32					rx_status;
	u32					status_reg;

	u32					def_command_reg;
	u32					command_reg;
	u32					dma_control_reg;

	struct completion			xfer_completion;
	struct spi_transfer			*curr_xfer;
};

static int tegra_sflash_runtime_suspend(struct device *dev);
static int tegra_sflash_runtime_resume(struct device *dev);

static inline u32 tegra_sflash_readl(struct tegra_sflash_data *tsd,
		unsigned long reg)
{
	return readl(tsd->base + reg);
}

static inline void tegra_sflash_writel(struct tegra_sflash_data *tsd,
		u32 val, unsigned long reg)
{
	writel(val, tsd->base + reg);
}

static void tegra_sflash_clear_status(struct tegra_sflash_data *tsd)
{
	/* Write 1 to clear status register */
	tegra_sflash_writel(tsd, SPI_RDY | SPI_FIFO_ERROR, SPI_STATUS);
}

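/*
 * Work out how many FIFO words the next chunk of transfer @t needs,
 * capped at the hardware FIFO depth, and cache the result in @tsd.
 */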
static unsigned tegra_sflash_calculate_curr_xfer_param(
	struct spi_device *spi, struct tegra_sflash_data *tsd,
	struct spi_transfer *t)
{
	unsigned remain_len = t->len - tsd->cur_pos;
	unsigned max_word;

	tsd->bytes_per_word = DIV_ROUND_UP(t->bits_per_word, 8);
	max_word = remain_len / tsd->bytes_per_word;
	if (max_word > SPI_FIFO_DEPTH)
		max_word = SPI_FIFO_DEPTH;
	tsd->curr_xfer_words = max_word;
	return max_word;
}

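/*
 * Pack bytes from the client TX buffer into 32-bit FIFO words
 * (bytes_per_word bytes each) and write them to the TX FIFO until the
 * current chunk is queued or the FIFO fills up.
 */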
static unsigned tegra_sflash_fill_tx_fifo_from_client_txbuf(
	struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	unsigned nbytes;
	u32 status;
	unsigned max_n_32bit = tsd->curr_xfer_words;
	u8 *tx_buf = (u8 *)t->tx_buf + tsd->cur_tx_pos;

	if (max_n_32bit > SPI_FIFO_DEPTH)
		max_n_32bit = SPI_FIFO_DEPTH;
	nbytes = max_n_32bit * tsd->bytes_per_word;

	status = tegra_sflash_readl(tsd, SPI_STATUS);
	while (!(status & SPI_TXF_FULL)) {
		int i;
		u32 x = 0;

		for (i = 0; nbytes && (i < tsd->bytes_per_word);
							i++, nbytes--)
			x |= (u32)(*tx_buf++) << (i * 8);
		tegra_sflash_writel(tsd, x, SPI_TX_FIFO);
		if (!nbytes)
			break;

		status = tegra_sflash_readl(tsd, SPI_STATUS);
	}
	tsd->cur_tx_pos += max_n_32bit * tsd->bytes_per_word;
	return max_n_32bit;
}

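/*
 * Drain the RX FIFO into the client RX buffer, unpacking each 32-bit
 * FIFO word into bytes_per_word bytes.
 */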
static int tegra_sflash_read_rx_fifo_to_client_rxbuf(
		struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	u32 status;
	unsigned int read_words = 0;
	u8 *rx_buf = (u8 *)t->rx_buf + tsd->cur_rx_pos;

	status = tegra_sflash_readl(tsd, SPI_STATUS);
	while (!(status & SPI_RXF_EMPTY)) {
		int i;
		u32 x = tegra_sflash_readl(tsd, SPI_RX_FIFO);

		for (i = 0; (i < tsd->bytes_per_word); i++)
			*rx_buf++ = (x >> (i*8)) & 0xFF;
		read_words++;
		status = tegra_sflash_readl(tsd, SPI_STATUS);
	}
	tsd->cur_rx_pos += read_words * tsd->bytes_per_word;
	return 0;
}

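/*
 * Start an interrupt-driven PIO transfer: enable the TX/RX completion
 * interrupts, preload the TX FIFO if transmitting, program the block
 * count and set SPI_DMA_EN to kick off the transfer.
 */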
static int tegra_sflash_start_cpu_based_transfer(
		struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	u32 val = 0;
	unsigned cur_words;

	if (tsd->cur_direction & DATA_DIR_TX)
		val |= SPI_IE_TXC;

	if (tsd->cur_direction & DATA_DIR_RX)
		val |= SPI_IE_RXC;

	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	tsd->dma_control_reg = val;

	if (tsd->cur_direction & DATA_DIR_TX)
		cur_words = tegra_sflash_fill_tx_fifo_from_client_txbuf(tsd, t);
	else
		cur_words = tsd->curr_xfer_words;
	val |= SPI_DMA_BLK_COUNT(cur_words);
	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	tsd->dma_control_reg = val;
	val |= SPI_DMA_EN;
	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	return 0;
}

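/*
 * Set up the clock rate, SPI mode and chip select for one spi_transfer
 * and start it. The first transfer of a message programs a full command
 * word; subsequent transfers only update bit length and data direction.
 */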
static int tegra_sflash_start_transfer_one(struct spi_device *spi,
		struct spi_transfer *t, bool is_first_of_msg,
		bool is_single_xfer)
{
	struct tegra_sflash_data *tsd = spi_master_get_devdata(spi->master);
	u32 speed;
	u32 command;

	speed = t->speed_hz;
	if (speed != tsd->cur_speed) {
		clk_set_rate(tsd->clk, speed);
		tsd->cur_speed = speed;
	}

	tsd->cur_spi = spi;
	tsd->cur_pos = 0;
	tsd->cur_rx_pos = 0;
	tsd->cur_tx_pos = 0;
	tsd->curr_xfer = t;
	tegra_sflash_calculate_curr_xfer_param(spi, tsd, t);
	if (is_first_of_msg) {
		command = tsd->def_command_reg;
		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
		command |= SPI_CS_VAL_HIGH;

		command &= ~SPI_MODES;
		if (spi->mode & SPI_CPHA)
			command |= SPI_CK_SDA_FALLING;

		if (spi->mode & SPI_CPOL)
			command |= SPI_ACTIVE_SCLK_DRIVE_HIGH;
		else
			command |= SPI_ACTIVE_SCLK_DRIVE_LOW;
		command |= SPI_CS0_EN << spi->chip_select;
	} else {
		command = tsd->command_reg;
		command &= ~SPI_BIT_LENGTH(~0);
		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
		command &= ~(SPI_RX_EN | SPI_TX_EN);
	}

	tsd->cur_direction = 0;
	if (t->rx_buf) {
		command |= SPI_RX_EN;
		tsd->cur_direction |= DATA_DIR_RX;
	}
	if (t->tx_buf) {
		command |= SPI_TX_EN;
		tsd->cur_direction |= DATA_DIR_TX;
	}
	tegra_sflash_writel(tsd, command, SPI_COMMAND);
	tsd->command_reg = command;

	return tegra_sflash_start_cpu_based_transfer(tsd, t);
}

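/*
 * spi_master transfer_one_message() callback: run the transfers of a
 * message one by one, waiting for the interrupt handler to signal
 * completion, then finalize the message with its status.
 */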
static int tegra_sflash_transfer_one_message(struct spi_master *master,
			struct spi_message *msg)
{
	bool is_first_msg = true;
	int single_xfer;
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	struct spi_transfer *xfer;
	struct spi_device *spi = msg->spi;
	int ret;

	msg->status = 0;
	msg->actual_length = 0;
	single_xfer = list_is_singular(&msg->transfers);
	list_for_each_entry(xfer, &msg->transfers, transfer_list) {
		reinit_completion(&tsd->xfer_completion);
		ret = tegra_sflash_start_transfer_one(spi, xfer,
					is_first_msg, single_xfer);
		if (ret < 0) {
			dev_err(tsd->dev,
				"spi can not start transfer, err %d\n", ret);
			goto exit;
		}
		is_first_msg = false;
		ret = wait_for_completion_timeout(&tsd->xfer_completion,
						SPI_DMA_TIMEOUT);
		if (WARN_ON(ret == 0)) {
			dev_err(tsd->dev,
				"spi transfer timeout, err %d\n", ret);
			ret = -EIO;
			goto exit;
		}

		if (tsd->tx_status || tsd->rx_status) {
			dev_err(tsd->dev, "Error in Transfer\n");
			ret = -EIO;
			goto exit;
		}
		msg->actual_length += xfer->len;
		if (xfer->cs_change &&
		    (xfer->delay_usecs || xfer->delay.value)) {
			tegra_sflash_writel(tsd, tsd->def_command_reg,
					SPI_COMMAND);
			spi_transfer_delay_exec(xfer);
		}
	}
	ret = 0;
exit:
	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
	msg->status = ret;
	spi_finalize_current_message(master);
	return ret;
}

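/*
 * Interrupt-time continuation of a PIO transfer: on an error or busy
 * condition, reset the controller and complete with error status;
 * otherwise drain the RX FIFO and either finish the transfer or start
 * the next chunk.
 */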
static irqreturn_t handle_cpu_based_xfer(struct tegra_sflash_data *tsd)
{
	struct spi_transfer *t = tsd->curr_xfer;

	spin_lock(&tsd->lock);
	if (tsd->tx_status || tsd->rx_status || (tsd->status_reg & SPI_BSY)) {
		dev_err(tsd->dev,
			"CpuXfer ERROR bit set 0x%x\n", tsd->status_reg);
		dev_err(tsd->dev,
			"CpuXfer 0x%08x:0x%08x\n", tsd->command_reg,
				tsd->dma_control_reg);
		reset_control_assert(tsd->rst);
		udelay(2);
		reset_control_deassert(tsd->rst);
		complete(&tsd->xfer_completion);
		goto exit;
	}

	if (tsd->cur_direction & DATA_DIR_RX)
		tegra_sflash_read_rx_fifo_to_client_rxbuf(tsd, t);

	if (tsd->cur_direction & DATA_DIR_TX)
		tsd->cur_pos = tsd->cur_tx_pos;
	else
		tsd->cur_pos = tsd->cur_rx_pos;

	if (tsd->cur_pos == t->len) {
		complete(&tsd->xfer_completion);
		goto exit;
	}

	tegra_sflash_calculate_curr_xfer_param(tsd->cur_spi, tsd, t);
	tegra_sflash_start_cpu_based_transfer(tsd, t);
exit:
	spin_unlock(&tsd->lock);
	return IRQ_HANDLED;
}

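/*
 * Hard interrupt handler: latch the status register, record TX
 * overflow/RX underflow errors, clear the status bits and continue the
 * transfer in handle_cpu_based_xfer().
 */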
static irqreturn_t tegra_sflash_isr(int irq, void *context_data)
{
	struct tegra_sflash_data *tsd = context_data;

	tsd->status_reg = tegra_sflash_readl(tsd, SPI_STATUS);
	if (tsd->cur_direction & DATA_DIR_TX)
		tsd->tx_status = tsd->status_reg & SPI_TX_OVF;

	if (tsd->cur_direction & DATA_DIR_RX)
		tsd->rx_status = tsd->status_reg & SPI_RX_UNF;
	tegra_sflash_clear_status(tsd);

	return handle_cpu_based_xfer(tsd);
}

static const struct of_device_id tegra_sflash_of_match[] = {
	{ .compatible = "nvidia,tegra20-sflash", },
	{}
};
MODULE_DEVICE_TABLE(of, tegra_sflash_of_match);

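/*
 * Probe: allocate and populate the SPI master, map the register space,
 * request the interrupt, look up clock and reset controls, reset the
 * controller and register the master.
 */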
static int tegra_sflash_probe(struct platform_device *pdev)
{
	struct spi_master	*master;
	struct tegra_sflash_data	*tsd;
	int ret;
	const struct of_device_id *match;

	match = of_match_device(tegra_sflash_of_match, &pdev->dev);
	if (!match) {
		dev_err(&pdev->dev, "Error: No device match found\n");
		return -ENODEV;
	}

	master = spi_alloc_master(&pdev->dev, sizeof(*tsd));
	if (!master) {
		dev_err(&pdev->dev, "master allocation failed\n");
		return -ENOMEM;
	}

	/* the spi->mode bits understood by this driver: */
	master->mode_bits = SPI_CPOL | SPI_CPHA;
	master->transfer_one_message = tegra_sflash_transfer_one_message;
	master->auto_runtime_pm = true;
	master->num_chipselect = MAX_CHIP_SELECT;

	platform_set_drvdata(pdev, master);
	tsd = spi_master_get_devdata(master);
	tsd->master = master;
	tsd->dev = &pdev->dev;
	spin_lock_init(&tsd->lock);

	if (of_property_read_u32(tsd->dev->of_node, "spi-max-frequency",
				 &master->max_speed_hz))
		master->max_speed_hz = 25000000; /* 25MHz */

	tsd->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(tsd->base)) {
		ret = PTR_ERR(tsd->base);
		goto exit_free_master;
	}

	tsd->irq = platform_get_irq(pdev, 0);
	ret = request_irq(tsd->irq, tegra_sflash_isr, 0,
			dev_name(&pdev->dev), tsd);
	if (ret < 0) {
		dev_err(&pdev->dev, "Failed to register ISR for IRQ %d\n",
					tsd->irq);
		goto exit_free_master;
	}

	tsd->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(tsd->clk)) {
		dev_err(&pdev->dev, "can not get clock\n");
		ret = PTR_ERR(tsd->clk);
		goto exit_free_irq;
	}

	tsd->rst = devm_reset_control_get_exclusive(&pdev->dev, "spi");
	if (IS_ERR(tsd->rst)) {
		dev_err(&pdev->dev, "can not get reset\n");
		ret = PTR_ERR(tsd->rst);
		goto exit_free_irq;
	}

	init_completion(&tsd->xfer_completion);
	pm_runtime_enable(&pdev->dev);
	if (!pm_runtime_enabled(&pdev->dev)) {
		ret = tegra_sflash_runtime_resume(&pdev->dev);
		if (ret)
			goto exit_pm_disable;
	}

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "pm runtime get failed, e = %d\n", ret);
		pm_runtime_put_noidle(&pdev->dev);
		goto exit_pm_disable;
	}

	/* Reset controller */
	reset_control_assert(tsd->rst);
	udelay(2);
	reset_control_deassert(tsd->rst);

	tsd->def_command_reg = SPI_M_S | SPI_CS_SW;
	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
	pm_runtime_put(&pdev->dev);

	master->dev.of_node = pdev->dev.of_node;
	ret = devm_spi_register_master(&pdev->dev, master);
	if (ret < 0) {
		dev_err(&pdev->dev, "can not register to master err %d\n", ret);
		goto exit_pm_disable;
	}
	return ret;

exit_pm_disable:
	pm_runtime_disable(&pdev->dev);
	if (!pm_runtime_status_suspended(&pdev->dev))
		tegra_sflash_runtime_suspend(&pdev->dev);
exit_free_irq:
	free_irq(tsd->irq, tsd);
exit_free_master:
	spi_master_put(master);
	return ret;
}

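/* Undo probe: release the interrupt and tear down runtime PM */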
static int tegra_sflash_remove(struct platform_device *pdev)
{
	struct spi_master *master = platform_get_drvdata(pdev);
	struct tegra_sflash_data	*tsd = spi_master_get_devdata(master);

	free_irq(tsd->irq, tsd);

	pm_runtime_disable(&pdev->dev);
	if (!pm_runtime_status_suspended(&pdev->dev))
		tegra_sflash_runtime_suspend(&pdev->dev);

	return 0;
}

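/* System sleep: quiesce the SPI core and restore the command register */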
#ifdef CONFIG_PM_SLEEP
static int tegra_sflash_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);

	return spi_master_suspend(master);
}

static int tegra_sflash_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	int ret;

	ret = pm_runtime_get_sync(dev);
	if (ret < 0) {
		pm_runtime_put_noidle(dev);
		dev_err(dev, "pm runtime failed, e = %d\n", ret);
		return ret;
	}
	tegra_sflash_writel(tsd, tsd->command_reg, SPI_COMMAND);
	pm_runtime_put(dev);

	return spi_master_resume(master);
}
#endif

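/*
 * Runtime PM: gate the controller clock while idle; a dummy register
 * read flushes any posted writes before the clock is disabled.
 */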
static int tegra_sflash_runtime_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);

	/* Flush all writes still queued in the PPSB by reading back */
	tegra_sflash_readl(tsd, SPI_COMMAND);

	clk_disable_unprepare(tsd->clk);
	return 0;
}

static int tegra_sflash_runtime_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	int ret;

	ret = clk_prepare_enable(tsd->clk);
	if (ret < 0) {
		dev_err(tsd->dev, "clk_prepare failed: %d\n", ret);
		return ret;
	}
	return 0;
}

static const struct dev_pm_ops slink_pm_ops = {
	SET_RUNTIME_PM_OPS(tegra_sflash_runtime_suspend,
		tegra_sflash_runtime_resume, NULL)
	SET_SYSTEM_SLEEP_PM_OPS(tegra_sflash_suspend, tegra_sflash_resume)
};
static struct platform_driver tegra_sflash_driver = {
	.driver = {
		.name		= "spi-tegra-sflash",
		.pm		= &slink_pm_ops,
		.of_match_table	= tegra_sflash_of_match,
	},
	.probe =	tegra_sflash_probe,
	.remove =	tegra_sflash_remove,
};
module_platform_driver(tegra_sflash_driver);

MODULE_ALIAS("platform:spi-tegra-sflash");
MODULE_DESCRIPTION("NVIDIA Tegra20 Serial Flash Controller Driver");
MODULE_AUTHOR("Laxman Dewangan <ldewangan@nvidia.com>");
MODULE_LICENSE("GPL v2");