// SPDX-License-Identifier: GPL-2.0
//
// Driver for AT91 USART Controllers as SPI
//
// Copyright (C) 2018 Microchip Technology Inc.
//
// Author: Radu Pirea <radu.pirea@microchip.com>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-direction.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_platform.h>
#include <linux/of_gpio.h>
#include <linux/pinctrl/consumer.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <linux/spi/spi.h>

#define US_CR			0x00
#define US_MR			0x04
#define US_IER			0x08
#define US_IDR			0x0C
#define US_CSR			0x14
#define US_RHR			0x18
#define US_THR			0x1C
#define US_BRGR			0x20
#define US_VERSION		0xFC

#define US_CR_RSTRX		BIT(2)
#define US_CR_RSTTX		BIT(3)
#define US_CR_RXEN		BIT(4)
#define US_CR_RXDIS		BIT(5)
#define US_CR_TXEN		BIT(6)
#define US_CR_TXDIS		BIT(7)

#define US_MR_SPI_MASTER	0x0E
#define US_MR_CHRL		GENMASK(7, 6)
#define US_MR_CPHA		BIT(8)
#define US_MR_CPOL		BIT(16)
#define US_MR_CLKO		BIT(18)
#define US_MR_WRDBT		BIT(20)
#define US_MR_LOOP		BIT(15)

#define US_IR_RXRDY		BIT(0)
#define US_IR_TXRDY		BIT(1)
#define US_IR_OVRE		BIT(5)

#define US_BRGR_SIZE		BIT(16)

#define US_MIN_CLK_DIV		0x06
#define US_MAX_CLK_DIV		BIT(16)

#define US_RESET		(US_CR_RSTRX | US_CR_RSTTX)
#define US_DISABLE		(US_CR_RXDIS | US_CR_TXDIS)
#define US_ENABLE		(US_CR_RXEN | US_CR_TXEN)
#define US_OVRE_RXRDY_IRQS	(US_IR_OVRE | US_IR_RXRDY)

#define US_INIT \
	(US_MR_SPI_MASTER | US_MR_CHRL | US_MR_CLKO | US_MR_WRDBT)
#define US_DMA_MIN_BYTES	16
#define US_DMA_TIMEOUT		(msecs_to_jiffies(1000))

/* Register access macros */
#define at91_usart_spi_readl(port, reg) \
	readl_relaxed((port)->regs + US_##reg)
#define at91_usart_spi_writel(port, reg, value) \
	writel_relaxed((value), (port)->regs + US_##reg)

#define at91_usart_spi_readb(port, reg) \
	readb_relaxed((port)->regs + US_##reg)
#define at91_usart_spi_writeb(port, reg, value) \
	writeb_relaxed((value), (port)->regs + US_##reg)

struct at91_usart_spi {
	struct platform_device	*mpdev;
	struct spi_transfer	*current_transfer;
	void __iomem		*regs;
	struct device		*dev;
	struct clk		*clk;

	struct completion	xfer_completion;

	/* used in interrupt to protect data reading */
	spinlock_t		lock;

	phys_addr_t		phybase;

	int			irq;
	unsigned int		current_tx_remaining_bytes;
	unsigned int		current_rx_remaining_bytes;

	u32			spi_clk;
	u32			status;

	bool			xfer_failed;
	bool			use_dma;
};

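/*
 * DMA completion callback, run when the RX descriptor finishes: re-enable
 * the RXRDY interrupt for later PIO transfers, mark the receive side as
 * drained and wake up the waiter in at91_usart_spi_transfer_one().
 */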
static void dma_callback(void *data)
{
	struct spi_controller *ctlr = data;
	struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

	at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
	aus->current_rx_remaining_bytes = 0;
	complete(&aus->xfer_completion);
}

static bool at91_usart_spi_can_dma(struct spi_controller *ctrl,
				   struct spi_device *spi,
				   struct spi_transfer *xfer)
{
	struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);

	return aus->use_dma && xfer->len >= US_DMA_MIN_BYTES;
}

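/*
 * Request and configure the "tx" and "rx" DMA channels for single-byte
 * accesses to THR/RHR. On any failure the channels acquired so far are
 * released and an error is returned, leaving use_dma false.
 */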
static int at91_usart_spi_configure_dma(struct spi_controller *ctlr,
					struct at91_usart_spi *aus)
{
	struct dma_slave_config slave_config;
	struct device *dev = &aus->mpdev->dev;
	phys_addr_t phybase = aus->phybase;
	dma_cap_mask_t mask;
	int err = 0;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	ctlr->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR_OR_NULL(ctlr->dma_tx)) {
		if (IS_ERR(ctlr->dma_tx)) {
			err = PTR_ERR(ctlr->dma_tx);
			goto at91_usart_spi_error_clear;
		}

		dev_dbg(dev,
			"DMA TX channel not available, SPI unable to use DMA\n");
		err = -EBUSY;
		goto at91_usart_spi_error_clear;
	}

	ctlr->dma_rx = dma_request_chan(dev, "rx");
	if (IS_ERR_OR_NULL(ctlr->dma_rx)) {
		if (IS_ERR(ctlr->dma_rx)) {
			err = PTR_ERR(ctlr->dma_rx);
			goto at91_usart_spi_error;
		}

		dev_dbg(dev,
			"DMA RX channel not available, SPI unable to use DMA\n");
		err = -EBUSY;
		goto at91_usart_spi_error;
	}

	slave_config.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	slave_config.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	slave_config.dst_addr = (dma_addr_t)phybase + US_THR;
	slave_config.src_addr = (dma_addr_t)phybase + US_RHR;
	slave_config.src_maxburst = 1;
	slave_config.dst_maxburst = 1;
	slave_config.device_fc = false;

	slave_config.direction = DMA_DEV_TO_MEM;
	if (dmaengine_slave_config(ctlr->dma_rx, &slave_config)) {
		dev_err(&ctlr->dev,
			"failed to configure rx dma channel\n");
		err = -EINVAL;
		goto at91_usart_spi_error;
	}

	slave_config.direction = DMA_MEM_TO_DEV;
	if (dmaengine_slave_config(ctlr->dma_tx, &slave_config)) {
		dev_err(&ctlr->dev,
			"failed to configure tx dma channel\n");
		err = -EINVAL;
		goto at91_usart_spi_error;
	}

	aus->use_dma = true;
	return 0;

at91_usart_spi_error:
	if (!IS_ERR_OR_NULL(ctlr->dma_tx))
		dma_release_channel(ctlr->dma_tx);
	if (!IS_ERR_OR_NULL(ctlr->dma_rx))
		dma_release_channel(ctlr->dma_rx);
	ctlr->dma_tx = NULL;
	ctlr->dma_rx = NULL;

at91_usart_spi_error_clear:
	return err;
}

static void at91_usart_spi_release_dma(struct spi_controller *ctlr)
{
	if (ctlr->dma_rx)
		dma_release_channel(ctlr->dma_rx);
	if (ctlr->dma_tx)
		dma_release_channel(ctlr->dma_tx);
}

static void at91_usart_spi_stop_dma(struct spi_controller *ctlr)
{
	if (ctlr->dma_rx)
		dmaengine_terminate_all(ctlr->dma_rx);
	if (ctlr->dma_tx)
		dmaengine_terminate_all(ctlr->dma_tx);
}

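/*
 * Queue one spi_transfer on the RX and TX DMA channels. RXRDY is masked
 * while the DMA engine owns the data registers; if preparation or
 * submission fails, the interrupt is unmasked again so the caller can
 * fall back to PIO.
 */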
static int at91_usart_spi_dma_transfer(struct spi_controller *ctlr,
				       struct spi_transfer *xfer)
{
	struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
	struct dma_chan *rxchan = ctlr->dma_rx;
	struct dma_chan *txchan = ctlr->dma_tx;
	struct dma_async_tx_descriptor *rxdesc;
	struct dma_async_tx_descriptor *txdesc;
	dma_cookie_t cookie;

	/* Disable RX interrupt */
	at91_usart_spi_writel(aus, IDR, US_IR_RXRDY);

	rxdesc = dmaengine_prep_slave_sg(rxchan,
					 xfer->rx_sg.sgl,
					 xfer->rx_sg.nents,
					 DMA_DEV_TO_MEM,
					 DMA_PREP_INTERRUPT |
					 DMA_CTRL_ACK);
	if (!rxdesc)
		goto at91_usart_spi_err_dma;

	txdesc = dmaengine_prep_slave_sg(txchan,
					 xfer->tx_sg.sgl,
					 xfer->tx_sg.nents,
					 DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT |
					 DMA_CTRL_ACK);
	if (!txdesc)
		goto at91_usart_spi_err_dma;

	rxdesc->callback = dma_callback;
	rxdesc->callback_param = ctlr;

	cookie = rxdesc->tx_submit(rxdesc);
	if (dma_submit_error(cookie))
		goto at91_usart_spi_err_dma;

	cookie = txdesc->tx_submit(txdesc);
	if (dma_submit_error(cookie))
		goto at91_usart_spi_err_dma;

	rxchan->device->device_issue_pending(rxchan);
	txchan->device->device_issue_pending(txchan);

	return 0;

at91_usart_spi_err_dma:
	/* Enable RX interrupt if something fails and fallback to PIO */
	at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
	at91_usart_spi_stop_dma(ctlr);

	return -ENOMEM;
}

static unsigned long at91_usart_spi_dma_timeout(struct at91_usart_spi *aus)
{
	return wait_for_completion_timeout(&aus->xfer_completion,
					   US_DMA_TIMEOUT);
}

static inline u32 at91_usart_spi_tx_ready(struct at91_usart_spi *aus)
{
	return aus->status & US_IR_TXRDY;
}

static inline u32 at91_usart_spi_rx_ready(struct at91_usart_spi *aus)
{
	return aus->status & US_IR_RXRDY;
}

static inline u32 at91_usart_spi_check_overrun(struct at91_usart_spi *aus)
{
	return aus->status & US_IR_OVRE;
}

static inline u32 at91_usart_spi_read_status(struct at91_usart_spi *aus)
{
	aus->status = at91_usart_spi_readl(aus, CSR);
	return aus->status;
}

static inline void at91_usart_spi_tx(struct at91_usart_spi *aus)
{
	unsigned int len = aus->current_transfer->len;
	unsigned int remaining = aus->current_tx_remaining_bytes;
	const u8 *tx_buf = aus->current_transfer->tx_buf;

	if (!remaining)
		return;

	if (at91_usart_spi_tx_ready(aus)) {
		at91_usart_spi_writeb(aus, THR, tx_buf[len - remaining]);
		aus->current_tx_remaining_bytes--;
	}
}

static inline void at91_usart_spi_rx(struct at91_usart_spi *aus)
{
	int len = aus->current_transfer->len;
	int remaining = aus->current_rx_remaining_bytes;
	u8 *rx_buf = aus->current_transfer->rx_buf;

	if (!remaining)
		return;

	rx_buf[len - remaining] = at91_usart_spi_readb(aus, RHR);
	aus->current_rx_remaining_bytes--;
}

static inline void
at91_usart_spi_set_xfer_speed(struct at91_usart_spi *aus,
			      struct spi_transfer *xfer)
{
	at91_usart_spi_writel(aus, BRGR,
			      DIV_ROUND_UP(aus->spi_clk, xfer->speed_hz));
}

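/*
 * PIO-mode interrupt handler: latch the channel status, flag the transfer
 * as failed on a receive overrun, otherwise read one byte from RHR.
 */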
static irqreturn_t at91_usart_spi_interrupt(int irq, void *dev_id)
{
	struct spi_controller *controller = dev_id;
	struct at91_usart_spi *aus = spi_master_get_devdata(controller);

	spin_lock(&aus->lock);
	at91_usart_spi_read_status(aus);

	if (at91_usart_spi_check_overrun(aus)) {
		aus->xfer_failed = true;
		at91_usart_spi_writel(aus, IDR, US_IR_OVRE | US_IR_RXRDY);
		spin_unlock(&aus->lock);
		return IRQ_HANDLED;
	}

	if (at91_usart_spi_rx_ready(aus)) {
		at91_usart_spi_rx(aus);
		spin_unlock(&aus->lock);
		return IRQ_HANDLED;
	}

	spin_unlock(&aus->lock);

	return IRQ_NONE;
}

static int at91_usart_spi_setup(struct spi_device *spi)
{
	struct at91_usart_spi *aus = spi_master_get_devdata(spi->controller);
	u32 *ausd = spi->controller_state;
	unsigned int mr = at91_usart_spi_readl(aus, MR);

	if (spi->mode & SPI_CPOL)
		mr |= US_MR_CPOL;
	else
		mr &= ~US_MR_CPOL;

	if (spi->mode & SPI_CPHA)
		mr |= US_MR_CPHA;
	else
		mr &= ~US_MR_CPHA;

	if (spi->mode & SPI_LOOP)
		mr |= US_MR_LOOP;
	else
		mr &= ~US_MR_LOOP;

	if (!ausd) {
		ausd = kzalloc(sizeof(*ausd), GFP_KERNEL);
		if (!ausd)
			return -ENOMEM;

		spi->controller_state = ausd;
	}

	*ausd = mr;

	dev_dbg(&spi->dev,
		"setup: bpw %u mode 0x%x -> mr %d %08x\n",
		spi->bits_per_word, spi->mode, spi->chip_select, mr);

	return 0;
}

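/*
 * Per-transfer handler. When DMA channels are available, transfers of at
 * least US_DMA_MIN_BYTES are handed to the DMA engine and awaited with a
 * timeout; shorter transfers, and any transfer after a DMA setup failure,
 * are clocked out byte by byte from the TXRDY/RXRDY status bits.
 */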
static int at91_usart_spi_transfer_one(struct spi_controller *ctlr,
				       struct spi_device *spi,
				       struct spi_transfer *xfer)
{
	struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
	unsigned long dma_timeout = 0;
	int ret = 0;

	at91_usart_spi_set_xfer_speed(aus, xfer);
	aus->xfer_failed = false;
	aus->current_transfer = xfer;
	aus->current_tx_remaining_bytes = xfer->len;
	aus->current_rx_remaining_bytes = xfer->len;

	while ((aus->current_tx_remaining_bytes ||
		aus->current_rx_remaining_bytes) && !aus->xfer_failed) {
		reinit_completion(&aus->xfer_completion);
		if (at91_usart_spi_can_dma(ctlr, spi, xfer) &&
		    !ret) {
			ret = at91_usart_spi_dma_transfer(ctlr, xfer);
			if (ret)
				continue;

			dma_timeout = at91_usart_spi_dma_timeout(aus);

			if (WARN_ON(dma_timeout == 0)) {
				dev_err(&spi->dev, "DMA transfer timeout\n");
				return -EIO;
			}
			aus->current_tx_remaining_bytes = 0;
		} else {
			at91_usart_spi_read_status(aus);
			at91_usart_spi_tx(aus);
		}

		cpu_relax();
	}

	if (aus->xfer_failed) {
		dev_err(aus->dev, "Overrun!\n");
		return -EIO;
	}

	return 0;
}

static int at91_usart_spi_prepare_message(struct spi_controller *ctlr,
					  struct spi_message *message)
{
	struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
	struct spi_device *spi = message->spi;
	u32 *ausd = spi->controller_state;

	at91_usart_spi_writel(aus, CR, US_ENABLE);
	at91_usart_spi_writel(aus, IER, US_OVRE_RXRDY_IRQS);
	at91_usart_spi_writel(aus, MR, *ausd);

	return 0;
}

static int at91_usart_spi_unprepare_message(struct spi_controller *ctlr,
					    struct spi_message *message)
{
	struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

	at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
	at91_usart_spi_writel(aus, IDR, US_OVRE_RXRDY_IRQS);

	return 0;
}

static void at91_usart_spi_cleanup(struct spi_device *spi)
{
	/* controller_state holds the cached MR word allocated in setup() */
	u32 *ausd = spi->controller_state;

	spi->controller_state = NULL;
	kfree(ausd);
}

static void at91_usart_spi_init(struct at91_usart_spi *aus)
{
	at91_usart_spi_writel(aus, MR, US_INIT);
	at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
}

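/*
 * Request every chip-select GPIO listed in the parent node's "cs-gpios"
 * property as an output so it can be driven for this controller.
 */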
static int at91_usart_gpio_setup(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.parent->of_node;
	int i;
	int ret;
	int nb;

	if (!np)
		return -EINVAL;

	nb = of_gpio_named_count(np, "cs-gpios");
	for (i = 0; i < nb; i++) {
		int cs_gpio = of_get_named_gpio(np, "cs-gpios", i);

		if (cs_gpio < 0)
			return cs_gpio;

		if (gpio_is_valid(cs_gpio)) {
			ret = devm_gpio_request_one(&pdev->dev, cs_gpio,
						    GPIOF_DIR_OUT,
						    dev_name(&pdev->dev));
			if (ret)
				return ret;
		}
	}

	return 0;
}

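/*
 * The SPI function probes as a child of the USART device: the register
 * window, IRQ and peripheral clock are all looked up through
 * pdev->dev.parent rather than pdev itself.
 */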
static int at91_usart_spi_probe(struct platform_device *pdev)
{
	struct resource *regs;
	struct spi_controller *controller;
	struct at91_usart_spi *aus;
	struct clk *clk;
	int irq;
	int ret;

	regs = platform_get_resource(to_platform_device(pdev->dev.parent),
				     IORESOURCE_MEM, 0);
	if (!regs)
		return -EINVAL;

	irq = platform_get_irq(to_platform_device(pdev->dev.parent), 0);
	if (irq < 0)
		return irq;

	clk = devm_clk_get(pdev->dev.parent, "usart");
	if (IS_ERR(clk))
		return PTR_ERR(clk);

	ret = -ENOMEM;
	controller = spi_alloc_master(&pdev->dev, sizeof(*aus));
	if (!controller)
		goto at91_usart_spi_probe_fail;

	ret = at91_usart_gpio_setup(pdev);
	if (ret)
		goto at91_usart_spi_probe_fail;

	controller->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LOOP | SPI_CS_HIGH;
	controller->dev.of_node = pdev->dev.parent->of_node;
	controller->bits_per_word_mask = SPI_BPW_MASK(8);
	controller->setup = at91_usart_spi_setup;
	controller->flags = SPI_MASTER_MUST_RX | SPI_MASTER_MUST_TX;
	controller->transfer_one = at91_usart_spi_transfer_one;
	controller->prepare_message = at91_usart_spi_prepare_message;
	controller->unprepare_message = at91_usart_spi_unprepare_message;
	controller->can_dma = at91_usart_spi_can_dma;
	controller->cleanup = at91_usart_spi_cleanup;
	controller->max_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
						US_MIN_CLK_DIV);
	controller->min_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
						US_MAX_CLK_DIV);
	platform_set_drvdata(pdev, controller);

	aus = spi_master_get_devdata(controller);

	aus->dev = &pdev->dev;
	aus->regs = devm_ioremap_resource(&pdev->dev, regs);
	if (IS_ERR(aus->regs)) {
		ret = PTR_ERR(aus->regs);
		goto at91_usart_spi_probe_fail;
	}

	aus->irq = irq;
	aus->clk = clk;

	ret = devm_request_irq(&pdev->dev, irq, at91_usart_spi_interrupt, 0,
			       dev_name(&pdev->dev), controller);
	if (ret)
		goto at91_usart_spi_probe_fail;

	ret = clk_prepare_enable(clk);
	if (ret)
		goto at91_usart_spi_probe_fail;

	aus->spi_clk = clk_get_rate(clk);
	at91_usart_spi_init(aus);

	aus->phybase = regs->start;

	aus->mpdev = to_platform_device(pdev->dev.parent);

	ret = at91_usart_spi_configure_dma(controller, aus);
	if (ret)
		goto at91_usart_fail_dma;

	spin_lock_init(&aus->lock);
	init_completion(&aus->xfer_completion);

	ret = devm_spi_register_master(&pdev->dev, controller);
	if (ret)
		goto at91_usart_fail_register_master;

	dev_info(&pdev->dev,
		 "AT91 USART SPI Controller version 0x%x at %pa (irq %d)\n",
		 at91_usart_spi_readl(aus, VERSION),
		 &regs->start, irq);

	return 0;

at91_usart_fail_register_master:
	at91_usart_spi_release_dma(controller);
at91_usart_fail_dma:
	clk_disable_unprepare(clk);
at91_usart_spi_probe_fail:
	spi_master_put(controller);
	return ret;
}

__maybe_unused static int at91_usart_spi_runtime_suspend(struct device *dev)
{
	struct spi_controller *ctlr = dev_get_drvdata(dev);
	struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

	clk_disable_unprepare(aus->clk);
	pinctrl_pm_select_sleep_state(dev);

	return 0;
}

__maybe_unused static int at91_usart_spi_runtime_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);

	pinctrl_pm_select_default_state(dev);

	return clk_prepare_enable(aus->clk);
}

__maybe_unused static int at91_usart_spi_suspend(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	int ret;

	ret = spi_controller_suspend(ctrl);
	if (ret)
		return ret;

	if (!pm_runtime_suspended(dev))
		at91_usart_spi_runtime_suspend(dev);

	return 0;
}

__maybe_unused static int at91_usart_spi_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);
	int ret;

	if (!pm_runtime_suspended(dev)) {
		ret = at91_usart_spi_runtime_resume(dev);
		if (ret)
			return ret;
	}

	at91_usart_spi_init(aus);

	return spi_controller_resume(ctrl);
}

static int at91_usart_spi_remove(struct platform_device *pdev)
{
	struct spi_controller *ctlr = platform_get_drvdata(pdev);
	struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

	at91_usart_spi_release_dma(ctlr);
	clk_disable_unprepare(aus->clk);

	return 0;
}

static const struct dev_pm_ops at91_usart_spi_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(at91_usart_spi_suspend, at91_usart_spi_resume)
	SET_RUNTIME_PM_OPS(at91_usart_spi_runtime_suspend,
			   at91_usart_spi_runtime_resume, NULL)
};

static struct platform_driver at91_usart_spi_driver = {
	.driver = {
		.name = "at91_usart_spi",
		.pm = &at91_usart_spi_pm_ops,
	},
	.probe = at91_usart_spi_probe,
	.remove = at91_usart_spi_remove,
};

module_platform_driver(at91_usart_spi_driver);

MODULE_DESCRIPTION("Microchip AT91 USART SPI Controller driver");
MODULE_AUTHOR("Radu Pirea <radu.pirea@microchip.com>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:at91_usart_spi");