xref: /OK3568_Linux_fs/kernel/drivers/usb/musb/musbhsdma.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: GPL-2.0
/*
 * MUSB OTG driver - support for Mentor's DMA controller
 *
 * Copyright 2005 Mentor Graphics Corporation
 * Copyright (C) 2005-2007 by Texas Instruments
 */
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "musb_core.h"
#include "musb_dma.h"

#define MUSB_HSDMA_CHANNEL_OFFSET(_bchannel, _offset)		\
		(MUSB_HSDMA_BASE + (_bchannel << 4) + _offset)
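
/*
 * Each DMA channel owns a 16-byte register window above MUSB_HSDMA_BASE;
 * for example, channel 2's address register sits at
 * MUSB_HSDMA_BASE + (2 << 4) + MUSB_HSDMA_ADDRESS. The helpers below wrap
 * the per-channel ADDRESS and COUNT accesses.
 */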

#define musb_read_hsdma_addr(mbase, bchannel)	\
	musb_readl(mbase,	\
		   MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_ADDRESS))

#define musb_write_hsdma_addr(mbase, bchannel, addr) \
	musb_writel(mbase, \
		    MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_ADDRESS), \
		    addr)

#define musb_read_hsdma_count(mbase, bchannel)	\
	musb_readl(mbase,	\
		   MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_COUNT))

#define musb_write_hsdma_count(mbase, bchannel, len) \
	musb_writel(mbase, \
		    MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_COUNT), \
		    len)
/* control register (16-bit): */
#define MUSB_HSDMA_ENABLE_SHIFT		0
#define MUSB_HSDMA_TRANSMIT_SHIFT	1
#define MUSB_HSDMA_MODE1_SHIFT		2
#define MUSB_HSDMA_IRQENABLE_SHIFT	3
#define MUSB_HSDMA_ENDPOINT_SHIFT	4
#define MUSB_HSDMA_BUSERROR_SHIFT	8
#define MUSB_HSDMA_BURSTMODE_SHIFT	9
#define MUSB_HSDMA_BURSTMODE		(3 << MUSB_HSDMA_BURSTMODE_SHIFT)
#define MUSB_HSDMA_BURSTMODE_UNSPEC	0
#define MUSB_HSDMA_BURSTMODE_INCR4	1
#define MUSB_HSDMA_BURSTMODE_INCR8	2
#define MUSB_HSDMA_BURSTMODE_INCR16	3
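
/*
 * Illustrative example: a mode-1 Tx transfer on endpoint 1 using INCR16
 * bursts is programmed with a control word of
 *
 *	(1 << MUSB_HSDMA_ENABLE_SHIFT) | (1 << MUSB_HSDMA_TRANSMIT_SHIFT) |
 *	(1 << MUSB_HSDMA_MODE1_SHIFT) | (1 << MUSB_HSDMA_IRQENABLE_SHIFT) |
 *	(1 << MUSB_HSDMA_ENDPOINT_SHIFT) |
 *	(MUSB_HSDMA_BURSTMODE_INCR16 << MUSB_HSDMA_BURSTMODE_SHIFT)
 *
 * which is the value configure_channel() below builds at run time.
 */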

#define MUSB_HSDMA_CHANNELS		8

struct musb_dma_controller;

struct musb_dma_channel {
	struct dma_channel		channel;
	struct musb_dma_controller	*controller;
	u32				start_addr;
	u32				len;
	u16				max_packet_sz;
	u8				idx;
	u8				epnum;
	u8				transmit;
};

struct musb_dma_controller {
	struct dma_controller		controller;
	struct musb_dma_channel		channel[MUSB_HSDMA_CHANNELS];
	void				*private_data;
	void __iomem			*base;
	u8				channel_count;
	u8				used_channels;
	int				irq;
};

static void dma_channel_release(struct dma_channel *channel);

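/*
 * dma_controller_stop - quiesce the controller before teardown
 *
 * Any channel still marked in use at this point is a driver bug; warn and
 * release each remaining channel so the controller can be freed safely.
 */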
static void dma_controller_stop(struct musb_dma_controller *controller)
{
	struct musb *musb = controller->private_data;
	struct dma_channel *channel;
	u8 bit;

	if (controller->used_channels != 0) {
		dev_err(musb->controller,
			"Stopping DMA controller while channel active\n");

		for (bit = 0; bit < MUSB_HSDMA_CHANNELS; bit++) {
			if (controller->used_channels & (1 << bit)) {
				channel = &controller->channel[bit].channel;
				dma_channel_release(channel);

				if (!controller->used_channels)
					break;
			}
		}
	}
}

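/*
 * dma_channel_allocate - claim a free hardware channel for one endpoint
 *
 * Scans the used_channels bitmask for a free slot, binds it to the given
 * hw_ep and direction, and returns the embedded struct dma_channel (or NULL
 * if all MUSB_HSDMA_CHANNELS channels are busy). Transfers are capped at
 * max_len = 0x100000 bytes per programming.
 */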
static struct dma_channel *dma_channel_allocate(struct dma_controller *c,
				struct musb_hw_ep *hw_ep, u8 transmit)
{
	struct musb_dma_controller *controller = container_of(c,
			struct musb_dma_controller, controller);
	struct musb_dma_channel *musb_channel = NULL;
	struct dma_channel *channel = NULL;
	u8 bit;

	for (bit = 0; bit < MUSB_HSDMA_CHANNELS; bit++) {
		if (!(controller->used_channels & (1 << bit))) {
			controller->used_channels |= (1 << bit);
			musb_channel = &(controller->channel[bit]);
			musb_channel->controller = controller;
			musb_channel->idx = bit;
			musb_channel->epnum = hw_ep->epnum;
			musb_channel->transmit = transmit;
			channel = &(musb_channel->channel);
			channel->private_data = musb_channel;
			channel->status = MUSB_DMA_STATUS_FREE;
			channel->max_len = 0x100000;
			/* Tx => mode 1; Rx => mode 0 */
			channel->desired_mode = transmit;
			channel->actual_len = 0;
			break;
		}
	}

	return channel;
}

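/*
 * dma_channel_release - return a channel to the free pool
 *
 * Clears the per-channel bookkeeping and the corresponding bit in
 * used_channels; the channel status reverts to MUSB_DMA_STATUS_UNKNOWN
 * until it is allocated again.
 */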
static void dma_channel_release(struct dma_channel *channel)
{
	struct musb_dma_channel *musb_channel = channel->private_data;

	channel->actual_len = 0;
	musb_channel->start_addr = 0;
	musb_channel->len = 0;

	musb_channel->controller->used_channels &=
		~(1 << musb_channel->idx);

	channel->status = MUSB_DMA_STATUS_UNKNOWN;
}

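/*
 * configure_channel - program one channel's ADDRESS, COUNT and CONTROL
 * registers
 *
 * Builds the control word (enable, direction, DMA mode, IRQ enable,
 * endpoint number and INCR16 burst mode), writes the buffer address and
 * length, and finally writes CONTROL, which starts the transfer.
 */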
static void configure_channel(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	struct musb_dma_controller *controller = musb_channel->controller;
	struct musb *musb = controller->private_data;
	void __iomem *mbase = controller->base;
	u8 bchannel = musb_channel->idx;
	u16 csr = 0;

	musb_dbg(musb, "%p, pkt_sz %d, addr %pad, len %d, mode %d",
			channel, packet_sz, &dma_addr, len, mode);

	if (mode) {
		csr |= 1 << MUSB_HSDMA_MODE1_SHIFT;
		BUG_ON(len < packet_sz);
	}
	csr |= MUSB_HSDMA_BURSTMODE_INCR16
				<< MUSB_HSDMA_BURSTMODE_SHIFT;

	csr |= (musb_channel->epnum << MUSB_HSDMA_ENDPOINT_SHIFT)
		| (1 << MUSB_HSDMA_ENABLE_SHIFT)
		| (1 << MUSB_HSDMA_IRQENABLE_SHIFT)
		| (musb_channel->transmit
				? (1 << MUSB_HSDMA_TRANSMIT_SHIFT)
				: 0);

	/* address/count */
	musb_write_hsdma_addr(mbase, bchannel, dma_addr);
	musb_write_hsdma_count(mbase, bchannel, len);

	/* control (this should start things) */
	musb_writew(mbase,
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_CONTROL),
		csr);
}

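/*
 * dma_channel_program - validate and start a transfer on an allocated channel
 *
 * Returns false (leaving the channel untouched) when the buffer address is
 * not 4-byte aligned on RTL 1.8+ cores, so the caller can fall back to PIO;
 * otherwise records the transfer parameters, marks the channel busy and
 * hands off to configure_channel().
 */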
static int dma_channel_program(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	struct musb_dma_controller *controller = musb_channel->controller;
	struct musb *musb = controller->private_data;

	musb_dbg(musb, "ep%d-%s pkt_sz %d, dma_addr %pad length %d, mode %d",
		musb_channel->epnum,
		musb_channel->transmit ? "Tx" : "Rx",
		packet_sz, &dma_addr, len, mode);

	BUG_ON(channel->status == MUSB_DMA_STATUS_UNKNOWN ||
		channel->status == MUSB_DMA_STATUS_BUSY);

	/*
	 * The DMA engine in RTL1.8 and above cannot handle
	 * DMA addresses that are not aligned to a 4 byte boundary.
	 * It ends up masking the last two bits of the address
	 * programmed in DMA_ADDR.
	 *
	 * Fail such DMA transfers, so that the backup PIO mode
	 * can carry out the transfer
	 */
	if ((musb->hwvers >= MUSB_HWVERS_1800) && (dma_addr % 4))
		return false;

	channel->actual_len = 0;
	musb_channel->start_addr = dma_addr;
	musb_channel->len = len;
	musb_channel->max_packet_sz = packet_sz;
	channel->status = MUSB_DMA_STATUS_BUSY;

	configure_channel(channel, packet_sz, mode, dma_addr, len);

	return true;
}

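/*
 * dma_channel_abort - stop a busy channel without completing the transfer
 *
 * Clears the endpoint's DMA bits (DMAENAB before DMAMODE, as the programming
 * guide requires on the Tx side), zeroes the channel's CONTROL, ADDRESS and
 * COUNT registers, and marks the channel free again.
 */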
static int dma_channel_abort(struct dma_channel *channel)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	void __iomem *mbase = musb_channel->controller->base;
	struct musb *musb = musb_channel->controller->private_data;

	u8 bchannel = musb_channel->idx;
	int offset;
	u16 csr;

	if (channel->status == MUSB_DMA_STATUS_BUSY) {
		if (musb_channel->transmit) {
			offset = musb->io.ep_offset(musb_channel->epnum,
						MUSB_TXCSR);

			/*
			 * The programming guide says that we must clear
			 * the DMAENAB bit before the DMAMODE bit...
			 */
			csr = musb_readw(mbase, offset);
			csr &= ~(MUSB_TXCSR_AUTOSET | MUSB_TXCSR_DMAENAB);
			musb_writew(mbase, offset, csr);
			csr &= ~MUSB_TXCSR_DMAMODE;
			musb_writew(mbase, offset, csr);
		} else {
			offset = musb->io.ep_offset(musb_channel->epnum,
						MUSB_RXCSR);

			csr = musb_readw(mbase, offset);
			csr &= ~(MUSB_RXCSR_AUTOCLEAR |
				 MUSB_RXCSR_DMAENAB |
				 MUSB_RXCSR_DMAMODE);
			musb_writew(mbase, offset, csr);
		}

		musb_writew(mbase,
			MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_CONTROL),
			0);
		musb_write_hsdma_addr(mbase, bchannel, 0);
		musb_write_hsdma_count(mbase, bchannel, 0);
		channel->status = MUSB_DMA_STATUS_FREE;
	}

	return 0;
}

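/*
 * dma_controller_irq - handle per-channel DMA completion interrupts
 *
 * Reads and clears MUSB_HSDMA_INTR to find the completed channels. If the
 * register reads as zero (a spurious interrupt), busy channels whose COUNT
 * has reached zero are treated as completed anyway. A bus error is reported
 * via MUSB_DMA_STATUS_BUS_ABORT; otherwise actual_len is derived from the
 * final ADDRESS value. For a mode-1 Tx transfer that ends on a short packet
 * (or any mode-0 Tx), TXPKTRDY is set by hand so the last packet goes out,
 * and musb_dma_completion() lets the core finish the request.
 */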
irqreturn_t dma_controller_irq(int irq, void *private_data)
{
	struct musb_dma_controller *controller = private_data;
	struct musb *musb = controller->private_data;
	struct musb_dma_channel *musb_channel;
	struct dma_channel *channel;

	void __iomem *mbase = controller->base;

	irqreturn_t retval = IRQ_NONE;

	unsigned long flags;

	u8 bchannel;
	u8 int_hsdma;

	u32 addr, count;
	u16 csr;

	spin_lock_irqsave(&musb->lock, flags);

	int_hsdma = musb_clearb(mbase, MUSB_HSDMA_INTR);

	if (!int_hsdma) {
		musb_dbg(musb, "spurious DMA irq");

		for (bchannel = 0; bchannel < MUSB_HSDMA_CHANNELS; bchannel++) {
			musb_channel = (struct musb_dma_channel *)
					&(controller->channel[bchannel]);
			channel = &musb_channel->channel;
			if (channel->status == MUSB_DMA_STATUS_BUSY) {
				count = musb_read_hsdma_count(mbase, bchannel);

				if (count == 0)
					int_hsdma |= (1 << bchannel);
			}
		}

		musb_dbg(musb, "int_hsdma = 0x%x", int_hsdma);

		if (!int_hsdma)
			goto done;
	}

	for (bchannel = 0; bchannel < MUSB_HSDMA_CHANNELS; bchannel++) {
		if (int_hsdma & (1 << bchannel)) {
			musb_channel = (struct musb_dma_channel *)
					&(controller->channel[bchannel]);
			channel = &musb_channel->channel;

			csr = musb_readw(mbase,
					MUSB_HSDMA_CHANNEL_OFFSET(bchannel,
							MUSB_HSDMA_CONTROL));

			if (csr & (1 << MUSB_HSDMA_BUSERROR_SHIFT)) {
				musb_channel->channel.status =
					MUSB_DMA_STATUS_BUS_ABORT;
			} else {
				u8 devctl;

				addr = musb_read_hsdma_addr(mbase,
						bchannel);
				channel->actual_len = addr
					- musb_channel->start_addr;

				musb_dbg(musb, "ch %p, 0x%x -> 0x%x (%zu / %d) %s",
					channel, musb_channel->start_addr,
					addr, channel->actual_len,
					musb_channel->len,
					(channel->actual_len
						< musb_channel->len) ?
					"=> reconfig 0" : "=> complete");

				devctl = musb_readb(mbase, MUSB_DEVCTL);

				channel->status = MUSB_DMA_STATUS_FREE;

				/* completed */
				if (musb_channel->transmit &&
					(!channel->desired_mode ||
					(channel->actual_len %
					    musb_channel->max_packet_sz))) {
					u8  epnum  = musb_channel->epnum;
					int offset = musb->io.ep_offset(epnum,
								    MUSB_TXCSR);
					u16 txcsr;

					/*
					 * The programming guide says that we
					 * must clear DMAENAB before DMAMODE.
					 */
					musb_ep_select(mbase, epnum);
					txcsr = musb_readw(mbase, offset);
					if (channel->desired_mode == 1) {
						txcsr &= ~(MUSB_TXCSR_DMAENAB
							| MUSB_TXCSR_AUTOSET);
						musb_writew(mbase, offset, txcsr);
						/* Send out the packet */
						txcsr &= ~MUSB_TXCSR_DMAMODE;
						txcsr |= MUSB_TXCSR_DMAENAB;
					}
					txcsr |=  MUSB_TXCSR_TXPKTRDY;
					musb_writew(mbase, offset, txcsr);
				}
				musb_dma_completion(musb, musb_channel->epnum,
						    musb_channel->transmit);
			}
		}
	}

	retval = IRQ_HANDLED;
done:
	spin_unlock_irqrestore(&musb->lock, flags);
	return retval;
}
EXPORT_SYMBOL_GPL(dma_controller_irq);

void musbhs_dma_controller_destroy(struct dma_controller *c)
{
	struct musb_dma_controller *controller = container_of(c,
			struct musb_dma_controller, controller);

	dma_controller_stop(controller);

	if (controller->irq)
		free_irq(controller->irq, c);

	kfree(controller);
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_destroy);

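/*
 * dma_controller_alloc - allocate and initialize the controller state
 *
 * Common helper for the IRQ and no-IRQ creation paths: allocates the
 * musb_dma_controller, records the register base and musb instance, and
 * wires up the dma_controller callbacks.
 */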
static struct musb_dma_controller *
dma_controller_alloc(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;

	controller = kzalloc(sizeof(*controller), GFP_KERNEL);
	if (!controller)
		return NULL;

	controller->channel_count = MUSB_HSDMA_CHANNELS;
	controller->private_data = musb;
	controller->base = base;

	controller->controller.channel_alloc = dma_channel_allocate;
	controller->controller.channel_release = dma_channel_release;
	controller->controller.channel_program = dma_channel_program;
	controller->controller.channel_abort = dma_channel_abort;
	return controller;
}

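/*
 * musbhs_dma_controller_create - create a controller driven by its own
 * "dma" interrupt line
 *
 * Requires a platform IRQ resource named "dma"; completions are handled by
 * dma_controller_irq() above. The _noirq variant below skips the IRQ setup
 * for glue layers that dispatch dma_controller_irq() themselves.
 */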
struct dma_controller *
musbhs_dma_controller_create(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;
	struct device *dev = musb->controller;
	struct platform_device *pdev = to_platform_device(dev);
	int irq = platform_get_irq_byname(pdev, "dma");

	if (irq <= 0) {
		dev_err(dev, "No DMA interrupt line!\n");
		return NULL;
	}

	controller = dma_controller_alloc(musb, base);
	if (!controller)
		return NULL;

	if (request_irq(irq, dma_controller_irq, 0,
			dev_name(musb->controller), controller)) {
		dev_err(dev, "request_irq %d failed!\n", irq);
		musb_dma_controller_destroy(&controller->controller);

		return NULL;
	}

	controller->irq = irq;

	return &controller->controller;
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_create);

struct dma_controller *
musbhs_dma_controller_create_noirq(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;

	controller = dma_controller_alloc(musb, base);
	if (!controller)
		return NULL;

	return &controller->controller;
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_create_noirq);
460