// SPDX-License-Identifier: GPL-2.0-only
/*
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"

/* MDMA Generic getter/setter */
#define STM32_MDMA_SHIFT(n)		(ffs(n) - 1)
#define STM32_MDMA_SET(n, mask)		(((n) << STM32_MDMA_SHIFT(mask)) & \
					 (mask))
#define STM32_MDMA_GET(n, mask)		(((n) & (mask)) >> \
					 STM32_MDMA_SHIFT(mask))

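/*
 * Worked example for the accessors above: with mask = GENMASK(7, 6),
 * STM32_MDMA_SHIFT(mask) is 6, so STM32_MDMA_SET(2, mask) yields 0x80
 * and STM32_MDMA_GET(0x80, mask) recovers 2.
 */
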
#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg 0 */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					| STM32_MDMA_CIFCR_CBTIF \
					| STM32_MDMA_CIFCR_CBRTIF \
					| STM32_MDMA_CIFCR_CCTCIF \
					| STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CCR_PL_MASK)
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					| STM32_MDMA_CCR_BTIE \
					| STM32_MDMA_CCR_BRTIE \
					| STM32_MDMA_CCR_CTCIE \
					| STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_TRGM_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_PAM_MASK)
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_TLEN_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_LEN2_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_DBURST_MASK)
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_SBURST_MASK)
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_DINCOS_MASK)
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_SINCOS_MASK)
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_DSIZE_MASK)
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_SSIZE_MASK)
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_DINC_MASK)
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_SINC_MASK)
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					| STM32_MDMA_CTCR_DINC_MASK \
					| STM32_MDMA_CTCR_SINCOS_MASK \
					| STM32_MDMA_CTCR_DINCOS_MASK \
					| STM32_MDMA_CTCR_LEN2_MSK \
					| STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRC_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CBNDTR_BRC_MK)

#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CBNDTR_BNDT_MASK)

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_DUV_MASK)
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_SUV_MASK)

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
#define STM32_MDMA_CTBR_TSEL(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CTBR_TSEL_MASK)

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		32
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x3 /* CCR PL is a 2-bit field */

enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
};

struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy;
	u32 cmar;
	u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];
};

static struct stm32_mdma_device *stm32_mdma_get_dev(
	struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	desc->count = count;

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}
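
/*
 * Note: ffs(width) - 1 maps the dma_slave_buswidth byte counts 1/2/4/8
 * to the register encodings 0/1/2/3, i.e. enum stm32_mdma_width.
 */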

static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}
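
/*
 * Example with illustrative values: for addr = 0x1004, buf_len = 64 and
 * tlen = 128, the 8-byte width fails the alignment check (0x1004 & 0x7),
 * so the loop settles on DMA_SLAVE_BUSWIDTH_4_BYTES.
 */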

static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}
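
/*
 * Example with illustrative values: buf_len = 96, tlen = 128,
 * max_burst = 16 and width = 4 bytes give 1 << __ffs(128 | 96) = 32
 * bytes, capped at max_burst * width = 64, i.e. 32 / 4 = 8 beats.
 */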

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
	}

	chan->busy = false;
}

static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}
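
/*
 * Illustration (addresses assumed): if ahb_addr_masks[] contains
 * 0x20000000, a buffer at 0x20001000 matches (0x20001000 & 0xF0000000)
 * and gets the AHB bus bit set, while 0xC0000000 is left on the AXI bus.
 */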

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;
	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN) we have to set
	 * the number of bytes - 1 in CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}
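
/*
 * Worked DMA_MEM_TO_DEV example (illustrative values): dst_addr_width = 4,
 * dst_maxburst = 8, tlen = 128 and a 4 KiB buffer aligned on 8 bytes give
 * DSIZE = 2 and DBURST = ilog2(8) = 3 on the device side; the memory side
 * is promoted to 8-byte accesses (SSIZE = SINCOS = 3) with a 16-beat burst
 * (SBURST = 4), and packing (PKE) is enabled as the bus widths differ.
 */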

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
}

static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM |
			STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}
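
/*
 * Illustration: in a three-node list, node[0].clar and node[1].clar hold
 * the physical address of the next hwdesc, while node[2].clar is either 0
 * (end of transfer) or node[0].hwdesc_phys for a cyclic descriptor.
 */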

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the channel is set up in cyclic mode, we cannot assign this
	 * channel anymore. The DMA channel needs to be aborted or terminated
	 * before allowing another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the channel is set up in cyclic mode, we cannot assign this
	 * channel anymore. The DMA channel needs to be aborted or terminated
	 * before allowing another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel is set up in cyclic mode, we cannot assign this
	 * channel anymore. The DMA channel needs to be aborted or terminated
	 * to allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}
1087*4882a593Smuzhiyun 
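/*
 * For reference, a client reaches the memcpy path above purely through the
 * generic dmaengine API. A minimal sketch, error handling elided and all
 * names illustrative:
 *
 *	struct dma_chan *ch = dma_request_chan(dev, "mem2mem");
 *	struct dma_async_tx_descriptor *d;
 *
 *	d = dmaengine_prep_dma_memcpy(ch, dst_dma, src_dma, len,
 *				      DMA_PREP_INTERRUPT);
 *	d->callback = my_done_cb;	 // hypothetical completion callback
 *	dmaengine_submit(d);
 *	dma_async_issue_pending(ch);	 // ends up in stm32_mdma_issue_pending()
 */
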
1088*4882a593Smuzhiyun static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
1089*4882a593Smuzhiyun {
1090*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1091*4882a593Smuzhiyun 
1092*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
1093*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1094*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
1095*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1096*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
1097*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1098*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
1099*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1100*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
1101*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1102*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
1103*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1104*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
1105*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1106*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
1107*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1108*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
1109*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1110*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
1111*4882a593Smuzhiyun 		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
1112*4882a593Smuzhiyun }
1113*4882a593Smuzhiyun 
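/*
 * stm32_mdma_start_transfer() programs the channel directly from the first
 * hardware descriptor: every channel register is loaded, stale interrupt
 * flags are cleared, the channel is enabled, and for MEM2MEM (SWRM set in
 * CTCR) a software request kicks off the first block, since no peripheral
 * request line will do it.
 */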
1114*4882a593Smuzhiyun static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
1115*4882a593Smuzhiyun {
1116*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1117*4882a593Smuzhiyun 	struct virt_dma_desc *vdesc;
1118*4882a593Smuzhiyun 	struct stm32_mdma_hwdesc *hwdesc;
1119*4882a593Smuzhiyun 	u32 id = chan->id;
1120*4882a593Smuzhiyun 	u32 status, reg;
1121*4882a593Smuzhiyun 
1122*4882a593Smuzhiyun 	vdesc = vchan_next_desc(&chan->vchan);
1123*4882a593Smuzhiyun 	if (!vdesc) {
1124*4882a593Smuzhiyun 		chan->desc = NULL;
1125*4882a593Smuzhiyun 		return;
1126*4882a593Smuzhiyun 	}
1127*4882a593Smuzhiyun 
1128*4882a593Smuzhiyun 	list_del(&vdesc->node);
1129*4882a593Smuzhiyun 
1130*4882a593Smuzhiyun 	chan->desc = to_stm32_mdma_desc(vdesc);
1131*4882a593Smuzhiyun 	hwdesc = chan->desc->node[0].hwdesc;
1132*4882a593Smuzhiyun 	chan->curr_hwdesc = 0;
1133*4882a593Smuzhiyun 
1134*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
1135*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
1136*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
1137*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
1138*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
1139*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
1140*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
1141*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
1142*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
1143*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
1144*4882a593Smuzhiyun 
1145*4882a593Smuzhiyun 	/* Clear interrupt status if it is there */
1146*4882a593Smuzhiyun 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1147*4882a593Smuzhiyun 	if (status)
1148*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);
1149*4882a593Smuzhiyun 
1150*4882a593Smuzhiyun 	stm32_mdma_dump_reg(chan);
1151*4882a593Smuzhiyun 
1152*4882a593Smuzhiyun 	/* Start DMA */
1153*4882a593Smuzhiyun 	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);
1154*4882a593Smuzhiyun 
1155*4882a593Smuzhiyun 	/* Set SW request in case of MEM2MEM transfer */
1156*4882a593Smuzhiyun 	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
1157*4882a593Smuzhiyun 		reg = STM32_MDMA_CCR(id);
1158*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1159*4882a593Smuzhiyun 	}
1160*4882a593Smuzhiyun 
1161*4882a593Smuzhiyun 	chan->busy = true;
1162*4882a593Smuzhiyun 
1163*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
1164*4882a593Smuzhiyun }
1165*4882a593Smuzhiyun 
1166*4882a593Smuzhiyun static void stm32_mdma_issue_pending(struct dma_chan *c)
1167*4882a593Smuzhiyun {
1168*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1169*4882a593Smuzhiyun 	unsigned long flags;
1170*4882a593Smuzhiyun 
1171*4882a593Smuzhiyun 	spin_lock_irqsave(&chan->vchan.lock, flags);
1172*4882a593Smuzhiyun 
1173*4882a593Smuzhiyun 	if (!vchan_issue_pending(&chan->vchan))
1174*4882a593Smuzhiyun 		goto end;
1175*4882a593Smuzhiyun 
1176*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
1177*4882a593Smuzhiyun 
1178*4882a593Smuzhiyun 	if (!chan->desc && !chan->busy)
1179*4882a593Smuzhiyun 		stm32_mdma_start_transfer(chan);
1180*4882a593Smuzhiyun 
1181*4882a593Smuzhiyun end:
1182*4882a593Smuzhiyun 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1183*4882a593Smuzhiyun }
1184*4882a593Smuzhiyun 
1185*4882a593Smuzhiyun static int stm32_mdma_pause(struct dma_chan *c)
1186*4882a593Smuzhiyun {
1187*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1188*4882a593Smuzhiyun 	unsigned long flags;
1189*4882a593Smuzhiyun 	int ret;
1190*4882a593Smuzhiyun 
1191*4882a593Smuzhiyun 	spin_lock_irqsave(&chan->vchan.lock, flags);
1192*4882a593Smuzhiyun 	ret = stm32_mdma_disable_chan(chan);
1193*4882a593Smuzhiyun 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1194*4882a593Smuzhiyun 
1195*4882a593Smuzhiyun 	if (!ret)
1196*4882a593Smuzhiyun 		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);
1197*4882a593Smuzhiyun 
1198*4882a593Smuzhiyun 	return ret;
1199*4882a593Smuzhiyun }
1200*4882a593Smuzhiyun 
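/*
 * Resume reuses the current hardware descriptor: pause only cleared the EN
 * bit, so CBNDTR/CSAR/CDAR still reflect the hardware's progress, and
 * re-enabling the channel (plus SWRQ for MEM2MEM) should continue the
 * transfer from where it stopped.
 */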
1201*4882a593Smuzhiyun static int stm32_mdma_resume(struct dma_chan *c)
1202*4882a593Smuzhiyun {
1203*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1204*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1205*4882a593Smuzhiyun 	struct stm32_mdma_hwdesc *hwdesc;
1206*4882a593Smuzhiyun 	unsigned long flags;
1207*4882a593Smuzhiyun 	u32 status, reg;
1208*4882a593Smuzhiyun 
1209*4882a593Smuzhiyun 	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;
1210*4882a593Smuzhiyun 
1211*4882a593Smuzhiyun 	spin_lock_irqsave(&chan->vchan.lock, flags);
1212*4882a593Smuzhiyun 
1213*4882a593Smuzhiyun 	/* Re-configure control register */
1214*4882a593Smuzhiyun 	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
1215*4882a593Smuzhiyun 
1216*4882a593Smuzhiyun 	/* Clear interrupt status if it is there */
1217*4882a593Smuzhiyun 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1218*4882a593Smuzhiyun 	if (status)
1219*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
1220*4882a593Smuzhiyun 
1221*4882a593Smuzhiyun 	stm32_mdma_dump_reg(chan);
1222*4882a593Smuzhiyun 
1223*4882a593Smuzhiyun 	/* Re-start DMA */
1224*4882a593Smuzhiyun 	reg = STM32_MDMA_CCR(chan->id);
1225*4882a593Smuzhiyun 	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
1226*4882a593Smuzhiyun 
1227*4882a593Smuzhiyun 	/* Set SW request in case of MEM2MEM transfer */
1228*4882a593Smuzhiyun 	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
1229*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1230*4882a593Smuzhiyun 
1231*4882a593Smuzhiyun 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1232*4882a593Smuzhiyun 
1233*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);
1234*4882a593Smuzhiyun 
1235*4882a593Smuzhiyun 	return 0;
1236*4882a593Smuzhiyun }
1237*4882a593Smuzhiyun 
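/*
 * terminate_all stops the hardware immediately but defers freeing of the
 * in-flight descriptor via vchan_terminate_vdesc(); callers must follow up
 * with dmaengine_synchronize() (stm32_mdma_synchronize() below) before the
 * descriptor memory is guaranteed to be released.
 */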
1238*4882a593Smuzhiyun static int stm32_mdma_terminate_all(struct dma_chan *c)
1239*4882a593Smuzhiyun {
1240*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1241*4882a593Smuzhiyun 	unsigned long flags;
1242*4882a593Smuzhiyun 	LIST_HEAD(head);
1243*4882a593Smuzhiyun 
1244*4882a593Smuzhiyun 	spin_lock_irqsave(&chan->vchan.lock, flags);
1245*4882a593Smuzhiyun 	if (chan->desc) {
1246*4882a593Smuzhiyun 		vchan_terminate_vdesc(&chan->desc->vdesc);
1247*4882a593Smuzhiyun 		if (chan->busy)
1248*4882a593Smuzhiyun 			stm32_mdma_stop(chan);
1249*4882a593Smuzhiyun 		chan->desc = NULL;
1250*4882a593Smuzhiyun 	}
1251*4882a593Smuzhiyun 	vchan_get_all_descriptors(&chan->vchan, &head);
1252*4882a593Smuzhiyun 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1253*4882a593Smuzhiyun 
1254*4882a593Smuzhiyun 	vchan_dma_desc_free_list(&chan->vchan, &head);
1255*4882a593Smuzhiyun 
1256*4882a593Smuzhiyun 	return 0;
1257*4882a593Smuzhiyun }
1258*4882a593Smuzhiyun 
1259*4882a593Smuzhiyun static void stm32_mdma_synchronize(struct dma_chan *c)
1260*4882a593Smuzhiyun {
1261*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1262*4882a593Smuzhiyun 
1263*4882a593Smuzhiyun 	vchan_synchronize(&chan->vchan);
1264*4882a593Smuzhiyun }
1265*4882a593Smuzhiyun 
1266*4882a593Smuzhiyun static int stm32_mdma_slave_config(struct dma_chan *c,
1267*4882a593Smuzhiyun 				   struct dma_slave_config *config)
1268*4882a593Smuzhiyun {
1269*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1270*4882a593Smuzhiyun 
1271*4882a593Smuzhiyun 	memcpy(&chan->dma_config, config, sizeof(*config));
1272*4882a593Smuzhiyun 
1273*4882a593Smuzhiyun 	return 0;
1274*4882a593Smuzhiyun }
1275*4882a593Smuzhiyun 
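/*
 * Residue = bytes still to be transferred: the BNDT byte counts of all
 * not-yet-started descriptors plus the live CBNDTR count of the current one,
 * rounded up to the memory burst size (the driver advertises
 * DMA_RESIDUE_GRANULARITY_BURST). Worked example, values illustrative: two
 * remaining nodes of 0x1000 bytes each and CBNDTR.BNDT = 0x0205, with
 * mem_burst = 8 and mem_width = 4, give 0x2205 bytes, rounded up to the
 * 32-byte burst boundary -> 0x2220.
 */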
1276*4882a593Smuzhiyun static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1277*4882a593Smuzhiyun 				      struct stm32_mdma_desc *desc,
1278*4882a593Smuzhiyun 				      u32 curr_hwdesc)
1279*4882a593Smuzhiyun {
1280*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1281*4882a593Smuzhiyun 	struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
1282*4882a593Smuzhiyun 	u32 cbndtr, residue, modulo, burst_size;
1283*4882a593Smuzhiyun 	int i;
1284*4882a593Smuzhiyun 
1285*4882a593Smuzhiyun 	residue = 0;
1286*4882a593Smuzhiyun 	for (i = curr_hwdesc + 1; i < desc->count; i++) {
1287*4882a593Smuzhiyun 		hwdesc = desc->node[i].hwdesc;
1288*4882a593Smuzhiyun 		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1289*4882a593Smuzhiyun 	}
1290*4882a593Smuzhiyun 	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1291*4882a593Smuzhiyun 	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1292*4882a593Smuzhiyun 
1293*4882a593Smuzhiyun 	if (!chan->mem_burst)
1294*4882a593Smuzhiyun 		return residue;
1295*4882a593Smuzhiyun 
1296*4882a593Smuzhiyun 	burst_size = chan->mem_burst * chan->mem_width;
1297*4882a593Smuzhiyun 	modulo = residue % burst_size;
1298*4882a593Smuzhiyun 	if (modulo)
1299*4882a593Smuzhiyun 		residue = residue - modulo + burst_size;
1300*4882a593Smuzhiyun 
1301*4882a593Smuzhiyun 	return residue;
1302*4882a593Smuzhiyun }
1303*4882a593Smuzhiyun 
1304*4882a593Smuzhiyun static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1305*4882a593Smuzhiyun 					    dma_cookie_t cookie,
1306*4882a593Smuzhiyun 					    struct dma_tx_state *state)
1307*4882a593Smuzhiyun {
1308*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1309*4882a593Smuzhiyun 	struct virt_dma_desc *vdesc;
1310*4882a593Smuzhiyun 	enum dma_status status;
1311*4882a593Smuzhiyun 	unsigned long flags;
1312*4882a593Smuzhiyun 	u32 residue = 0;
1313*4882a593Smuzhiyun 
1314*4882a593Smuzhiyun 	status = dma_cookie_status(c, cookie, state);
1315*4882a593Smuzhiyun 	if ((status == DMA_COMPLETE) || (!state))
1316*4882a593Smuzhiyun 		return status;
1317*4882a593Smuzhiyun 
1318*4882a593Smuzhiyun 	spin_lock_irqsave(&chan->vchan.lock, flags);
1319*4882a593Smuzhiyun 
1320*4882a593Smuzhiyun 	vdesc = vchan_find_desc(&chan->vchan, cookie);
1321*4882a593Smuzhiyun 	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1322*4882a593Smuzhiyun 		residue = stm32_mdma_desc_residue(chan, chan->desc,
1323*4882a593Smuzhiyun 						  chan->curr_hwdesc);
1324*4882a593Smuzhiyun 	else if (vdesc)
1325*4882a593Smuzhiyun 		residue = stm32_mdma_desc_residue(chan,
1326*4882a593Smuzhiyun 						  to_stm32_mdma_desc(vdesc), 0);
1327*4882a593Smuzhiyun 	dma_set_residue(state, residue);
1328*4882a593Smuzhiyun 
1329*4882a593Smuzhiyun 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1330*4882a593Smuzhiyun 
1331*4882a593Smuzhiyun 	return status;
1332*4882a593Smuzhiyun }
1333*4882a593Smuzhiyun 
1334*4882a593Smuzhiyun static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1335*4882a593Smuzhiyun {
1336*4882a593Smuzhiyun 	vchan_cookie_complete(&chan->desc->vdesc);
1337*4882a593Smuzhiyun 	chan->desc = NULL;
1338*4882a593Smuzhiyun 	chan->busy = false;
1339*4882a593Smuzhiyun 
1340*4882a593Smuzhiyun 	/* Start the next transfer if this driver has a next desc */
1341*4882a593Smuzhiyun 	stm32_mdma_start_transfer(chan);
1342*4882a593Smuzhiyun }
1343*4882a593Smuzhiyun 
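/*
 * Interrupt flow: GISR0 gives a per-channel pending summary, __ffs() picks
 * the lowest pending channel, then CISR is masked against the interrupt
 * enables. The ">> 1" works because the CCR enable bits [5:1] (TEIE..TCIE)
 * line up with the CISR flag bits [4:0] once shifted down by one.
 */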
1344*4882a593Smuzhiyun static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1345*4882a593Smuzhiyun {
1346*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = devid;
1347*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan;
1348*4882a593Smuzhiyun 	u32 reg, id, ccr, ien, status;
1349*4882a593Smuzhiyun 
1350*4882a593Smuzhiyun 	/* Find out which channel generates the interrupt */
1351*4882a593Smuzhiyun 	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1352*4882a593Smuzhiyun 	if (!status) {
1353*4882a593Smuzhiyun 		dev_dbg(mdma2dev(dmadev), "spurious it\n");
1354*4882a593Smuzhiyun 		return IRQ_NONE;
1355*4882a593Smuzhiyun 	}
1356*4882a593Smuzhiyun 	id = __ffs(status);
1357*4882a593Smuzhiyun 
1358*4882a593Smuzhiyun 	if (id >= dmadev->nr_channels) {
1359*4882a593Smuzhiyun 		dev_warn(mdma2dev(dmadev), "MDMA channel id out of range\n");
1360*4882a593Smuzhiyun 		return IRQ_NONE;
1361*4882a593Smuzhiyun 	}
1362*4882a593Smuzhiyun 	chan = &dmadev->chan[id];
1363*4882a593Smuzhiyun 
1364*4882a593Smuzhiyun 	/* Handle interrupt for the channel */
1365*4882a593Smuzhiyun 	spin_lock(&chan->vchan.lock);
1366*4882a593Smuzhiyun 	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1367*4882a593Smuzhiyun 	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
1368*4882a593Smuzhiyun 	status &= ~STM32_MDMA_CISR_CRQA;
1369*4882a593Smuzhiyun 	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1370*4882a593Smuzhiyun 	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;
1371*4882a593Smuzhiyun 
1372*4882a593Smuzhiyun 	if (!(status & ien)) {
1373*4882a593Smuzhiyun 		spin_unlock(&chan->vchan.lock);
1374*4882a593Smuzhiyun 		dev_warn(chan2dev(chan),
1375*4882a593Smuzhiyun 			 "spurious it (status=0x%04x, ien=0x%04x)\n",
1376*4882a593Smuzhiyun 			 status, ien);
1377*4882a593Smuzhiyun 		return IRQ_NONE;
1378*4882a593Smuzhiyun 	}
1379*4882a593Smuzhiyun 
1380*4882a593Smuzhiyun 	reg = STM32_MDMA_CIFCR(id);
1381*4882a593Smuzhiyun 
1382*4882a593Smuzhiyun 	if (status & STM32_MDMA_CISR_TEIF) {
1383*4882a593Smuzhiyun 		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
1384*4882a593Smuzhiyun 			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
1385*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1386*4882a593Smuzhiyun 		status &= ~STM32_MDMA_CISR_TEIF;
1387*4882a593Smuzhiyun 	}
1388*4882a593Smuzhiyun 
1389*4882a593Smuzhiyun 	if (status & STM32_MDMA_CISR_CTCIF) {
1390*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1391*4882a593Smuzhiyun 		status &= ~STM32_MDMA_CISR_CTCIF;
1392*4882a593Smuzhiyun 		stm32_mdma_xfer_end(chan);
1393*4882a593Smuzhiyun 	}
1394*4882a593Smuzhiyun 
1395*4882a593Smuzhiyun 	if (status & STM32_MDMA_CISR_BRTIF) {
1396*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1397*4882a593Smuzhiyun 		status &= ~STM32_MDMA_CISR_BRTIF;
1398*4882a593Smuzhiyun 	}
1399*4882a593Smuzhiyun 
1400*4882a593Smuzhiyun 	if (status & STM32_MDMA_CISR_BTIF) {
1401*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1402*4882a593Smuzhiyun 		status &= ~STM32_MDMA_CISR_BTIF;
1403*4882a593Smuzhiyun 		chan->curr_hwdesc++;
1404*4882a593Smuzhiyun 		if (chan->desc && chan->desc->cyclic) {
1405*4882a593Smuzhiyun 			if (chan->curr_hwdesc == chan->desc->count)
1406*4882a593Smuzhiyun 				chan->curr_hwdesc = 0;
1407*4882a593Smuzhiyun 			vchan_cyclic_callback(&chan->desc->vdesc);
1408*4882a593Smuzhiyun 		}
1409*4882a593Smuzhiyun 	}
1410*4882a593Smuzhiyun 
1411*4882a593Smuzhiyun 	if (status & STM32_MDMA_CISR_TCIF) {
1412*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1413*4882a593Smuzhiyun 		status &= ~STM32_MDMA_CISR_TCIF;
1414*4882a593Smuzhiyun 	}
1415*4882a593Smuzhiyun 
1416*4882a593Smuzhiyun 	if (status) {
1417*4882a593Smuzhiyun 		stm32_mdma_set_bits(dmadev, reg, status);
1418*4882a593Smuzhiyun 		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
1419*4882a593Smuzhiyun 		if (!(ccr & STM32_MDMA_CCR_EN))
1420*4882a593Smuzhiyun 			dev_err(chan2dev(chan), "chan disabled by HW\n");
1421*4882a593Smuzhiyun 	}
1422*4882a593Smuzhiyun 
1423*4882a593Smuzhiyun 	spin_unlock(&chan->vchan.lock);
1424*4882a593Smuzhiyun 
1425*4882a593Smuzhiyun 	return IRQ_HANDLED;
1426*4882a593Smuzhiyun }
1427*4882a593Smuzhiyun 
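/*
 * Channel setup allocates a per-channel dma_pool for the hardware
 * descriptors, so linked-list nodes live in DMA-coherent memory with the
 * alignment the controller expects, and takes a runtime PM reference to
 * keep the MDMA clock running for as long as the channel is claimed.
 */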
1428*4882a593Smuzhiyun static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1429*4882a593Smuzhiyun {
1430*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1431*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1432*4882a593Smuzhiyun 	int ret;
1433*4882a593Smuzhiyun 
1434*4882a593Smuzhiyun 	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1435*4882a593Smuzhiyun 					   c->device->dev,
1436*4882a593Smuzhiyun 					   sizeof(struct stm32_mdma_hwdesc),
1437*4882a593Smuzhiyun 					  __alignof__(struct stm32_mdma_hwdesc),
1438*4882a593Smuzhiyun 					   0);
1439*4882a593Smuzhiyun 	if (!chan->desc_pool) {
1440*4882a593Smuzhiyun 		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1441*4882a593Smuzhiyun 		return -ENOMEM;
1442*4882a593Smuzhiyun 	}
1443*4882a593Smuzhiyun 
1444*4882a593Smuzhiyun 	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
1445*4882a593Smuzhiyun 	if (ret < 0)
1446*4882a593Smuzhiyun 		return ret;
1447*4882a593Smuzhiyun 
1448*4882a593Smuzhiyun 	ret = stm32_mdma_disable_chan(chan);
1449*4882a593Smuzhiyun 	if (ret < 0)
1450*4882a593Smuzhiyun 		pm_runtime_put(dmadev->ddev.dev);
1451*4882a593Smuzhiyun 
1452*4882a593Smuzhiyun 	return ret;
1453*4882a593Smuzhiyun }
1454*4882a593Smuzhiyun 
1455*4882a593Smuzhiyun static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1456*4882a593Smuzhiyun {
1457*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1458*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1459*4882a593Smuzhiyun 	unsigned long flags;
1460*4882a593Smuzhiyun 
1461*4882a593Smuzhiyun 	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1462*4882a593Smuzhiyun 
1463*4882a593Smuzhiyun 	if (chan->busy) {
1464*4882a593Smuzhiyun 		spin_lock_irqsave(&chan->vchan.lock, flags);
1465*4882a593Smuzhiyun 		stm32_mdma_stop(chan);
1466*4882a593Smuzhiyun 		chan->desc = NULL;
1467*4882a593Smuzhiyun 		spin_unlock_irqrestore(&chan->vchan.lock, flags);
1468*4882a593Smuzhiyun 	}
1469*4882a593Smuzhiyun 
1470*4882a593Smuzhiyun 	pm_runtime_put(dmadev->ddev.dev);
1471*4882a593Smuzhiyun 	vchan_free_chan_resources(to_virt_chan(c));
1472*4882a593Smuzhiyun 	dmam_pool_destroy(chan->desc_pool);
1473*4882a593Smuzhiyun 	chan->desc_pool = NULL;
1474*4882a593Smuzhiyun }
1475*4882a593Smuzhiyun 
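/*
 * The five of_xlate cells are: request line, priority level, CTCR transfer
 * configuration, mask address, mask data. A hypothetical consumer node
 * (node name and cell values purely illustrative; see the st,stm32-mdma
 * DT binding for the real encodings):
 *
 *	some_client: client@48000000 {
 *		dmas = <&mdma1 21 0x2 0x12000a 0x0 0x0>;
 *		dma-names = "tx";
 *	};
 */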
1476*4882a593Smuzhiyun static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1477*4882a593Smuzhiyun 					    struct of_dma *ofdma)
1478*4882a593Smuzhiyun {
1479*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1480*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan;
1481*4882a593Smuzhiyun 	struct dma_chan *c;
1482*4882a593Smuzhiyun 	struct stm32_mdma_chan_config config;
1483*4882a593Smuzhiyun 
1484*4882a593Smuzhiyun 	if (dma_spec->args_count < 5) {
1485*4882a593Smuzhiyun 		dev_err(mdma2dev(dmadev), "Bad number of args\n");
1486*4882a593Smuzhiyun 		return NULL;
1487*4882a593Smuzhiyun 	}
1488*4882a593Smuzhiyun 
1489*4882a593Smuzhiyun 	config.request = dma_spec->args[0];
1490*4882a593Smuzhiyun 	config.priority_level = dma_spec->args[1];
1491*4882a593Smuzhiyun 	config.transfer_config = dma_spec->args[2];
1492*4882a593Smuzhiyun 	config.mask_addr = dma_spec->args[3];
1493*4882a593Smuzhiyun 	config.mask_data = dma_spec->args[4];
1494*4882a593Smuzhiyun 
1495*4882a593Smuzhiyun 	if (config.request >= dmadev->nr_requests) {
1496*4882a593Smuzhiyun 		dev_err(mdma2dev(dmadev), "Bad request line\n");
1497*4882a593Smuzhiyun 		return NULL;
1498*4882a593Smuzhiyun 	}
1499*4882a593Smuzhiyun 
1500*4882a593Smuzhiyun 	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1501*4882a593Smuzhiyun 		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1502*4882a593Smuzhiyun 		return NULL;
1503*4882a593Smuzhiyun 	}
1504*4882a593Smuzhiyun 
1505*4882a593Smuzhiyun 	c = dma_get_any_slave_channel(&dmadev->ddev);
1506*4882a593Smuzhiyun 	if (!c) {
1507*4882a593Smuzhiyun 		dev_err(mdma2dev(dmadev), "No more channels available\n");
1508*4882a593Smuzhiyun 		return NULL;
1509*4882a593Smuzhiyun 	}
1510*4882a593Smuzhiyun 
1511*4882a593Smuzhiyun 	chan = to_stm32_mdma_chan(c);
1512*4882a593Smuzhiyun 	chan->chan_config = config;
1513*4882a593Smuzhiyun 
1514*4882a593Smuzhiyun 	return c;
1515*4882a593Smuzhiyun }
1516*4882a593Smuzhiyun 
1517*4882a593Smuzhiyun static const struct of_device_id stm32_mdma_of_match[] = {
1518*4882a593Smuzhiyun 	{ .compatible = "st,stm32h7-mdma", },
1519*4882a593Smuzhiyun 	{ /* sentinel */ },
1520*4882a593Smuzhiyun };
1521*4882a593Smuzhiyun MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1522*4882a593Smuzhiyun 
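/*
 * Probe order matters here: the clock must be running before the optional
 * reset pulse and before any register access, the dmaengine device is
 * registered before the OF translator so of_xlate can hand out channels,
 * and the final get_noresume/put pair hands an already-active device over
 * to runtime PM.
 */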
1523*4882a593Smuzhiyun static int stm32_mdma_probe(struct platform_device *pdev)
1524*4882a593Smuzhiyun {
1525*4882a593Smuzhiyun 	struct stm32_mdma_chan *chan;
1526*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev;
1527*4882a593Smuzhiyun 	struct dma_device *dd;
1528*4882a593Smuzhiyun 	struct device_node *of_node;
1529*4882a593Smuzhiyun 	struct resource *res;
1530*4882a593Smuzhiyun 	struct reset_control *rst;
1531*4882a593Smuzhiyun 	u32 nr_channels, nr_requests;
1532*4882a593Smuzhiyun 	int i, count, ret;
1533*4882a593Smuzhiyun 
1534*4882a593Smuzhiyun 	of_node = pdev->dev.of_node;
1535*4882a593Smuzhiyun 	if (!of_node)
1536*4882a593Smuzhiyun 		return -ENODEV;
1537*4882a593Smuzhiyun 
1538*4882a593Smuzhiyun 	ret = device_property_read_u32(&pdev->dev, "dma-channels",
1539*4882a593Smuzhiyun 				       &nr_channels);
1540*4882a593Smuzhiyun 	if (ret) {
1541*4882a593Smuzhiyun 		nr_channels = STM32_MDMA_MAX_CHANNELS;
1542*4882a593Smuzhiyun 		dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
1543*4882a593Smuzhiyun 			 nr_channels);
1544*4882a593Smuzhiyun 	}
1545*4882a593Smuzhiyun 
1546*4882a593Smuzhiyun 	ret = device_property_read_u32(&pdev->dev, "dma-requests",
1547*4882a593Smuzhiyun 				       &nr_requests);
1548*4882a593Smuzhiyun 	if (ret) {
1549*4882a593Smuzhiyun 		nr_requests = STM32_MDMA_MAX_REQUESTS;
1550*4882a593Smuzhiyun 		dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
1551*4882a593Smuzhiyun 			 nr_requests);
1552*4882a593Smuzhiyun 	}
1553*4882a593Smuzhiyun 
1554*4882a593Smuzhiyun 	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
1555*4882a593Smuzhiyun 	if (count < 0)
1556*4882a593Smuzhiyun 		count = 0;
1557*4882a593Smuzhiyun 
1558*4882a593Smuzhiyun 	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
1559*4882a593Smuzhiyun 			      GFP_KERNEL);
1560*4882a593Smuzhiyun 	if (!dmadev)
1561*4882a593Smuzhiyun 		return -ENOMEM;
1562*4882a593Smuzhiyun 
1563*4882a593Smuzhiyun 	dmadev->nr_channels = nr_channels;
1564*4882a593Smuzhiyun 	dmadev->nr_requests = nr_requests;
1565*4882a593Smuzhiyun 	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1566*4882a593Smuzhiyun 				       dmadev->ahb_addr_masks,
1567*4882a593Smuzhiyun 				       count);
1568*4882a593Smuzhiyun 	dmadev->nr_ahb_addr_masks = count;
1569*4882a593Smuzhiyun 
1570*4882a593Smuzhiyun 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1571*4882a593Smuzhiyun 	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1572*4882a593Smuzhiyun 	if (IS_ERR(dmadev->base))
1573*4882a593Smuzhiyun 		return PTR_ERR(dmadev->base);
1574*4882a593Smuzhiyun 
1575*4882a593Smuzhiyun 	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1576*4882a593Smuzhiyun 	if (IS_ERR(dmadev->clk))
1577*4882a593Smuzhiyun 		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
1578*4882a593Smuzhiyun 				     "Missing clock controller\n");
1579*4882a593Smuzhiyun 
1580*4882a593Smuzhiyun 	ret = clk_prepare_enable(dmadev->clk);
1581*4882a593Smuzhiyun 	if (ret < 0) {
1582*4882a593Smuzhiyun 		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
1583*4882a593Smuzhiyun 		return ret;
1584*4882a593Smuzhiyun 	}
1585*4882a593Smuzhiyun 
1586*4882a593Smuzhiyun 	rst = devm_reset_control_get(&pdev->dev, NULL);
1587*4882a593Smuzhiyun 	if (IS_ERR(rst)) {
1588*4882a593Smuzhiyun 		ret = PTR_ERR(rst);
1589*4882a593Smuzhiyun 		if (ret == -EPROBE_DEFER)
1590*4882a593Smuzhiyun 			goto err_clk;
1591*4882a593Smuzhiyun 	} else {
1592*4882a593Smuzhiyun 		reset_control_assert(rst);
1593*4882a593Smuzhiyun 		udelay(2);
1594*4882a593Smuzhiyun 		reset_control_deassert(rst);
1595*4882a593Smuzhiyun 	}
1596*4882a593Smuzhiyun 
1597*4882a593Smuzhiyun 	dd = &dmadev->ddev;
1598*4882a593Smuzhiyun 	dma_cap_set(DMA_SLAVE, dd->cap_mask);
1599*4882a593Smuzhiyun 	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1600*4882a593Smuzhiyun 	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1601*4882a593Smuzhiyun 	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1602*4882a593Smuzhiyun 	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1603*4882a593Smuzhiyun 	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1604*4882a593Smuzhiyun 	dd->device_tx_status = stm32_mdma_tx_status;
1605*4882a593Smuzhiyun 	dd->device_issue_pending = stm32_mdma_issue_pending;
1606*4882a593Smuzhiyun 	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1607*4882a593Smuzhiyun 	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1608*4882a593Smuzhiyun 	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1609*4882a593Smuzhiyun 	dd->device_config = stm32_mdma_slave_config;
1610*4882a593Smuzhiyun 	dd->device_pause = stm32_mdma_pause;
1611*4882a593Smuzhiyun 	dd->device_resume = stm32_mdma_resume;
1612*4882a593Smuzhiyun 	dd->device_terminate_all = stm32_mdma_terminate_all;
1613*4882a593Smuzhiyun 	dd->device_synchronize = stm32_mdma_synchronize;
1614*4882a593Smuzhiyun 	dd->descriptor_reuse = true;
1615*4882a593Smuzhiyun 
1616*4882a593Smuzhiyun 	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1617*4882a593Smuzhiyun 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1618*4882a593Smuzhiyun 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1619*4882a593Smuzhiyun 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1620*4882a593Smuzhiyun 	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1621*4882a593Smuzhiyun 		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1622*4882a593Smuzhiyun 		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1623*4882a593Smuzhiyun 		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1624*4882a593Smuzhiyun 	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1625*4882a593Smuzhiyun 		BIT(DMA_MEM_TO_MEM);
1626*4882a593Smuzhiyun 	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1627*4882a593Smuzhiyun 	dd->max_burst = STM32_MDMA_MAX_BURST;
1628*4882a593Smuzhiyun 	dd->dev = &pdev->dev;
1629*4882a593Smuzhiyun 	INIT_LIST_HEAD(&dd->channels);
1630*4882a593Smuzhiyun 
1631*4882a593Smuzhiyun 	for (i = 0; i < dmadev->nr_channels; i++) {
1632*4882a593Smuzhiyun 		chan = &dmadev->chan[i];
1633*4882a593Smuzhiyun 		chan->id = i;
1634*4882a593Smuzhiyun 		chan->vchan.desc_free = stm32_mdma_desc_free;
1635*4882a593Smuzhiyun 		vchan_init(&chan->vchan, dd);
1636*4882a593Smuzhiyun 	}
1637*4882a593Smuzhiyun 
1638*4882a593Smuzhiyun 	dmadev->irq = platform_get_irq(pdev, 0);
1639*4882a593Smuzhiyun 	if (dmadev->irq < 0) {
1640*4882a593Smuzhiyun 		ret = dmadev->irq;
1641*4882a593Smuzhiyun 		goto err_clk;
1642*4882a593Smuzhiyun 	}
1643*4882a593Smuzhiyun 
1644*4882a593Smuzhiyun 	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1645*4882a593Smuzhiyun 			       0, dev_name(&pdev->dev), dmadev);
1646*4882a593Smuzhiyun 	if (ret) {
1647*4882a593Smuzhiyun 		dev_err(&pdev->dev, "failed to request IRQ\n");
1648*4882a593Smuzhiyun 		goto err_clk;
1649*4882a593Smuzhiyun 	}
1650*4882a593Smuzhiyun 
1651*4882a593Smuzhiyun 	ret = dmaenginem_async_device_register(dd);
1652*4882a593Smuzhiyun 	if (ret)
1653*4882a593Smuzhiyun 		goto err_clk;
1654*4882a593Smuzhiyun 
1655*4882a593Smuzhiyun 	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1656*4882a593Smuzhiyun 	if (ret < 0) {
1657*4882a593Smuzhiyun 		dev_err(&pdev->dev,
1658*4882a593Smuzhiyun 			"STM32 MDMA DMA OF registration failed %d\n", ret);
1659*4882a593Smuzhiyun 		goto err_clk;
1660*4882a593Smuzhiyun 	}
1661*4882a593Smuzhiyun 
1662*4882a593Smuzhiyun 	platform_set_drvdata(pdev, dmadev);
1663*4882a593Smuzhiyun 	pm_runtime_set_active(&pdev->dev);
1664*4882a593Smuzhiyun 	pm_runtime_enable(&pdev->dev);
1665*4882a593Smuzhiyun 	pm_runtime_get_noresume(&pdev->dev);
1666*4882a593Smuzhiyun 	pm_runtime_put(&pdev->dev);
1667*4882a593Smuzhiyun 
1668*4882a593Smuzhiyun 	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1669*4882a593Smuzhiyun 
1670*4882a593Smuzhiyun 	return 0;
1671*4882a593Smuzhiyun 
1672*4882a593Smuzhiyun err_clk:
1673*4882a593Smuzhiyun 	clk_disable_unprepare(dmadev->clk);
1674*4882a593Smuzhiyun 
1675*4882a593Smuzhiyun 	return ret;
1676*4882a593Smuzhiyun }
1677*4882a593Smuzhiyun 
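/*
 * Runtime PM only gates the functional clock; the pm_runtime_resume_and_get()
 * in stm32_mdma_alloc_chan_resources() holds a reference while any channel
 * is claimed, so reaching runtime suspend implies no channel is in use.
 */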
1678*4882a593Smuzhiyun #ifdef CONFIG_PM
1679*4882a593Smuzhiyun static int stm32_mdma_runtime_suspend(struct device *dev)
1680*4882a593Smuzhiyun {
1681*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1682*4882a593Smuzhiyun 
1683*4882a593Smuzhiyun 	clk_disable_unprepare(dmadev->clk);
1684*4882a593Smuzhiyun 
1685*4882a593Smuzhiyun 	return 0;
1686*4882a593Smuzhiyun }
1687*4882a593Smuzhiyun 
1688*4882a593Smuzhiyun static int stm32_mdma_runtime_resume(struct device *dev)
1689*4882a593Smuzhiyun {
1690*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1691*4882a593Smuzhiyun 	int ret;
1692*4882a593Smuzhiyun 
1693*4882a593Smuzhiyun 	ret = clk_prepare_enable(dmadev->clk);
1694*4882a593Smuzhiyun 	if (ret) {
1695*4882a593Smuzhiyun 		dev_err(dev, "failed to prepare_enable clock\n");
1696*4882a593Smuzhiyun 		return ret;
1697*4882a593Smuzhiyun 	}
1698*4882a593Smuzhiyun 
1699*4882a593Smuzhiyun 	return 0;
1700*4882a593Smuzhiyun }
1701*4882a593Smuzhiyun #endif
1702*4882a593Smuzhiyun 
1703*4882a593Smuzhiyun #ifdef CONFIG_PM_SLEEP
1704*4882a593Smuzhiyun static int stm32_mdma_pm_suspend(struct device *dev)
1705*4882a593Smuzhiyun {
1706*4882a593Smuzhiyun 	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1707*4882a593Smuzhiyun 	u32 ccr, id;
1708*4882a593Smuzhiyun 	int ret;
1709*4882a593Smuzhiyun 
1710*4882a593Smuzhiyun 	ret = pm_runtime_resume_and_get(dev);
1711*4882a593Smuzhiyun 	if (ret < 0)
1712*4882a593Smuzhiyun 		return ret;
1713*4882a593Smuzhiyun 
1714*4882a593Smuzhiyun 	for (id = 0; id < dmadev->nr_channels; id++) {
1715*4882a593Smuzhiyun 		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1716*4882a593Smuzhiyun 		if (ccr & STM32_MDMA_CCR_EN) {
1717*4882a593Smuzhiyun 			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
			pm_runtime_put_sync(dev);
1718*4882a593Smuzhiyun 			return -EBUSY;
1719*4882a593Smuzhiyun 		}
1720*4882a593Smuzhiyun 	}
1721*4882a593Smuzhiyun 
1722*4882a593Smuzhiyun 	pm_runtime_put_sync(dev);
1723*4882a593Smuzhiyun 
1724*4882a593Smuzhiyun 	pm_runtime_force_suspend(dev);
1725*4882a593Smuzhiyun 
1726*4882a593Smuzhiyun 	return 0;
1727*4882a593Smuzhiyun }
1728*4882a593Smuzhiyun 
1729*4882a593Smuzhiyun static int stm32_mdma_pm_resume(struct device *dev)
1730*4882a593Smuzhiyun {
1731*4882a593Smuzhiyun 	return pm_runtime_force_resume(dev);
1732*4882a593Smuzhiyun }
1733*4882a593Smuzhiyun #endif
1734*4882a593Smuzhiyun 
1735*4882a593Smuzhiyun static const struct dev_pm_ops stm32_mdma_pm_ops = {
1736*4882a593Smuzhiyun 	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
1737*4882a593Smuzhiyun 	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
1738*4882a593Smuzhiyun 			   stm32_mdma_runtime_resume, NULL)
1739*4882a593Smuzhiyun };
1740*4882a593Smuzhiyun 
1741*4882a593Smuzhiyun static struct platform_driver stm32_mdma_driver = {
1742*4882a593Smuzhiyun 	.probe = stm32_mdma_probe,
1743*4882a593Smuzhiyun 	.driver = {
1744*4882a593Smuzhiyun 		.name = "stm32-mdma",
1745*4882a593Smuzhiyun 		.of_match_table = stm32_mdma_of_match,
1746*4882a593Smuzhiyun 		.pm = &stm32_mdma_pm_ops,
1747*4882a593Smuzhiyun 	},
1748*4882a593Smuzhiyun };
1749*4882a593Smuzhiyun 
1750*4882a593Smuzhiyun static int __init stm32_mdma_init(void)
1751*4882a593Smuzhiyun {
1752*4882a593Smuzhiyun 	return platform_driver_register(&stm32_mdma_driver);
1753*4882a593Smuzhiyun }
1754*4882a593Smuzhiyun 
1755*4882a593Smuzhiyun subsys_initcall(stm32_mdma_init);
1756*4882a593Smuzhiyun 
1757*4882a593Smuzhiyun MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1758*4882a593Smuzhiyun MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1759*4882a593Smuzhiyun MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1760*4882a593Smuzhiyun MODULE_LICENSE("GPL v2");
1761