/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _MXS_DMA_H_
#define _MXS_DMA_H_

#include <linux/bits.h>
#include <linux/dmaengine.h>

#define MXS_DMA_CTRL_WAIT4END	BIT(31)
#define MXS_DMA_CTRL_WAIT4RDY	BIT(30)
/*
 * The mxs dmaengine can do PIO transfers. We pass a pointer to the PIO words
 * in the second argument to dmaengine_prep_slave_sg() when the direction is
 * set to DMA_TRANS_NONE. To make this clear and to prevent users from doing
 * the error-prone casting themselves, we provide this wrapper function.
 */
static inline struct dma_async_tx_descriptor *mxs_dmaengine_prep_pio(
	struct dma_chan *chan, u32 *pio, unsigned int npio,
	enum dma_transfer_direction dir, unsigned long flags)
{
	return dmaengine_prep_slave_sg(chan, (struct scatterlist *)pio, npio,
				       dir, flags);
}
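
/*
 * Illustrative usage sketch: the channel "chan", the PIO word count and the
 * flag choice below are hypothetical and not taken from any specific driver.
 * A client that already holds a DMA channel for an MXS peripheral could
 * prepare and submit a PIO-only descriptor roughly like this, after filling
 * pio[] with controller-specific register values:
 *
 *	u32 pio[2];
 *	struct dma_async_tx_descriptor *desc;
 *
 *	desc = mxs_dmaengine_prep_pio(chan, pio, ARRAY_SIZE(pio),
 *				      DMA_TRANS_NONE, MXS_DMA_CTRL_WAIT4END);
 *	if (!desc)
 *		return -EINVAL;
 *
 *	dmaengine_submit(desc);
 *	dma_async_issue_pending(chan);
 */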

#endif /* _MXS_DMA_H_ */