xref: /OK3568_Linux_fs/kernel/include/linux/pci-dma-compat.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun /* include this file if the platform implements the dma_ DMA Mapping API
3*4882a593Smuzhiyun  * and wants to provide the pci_ DMA Mapping API in terms of it */
4*4882a593Smuzhiyun 
5*4882a593Smuzhiyun #ifndef _ASM_GENERIC_PCI_DMA_COMPAT_H
6*4882a593Smuzhiyun #define _ASM_GENERIC_PCI_DMA_COMPAT_H
7*4882a593Smuzhiyun 
8*4882a593Smuzhiyun #include <linux/dma-mapping.h>
9*4882a593Smuzhiyun 
/* This defines the direction arg to the DMA mapping routines. */
/* Legacy pci_ direction values map 1:1 onto the generic dma_data_direction
 * constants, so wrappers below may cast an int direction straight through. */
#define PCI_DMA_BIDIRECTIONAL	DMA_BIDIRECTIONAL
#define PCI_DMA_TODEVICE	DMA_TO_DEVICE
#define PCI_DMA_FROMDEVICE	DMA_FROM_DEVICE
#define PCI_DMA_NONE		DMA_NONE
15*4882a593Smuzhiyun 
16*4882a593Smuzhiyun static inline void *
pci_alloc_consistent(struct pci_dev * hwdev,size_t size,dma_addr_t * dma_handle)17*4882a593Smuzhiyun pci_alloc_consistent(struct pci_dev *hwdev, size_t size,
18*4882a593Smuzhiyun 		     dma_addr_t *dma_handle)
19*4882a593Smuzhiyun {
20*4882a593Smuzhiyun 	return dma_alloc_coherent(&hwdev->dev, size, dma_handle, GFP_ATOMIC);
21*4882a593Smuzhiyun }
22*4882a593Smuzhiyun 
23*4882a593Smuzhiyun static inline void *
pci_zalloc_consistent(struct pci_dev * hwdev,size_t size,dma_addr_t * dma_handle)24*4882a593Smuzhiyun pci_zalloc_consistent(struct pci_dev *hwdev, size_t size,
25*4882a593Smuzhiyun 		      dma_addr_t *dma_handle)
26*4882a593Smuzhiyun {
27*4882a593Smuzhiyun 	return dma_alloc_coherent(&hwdev->dev, size, dma_handle, GFP_ATOMIC);
28*4882a593Smuzhiyun }
29*4882a593Smuzhiyun 
30*4882a593Smuzhiyun static inline void
pci_free_consistent(struct pci_dev * hwdev,size_t size,void * vaddr,dma_addr_t dma_handle)31*4882a593Smuzhiyun pci_free_consistent(struct pci_dev *hwdev, size_t size,
32*4882a593Smuzhiyun 		    void *vaddr, dma_addr_t dma_handle)
33*4882a593Smuzhiyun {
34*4882a593Smuzhiyun 	dma_free_coherent(&hwdev->dev, size, vaddr, dma_handle);
35*4882a593Smuzhiyun }
36*4882a593Smuzhiyun 
37*4882a593Smuzhiyun static inline dma_addr_t
pci_map_single(struct pci_dev * hwdev,void * ptr,size_t size,int direction)38*4882a593Smuzhiyun pci_map_single(struct pci_dev *hwdev, void *ptr, size_t size, int direction)
39*4882a593Smuzhiyun {
40*4882a593Smuzhiyun 	return dma_map_single(&hwdev->dev, ptr, size, (enum dma_data_direction)direction);
41*4882a593Smuzhiyun }
42*4882a593Smuzhiyun 
43*4882a593Smuzhiyun static inline void
pci_unmap_single(struct pci_dev * hwdev,dma_addr_t dma_addr,size_t size,int direction)44*4882a593Smuzhiyun pci_unmap_single(struct pci_dev *hwdev, dma_addr_t dma_addr,
45*4882a593Smuzhiyun 		 size_t size, int direction)
46*4882a593Smuzhiyun {
47*4882a593Smuzhiyun 	dma_unmap_single(&hwdev->dev, dma_addr, size, (enum dma_data_direction)direction);
48*4882a593Smuzhiyun }
49*4882a593Smuzhiyun 
50*4882a593Smuzhiyun static inline dma_addr_t
pci_map_page(struct pci_dev * hwdev,struct page * page,unsigned long offset,size_t size,int direction)51*4882a593Smuzhiyun pci_map_page(struct pci_dev *hwdev, struct page *page,
52*4882a593Smuzhiyun 	     unsigned long offset, size_t size, int direction)
53*4882a593Smuzhiyun {
54*4882a593Smuzhiyun 	return dma_map_page(&hwdev->dev, page, offset, size, (enum dma_data_direction)direction);
55*4882a593Smuzhiyun }
56*4882a593Smuzhiyun 
57*4882a593Smuzhiyun static inline void
pci_unmap_page(struct pci_dev * hwdev,dma_addr_t dma_address,size_t size,int direction)58*4882a593Smuzhiyun pci_unmap_page(struct pci_dev *hwdev, dma_addr_t dma_address,
59*4882a593Smuzhiyun 	       size_t size, int direction)
60*4882a593Smuzhiyun {
61*4882a593Smuzhiyun 	dma_unmap_page(&hwdev->dev, dma_address, size, (enum dma_data_direction)direction);
62*4882a593Smuzhiyun }
63*4882a593Smuzhiyun 
64*4882a593Smuzhiyun static inline int
pci_map_sg(struct pci_dev * hwdev,struct scatterlist * sg,int nents,int direction)65*4882a593Smuzhiyun pci_map_sg(struct pci_dev *hwdev, struct scatterlist *sg,
66*4882a593Smuzhiyun 	   int nents, int direction)
67*4882a593Smuzhiyun {
68*4882a593Smuzhiyun 	return dma_map_sg(&hwdev->dev, sg, nents, (enum dma_data_direction)direction);
69*4882a593Smuzhiyun }
70*4882a593Smuzhiyun 
71*4882a593Smuzhiyun static inline void
pci_unmap_sg(struct pci_dev * hwdev,struct scatterlist * sg,int nents,int direction)72*4882a593Smuzhiyun pci_unmap_sg(struct pci_dev *hwdev, struct scatterlist *sg,
73*4882a593Smuzhiyun 	     int nents, int direction)
74*4882a593Smuzhiyun {
75*4882a593Smuzhiyun 	dma_unmap_sg(&hwdev->dev, sg, nents, (enum dma_data_direction)direction);
76*4882a593Smuzhiyun }
77*4882a593Smuzhiyun 
78*4882a593Smuzhiyun static inline void
pci_dma_sync_single_for_cpu(struct pci_dev * hwdev,dma_addr_t dma_handle,size_t size,int direction)79*4882a593Smuzhiyun pci_dma_sync_single_for_cpu(struct pci_dev *hwdev, dma_addr_t dma_handle,
80*4882a593Smuzhiyun 		    size_t size, int direction)
81*4882a593Smuzhiyun {
82*4882a593Smuzhiyun 	dma_sync_single_for_cpu(&hwdev->dev, dma_handle, size, (enum dma_data_direction)direction);
83*4882a593Smuzhiyun }
84*4882a593Smuzhiyun 
85*4882a593Smuzhiyun static inline void
pci_dma_sync_single_for_device(struct pci_dev * hwdev,dma_addr_t dma_handle,size_t size,int direction)86*4882a593Smuzhiyun pci_dma_sync_single_for_device(struct pci_dev *hwdev, dma_addr_t dma_handle,
87*4882a593Smuzhiyun 		    size_t size, int direction)
88*4882a593Smuzhiyun {
89*4882a593Smuzhiyun 	dma_sync_single_for_device(&hwdev->dev, dma_handle, size, (enum dma_data_direction)direction);
90*4882a593Smuzhiyun }
91*4882a593Smuzhiyun 
92*4882a593Smuzhiyun static inline void
pci_dma_sync_sg_for_cpu(struct pci_dev * hwdev,struct scatterlist * sg,int nelems,int direction)93*4882a593Smuzhiyun pci_dma_sync_sg_for_cpu(struct pci_dev *hwdev, struct scatterlist *sg,
94*4882a593Smuzhiyun 		int nelems, int direction)
95*4882a593Smuzhiyun {
96*4882a593Smuzhiyun 	dma_sync_sg_for_cpu(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);
97*4882a593Smuzhiyun }
98*4882a593Smuzhiyun 
99*4882a593Smuzhiyun static inline void
pci_dma_sync_sg_for_device(struct pci_dev * hwdev,struct scatterlist * sg,int nelems,int direction)100*4882a593Smuzhiyun pci_dma_sync_sg_for_device(struct pci_dev *hwdev, struct scatterlist *sg,
101*4882a593Smuzhiyun 		int nelems, int direction)
102*4882a593Smuzhiyun {
103*4882a593Smuzhiyun 	dma_sync_sg_for_device(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);
104*4882a593Smuzhiyun }
105*4882a593Smuzhiyun 
106*4882a593Smuzhiyun static inline int
pci_dma_mapping_error(struct pci_dev * pdev,dma_addr_t dma_addr)107*4882a593Smuzhiyun pci_dma_mapping_error(struct pci_dev *pdev, dma_addr_t dma_addr)
108*4882a593Smuzhiyun {
109*4882a593Smuzhiyun 	return dma_mapping_error(&pdev->dev, dma_addr);
110*4882a593Smuzhiyun }
111*4882a593Smuzhiyun 
#ifdef CONFIG_PCI
/* Legacy pci_ API: set the streaming DMA addressing mask for @dev. */
static inline int pci_set_dma_mask(struct pci_dev *dev, u64 mask)
{
	return dma_set_mask(&dev->dev, mask);
}

/* Legacy pci_ API: set the coherent-allocation DMA mask for @dev. */
static inline int pci_set_consistent_dma_mask(struct pci_dev *dev, u64 mask)
{
	return dma_set_coherent_mask(&dev->dev, mask);
}
#else
/* Without PCI support no mask can be honoured; fail as the generic code does. */
static inline int pci_set_dma_mask(struct pci_dev *dev, u64 mask)
{
	return -EIO;
}

static inline int pci_set_consistent_dma_mask(struct pci_dev *dev, u64 mask)
{
	return -EIO;
}
#endif
128*4882a593Smuzhiyun 
129*4882a593Smuzhiyun #endif
130