
Searched refs:iova (Results 1 – 25 of 245) sorted by relevance


/OK3568_Linux_fs/kernel/drivers/staging/media/tegra-vde/
iommu.c
21 struct iova **iovap, in tegra_vde_iommu_map()
24 struct iova *iova; in tegra_vde_iommu_map() local
30 size = iova_align(&vde->iova, size); in tegra_vde_iommu_map()
31 shift = iova_shift(&vde->iova); in tegra_vde_iommu_map()
33 iova = alloc_iova(&vde->iova, size >> shift, end >> shift, true); in tegra_vde_iommu_map()
34 if (!iova) in tegra_vde_iommu_map()
37 addr = iova_dma_addr(&vde->iova, iova); in tegra_vde_iommu_map()
42 __free_iova(&vde->iova, iova); in tegra_vde_iommu_map()
46 *iovap = iova; in tegra_vde_iommu_map()
51 void tegra_vde_iommu_unmap(struct tegra_vde *vde, struct iova *iova) in tegra_vde_iommu_unmap() argument
[all …]
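The iommu.c hits above trace the usual allocate-then-map flow: carve a block out of the driver's iova_domain with alloc_iova(), convert it to a device address with iova_dma_addr(), and hand it back with __free_iova() if the IOMMU mapping fails. A minimal sketch of that pattern follows; the my_dev_* names and the use of the domain aperture as the allocation limit are assumptions, not the driver's code.

```c
#include <linux/iommu.h>
#include <linux/iova.h>
#include <linux/scatterlist.h>

/* Hypothetical helper mirroring the alloc_iova()/iova_dma_addr()/__free_iova()
 * pattern seen in tegra_vde_iommu_map(); not the driver's actual code. */
static int my_dev_iommu_map(struct iommu_domain *domain,
			    struct iova_domain *iovad,
			    struct sg_table *sgt, size_t size,
			    struct iova **iovap, dma_addr_t *addrp)
{
	unsigned long shift = iova_shift(iovad);
	struct iova *iova;
	dma_addr_t addr;
	size_t mapped;

	size = iova_align(iovad, size);

	/* Allocate "size >> shift" granules below the domain aperture end. */
	iova = alloc_iova(iovad, size >> shift,
			  domain->geometry.aperture_end >> shift, true);
	if (!iova)
		return -ENOMEM;

	addr = iova_dma_addr(iovad, iova);
	mapped = iommu_map_sg(domain, addr, sgt->sgl, sgt->nents,
			      IOMMU_READ | IOMMU_WRITE);
	if (!mapped) {
		/* Mapping failed: return the IOVA block to the allocator. */
		__free_iova(iovad, iova);
		return -ENXIO;
	}

	*iovap = iova;
	*addrp = addr;
	return 0;
}
```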
dmabuf-cache.c
25 struct iova *iova; member
36 tegra_vde_iommu_unmap(entry->vde, entry->iova); in tegra_vde_release_entry()
70 struct iova *iova; in tegra_vde_dmabuf_cache_map() local
88 *addrp = iova_dma_addr(&vde->iova, entry->iova); in tegra_vde_dmabuf_cache_map()
122 err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size); in tegra_vde_dmabuf_cache_map()
126 *addrp = iova_dma_addr(&vde->iova, iova); in tegra_vde_dmabuf_cache_map()
129 iova = NULL; in tegra_vde_dmabuf_cache_map()
136 entry->iova = iova; in tegra_vde_dmabuf_cache_map()
/OK3568_Linux_fs/kernel/drivers/iommu/
iova.c
126 __cached_rbnode_insert_update(struct iova_domain *iovad, struct iova *new) in __cached_rbnode_insert_update()
135 __cached_rbnode_delete_update(struct iova_domain *iovad, struct iova *free) in __cached_rbnode_delete_update()
137 struct iova *cached_iova; in __cached_rbnode_delete_update()
139 cached_iova = rb_entry(iovad->cached32_node, struct iova, node); in __cached_rbnode_delete_update()
148 cached_iova = rb_entry(iovad->cached_node, struct iova, node); in __cached_rbnode_delete_update()
155 iova_insert_rbtree(struct rb_root *root, struct iova *iova, in iova_insert_rbtree() argument
163 struct iova *this = rb_entry(*new, struct iova, node); in iova_insert_rbtree()
167 if (iova->pfn_lo < this->pfn_lo) in iova_insert_rbtree()
169 else if (iova->pfn_lo > this->pfn_lo) in iova_insert_rbtree()
177 rb_link_node(&iova->node, parent, new); in iova_insert_rbtree()
[all …]
tegra-gart.c
60 #define for_each_gart_pte(gart, iova) \ argument
61 for (iova = gart->iovmm_base; \
62 iova < gart->iovmm_end; \
63 iova += GART_PAGE_SIZE)
66 unsigned long iova, unsigned long pte) in gart_set_pte() argument
68 writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR); in gart_set_pte()
73 unsigned long iova) in gart_read_pte() argument
77 writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR); in gart_read_pte()
85 unsigned long iova; in do_gart_setup() local
87 for_each_gart_pte(gart, iova) in do_gart_setup()
[all …]
io-pgtable-arm-v7s.c
419 unsigned long iova, phys_addr_t paddr, int prot, in arm_v7s_init_pte() argument
435 tblp = ptep - ARM_V7S_LVL_IDX(iova, lvl, cfg); in arm_v7s_init_pte()
436 if (WARN_ON(__arm_v7s_unmap(data, NULL, iova + i * sz, in arm_v7s_init_pte()
479 static int __arm_v7s_map(struct arm_v7s_io_pgtable *data, unsigned long iova, in __arm_v7s_map() argument
488 ptep += ARM_V7S_LVL_IDX(iova, lvl, cfg); in __arm_v7s_map()
492 return arm_v7s_init_pte(data, iova, paddr, prot, in __arm_v7s_map()
523 return __arm_v7s_map(data, iova, paddr, size, prot, lvl + 1, cptep, gfp); in __arm_v7s_map()
526 static int arm_v7s_map_pages(struct io_pgtable_ops *ops, unsigned long iova, in arm_v7s_map_pages() argument
537 if (WARN_ON(iova >= (1ULL << data->iop.cfg.ias) || in arm_v7s_map_pages()
542 ret = __arm_v7s_map(data, iova, paddr, pgsize, prot, 1, data->pgd, in arm_v7s_map_pages()
[all …]
exynos-iommu.c
99 #define section_offs(iova) (iova & (SECT_SIZE - 1)) argument
101 #define lpage_offs(iova) (iova & (LPAGE_SIZE - 1)) argument
103 #define spage_offs(iova) (iova & (SPAGE_SIZE - 1)) argument
108 static u32 lv1ent_offset(sysmmu_iova_t iova) in lv1ent_offset() argument
110 return iova >> SECT_ORDER; in lv1ent_offset()
113 static u32 lv2ent_offset(sysmmu_iova_t iova) in lv2ent_offset() argument
115 return (iova >> SPAGE_ORDER) & (NUM_LV2ENTRIES - 1); in lv2ent_offset()
183 static sysmmu_pte_t *section_entry(sysmmu_pte_t *pgtable, sysmmu_iova_t iova) in section_entry() argument
185 return pgtable + lv1ent_offset(iova); in section_entry()
188 static sysmmu_pte_t *page_entry(sysmmu_pte_t *sent, sysmmu_iova_t iova) in page_entry() argument
[all …]
io-pgtable-arm.c
260 unsigned long iova, size_t size, size_t pgcount,
285 unsigned long iova, phys_addr_t paddr, in arm_lpae_init_pte() argument
304 tblp = ptep - ARM_LPAE_LVL_IDX(iova, lvl, data); in arm_lpae_init_pte()
305 if (__arm_lpae_unmap(data, NULL, iova + i * sz, sz, 1, in arm_lpae_init_pte()
348 static int __arm_lpae_map(struct arm_lpae_io_pgtable *data, unsigned long iova, in __arm_lpae_map() argument
360 map_idx_start = ARM_LPAE_LVL_IDX(iova, lvl, data); in __arm_lpae_map()
367 ret = arm_lpae_init_pte(data, iova, paddr, prot, lvl, num_entries, ptep); in __arm_lpae_map()
401 return __arm_lpae_map(data, iova, paddr, size, pgcount, prot, lvl + 1, in __arm_lpae_map()
479 static int arm_lpae_map_pages(struct io_pgtable_ops *ops, unsigned long iova, in arm_lpae_map_pages() argument
488 long iaext = (s64)iova >> cfg->ias; in arm_lpae_map_pages()
[all …]
tegra-smmu.c
155 static unsigned int iova_pd_index(unsigned long iova) in iova_pd_index() argument
157 return (iova >> SMMU_PDE_SHIFT) & (SMMU_NUM_PDE - 1); in iova_pd_index()
160 static unsigned int iova_pt_index(unsigned long iova) in iova_pt_index() argument
162 return (iova >> SMMU_PTE_SHIFT) & (SMMU_NUM_PTE - 1); in iova_pt_index()
222 unsigned long iova) in smmu_flush_tlb_section() argument
231 value |= SMMU_TLB_FLUSH_ASID_MATCH | SMMU_TLB_FLUSH_VA_SECTION(iova); in smmu_flush_tlb_section()
237 unsigned long iova) in smmu_flush_tlb_group() argument
246 value |= SMMU_TLB_FLUSH_ASID_MATCH | SMMU_TLB_FLUSH_VA_GROUP(iova); in smmu_flush_tlb_group()
536 static void tegra_smmu_set_pde(struct tegra_smmu_as *as, unsigned long iova, in tegra_smmu_set_pde() argument
539 unsigned int pd_index = iova_pd_index(iova); in tegra_smmu_set_pde()
[all …]
dma-iommu.c
30 dma_addr_t iova; member
192 msi_page->iova = start; in cookie_init_hw_msi_region()
478 unsigned long shift, iova_len, iova = 0; in iommu_dma_alloc_iova() local
503 iova = alloc_iova_fast(iovad, iova_len, in iommu_dma_alloc_iova()
506 if (!iova) in iommu_dma_alloc_iova()
507 iova = alloc_iova_fast(iovad, iova_len, dma_limit >> shift, in iommu_dma_alloc_iova()
510 trace_android_vh_iommu_alloc_iova(dev, (dma_addr_t)iova << shift, size); in iommu_dma_alloc_iova()
511 trace_android_vh_iommu_iovad_alloc_iova(dev, iovad, (dma_addr_t)iova << shift, size); in iommu_dma_alloc_iova()
513 return (dma_addr_t)iova << shift; in iommu_dma_alloc_iova()
517 dma_addr_t iova, size_t size) in iommu_dma_free_iova() argument
[all …]
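The dma-iommu.c snippet of iommu_dma_alloc_iova() shows the fast-path allocation strategy: try for IOVA space below 4 GiB first (leaving room for devices with narrow DMA masks), retry against the full DMA limit, then shift the returned pfn into a dma_addr_t. A hedged sketch of that strategy, with simplified length rounding; the my_* names are placeholders.

```c
#include <linux/dma-mapping.h>
#include <linux/iova.h>

/* Sketch of the two-step allocation in iommu_dma_alloc_iova(); the length
 * rounding is simplified compared to the real helper. */
static dma_addr_t my_alloc_iova(struct iova_domain *iovad, size_t size,
				u64 dma_limit)
{
	unsigned long shift = iova_shift(iovad);
	unsigned long iova_len = iova_align(iovad, size) >> shift;
	unsigned long pfn = 0;

	/* Prefer the 32-bit window so narrow-mask devices keep room there. */
	if (dma_limit > DMA_BIT_MASK(32))
		pfn = alloc_iova_fast(iovad, iova_len,
				      DMA_BIT_MASK(32) >> shift, false);
	if (!pfn)
		pfn = alloc_iova_fast(iovad, iova_len,
				      dma_limit >> shift, true);

	return (dma_addr_t)pfn << shift;	/* 0 means allocation failed */
}

static void my_free_iova(struct iova_domain *iovad, dma_addr_t iova, size_t size)
{
	free_iova_fast(iovad, iova_pfn(iovad, iova),
		       iova_align(iovad, size) >> iova_shift(iovad));
}
```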
sun50i-iommu.c
163 static u32 sun50i_iova_get_dte_index(dma_addr_t iova) in sun50i_iova_get_dte_index() argument
165 return FIELD_GET(SUN50I_IOVA_DTE_MASK, iova); in sun50i_iova_get_dte_index()
168 static u32 sun50i_iova_get_pte_index(dma_addr_t iova) in sun50i_iova_get_pte_index() argument
170 return FIELD_GET(SUN50I_IOVA_PTE_MASK, iova); in sun50i_iova_get_pte_index()
173 static u32 sun50i_iova_get_page_offset(dma_addr_t iova) in sun50i_iova_get_page_offset() argument
175 return FIELD_GET(SUN50I_IOVA_PAGE_MASK, iova); in sun50i_iova_get_page_offset()
483 dma_addr_t iova, gfp_t gfp) in sun50i_dte_get_page_table() argument
491 dte_addr = &sun50i_domain->dt[sun50i_iova_get_dte_index(iova)]; in sun50i_dte_get_page_table()
521 static int sun50i_iommu_map(struct iommu_domain *domain, unsigned long iova, in sun50i_iommu_map() argument
530 page_table = sun50i_dte_get_page_table(sun50i_domain, iova, gfp); in sun50i_iommu_map()
[all …]
virtio-iommu.c
59 struct interval_tree_node iova; member
313 static int viommu_add_mapping(struct viommu_domain *vdomain, unsigned long iova, in viommu_add_mapping() argument
324 mapping->iova.start = iova; in viommu_add_mapping()
325 mapping->iova.last = iova + size - 1; in viommu_add_mapping()
329 interval_tree_insert(&mapping->iova, &vdomain->mappings); in viommu_add_mapping()
346 unsigned long iova, size_t size) in viommu_del_mappings() argument
350 unsigned long last = iova + size - 1; in viommu_del_mappings()
355 next = interval_tree_iter_first(&vdomain->mappings, iova, last); in viommu_del_mappings()
358 mapping = container_of(node, struct viommu_mapping, iova); in viommu_del_mappings()
359 next = interval_tree_iter_next(node, iova, last); in viommu_del_mappings()
[all …]
/OK3568_Linux_fs/kernel/include/linux/
iova.h
19 struct iova { struct
85 struct iova anchor; /* rbtree lookup anchor */
101 static inline unsigned long iova_size(struct iova *iova) in iova_size() argument
103 return iova->pfn_hi - iova->pfn_lo + 1; in iova_size()
116 static inline size_t iova_offset(struct iova_domain *iovad, dma_addr_t iova) in iova_offset() argument
118 return iova & iova_mask(iovad); in iova_offset()
126 static inline dma_addr_t iova_dma_addr(struct iova_domain *iovad, struct iova *iova) in iova_dma_addr() argument
128 return (dma_addr_t)iova->pfn_lo << iova_shift(iovad); in iova_dma_addr()
131 static inline unsigned long iova_pfn(struct iova_domain *iovad, dma_addr_t iova) in iova_pfn() argument
133 return iova >> iova_shift(iovad); in iova_pfn()
[all …]
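The iova.h helpers above are thin pfn arithmetic around the domain granule. As a quick worked example (userspace C, assuming a 4 KiB granule so iova_shift() is 12): an entry with pfn_lo = 0x10 and pfn_hi = 0x1f covers 16 pages starting at device address 0x10000, and an address such as 0x10234 splits into pfn 0x10 plus offset 0x234.

```c
/* Userspace illustration of the iova.h pfn arithmetic (4 KiB granule assumed). */
#include <stdio.h>

int main(void)
{
	unsigned long shift = 12;                     /* iova_shift() for a 4 KiB granule */
	unsigned long pfn_lo = 0x10, pfn_hi = 0x1f;   /* one allocated struct iova */
	unsigned long long addr = 0x10234;            /* a device address inside it */

	printf("iova_size:     %lu pages\n", pfn_hi - pfn_lo + 1);              /* 16 */
	printf("iova_dma_addr: 0x%llx\n", (unsigned long long)pfn_lo << shift); /* 0x10000 */
	printf("iova_pfn:      0x%llx\n", addr >> shift);                       /* 0x10 */
	printf("iova_offset:   0x%llx\n", addr & ((1UL << shift) - 1));         /* 0x234 */
	return 0;
}
```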
io-pgtable.h
39 void (*tlb_flush_walk)(unsigned long iova, size_t size, size_t granule,
42 unsigned long iova, size_t granule, void *cookie);
153 int (*map)(struct io_pgtable_ops *ops, unsigned long iova,
155 int (*map_pages)(struct io_pgtable_ops *ops, unsigned long iova,
158 int (*map_sg)(struct io_pgtable_ops *ops, unsigned long iova,
161 size_t (*unmap)(struct io_pgtable_ops *ops, unsigned long iova,
163 size_t (*unmap_pages)(struct io_pgtable_ops *ops, unsigned long iova,
167 unsigned long iova);
223 io_pgtable_tlb_flush_walk(struct io_pgtable *iop, unsigned long iova, in io_pgtable_tlb_flush_walk() argument
227 iop->cfg.tlb->tlb_flush_walk(iova, size, granule, iop->cookie); in io_pgtable_tlb_flush_walk()
[all …]
/OK3568_Linux_fs/kernel/drivers/fpga/
dfl-afu-dma-region.c
125 u64 iova, u64 size) in dma_region_check_iova() argument
127 if (!size && region->iova != iova) in dma_region_check_iova()
130 return (region->iova <= iova) && in dma_region_check_iova()
131 (region->length + region->iova >= iova + size); in dma_region_check_iova()
150 (unsigned long long)region->iova); in afu_dma_region_add()
161 if (dma_region_check_iova(this, region->iova, region->length)) in afu_dma_region_add()
164 if (region->iova < this->iova) in afu_dma_region_add()
166 else if (region->iova > this->iova) in afu_dma_region_add()
191 (unsigned long long)region->iova); in afu_dma_region_remove()
213 (unsigned long long)region->iova); in afu_dma_region_destroy()
[all …]
/OK3568_Linux_fs/kernel/drivers/vfio/
vfio_iommu_type1.c
90 dma_addr_t iova; /* Device address */ member
125 dma_addr_t iova; /* Device address */ member
132 dma_addr_t iova; member
172 if (start + size <= dma->iova) in vfio_find_dma()
174 else if (start >= dma->iova + dma->size) in vfio_find_dma()
192 if (new->iova + new->size <= dma->iova) in vfio_link_dma()
242 bitmap_set(dma->bitmap, (vpfn->iova - dma->iova) >> pgshift, 1); in vfio_dma_populate_bitmap()
297 static struct vfio_pfn *vfio_find_vpfn(struct vfio_dma *dma, dma_addr_t iova) in vfio_find_vpfn() argument
305 if (iova < vpfn->iova) in vfio_find_vpfn()
307 else if (iova > vpfn->iova) in vfio_find_vpfn()
[all …]
/OK3568_Linux_fs/kernel/include/trace/events/
iommu.h
88 TP_PROTO(unsigned long iova, phys_addr_t paddr, size_t size),
90 TP_ARGS(iova, paddr, size),
93 __field(u64, iova)
99 __entry->iova = iova;
105 __entry->iova, __entry->paddr, __entry->size
111 TP_PROTO(unsigned long iova, size_t size, size_t unmapped_size),
113 TP_ARGS(iova, size, unmapped_size),
116 __field(u64, iova)
122 __entry->iova = iova;
128 __entry->iova, __entry->size, __entry->unmapped_size
[all …]
/OK3568_Linux_fs/kernel/drivers/staging/media/ipu3/
ipu3-dmamap.c
102 struct iova *iova; in imgu_dmamap_alloc() local
107 iova = alloc_iova(&imgu->iova_domain, size >> shift, in imgu_dmamap_alloc()
109 if (!iova) in imgu_dmamap_alloc()
117 iovaddr = iova_dma_addr(&imgu->iova_domain, iova); in imgu_dmamap_alloc()
133 map->daddr = iova_dma_addr(&imgu->iova_domain, iova); in imgu_dmamap_alloc()
142 imgu_mmu_unmap(imgu->mmu, iova_dma_addr(&imgu->iova_domain, iova), in imgu_dmamap_alloc()
146 __free_iova(&imgu->iova_domain, iova); in imgu_dmamap_alloc()
153 struct iova *iova; in imgu_dmamap_unmap() local
155 iova = find_iova(&imgu->iova_domain, in imgu_dmamap_unmap()
157 if (WARN_ON(!iova)) in imgu_dmamap_unmap()
[all …]
ipu3-mmu.c
154 static inline void address_to_pte_idx(unsigned long iova, u32 *l1pt_idx, in address_to_pte_idx() argument
157 iova >>= IPU3_PAGE_SHIFT; in address_to_pte_idx()
160 *l2pt_idx = iova & IPU3_L2PT_MASK; in address_to_pte_idx()
162 iova >>= IPU3_L2PT_SHIFT; in address_to_pte_idx()
165 *l1pt_idx = iova & IPU3_L1PT_MASK; in address_to_pte_idx()
210 static int __imgu_mmu_map(struct imgu_mmu *mmu, unsigned long iova, in __imgu_mmu_map() argument
220 address_to_pte_idx(iova, &l1pt_idx, &l2pt_idx); in __imgu_mmu_map()
251 int imgu_mmu_map(struct imgu_mmu_info *info, unsigned long iova, in imgu_mmu_map() argument
262 if (!IS_ALIGNED(iova | paddr | size, IPU3_PAGE_SIZE)) { in imgu_mmu_map()
264 iova, &paddr, size); in imgu_mmu_map()
[all …]
/OK3568_Linux_fs/kernel/drivers/video/rockchip/mpp/hack/
mpp_rkvdec2_link_hack_rk3568.c
191 u32 iova = fix->iova; in rkvdec2_link_hack_data_setup() local
195 rkvdec2_3568_hack_link[72] = iova; in rkvdec2_link_hack_data_setup()
197 rkvdec2_3568_hack_link[73] = iova + RKDEC_HACK_DATA_RLC_OFFSET; in rkvdec2_link_hack_data_setup()
199 rkvdec2_3568_hack_link[74] = iova + RKDEC_HACK_DATA_OUT_OFFSET; in rkvdec2_link_hack_data_setup()
201 rkvdec2_3568_hack_link[75] = iova + 128 * 6; in rkvdec2_link_hack_data_setup()
203 rkvdec2_3568_hack_link[76] = iova + 128 * 4; in rkvdec2_link_hack_data_setup()
206 rkvdec2_3568_hack_link[89] = iova + RKDEC_HACK_DATA_PPS_OFFSET; in rkvdec2_link_hack_data_setup()
208 rkvdec2_3568_hack_link[91] = iova + RKDEC_HACK_DATA_RPS_OFFSET; in rkvdec2_link_hack_data_setup()
210 rkvdec2_3568_hack_link[92 + i] = iova + RKDEC_HACK_DATA_COLMV_OFFSET; in rkvdec2_link_hack_data_setup()
212 rkvdec2_3568_hack_link[125] = iova + PAGE_SIZE; in rkvdec2_link_hack_data_setup()
mpp_rkvdec2_hack_rk3568.c
679 u32 iova = fix->iova; in rkvdec2_3568_hack_data_setup() local
686 rkvdec2_3568_hack[71].data = iova; in rkvdec2_3568_hack_data_setup()
688 rkvdec2_3568_hack[72].data = iova + RKDEC_HACK_DATA_RLC_OFFSET; in rkvdec2_3568_hack_data_setup()
690 rkvdec2_3568_hack[73].data = iova + RKDEC_HACK_DATA_OUT_OFFSET; in rkvdec2_3568_hack_data_setup()
692 rkvdec2_3568_hack[74].data = iova + RKDEC_HACK_DATA_COLMV_OFFSET; in rkvdec2_3568_hack_data_setup()
695 rkvdec2_3568_hack[87].data = iova + RKDEC_HACK_DATA_PPS_OFFSET; in rkvdec2_3568_hack_data_setup()
697 rkvdec2_3568_hack[89].data = iova + RKDEC_HACK_DATA_RPS_OFFSET; in rkvdec2_3568_hack_data_setup()
699 rkvdec2_3568_hack[90 + i].data = iova + RKDEC_HACK_DATA_COLMV_OFFSET; in rkvdec2_3568_hack_data_setup()
700 rkvdec2_3568_hack[123].data = iova + PAGE_SIZE; in rkvdec2_3568_hack_data_setup()
/OK3568_Linux_fs/kernel/drivers/gpu/drm/msm/
msm_iommu.c
32 static int msm_iommu_pagetable_unmap(struct msm_mmu *mmu, u64 iova, in msm_iommu_pagetable_unmap() argument
41 unmapped += ops->unmap(ops, iova, 4096, NULL); in msm_iommu_pagetable_unmap()
42 iova += 4096; in msm_iommu_pagetable_unmap()
51 static int msm_iommu_pagetable_map(struct msm_mmu *mmu, u64 iova, in msm_iommu_pagetable_map() argument
58 u64 addr = iova; in msm_iommu_pagetable_map()
68 msm_iommu_pagetable_unmap(mmu, iova, mapped); in msm_iommu_pagetable_map()
129 static void msm_iommu_tlb_flush_walk(unsigned long iova, size_t size, in msm_iommu_tlb_flush_walk() argument
135 unsigned long iova, size_t granule, void *cookie) in msm_iommu_tlb_add_page() argument
211 unsigned long iova, int flags, void *arg) in msm_fault_handler() argument
215 return iommu->base.handler(iommu->base.arg, iova, flags); in msm_fault_handler()
[all …]
/OK3568_Linux_fs/kernel/drivers/gpu/drm/etnaviv/
etnaviv_mmu.c
17 unsigned long iova, size_t size) in etnaviv_context_unmap() argument
22 if (!IS_ALIGNED(iova | size, pgsize)) { in etnaviv_context_unmap()
24 iova, size, pgsize); in etnaviv_context_unmap()
29 unmapped_page = context->global->ops->unmap(context, iova, in etnaviv_context_unmap()
34 iova += unmapped_page; in etnaviv_context_unmap()
40 unsigned long iova, phys_addr_t paddr, in etnaviv_context_map() argument
43 unsigned long orig_iova = iova; in etnaviv_context_map()
48 if (!IS_ALIGNED(iova | paddr | size, pgsize)) { in etnaviv_context_map()
50 iova, &paddr, size, pgsize); in etnaviv_context_map()
55 ret = context->global->ops->map(context, iova, paddr, pgsize, in etnaviv_context_map()
[all …]
/OK3568_Linux_fs/kernel/include/trace/hooks/
iommu.h
30 TP_PROTO(struct device *dev, dma_addr_t iova, size_t size),
31 TP_ARGS(dev, iova, size));
35 TP_PROTO(struct device *dev, struct iova_domain *iovad, dma_addr_t iova, size_t size),
36 TP_ARGS(dev, iovad, iova, size));
39 TP_PROTO(dma_addr_t iova, size_t size),
40 TP_ARGS(iova, size));
43 TP_PROTO(struct iova_domain *iovad, dma_addr_t iova, size_t size),
44 TP_ARGS(iovad, iova, size));
/OK3568_Linux_fs/kernel/drivers/infiniband/sw/rxe/
rxe_mr.c
27 int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length) in mem_check_range() argument
35 if (iova < mem->iova || in mem_check_range()
37 iova > mem->iova + mem->length - length) in mem_check_range()
127 u64 length, u64 iova, int access, struct ib_udata *udata, in rxe_mem_init_user() argument
193 mem->iova = iova; in rxe_mem_init_user()
232 u64 iova, in lookup_iova() argument
237 size_t offset = iova - mem->iova + mem->offset; in lookup_iova()
270 void *iova_to_vaddr(struct rxe_mem *mem, u64 iova, int length) in iova_to_vaddr() argument
283 addr = (void *)(uintptr_t)iova; in iova_to_vaddr()
287 if (mem_check_range(mem, iova, length)) { in iova_to_vaddr()
[all …]
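The rxe_mr.c hit shows mem_check_range() validating that a requested [iova, iova + length) window sits entirely inside the registered memory region. The same bounds test, restated as a standalone predicate with hypothetical names:

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Standalone restatement of the bounds check in mem_check_range():
 * [iova, iova + length) must lie inside [mem_iova, mem_iova + mem_length). */
static bool iova_range_ok(uint64_t mem_iova, size_t mem_length,
			  uint64_t iova, size_t length)
{
	if (length > mem_length)
		return false;	/* also guards the subtraction below */

	return iova >= mem_iova &&
	       iova <= mem_iova + mem_length - length;
}
```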
/OK3568_Linux_fs/kernel/drivers/video/rockchip/rga3/
rga_dma_buf.c
208 unsigned long shift, iova_len, iova = 0; in rga_iommu_dma_alloc_iova() local
236 iova = alloc_iova_fast(iovad, iova_len, in rga_iommu_dma_alloc_iova()
240 iova = alloc_iova_fast(iovad, iova_len, dma_limit >> shift, true); in rga_iommu_dma_alloc_iova()
243 return (dma_addr_t)iova << shift; in rga_iommu_dma_alloc_iova()
247 dma_addr_t iova, size_t size) in rga_iommu_dma_free_iova() argument
252 free_iova_fast(iovad, iova_pfn(iovad, iova), size >> iova_shift(iovad)); in rga_iommu_dma_free_iova()
264 if (buffer->iova == 0) in rga_iommu_unmap()
267 iommu_unmap(buffer->domain, buffer->iova, buffer->size); in rga_iommu_unmap()
268 rga_iommu_dma_free_iova(buffer->domain, buffer->iova, buffer->size); in rga_iommu_unmap()
278 dma_addr_t iova; in rga_iommu_map_sgt() local
[all …]
