/OK3568_Linux_fs/kernel/drivers/media/common/videobuf2/

videobuf2-dma-contig.c
   49  static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt)    in vb2_dc_get_contiguous_size() argument
   52  dma_addr_t expected = sg_dma_address(sgt->sgl);    in vb2_dc_get_contiguous_size()
   56  for_each_sgtable_dma_sg(sgt, s, i) {    in vb2_dc_get_contiguous_size()
   96  struct sg_table *sgt = buf->dma_sgt;    in vb2_dc_prepare() local
   98  if (!sgt)    in vb2_dc_prepare()
  101  dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);    in vb2_dc_prepare()
  107  struct sg_table *sgt = buf->dma_sgt;    in vb2_dc_finish() local
  109  if (!sgt)    in vb2_dc_finish()
  112  dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);    in vb2_dc_finish()
  211  struct sg_table sgt;    member
  [all …]

videobuf2-dma-sg.c
  104  struct sg_table *sgt;    in vb2_dma_sg_alloc() local
  145  sgt = &buf->sg_table;    in vb2_dma_sg_alloc()
  150  if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir,    in vb2_dma_sg_alloc()
  181  struct sg_table *sgt = &buf->sg_table;    in vb2_dma_sg_put() local
  187  dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir,    in vb2_dma_sg_put()
  203  struct sg_table *sgt = buf->dma_sgt;    in vb2_dma_sg_prepare() local
  205  dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);    in vb2_dma_sg_prepare()
  211  struct sg_table *sgt = buf->dma_sgt;    in vb2_dma_sg_finish() local
  213  dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);    in vb2_dma_sg_finish()
  221  struct sg_table *sgt;    in vb2_dma_sg_get_userptr() local
  [all …]

videobuf2-cma-sg.c
  146  struct sg_table *sgt;    in vb2_cma_sg_alloc() local
  184  sgt = &buf->sg_table;    in vb2_cma_sg_alloc()
  189  if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir,    in vb2_cma_sg_alloc()
  219  struct sg_table *sgt = &buf->sg_table;    in vb2_cma_sg_put() local
  222  dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir,    in vb2_cma_sg_put()
  241  struct sg_table *sgt = buf->dma_sgt;    in vb2_cma_sg_prepare() local
  243  dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);    in vb2_cma_sg_prepare()
  249  struct sg_table *sgt = buf->dma_sgt;    in vb2_cma_sg_finish() local
  251  dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);    in vb2_cma_sg_finish()
  259  struct sg_table *sgt;    in vb2_cma_sg_get_userptr() local
  [all …]

videobuf2-vmalloc.c
  206  struct sg_table sgt;    member
  216  struct sg_table *sgt;    in vb2_vmalloc_dmabuf_ops_attach() local
  226  sgt = &attach->sgt;    in vb2_vmalloc_dmabuf_ops_attach()
  227  ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);    in vb2_vmalloc_dmabuf_ops_attach()
  232  for_each_sgtable_sg(sgt, sg, i) {    in vb2_vmalloc_dmabuf_ops_attach()
  236  sg_free_table(sgt);    in vb2_vmalloc_dmabuf_ops_attach()
  253  struct sg_table *sgt;    in vb2_vmalloc_dmabuf_ops_detach() local
  258  sgt = &attach->sgt;    in vb2_vmalloc_dmabuf_ops_detach()
  262  dma_unmap_sgtable(db_attach->dev, sgt, attach->dma_dir, 0);    in vb2_vmalloc_dmabuf_ops_detach()
  263  sg_free_table(sgt);    in vb2_vmalloc_dmabuf_ops_detach()
  [all …]

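The videobuf2 excerpts above share two recurring patterns: a prepare/finish pair built on dma_sync_sgtable_for_device()/dma_sync_sgtable_for_cpu(), and a contiguity walk over the DMA-mapped entries. A minimal sketch of the latter, modeled on the vb2_dc_get_contiguous_size() lines (helper name hypothetical):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Walk the DMA-mapped entries and count bytes until the first address gap. */
static unsigned long sgt_contiguous_size(struct sg_table *sgt)
{
	dma_addr_t expected = sg_dma_address(sgt->sgl);
	unsigned long size = 0;
	struct scatterlist *s;
	unsigned int i;

	/* Only nents entries are valid after dma_map_sgtable(), not orig_nents. */
	for_each_sgtable_dma_sg(sgt, s, i) {
		if (sg_dma_address(s) != expected)
			break;
		expected += sg_dma_len(s);
		size += sg_dma_len(s);
	}

	return size;
}

A backend that needs contiguous memory can compare the returned size against the buffer size and reject an import whose mapping turned out to be fragmented.
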
/OK3568_Linux_fs/kernel/drivers/gpu/drm/tegra/

gem.c
   31  static int sg_alloc_table_from_sg(struct sg_table *sgt, struct scatterlist *sg,    in sg_alloc_table_from_sg() argument
   38  err = sg_alloc_table(sgt, nents, gfp_mask);    in sg_alloc_table_from_sg()
   42  dst = sgt->sgl;    in sg_alloc_table_from_sg()
   57  struct sg_table *sgt;    in tegra_bo_pin() local
   81  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);    in tegra_bo_pin()
   82  if (!sgt)    in tegra_bo_pin()
   90  err = sg_alloc_table_from_pages(sgt, obj->pages, obj->num_pages,    in tegra_bo_pin()
   94  } else if (obj->sgt) {    in tegra_bo_pin()
  101  err = sg_alloc_table_from_sg(sgt, obj->sgt->sgl,    in tegra_bo_pin()
  102  obj->sgt->orig_nents, GFP_KERNEL);    in tegra_bo_pin()
  [all …]

plane.c
   73  copy->sgt[i] = NULL;    in tegra_plane_atomic_duplicate_state()
  120  struct sg_table *sgt;    in tegra_dc_pin() local
  127  sgt = host1x_bo_pin(dc->dev, &bo->base, phys);    in tegra_dc_pin()
  128  if (IS_ERR(sgt)) {    in tegra_dc_pin()
  129  err = PTR_ERR(sgt);    in tegra_dc_pin()
  133  if (sgt) {    in tegra_dc_pin()
  134  err = dma_map_sgtable(dc->dev, sgt, DMA_TO_DEVICE, 0);    in tegra_dc_pin()
  144  if (sgt->nents > 1) {    in tegra_dc_pin()
  149  state->iova[i] = sg_dma_address(sgt->sgl);    in tegra_dc_pin()
  150  state->sgt[i] = sgt;    in tegra_dc_pin()
  [all …]

/OK3568_Linux_fs/kernel/drivers/gpu/drm/armada/

armada_gem.c
   66  if (dobj->sgt)    in armada_gem_free_object()
   68  dobj->sgt, DMA_TO_DEVICE);    in armada_gem_free_object()
  381  struct sg_table *sgt;    in armada_gem_prime_map_dma_buf() local
  384  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);    in armada_gem_prime_map_dma_buf()
  385  if (!sgt)    in armada_gem_prime_map_dma_buf()
  393  if (sg_alloc_table(sgt, count, GFP_KERNEL))    in armada_gem_prime_map_dma_buf()
  398  for_each_sgtable_sg(sgt, sg, i) {    in armada_gem_prime_map_dma_buf()
  408  if (dma_map_sgtable(attach->dev, sgt, dir, 0))    in armada_gem_prime_map_dma_buf()
  412  if (sg_alloc_table(sgt, 1, GFP_KERNEL))    in armada_gem_prime_map_dma_buf()
  415  sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0);    in armada_gem_prime_map_dma_buf()
  [all …]

/OK3568_Linux_fs/kernel/drivers/gpu/drm/rockchip/

rockchip_drm_gem.c
   55  ret = iommu_map_sgtable(private->domain, rk_obj->dma_addr, rk_obj->sgt,    in rockchip_gem_iommu_map()
  216  rk_obj->sgt = drm_prime_pages_to_sg(rk_obj->base.dev,    in rockchip_gem_get_pages()
  218  if (IS_ERR(rk_obj->sgt)) {    in rockchip_gem_get_pages()
  219  ret = PTR_ERR(rk_obj->sgt);    in rockchip_gem_get_pages()
  232  for_each_sgtable_sg(rk_obj->sgt, s, i)    in rockchip_gem_get_pages()
  235  dma_sync_sgtable_for_device(drm->dev, rk_obj->sgt, DMA_TO_DEVICE);    in rockchip_gem_get_pages()
  251  sg_free_table(rk_obj->sgt);    in rockchip_gem_put_pages()
  252  kfree(rk_obj->sgt);    in rockchip_gem_put_pages()
  264  struct sg_table *sgt;    in rockchip_gem_alloc_dma() local
  281  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);    in rockchip_gem_alloc_dma()
  [all …]

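rockchip_gem_iommu_map() above maps an sg_table straight into the driver's own IOMMU domain rather than going through the DMA API. A minimal sketch of that step, assuming a domain and a reserved IOVA range are already available (function and parameter names hypothetical):

#include <linux/iommu.h>
#include <linux/scatterlist.h>

/* Map all pages described by @sgt at @iova; undo a partial mapping on failure. */
static int map_sgt_into_domain(struct iommu_domain *domain, unsigned long iova,
			       struct sg_table *sgt, size_t size)
{
	size_t mapped;

	mapped = iommu_map_sgtable(domain, iova, sgt, IOMMU_READ | IOMMU_WRITE);
	if (mapped < size) {
		iommu_unmap(domain, iova, mapped);
		return -ENOMEM;
	}

	return 0;
}

Because this path bypasses dma_map_sgtable(), the driver stays responsible for cache maintenance, which is why the same file also calls dma_sync_sgtable_for_device() after collecting the pages.
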
/OK3568_Linux_fs/kernel/drivers/xen/

gntdev-dmabuf.c
   51  struct sg_table *sgt;    member
   70  struct sg_table *sgt;    member
  203  struct sg_table *sgt;    in dmabuf_pages_to_sgt() local
  206  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);    in dmabuf_pages_to_sgt()
  207  if (!sgt) {    in dmabuf_pages_to_sgt()
  212  ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,    in dmabuf_pages_to_sgt()
  218  return sgt;    in dmabuf_pages_to_sgt()
  221  kfree(sgt);    in dmabuf_pages_to_sgt()
  246  struct sg_table *sgt = gntdev_dmabuf_attach->sgt;    in dmabuf_exp_ops_detach() local
  248  if (sgt) {    in dmabuf_exp_ops_detach()
  [all …]

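dmabuf_pages_to_sgt() above wraps an array of granted pages in a freshly allocated sg_table. A minimal sketch of the same wrapping step (helper name hypothetical):

#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Wrap @nr_pages pages in a newly allocated sg_table, coalescing contiguous runs. */
static struct sg_table *pages_to_sgt(struct page **pages, unsigned int nr_pages)
{
	struct sg_table *sgt;
	int ret;

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
					(unsigned long)nr_pages << PAGE_SHIFT,
					GFP_KERNEL);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}

	return sgt;
}

sg_alloc_table_from_pages() merges physically contiguous pages into single entries, so orig_nents may end up much smaller than nr_pages.
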
/OK3568_Linux_fs/kernel/drivers/gpu/drm/i915/

i915_mm.c
   36  struct sgt_iter sgt;    member
   56  return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT;    in sgt_pfn()
   58  return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT);    in sgt_pfn()
   65  if (GEM_WARN_ON(!r->sgt.pfn))    in remap_sg()
   73  r->sgt.curr += PAGE_SIZE;    in remap_sg()
   74  if (r->sgt.curr >= r->sgt.max)    in remap_sg()
   75  r->sgt = __sgt_iter(__sg_next(r->sgt.sgp), use_dma(r->iobase));    in remap_sg()
  132  .sgt = __sgt_iter(sgl, use_dma(iobase)),    in remap_io_sg()

/OK3568_Linux_fs/kernel/drivers/hwtracing/intel_th/

msu-sink.c
   51  static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size)    in msu_sink_alloc_window() argument
   64  ret = sg_alloc_table(*sgt, nents, GFP_KERNEL);    in msu_sink_alloc_window()
   68  priv->sgts[priv->nr_sgts++] = *sgt;    in msu_sink_alloc_window()
   70  for_each_sg((*sgt)->sgl, sg_ptr, nents, i) {    in msu_sink_alloc_window()
   84  static void msu_sink_free_window(void *data, struct sg_table *sgt)    in msu_sink_free_window() argument
   90  for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) {    in msu_sink_free_window()
   95  sg_free_table(sgt);    in msu_sink_free_window()
   99  static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes)    in msu_sink_ready() argument
  103  intel_th_msc_window_unlock(priv->dev, sgt);    in msu_sink_ready()

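msu_sink_alloc_window() above allocates the scatterlist table first and then walks it to populate each entry; in the upstream driver each entry is backed by a DMA-coherent page. A minimal sketch of that pattern, with the error unwinding reduced to a comment (names hypothetical):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Allocate @nents entries and back each one with a DMA-coherent page. */
static int alloc_sg_window(struct device *dev, struct sg_table *sgt,
			   unsigned int nents)
{
	struct scatterlist *sg;
	unsigned int i;
	int ret;

	ret = sg_alloc_table(sgt, nents, GFP_KERNEL);
	if (ret)
		return ret;

	for_each_sgtable_sg(sgt, sg, i) {
		void *block = dma_alloc_coherent(dev, PAGE_SIZE,
						 &sg_dma_address(sg), GFP_KERNEL);

		if (!block) {
			/* A real driver must also free the blocks allocated so far. */
			sg_free_table(sgt);
			return -ENOMEM;
		}

		sg_set_buf(sg, block, PAGE_SIZE);
	}

	return 0;
}

The matching free path walks the same table, releases each block, and finishes with sg_free_table(), as msu_sink_free_window() does above.
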
/OK3568_Linux_fs/kernel/drivers/gpu/drm/

drm_gem_shmem_helper.c
  126  drm_prime_gem_destroy(obj, shmem->sgt);    in drm_gem_shmem_free_object()
  128  if (shmem->sgt) {    in drm_gem_shmem_free_object()
  129  dma_unmap_sgtable(obj->dev->dev, shmem->sgt,    in drm_gem_shmem_free_object()
  131  sg_free_table(shmem->sgt);    in drm_gem_shmem_free_object()
  132  kfree(shmem->sgt);    in drm_gem_shmem_free_object()
  428  dma_unmap_sgtable(obj->dev->dev, shmem->sgt, DMA_BIDIRECTIONAL, 0);    in drm_gem_shmem_purge_locked()
  429  sg_free_table(shmem->sgt);    in drm_gem_shmem_purge_locked()
  430  kfree(shmem->sgt);    in drm_gem_shmem_purge_locked()
  431  shmem->sgt = NULL;    in drm_gem_shmem_purge_locked()
  705  struct sg_table *sgt;    in drm_gem_shmem_get_pages_sgt() local
  [all …]

drm_prime.c
  623  struct sg_table *sgt;    in drm_gem_map_dma_buf() local
  630  sgt = obj->funcs->get_sg_table(obj);    in drm_gem_map_dma_buf()
  632  sgt = obj->dev->driver->gem_prime_get_sg_table(obj);    in drm_gem_map_dma_buf()
  634  ret = dma_map_sgtable(attach->dev, sgt, dir,    in drm_gem_map_dma_buf()
  637  sg_free_table(sgt);    in drm_gem_map_dma_buf()
  638  kfree(sgt);    in drm_gem_map_dma_buf()
  639  sgt = ERR_PTR(ret);    in drm_gem_map_dma_buf()
  642  return sgt;    in drm_gem_map_dma_buf()
  655  struct sg_table *sgt,    in drm_gem_unmap_dma_buf() argument
  658  if (!sgt)    in drm_gem_unmap_dma_buf()
  [all …]

drm_gem_cma_helper.c
  185  drm_prime_gem_destroy(gem_obj, cma_obj->sgt);    in drm_gem_cma_free_object()
  430  struct sg_table *sgt;    in drm_gem_cma_prime_get_sg_table() local
  433  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);    in drm_gem_cma_prime_get_sg_table()
  434  if (!sgt)    in drm_gem_cma_prime_get_sg_table()
  437  ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr,    in drm_gem_cma_prime_get_sg_table()
  442  return sgt;    in drm_gem_cma_prime_get_sg_table()
  445  kfree(sgt);    in drm_gem_cma_prime_get_sg_table()
  470  struct sg_table *sgt)    in drm_gem_cma_prime_import_sg_table() argument
  475  if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size)    in drm_gem_cma_prime_import_sg_table()
  483  cma_obj->paddr = sg_dma_address(sgt->sgl);    in drm_gem_cma_prime_import_sg_table()
  [all …]

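drm_gem_map_dma_buf() above shows the exporter-side contract: build an sg_table for the object, map it for the importing device, and tear it down if the mapping fails. A minimal sketch of just the map-and-cleanup step (helper name hypothetical, error handling simplified):

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Map an already-built, heap-allocated sg_table for the importer's device. */
static struct sg_table *map_for_importer(struct dma_buf_attachment *attach,
					 struct sg_table *sgt,
					 enum dma_data_direction dir)
{
	int ret;

	ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret) {
		sg_free_table(sgt);
		kfree(sgt);
		return ERR_PTR(ret);
	}

	return sgt;
}

The unmap side mirrors this: dma_unmap_sgtable(), sg_free_table(), kfree(), which is the sequence visible in drm_gem_shmem_free_object() above.
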
/OK3568_Linux_fs/kernel/drivers/rknpu/

rknpu_gem.c
   50  rknpu_obj->sgt = drm_prime_pages_to_sg(drm, rknpu_obj->pages,    in rknpu_gem_get_pages()
   53  rknpu_obj->sgt =    in rknpu_gem_get_pages()
   56  if (IS_ERR(rknpu_obj->sgt)) {    in rknpu_gem_get_pages()
   57  ret = PTR_ERR(rknpu_obj->sgt);    in rknpu_gem_get_pages()
   62  ret = dma_map_sg(drm->dev, rknpu_obj->sgt->sgl, rknpu_obj->sgt->nents,    in rknpu_gem_get_pages()
   83  dma_addr = sg_dma_address(rknpu_obj->sgt->sgl);    in rknpu_gem_get_pages()
   86  for_each_sg(rknpu_obj->sgt->sgl, s, rknpu_obj->sgt->nents, i) {    in rknpu_gem_get_pages()
   97  dma_unmap_sg(drm->dev, rknpu_obj->sgt->sgl, rknpu_obj->sgt->nents,    in rknpu_gem_get_pages()
  101  sg_free_table(rknpu_obj->sgt);    in rknpu_gem_get_pages()
  102  kfree(rknpu_obj->sgt);    in rknpu_gem_get_pages()
  [all …]

/OK3568_Linux_fs/kernel/net/ceph/

crypto.c
  160  static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,    in setup_sgtable() argument
  172  memset(sgt, 0, sizeof(*sgt));    in setup_sgtable()
  182  ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);    in setup_sgtable()
  188  sgt->sgl = prealloc_sg;    in setup_sgtable()
  189  sgt->nents = sgt->orig_nents = 1;    in setup_sgtable()
  192  for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {    in setup_sgtable()
  212  static void teardown_sgtable(struct sg_table *sgt)    in teardown_sgtable() argument
  214  if (sgt->orig_nents > 1)    in teardown_sgtable()
  215  sg_free_table(sgt);    in teardown_sgtable()
  222  struct sg_table sgt;    in ceph_aes_crypt() local
  [all …]

/OK3568_Linux_fs/kernel/drivers/staging/media/tegra-vde/

dmabuf-cache.c
   24  struct sg_table *sgt;    member
   38  dma_buf_unmap_attachment(entry->a, entry->sgt, entry->dma_dir);    in tegra_vde_release_entry()
   69  struct sg_table *sgt;    in tegra_vde_dmabuf_cache_map() local
   90  *addrp = sg_dma_address(entry->sgt->sgl);    in tegra_vde_dmabuf_cache_map()
  102  sgt = dma_buf_map_attachment(attachment, dma_dir);    in tegra_vde_dmabuf_cache_map()
  103  if (IS_ERR(sgt)) {    in tegra_vde_dmabuf_cache_map()
  105  err = PTR_ERR(sgt);    in tegra_vde_dmabuf_cache_map()
  109  if (!vde->domain && sgt->nents > 1) {    in tegra_vde_dmabuf_cache_map()
  122  err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size);    in tegra_vde_dmabuf_cache_map()
  128  *addrp = sg_dma_address(sgt->sgl);    in tegra_vde_dmabuf_cache_map()
  [all …]

/OK3568_Linux_fs/kernel/drivers/gpu/host1x/

job.c
  119  struct sg_table *sgt;    in pin_job() local
  144  sgt = host1x_bo_pin(dev, reloc->target.bo, phys);    in pin_job()
  145  if (IS_ERR(sgt)) {    in pin_job()
  146  err = PTR_ERR(sgt);    in pin_job()
  150  if (sgt) {    in pin_job()
  173  err = dma_map_sgtable(dev, sgt, dir, 0);    in pin_job()
  179  phys_addr = sg_dma_address(sgt->sgl);    in pin_job()
  184  job->unpins[job->num_unpins].sgt = sgt;    in pin_job()
  198  struct sg_table *sgt;    in pin_job() local
  222  sgt = host1x_bo_pin(host->dev, g->bo, phys);    in pin_job()
  [all …]

/OK3568_Linux_fs/kernel/drivers/gpu/drm/etnaviv/

etnaviv_gem.c
   23  struct sg_table *sgt = etnaviv_obj->sgt;    in etnaviv_gem_scatter_map() local
   30  dma_map_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0);    in etnaviv_gem_scatter_map()
   36  struct sg_table *sgt = etnaviv_obj->sgt;    in etnaviv_gem_scatterlist_unmap() local
   54  dma_unmap_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0);    in etnaviv_gem_scatterlist_unmap()
   75  if (etnaviv_obj->sgt) {    in put_pages()
   77  sg_free_table(etnaviv_obj->sgt);    in put_pages()
   78  kfree(etnaviv_obj->sgt);    in put_pages()
   79  etnaviv_obj->sgt = NULL;    in put_pages()
  101  if (!etnaviv_obj->sgt) {    in etnaviv_gem_get_pages()
  104  struct sg_table *sgt;    in etnaviv_gem_get_pages() local
  [all …]

/OK3568_Linux_fs/kernel/drivers/gpu/drm/lima/

lima_gem.c
   28  struct sg_table sgt;    in lima_heap_alloc() local
   66  ret = sg_alloc_table_from_pages(&sgt, pages, i, 0,    in lima_heap_alloc()
   71  if (bo->base.sgt) {    in lima_heap_alloc()
   72  dma_unmap_sgtable(dev, bo->base.sgt, DMA_BIDIRECTIONAL, 0);    in lima_heap_alloc()
   73  sg_free_table(bo->base.sgt);    in lima_heap_alloc()
   75  bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL);    in lima_heap_alloc()
   76  if (!bo->base.sgt) {    in lima_heap_alloc()
   77  sg_free_table(&sgt);    in lima_heap_alloc()
   82  ret = dma_map_sgtable(dev, &sgt, DMA_BIDIRECTIONAL, 0);    in lima_heap_alloc()
   84  sg_free_table(&sgt);    in lima_heap_alloc()
  [all …]

/OK3568_Linux_fs/kernel/include/linux/

scatterlist.h
  157  #define for_each_sgtable_sg(sgt, sg, i) \    argument
  158  for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i)
  165  #define for_each_sgtable_dma_sg(sgt, sg, i) \    argument
  166  for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
  294  struct scatterlist *__sg_alloc_table_from_pages(struct sg_table *sgt,
  299  int sg_alloc_table_from_pages(struct sg_table *sgt, struct page **pages,
  459  #define for_each_sgtable_page(sgt, piter, pgoffset) \    argument
  460  for_each_sg_page((sgt)->sgl, piter, (sgt)->orig_nents, pgoffset)
  473  #define for_each_sgtable_dma_page(sgt, dma_iter, pgoffset) \    argument
  474  for_each_sg_dma_page((sgt)->sgl, dma_iter, (sgt)->nents, pgoffset)

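The macro definitions above encode the distinction that matters for every caller in this index: for_each_sgtable_sg() and for_each_sgtable_page() walk orig_nents (the CPU-side entries), while the _dma_ variants walk nents (the entries left after dma_map_sgtable() may have coalesced them). An illustrative pair of helpers, assuming the table is already mapped when the second one runs (names hypothetical):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sum the CPU-side segment lengths (valid before or after mapping). */
static size_t sgt_cpu_bytes(struct sg_table *sgt)
{
	struct scatterlist *sg;
	unsigned int i;
	size_t bytes = 0;

	for_each_sgtable_sg(sgt, sg, i)		/* walks orig_nents entries */
		bytes += sg->length;

	return bytes;
}

/* Sum the DMA segment lengths (only valid after dma_map_sgtable()). */
static size_t sgt_dma_bytes(struct sg_table *sgt)
{
	struct scatterlist *sg;
	unsigned int i;
	size_t bytes = 0;

	for_each_sgtable_dma_sg(sgt, sg, i)	/* walks nents entries */
		bytes += sg_dma_len(sg);

	return bytes;
}

Mixing the two counts, for example iterating nents entries with sg->length, is a common source of subtle bugs once an IOMMU starts merging segments.
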
/OK3568_Linux_fs/kernel/drivers/gpu/arm/mali400/ump/linux/

ump_ukk_ref_wrappers.c
  118  struct sg_table *sgt = NULL;    in ump_dmabuf_import_wrapper() local
  156  sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);    in ump_dmabuf_import_wrapper()
  157  if (IS_ERR(sgt)) {    in ump_dmabuf_import_wrapper()
  158  ret = PTR_ERR(sgt);    in ump_dmabuf_import_wrapper()
  162  blocks = (ump_dd_physical_block *)_mali_osk_malloc(sizeof(ump_dd_physical_block) * sgt->nents);    in ump_dmabuf_import_wrapper()
  168  for_each_sg(sgt->sgl, sgl, sgt->nents, i) {    in ump_dmabuf_import_wrapper()
  193  session->mem->sgt = sgt;    in ump_dmabuf_import_wrapper()
  223  dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL);    in ump_dmabuf_import_wrapper()

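ump_dmabuf_import_wrapper() above is the importer side of the dma-buf protocol: attach to the buffer, map the attachment, and keep both handles so they can be unmapped and detached later. A minimal sketch of that handshake (helper name hypothetical, locking and caching omitted):

#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/err.h>

/*
 * Attach to @dmabuf on behalf of @dev and map it. On success, *attach_out
 * must be kept for the later dma_buf_unmap_attachment()/dma_buf_detach().
 */
static struct sg_table *import_dmabuf(struct device *dev, struct dma_buf *dmabuf,
				      struct dma_buf_attachment **attach_out)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;

	attach = dma_buf_attach(dmabuf, dev);
	if (IS_ERR(attach))
		return ERR_CAST(attach);

	sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(sgt)) {
		dma_buf_detach(dmabuf, attach);
		return sgt;
	}

	*attach_out = attach;
	return sgt;
}
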
/OK3568_Linux_fs/kernel/drivers/video/rockchip/rga3/

rga_dma_buf.c
  271  int rga_iommu_map_sgt(struct sg_table *sgt, size_t size,    in rga_iommu_map_sgt() argument
  282  if (sgt == NULL) {    in rga_iommu_map_sgt()
  301  map_size = iommu_map_sg(domain, iova, sgt->sgl, sgt->orig_nents,    in rga_iommu_map_sgt()
  432  struct sg_table *sgt = NULL;    in rga_dma_map_buf() local
  450  sgt = dma_buf_map_attachment(attach, dir);    in rga_dma_map_buf()
  451  if (IS_ERR(sgt)) {    in rga_dma_map_buf()
  452  ret = PTR_ERR(sgt);    in rga_dma_map_buf()
  459  rga_dma_buffer->sgt = sgt;    in rga_dma_map_buf()
  460  rga_dma_buffer->iova = sg_dma_address(sgt->sgl);    in rga_dma_map_buf()
  463  for_each_sgtable_sg(sgt, sg, i)    in rga_dma_map_buf()
  [all …]

rga_mm.c
  175  struct sg_table *sgt = NULL;    in rga_alloc_sgt() local
  177  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);    in rga_alloc_sgt()
  178  if (sgt == NULL) {    in rga_alloc_sgt()
  185  ret = sg_alloc_table_from_pages(sgt,    in rga_alloc_sgt()
  196  return sgt;    in rga_alloc_sgt()
  199  kfree(sgt);    in rga_alloc_sgt()
  328  static int rga_mm_check_range_sgt(struct sg_table *sgt)    in rga_mm_check_range_sgt() argument
  334  for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {    in rga_mm_check_range_sgt()
  348  static inline bool rga_mm_check_contiguous_sgt(struct sg_table *sgt)    in rga_mm_check_contiguous_sgt() argument
  350  if (sgt->orig_nents == 1)    in rga_mm_check_contiguous_sgt()
  [all …]

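rga_mm_check_contiguous_sgt() above decides whether a buffer can be handed to the hardware without an IOMMU by checking that its sg_table describes one physically contiguous run. A minimal sketch of that check on an unmapped table (name hypothetical):

#include <linux/scatterlist.h>

/* True if the entries form one physically contiguous run (trivially true for one entry). */
static bool sgt_is_phys_contiguous(struct sg_table *sgt)
{
	struct scatterlist *sg;
	phys_addr_t expected = 0;
	unsigned int i;

	if (sgt->orig_nents == 1)
		return true;

	for_each_sgtable_sg(sgt, sg, i) {
		if (i && sg_phys(sg) != expected)
			return false;
		expected = sg_phys(sg) + sg->length;
	}

	return true;
}

This differs from the videobuf2 helper sketched earlier, which checks contiguity of DMA addresses after mapping; here the walk uses sg_phys() and orig_nents because no mapping has happened yet.
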
/OK3568_Linux_fs/kernel/drivers/gpu/drm/mediatek/

mtk_drm_gem.c
  192  struct sg_table *sgt;    in mtk_gem_prime_get_sg_table() local
  195  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);    in mtk_gem_prime_get_sg_table()
  196  if (!sgt)    in mtk_gem_prime_get_sg_table()
  199  ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie,    in mtk_gem_prime_get_sg_table()
  204  kfree(sgt);    in mtk_gem_prime_get_sg_table()
  208  return sgt;    in mtk_gem_prime_get_sg_table()
  235  struct sg_table *sgt;    in mtk_drm_gem_prime_vmap() local
  241  sgt = mtk_gem_prime_get_sg_table(obj);    in mtk_drm_gem_prime_vmap()
  242  if (IS_ERR(sgt))    in mtk_drm_gem_prime_vmap()
  250  drm_prime_sg_to_page_addr_arrays(sgt, mtk_gem->pages, NULL, npages);    in mtk_drm_gem_prime_vmap()
  [all …]

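mtk_gem_prime_get_sg_table() above builds an sg_table for a buffer that came from the DMA allocator rather than from a page array. A minimal sketch of that path (helper and parameter names hypothetical):

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Describe a dma_alloc_attrs() buffer with a newly allocated sg_table. */
static struct sg_table *coherent_buf_to_sgt(struct device *dev, void *cookie,
					    dma_addr_t dma_addr, size_t size,
					    unsigned long dma_attrs)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = dma_get_sgtable_attrs(dev, sgt, cookie, dma_addr, size, dma_attrs);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}

	return sgt;
}

The resulting table can then be exported through PRIME or, as in mtk_drm_gem_prime_vmap() above, converted back into a page array for vmap().
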