Lines matching refs:sgt

175 struct sg_table *sgt = NULL; in rga_alloc_sgt() local
177 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in rga_alloc_sgt()
178 if (sgt == NULL) { in rga_alloc_sgt()
185 ret = sg_alloc_table_from_pages(sgt, in rga_alloc_sgt()
196 return sgt; in rga_alloc_sgt()
199 kfree(sgt); in rga_alloc_sgt()
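
Taken together, lines 175-199 show the usual alloc-or-unwind pattern: kzalloc() the table, populate it from a pinned page array, and kfree() on failure. A minimal sketch of that flow follows; the pages/page_count/size parameters and the _sketch names are illustrative assumptions, not the driver's real signature.

/* Headers shared by the sketches in this section. */
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/sizes.h>
#include <linux/slab.h>

static struct sg_table *rga_alloc_sgt_sketch(struct page **pages,
					     unsigned int page_count,
					     size_t size)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (sgt == NULL)
		return ERR_PTR(-ENOMEM);

	/* Collapse the pinned page array into scatterlist entries. */
	ret = sg_alloc_table_from_pages(sgt, pages, page_count, 0, size,
					GFP_KERNEL);
	if (ret) {
		kfree(sgt);	/* mirrors the kfree() at line 199 */
		return ERR_PTR(ret);
	}

	return sgt;	/* callers test IS_ERR(), as at line 561 */
}
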
328 static int rga_mm_check_range_sgt(struct sg_table *sgt) in rga_mm_check_range_sgt() argument
334 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { in rga_mm_check_range_sgt()
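
Line 334 shows rga_mm_check_range_sgt() walking every segment with for_each_sg(); the condition tested per segment is not in the listing. A sketch of one plausible check, assuming the hardware can only address 32-bit physical memory:

static int rga_mm_check_range_sgt_sketch(struct sg_table *sgt)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
		/* Assumed limit: reject segments above 4 GiB. */
		if (sg_phys(sg) + sg->length > SZ_4G)
			return -EINVAL;
	}

	return 0;	/* nonzero return is treated as failure (line 447) */
}
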
348 static inline bool rga_mm_check_contiguous_sgt(struct sg_table *sgt) in rga_mm_check_contiguous_sgt() argument
350 if (sgt->orig_nents == 1) in rga_mm_check_contiguous_sgt()
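
Line 350 shows the fast path: a single-entry table is contiguous by definition. Whether the real helper also walks multi-entry tables is not visible; the adjacency walk below is an assumption:

static inline bool rga_mm_check_contiguous_sgt_sketch(struct sg_table *sgt)
{
	struct scatterlist *sg;
	phys_addr_t expected;
	int i;

	/* A single entry is trivially contiguous (line 350). */
	if (sgt->orig_nents == 1)
		return true;

	/* Assumed extension: require back-to-back segments. */
	expected = sg_phys(sgt->sgl);
	for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
		if (sg_phys(sg) != expected)
			return false;
		expected = sg_phys(sg) + sg->length;
	}

	return true;
}
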
447 if (rga_mm_check_range_sgt(buffer->sgt)) in rga_mm_map_dma_buffer()
454 if (rga_mm_check_contiguous_sgt(buffer->sgt)) { in rga_mm_map_dma_buffer()
455 phys_addr = sg_phys(buffer->sgt->sgl); in rga_mm_map_dma_buffer()
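
Lines 447-455 suggest the dma-buf path validates the imported table and, when it is contiguous, caches the physical base so the hardware can bypass its MMU. A sketch, with rga_dma_buffer_sketch standing in for the driver's real buffer type (the layout is not shown in the listing):

struct rga_dma_buffer_sketch {
	struct sg_table *sgt;
	phys_addr_t phys_addr;	/* 0 unless the buffer is contiguous */
};

static int rga_mm_map_dma_buffer_sketch(struct rga_dma_buffer_sketch *buffer)
{
	/* Reject tables the hardware cannot address (line 447). */
	if (rga_mm_check_range_sgt_sketch(buffer->sgt))
		return -EINVAL;

	/* Contiguous import: cache the physical base (lines 454-455). */
	if (rga_mm_check_contiguous_sgt_sketch(buffer->sgt))
		buffer->phys_addr = sg_phys(buffer->sgt->sgl);

	return 0;
}
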
499 internal_buffer->dma_buffer->sgt->sgl, in rga_mm_unmap_virt_addr()
500 internal_buffer->dma_buffer->sgt->orig_nents, in rga_mm_unmap_virt_addr()
511 rga_free_sgt(&internal_buffer->dma_buffer->sgt); in rga_mm_unmap_virt_addr()
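
Lines 499-511 show teardown in reverse order: unmap the scatterlist, then release the table. Passing &sgt to rga_free_sgt() suggests the helper also clears the caller's pointer; the sketch below assumes that behavior:

static void rga_mm_unmap_virt_addr_sketch(struct device *dev,
					  struct sg_table **sgt)
{
	/* Undo the streaming mapping (lines 499-500). */
	dma_unmap_sg(dev, (*sgt)->sgl, (*sgt)->orig_nents,
		     DMA_BIDIRECTIONAL);

	/*
	 * Release the table; clearing the caller's pointer is assumed
	 * from the &sgt call style at line 511.
	 */
	sg_free_table(*sgt);
	kfree(*sgt);
	*sgt = NULL;
}
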
530 struct sg_table *sgt; in rga_mm_map_virt_addr() local
560 sgt = rga_alloc_sgt(virt_addr); in rga_mm_map_virt_addr()
561 if (IS_ERR(sgt)) { in rga_mm_map_virt_addr()
563 ret = PTR_ERR(sgt); in rga_mm_map_virt_addr()
567 if (rga_mm_check_range_sgt(sgt)) in rga_mm_map_virt_addr()
570 if (rga_mm_check_contiguous_sgt(sgt)) { in rga_mm_map_virt_addr()
571 phys_addr = sg_phys(sgt->sgl); in rga_mm_map_virt_addr()
603 ret = rga_iommu_map_sgt(sgt, virt_addr->size, buffer, scheduler->dev); in rga_mm_map_virt_addr()
611 ret = dma_map_sg(scheduler->dev, sgt->sgl, sgt->orig_nents, DMA_BIDIRECTIONAL); in rga_mm_map_virt_addr()
615 (unsigned long)virt_addr->addr, sgt->orig_nents); in rga_mm_map_virt_addr()
629 buffer->sgt = sgt; in rga_mm_map_virt_addr()
644 rga_free_sgt(&sgt); in rga_mm_map_virt_addr()
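
Lines 530-644 outline the whole virtual-address path: build an sg_table from pinned pages, range-check it, record a physical base when contiguous, then map via the driver's rga_iommu_map_sgt() (line 603) or plain dma_map_sg() (line 611), publishing buffer->sgt only on success and freeing the table on any failure (line 644). A sketch of that ordering, showing only the dma_map_sg() fallback:

static int rga_mm_map_virt_addr_sketch(struct device *dev,
				       struct page **pages,
				       unsigned int page_count, size_t size,
				       struct rga_dma_buffer_sketch *buffer)
{
	struct sg_table *sgt;
	int ret;

	/* Pinned user pages -> sg_table (lines 560-563). */
	sgt = rga_alloc_sgt_sketch(pages, page_count, size);
	if (IS_ERR(sgt))
		return PTR_ERR(sgt);

	/* Validate before touching the DMA API (line 567). */
	ret = rga_mm_check_range_sgt_sketch(sgt);
	if (ret)
		goto err_free_sgt;

	/* Contiguous memory can go to the hardware directly (570-571). */
	if (rga_mm_check_contiguous_sgt_sketch(sgt))
		buffer->phys_addr = sg_phys(sgt->sgl);

	/*
	 * The listing branches to rga_iommu_map_sgt() when an IOMMU is
	 * attached (line 603); only the dma_map_sg() fallback of line
	 * 611 is sketched here.
	 */
	ret = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_BIDIRECTIONAL);
	if (ret == 0) {
		ret = -EINVAL;
		goto err_free_sgt;
	}

	/* Publish only after mapping succeeds (line 629). */
	buffer->sgt = sgt;
	return 0;

err_free_sgt:
	/* Mirrors rga_free_sgt(&sgt) at line 644. */
	sg_free_table(sgt);
	kfree(sgt);
	return ret;
}
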
952 return buffer->dma_buffer->sgt; in rga_mm_lookup_sgt()
971 dump_buffer->dma_buffer->sgt, in rga_mm_dump_buffer()
995 dump_buffer->dma_buffer->sgt, in rga_mm_dump_buffer()
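
Line 952 shows the lookup is a plain accessor onto the cached table, which the dump code at lines 971 and 995 then consumes. A sketch of such a lookup plus an illustrative per-segment dump (the pr_info format is an assumption; the real dump output is not shown):

static struct sg_table *
rga_mm_lookup_sgt_sketch(struct rga_dma_buffer_sketch *buffer)
{
	/* Plain accessor, as at line 952. */
	return buffer ? buffer->sgt : NULL;
}

static void rga_mm_dump_sgt_sketch(struct sg_table *sgt)
{
	struct scatterlist *sg;
	phys_addr_t phys;
	int i;

	for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
		phys = sg_phys(sg);
		pr_info("seg %d: phys %pa len %u\n", i, &phys, sg->length);
	}
}
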
1146 struct sg_table *sgt = NULL; in rga_mm_set_mmu_base() local
1209 sgt = rga_mm_lookup_sgt(job_buf->y_addr); in rga_mm_set_mmu_base()
1210 if (sgt == NULL) { in rga_mm_set_mmu_base()
1215 rga_mm_sgt_to_page_table(sgt, page_table, yrgb_count, false); in rga_mm_set_mmu_base()
1217 sgt = rga_mm_lookup_sgt(job_buf->uv_addr); in rga_mm_set_mmu_base()
1218 if (sgt == NULL) { in rga_mm_set_mmu_base()
1223 rga_mm_sgt_to_page_table(sgt, page_table + yrgb_count, uv_count, false); in rga_mm_set_mmu_base()
1225 sgt = rga_mm_lookup_sgt(job_buf->v_addr); in rga_mm_set_mmu_base()
1226 if (sgt == NULL) { in rga_mm_set_mmu_base()
1231 rga_mm_sgt_to_page_table(sgt, page_table + yrgb_count + uv_count, v_count, false); in rga_mm_set_mmu_base()
1274 sgt = rga_mm_lookup_sgt(job_buf->addr); in rga_mm_set_mmu_base()
1275 if (sgt == NULL) { in rga_mm_set_mmu_base()
1280 rga_mm_sgt_to_page_table(sgt, page_table, page_count, false); in rga_mm_set_mmu_base()
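
Lines 1209-1231 convert up to three planar sgt lookups (Y, then UV at offset yrgb_count, then V at yrgb_count + uv_count) into one flat page table, and line 1274 does the same for a single-plane buffer. A sketch of what rga_mm_sgt_to_page_table() could look like, assuming 32-bit entries and 4 KiB pages (the RGA MMU entry format is not visible here):

static int rga_mm_sgt_to_page_table_sketch(struct sg_table *sgt,
					   u32 *page_table, int page_count,
					   bool use_dma_addr)
{
	struct scatterlist *sg;
	int i, n = 0;

	for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
		/*
		 * After dma_map_sg() the IOVA is authoritative; the
		 * false flag in the calls above suggests raw phys.
		 */
		u64 addr = use_dma_addr ? sg_dma_address(sg) : sg_phys(sg);
		unsigned int len = use_dma_addr ? sg_dma_len(sg) : sg->length;
		unsigned int pages = len >> PAGE_SHIFT;

		while (pages-- && n < page_count) {
			page_table[n++] = lower_32_bits(addr);
			addr += PAGE_SIZE;
		}
	}

	return n;	/* number of entries written */
}
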
1302 struct sg_table *sgt; in rga_mm_sync_dma_sg_for_device() local
1305 sgt = rga_mm_lookup_sgt(buffer); in rga_mm_sync_dma_sg_for_device()
1306 if (sgt == NULL) { in rga_mm_sync_dma_sg_for_device()
1319 dma_sync_sg_for_device(scheduler->dev, sgt->sgl, sgt->orig_nents, dir); in rga_mm_sync_dma_sg_for_device()
1328 struct sg_table *sgt; in rga_mm_sync_dma_sg_for_cpu() local
1331 sgt = rga_mm_lookup_sgt(buffer); in rga_mm_sync_dma_sg_for_cpu()
1332 if (sgt == NULL) { in rga_mm_sync_dma_sg_for_cpu()
1345 dma_sync_sg_for_cpu(scheduler->dev, sgt->sgl, sgt->orig_nents, dir); in rga_mm_sync_dma_sg_for_cpu()
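
Lines 1302-1345 show the two sync helpers are mirror images: look up the table, then transfer buffer ownership to the device or back to the CPU with the streaming-DMA sync calls shown in the listing. One combined sketch:

static int rga_mm_sync_sgt_sketch(struct device *dev,
				  struct rga_dma_buffer_sketch *buffer,
				  enum dma_data_direction dir,
				  bool to_device)
{
	struct sg_table *sgt = rga_mm_lookup_sgt_sketch(buffer);

	if (sgt == NULL)
		return -EINVAL;	/* both callers bail on a missing table */

	if (to_device)
		dma_sync_sg_for_device(dev, sgt->sgl, sgt->orig_nents, dir);
	else
		dma_sync_sg_for_cpu(dev, sgt->sgl, sgt->orig_nents, dir);

	return 0;
}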