Lines Matching refs: sgt
50 rknpu_obj->sgt = drm_prime_pages_to_sg(drm, rknpu_obj->pages, in rknpu_gem_get_pages()
53 rknpu_obj->sgt = in rknpu_gem_get_pages()
56 if (IS_ERR(rknpu_obj->sgt)) { in rknpu_gem_get_pages()
57 ret = PTR_ERR(rknpu_obj->sgt); in rknpu_gem_get_pages()
62 ret = dma_map_sg(drm->dev, rknpu_obj->sgt->sgl, rknpu_obj->sgt->nents, in rknpu_gem_get_pages()
83 dma_addr = sg_dma_address(rknpu_obj->sgt->sgl); in rknpu_gem_get_pages()
86 for_each_sg(rknpu_obj->sgt->sgl, s, rknpu_obj->sgt->nents, i) { in rknpu_gem_get_pages()
97 dma_unmap_sg(drm->dev, rknpu_obj->sgt->sgl, rknpu_obj->sgt->nents, in rknpu_gem_get_pages()
101 sg_free_table(rknpu_obj->sgt); in rknpu_gem_get_pages()
102 kfree(rknpu_obj->sgt); in rknpu_gem_get_pages()
119 if (rknpu_obj->sgt != NULL) { in rknpu_gem_put_pages()
120 dma_unmap_sg(drm->dev, rknpu_obj->sgt->sgl, in rknpu_gem_put_pages()
121 rknpu_obj->sgt->nents, DMA_BIDIRECTIONAL); in rknpu_gem_put_pages()
122 sg_free_table(rknpu_obj->sgt); in rknpu_gem_put_pages()
123 kfree(rknpu_obj->sgt); in rknpu_gem_put_pages()
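
The references above (source lines 50-123) show the page-backed path: rknpu_gem_get_pages() wraps the page array in an sg_table with drm_prime_pages_to_sg(), DMA-maps it, and records the bus address of the first segment, while rknpu_gem_put_pages() and the error path undo everything in reverse order. A minimal sketch of that pattern follows; only the kernel APIs are the driver's, the demo_* names, struct layout, and contiguity check are illustrative.

/*
 * Illustrative sketch of the sg_table lifecycle visible in
 * rknpu_gem_get_pages()/rknpu_gem_put_pages().
 */
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <drm/drm_device.h>
#include <drm/drm_gem.h>
#include <drm/drm_prime.h>

struct demo_obj {
        struct drm_gem_object base;
        struct page **pages;
        unsigned int num_pages;
        struct sg_table *sgt;
        dma_addr_t dma_addr;
        bool contiguous;
};

static int demo_map_pages(struct drm_device *drm, struct demo_obj *obj)
{
        struct scatterlist *s;
        dma_addr_t next;
        int i, nents;

        /* Lines 50/53: wrap the page array in an sg_table (two call sites
         * because the helper's signature is kernel-version dependent). */
        obj->sgt = drm_prime_pages_to_sg(drm, obj->pages, obj->num_pages);
        if (IS_ERR(obj->sgt))
                return PTR_ERR(obj->sgt);

        /* Line 62: map for DMA; zero mapped entries means failure. */
        nents = dma_map_sg(drm->dev, obj->sgt->sgl, obj->sgt->nents,
                           DMA_BIDIRECTIONAL);
        if (nents == 0) {
                sg_free_table(obj->sgt);
                kfree(obj->sgt);
                obj->sgt = NULL;
                return -ENOMEM;
        }

        /* Lines 83-86: record the first bus address, then walk the
         * segments, e.g. to see whether the mapping came out contiguous. */
        obj->dma_addr = sg_dma_address(obj->sgt->sgl);
        next = obj->dma_addr;
        obj->contiguous = true;
        for_each_sg(obj->sgt->sgl, s, obj->sgt->nents, i) {
                if (sg_dma_address(s) != next) {
                        obj->contiguous = false;
                        break;
                }
                next += sg_dma_len(s);
        }

        return 0;
}

static void demo_unmap_pages(struct drm_device *drm, struct demo_obj *obj)
{
        if (!obj->sgt)
                return;

        /* Lines 119-123: strict reverse order of the setup above. */
        dma_unmap_sg(drm->dev, obj->sgt->sgl, obj->sgt->nents,
                     DMA_BIDIRECTIONAL);
        sg_free_table(obj->sgt);
        kfree(obj->sgt);
        obj->sgt = NULL;
}
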
135 struct sg_table *sgt = NULL; in rknpu_gem_alloc_buf() local
238 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in rknpu_gem_alloc_buf()
239 if (!sgt) { in rknpu_gem_alloc_buf()
244 ret = dma_get_sgtable_attrs(drm->dev, sgt, rknpu_obj->cookie, in rknpu_gem_alloc_buf()
252 for_each_sg(sgt->sgl, s, sgt->nents, i) { in rknpu_gem_alloc_buf()
259 ret = drm_prime_sg_to_page_addr_arrays(sgt, rknpu_obj->pages, NULL, in rknpu_gem_alloc_buf()
262 ret = drm_prime_sg_to_page_array(sgt, rknpu_obj->pages, nr_pages); in rknpu_gem_alloc_buf()
270 rknpu_obj->sgt = sgt; in rknpu_gem_alloc_buf()
275 sg_free_table(sgt); in rknpu_gem_alloc_buf()
277 kfree(sgt); in rknpu_gem_alloc_buf()
307 sg_free_table(rknpu_obj->sgt); in rknpu_gem_free_buf()
308 kfree(rknpu_obj->sgt); in rknpu_gem_free_buf()
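
rknpu_gem_alloc_buf() (lines 135-270) takes the other route: the backing memory comes from the DMA API, so the sg_table is kzalloc'ed and filled in by dma_get_sgtable_attrs(), and the page array is recovered from the table afterwards; the two helper calls at lines 259/262 are the older and newer names of the same conversion. rknpu_gem_free_buf() (lines 307-308) releases the table with the usual sg_free_table() + kfree() pair. A hedged sketch of that construction, with an illustrative helper name, the v5.12 cutoff as an assumption, and the surrounding dma_alloc_attrs() handling omitted:

/*
 * Illustrative sketch of the dma_get_sgtable_attrs() path in
 * rknpu_gem_alloc_buf(); only the kernel APIs are real.
 */
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/version.h>
#include <drm/drm_prime.h>

static struct sg_table *demo_cookie_to_sgt(struct device *dev, void *cookie,
                                           dma_addr_t dma_addr, size_t size,
                                           unsigned long attrs,
                                           struct page **pages,
                                           unsigned int nr_pages)
{
        struct sg_table *sgt;
        int ret;

        /* Lines 238-239: the table itself is a plain allocation. */
        sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return ERR_PTR(-ENOMEM);

        /* Line 244: describe the coherent allocation as scatter-gather. */
        ret = dma_get_sgtable_attrs(dev, sgt, cookie, dma_addr, size, attrs);
        if (ret < 0)
                goto err_free;

        /* Lines 259/262: recover the backing pages; the helper was split
         * and renamed around v5.12. */
#if LINUX_VERSION_CODE < KERNEL_VERSION(5, 12, 0)
        ret = drm_prime_sg_to_page_addr_arrays(sgt, pages, NULL, nr_pages);
#else
        ret = drm_prime_sg_to_page_array(sgt, pages, nr_pages);
#endif
        if (ret < 0)
                goto err_free_table;

        return sgt;

err_free_table:
        sg_free_table(sgt);     /* lines 275/277: same pair as the free path */
err_free:
        kfree(sgt);
        return ERR_PTR(ret);
}
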
518 rknpu_obj->sgt = drm_prime_pages_to_sg(drm, rknpu_obj->pages, in rknpu_gem_alloc_buf_with_cache()
521 rknpu_obj->sgt = in rknpu_gem_alloc_buf_with_cache()
524 if (IS_ERR(rknpu_obj->sgt)) { in rknpu_gem_alloc_buf_with_cache()
525 ret = PTR_ERR(rknpu_obj->sgt); in rknpu_gem_alloc_buf_with_cache()
533 for_each_sg(rknpu_obj->sgt->sgl, s, rknpu_obj->sgt->nents, i) { in rknpu_gem_alloc_buf_with_cache()
558 sg_free_table(rknpu_obj->sgt); in rknpu_gem_alloc_buf_with_cache()
559 kfree(rknpu_obj->sgt); in rknpu_gem_alloc_buf_with_cache()
609 if (rknpu_obj->sgt != NULL) { in rknpu_gem_free_buf_with_cache()
610 sg_free_table(rknpu_obj->sgt); in rknpu_gem_free_buf_with_cache()
611 kfree(rknpu_obj->sgt); in rknpu_gem_free_buf_with_cache()
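
The cached-allocation variant builds and tears down its table the same way: lines 518-559 mirror 50-101, and rknpu_gem_free_buf_with_cache() at lines 609-611 repeats the NULL-checked sg_free_table() + kfree() pair. The paired call sites at 50/53 and 518/521 exist because drm_prime_pages_to_sg() gained a struct drm_device parameter in newer kernels, so the driver keeps both forms behind a version guard. A wrapper like the sketch below is one way to picture it; the v5.10 cutoff is an assumption, check the driver's actual #if.

#include <linux/version.h>
#include <drm/drm_prime.h>

static struct sg_table *demo_pages_to_sg(struct drm_device *drm,
                                         struct page **pages,
                                         unsigned int nr_pages)
{
#if LINUX_VERSION_CODE >= KERNEL_VERSION(5, 10, 0)
        /* Newer kernels pass the drm_device as well. */
        return drm_prime_pages_to_sg(drm, pages, nr_pages);
#else
        return drm_prime_pages_to_sg(pages, nr_pages);
#endif
}
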
764 drm_prime_gem_destroy(obj, rknpu_obj->sgt); in rknpu_gem_object_destroy()
1258 struct sg_table *sgt) in rknpu_gem_prime_import_sg_table() argument
1270 rknpu_obj->dma_addr = sg_dma_address(sgt->sgl); in rknpu_gem_prime_import_sg_table()
1280 ret = drm_prime_sg_to_page_addr_arrays(sgt, rknpu_obj->pages, NULL, in rknpu_gem_prime_import_sg_table()
1283 ret = drm_prime_sg_to_page_array(sgt, rknpu_obj->pages, npages); in rknpu_gem_prime_import_sg_table()
1288 rknpu_obj->sgt = sgt; in rknpu_gem_prime_import_sg_table()
1290 if (sgt->nents == 1) { in rknpu_gem_prime_import_sg_table()
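
Lines 1258-1290 are the PRIME import callback: the imported table is not copied; the object records the first segment's DMA address, rebuilds a page array from the table, keeps the sgt pointer, and treats a single-entry table as a DMA-contiguous buffer. Line 764 is the matching teardown, where an imported object hands the table back through drm_prime_gem_destroy(). A hedged sketch with illustrative names; GEM object initialisation, size checks, and the pre-v5.12 page-array helper are omitted.

/*
 * Illustrative import-side sketch; demo_obj and the function names are
 * not the driver's, and drm_gem_private_object_init() plus the rest of
 * the setup the real callback needs are left out.
 */
#include <linux/dma-buf.h>
#include <linux/err.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <drm/drm_gem.h>
#include <drm/drm_prime.h>

struct demo_obj {
        struct drm_gem_object base;
        struct page **pages;
        struct sg_table *sgt;
        dma_addr_t dma_addr;
        bool contiguous;
};

static struct drm_gem_object *
demo_gem_prime_import_sg_table(struct drm_device *dev,
                               struct dma_buf_attachment *attach,
                               struct sg_table *sgt)
{
        unsigned long npages = attach->dmabuf->size >> PAGE_SHIFT;
        struct demo_obj *obj;
        int ret;

        obj = kzalloc(sizeof(*obj), GFP_KERNEL);
        if (!obj)
                return ERR_PTR(-ENOMEM);

        obj->pages = kvmalloc_array(npages, sizeof(*obj->pages), GFP_KERNEL);
        if (!obj->pages) {
                ret = -ENOMEM;
                goto err_free_obj;
        }

        /* Line 1270: DMA address of the first (possibly only) segment. */
        obj->dma_addr = sg_dma_address(sgt->sgl);

        /* Lines 1280/1283: rebuild the page array; this is the v5.12+ name,
         * older kernels use drm_prime_sg_to_page_addr_arrays(). */
        ret = drm_prime_sg_to_page_array(sgt, obj->pages, npages);
        if (ret < 0)
                goto err_free_pages;

        /* Lines 1288/1290: keep the (not owned) table and note whether the
         * buffer is contiguous. */
        obj->sgt = sgt;
        obj->contiguous = (sgt->nents == 1);

        return &obj->base;

err_free_pages:
        kvfree(obj->pages);
err_free_obj:
        kfree(obj);
        return ERR_PTR(ret);
}

/* Line 764: in the destroy path, an imported object releases its table
 * through drm_prime_gem_destroy(), which also detaches the dma-buf. */
static void demo_destroy_imported(struct demo_obj *obj)
{
        drm_prime_gem_destroy(&obj->base, obj->sgt);
}
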
1456 for_each_sg(rknpu_obj->sgt->sgl, sg, rknpu_obj->sgt->nents, in rknpu_gem_sync_ioctl()
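
The last reference (line 1456) is the sync ioctl walking the mapped segments so it can flush only the byte range the caller asked for. The sketch below shows that kind of partial sync in the to-device direction; the range arithmetic, the helper name, and the single-direction handling are illustrative rather than the driver's exact logic.

/*
 * Illustrative partial-sync walk in the spirit of rknpu_gem_sync_ioctl();
 * the bookkeeping is simplified.
 */
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static void demo_sync_range_for_device(struct device *dev,
                                       struct sg_table *sgt,
                                       unsigned long offset,
                                       unsigned long length)
{
        struct scatterlist *sg;
        unsigned long seen = 0;
        int i;

        /* Line 1456: visit every mapped segment and sync only those that
         * overlap [offset, offset + length). */
        for_each_sg(sgt->sgl, sg, sgt->nents, i) {
                unsigned long len = sg->length;

                if (seen + len <= offset) {     /* entirely before the range */
                        seen += len;
                        continue;
                }
                if (seen >= offset + length)    /* entirely past the range */
                        break;

                /* Sync this one entry; a finer-grained driver could narrow
                 * the flush further within the segment. */
                dma_sync_sg_for_device(dev, sg, 1, DMA_TO_DEVICE);
                seen += len;
        }
}
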