
Searched refs:gpu_alloc (Results 1 – 24 of 24) sorted by relevance

/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
260 WARN_ON(reg->gpu_alloc != kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_get()
262 (void)kbase_mem_phy_alloc_get(kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_get()
281 kbase_mem_phy_alloc_put(kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_put()
404 if (unlikely(reg->cpu_alloc != reg->gpu_alloc)) in kbase_mem_alloc()
543 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_mem_alloc()
856 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make() argument
858 struct kbase_context *kctx = gpu_alloc->imported.native.kctx; in kbase_mem_evictable_make()
865 kbase_mem_shrink_cpu_mapping(kctx, gpu_alloc->reg, in kbase_mem_evictable_make()
866 0, gpu_alloc->nents); in kbase_mem_evictable_make()
870 WARN_ON(!list_empty(&gpu_alloc->evict_node)); in kbase_mem_evictable_make()
[all …]
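The mali_kbase_mem_linux.c hits above pair every kbase_mem_phy_alloc_get() with a matching kbase_mem_phy_alloc_put(): kbase_phy_alloc_mapping_get() takes a reference at line 262 and kbase_phy_alloc_mapping_put() drops it at line 281. Below is a minimal sketch of that ownership discipline only; the type and function names are simplified stand-ins (the real kbase_mem_phy_alloc is a large kernel structure), using plain C11 atomics rather than the driver's actual refcounting.

    /* Sketch: simplified stand-in for the get/put refcount pairing
     * seen in the hits above; not the driver's real implementation. */
    #include <stdatomic.h>
    #include <stdlib.h>

    struct phy_alloc {            /* hypothetical simplified type */
        atomic_int refcount;
        void *pages;              /* backing storage, elided */
    };

    static struct phy_alloc *phy_alloc_get(struct phy_alloc *a)
    {
        atomic_fetch_add(&a->refcount, 1);  /* caller now holds a ref */
        return a;
    }

    static void phy_alloc_put(struct phy_alloc *a)
    {
        /* the last put frees the backing and the descriptor itself */
        if (atomic_fetch_sub(&a->refcount, 1) == 1) {
            free(a->pages);
            free(a);
        }
    }

Whoever takes the reference is responsible for the put; the error path in kbase_mem_alloc() (line 543) drops the reference it would otherwise have handed to the region.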
mali_kbase_mem.c
1619 new_reg->gpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
1690 if (!list_empty(&reg->gpu_alloc->evict_node)) { in kbase_free_alloced_region()
1698 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
1699 reg->gpu_alloc->reg = NULL; in kbase_free_alloced_region()
1713 kbase_mem_evictable_unmake(reg->gpu_alloc); in kbase_free_alloced_region()
1727 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_free_alloced_region()
1768 alloc = reg->gpu_alloc; in kbase_gpu_mmap()
1771 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_ALIAS) { in kbase_gpu_mmap()
1802 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_IMPORTED_UMM || in kbase_gpu_mmap()
1803 reg->gpu_alloc->type == KBASE_MEM_TYPE_IMPORTED_USER_BUF) { in kbase_gpu_mmap()
[all …]
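The mali_kbase_mem.c hits trace a region's lifetime: kbase_alloc_free_region() starts with gpu_alloc unbound (line 1619), and kbase_free_alloced_region() clears the alloc-to-region back-pointer before dropping the references (lines 1698–1727). A hedged sketch of that teardown ordering, with hypothetical simplified types:

    #include <stddef.h>

    /* Hypothetical simplified types; the real structs carry lists,
     * flags and locking that are omitted here. */
    struct va_region;
    struct phy_alloc { struct va_region *reg; };
    struct va_region { struct phy_alloc *cpu_alloc, *gpu_alloc; };

    static void phy_alloc_put(struct phy_alloc *a) { (void)a; /* drop a ref */ }

    static void free_alloced_region(struct va_region *reg)
    {
        /* Break the alloc -> region back-pointers first, so the dying
         * region can no longer be reached through its allocs... */
        reg->cpu_alloc->reg = NULL;
        if (reg->cpu_alloc != reg->gpu_alloc)
            reg->gpu_alloc->reg = NULL;

        /* ...then drop the region's reference on each side; an aliased
         * alloc (cpu_alloc == gpu_alloc) simply loses two refs. */
        phy_alloc_put(reg->cpu_alloc);
        phy_alloc_put(reg->gpu_alloc);
    }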
mali_kbase_softjobs.c
518 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish() local
532 if (gpu_alloc) { in kbase_debug_copy_finish()
533 switch (gpu_alloc->type) { in kbase_debug_copy_finish()
543 kbase_mem_phy_alloc_put(gpu_alloc); in kbase_debug_copy_finish()
662 reg->gpu_alloc == NULL) { in kbase_debug_copy_prepare()
667 buffers[i].gpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_debug_copy_prepare()
673 switch (reg->gpu_alloc->type) { in kbase_debug_copy_prepare()
676 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_debug_copy_prepare()
732 static void *dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, in dma_buf_kmap_page() argument
735 struct sg_table *sgt = gpu_alloc->imported.umm.sgt; in dma_buf_kmap_page()
[all …]
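dma_buf_kmap_page() (line 732) resolves a page index inside an imported dma-buf by walking the allocation's scatter-gather table (gpu_alloc->imported.umm.sgt). A hedged sketch of that lookup, using a plain array of segments instead of the kernel's struct sg_table:

    #include <stddef.h>

    /* Simplified stand-in for a scatter-gather table: each segment
     * covers a run of contiguous pages. */
    struct sg_seg { void **pages; size_t nr_pages; };

    static void *find_page(const struct sg_seg *segs, size_t nsegs,
                           size_t page_idx)
    {
        for (size_t i = 0; i < nsegs; i++) {
            if (page_idx < segs[i].nr_pages)
                return segs[i].pages[page_idx]; /* page is in this run */
            page_idx -= segs[i].nr_pages;       /* skip this segment */
        }
        return NULL;                            /* index out of range */
    }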
mali_kbase_mem.h
653 struct kbase_mem_phy_alloc *gpu_alloc; member
843 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_cpu_phy_pages()
844 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
854 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_gpu_phy_pages()
855 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
857 return reg->gpu_alloc->pages; in kbase_get_gpu_phy_pages()
868 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_reg_current_backed_size()
869 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
940 KBASE_DEBUG_ASSERT(!reg->gpu_alloc); in kbase_reg_prepare_native()
953 reg->gpu_alloc = kbase_alloc_create(kctx, reg->nr_pages, in kbase_reg_prepare_native()
[all …]
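Every accessor in mali_kbase_mem.h above asserts the same invariant before handing out a page array: a backed region's CPU and GPU allocs must agree on the number of backed pages (nents). A self-contained sketch of that guard, with assert() standing in for KBASE_DEBUG_ASSERT and simplified stand-in types:

    #include <assert.h>
    #include <stddef.h>

    struct phy_alloc { size_t nents; void **pages; };  /* simplified */
    struct va_region { struct phy_alloc *cpu_alloc, *gpu_alloc; };

    static void **get_gpu_phy_pages(struct va_region *reg)
    {
        assert(reg->cpu_alloc);
        assert(reg->gpu_alloc);
        /* CPU and GPU sides must be backed to the same extent */
        assert(reg->cpu_alloc->nents == reg->gpu_alloc->nents);
        return reg->gpu_alloc->pages;
    }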
mali_kbase_mem_linux.h
202 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
243 struct kbase_mem_phy_alloc *gpu_alloc; member
mali_kbase_gwt.c
41 reg->gpu_alloc->nents, in kbase_gpu_gwt_setup_page_permission()
43 reg->gpu_alloc->group_id); in kbase_gpu_gwt_setup_page_permission()
mali_kbase_debug_mem_view.c
203 if (reg->gpu_alloc == NULL) in debug_mem_zone_open()
220 mapping->alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in debug_mem_zone_open()
mali_kbase_debug_mem_allocs.c
68 reg->flags, type_names[reg->gpu_alloc->type]); in debug_zone_mem_allocs_show()
mali_kbase_defs.h
217 struct kbase_mem_phy_alloc *gpu_alloc; member
/OK3568_Linux_fs/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem_linux.c
247 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_mem_alloc()
509 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make() argument
511 struct kbase_context *kctx = gpu_alloc->imported.kctx; in kbase_mem_evictable_make()
516 WARN_ON(!list_empty(&gpu_alloc->evict_node)); in kbase_mem_evictable_make()
518 kbase_mem_shrink_cpu_mapping(kctx, gpu_alloc->reg, in kbase_mem_evictable_make()
519 0, gpu_alloc->nents); in kbase_mem_evictable_make()
526 list_add(&gpu_alloc->evict_node, &kctx->evict_list); in kbase_mem_evictable_make()
528 kbase_mem_evictable_mark_reclaim(gpu_alloc); in kbase_mem_evictable_make()
530 gpu_alloc->reg->flags |= KBASE_REG_DONT_NEED; in kbase_mem_evictable_make()
534 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake() argument
[all …]
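The midgard copy of kbase_mem_evictable_make() (lines 509–530) shows the whole eviction-candidate flow in order: shrink the CPU mapping over the backed range, add the alloc to the context's evict list, account it as reclaimable, and flag the region DONT_NEED. A stubbed sketch of that sequence; the helper names are hypothetical and their bodies elided:

    #include <stdbool.h>
    #include <stddef.h>

    struct region;
    struct evict_list;
    struct phy_alloc {
        struct region *reg;
        size_t nents;        /* pages currently backed */
        bool reclaimable;
    };

    /* Stubs standing in for the real helpers. */
    static void shrink_cpu_mapping(struct region *r, size_t new_pg,
                                   size_t old_pg) { (void)r; (void)new_pg; (void)old_pg; }
    static void evict_list_add(struct evict_list *l, struct phy_alloc *a) { (void)l; (void)a; }
    static void set_dont_need(struct region *r) { (void)r; }

    static void make_evictable(struct evict_list *l, struct phy_alloc *a)
    {
        shrink_cpu_mapping(a->reg, 0, a->nents); /* unmap the CPU side */
        evict_list_add(l, a);                    /* visible to the shrinker */
        a->reclaimable = true;                   /* account as reclaimable */
        set_dont_need(a->reg);                   /* GPU faults repopulate later */
    }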
mali_kbase_mem.c
808 new_reg->gpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
842 if (!list_empty(&reg->gpu_alloc->evict_node)) { in kbase_free_alloced_region()
850 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
851 reg->gpu_alloc->reg = NULL; in kbase_free_alloced_region()
863 kbase_mem_evictable_unmake(reg->gpu_alloc); in kbase_free_alloced_region()
875 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_free_alloced_region()
904 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_ALIAS) { in kbase_gpu_mmap()
908 alloc = reg->gpu_alloc; in kbase_gpu_mmap()
940 kbase_mem_phy_alloc_gpu_mapped(reg->gpu_alloc); in kbase_gpu_mmap()
946 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_ALIAS) { in kbase_gpu_mmap()
[all …]
mali_kbase_mem.h
301 …struct kbase_mem_phy_alloc *gpu_alloc; /* the one alloc object we mmap to the GPU when mapping thi… member
315 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_cpu_phy_pages()
316 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
325 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_gpu_phy_pages()
326 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
328 return reg->gpu_alloc->pages; in kbase_get_gpu_phy_pages()
339 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_reg_current_backed_size()
340 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
400 KBASE_DEBUG_ASSERT(!reg->gpu_alloc); in kbase_reg_prepare_native()
413 reg->gpu_alloc = kbase_alloc_create(reg->nr_pages, in kbase_reg_prepare_native()
[all …]
mali_kbase_softjobs.c
499 struct kbase_mem_phy_alloc *gpu_alloc; member
536 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish() local
547 if (gpu_alloc) { in kbase_debug_copy_finish()
548 switch (gpu_alloc->type) { in kbase_debug_copy_finish()
558 kbase_mem_phy_alloc_put(gpu_alloc); in kbase_debug_copy_finish()
651 if (NULL == reg || NULL == reg->gpu_alloc || in kbase_debug_copy_prepare()
657 buffers[i].gpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_debug_copy_prepare()
663 switch (reg->gpu_alloc->type) { in kbase_debug_copy_prepare()
666 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_debug_copy_prepare()
763 static void *dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, in dma_buf_kmap_page() argument
[all …]
mali_kbase_mem_linux.h
106 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
127 struct kbase_mem_phy_alloc *gpu_alloc; member
mali_kbase_debug_mem_view.c
169 if (reg->gpu_alloc == NULL) in debug_mem_zone_open()
179 mapping->alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in debug_mem_zone_open()
mali_kbase_mmu.c
212 if (region->gpu_alloc->type == KBASE_MEM_TYPE_IMPORTED_UMM) { in page_fault_worker()
297 if (kbase_alloc_phy_pages_helper(region->gpu_alloc, new_pages) == 0) { in page_fault_worker()
298 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_worker()
303 kbase_free_phy_pages_helper(region->gpu_alloc, in page_fault_worker()
334 kbase_free_phy_pages_helper(region->gpu_alloc, new_pages); in page_fault_worker()
335 if (region->gpu_alloc != region->cpu_alloc) in page_fault_worker()
mali_kbase_context.c
200 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_reg_pending_dtor()
mali_kbase_jd.c
418 reg->gpu_alloc->type == KBASE_MEM_TYPE_IMPORTED_UMM) { in kbase_jd_pre_external_resources()
421 resv = reg->gpu_alloc->imported.umm.dma_buf->resv; in kbase_jd_pre_external_resources()
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/csf/
mali_kbase_csf_tiler_heap.c
305 if (unlikely(atomic_read(&chunk->region->gpu_alloc->kernel_mappings) > 0)) { in alloc_new_chunk()
327 if (WARN(!chunk->region->gpu_alloc, in alloc_new_chunk()
332 if (WARN(chunk->region->gpu_alloc->type != KBASE_MEM_TYPE_NATIVE, in alloc_new_chunk()
366 kbase_set_phy_alloc_page_status(chunk->region->gpu_alloc, NOT_MOVABLE); in alloc_new_chunk()
658 if (reg->gpu_alloc->type != KBASE_MEM_TYPE_NATIVE) { in kbasep_is_buffer_descriptor_region_suitable()
752 kbase_set_phy_alloc_page_status(buf_desc_reg->gpu_alloc, NOT_MOVABLE); in kbase_csf_tiler_heap_init()
1093 err = kbase_mem_shrink_gpu_mapping(kctx, chunk->region, 0, chunk->region->gpu_alloc->nents); in delete_chunk_physical_pages()
1109 err = kbase_free_phy_pages_helper(chunk->region->gpu_alloc, in delete_chunk_physical_pages()
1110 chunk->region->gpu_alloc->nents); in delete_chunk_physical_pages()
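delete_chunk_physical_pages() (lines 1093–1110) follows the usual two-step teardown: shrink the GPU mapping down to zero pages first, and only then free the physical pages behind it, so the GPU never holds a mapping onto freed memory. A minimal sketch of that ordering; the names are hypothetical and error handling is trimmed:

    #include <stddef.h>

    struct phy_alloc { size_t nents; };            /* simplified stand-in */
    struct region { struct phy_alloc *gpu_alloc; };

    /* Stubs for the two real operations. */
    static int shrink_gpu_mapping(struct region *r, size_t new_pg,
                                  size_t old_pg) { (void)r; (void)new_pg; (void)old_pg; return 0; }
    static int free_phy_pages(struct phy_alloc *a, size_t nr) { a->nents -= nr; return 0; }

    static int delete_chunk_pages(struct region *reg)
    {
        struct phy_alloc *alloc = reg->gpu_alloc;
        int err;

        /* step 1: remove the GPU mapping over the whole backed range */
        err = shrink_gpu_mapping(reg, 0, alloc->nents);
        if (err)
            return err;

        /* step 2: only now release the physical pages themselves */
        return free_phy_pages(alloc, alloc->nents);
    }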
mali_kbase_csf_kcpu.c
68 !kbase_mem_is_imported(reg->gpu_alloc->type)) { in kbase_kcpu_map_import_prepare()
73 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_IMPORTED_USER_BUF) { in kbase_kcpu_map_import_prepare()
95 alloc = reg->gpu_alloc; in kbase_kcpu_map_import_prepare()
132 !kbase_mem_is_imported(reg->gpu_alloc->type)) { in kbase_kcpu_unmap_import_prepare_internal()
137 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_IMPORTED_USER_BUF) { in kbase_kcpu_unmap_import_prepare_internal()
141 if (reg->gpu_alloc->nents != in kbase_kcpu_unmap_import_prepare_internal()
142 reg->gpu_alloc->imported.user_buf.nr_pages) { in kbase_kcpu_unmap_import_prepare_internal()
516 pages_used = reg->gpu_alloc->nents; in kbase_kcpu_jit_free_process()
685 (reg->gpu_alloc->type != KBASE_MEM_TYPE_NATIVE) || in kbase_csf_queue_group_suspend_prepare()
mali_kbase_csf_sync_debugfs.c
478 struct page *page = as_page(queue->queue_reg->gpu_alloc->pages[page_off]); in kbasep_csf_read_ringbuffer_value()
mali_kbase_csf.c
482 region->gpu_alloc->type != KBASE_MEM_TYPE_NATIVE) { in csf_queue_register_internal()
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
715 region->flags, region->gpu_alloc->group_id, &dirty_pgds); in kbase_gpu_mmu_handle_write_fault()
829 if (WARN_ON(region->gpu_alloc->group_id >= in page_fault_try_alloc()
837 root_pool = &kctx->mem_pools.large[region->gpu_alloc->group_id]; in page_fault_try_alloc()
840 root_pool = &kctx->mem_pools.small[region->gpu_alloc->group_id]; in page_fault_try_alloc()
844 if (region->gpu_alloc != region->cpu_alloc) in page_fault_try_alloc()
876 if (region->gpu_alloc == region->cpu_alloc) in page_fault_try_alloc()
883 kbase_alloc_phy_pages_helper_locked(region->gpu_alloc, pool, in page_fault_try_alloc()
892 if (!alloc_failed && region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
922 kbase_free_phy_pages_helper(region->gpu_alloc, total_gpu_pages_alloced); in page_fault_try_alloc()
923 if (region->gpu_alloc != region->cpu_alloc && total_cpu_pages_alloced > 0) in page_fault_try_alloc()
[all …]
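page_fault_try_alloc() here (and page_fault_worker() in the midgard copy above) grows the GPU alloc, then, when the region is double-backed (cpu_alloc != gpu_alloc), grows the CPU alloc too, rolling the GPU side back if the second step fails. A hedged sketch of that grow-both-or-neither pattern with simplified stand-in types:

    #include <stddef.h>

    struct phy_alloc { size_t nents; };                    /* simplified */
    struct region { struct phy_alloc *cpu_alloc, *gpu_alloc; };

    /* Stubs: grow/shrink the number of physically backed pages. */
    static int grow(struct phy_alloc *a, size_t nr)    { a->nents += nr; return 0; }
    static void shrink(struct phy_alloc *a, size_t nr) { a->nents -= nr; }

    static int fault_grow_backing(struct region *reg, size_t new_pages)
    {
        if (grow(reg->gpu_alloc, new_pages))
            return -1;                         /* nothing to undo yet */

        /* A double-backed region must grow both sides or neither. */
        if (reg->gpu_alloc != reg->cpu_alloc &&
            grow(reg->cpu_alloc, new_pages)) {
            shrink(reg->gpu_alloc, new_pages); /* roll back the GPU side */
            return -1;
        }
        return 0;
    }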
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/context/
mali_kbase_context.c
383 kctx->pending_regions[cookie]->gpu_alloc); in kbase_context_sticky_resource_term()