
Searched refs:cpu_alloc (Results 1 – 15 of 15) sorted by relevance

/OK3568_Linux_fs/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem.h
300 …struct kbase_mem_phy_alloc *cpu_alloc; /* the one alloc object we mmap to the CPU when mapping thi… member
314 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_cpu_phy_pages()
316 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
318 return reg->cpu_alloc->pages; in kbase_get_cpu_phy_pages()
324 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_gpu_phy_pages()
326 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
335 if (!reg->cpu_alloc) in kbase_reg_current_backed_size()
338 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_reg_current_backed_size()
340 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
342 return reg->cpu_alloc->nents; in kbase_reg_current_backed_size()
[all …]
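
Taken together, the mali_kbase_mem.h hits above all come from the inline accessors that hand out the CPU-side page array and the current backed size. A rough reconstruction from the matched lines follows; the return types and every line not shown above are assumptions, not copied from the driver:

static inline phys_addr_t *kbase_get_cpu_phy_pages(struct kbase_va_region *reg)
{
	KBASE_DEBUG_ASSERT(reg);
	KBASE_DEBUG_ASSERT(reg->cpu_alloc);
	KBASE_DEBUG_ASSERT(reg->gpu_alloc);
	/* CPU and GPU allocs must agree on how many pages are backed */
	KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents);

	return reg->cpu_alloc->pages;
}

static inline size_t kbase_reg_current_backed_size(struct kbase_va_region *reg)
{
	KBASE_DEBUG_ASSERT(reg);
	/* no alloc object bound yet: nothing is backed */
	if (!reg->cpu_alloc)
		return 0;

	KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents);

	return reg->cpu_alloc->nents;
}

kbase_get_gpu_phy_pages() (hits at 324 and 326) follows the same shape, presumably returning reg->gpu_alloc->pages instead.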
mali_kbase_mem_linux.c
246 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_mem_alloc()
281 if (reg->cpu_alloc->type != KBASE_MEM_TYPE_ALIAS) { in kbase_mem_query()
287 aliased = reg->cpu_alloc->imported.alias.aliased; in kbase_mem_query()
288 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) in kbase_mem_query()
627 if (atomic_read(&reg->cpu_alloc->gpu_mappings) > 1) in kbase_mem_flags_change()
632 if (reg->cpu_alloc->type != KBASE_MEM_TYPE_NATIVE) { in kbase_mem_flags_change()
743 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_ump()
866 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_umm()
988 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_user_buffer()
1092 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_mem_from_user_buffer()
[all …]
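
The import paths matched above (UMP, dma-buf and user-buffer imports at 743, 866 and 988) all bind the CPU side to the same allocation object as the GPU side by taking an extra reference, and the error paths at 246 and 1092 drop that reference again. A minimal userspace model of that get/put sharing, with hypothetical names that only mirror the driver's:

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

/* stand-in for struct kbase_mem_phy_alloc: just a refcount and a size */
struct phy_alloc {
	atomic_int kref;
	size_t nents;
};

static struct phy_alloc *phy_alloc_get(struct phy_alloc *a)
{
	atomic_fetch_add(&a->kref, 1);
	return a;
}

static void phy_alloc_put(struct phy_alloc *a)
{
	/* free when the last reference is dropped */
	if (atomic_fetch_sub(&a->kref, 1) == 1)
		free(a);
}

struct region {
	struct phy_alloc *cpu_alloc;
	struct phy_alloc *gpu_alloc;
};

int main(void)
{
	struct region reg;

	reg.gpu_alloc = calloc(1, sizeof(*reg.gpu_alloc));
	atomic_init(&reg.gpu_alloc->kref, 1);

	/* imported memory: the CPU side shares the GPU-side alloc object */
	reg.cpu_alloc = phy_alloc_get(reg.gpu_alloc);
	printf("shared: %d\n", reg.cpu_alloc == reg.gpu_alloc);

	/* teardown (or an error path) drops each side's reference */
	phy_alloc_put(reg.cpu_alloc);
	phy_alloc_put(reg.gpu_alloc);
	return 0;
}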
mali_kbase_mem.c
807 new_reg->cpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
849 reg->cpu_alloc->reg = NULL; in kbase_free_alloced_region()
850 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
861 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == in kbase_free_alloced_region()
874 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_free_alloced_region()
1257 reg->cpu_alloc->reg = NULL; in kbase_mem_free_region()
1258 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_mem_free_region()
1268 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == in kbase_mem_free_region()
1585 if ((size_t) vsize > ((size_t) -1 / sizeof(*reg->cpu_alloc->pages))) in kbase_alloc_phy_pages()
1590 if (kbase_alloc_phy_pages_helper(reg->cpu_alloc, size) != 0) in kbase_alloc_phy_pages()
[all …]
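
The kbase_alloc_phy_pages() hit at 1585 is an overflow guard: before sizing the page array it checks that vsize * sizeof(*pages) cannot wrap around SIZE_MAX. The same idiom in a standalone form (the function and names below are hypothetical, not the driver's):

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

/* Reject page counts whose array size would overflow size_t, mirroring the
 * (size_t)-1 / sizeof(*pages) guard in the hit above. */
static void *alloc_page_array(size_t nr_pages, size_t elem_size)
{
	if (elem_size == 0 || nr_pages > (size_t)-1 / elem_size)
		return NULL;	/* nr_pages * elem_size would wrap */
	return malloc(nr_pages * elem_size);
}

int main(void)
{
	void *ok  = alloc_page_array(1024, sizeof(unsigned long long));
	void *bad = alloc_page_array((size_t)-1, sizeof(unsigned long long));

	printf("ok=%p bad=%p\n", ok, bad);	/* bad is NULL: the guard fired */
	free(ok);
	return 0;
}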
mali_kbase_mem_linux.h
126 struct kbase_mem_phy_alloc *cpu_alloc; member
mali_kbase_context.c
199 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_reg_pending_dtor()
mali_kbase_mmu.c
298 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_worker()
300 region->cpu_alloc, new_pages) == 0) { in page_fault_worker()
335 if (region->gpu_alloc != region->cpu_alloc) in page_fault_worker()
336 kbase_free_phy_pages_helper(region->cpu_alloc, in page_fault_worker()
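
The page_fault_worker() hits sketch how a fault-driven grow treats regions whose CPU and GPU alloc objects differ: the GPU side is committed first, the CPU side is grown only when it is a separate object, and the CPU-side pages are freed again if a later step fails. Roughly, with everything outside the matched lines (including the insert_failed condition and the label) assumed:

/* grow the GPU-side backing first (surrounding error handling assumed) */
if (kbase_alloc_phy_pages_helper(region->gpu_alloc, new_pages) != 0)
	goto fault_done;

if (region->gpu_alloc != region->cpu_alloc) {
	if (kbase_alloc_phy_pages_helper(region->cpu_alloc, new_pages) == 0) {
		/* both allocs are backed; continue towards the MMU insert */
	} else {
		/* CPU-side grow failed: give the GPU-side pages back */
		kbase_free_phy_pages_helper(region->gpu_alloc, new_pages);
		goto fault_done;
	}
}

/* on a later failure (e.g. the PTE insert), the CPU-side pages are
 * released as well, which is what the 335/336 hits show */
if (insert_failed) {
	kbase_free_phy_pages_helper(region->gpu_alloc, new_pages);
	if (region->gpu_alloc != region->cpu_alloc)
		kbase_free_phy_pages_helper(region->cpu_alloc, new_pages);
}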
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
193 if (vsize != size || reg->cpu_alloc->permanent_map != NULL || in kbase_phy_alloc_mapping_init()
194 reg->cpu_alloc->type != KBASE_MEM_TYPE_NATIVE) in kbase_phy_alloc_mapping_init()
209 reg->cpu_alloc->permanent_map = kern_mapping; in kbase_phy_alloc_mapping_init()
252 kern_mapping = reg->cpu_alloc->permanent_map; in kbase_phy_alloc_mapping_get()
259 WARN_ON(reg->cpu_alloc != kern_mapping->cpu_alloc); in kbase_phy_alloc_mapping_get()
261 (void)kbase_mem_phy_alloc_get(kern_mapping->cpu_alloc); in kbase_phy_alloc_mapping_get()
277 WARN_ON(kctx != kern_mapping->cpu_alloc->imported.native.kctx); in kbase_phy_alloc_mapping_put()
278 WARN_ON(kern_mapping != kern_mapping->cpu_alloc->permanent_map); in kbase_phy_alloc_mapping_put()
280 kbase_mem_phy_alloc_put(kern_mapping->cpu_alloc); in kbase_phy_alloc_mapping_put()
404 if (unlikely(reg->cpu_alloc != reg->gpu_alloc)) in kbase_mem_alloc()
[all …]
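
In the bifrost variant the first group of hits concerns the permanent kernel mapping cached on the CPU alloc: kbase_phy_alloc_mapping_init() only installs one for fully-backed native allocations (193, 194, 209), and the get/put pair pins and releases the alloc around each use. A loose sketch of the lookup side; struct names are taken from the hits where possible, the rest (including the returned field) is assumed:

struct kbase_vmap_struct *kern_mapping;

kern_mapping = reg->cpu_alloc->permanent_map;
if (!kern_mapping)
	return NULL;			/* region has no permanent mapping */

/* the cached mapping must still belong to this region's alloc */
WARN_ON(reg->cpu_alloc != kern_mapping->cpu_alloc);

/* pin the alloc for as long as the caller holds the kernel mapping */
(void)kbase_mem_phy_alloc_get(kern_mapping->cpu_alloc);

return kern_mapping->addr;	/* field name is an assumption */

The matching kbase_phy_alloc_mapping_put() hits (277, 278, 280) drop the same reference with kbase_mem_phy_alloc_put() after checking that the mapping still belongs to the context and to the alloc.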
mali_kbase_mem.h
652 struct kbase_mem_phy_alloc *cpu_alloc; member
842 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_cpu_phy_pages()
844 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
846 return reg->cpu_alloc->pages; in kbase_get_cpu_phy_pages()
853 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_gpu_phy_pages()
855 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
864 if (!reg->cpu_alloc) in kbase_reg_current_backed_size()
867 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_reg_current_backed_size()
869 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
871 return reg->cpu_alloc->nents; in kbase_reg_current_backed_size()
[all …]
mali_kbase_mem.c
1618 new_reg->cpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
1697 reg->cpu_alloc->reg = NULL; in kbase_free_alloced_region()
1698 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
1711 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == in kbase_free_alloced_region()
1726 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_free_alloced_region()
2285 WARN_ON(reg->cpu_alloc->type != KBASE_MEM_TYPE_NATIVE); in kbase_mem_free_region()
2291 reg->cpu_alloc->reg = NULL; in kbase_mem_free_region()
2292 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_mem_free_region()
3253 if ((size_t) vsize > ((size_t) -1 / sizeof(*reg->cpu_alloc->pages))) in kbase_alloc_phy_pages()
3258 if (kbase_alloc_phy_pages_helper(reg->cpu_alloc, size) != 0) in kbase_alloc_phy_pages()
[all …]
mali_kbase_mem_linux.h
242 struct kbase_mem_phy_alloc *cpu_alloc; member
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/csf/
mali_kbase_csf_kcpu.h
186 struct kbase_mem_phy_alloc *cpu_alloc; member
mali_kbase_csf_kcpu.c
699 sus_buf->cpu_alloc = kbase_mem_phy_alloc_get(reg->cpu_alloc); in kbase_csf_queue_group_suspend_prepare()
700 kbase_mem_phy_alloc_kernel_mapped(reg->cpu_alloc); in kbase_csf_queue_group_suspend_prepare()
2090 if (!sus_buf->cpu_alloc) { in kcpu_queue_process()
2097 sus_buf->cpu_alloc); in kcpu_queue_process()
2099 sus_buf->cpu_alloc); in kcpu_queue_process()
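
The KCPU suspend-copy hits split the work across two points in time: kbase_csf_queue_group_suspend_prepare() pins the region's cpu_alloc and marks it kernel-mapped, and kcpu_queue_process() later consumes and releases it. A sketch of that pairing; the two callees on the release side are inferred from the truncated 2097/2099 hits, not confirmed by them:

/* prepare: pin the CPU alloc and flag it as kernel-mapped so the backing
 * cannot be shrunk before the suspend copy runs */
sus_buf->cpu_alloc = kbase_mem_phy_alloc_get(reg->cpu_alloc);
kbase_mem_phy_alloc_kernel_mapped(reg->cpu_alloc);

/* process: runs later, possibly after the user has freed the region */
if (!sus_buf->cpu_alloc) {
	/* buffer was pinned through user pages instead (path assumed) */
} else {
	/* copy the suspend data out, then undo both prepare-time steps
	 * (these two calls are an inference, see above) */
	kbase_mem_phy_alloc_kernel_unmapped(sus_buf->cpu_alloc);
	kbase_mem_phy_alloc_put(sus_buf->cpu_alloc);
}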
mali_kbase_csf_tiler_heap.c
114 if (chunk->region->cpu_alloc != NULL) { in remove_external_chunk_mappings()
116 chunk->region->cpu_alloc->nents); in remove_external_chunk_mappings()
347 if (WARN(atomic_read(&chunk->region->cpu_alloc->gpu_mappings) > 1, in alloc_new_chunk()
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
844 if (region->gpu_alloc != region->cpu_alloc) in page_fault_try_alloc()
876 if (region->gpu_alloc == region->cpu_alloc) in page_fault_try_alloc()
892 if (!alloc_failed && region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
894 region->cpu_alloc, pool, pages_to_alloc_4k_per_alloc, in page_fault_try_alloc()
923 if (region->gpu_alloc != region->cpu_alloc && total_cpu_pages_alloced > 0) in page_fault_try_alloc()
924 kbase_free_phy_pages_helper(region->cpu_alloc, total_cpu_pages_alloced); in page_fault_try_alloc()
1306 if (region->gpu_alloc != region->cpu_alloc) in kbase_mmu_page_fault_worker()
1307 kbase_free_phy_pages_helper(region->cpu_alloc, in kbase_mmu_page_fault_worker()
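
page_fault_try_alloc() in the bifrost MMU code generalises the midgard pattern above: CPU-side pages are requested only when the region keeps a distinct cpu_alloc, the number actually obtained is tracked, and on failure exactly that many are returned. In outline, with the allocation call elided and cpu_pages_obtained standing in for its result:

size_t total_cpu_pages_alloced = 0;
bool alloc_failed = false;

/* ... GPU-side pages are taken from the memory pool first ... */

if (!alloc_failed && region->gpu_alloc != region->cpu_alloc) {
	/* request the same count of 4K pages for the CPU-side alloc;
	 * the helper call itself is elided here */
	if (cpu_pages_obtained)
		total_cpu_pages_alloced += pages_to_alloc_4k_per_alloc;
	else
		alloc_failed = true;
}

/* unwind path: hand back only what the CPU side actually got */
if (region->gpu_alloc != region->cpu_alloc && total_cpu_pages_alloced > 0)
	kbase_free_phy_pages_helper(region->cpu_alloc, total_cpu_pages_alloced);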
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/context/
mali_kbase_context.c
381 kctx->pending_regions[cookie]->cpu_alloc); in kbase_context_sticky_resource_term()