Searched refs:heap_gpu_va (Results 1 – 3 of 3) sorted by relevance
40   u64 heap_gpu_va = 0;                                                in sub_alloc() local
56   heap_gpu_va = ctx_alloc->gpu_va + ctx_offset;                       in sub_alloc()
57   ctx_ptr = kbase_vmap_prot(kctx, heap_gpu_va,                        in sub_alloc()
63   heap_nr, heap_gpu_va);                                              in sub_alloc()
73   heap_nr, heap_gpu_va);                                              in sub_alloc()
75   return heap_gpu_va;                                                 in sub_alloc()
91   u64 const heap_gpu_va)                                              in evict_heap_context() argument
94   u32 offset_in_bytes = (u32)(heap_gpu_va - ctx_alloc->gpu_va);       in evict_heap_context()
120  u64 const heap_gpu_va)                                              in sub_free() argument
131  if (WARN_ON(heap_gpu_va < ctx_alloc->gpu_va))                       in sub_free()
[all …]
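The matches above suggest that a heap-context GPU VA is handed out as an offset from the context allocator's base (ctx_alloc->gpu_va) in sub_alloc(), and that sub_free() recovers the slot from that same arithmetic after a WARN_ON range check. Below is a minimal standalone sketch of that bookkeeping; the names ctx_alloc, gpu_va, heap_nr and heap_gpu_va come from the matched lines, while the context size, the bitmap of free slots and the omission of the kbase_vmap_prot() mapping step are assumptions for illustration only.

    /* Sketch only: models the sub_alloc()/sub_free() VA arithmetic, not the driver. */
    #include <stdint.h>
    #include <stdio.h>
    #include <assert.h>

    #define HEAP_CTX_SIZE  32u   /* assumed size of one heap context */
    #define MAX_HEAP_CTXS  64u

    struct ctx_alloc {
            uint64_t gpu_va;     /* base GPU VA of the heap-context region */
            uint64_t in_use;     /* one bit per heap-context slot (assumed) */
    };

    /* Pick a free slot and return its GPU VA (0 on failure). */
    static uint64_t sub_alloc(struct ctx_alloc *a)
    {
            for (unsigned int heap_nr = 0; heap_nr < MAX_HEAP_CTXS; heap_nr++) {
                    if (!(a->in_use & (1ull << heap_nr))) {
                            a->in_use |= 1ull << heap_nr;
                            /* heap_gpu_va = base + offset of the chosen slot */
                            return a->gpu_va + (uint64_t)heap_nr * HEAP_CTX_SIZE;
                    }
            }
            return 0;
    }

    /* Recover the slot number from the GPU VA and release it. */
    static void sub_free(struct ctx_alloc *a, uint64_t heap_gpu_va)
    {
            /* The driver uses WARN_ON for this range check. */
            assert(heap_gpu_va >= a->gpu_va);

            uint32_t offset_in_bytes = (uint32_t)(heap_gpu_va - a->gpu_va);
            unsigned int heap_nr = offset_in_bytes / HEAP_CTX_SIZE;

            a->in_use &= ~(1ull << heap_nr);
    }

    int main(void)
    {
            struct ctx_alloc a = { .gpu_va = 0x100000, .in_use = 0 };
            uint64_t va = sub_alloc(&a);

            printf("heap_gpu_va = 0x%llx\n", (unsigned long long)va);
            sub_free(&a, va);
            return 0;
    }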
73 u64 const heap_gpu_va);
556  struct kbase_context *const kctx, u64 const heap_gpu_va)            in find_tiler_heap() argument
563  if (heap_gpu_va == heap->gpu_va)                                    in find_tiler_heap()
568  heap_gpu_va);                                                       in find_tiler_heap()
677  u64 *const heap_gpu_va, u64 *const first_chunk_va)                  in kbase_csf_tiler_heap_init() argument
796  *heap_gpu_va = heap->gpu_va;                                        in kbase_csf_tiler_heap_init()
847  u64 const heap_gpu_va)                                              in kbase_csf_tiler_heap_term() argument
855  heap = find_tiler_heap(kctx, heap_gpu_va);                          in kbase_csf_tiler_heap_term()
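These matches indicate that kbase_csf_tiler_heap_init() reports the new heap's GPU VA through the heap_gpu_va out-parameter, and that kbase_csf_tiler_heap_term() later uses the same value as a lookup key via find_tiler_heap(), which compares it against heap->gpu_va for each heap owned by the context. The sketch below models that lookup pattern only; the singly linked list layout and the struct fields beyond gpu_va are assumptions, not the driver's actual data structures.

    /* Sketch only: a find_tiler_heap()-style lookup keyed on heap_gpu_va. */
    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    struct tiler_heap {
            uint64_t gpu_va;           /* GPU VA identifying this heap */
            struct tiler_heap *next;   /* assumed linkage for the sketch */
    };

    struct kctx_heaps {
            struct tiler_heap *list;   /* heaps owned by one context */
    };

    static struct tiler_heap *find_tiler_heap(struct kctx_heaps *kctx,
                                              uint64_t heap_gpu_va)
    {
            for (struct tiler_heap *heap = kctx->list; heap; heap = heap->next) {
                    if (heap_gpu_va == heap->gpu_va)
                            return heap;
            }
            /* The driver logs the unknown heap_gpu_va here before failing. */
            return NULL;
    }

    int main(void)
    {
            struct tiler_heap h2 = { .gpu_va = 0x2000, .next = NULL };
            struct tiler_heap h1 = { .gpu_va = 0x1000, .next = &h2 };
            struct kctx_heaps kctx = { .list = &h1 };
            struct tiler_heap *found = find_tiler_heap(&kctx, 0x2000);

            printf("found heap at gpu_va 0x%llx\n",
                   found ? (unsigned long long)found->gpu_va : 0ull);
            return 0;
    }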