
Searched refs:heap (Results 1 – 25 of 246) sorted by relevance


/OK3568_Linux_fs/kernel/drivers/staging/android/ion/
ion_heap.c
23 struct ion_heap *heap = container_of(shrinker, struct ion_heap, in ion_heap_shrink_count() local
27 total = ion_heap_freelist_size(heap) / PAGE_SIZE; in ion_heap_shrink_count()
29 if (heap->ops->shrink) in ion_heap_shrink_count()
30 total += heap->ops->shrink(heap, sc->gfp_mask, 0); in ion_heap_shrink_count()
38 struct ion_heap *heap = container_of(shrinker, struct ion_heap, in ion_heap_shrink_scan() local
50 if (heap->flags & ION_HEAP_FLAG_DEFER_FREE) in ion_heap_shrink_scan()
51 freed = ion_heap_freelist_shrink(heap, to_scan * PAGE_SIZE) / in ion_heap_shrink_scan()
58 if (heap->ops->shrink) in ion_heap_shrink_scan()
59 freed += heap->ops->shrink(heap, sc->gfp_mask, to_scan); in ion_heap_shrink_scan()
64 static size_t _ion_heap_freelist_drain(struct ion_heap *heap, size_t size, in _ion_heap_freelist_drain() argument
[all …]
ion.c
69 struct ion_heap *heap; in ion_query_heaps_kernel() local
79 plist_for_each_entry(heap, &dev->heaps, node) { in ion_query_heaps_kernel()
80 strncpy(hdata[i].name, heap->name, MAX_HEAP_NAME); in ion_query_heaps_kernel()
82 hdata[i].type = heap->type; in ion_query_heaps_kernel()
83 hdata[i].heap_id = heap->id; in ion_query_heaps_kernel()
102 struct ion_heap *heap; in ion_query_heaps() local
119 plist_for_each_entry(heap, &dev->heaps, node) { in ion_query_heaps()
120 strncpy(hdata.name, heap->name, MAX_HEAP_NAME); in ion_query_heaps()
122 hdata.type = heap->type; in ion_query_heaps()
123 hdata.heap_id = heap->id; in ion_query_heaps()
[all …]
ion_dma_buf.c
54 struct ion_heap *heap = buffer->heap; in ion_dma_buf_attach() local
56 if (heap->buf_ops.attach) in ion_dma_buf_attach()
57 return heap->buf_ops.attach(dmabuf, attachment); in ion_dma_buf_attach()
88 struct ion_heap *heap = buffer->heap; in ion_dma_buf_detatch() local
90 if (heap->buf_ops.detach) in ion_dma_buf_detatch()
91 return heap->buf_ops.detach(dmabuf, attachment); in ion_dma_buf_detatch()
105 struct ion_heap *heap = buffer->heap; in ion_map_dma_buf() local
110 if (heap->buf_ops.map_dma_buf) in ion_map_dma_buf()
111 return heap->buf_ops.map_dma_buf(attachment, direction); in ion_map_dma_buf()
133 struct ion_heap *heap = buffer->heap; in ion_unmap_dma_buf() local
[all …]
ion_buffer.c
35 static struct ion_buffer *ion_buffer_create(struct ion_heap *heap, in ion_buffer_create() argument
47 buffer->heap = heap; in ion_buffer_create()
51 ret = heap->ops->allocate(heap, buffer, len, flags); in ion_buffer_create()
54 if (!(heap->flags & ION_HEAP_FLAG_DEFER_FREE)) in ion_buffer_create()
57 ion_heap_freelist_drain(heap, 0); in ion_buffer_create()
58 ret = heap->ops->allocate(heap, buffer, len, flags); in ion_buffer_create()
69 spin_lock(&heap->stat_lock); in ion_buffer_create()
70 heap->num_of_buffers++; in ion_buffer_create()
71 heap->num_of_alloc_bytes += len; in ion_buffer_create()
72 if (heap->num_of_alloc_bytes > heap->alloc_bytes_wm) in ion_buffer_create()
[all …]
ion_protected_heap.c
169 struct ion_heap heap; member
180 static void free_buffer_page(struct ion_heap *heap, in free_buffer_page() argument
188 pheap = container_of(heap, struct ion_protected_heap, heap); in free_buffer_page()
199 static struct page *alloc_buffer_page(struct ion_heap *heap, in alloc_buffer_page() argument
208 pheap = container_of(heap, struct ion_protected_heap, heap); in alloc_buffer_page()
221 static struct page_info *alloc_largest_available(struct ion_heap *heap, in alloc_largest_available() argument
240 page = alloc_buffer_page(heap, buffer, orders[i]); in alloc_largest_available()
254 static int ion_protected_heap_allocate(struct ion_heap *heap, in ion_protected_heap_allocate() argument
278 info = alloc_largest_available(heap, buffer, size_remaining, in ion_protected_heap_allocate()
335 free_buffer_page(heap, buffer, info->page, info->order); in ion_protected_heap_allocate()
[all …]
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/csf/
mali_kbase_csf_tiler_heap.c
87 struct kbase_csf_tiler_heap *const heap) in get_last_chunk() argument
89 if (list_empty(&heap->chunks_list)) in get_last_chunk()
92 return list_last_entry(&heap->chunks_list, in get_last_chunk()
138 static int link_chunk(struct kbase_csf_tiler_heap *const heap, in link_chunk() argument
141 struct kbase_csf_tiler_heap_chunk *const prev = get_last_chunk(heap); in link_chunk()
144 struct kbase_context *const kctx = heap->kctx; in link_chunk()
150 *prev_hdr = encode_chunk_ptr(heap->chunk_size, chunk->gpu_va); in link_chunk()
174 static int init_chunk(struct kbase_csf_tiler_heap *const heap, in init_chunk() argument
179 struct kbase_context *const kctx = heap->kctx; in init_chunk()
201 err = link_chunk(heap, chunk); in init_chunk()
[all …]
mali_kbase_csf_tiler_heap_debugfs.c
41 struct kbase_csf_tiler_heap *heap; in kbasep_csf_tiler_heap_debugfs_show() local
48 list_for_each_entry(heap, &tiler_heaps_p->list, link) { in kbasep_csf_tiler_heap_debugfs_show()
49 if (heap->kctx != kctx) in kbasep_csf_tiler_heap_debugfs_show()
52 seq_printf(file, "HEAP(gpu_va = 0x%llx):\n", heap->gpu_va); in kbasep_csf_tiler_heap_debugfs_show()
53 seq_printf(file, "\tchunk_size = %u\n", heap->chunk_size); in kbasep_csf_tiler_heap_debugfs_show()
54 seq_printf(file, "\tchunk_count = %u\n", heap->chunk_count); in kbasep_csf_tiler_heap_debugfs_show()
55 seq_printf(file, "\tmax_chunks = %u\n", heap->max_chunks); in kbasep_csf_tiler_heap_debugfs_show()
56 seq_printf(file, "\ttarget_in_flight = %u\n", heap->target_in_flight); in kbasep_csf_tiler_heap_debugfs_show()
58 list_for_each_entry(chunk, &heap->chunks_list, link) in kbasep_csf_tiler_heap_debugfs_show()
/OK3568_Linux_fs/kernel/drivers/dma-buf/rk_heaps/
rk-dma-heap.c
118 struct dma_buf *rk_dma_heap_buffer_alloc(struct rk_dma_heap *heap, size_t len, in rk_dma_heap_buffer_alloc() argument
138 dmabuf = heap->ops->allocate(heap, len, fd_flags, heap_flags, name); in rk_dma_heap_buffer_alloc()
147 int rk_dma_heap_bufferfd_alloc(struct rk_dma_heap *heap, size_t len, in rk_dma_heap_bufferfd_alloc() argument
155 dmabuf = rk_dma_heap_buffer_alloc(heap, len, fd_flags, heap_flags, in rk_dma_heap_bufferfd_alloc()
172 struct page *rk_dma_heap_alloc_contig_pages(struct rk_dma_heap *heap, in rk_dma_heap_alloc_contig_pages() argument
175 if (!heap->support_cma) { in rk_dma_heap_alloc_contig_pages()
176 WARN_ON(!heap->support_cma); in rk_dma_heap_alloc_contig_pages()
184 return heap->ops->alloc_contig_pages(heap, len, name); in rk_dma_heap_alloc_contig_pages()
188 void rk_dma_heap_free_contig_pages(struct rk_dma_heap *heap, in rk_dma_heap_free_contig_pages() argument
192 if (!heap->support_cma) { in rk_dma_heap_free_contig_pages()
[all …]
rk-cma-heap.c
33 struct rk_dma_heap *heap; member
38 struct rk_cma_heap *heap; member
158 dma_sync_single_for_cpu(rk_dma_heap_get_dev(buffer->heap->heap), in rk_cma_heap_dma_buf_begin_cpu_access_partial()
188 dma_sync_single_for_device(rk_dma_heap_get_dev(buffer->heap->heap), in rk_cma_heap_dma_buf_end_cpu_access_partial()
281 struct rk_cma_heap *cma_heap = buffer->heap; in rk_cma_heap_remove_dmabuf_list()
282 struct rk_dma_heap *heap = cma_heap->heap; in rk_cma_heap_remove_dmabuf_list() local
284 mutex_lock(&heap->dmabuf_lock); in rk_cma_heap_remove_dmabuf_list()
285 list_for_each_entry(buf, &heap->dmabuf_list, node) { in rk_cma_heap_remove_dmabuf_list()
291 rk_dma_heap_get_name(heap)); in rk_cma_heap_remove_dmabuf_list()
297 mutex_unlock(&heap->dmabuf_lock); in rk_cma_heap_remove_dmabuf_list()
[all …]
rk-dma-heap.h
42 struct dma_buf *(*allocate)(struct rk_dma_heap *heap,
47 struct page *(*alloc_contig_pages)(struct rk_dma_heap *heap,
49 void (*free_contig_pages)(struct rk_dma_heap *heap,
52 long (*get_pool_size)(struct rk_dma_heap *heap);
122 void *rk_dma_heap_get_drvdata(struct rk_dma_heap *heap);
131 struct device *rk_dma_heap_get_dev(struct rk_dma_heap *heap);
140 const char *rk_dma_heap_get_name(struct rk_dma_heap *heap);
152 void rk_dma_heap_put(struct rk_dma_heap *heap);
167 void rk_dma_heap_total_inc(struct rk_dma_heap *heap, size_t len);
173 void rk_dma_heap_total_dec(struct rk_dma_heap *heap, size_t len);
/OK3568_Linux_fs/kernel/drivers/dma-buf/
dma-heap.c
79 struct dma_buf *dma_heap_buffer_alloc(struct dma_heap *heap, size_t len, in dma_heap_buffer_alloc() argument
96 return heap->ops->allocate(heap, len, fd_flags, heap_flags); in dma_heap_buffer_alloc()
100 int dma_heap_bufferfd_alloc(struct dma_heap *heap, size_t len, in dma_heap_bufferfd_alloc() argument
107 dmabuf = dma_heap_buffer_alloc(heap, len, fd_flags, heap_flags); in dma_heap_bufferfd_alloc()
124 struct dma_heap *heap; in dma_heap_open() local
126 heap = xa_load(&dma_heap_minors, iminor(inode)); in dma_heap_open()
127 if (!heap) { in dma_heap_open()
133 file->private_data = heap; in dma_heap_open()
142 struct dma_heap *heap = file->private_data; in dma_heap_ioctl_allocate() local
148 fd = dma_heap_bufferfd_alloc(heap, heap_allocation->len, in dma_heap_ioctl_allocate()
[all …]
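
The dma_heap_open()/dma_heap_ioctl_allocate() path above is the kernel side of the /dev/dma_heap character-device interface. Below is a minimal userspace sketch of the matching allocation call, assuming the standard uapi <linux/dma-heap.h> ioctl; the heap name "system" and the helper name alloc_from_heap() are illustrative, not taken from this tree.

/* Hedged userspace sketch: allocate a dma-buf fd from a named heap.
 * "system" is an assumption; substitute a heap that actually exists
 * under /dev/dma_heap/ on the target. */
#include <fcntl.h>
#include <stddef.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/dma-heap.h>

static int alloc_from_heap(size_t len)
{
	struct dma_heap_allocation_data data = {
		.len = len,
		.fd_flags = O_RDWR | O_CLOEXEC,	/* flags applied to the returned dma-buf fd */
	};
	int heap_fd = open("/dev/dma_heap/system", O_RDONLY | O_CLOEXEC);
	int ret;

	if (heap_fd < 0)
		return -1;
	ret = ioctl(heap_fd, DMA_HEAP_IOCTL_ALLOC, &data);
	close(heap_fd);
	return ret < 0 ? -1 : (int)data.fd;	/* dma-buf fd on success */
}
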
/OK3568_Linux_fs/kernel/include/linux/
ion.h
40 struct ion_heap *heap; member
66 int (*allocate)(struct ion_heap *heap,
70 int (*shrink)(struct ion_heap *heap, gfp_t gfp_mask, int nr_to_scan);
71 long (*get_pool_size)(struct ion_heap *heap);
147 #define ion_device_add_heap(heap) __ion_device_add_heap(heap, THIS_MODULE) argument
171 int __ion_device_add_heap(struct ion_heap *heap, struct module *owner);
178 void ion_device_remove_heap(struct ion_heap *heap);
188 int ion_heap_init_shrinker(struct ion_heap *heap);
198 int ion_heap_init_deferred_free(struct ion_heap *heap);
207 void ion_heap_freelist_add(struct ion_heap *heap, struct ion_buffer *buffer);
[all …]
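
The ion.h declarations above cover heap registration plus the shrinker and deferred-free helpers. The following is a minimal registration sketch against that interface, assuming ION_HEAP_TYPE_CUSTOM is available in this tree; the stubs example_ion_allocate()/example_ion_free() and the heap name "example" are illustrative only.

#include <linux/errno.h>
#include <linux/ion.h>
#include <linux/module.h>

/* Illustrative stub: a real heap would build buffer->sg_table here. */
static int example_ion_allocate(struct ion_heap *heap, struct ion_buffer *buffer,
				unsigned long len, unsigned long flags)
{
	return -ENOMEM;
}

static void example_ion_free(struct ion_buffer *buffer)
{
}

static struct ion_heap_ops example_ion_ops = {
	.allocate = example_ion_allocate,
	.free = example_ion_free,
};

static struct ion_heap example_ion_heap = {
	.ops = &example_ion_ops,
	.type = ION_HEAP_TYPE_CUSTOM,	/* assumed to exist in this tree's uapi */
	.name = "example",
};

static int __init example_ion_init(void)
{
	/* ion_device_add_heap() expands to __ion_device_add_heap(heap, THIS_MODULE). */
	return ion_device_add_heap(&example_ion_heap);
}
module_init(example_ion_init);
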
min_heap.h
35 void min_heapify(struct min_heap *heap, int pos, in min_heapify() argument
39 void *data = heap->data; in min_heapify()
42 if (pos * 2 + 1 >= heap->nr) in min_heapify()
51 if (pos * 2 + 2 < heap->nr) { in min_heapify()
68 void min_heapify_all(struct min_heap *heap, in min_heapify_all() argument
73 for (i = heap->nr / 2; i >= 0; i--) in min_heapify_all()
74 min_heapify(heap, i, func); in min_heapify_all()
79 void min_heap_pop(struct min_heap *heap, in min_heap_pop() argument
82 void *data = heap->data; in min_heap_pop()
84 if (WARN_ONCE(heap->nr <= 0, "Popping an empty heap")) in min_heap_pop()
[all …]
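
min_heap.h above is the kernel's small in-place binary min-heap helper, the same interface exercised by lib/test_min_heap.c further down. A minimal usage sketch follows, assuming the struct min_heap { data, nr, size } layout and callback struct implied by the excerpt; the integer comparator and min_heap_example() are illustrative.

#include <linux/kernel.h>
#include <linux/min_heap.h>

static bool less_than(const void *lhs, const void *rhs)
{
	return *(const int *)lhs < *(const int *)rhs;
}

static void swap_ints(void *lhs, void *rhs)
{
	int tmp = *(int *)lhs;

	*(int *)lhs = *(int *)rhs;
	*(int *)rhs = tmp;
}

static void min_heap_example(void)
{
	int values[] = { 7, 3, 9, 1, 5 };
	struct min_heap heap = {
		.data = values,
		.nr = ARRAY_SIZE(values),
		.size = ARRAY_SIZE(values),
	};
	const struct min_heap_callbacks funcs = {
		.elem_size = sizeof(int),
		.less = less_than,
		.swp = swap_ints,
	};

	min_heapify_all(&heap, &funcs);			/* build the heap in place */
	while (heap.nr > 0) {
		pr_info("min = %d\n", values[0]);	/* root holds the smallest element */
		min_heap_pop(&heap, &funcs);
	}
}
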
dma-heap.h
26 struct dma_buf *(*allocate)(struct dma_heap *heap,
30 long (*get_pool_size)(struct dma_heap *heap);
32 int (*get_phys)(struct dma_heap *heap, struct dma_heap_phys_data *phys);
57 void *dma_heap_get_drvdata(struct dma_heap *heap);
66 struct device *dma_heap_get_dev(struct dma_heap *heap);
75 const char *dma_heap_get_name(struct dma_heap *heap);
87 void dma_heap_put(struct dma_heap *heap);
107 struct dma_buf *dma_heap_buffer_alloc(struct dma_heap *heap, size_t len,
125 int dma_heap_bufferfd_alloc(struct dma_heap *heap, size_t len,
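
dma-heap.h above is the exporter-facing half of the dma-buf heaps framework (get_phys and get_pool_size appear to be vendor extensions to the mainline ops). A minimal registration sketch against the mainline part of that interface; example_dma_allocate() and the heap name "example" are assumptions, and the allocate stub does not actually export a buffer.

#include <linux/dma-buf.h>
#include <linux/dma-heap.h>
#include <linux/err.h>
#include <linux/module.h>

/* Illustrative stub: a real heap allocates backing memory and wraps it in a
 * dma_buf via DEFINE_DMA_BUF_EXPORT_INFO()/dma_buf_export(). */
static struct dma_buf *example_dma_allocate(struct dma_heap *heap, unsigned long len,
					    unsigned long fd_flags,
					    unsigned long heap_flags)
{
	return ERR_PTR(-ENOMEM);
}

static const struct dma_heap_ops example_dma_heap_ops = {
	.allocate = example_dma_allocate,
};

static int __init example_dma_heap_init(void)
{
	struct dma_heap_export_info exp_info = {
		.name = "example",
		.ops = &example_dma_heap_ops,
		.priv = NULL,
	};
	struct dma_heap *heap = dma_heap_add(&exp_info);

	return IS_ERR(heap) ? PTR_ERR(heap) : 0;
}
module_init(example_dma_heap_init);
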
rk-dma-heap.h
54 struct dma_buf *rk_dma_heap_buffer_alloc(struct rk_dma_heap *heap, size_t len,
66 int rk_dma_heap_bufferfd_alloc(struct rk_dma_heap *heap, size_t len,
77 struct page *rk_dma_heap_alloc_contig_pages(struct rk_dma_heap *heap,
87 void rk_dma_heap_free_contig_pages(struct rk_dma_heap *heap, struct page *pages,
110 static inline struct dma_buf *rk_dma_heap_buffer_alloc(struct rk_dma_heap *heap, size_t len, in rk_dma_heap_buffer_alloc() argument
118 static inline int rk_dma_heap_bufferfd_alloc(struct rk_dma_heap *heap, size_t len, in rk_dma_heap_bufferfd_alloc() argument
126 static inline struct page *rk_dma_heap_alloc_contig_pages(struct rk_dma_heap *heap, in rk_dma_heap_alloc_contig_pages() argument
132 static inline void rk_dma_heap_free_contig_pages(struct rk_dma_heap *heap, struct page *pages, in rk_dma_heap_free_contig_pages() argument
/OK3568_Linux_fs/kernel/lib/
test_min_heap.c
33 struct min_heap *heap, in pop_verify_heap() argument
36 int *values = heap->data; in pop_verify_heap()
41 min_heap_pop(heap, funcs); in pop_verify_heap()
42 while (heap->nr > 0) { in pop_verify_heap()
57 min_heap_pop(heap, funcs); in pop_verify_heap()
66 struct min_heap heap = { in test_heapify_all() local
79 min_heapify_all(&heap, &funcs); in test_heapify_all()
80 err = pop_verify_heap(min_heap, &heap, &funcs); in test_heapify_all()
84 heap.nr = ARRAY_SIZE(values); in test_heapify_all()
85 for (i = 0; i < heap.nr; i++) in test_heapify_all()
[all …]
/OK3568_Linux_fs/u-boot/lib/bzip2/
bzlib_huffman.c
77 zz = z; tmp = heap[zz]; \
78 while (weight[tmp] < weight[heap[zz >> 1]]) { \
79 heap[zz] = heap[zz >> 1]; \
82 heap[zz] = tmp; \
88 zz = z; tmp = heap[zz]; \
93 weight[heap[yy+1]] < weight[heap[yy]]) \
95 if (weight[tmp] < weight[heap[yy]]) break; \
96 heap[zz] = heap[yy]; \
99 heap[zz] = tmp; \
116 Int32 heap [ BZ_MAX_ALPHA_SIZE + 2 ]; in BZ2_hbMakeCodeLengths() local
[all …]
/OK3568_Linux_fs/u-boot/fs/ubifs/
lprops.c
54 static void move_up_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in move_up_lpt_heap() argument
67 val2 = get_heap_comp_val(heap->arr[ppos], cat); in move_up_lpt_heap()
71 heap->arr[ppos]->hpos = hpos; in move_up_lpt_heap()
72 heap->arr[hpos] = heap->arr[ppos]; in move_up_lpt_heap()
73 heap->arr[ppos] = lprops; in move_up_lpt_heap()
91 static void adjust_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in adjust_lpt_heap() argument
101 val2 = get_heap_comp_val(heap->arr[ppos], cat); in adjust_lpt_heap()
105 heap->arr[ppos]->hpos = hpos; in adjust_lpt_heap()
106 heap->arr[hpos] = heap->arr[ppos]; in adjust_lpt_heap()
107 heap->arr[ppos] = lprops; in adjust_lpt_heap()
[all …]
/OK3568_Linux_fs/kernel/fs/ubifs/
lprops.c
50 static void move_up_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in move_up_lpt_heap() argument
63 val2 = get_heap_comp_val(heap->arr[ppos], cat); in move_up_lpt_heap()
67 heap->arr[ppos]->hpos = hpos; in move_up_lpt_heap()
68 heap->arr[hpos] = heap->arr[ppos]; in move_up_lpt_heap()
69 heap->arr[ppos] = lprops; in move_up_lpt_heap()
87 static void adjust_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in adjust_lpt_heap() argument
97 val2 = get_heap_comp_val(heap->arr[ppos], cat); in adjust_lpt_heap()
101 heap->arr[ppos]->hpos = hpos; in adjust_lpt_heap()
102 heap->arr[hpos] = heap->arr[ppos]; in adjust_lpt_heap()
103 heap->arr[ppos] = lprops; in adjust_lpt_heap()
[all …]
/OK3568_Linux_fs/kernel/drivers/staging/android/ion/heaps/
ion_system_heap.c
45 struct ion_heap heap; member
49 static struct page *alloc_buffer_page(struct ion_system_heap *heap, in alloc_buffer_page() argument
53 struct ion_page_pool *pool = heap->pools[order_to_index(order)]; in alloc_buffer_page()
58 static void free_buffer_page(struct ion_system_heap *heap, in free_buffer_page() argument
70 pool = heap->pools[order_to_index(order)]; in free_buffer_page()
75 static struct page *alloc_largest_available(struct ion_system_heap *heap, in alloc_largest_available() argument
89 page = alloc_buffer_page(heap, buffer, orders[i]); in alloc_largest_available()
99 static int ion_system_heap_allocate(struct ion_heap *heap, in ion_system_heap_allocate() argument
104 struct ion_system_heap *sys_heap = container_of(heap, in ion_system_heap_allocate()
106 heap); in ion_system_heap_allocate()
[all …]
ion_cma_heap.c
19 struct ion_heap heap; member
23 #define to_cma_heap(x) container_of(x, struct ion_cma_heap, heap)
26 static int ion_cma_allocate(struct ion_heap *heap, struct ion_buffer *buffer, in ion_cma_allocate() argument
30 struct ion_cma_heap *cma_heap = to_cma_heap(heap); in ion_cma_allocate()
87 struct ion_cma_heap *cma_heap = to_cma_heap(buffer->heap); in ion_cma_free()
113 cma_heap->heap.ops = &ion_cma_ops; in __ion_add_cma_heap()
114 cma_heap->heap.type = ION_HEAP_TYPE_DMA; in __ion_add_cma_heap()
115 cma_heap->heap.name = cma_get_name(cma); in __ion_add_cma_heap()
117 ret = ion_device_add_heap(&cma_heap->heap); in __ion_add_cma_heap()
135 ion_device_remove_heap(&cma_heaps[nr].heap); in ion_cma_heap_init()
[all …]
/OK3568_Linux_fs/external/rknpu2/examples/3rdparty/opencv/opencv-linux-aarch64/include/opencv2/flann/
heap.h
55 std::vector<T> heap; variable
76 heap.reserve(length); in Heap()
104 heap.clear(); in clear()
132 heap.push_back(value); in insert()
134 std::push_heap(heap.begin(), heap.end(), compareT); in insert()
153 value = heap[0]; in popMin()
155 std::pop_heap(heap.begin(), heap.end(), compareT); in popMin()
156 heap.pop_back(); in popMin()
/OK3568_Linux_fs/kernel/drivers/dma-buf/heaps/
rk_cma_heap.c
30 struct dma_heap *heap; member
35 struct cma_heap *heap; member
154 dma_sync_single_for_cpu(dma_heap_get_dev(buffer->heap->heap), in cma_heap_dma_buf_begin_cpu_access_partial()
179 dma_sync_single_for_device(dma_heap_get_dev(buffer->heap->heap), in cma_heap_dma_buf_end_cpu_access_partial()
316 struct cma_heap *cma_heap = buffer->heap; in cma_heap_dma_buf_release()
347 static struct dma_buf *cma_heap_do_allocate(struct dma_heap *heap, in cma_heap_do_allocate() argument
352 struct cma_heap *cma_heap = dma_heap_get_drvdata(heap); in cma_heap_do_allocate()
414 buffer->heap = cma_heap; in cma_heap_do_allocate()
418 exp_info.exp_name = dma_heap_get_name(heap); in cma_heap_do_allocate()
430 dma = dma_map_page(dma_heap_get_dev(heap), buffer->cma_pages, 0, in cma_heap_do_allocate()
[all …]
/OK3568_Linux_fs/kernel/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
255 nvkm_mmu_type(struct nvkm_mmu *mmu, int heap, u8 type) in nvkm_mmu_type() argument
257 if (heap >= 0 && !WARN_ON(mmu->type_nr == ARRAY_SIZE(mmu->type))) { in nvkm_mmu_type()
258 mmu->type[mmu->type_nr].type = type | mmu->heap[heap].type; in nvkm_mmu_type()
259 mmu->type[mmu->type_nr].heap = heap; in nvkm_mmu_type()
268 if (!WARN_ON(mmu->heap_nr == ARRAY_SIZE(mmu->heap))) { in nvkm_mmu_heap()
269 mmu->heap[mmu->heap_nr].type = type; in nvkm_mmu_heap()
270 mmu->heap[mmu->heap_nr].size = size; in nvkm_mmu_heap()
282 int heap; in nvkm_mmu_host() local
285 heap = nvkm_mmu_heap(mmu, NVKM_MEM_HOST, ~0ULL); in nvkm_mmu_host()
286 nvkm_mmu_type(mmu, heap, type); in nvkm_mmu_host()
[all …]
/OK3568_Linux_fs/kernel/drivers/gpu/drm/nouveau/include/nvkm/core/
mm.h
12 u8 heap; member
34 int nvkm_mm_init(struct nvkm_mm *, u8 heap, u32 offset, u32 length, u32 block);
36 int nvkm_mm_head(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
38 int nvkm_mm_tail(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
44 nvkm_mm_heap_size(struct nvkm_mm *mm, u8 heap) in nvkm_mm_heap_size() argument
49 if (node->heap == heap) in nvkm_mm_heap_size()
