Lines Matching refs:heap
33 struct rk_dma_heap *heap; member
38 struct rk_cma_heap *heap; member
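The two heap members at lines 33 and 38 chain the CMA wrapper to the generic Rockchip heap core: the per-heap driver data holds the registered rk_dma_heap, and each exported buffer points back at its owning rk_cma_heap, which is why ops reach the core via buffer->heap->heap (line 158). A minimal sketch of how these structures plausibly nest; only the two heap pointers are confirmed by the listing, the buffer type name and remaining fields are assumptions inferred from later matches:

struct rk_cma_heap {
	struct rk_dma_heap *heap;	/* core handle, set from rk_dma_heap_add() at line 593 */
	struct cma *cma;		/* assumed: backing CMA region */
};

struct rk_cma_heap_buffer {		/* type name assumed */
	struct rk_cma_heap *heap;	/* owning heap, assigned at line 488 */
	struct page *cma_pages;		/* assumed: pages returned by cma_alloc() */
	pgoff_t pagecount;		/* assumed */
	unsigned long len;		/* accounted at lines 401/512 */
	phys_addr_t phys;		/* synced at lines 158/504 */
};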
158 dma_sync_single_for_cpu(rk_dma_heap_get_dev(buffer->heap->heap), in rk_cma_heap_dma_buf_begin_cpu_access_partial()
188 dma_sync_single_for_device(rk_dma_heap_get_dev(buffer->heap->heap), in rk_cma_heap_dma_buf_end_cpu_access_partial()
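Lines 158 and 188 are the cache-maintenance halves of the partial CPU-access dma-buf callbacks; both reach the heap's struct device through buffer->heap->heap. A hedged sketch of the begin half, assuming the (dmabuf, direction, offset, len) parameter list used by the partial-access hooks in Android common kernels; only the dma_sync_single_for_cpu() call and its device argument are confirmed by the listing:

static int
rk_cma_heap_dma_buf_begin_cpu_access_partial(struct dma_buf *dmabuf,
					     enum dma_data_direction dir,
					     unsigned int offset,
					     unsigned int len)
{
	struct rk_cma_heap_buffer *buffer = dmabuf->priv;	/* assumed */

	/* Line 158: hand only the requested window back to the CPU. */
	dma_sync_single_for_cpu(rk_dma_heap_get_dev(buffer->heap->heap),
				buffer->phys + offset, len, dir);

	return 0;
}

The end hook at line 188 mirrors this with dma_sync_single_for_device(), returning the window to the device.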
281 struct rk_cma_heap *cma_heap = buffer->heap; in rk_cma_heap_remove_dmabuf_list()
282 struct rk_dma_heap *heap = cma_heap->heap; in rk_cma_heap_remove_dmabuf_list() local
284 mutex_lock(&heap->dmabuf_lock); in rk_cma_heap_remove_dmabuf_list()
285 list_for_each_entry(buf, &heap->dmabuf_list, node) { in rk_cma_heap_remove_dmabuf_list()
291 rk_dma_heap_get_name(heap)); in rk_cma_heap_remove_dmabuf_list()
297 mutex_unlock(&heap->dmabuf_lock); in rk_cma_heap_remove_dmabuf_list()
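Lines 281-297 give the shape of rk_cma_heap_remove_dmabuf_list(): derive the core heap from the buffer, then walk heap->dmabuf_list under heap->dmabuf_lock and unlink the tracking node for this buffer. The match and unlink steps below are a reconstruction; the node type rk_dma_heap_buf, the phys-based match key, and the log message are assumptions (line 291 only shows rk_dma_heap_get_name(heap) feeding some print):

struct rk_dma_heap_buf {	/* assumed tracking-node type, shared by both lists */
	struct list_head node;
	phys_addr_t start;
	phys_addr_t end;
};

static void rk_cma_heap_remove_dmabuf_list(struct rk_cma_heap_buffer *buffer)
{
	struct rk_cma_heap *cma_heap = buffer->heap;		/* line 281 */
	struct rk_dma_heap *heap = cma_heap->heap;		/* line 282 */
	struct rk_dma_heap_buf *buf;

	mutex_lock(&heap->dmabuf_lock);				/* line 284 */
	list_for_each_entry(buf, &heap->dmabuf_list, node) {	/* line 285 */
		if (buf->start == buffer->phys) {		/* assumed match key */
			pr_debug("remove dmabuf from %s\n",
				 rk_dma_heap_get_name(heap));	/* line 291 */
			list_del(&buf->node);
			kfree(buf);
			break;
		}
	}
	mutex_unlock(&heap->dmabuf_lock);			/* line 297 */
}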
304 struct rk_cma_heap *cma_heap = buffer->heap; in rk_cma_heap_add_dmabuf_list()
305 struct rk_dma_heap *heap = cma_heap->heap; in rk_cma_heap_add_dmabuf_list() local
315 mutex_lock(&heap->dmabuf_lock); in rk_cma_heap_add_dmabuf_list()
316 list_add_tail(&buf->node, &heap->dmabuf_list); in rk_cma_heap_add_dmabuf_list()
317 mutex_unlock(&heap->dmabuf_lock); in rk_cma_heap_add_dmabuf_list()
321 &buf->start, &buf->end, rk_dma_heap_get_name(heap)); in rk_cma_heap_add_dmabuf_list()
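The add side at lines 304-321 is the mirror image: allocate a tracking node, record the buffer's physical range, append it under the same lock, and log the range with the heap name. The print at line 321 passes &buf->start and &buf->end, which is what pins start/end as phys_addr_t in the node sketched above. The allocation and range arithmetic below are reconstructions:

static int rk_cma_heap_add_dmabuf_list(struct rk_cma_heap_buffer *buffer)
{
	struct rk_cma_heap *cma_heap = buffer->heap;		/* line 304 */
	struct rk_dma_heap *heap = cma_heap->heap;		/* line 305 */
	struct rk_dma_heap_buf *buf;

	buf = kzalloc(sizeof(*buf), GFP_KERNEL);		/* assumed */
	if (!buf)
		return -ENOMEM;

	buf->start = buffer->phys;				/* assumed */
	buf->end = buf->start + buffer->len - 1;		/* assumed */

	mutex_lock(&heap->dmabuf_lock);				/* line 315 */
	list_add_tail(&buf->node, &heap->dmabuf_list);		/* line 316 */
	mutex_unlock(&heap->dmabuf_lock);			/* line 317 */

	pr_debug("add dmabuf %pa-%pa to %s\n",
		 &buf->start, &buf->end, rk_dma_heap_get_name(heap));	/* line 321 */

	return 0;
}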
326 static int rk_cma_heap_remove_contig_list(struct rk_dma_heap *heap, in rk_cma_heap_remove_contig_list() argument
331 mutex_lock(&heap->contig_lock); in rk_cma_heap_remove_contig_list()
332 list_for_each_entry(buf, &heap->contig_list, node) { in rk_cma_heap_remove_contig_list()
336 rk_dma_heap_get_name(heap)); in rk_cma_heap_remove_contig_list()
343 mutex_unlock(&heap->contig_lock); in rk_cma_heap_remove_contig_list()
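rk_cma_heap_remove_contig_list() (lines 326-343) repeats the same pattern against the second list, heap->contig_list under heap->contig_lock, but is keyed by the struct page handed out by the contiguous-page API. The parameter list is inferred from the call at line 563; the page_to_phys() match and the use of name in the log are assumptions:

static int rk_cma_heap_remove_contig_list(struct rk_dma_heap *heap,	/* line 326 */
					  struct page *page,
					  const char *name)
{
	struct rk_dma_heap_buf *buf;

	mutex_lock(&heap->contig_lock);				/* line 331 */
	list_for_each_entry(buf, &heap->contig_list, node) {	/* line 332 */
		if (buf->start == page_to_phys(page)) {		/* assumed match key */
			pr_debug("remove contig buf %s from %s\n", name,
				 rk_dma_heap_get_name(heap));	/* line 336 */
			list_del(&buf->node);
			kfree(buf);
			break;
		}
	}
	mutex_unlock(&heap->contig_lock);			/* line 343 */

	return 0;
}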
348 static int rk_cma_heap_add_contig_list(struct rk_dma_heap *heap, in rk_cma_heap_add_contig_list() argument
373 mutex_lock(&heap->contig_lock); in rk_cma_heap_add_contig_list()
374 list_add_tail(&buf->node, &heap->contig_list); in rk_cma_heap_add_contig_list()
375 mutex_unlock(&heap->contig_lock); in rk_cma_heap_add_contig_list()
379 rk_dma_heap_get_name(heap)); in rk_cma_heap_add_contig_list()
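Its counterpart at lines 348-379 registers a newly allocated contiguous range. Everything between the prototype at line 348 and the lock at line 373 is elided, so the node setup below is a guess, while the lock/append/unlock sequence and the closing print come from the listing; the four-argument form matches the call at line 545:

static int rk_cma_heap_add_contig_list(struct rk_dma_heap *heap,	/* line 348 */
				       struct page *page, size_t size,
				       const char *name)
{
	struct rk_dma_heap_buf *buf;

	buf = kzalloc(sizeof(*buf), GFP_KERNEL);		/* assumed */
	if (!buf)
		return -ENOMEM;

	buf->start = page_to_phys(page);			/* assumed */
	buf->end = buf->start + size - 1;			/* assumed */

	mutex_lock(&heap->contig_lock);				/* line 373 */
	list_add_tail(&buf->node, &heap->contig_list);		/* line 374 */
	mutex_unlock(&heap->contig_lock);			/* line 375 */

	pr_debug("add contig buf %s to %s\n", name,
		 rk_dma_heap_get_name(heap));			/* line 379 */

	return 0;
}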
387 struct rk_cma_heap *cma_heap = buffer->heap; in rk_cma_heap_dma_buf_release()
388 struct rk_dma_heap *heap = cma_heap->heap; in rk_cma_heap_dma_buf_release() local
401 rk_dma_heap_total_dec(heap, buffer->len); in rk_cma_heap_dma_buf_release()
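The release callback at lines 387-401 tears a buffer down: it recovers both heap levels from the buffer and ends by debiting the heap-wide accounting with rk_dma_heap_total_dec(). Lines 389-400 are elided, so the unlisting and CMA release below are plausible but unconfirmed steps, using the assumed cma_pages/pagecount fields from the struct sketch above:

static void rk_cma_heap_dma_buf_release(struct dma_buf *dmabuf)
{
	struct rk_cma_heap_buffer *buffer = dmabuf->priv;	/* assumed */
	struct rk_cma_heap *cma_heap = buffer->heap;		/* line 387 */
	struct rk_dma_heap *heap = cma_heap->heap;		/* line 388 */

	rk_cma_heap_remove_dmabuf_list(buffer);			/* assumed pairing with line 281 */
	cma_release(cma_heap->cma, buffer->cma_pages,
		    buffer->pagecount);				/* assumed */

	rk_dma_heap_total_dec(heap, buffer->len);		/* line 401 */
	kfree(buffer);
}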
422 static struct dma_buf *rk_cma_heap_allocate(struct rk_dma_heap *heap, in rk_cma_heap_allocate() argument
428 struct rk_cma_heap *cma_heap = rk_dma_heap_get_drvdata(heap); in rk_cma_heap_allocate()
488 buffer->heap = cma_heap; in rk_cma_heap_allocate()
492 exp_info.exp_name = rk_dma_heap_get_name(heap); in rk_cma_heap_allocate()
504 dma_sync_single_for_cpu(rk_dma_heap_get_dev(heap), buffer->phys, in rk_cma_heap_allocate()
512 rk_dma_heap_total_inc(heap, buffer->len); in rk_cma_heap_allocate()
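rk_cma_heap_allocate() (lines 422-512) is the dma-heap allocation entry point. The listing confirms five waypoints: fetch the driver data (428), bind the buffer to its heap (488), export under the heap's name (492), do a CPU-side cache sync over the buffer (504), and credit the accounting (512). A condensed sketch threading those waypoints together; the parameter list, the cma_alloc() backing, the ops-table name, the sync direction, and the error unwinding are all assumptions:

static struct dma_buf *rk_cma_heap_allocate(struct rk_dma_heap *heap,	/* line 422 */
					    unsigned long len,
					    unsigned long fd_flags,
					    unsigned long heap_flags)
{
	struct rk_cma_heap *cma_heap = rk_dma_heap_get_drvdata(heap);	/* line 428 */
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);
	struct rk_cma_heap_buffer *buffer;
	struct dma_buf *dmabuf;
	pgoff_t pagecount = PAGE_ALIGN(len) >> PAGE_SHIFT;

	buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);
	if (!buffer)
		return ERR_PTR(-ENOMEM);

	buffer->cma_pages = cma_alloc(cma_heap->cma, pagecount, 0, false);	/* assumed */
	if (!buffer->cma_pages) {
		kfree(buffer);
		return ERR_PTR(-ENOMEM);
	}

	buffer->pagecount = pagecount;
	buffer->phys = page_to_phys(buffer->cma_pages);
	buffer->len = len;
	buffer->heap = cma_heap;					/* line 488 */

	exp_info.exp_name = rk_dma_heap_get_name(heap);			/* line 492 */
	exp_info.ops = &rk_cma_heap_buf_ops;				/* assumed name */
	exp_info.size = len;
	exp_info.flags = fd_flags;
	exp_info.priv = buffer;

	dmabuf = dma_buf_export(&exp_info);
	if (IS_ERR(dmabuf))
		goto free_cma;						/* unwind elided in listing */

	dma_sync_single_for_cpu(rk_dma_heap_get_dev(heap), buffer->phys,	/* line 504 */
				buffer->len, DMA_FROM_DEVICE);		/* direction assumed */

	rk_dma_heap_total_inc(heap, buffer->len);			/* line 512 */
	return dmabuf;

free_cma:
	cma_release(cma_heap->cma, buffer->cma_pages, pagecount);
	kfree(buffer);
	return ERR_CAST(dmabuf);
}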
528 static struct page *rk_cma_heap_allocate_pages(struct rk_dma_heap *heap, in rk_cma_heap_allocate_pages() argument
531 struct rk_cma_heap *cma_heap = rk_dma_heap_get_drvdata(heap); in rk_cma_heap_allocate_pages()
545 ret = rk_cma_heap_add_contig_list(heap, page, size, name); in rk_cma_heap_allocate_pages()
551 rk_dma_heap_total_inc(heap, size); in rk_cma_heap_allocate_pages()
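Besides dma-buf export, the heap offers a raw contiguous-page path. Lines 528-551 show its skeleton almost completely: get the driver data, register the range on the contig list, and credit the accounting; only the allocation itself and the failure handling are elided. A sketch assuming a cma_alloc() backing and the (heap, size, name) parameters implied by the calls at lines 545 and 551:

static struct page *rk_cma_heap_allocate_pages(struct rk_dma_heap *heap,	/* line 528 */
					       size_t size, const char *name)
{
	struct rk_cma_heap *cma_heap = rk_dma_heap_get_drvdata(heap);	/* line 531 */
	pgoff_t pagecount = PAGE_ALIGN(size) >> PAGE_SHIFT;
	struct page *page;
	int ret;

	page = cma_alloc(cma_heap->cma, pagecount, 0, false);		/* assumed */
	if (!page)
		return NULL;

	ret = rk_cma_heap_add_contig_list(heap, page, size, name);	/* line 545 */
	if (ret) {
		cma_release(cma_heap->cma, page, pagecount);		/* assumed unwind */
		return NULL;
	}

	rk_dma_heap_total_inc(heap, size);				/* line 551 */
	return page;
}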
556 static void rk_cma_heap_free_pages(struct rk_dma_heap *heap, in rk_cma_heap_free_pages() argument
560 struct rk_cma_heap *cma_heap = rk_dma_heap_get_drvdata(heap); in rk_cma_heap_free_pages()
563 rk_cma_heap_remove_contig_list(heap, page, name); in rk_cma_heap_free_pages()
567 rk_dma_heap_total_dec(heap, len); in rk_cma_heap_free_pages()
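The matching free path at lines 556-567 is nearly fully visible: unregister from the contig list, return the pages, and debit the accounting. Only the cma_release() call and the exact parameter names are assumed:

static void rk_cma_heap_free_pages(struct rk_dma_heap *heap,		/* line 556 */
				   struct page *page, size_t len,
				   const char *name)
{
	struct rk_cma_heap *cma_heap = rk_dma_heap_get_drvdata(heap);	/* line 560 */

	rk_cma_heap_remove_contig_list(heap, page, name);		/* line 563 */
	cma_release(cma_heap->cma, page,
		    PAGE_ALIGN(len) >> PAGE_SHIFT);			/* assumed */
	rk_dma_heap_total_dec(heap, len);				/* line 567 */
}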
593 cma_heap->heap = rk_dma_heap_add(&exp_info); in __rk_add_cma_heap()
594 if (IS_ERR(cma_heap->heap)) { in __rk_add_cma_heap()
595 int ret = PTR_ERR(cma_heap->heap); in __rk_add_cma_heap()
601 if (cma_heap->heap->procfs) in __rk_add_cma_heap()
602 proc_create_single_data("alloc_bitmap", 0, cma_heap->heap->procfs, in __rk_add_cma_heap()
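Finally, lines 593-602 show heap registration in __rk_add_cma_heap(): rk_dma_heap_add() returns the core heap handle (stored in the member from line 33), failure is propagated via PTR_ERR(), and when the core created a procfs directory an alloc_bitmap entry is hung off it. A sketch of the surrounding glue; the function's own signature, the export-info type and its field names, and the seq_file show routine rk_cma_heap_proc_show are hypothetical:

static int __rk_add_cma_heap(struct cma *cma, void *data)		/* signature assumed */
{
	struct rk_cma_heap *cma_heap;
	struct rk_dma_heap_export_info exp_info = { 0 };		/* type name assumed */

	cma_heap = kzalloc(sizeof(*cma_heap), GFP_KERNEL);
	if (!cma_heap)
		return -ENOMEM;
	cma_heap->cma = cma;

	exp_info.name = cma_get_name(cma);				/* assumed */
	exp_info.ops = &rk_cma_heap_ops;				/* assumed name */
	exp_info.priv = cma_heap;

	cma_heap->heap = rk_dma_heap_add(&exp_info);			/* line 593 */
	if (IS_ERR(cma_heap->heap)) {					/* line 594 */
		int ret = PTR_ERR(cma_heap->heap);			/* line 595 */

		kfree(cma_heap);
		return ret;
	}

	if (cma_heap->heap->procfs)					/* line 601 */
		proc_create_single_data("alloc_bitmap", 0,
					cma_heap->heap->procfs,		/* line 602 */
					rk_cma_heap_proc_show,		/* hypothetical show fn */
					cma_heap);

	return 0;
}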