Lines matching refs:heap

Cross-reference listing for the identifier heap in the Rockchip rk_dma_heap driver. Each entry gives the source line number, the matching code fragment (statements are truncated where they continue onto lines that do not mention heap), the enclosing function, and whether heap is an argument or a local variable there.

118 struct dma_buf *rk_dma_heap_buffer_alloc(struct rk_dma_heap *heap, size_t len,  in rk_dma_heap_buffer_alloc()  argument
138 dmabuf = heap->ops->allocate(heap, len, fd_flags, heap_flags, name); in rk_dma_heap_buffer_alloc()
147 int rk_dma_heap_bufferfd_alloc(struct rk_dma_heap *heap, size_t len, in rk_dma_heap_bufferfd_alloc() argument
155 dmabuf = rk_dma_heap_buffer_alloc(heap, len, fd_flags, heap_flags, in rk_dma_heap_bufferfd_alloc()
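The two fragments above (lines 118-155) form the dma-buf allocation path: rk_dma_heap_buffer_alloc() delegates to heap->ops->allocate(), and rk_dma_heap_bufferfd_alloc() wraps the resulting buffer in a file descriptor. A minimal in-kernel caller sketch; the heap name, the flag values, and the trailing name argument's role are assumptions, while rk_dma_heap_find()/rk_dma_heap_put() are taken from lines 406 and 410 below:

#include <linux/fcntl.h>        /* O_RDWR, O_CLOEXEC */
/* plus the driver's own header for the rk_dma_heap_* prototypes */

static int example_alloc_fd(size_t len)
{
        struct rk_dma_heap *heap;
        int fd;

        heap = rk_dma_heap_find("rk-dma-heap-cma");     /* hypothetical heap name */
        if (!heap)
                return -ENODEV;

        fd = rk_dma_heap_bufferfd_alloc(heap, len, O_RDWR | O_CLOEXEC, 0,
                                        "example-buffer");

        rk_dma_heap_put(heap);  /* drop the reference taken by _find() */
        return fd;              /* dma-buf fd on success, negative errno otherwise */
}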
172 struct page *rk_dma_heap_alloc_contig_pages(struct rk_dma_heap *heap, in rk_dma_heap_alloc_contig_pages() argument
175 if (!heap->support_cma) { in rk_dma_heap_alloc_contig_pages()
176 WARN_ON(!heap->support_cma); in rk_dma_heap_alloc_contig_pages()
184 return heap->ops->alloc_contig_pages(heap, len, name); in rk_dma_heap_alloc_contig_pages()
188 void rk_dma_heap_free_contig_pages(struct rk_dma_heap *heap, in rk_dma_heap_free_contig_pages() argument
192 if (!heap->support_cma) { in rk_dma_heap_free_contig_pages()
193 WARN_ON(!heap->support_cma); in rk_dma_heap_free_contig_pages()
197 return heap->ops->free_contig_pages(heap, pages, len, name); in rk_dma_heap_free_contig_pages()
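Lines 172-197 are the physically contiguous allocation pair. Both helpers guard with WARN_ON(!heap->support_cma) and bail out (the early return itself does not mention heap, so the filter drops it), meaning they are only valid on CMA-backed heaps. A sketch of pairing them; whether len counts bytes or pages is not visible in the listing, and the sketch only requires that alloc and free agree:

static int example_contig(struct rk_dma_heap *heap, size_t len)
{
        struct page *pages;

        if (!heap->support_cma)         /* mirrors the driver's own guard */
                return -EINVAL;

        pages = rk_dma_heap_alloc_contig_pages(heap, len, "example");
        if (IS_ERR_OR_NULL(pages))      /* NULL-vs-ERR_PTR convention assumed */
                return pages ? PTR_ERR(pages) : -ENOMEM;

        /* ... use the physically contiguous range ... */

        rk_dma_heap_free_contig_pages(heap, pages, len, "example");
        return 0;
}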
201 void rk_dma_heap_total_inc(struct rk_dma_heap *heap, size_t len) in rk_dma_heap_total_inc() argument
204 heap->total_size += len; in rk_dma_heap_total_inc()
208 void rk_dma_heap_total_dec(struct rk_dma_heap *heap, size_t len) in rk_dma_heap_total_dec() argument
211 if (WARN_ON(heap->total_size < len)) in rk_dma_heap_total_dec()
212 heap->total_size = 0; in rk_dma_heap_total_dec()
214 heap->total_size -= len; in rk_dma_heap_total_dec()
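Because a refs:heap listing drops every line that does not mention heap, rk_dma_heap_total_dec appears to underflow as shown: line 212 clamps total_size to 0, yet line 214 still subtracts len. A bare return between them (invisible to the filter) is presumably present in the source; the accounting pair reconstructed under that assumption, with any locking likewise elided:

void rk_dma_heap_total_inc(struct rk_dma_heap *heap, size_t len)
{
        heap->total_size += len;
}

void rk_dma_heap_total_dec(struct rk_dma_heap *heap, size_t len)
{
        if (WARN_ON(heap->total_size < len)) {
                heap->total_size = 0;   /* clamp rather than underflow */
                return;                 /* assumed: filtered out of the listing */
        }
        heap->total_size -= len;
}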
220 struct rk_dma_heap *heap; in rk_dma_heap_open() local
222 heap = xa_load(&rk_dma_heap_minors, iminor(inode)); in rk_dma_heap_open()
223 if (!heap) { in rk_dma_heap_open()
229 file->private_data = heap; in rk_dma_heap_open()
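Lines 220-229 are the char-device open path: the device minor indexes the rk_dma_heap_minors xarray and the matching heap becomes file->private_data for the ioctl path at line 238. A reconstruction; the error code and the nonseekable_open() call (the upstream dma_heap.c convention) are assumptions:

static int rk_dma_heap_open(struct inode *inode, struct file *file)
{
        struct rk_dma_heap *heap;

        heap = xa_load(&rk_dma_heap_minors, iminor(inode));
        if (!heap) {
                pr_err("rk_dma_heap: minor %d unknown\n", iminor(inode));
                return -ENODEV;         /* assumed error code */
        }

        file->private_data = heap;
        nonseekable_open(inode, file);
        return 0;
}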
238 struct rk_dma_heap *heap = file->private_data; in rk_dma_heap_ioctl_allocate() local
244 fd = rk_dma_heap_bufferfd_alloc(heap, heap_allocation->len, in rk_dma_heap_ioctl_allocate()
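The allocate ioctl (lines 238-244) pulls the heap back out of file->private_data and forwards the userspace request to rk_dma_heap_bufferfd_alloc(). A sketch; the rk_dma_heap_allocation_data layout below is modeled on the upstream dma-heap UAPI and is an assumption, as is how the name argument is sourced:

struct rk_dma_heap_allocation_data {    /* assumed layout */
        __u64 len;
        __u32 fd;                       /* out parameter */
        __u32 fd_flags;
        __u64 heap_flags;
};

static long rk_dma_heap_ioctl_allocate(struct file *file, void *data)
{
        struct rk_dma_heap_allocation_data *heap_allocation = data;
        struct rk_dma_heap *heap = file->private_data;
        int fd;

        if (heap_allocation->fd)
                return -EINVAL;         /* fd must come back from the kernel */

        fd = rk_dma_heap_bufferfd_alloc(heap, heap_allocation->len,
                                        heap_allocation->fd_flags,
                                        heap_allocation->heap_flags,
                                        NULL /* name source not in the listing */);
        if (fd < 0)
                return fd;

        heap_allocation->fd = fd;
        return 0;
}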
334 void *rk_dma_heap_get_drvdata(struct rk_dma_heap *heap) in rk_dma_heap_get_drvdata() argument
336 return heap->priv; in rk_dma_heap_get_drvdata()
341 struct rk_dma_heap *heap = container_of(ref, struct rk_dma_heap, refcount); in rk_dma_heap_release() local
342 int minor = MINOR(heap->heap_devt); in rk_dma_heap_release()
345 list_del(&heap->list); in rk_dma_heap_release()
347 device_destroy(rk_dma_heap_class, heap->heap_devt); in rk_dma_heap_release()
348 cdev_del(&heap->heap_cdev); in rk_dma_heap_release()
351 kfree(heap); in rk_dma_heap_release()
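Lines 341-351 are the kref release path: unlink from the global heap list, destroy the device node, delete the cdev, free the struct. rk_dma_heap_put() (line 410) presumably wraps kref_put(&heap->refcount, rk_dma_heap_release). The minor computed at line 342 is most plausibly used to erase the xarray entry on one of the elided lines; the list lock name below is hypothetical:

static void rk_dma_heap_release(struct kref *ref)
{
        struct rk_dma_heap *heap = container_of(ref, struct rk_dma_heap, refcount);
        int minor = MINOR(heap->heap_devt);

        mutex_lock(&rk_heap_list_lock);         /* hypothetical lock name */
        list_del(&heap->list);
        mutex_unlock(&rk_heap_list_lock);

        device_destroy(rk_dma_heap_class, heap->heap_devt);
        cdev_del(&heap->heap_cdev);
        xa_erase(&rk_dma_heap_minors, minor);   /* assumed use of 'minor' */

        kfree(heap);
}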
372 struct device *rk_dma_heap_get_dev(struct rk_dma_heap *heap) in rk_dma_heap_get_dev() argument
374 return heap->heap_dev; in rk_dma_heap_get_dev()
384 const char *rk_dma_heap_get_name(struct rk_dma_heap *heap) in rk_dma_heap_get_name() argument
386 return heap->name; in rk_dma_heap_get_name()
391 struct rk_dma_heap *heap, *err_ret; in rk_dma_heap_add() local
406 heap = rk_dma_heap_find(exp_info->name); in rk_dma_heap_add()
407 if (heap) { in rk_dma_heap_add()
410 rk_dma_heap_put(heap); in rk_dma_heap_add()
414 heap = kzalloc(sizeof(*heap), GFP_KERNEL); in rk_dma_heap_add()
415 if (!heap) in rk_dma_heap_add()
418 kref_init(&heap->refcount); in rk_dma_heap_add()
419 heap->name = exp_info->name; in rk_dma_heap_add()
420 heap->ops = exp_info->ops; in rk_dma_heap_add()
421 heap->priv = exp_info->priv; in rk_dma_heap_add()
422 heap->support_cma = exp_info->support_cma; in rk_dma_heap_add()
423 INIT_LIST_HEAD(&heap->dmabuf_list); in rk_dma_heap_add()
424 INIT_LIST_HEAD(&heap->contig_list); in rk_dma_heap_add()
425 mutex_init(&heap->dmabuf_lock); in rk_dma_heap_add()
426 mutex_init(&heap->contig_lock); in rk_dma_heap_add()
429 ret = xa_alloc(&rk_dma_heap_minors, &minor, heap, in rk_dma_heap_add()
438 heap->heap_devt = MKDEV(MAJOR(rk_dma_heap_devt), minor); in rk_dma_heap_add()
440 cdev_init(&heap->heap_cdev, &rk_dma_heap_fops); in rk_dma_heap_add()
441 ret = cdev_add(&heap->heap_cdev, heap->heap_devt, 1); in rk_dma_heap_add()
448 heap->heap_dev = device_create(rk_dma_heap_class, in rk_dma_heap_add()
450 heap->heap_devt, in rk_dma_heap_add()
452 heap->name); in rk_dma_heap_add()
453 if (IS_ERR(heap->heap_dev)) { in rk_dma_heap_add()
455 err_ret = ERR_CAST(heap->heap_dev); in rk_dma_heap_add()
459 heap->procfs = proc_rk_dma_heap_dir; in rk_dma_heap_add()
462 heap->heap_dev = get_device(heap->heap_dev); in rk_dma_heap_add()
466 list_add(&heap->list, &rk_heap_list); in rk_dma_heap_add()
469 return heap; in rk_dma_heap_add()
472 cdev_del(&heap->heap_cdev); in rk_dma_heap_add()
476 kfree(heap); in rk_dma_heap_add()
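rk_dma_heap_add() (lines 391-476) is the registration entry point: it rejects duplicate names via rk_dma_heap_find() (lines 406-410), then allocates the heap and wires up the kref, lists, locks, xarray minor, cdev, device node, and procfs entry, unwinding through cdev_del()/kfree() on failure. A hypothetical exporter; the field names come from lines 419-422, while the rk_dma_heap_export_info and rk_dma_heap_ops struct tags are assumptions:

static const struct rk_dma_heap_ops example_heap_ops = {
        /* .allocate, .alloc_contig_pages, .free_contig_pages,
         * .get_pool_size, ... per the ops calls in this listing */
};

static struct rk_dma_heap *example_register(void)
{
        struct rk_dma_heap_export_info exp_info = {
                .name        = "example-heap",
                .ops         = &example_heap_ops,
                .priv        = NULL,
                .support_cma = true,
        };

        /* returns the new heap, or an ERR_PTR (see err_ret at line 391) */
        return rk_dma_heap_add(&exp_info);
}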
487 struct rk_dma_heap *heap = (struct rk_dma_heap *)data; in rk_dma_heap_dump_dmabuf() local
494 if (!strcmp(dmabuf->exp_name, heap->name)) { in rk_dma_heap_dump_dmabuf()
495 seq_printf(heap->s, "dma-heap:<%s> -dmabuf", heap->name); in rk_dma_heap_dump_dmabuf()
496 mutex_lock(&heap->dmabuf_lock); in rk_dma_heap_dump_dmabuf()
497 list_for_each_entry(buf, &heap->dmabuf_list, node) { in rk_dma_heap_dump_dmabuf()
500 seq_printf(heap->s, in rk_dma_heap_dump_dmabuf()
504 seq_printf(heap->s, in rk_dma_heap_dump_dmabuf()
508 seq_puts(heap->s, "\t\tAttached Devices:\n"); in rk_dma_heap_dump_dmabuf()
516 seq_printf(heap->s, "\t\t%s\n", in rk_dma_heap_dump_dmabuf()
521 seq_printf(heap->s, in rk_dma_heap_dump_dmabuf()
526 mutex_unlock(&heap->dmabuf_lock); in rk_dma_heap_dump_dmabuf()
531 mutex_unlock(&heap->dmabuf_lock); in rk_dma_heap_dump_dmabuf()
537 struct rk_dma_heap *heap = (struct rk_dma_heap *)data; in rk_dma_heap_dump_contig() local
541 mutex_lock(&heap->contig_lock); in rk_dma_heap_dump_contig()
542 list_for_each_entry(buf, &heap->contig_list, node) { in rk_dma_heap_dump_contig()
544 seq_printf(heap->s, "dma-heap:<%s> -non dmabuf\n", heap->name); in rk_dma_heap_dump_contig()
545 seq_printf(heap->s, "\tAlloc by (%-20s)\t[%pa-%pa]\t%pa (%lu KiB)\n", in rk_dma_heap_dump_contig()
548 mutex_unlock(&heap->contig_lock); in rk_dma_heap_dump_contig()
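rk_dma_heap_dump_dmabuf (lines 487-531) is a callback handed to get_each_dmabuf() (lines 616 and 665): it filters the global dma-buf list on dmabuf->exp_name matching the heap's name, then walks the heap's own dmabuf_list under dmabuf_lock; rk_dma_heap_dump_contig does the same for contig_list. A skeleton of the filter, with the callback signature and node type assumed and the per-buffer fields of lines 500-521 elided:

static int rk_dma_heap_dump_dmabuf(const struct dma_buf *dmabuf, void *data)
{
        struct rk_dma_heap *heap = data;
        struct rk_dma_heap_dmabuf *buf;         /* hypothetical node type */

        if (strcmp(dmabuf->exp_name, heap->name))
                return 0;                       /* not this heap; keep iterating */

        seq_printf(heap->s, "dma-heap:<%s> -dmabuf", heap->name);
        mutex_lock(&heap->dmabuf_lock);
        list_for_each_entry(buf, &heap->dmabuf_list, node) {
                /* per-buffer sizes and attached devices (lines 500-521) */
        }
        mutex_unlock(&heap->dmabuf_lock);
        return 0;
}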
556 struct rk_dma_heap *heap; in rk_total_pools_kb_show() local
560 list_for_each_entry(heap, &rk_heap_list, list) in rk_total_pools_kb_show()
561 if (heap->ops->get_pool_size) in rk_total_pools_kb_show()
562 total_pool_size += heap->ops->get_pool_size(heap); in rk_total_pools_kb_show()
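rk_total_pools_kb_show() (lines 556-562) sums the optional per-heap get_pool_size() across every registered heap, presumably backing a sysfs attribute. A reconstruction; the kobj_attribute prototype, the list lock, and the KiB division implied by the name are all assumptions:

static ssize_t rk_total_pools_kb_show(struct kobject *kobj,
                                      struct kobj_attribute *attr, char *buf)
{
        struct rk_dma_heap *heap;
        u64 total_pool_size = 0;

        mutex_lock(&rk_heap_list_lock);         /* hypothetical lock name */
        list_for_each_entry(heap, &rk_heap_list, list)
                if (heap->ops->get_pool_size)
                        total_pool_size += heap->ops->get_pool_size(heap);
        mutex_unlock(&rk_heap_list_lock);

        return sysfs_emit(buf, "%llu\n", total_pool_size / 1024);
}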
610 struct rk_dma_heap *heap; in rk_dma_heap_debug_show() local
614 list_for_each_entry(heap, &rk_heap_list, list) { in rk_dma_heap_debug_show()
615 heap->s = s; in rk_dma_heap_debug_show()
616 get_each_dmabuf(rk_dma_heap_dump_dmabuf, heap); in rk_dma_heap_debug_show()
617 rk_dma_heap_dump_contig(heap); in rk_dma_heap_debug_show()
618 total += heap->total_size; in rk_dma_heap_debug_show()
659 struct rk_dma_heap *heap; in rk_dma_heap_proc_show() local
663 list_for_each_entry(heap, &rk_heap_list, list) { in rk_dma_heap_proc_show()
664 heap->s = s; in rk_dma_heap_proc_show()
665 get_each_dmabuf(rk_dma_heap_dump_dmabuf, heap); in rk_dma_heap_proc_show()
666 rk_dma_heap_dump_contig(heap); in rk_dma_heap_proc_show()
667 total += heap->total_size; in rk_dma_heap_proc_show()
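rk_dma_heap_debug_show (lines 610-618) and rk_dma_heap_proc_show (lines 659-667) have line-for-line identical bodies, one wired to debugfs and one to the procfs entry stored at line 459. The shared walk could live in a single helper; a refactor sketch rather than the driver's actual code:

static void rk_dma_heap_show_all(struct seq_file *s)
{
        struct rk_dma_heap *heap;
        size_t total = 0;

        list_for_each_entry(heap, &rk_heap_list, list) {
                heap->s = s;            /* stash the seq_file for the dumpers */
                get_each_dmabuf(rk_dma_heap_dump_dmabuf, heap);
                rk_dma_heap_dump_contig(heap);
                total += heap->total_size;
        }
        seq_printf(s, "Total: %zu KiB\n", total >> 10); /* summary format assumed */
}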