Lines matching refs: heap

79 struct dma_buf *dma_heap_buffer_alloc(struct dma_heap *heap, size_t len,  in dma_heap_buffer_alloc()  argument
96 return heap->ops->allocate(heap, len, fd_flags, heap_flags); in dma_heap_buffer_alloc()
100 int dma_heap_bufferfd_alloc(struct dma_heap *heap, size_t len, in dma_heap_bufferfd_alloc() argument
107 dmabuf = dma_heap_buffer_alloc(heap, len, fd_flags, heap_flags); in dma_heap_bufferfd_alloc()
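
The two fragments above are the in-kernel allocation entry points: dma_heap_buffer_alloc() returns a struct dma_buf, and dma_heap_bufferfd_alloc() wraps it and installs a file descriptor instead. A minimal sketch of calling them from another driver, assuming only the signatures visible in the listing (the flag parameter types differ slightly between trees, and the O_* values are illustrative):

    #include <linux/dma-buf.h>
    #include <linux/dma-heap.h>
    #include <linux/fcntl.h>

    /* Sketch: allocate len bytes from an already-resolved heap as a dma-buf. */
    static struct dma_buf *example_heap_alloc(struct dma_heap *heap, size_t len)
    {
            /* fd_flags become the O_* mode of the eventual fd; heap_flags are heap-private. */
            return dma_heap_buffer_alloc(heap, len, O_RDWR | O_CLOEXEC, 0);
    }

    /* Same allocation, but handed back as an installed fd (what the ioctl path below does). */
    static int example_heap_alloc_fd(struct dma_heap *heap, size_t len)
    {
            return dma_heap_bufferfd_alloc(heap, len, O_RDWR | O_CLOEXEC, 0);
    }
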
124 struct dma_heap *heap; in dma_heap_open() local
126 heap = xa_load(&dma_heap_minors, iminor(inode)); in dma_heap_open()
127 if (!heap) { in dma_heap_open()
133 file->private_data = heap; in dma_heap_open()
142 struct dma_heap *heap = file->private_data; in dma_heap_ioctl_allocate() local
148 fd = dma_heap_bufferfd_alloc(heap, heap_allocation->len, in dma_heap_ioctl_allocate()
162 struct dma_heap *heap = file->private_data; in dma_heap_ioctl_get_phys() local
165 if (heap->ops->get_phys) in dma_heap_ioctl_get_phys()
166 return heap->ops->get_phys(heap, phys); in dma_heap_ioctl_get_phys()
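
The open and ioctl fragments are the character-device path: dma_heap_open() looks the heap up by chardev minor via xa_load() and stashes it in file->private_data, and dma_heap_ioctl_allocate() forwards the request to dma_heap_bufferfd_alloc(). The get_phys ioctl is a vendor extension whose UAPI structure is not shown in the matched lines, so the userspace sketch below only exercises the standard DMA_HEAP_IOCTL_ALLOC interface; the heap name is a parameter ("system" being the usual mainline heap):

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include <linux/dma-heap.h>

    /* Allocate len bytes from /dev/dma_heap/<name>; returns the dma-buf fd or -1. */
    static int heap_alloc_fd(const char *name, size_t len)
    {
            struct dma_heap_allocation_data data = {
                    .len = len,
                    .fd_flags = O_RDWR | O_CLOEXEC,
                    .heap_flags = 0,
            };
            char path[128];
            int heap_fd, ret;

            snprintf(path, sizeof(path), "/dev/dma_heap/%s", name);
            heap_fd = open(path, O_RDONLY | O_CLOEXEC);
            if (heap_fd < 0)
                    return -1;

            ret = ioctl(heap_fd, DMA_HEAP_IOCTL_ALLOC, &data);
            close(heap_fd);
            return ret < 0 ? -1 : (int)data.fd;
    }

    /* Usage: int fd = heap_alloc_fd("system", 4096); assumes the system heap is present. */
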
256 void *dma_heap_get_drvdata(struct dma_heap *heap) in dma_heap_get_drvdata() argument
258 return heap->priv; in dma_heap_get_drvdata()
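
dma_heap_get_drvdata() just returns the priv pointer the driver supplied at registration time, so any heap op can get back to its own state. A sketch with a hypothetical my_heap_data type:

    #include <linux/dma-heap.h>

    /* Hypothetical per-heap driver state, registered through exp_info.priv (see dma_heap_add below). */
    struct my_heap_data {
            struct page *base;
            size_t size;
    };

    static size_t my_heap_size(struct dma_heap *heap)
    {
            struct my_heap_data *d = dma_heap_get_drvdata(heap);    /* same pointer comes back out */

            return d->size;
    }
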
264 struct dma_heap *heap = container_of(ref, struct dma_heap, refcount); in dma_heap_release() local
265 int minor = MINOR(heap->heap_devt); in dma_heap_release()
268 list_del(&heap->list); in dma_heap_release()
270 device_destroy(dma_heap_class, heap->heap_devt); in dma_heap_release()
271 cdev_del(&heap->heap_cdev); in dma_heap_release()
274 kfree(heap); in dma_heap_release()
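
dma_heap_release() is the kref release callback: it unlinks the heap from heap_list, destroys the device node and cdev, and frees the structure. The drop side is not in the matched lines; the sketch below assumes a dma_heap_put()-style wrapper (name and locking modelled on Android common kernel), since list_del() in the release path has to run under the same lock that protects heap_list:

    /* Assumed wrapper: the mutex must be the one guarding heap_list. */
    void dma_heap_put(struct dma_heap *heap)
    {
            mutex_lock(&heap_list_lock);
            kref_put(&heap->refcount, dma_heap_release);
            mutex_unlock(&heap_list_lock);
    }
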
296 struct device *dma_heap_get_dev(struct dma_heap *heap) in dma_heap_get_dev() argument
298 return heap->heap_dev; in dma_heap_get_dev()
309 const char *dma_heap_get_name(struct dma_heap *heap) in dma_heap_get_name() argument
311 return heap->name; in dma_heap_get_name()
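
dma_heap_get_dev() and dma_heap_get_name() are trivial accessors for code that only holds the opaque struct dma_heap pointer; a typical use is logging:

    #include <linux/device.h>
    #include <linux/dma-heap.h>

    /* Sketch: report through the heap's struct device without poking into struct dma_heap. */
    static void my_heap_report(struct dma_heap *heap, size_t len)
    {
            dev_info(dma_heap_get_dev(heap), "heap \"%s\": allocated %zu bytes\n",
                     dma_heap_get_name(heap), len);
    }
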
317 struct dma_heap *heap, *h, *err_ret; in dma_heap_add() local
331 heap = kzalloc(sizeof(*heap), GFP_KERNEL); in dma_heap_add()
332 if (!heap) in dma_heap_add()
335 kref_init(&heap->refcount); in dma_heap_add()
336 heap->name = exp_info->name; in dma_heap_add()
337 heap->ops = exp_info->ops; in dma_heap_add()
338 heap->priv = exp_info->priv; in dma_heap_add()
341 ret = xa_alloc(&dma_heap_minors, &minor, heap, in dma_heap_add()
350 heap->heap_devt = MKDEV(MAJOR(dma_heap_devt), minor); in dma_heap_add()
352 cdev_init(&heap->heap_cdev, &dma_heap_fops); in dma_heap_add()
353 ret = cdev_add(&heap->heap_cdev, heap->heap_devt, 1); in dma_heap_add()
360 heap->heap_dev = device_create(dma_heap_class, in dma_heap_add()
362 heap->heap_devt, in dma_heap_add()
364 heap->name); in dma_heap_add()
365 if (IS_ERR(heap->heap_dev)) { in dma_heap_add()
367 err_ret = ERR_CAST(heap->heap_dev); in dma_heap_add()
372 heap->heap_dev = get_device(heap->heap_dev); in dma_heap_add()
382 put_device(heap->heap_dev); in dma_heap_add()
388 list_add(&heap->list, &heap_list); in dma_heap_add()
391 return heap; in dma_heap_add()
394 device_destroy(dma_heap_class, heap->heap_devt); in dma_heap_add()
396 cdev_del(&heap->heap_cdev); in dma_heap_add()
400 kfree(heap); in dma_heap_add()
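
dma_heap_add() above does the registration work: allocate a minor, init the cdev, create the device node, take a reference, and link the heap into heap_list, with the error unwind at the end. From a heap driver's side, registration is just filling in a dma_heap_export_info. The sketch below is hypothetical (struct my_heap, the allocate stub, and the "my_heap" name are made up), and the exact allocate prototype varies between kernel versions (unsigned long vs u32/u64 flags), so the types are illustrative:

    #include <linux/dma-buf.h>
    #include <linux/dma-heap.h>
    #include <linux/err.h>
    #include <linux/module.h>

    struct my_heap {                        /* hypothetical driver state, lands in heap->priv */
            struct dma_heap *heap;
    };

    static struct dma_buf *my_heap_allocate(struct dma_heap *heap, unsigned long len,
                                            unsigned long fd_flags, unsigned long heap_flags)
    {
            struct my_heap *mh = dma_heap_get_drvdata(heap);

            /* mh would back the allocation; the sketch only returns a placeholder error. */
            return mh ? ERR_PTR(-ENOMEM) : ERR_PTR(-EINVAL);
    }

    static const struct dma_heap_ops my_heap_ops = {
            .allocate = my_heap_allocate,
            /* .get_phys / .get_pool_size are the optional vendor hooks seen in the listing */
    };

    static struct my_heap my_heap_instance;

    static int __init my_heap_init(void)
    {
            struct dma_heap_export_info exp_info = {
                    .name = "my_heap",      /* becomes /dev/dma_heap/my_heap */
                    .ops  = &my_heap_ops,
                    .priv = &my_heap_instance,
            };

            my_heap_instance.heap = dma_heap_add(&exp_info);
            return PTR_ERR_OR_ZERO(my_heap_instance.heap);
    }
    module_init(my_heap_init);
    MODULE_LICENSE("GPL");
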
413 struct dma_heap *heap; in total_pools_kb_show() local
417 list_for_each_entry(heap, &heap_list, list) { in total_pools_kb_show()
418 if (heap->ops->get_pool_size) in total_pools_kb_show()
419 total_pool_size += heap->ops->get_pool_size(heap); in total_pools_kb_show()
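
total_pools_kb_show() walks heap_list and sums each heap's optional get_pool_size op (the sysfs plumbing and the KB conversion sit outside the matched lines). What a heap would plug in is roughly the following; the long return type and the pooled_bytes counter are assumptions modelled on heaps that keep a free-page pool:

    #include <linux/atomic.h>
    #include <linux/dma-heap.h>

    /* Hypothetical hook: report how much memory this heap is holding back in its pool.
     * pooled_bytes is an assumed field on the driver's private data. */
    static long my_heap_get_pool_size(struct dma_heap *heap)
    {
            struct my_heap_data *d = dma_heap_get_drvdata(heap);

            return atomic_long_read(&d->pooled_bytes);
    }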