// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2014-2018 Etnaviv Project
 */

#include <drm/drm_prime.h>
#include <linux/dma-buf.h>

#include "etnaviv_drv.h"
#include "etnaviv_gem.h"

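/*
 * PRIME (dma-buf) import and export helpers for etnaviv GEM objects.
 *
 * The exported functions below are declared in etnaviv_drv.h and are meant
 * to be referenced from the driver's DRM/GEM callbacks, roughly (a sketch,
 * assuming the usual hook names of this kernel generation; see etnaviv_drv.c
 * and etnaviv_gem.c for the actual wiring):
 *
 *	.gem_prime_import_sg_table = etnaviv_gem_prime_import_sg_table,
 *	.gem_prime_mmap            = etnaviv_gem_prime_mmap,
 *
 * with pin/unpin/vmap/vunmap/get_sg_table used on the export side.
 */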
static struct lock_class_key etnaviv_prime_lock_class;

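/*
 * Export path: build a scatter/gather table describing the object's backing
 * pages so an importer can map the buffer.  The pages must already have been
 * populated (pinned) before this is called.
 */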
struct sg_table *etnaviv_gem_prime_get_sg_table(struct drm_gem_object *obj)
{
	struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
	int npages = obj->size >> PAGE_SHIFT;

	if (WARN_ON(!etnaviv_obj->pages))  /* should have already pinned! */
		return ERR_PTR(-EINVAL);

	return drm_prime_pages_to_sg(obj->dev, etnaviv_obj->pages, npages);
}

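/* Export path: kernel virtual mapping, via the common etnaviv vmap helper. */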
void *etnaviv_gem_prime_vmap(struct drm_gem_object *obj)
{
	return etnaviv_gem_vmap(obj);
}

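/*
 * Export path: intentionally a no-op; the cached kernel mapping (if any) is
 * dropped in the object's release path instead.
 */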
void etnaviv_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr)
{
	/* TODO msm_gem_vunmap() */
}

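/*
 * Export path: userspace mapping.  Set up the VMA with the generic DRM GEM
 * helper, then let the per-object ops complete the mapping.
 */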
int etnaviv_gem_prime_mmap(struct drm_gem_object *obj,
			   struct vm_area_struct *vma)
{
	struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
	int ret;

	ret = drm_gem_mmap_obj(obj, obj->size, vma);
	if (ret < 0)
		return ret;

	return etnaviv_obj->ops->mmap(etnaviv_obj, vma);
}

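/*
 * Called when the buffer is exported: make sure the backing pages are
 * allocated and stay resident for the importer.  Buffers that were
 * themselves imported are already backed by their exporter, so skip them.
 */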
int etnaviv_gem_prime_pin(struct drm_gem_object *obj)
{
	if (!obj->import_attach) {
		struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);

		mutex_lock(&etnaviv_obj->lock);
		etnaviv_gem_get_pages(etnaviv_obj);
		mutex_unlock(&etnaviv_obj->lock);
	}
	return 0;
}

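/* Counterpart of etnaviv_gem_prime_pin(): drop the extra page reference. */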
void etnaviv_gem_prime_unpin(struct drm_gem_object *obj)
{
	if (!obj->import_attach) {
		struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);

		mutex_lock(&etnaviv_obj->lock);
		etnaviv_gem_put_pages(to_etnaviv_bo(obj));
		mutex_unlock(&etnaviv_obj->lock);
	}
}

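/*
 * Release an imported buffer: drop any dma-buf kernel mapping, free the page
 * pointer array (the pages themselves belong to the exporter) and detach
 * from the dma-buf via drm_prime_gem_destroy().
 */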
static void etnaviv_gem_prime_release(struct etnaviv_gem_object *etnaviv_obj)
{
	if (etnaviv_obj->vaddr)
		dma_buf_vunmap(etnaviv_obj->base.import_attach->dmabuf,
			       etnaviv_obj->vaddr);

	/* Don't drop the pages for imported dmabuf, as they are not
	 * ours, just free the array we allocated:
	 */
	if (etnaviv_obj->pages)
		kvfree(etnaviv_obj->pages);

	drm_prime_gem_destroy(&etnaviv_obj->base, etnaviv_obj->sgt);
}

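/*
 * .vmap for imported buffers: map through the exporter with dma_buf_vmap()
 * rather than touching the pages directly.  Caller holds etnaviv_obj->lock.
 */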
static void *etnaviv_gem_prime_vmap_impl(struct etnaviv_gem_object *etnaviv_obj)
{
	lockdep_assert_held(&etnaviv_obj->lock);

	return dma_buf_vmap(etnaviv_obj->base.import_attach->dmabuf);
}

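/* .mmap for imported buffers: forward the userspace mapping to the exporter. */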
static int etnaviv_gem_prime_mmap_obj(struct etnaviv_gem_object *etnaviv_obj,
		struct vm_area_struct *vma)
{
	return dma_buf_mmap(etnaviv_obj->base.dma_buf, vma, 0);
}

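/*
 * Object ops used for buffers imported through PRIME; see
 * etnaviv_gem_prime_import_sg_table() below.
 */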
static const struct etnaviv_gem_ops etnaviv_gem_prime_ops = {
	/* .get_pages should never be called */
	.release = etnaviv_gem_prime_release,
	.vmap = etnaviv_gem_prime_vmap_impl,
	.mmap = etnaviv_gem_prime_mmap_obj,
};

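/*
 * Import path: wrap a dma-buf that the PRIME core has already attached and
 * mapped into an sg table.  Create a private etnaviv GEM object, remember
 * the sg table and build a page pointer array from it so the rest of the
 * driver can treat the buffer like a normally allocated one.
 */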
struct drm_gem_object *etnaviv_gem_prime_import_sg_table(struct drm_device *dev,
	struct dma_buf_attachment *attach, struct sg_table *sgt)
{
	struct etnaviv_gem_object *etnaviv_obj;
	size_t size = PAGE_ALIGN(attach->dmabuf->size);
	int ret, npages;

	ret = etnaviv_gem_new_private(dev, size, ETNA_BO_WC,
				      &etnaviv_gem_prime_ops, &etnaviv_obj);
	if (ret < 0)
		return ERR_PTR(ret);

	lockdep_set_class(&etnaviv_obj->lock, &etnaviv_prime_lock_class);

	npages = size / PAGE_SIZE;

	etnaviv_obj->sgt = sgt;
	etnaviv_obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);
	if (!etnaviv_obj->pages) {
		ret = -ENOMEM;
		goto fail;
	}

	ret = drm_prime_sg_to_page_addr_arrays(sgt, etnaviv_obj->pages,
					       NULL, npages);
	if (ret)
		goto fail;

	etnaviv_gem_obj_add(dev, &etnaviv_obj->base);

	return &etnaviv_obj->base;

fail:
	drm_gem_object_put(&etnaviv_obj->base);

	return ERR_PTR(ret);
}