1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun * Copyright (C) 2017-2018 Etnaviv Project
4*4882a593Smuzhiyun */
5*4882a593Smuzhiyun
6*4882a593Smuzhiyun #include <linux/dma-mapping.h>
7*4882a593Smuzhiyun
8*4882a593Smuzhiyun #include <drm/drm_mm.h>
9*4882a593Smuzhiyun
10*4882a593Smuzhiyun #include "etnaviv_cmdbuf.h"
11*4882a593Smuzhiyun #include "etnaviv_gem.h"
12*4882a593Smuzhiyun #include "etnaviv_gpu.h"
13*4882a593Smuzhiyun #include "etnaviv_mmu.h"
14*4882a593Smuzhiyun #include "etnaviv_perfmon.h"
15*4882a593Smuzhiyun
16*4882a593Smuzhiyun #define SUBALLOC_SIZE SZ_512K
17*4882a593Smuzhiyun #define SUBALLOC_GRANULE SZ_4K
18*4882a593Smuzhiyun #define SUBALLOC_GRANULES (SUBALLOC_SIZE / SUBALLOC_GRANULE)
19*4882a593Smuzhiyun
20*4882a593Smuzhiyun struct etnaviv_cmdbuf_suballoc {
21*4882a593Smuzhiyun /* suballocated dma buffer properties */
22*4882a593Smuzhiyun struct device *dev;
23*4882a593Smuzhiyun void *vaddr;
24*4882a593Smuzhiyun dma_addr_t paddr;
25*4882a593Smuzhiyun
26*4882a593Smuzhiyun /* allocation management */
27*4882a593Smuzhiyun struct mutex lock;
28*4882a593Smuzhiyun DECLARE_BITMAP(granule_map, SUBALLOC_GRANULES);
29*4882a593Smuzhiyun int free_space;
30*4882a593Smuzhiyun wait_queue_head_t free_event;
31*4882a593Smuzhiyun };
32*4882a593Smuzhiyun
33*4882a593Smuzhiyun struct etnaviv_cmdbuf_suballoc *
etnaviv_cmdbuf_suballoc_new(struct device * dev)34*4882a593Smuzhiyun etnaviv_cmdbuf_suballoc_new(struct device *dev)
35*4882a593Smuzhiyun {
36*4882a593Smuzhiyun struct etnaviv_cmdbuf_suballoc *suballoc;
37*4882a593Smuzhiyun int ret;
38*4882a593Smuzhiyun
39*4882a593Smuzhiyun suballoc = kzalloc(sizeof(*suballoc), GFP_KERNEL);
40*4882a593Smuzhiyun if (!suballoc)
41*4882a593Smuzhiyun return ERR_PTR(-ENOMEM);
42*4882a593Smuzhiyun
43*4882a593Smuzhiyun suballoc->dev = dev;
44*4882a593Smuzhiyun mutex_init(&suballoc->lock);
45*4882a593Smuzhiyun init_waitqueue_head(&suballoc->free_event);
46*4882a593Smuzhiyun
47*4882a593Smuzhiyun BUILD_BUG_ON(ETNAVIV_SOFTPIN_START_ADDRESS < SUBALLOC_SIZE);
48*4882a593Smuzhiyun suballoc->vaddr = dma_alloc_wc(dev, SUBALLOC_SIZE,
49*4882a593Smuzhiyun &suballoc->paddr, GFP_KERNEL);
50*4882a593Smuzhiyun if (!suballoc->vaddr) {
51*4882a593Smuzhiyun ret = -ENOMEM;
52*4882a593Smuzhiyun goto free_suballoc;
53*4882a593Smuzhiyun }
54*4882a593Smuzhiyun
55*4882a593Smuzhiyun return suballoc;
56*4882a593Smuzhiyun
57*4882a593Smuzhiyun free_suballoc:
58*4882a593Smuzhiyun kfree(suballoc);
59*4882a593Smuzhiyun
60*4882a593Smuzhiyun return ERR_PTR(ret);
61*4882a593Smuzhiyun }
62*4882a593Smuzhiyun
/*
 * Map the whole suballoc DMA buffer into the GPU address space of
 * @context, recording the resulting GPU VA in @mapping.
 *
 * @memory_base is the base address the GPU uses to reach system memory;
 * it is forwarded unchanged to etnaviv_iommu_get_suballoc_va().
 *
 * Returns 0 on success or a negative errno from the MMU layer.
 */
int etnaviv_cmdbuf_suballoc_map(struct etnaviv_cmdbuf_suballoc *suballoc,
				struct etnaviv_iommu_context *context,
				struct etnaviv_vram_mapping *mapping,
				u32 memory_base)
{
	return etnaviv_iommu_get_suballoc_va(context, mapping, memory_base,
					     suballoc->paddr, SUBALLOC_SIZE);
}
71*4882a593Smuzhiyun
/*
 * Undo etnaviv_cmdbuf_suballoc_map(): drop the suballoc mapping
 * @mapping from the GPU address space of @context.
 */
void etnaviv_cmdbuf_suballoc_unmap(struct etnaviv_iommu_context *context,
				   struct etnaviv_vram_mapping *mapping)
{
	etnaviv_iommu_put_suballoc_va(context, mapping);
}
77*4882a593Smuzhiyun
/*
 * Free the backing DMA buffer and the suballocator itself.
 *
 * The caller must ensure no command buffers are still allocated from
 * @suballoc and that all GPU mappings of it have been torn down —
 * TODO confirm callers guarantee this; nothing here checks it.
 */
void etnaviv_cmdbuf_suballoc_destroy(struct etnaviv_cmdbuf_suballoc *suballoc)
{
	dma_free_wc(suballoc->dev, SUBALLOC_SIZE, suballoc->vaddr,
		    suballoc->paddr);
	kfree(suballoc);
}
84*4882a593Smuzhiyun
/*
 * Carve a command buffer of @size bytes out of @suballoc.
 *
 * The size is rounded up to a power-of-two number of 4 KiB granules, as
 * required by bitmap_find_free_region(). If no space is available the
 * call sleeps (interruptibly) until another cmdbuf is freed, for at
 * most 10 seconds.
 *
 * Returns 0 on success, -ETIMEDOUT if no space became available in
 * time, or -ERESTARTSYS if the wait was interrupted by a signal.
 */
int etnaviv_cmdbuf_init(struct etnaviv_cmdbuf_suballoc *suballoc,
			struct etnaviv_cmdbuf *cmdbuf, u32 size)
{
	int granule_offs, order, ret;

	cmdbuf->suballoc = suballoc;
	cmdbuf->size = size;

	order = order_base_2(ALIGN(size, SUBALLOC_GRANULE) / SUBALLOC_GRANULE);
retry:
	mutex_lock(&suballoc->lock);
	granule_offs = bitmap_find_free_region(suballoc->granule_map,
					SUBALLOC_GRANULES, order);
	if (granule_offs < 0) {
		suballoc->free_space = 0;
		mutex_unlock(&suballoc->lock);
		ret = wait_event_interruptible_timeout(suballoc->free_event,
						       suballoc->free_space,
						       msecs_to_jiffies(10 * 1000));
		if (!ret) {
			dev_err(suballoc->dev,
				"Timeout waiting for cmdbuf space\n");
			return -ETIMEDOUT;
		} else if (ret < 0) {
			/*
			 * Interrupted by a signal. Propagate the error
			 * instead of retrying: with the signal still
			 * pending, the interruptible wait would return
			 * immediately every time, turning the retry loop
			 * into a busy loop.
			 */
			return ret;
		}
		goto retry;
	}
	mutex_unlock(&suballoc->lock);
	cmdbuf->suballoc_offset = granule_offs * SUBALLOC_GRANULE;
	cmdbuf->vaddr = suballoc->vaddr + cmdbuf->suballoc_offset;

	return 0;
}
117*4882a593Smuzhiyun
etnaviv_cmdbuf_free(struct etnaviv_cmdbuf * cmdbuf)118*4882a593Smuzhiyun void etnaviv_cmdbuf_free(struct etnaviv_cmdbuf *cmdbuf)
119*4882a593Smuzhiyun {
120*4882a593Smuzhiyun struct etnaviv_cmdbuf_suballoc *suballoc = cmdbuf->suballoc;
121*4882a593Smuzhiyun int order = order_base_2(ALIGN(cmdbuf->size, SUBALLOC_GRANULE) /
122*4882a593Smuzhiyun SUBALLOC_GRANULE);
123*4882a593Smuzhiyun
124*4882a593Smuzhiyun mutex_lock(&suballoc->lock);
125*4882a593Smuzhiyun bitmap_release_region(suballoc->granule_map,
126*4882a593Smuzhiyun cmdbuf->suballoc_offset / SUBALLOC_GRANULE,
127*4882a593Smuzhiyun order);
128*4882a593Smuzhiyun suballoc->free_space = 1;
129*4882a593Smuzhiyun mutex_unlock(&suballoc->lock);
130*4882a593Smuzhiyun wake_up_all(&suballoc->free_event);
131*4882a593Smuzhiyun }
132*4882a593Smuzhiyun
/*
 * Return the GPU virtual address of @buf: the GPU VA of the whole
 * suballoc region (from @mapping) plus the buffer's offset within it.
 */
u32 etnaviv_cmdbuf_get_va(struct etnaviv_cmdbuf *buf,
			  struct etnaviv_vram_mapping *mapping)
{
	return mapping->iova + buf->suballoc_offset;
}
138*4882a593Smuzhiyun
/*
 * Return the DMA (bus) address of @buf: the suballoc region's DMA
 * address plus the buffer's offset within it.
 */
dma_addr_t etnaviv_cmdbuf_get_pa(struct etnaviv_cmdbuf *buf)
{
	return buf->suballoc->paddr + buf->suballoc_offset;
}
143