/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

#ifndef __I915_VMA_H__
#define __I915_VMA_H__

#include <linux/io-mapping.h>
#include <linux/rbtree.h>

#include <drm/drm_mm.h>

#include "gt/intel_ggtt_fencing.h"
#include "gem/i915_gem_object.h"

#include "i915_gem_gtt.h"

#include "i915_active.h"
#include "i915_request.h"
#include "i915_vma_types.h"

struct i915_vma *
i915_vma_instance(struct drm_i915_gem_object *obj,
		  struct i915_address_space *vm,
		  const struct i915_ggtt_view *view);
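
/*
 * Illustrative usage sketch (not part of the original header):
 * i915_vma_instance() returns the persistent VMA for an (obj, vm, view)
 * triple, creating it if necessary. "obj" and "ggtt" below are assumed
 * to exist at the call site; error handling is abbreviated.
 *
 *	struct i915_vma *vma;
 *
 *	vma = i915_vma_instance(obj, &ggtt->vm, NULL);
 *	if (IS_ERR(vma))
 *		return PTR_ERR(vma);
 */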

void i915_vma_unpin_and_release(struct i915_vma **p_vma, unsigned int flags);
#define I915_VMA_RELEASE_MAP BIT(0)

static inline bool i915_vma_is_active(const struct i915_vma *vma)
{
	return !i915_active_is_idle(&vma->active);
}

int __must_check __i915_vma_move_to_active(struct i915_vma *vma,
					   struct i915_request *rq);
int __must_check i915_vma_move_to_active(struct i915_vma *vma,
					 struct i915_request *rq,
					 unsigned int flags);
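
/*
 * Illustrative sketch (not part of the original header): tracking a vma
 * in a request's active set. Assumes the vma is pinned and its
 * reservation lock is held; "rq" is a hypothetical in-flight request.
 *
 *	err = i915_vma_move_to_active(vma, rq, EXEC_OBJECT_WRITE);
 *	if (err)
 *		return err;
 */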

#define __i915_vma_flags(v) ((unsigned long *)&(v)->flags.counter)

static inline bool i915_vma_is_ggtt(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_GGTT_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_has_ggtt_write(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_GGTT_WRITE_BIT, __i915_vma_flags(vma));
}

static inline void i915_vma_set_ggtt_write(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_ggtt(vma));
	set_bit(I915_VMA_GGTT_WRITE_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_unset_ggtt_write(struct i915_vma *vma)
{
	return test_and_clear_bit(I915_VMA_GGTT_WRITE_BIT,
				  __i915_vma_flags(vma));
}

void i915_vma_flush_writes(struct i915_vma *vma);

static inline bool i915_vma_is_map_and_fenceable(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_set_userfault(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma));
	return test_and_set_bit(I915_VMA_USERFAULT_BIT, __i915_vma_flags(vma));
}

static inline void i915_vma_unset_userfault(struct i915_vma *vma)
{
	return clear_bit(I915_VMA_USERFAULT_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_has_userfault(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_USERFAULT_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_is_closed(const struct i915_vma *vma)
{
	return !list_empty(&vma->closed_link);
}

static inline u32 i915_ggtt_offset(const struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_ggtt(vma));
	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	GEM_BUG_ON(upper_32_bits(vma->node.start));
	GEM_BUG_ON(upper_32_bits(vma->node.start + vma->node.size - 1));
	return lower_32_bits(vma->node.start);
}
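
/*
 * Illustrative sketch (not part of the original header): the 32-bit
 * GGTT address of a pinned vma is what gets written into registers or
 * commands; "uncore" and "reg" here are hypothetical.
 *
 *	intel_uncore_write(uncore, reg, i915_ggtt_offset(vma));
 */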

static inline u32 i915_ggtt_pin_bias(struct i915_vma *vma)
{
	return i915_vm_to_ggtt(vma->vm)->pin_bias;
}

static inline struct i915_vma *i915_vma_get(struct i915_vma *vma)
{
	i915_gem_object_get(vma->obj);
	return vma;
}

static inline struct i915_vma *i915_vma_tryget(struct i915_vma *vma)
{
	if (likely(kref_get_unless_zero(&vma->obj->base.refcount)))
		return vma;

	return NULL;
}

static inline void i915_vma_put(struct i915_vma *vma)
{
	i915_gem_object_put(vma->obj);
}

static __always_inline ptrdiff_t ptrdiff(const void *a, const void *b)
{
	return a - b;
}

static inline long
i915_vma_compare(struct i915_vma *vma,
		 struct i915_address_space *vm,
		 const struct i915_ggtt_view *view)
{
	ptrdiff_t cmp;

	GEM_BUG_ON(view && !i915_is_ggtt(vm));

	cmp = ptrdiff(vma->vm, vm);
	if (cmp)
		return cmp;

	BUILD_BUG_ON(I915_GGTT_VIEW_NORMAL != 0);
	cmp = vma->ggtt_view.type;
	if (!view)
		return cmp;

	cmp -= view->type;
	if (cmp)
		return cmp;

	assert_i915_gem_gtt_types();

	/* ggtt_view.type also encodes its size so that we both distinguish
	 * different views using it as a "type" and also use a compact (no
	 * accessing of uninitialised padding bytes) memcmp without storing
	 * an extra parameter or adding more code.
	 *
	 * To ensure that the memcmp is valid for all branches of the union,
	 * even though the code looks like it is just comparing one branch,
	 * we assert above that all branches have the same address, and that
	 * each branch has a unique type/size.
	 */
	BUILD_BUG_ON(I915_GGTT_VIEW_NORMAL >= I915_GGTT_VIEW_PARTIAL);
	BUILD_BUG_ON(I915_GGTT_VIEW_PARTIAL >= I915_GGTT_VIEW_ROTATED);
	BUILD_BUG_ON(I915_GGTT_VIEW_ROTATED >= I915_GGTT_VIEW_REMAPPED);
	BUILD_BUG_ON(offsetof(typeof(*view), rotated) !=
		     offsetof(typeof(*view), partial));
	BUILD_BUG_ON(offsetof(typeof(*view), rotated) !=
		     offsetof(typeof(*view), remapped));
	return memcmp(&vma->ggtt_view.partial, &view->partial, view->type);
}
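
/*
 * Illustrative sketch (not part of the original header): the ordering
 * defined by i915_vma_compare() drives the per-object vma tree walk; a
 * simplified lookup might descend like this.
 *
 *	struct rb_node *rb = obj->vma.tree.rb_node;
 *
 *	while (rb) {
 *		struct i915_vma *vma = rb_entry(rb, struct i915_vma, obj_node);
 *		long cmp = i915_vma_compare(vma, vm, view);
 *
 *		if (cmp == 0)
 *			return vma;
 *		rb = cmp < 0 ? rb->rb_right : rb->rb_left;
 *	}
 */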

struct i915_vma_work *i915_vma_work(void);
int i915_vma_bind(struct i915_vma *vma,
		  enum i915_cache_level cache_level,
		  u32 flags,
		  struct i915_vma_work *work);

bool i915_gem_valid_gtt_space(struct i915_vma *vma, unsigned long color);
bool i915_vma_misplaced(const struct i915_vma *vma,
			u64 size, u64 alignment, u64 flags);
void __i915_vma_set_map_and_fenceable(struct i915_vma *vma);
void i915_vma_revoke_mmap(struct i915_vma *vma);
void __i915_vma_evict(struct i915_vma *vma);
int __i915_vma_unbind(struct i915_vma *vma);
int __must_check i915_vma_unbind(struct i915_vma *vma);
void i915_vma_unlink_ctx(struct i915_vma *vma);
void i915_vma_close(struct i915_vma *vma);
void i915_vma_reopen(struct i915_vma *vma);

static inline struct i915_vma *__i915_vma_get(struct i915_vma *vma)
{
	if (kref_get_unless_zero(&vma->ref))
		return vma;

	return NULL;
}

void i915_vma_release(struct kref *ref);
static inline void __i915_vma_put(struct i915_vma *vma)
{
	kref_put(&vma->ref, i915_vma_release);
}

#define assert_vma_held(vma) dma_resv_assert_held((vma)->resv)

static inline void i915_vma_lock(struct i915_vma *vma)
{
	dma_resv_lock(vma->resv, NULL);
}

static inline void i915_vma_unlock(struct i915_vma *vma)
{
	dma_resv_unlock(vma->resv);
}
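
/*
 * Illustrative sketch (not part of the original header): the vma lock
 * wraps the underlying dma_resv and is typically held around request
 * tracking; "rq" is a hypothetical request.
 *
 *	i915_vma_lock(vma);
 *	err = i915_vma_move_to_active(vma, rq, 0);
 *	i915_vma_unlock(vma);
 */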

int __must_check
i915_vma_pin_ww(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
		u64 size, u64 alignment, u64 flags);

static inline int __must_check
i915_vma_pin(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
{
	return i915_vma_pin_ww(vma, NULL, size, alignment, flags);
}
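
/*
 * Illustrative sketch (not part of the original header): a typical
 * pin/use/unpin cycle. PIN_GLOBAL binds into the GGTT and PIN_MAPPABLE
 * keeps the binding within the CPU-visible aperture.
 *
 *	err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL | PIN_MAPPABLE);
 *	if (err)
 *		return err;
 *	... access the binding, e.g. via vma->node.start ...
 *	i915_vma_unpin(vma);
 */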

int i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
		  u32 align, unsigned int flags);

static inline int i915_vma_pin_count(const struct i915_vma *vma)
{
	return atomic_read(&vma->flags) & I915_VMA_PIN_MASK;
}

static inline bool i915_vma_is_pinned(const struct i915_vma *vma)
{
	return i915_vma_pin_count(vma);
}

static inline void __i915_vma_pin(struct i915_vma *vma)
{
	atomic_inc(&vma->flags);
	GEM_BUG_ON(!i915_vma_is_pinned(vma));
}

static inline void __i915_vma_unpin(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_pinned(vma));
	atomic_dec(&vma->flags);
}

static inline void i915_vma_unpin(struct i915_vma *vma)
{
	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	__i915_vma_unpin(vma);
}

static inline bool i915_vma_is_bound(const struct i915_vma *vma,
				     unsigned int where)
{
	return atomic_read(&vma->flags) & where;
}

static inline bool i915_node_color_differs(const struct drm_mm_node *node,
					   unsigned long color)
{
	return drm_mm_node_allocated(node) && node->color != color;
}

/**
 * i915_vma_pin_iomap - calls ioremap_wc to map the GGTT VMA via the aperture
 * @vma: VMA to iomap
 *
 * The passed-in VMA has to be pinned in the global GTT mappable region.
 * An extra pin is acquired for the returned iomapping; the caller must
 * call i915_vma_unpin_iomap() to relinquish it once the iomapping is no
 * longer required.
 *
 * Returns a valid iomapped pointer or ERR_PTR on failure.
 */
void __iomem *i915_vma_pin_iomap(struct i915_vma *vma);
#define IO_ERR_PTR(x) ((void __iomem *)ERR_PTR(x))
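
/*
 * Illustrative sketch (not part of the original header): mapping a GGTT
 * vma through the aperture and writing to it; "offset" and "value" are
 * hypothetical.
 *
 *	void __iomem *map = i915_vma_pin_iomap(vma);
 *
 *	if (IS_ERR(map))
 *		return PTR_ERR(map);
 *	writel(value, map + offset);
 *	i915_vma_unpin_iomap(vma);
 */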

/**
 * i915_vma_unpin_iomap - unpins the mapping returned from i915_vma_pin_iomap
 * @vma: VMA to unpin
 *
 * Unpins the previously iomapped VMA from i915_vma_pin_iomap().
 *
 * This function is only valid to be called on a VMA previously
 * iomapped by the caller with i915_vma_pin_iomap().
 */
void i915_vma_unpin_iomap(struct i915_vma *vma);

static inline struct page *i915_vma_first_page(struct i915_vma *vma)
{
	GEM_BUG_ON(!vma->pages);
	return sg_page(vma->pages->sgl);
}

/**
 * i915_vma_pin_fence - pin fencing state
 * @vma: vma to pin fencing for
 *
 * This pins the fencing state (whether tiled or untiled) to make sure the
 * vma (and its object) is ready to be used as a scanout target. Fencing
 * status must be synchronized first by calling i915_vma_get_fence().
 *
 * The resulting fence pin reference must be released again with
 * i915_vma_unpin_fence().
 *
 * Returns:
 *
 * 0 on success, a negative error code on failure.
 */
int __must_check i915_vma_pin_fence(struct i915_vma *vma);
void i915_vma_revoke_fence(struct i915_vma *vma);

int __i915_vma_pin_fence(struct i915_vma *vma);

static inline void __i915_vma_unpin_fence(struct i915_vma *vma)
{
	GEM_BUG_ON(atomic_read(&vma->fence->pin_count) <= 0);
	atomic_dec(&vma->fence->pin_count);
}

/**
 * i915_vma_unpin_fence - unpin fencing state
 * @vma: vma to unpin fencing for
 *
 * This releases the fence pin reference acquired through
 * i915_vma_pin_fence. It handles both objects with and without an
 * attached fence correctly; callers do not need to distinguish the two.
 */
static inline void
i915_vma_unpin_fence(struct i915_vma *vma)
{
	if (vma->fence)
		__i915_vma_unpin_fence(vma);
}
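
/*
 * Illustrative sketch (not part of the original header): pinning the
 * fencing state around fenced (tiled) aperture access.
 *
 *	err = i915_vma_pin_fence(vma);
 *	if (err)
 *		return err;
 *	if (vma->fence)
 *		... perform fenced access ...
 *	i915_vma_unpin_fence(vma);
 */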

void i915_vma_parked(struct intel_gt *gt);

#define for_each_until(cond) if (cond) break; else

/**
 * for_each_ggtt_vma - Iterate over the GGTT VMAs belonging to an object.
 * @V: the #i915_vma iterator
 * @OBJ: the #drm_i915_gem_object
 *
 * GGTT VMAs are placed at the beginning of the object's vma.list, see
 * vma_create(), so we can stop our walk as soon as we see a ppgtt VMA
 * (or the list is empty).
 */
#define for_each_ggtt_vma(V, OBJ) \
	list_for_each_entry(V, &(OBJ)->vma.list, obj_link) \
		for_each_until(!i915_vma_is_ggtt(V))
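
/*
 * Illustrative sketch (not part of the original header): walking every
 * GGTT binding of an object, e.g. to revoke userspace mmaps.
 *
 *	struct i915_vma *vma;
 *
 *	for_each_ggtt_vma(vma, obj)
 *		i915_vma_revoke_mmap(vma);
 */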

struct i915_vma *i915_vma_alloc(void);
void i915_vma_free(struct i915_vma *vma);

struct i915_vma *i915_vma_make_unshrinkable(struct i915_vma *vma);
void i915_vma_make_shrinkable(struct i915_vma *vma);
void i915_vma_make_purgeable(struct i915_vma *vma);

int i915_vma_wait_for_bind(struct i915_vma *vma);

static inline int i915_vma_sync(struct i915_vma *vma)
{
	/* Wait for the asynchronous bindings and pending GPU reads */
	return i915_active_wait(&vma->active);
}

#endif