/*
 * Copyright © 2017 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

#include "../i915_selftest.h"
#include "i915_random.h"

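/*
 * Note: this file is built by being #included from i915_syncmap.c when the
 * selftests are enabled, so SHIFT, MASK, KSYNCMAP and the __sync_*() helpers
 * used below come from that file.
 */
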
static char *
__sync_print(struct i915_syncmap *p,
	     char *buf, unsigned long *sz,
	     unsigned int depth,
	     unsigned int last,
	     unsigned int idx)
{
	unsigned long len;
	unsigned int i, X;

	if (depth) {
		unsigned int d;

		for (d = 0; d < depth - 1; d++) {
			if (last & BIT(depth - d - 1))
				len = scnprintf(buf, *sz, "|   ");
			else
				len = scnprintf(buf, *sz, "    ");
			buf += len;
			*sz -= len;
		}
		len = scnprintf(buf, *sz, "%x-> ", idx);
		buf += len;
		*sz -= len;
	}

	/* We mark bits after the prefix as "X" */
	len = scnprintf(buf, *sz, "0x%016llx", p->prefix << p->height << SHIFT);
	buf += len;
	*sz -= len;
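	/*
	 * The low (height + SHIFT) bits are not covered by this node's
	 * prefix; back up over those hex digits and overprint them as 'X'.
	 */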
	X = (p->height + SHIFT) / 4;
	scnprintf(buf - X, *sz + X, "%*s", X, "XXXXXXXXXXXXXXXXX");

	if (!p->height) {
		for_each_set_bit(i, (unsigned long *)&p->bitmap, KSYNCMAP) {
			len = scnprintf(buf, *sz, " %x:%x,",
					i, __sync_seqno(p)[i]);
			buf += len;
			*sz -= len;
		}
		buf -= 1;
		*sz += 1;
	}

	len = scnprintf(buf, *sz, "\n");
	buf += len;
	*sz -= len;

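	/*
	 * Recurse into each populated child. Each bit shifted into 'last'
	 * records whether further siblings follow at that depth, telling
	 * deeper levels where to keep drawing the '|' connectors above.
	 */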
	if (p->height) {
		for_each_set_bit(i, (unsigned long *)&p->bitmap, KSYNCMAP) {
			buf = __sync_print(__sync_child(p)[i], buf, sz,
					   depth + 1,
					   last << 1 | !!(p->bitmap >> (i + 1)),
					   i);
		}
	}

	return buf;
}

static bool
i915_syncmap_print_to_buf(struct i915_syncmap *p, char *buf, unsigned long sz)
{
	if (!p)
		return false;

	while (p->parent)
		p = p->parent;

	__sync_print(p, buf, &sz, 0, 1, 0);
	return true;
}

static int check_syncmap_free(struct i915_syncmap **sync)
{
	i915_syncmap_free(sync);
	if (*sync) {
		pr_err("sync not cleared after free\n");
		return -EINVAL;
	}

	return 0;
}

static int dump_syncmap(struct i915_syncmap *sync, int err)
{
	char *buf;

	if (!err)
		return check_syncmap_free(&sync);

	buf = kmalloc(PAGE_SIZE, GFP_KERNEL);
	if (!buf)
		goto skip;

	if (i915_syncmap_print_to_buf(sync, buf, PAGE_SIZE))
		pr_err("%s", buf);

	kfree(buf);

skip:
	i915_syncmap_free(&sync);
	return err;
}

static int igt_syncmap_init(void *arg)
{
	struct i915_syncmap *sync = (void *)~0ul;

	/*
	 * Cursory check that we can initialise a random pointer and transform
	 * it into the root pointer of a syncmap.
	 */

	i915_syncmap_init(&sync);
	return check_syncmap_free(&sync);
}

static int check_seqno(struct i915_syncmap *leaf, unsigned int idx, u32 seqno)
{
	if (leaf->height) {
		pr_err("%s: not a leaf, height is %d\n",
		       __func__, leaf->height);
		return -EINVAL;
	}

	if (__sync_seqno(leaf)[idx] != seqno) {
		pr_err("%s: seqno[%d], found %x, expected %x\n",
		       __func__, idx, __sync_seqno(leaf)[idx], seqno);
		return -EINVAL;
	}

	return 0;
}

static int check_one(struct i915_syncmap **sync, u64 context, u32 seqno)
{
	int err;

	err = i915_syncmap_set(sync, context, seqno);
	if (err)
		return err;

	if ((*sync)->height) {
		pr_err("Inserting first context=%llx did not return leaf (height=%d, prefix=%llx)\n",
		       context, (*sync)->height, (*sync)->prefix);
		return -EINVAL;
	}

	if ((*sync)->parent) {
		pr_err("Inserting first context=%llx created branches!\n",
		       context);
		return -EINVAL;
	}

	if (hweight32((*sync)->bitmap) != 1) {
		pr_err("First bitmap does not contain a single entry, found %x (count=%d)!\n",
		       (*sync)->bitmap, hweight32((*sync)->bitmap));
		return -EINVAL;
	}

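	/* With a single bit set, ilog2() recovers the slot index in use. */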
	err = check_seqno((*sync), ilog2((*sync)->bitmap), seqno);
	if (err)
		return err;

	if (!i915_syncmap_is_later(sync, context, seqno)) {
		pr_err("Lookup of first context=%llx/seqno=%x failed!\n",
		       context, seqno);
		return -EINVAL;
	}

	return 0;
}

static int igt_syncmap_one(void *arg)
{
	I915_RND_STATE(prng);
	IGT_TIMEOUT(end_time);
	struct i915_syncmap *sync;
	unsigned long max = 1;
	int err;

	/*
	 * Check that inserting a new id creates a leaf and only that leaf.
	 */

	i915_syncmap_init(&sync);

	do {
		u64 context = i915_prandom_u64_state(&prng);
		unsigned long loop;

		err = check_syncmap_free(&sync);
		if (err)
			goto out;

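		/*
		 * Overwrite the same context with fresh seqnos; the map must
		 * remain a single leaf with exactly one slot in use.
		 */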
		for (loop = 0; loop <= max; loop++) {
			err = check_one(&sync, context,
					prandom_u32_state(&prng));
			if (err)
				goto out;
		}
		max++;
	} while (!__igt_timeout(end_time, NULL));
	pr_debug("%s: Completed %lu single insertions\n",
		 __func__, max * (max - 1) / 2);
out:
	return dump_syncmap(sync, err);
}

static int check_leaf(struct i915_syncmap **sync, u64 context, u32 seqno)
{
	int err;

	err = i915_syncmap_set(sync, context, seqno);
	if (err)
		return err;

	if ((*sync)->height) {
		pr_err("Inserting context=%llx did not return leaf (height=%d, prefix=%llx)\n",
		       context, (*sync)->height, (*sync)->prefix);
		return -EINVAL;
	}

	if (hweight32((*sync)->bitmap) != 1) {
		pr_err("First entry into leaf (context=%llx) does not contain a single entry, found %x (count=%d)!\n",
		       context, (*sync)->bitmap, hweight32((*sync)->bitmap));
		return -EINVAL;
	}

	err = check_seqno((*sync), ilog2((*sync)->bitmap), seqno);
	if (err)
		return err;

	if (!i915_syncmap_is_later(sync, context, seqno)) {
		pr_err("Lookup of first entry context=%llx/seqno=%x failed!\n",
		       context, seqno);
		return -EINVAL;
	}

	return 0;
}

static int igt_syncmap_join_above(void *arg)
{
	struct i915_syncmap *sync;
	unsigned int pass, order;
	int err;

	i915_syncmap_init(&sync);

	/*
	 * When we have a new id that doesn't fit inside the existing tree,
	 * we need to add a new layer above.
	 *
	 * 1: 0x00000001
	 * 2: 0x00000010
	 * 3: 0x00000100
	 * 4: 0x00001000
	 * ...
	 * Each pass the common prefix shrinks and we have to insert a join.
	 * Each join will only contain two branches, the latest of which
	 * is always a leaf.
	 *
	 * If we then reuse the same set of contexts, we expect to build an
	 * identical tree.
	 */
	for (pass = 0; pass < 3; pass++) {
		for (order = 0; order < 64; order += SHIFT) {
			u64 context = BIT_ULL(order);
			struct i915_syncmap *join;

			err = check_leaf(&sync, context, 0);
			if (err)
				goto out;

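			/*
			 * i915_syncmap_set() leaves the cursor pointing at the
			 * leaf it just touched; its parent should be the join
			 * inserted above the previous tree.
			 */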
			join = sync->parent;
			if (!join) /* very first insert will have no parents */
				continue;

			if (!join->height) {
				pr_err("Parent with no height!\n");
				err = -EINVAL;
				goto out;
			}

			if (hweight32(join->bitmap) != 2) {
				pr_err("Join does not have 2 children: %x (%d)\n",
				       join->bitmap, hweight32(join->bitmap));
				err = -EINVAL;
				goto out;
			}

			if (__sync_child(join)[__sync_branch_idx(join, context)] != sync) {
				pr_err("Leaf misplaced in parent!\n");
				err = -EINVAL;
				goto out;
			}
		}
	}
out:
	return dump_syncmap(sync, err);
}

static int igt_syncmap_join_below(void *arg)
{
	struct i915_syncmap *sync;
	unsigned int step, order, idx;
	int err = -ENODEV;

	i915_syncmap_init(&sync);

	/*
	 * Check that we can split a compacted branch by replacing it with
	 * a join.
	 */
	for (step = 0; step < KSYNCMAP; step++) {
		for (order = 64 - SHIFT; order > 0; order -= SHIFT) {
			u64 context = step * BIT_ULL(order);

			err = i915_syncmap_set(&sync, context, 0);
			if (err)
				goto out;

			if (sync->height) {
				pr_err("Inserting context=%llx (order=%d, step=%d) did not return leaf (height=%d, prefix=%llx)\n",
				       context, order, step, sync->height, sync->prefix);
				err = -EINVAL;
				goto out;
			}
		}
	}

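	/*
	 * Re-walk every id we inserted and check that each one survived the
	 * later splits, and that none of its unused neighbours appeared.
	 */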
	for (step = 0; step < KSYNCMAP; step++) {
		for (order = SHIFT; order < 64; order += SHIFT) {
			u64 context = step * BIT_ULL(order);

			if (!i915_syncmap_is_later(&sync, context, 0)) {
				pr_err("1: context %llx (order=%d, step=%d) not found\n",
				       context, order, step);
				err = -EINVAL;
				goto out;
			}

			for (idx = 1; idx < KSYNCMAP; idx++) {
				if (i915_syncmap_is_later(&sync, context + idx, 0)) {
					pr_err("1: context %llx (order=%d, step=%d) should not exist\n",
					       context + idx, order, step);
					err = -EINVAL;
					goto out;
				}
			}
		}
	}

	for (order = SHIFT; order < 64; order += SHIFT) {
		for (step = 0; step < KSYNCMAP; step++) {
			u64 context = step * BIT_ULL(order);

			if (!i915_syncmap_is_later(&sync, context, 0)) {
				pr_err("2: context %llx (order=%d, step=%d) not found\n",
				       context, order, step);
				err = -EINVAL;
				goto out;
			}
		}
	}

out:
	return dump_syncmap(sync, err);
}

static int igt_syncmap_neighbours(void *arg)
{
	I915_RND_STATE(prng);
	IGT_TIMEOUT(end_time);
	struct i915_syncmap *sync;
	int err = -ENODEV;

	/*
	 * Each leaf holds KSYNCMAP seqnos. Check that when we create KSYNCMAP
	 * neighbouring ids, they all fit into the same leaf.
	 */

	i915_syncmap_init(&sync);
	do {
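		/*
		 * Align the random id down to a leaf boundary so that
		 * context..context + KSYNCMAP - 1 all share the same prefix.
		 */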
		u64 context = i915_prandom_u64_state(&prng) & ~MASK;
		unsigned int idx;

		if (i915_syncmap_is_later(&sync, context, 0)) /* Skip repeats */
			continue;

		for (idx = 0; idx < KSYNCMAP; idx++) {
			err = i915_syncmap_set(&sync, context + idx, 0);
			if (err)
				goto out;

			if (sync->height) {
				pr_err("Inserting context=%llx did not return leaf (height=%d, prefix=%llx)\n",
				       context, sync->height, sync->prefix);
				err = -EINVAL;
				goto out;
			}

			if (sync->bitmap != BIT(idx + 1) - 1) {
				pr_err("Inserting neighbouring context=0x%llx+%d, did not fit into the same leaf bitmap=%x (%d), expected %lx (%d)\n",
				       context, idx,
				       sync->bitmap, hweight32(sync->bitmap),
				       BIT(idx + 1) - 1, idx + 1);
				err = -EINVAL;
				goto out;
			}
		}
	} while (!__igt_timeout(end_time, NULL));
out:
	return dump_syncmap(sync, err);
}

static int igt_syncmap_compact(void *arg)
{
	struct i915_syncmap *sync;
	unsigned int idx, order;
	int err = -ENODEV;

	i915_syncmap_init(&sync);

	/*
	 * A syncmap is a "space efficient" compressed radix tree - any
	 * branch with only one child is skipped and replaced by the child.
	 *
	 * If we construct a tree with ids that are neighbouring at a non-zero
	 * height, we form a join but each child of that join is directly a
	 * leaf holding the single id.
	 */
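	/*
	 * For example (assuming SHIFT is 4), inserting 0x0 and 0x10001 should
	 * produce a single join at height 16 whose children are two leaves,
	 * with no single-child intermediate nodes in between.
	 */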
	for (order = SHIFT; order < 64; order += SHIFT) {
		err = check_syncmap_free(&sync);
		if (err)
			goto out;

		/* Create neighbours in the parent */
		for (idx = 0; idx < KSYNCMAP; idx++) {
			u64 context = idx * BIT_ULL(order) + idx;

			err = i915_syncmap_set(&sync, context, 0);
			if (err)
				goto out;

			if (sync->height) {
				pr_err("Inserting context=%llx (order=%d, idx=%d) did not return leaf (height=%d, prefix=%llx)\n",
				       context, order, idx,
				       sync->height, sync->prefix);
				err = -EINVAL;
				goto out;
			}
		}

		sync = sync->parent;
		if (sync->parent) {
			pr_err("Parent (join) of last leaf was not the sync!\n");
			err = -EINVAL;
			goto out;
		}

		if (sync->height != order) {
			pr_err("Join does not have the expected height, found %d, expected %d\n",
			       sync->height, order);
			err = -EINVAL;
			goto out;
		}

		if (sync->bitmap != BIT(KSYNCMAP) - 1) {
			pr_err("Join is not full, found %x (%d), expected %lx (%d)\n",
			       sync->bitmap, hweight32(sync->bitmap),
			       BIT(KSYNCMAP) - 1, KSYNCMAP);
			err = -EINVAL;
			goto out;
		}

		/* Each of our children should be a leaf */
		for (idx = 0; idx < KSYNCMAP; idx++) {
			struct i915_syncmap *leaf = __sync_child(sync)[idx];

			if (leaf->height) {
				pr_err("Child %d is not a leaf!\n", idx);
				err = -EINVAL;
				goto out;
			}

			if (leaf->parent != sync) {
				pr_err("Child %d is not attached to us!\n",
				       idx);
				err = -EINVAL;
				goto out;
			}

			if (!is_power_of_2(leaf->bitmap)) {
				pr_err("Child %d holds more than one id, found %x (%d)\n",
				       idx, leaf->bitmap, hweight32(leaf->bitmap));
				err = -EINVAL;
				goto out;
			}

			if (leaf->bitmap != BIT(idx)) {
				pr_err("Child %d has wrong seqno idx, found %d, expected %d\n",
				       idx, ilog2(leaf->bitmap), idx);
				err = -EINVAL;
				goto out;
			}
		}
	}
out:
	return dump_syncmap(sync, err);
}

static int igt_syncmap_random(void *arg)
{
	I915_RND_STATE(prng);
	IGT_TIMEOUT(end_time);
	struct i915_syncmap *sync;
	unsigned long count, phase, i;
	u32 seqno;
	int err;

	i915_syncmap_init(&sync);

	/*
	 * Having tried to test the individual operations within i915_syncmap,
	 * run a smoketest exploring the entire u64 space with random
	 * insertions.
	 */

	count = 0;
	phase = jiffies + HZ/100 + 1;
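	/*
	 * Populate with random contexts for roughly 10ms. The replay loops
	 * below recreate the same sequence of contexts from a fresh PRNG
	 * state seeded with the same selftest seed, so 'count' draws from
	 * 'ctx' reproduce this set.
	 */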
	do {
		u64 context = i915_prandom_u64_state(&prng);

		err = i915_syncmap_set(&sync, context, 0);
		if (err)
			goto out;

		count++;
	} while (!time_after(jiffies, phase));
	seqno = 0;

	phase = 0;
	do {
		I915_RND_STATE(ctx);
		u32 last_seqno = seqno;
		bool expect;

		seqno = prandom_u32_state(&prng);
		expect = seqno_later(last_seqno, seqno);

		for (i = 0; i < count; i++) {
			u64 context = i915_prandom_u64_state(&ctx);

			if (i915_syncmap_is_later(&sync, context, seqno) != expect) {
				pr_err("context=%llu, last=%u this=%u did not match expectation (%d)\n",
				       context, last_seqno, seqno, expect);
				err = -EINVAL;
				goto out;
			}

			err = i915_syncmap_set(&sync, context, seqno);
			if (err)
				goto out;
		}

		phase++;
	} while (!__igt_timeout(end_time, NULL));
	pr_debug("Completed %lu passes, each of %lu contexts\n", phase, count);
out:
	return dump_syncmap(sync, err);
}

int i915_syncmap_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_syncmap_init),
		SUBTEST(igt_syncmap_one),
		SUBTEST(igt_syncmap_join_above),
		SUBTEST(igt_syncmap_join_below),
		SUBTEST(igt_syncmap_neighbours),
		SUBTEST(igt_syncmap_compact),
		SUBTEST(igt_syncmap_random),
	};

	return i915_subtests(tests, NULL);
}