// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright © 2020 Intel Corporation
 */

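/*
 * Build a dummy ring for the wrap/direction checks below: the struct and its
 * backing storage come from a single kzalloc() and the ring starts out
 * pinned, so the helpers can be exercised without a real GEM object or vma.
 */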
static struct intel_ring *mock_ring(unsigned long sz)
{
	struct intel_ring *ring;

	ring = kzalloc(sizeof(*ring) + sz, GFP_KERNEL);
	if (!ring)
		return NULL;

	kref_init(&ring->ref);
	ring->size = sz;
	ring->wrap = BITS_PER_TYPE(ring->size) - ilog2(sz);
	ring->effective_size = sz;
	ring->vaddr = (void *)(ring + 1);
	atomic_set(&ring->pin_count, 1);

	intel_ring_update_space(ring);

	return ring;
}

static void mock_ring_free(struct intel_ring *ring)
{
	kfree(ring);
}

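/*
 * Reduce intel_ring_direction() to the sign of its result (-1, 0 or +1) and
 * compare that against the expected ordering of the two offsets.
 */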
static int check_ring_direction(struct intel_ring *ring,
				u32 next, u32 prev,
				int expected)
{
	int result;

	result = intel_ring_direction(ring, next, prev);
	if (result < 0)
		result = -1;
	else if (result > 0)
		result = 1;

	if (result != expected) {
		pr_err("intel_ring_direction(%u, %u):%d != %d\n",
		       next, prev, result, expected);
		return -EINVAL;
	}

	return 0;
}

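/*
 * From offset x, step forward by 'step' (wrapped to the ring) and check that
 * each offset compares as "same" against itself, that the forward hop reads
 * as "after" and the reverse as "before".
 */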
static int check_ring_step(struct intel_ring *ring, u32 x, u32 step)
{
	u32 prev = x, next = intel_ring_wrap(ring, x + step);
	int err = 0;

	err |= check_ring_direction(ring, next, next,  0);
	err |= check_ring_direction(ring, prev, prev,  0);
	err |= check_ring_direction(ring, next, prev,  1);
	err |= check_ring_direction(ring, prev, next, -1);

	return err;
}

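/*
 * Repeat the step check at x and at its immediate neighbours (x + 1, x - 1)
 * so the offsets either side of the probe point are covered as well.
 */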
static int check_ring_offset(struct intel_ring *ring, u32 x, u32 step)
{
	int err = 0;

	err |= check_ring_step(ring, x, step);
	err |= check_ring_step(ring, intel_ring_wrap(ring, x + 1), step);
	err |= check_ring_step(ring, intel_ring_wrap(ring, x - 1), step);

	return err;
}

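/*
 * Exercise intel_ring_direction() on a small mock ring: sweep power-of-two
 * steps starting from both halves of the ring, then feed in offsets beyond
 * ring->size to check that unwrapped inputs are handled too.
 */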
static int igt_ring_direction(void *dummy)
{
	struct intel_ring *ring;
	unsigned int half = 2048;
	int step, err = 0;

	ring = mock_ring(2 * half);
	if (!ring)
		return -ENOMEM;

	GEM_BUG_ON(ring->size != 2 * half);

	/* Precision of wrap detection is limited to ring->size / 2 */
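	/*
	 * Hence keep every step strictly below half the ring: a distance of
	 * exactly ring->size / 2 is ambiguous, as it could equally be read
	 * as half a ring ahead or half a ring behind.
	 */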
	for (step = 1; step < half; step <<= 1) {
		err |= check_ring_offset(ring, 0, step);
		err |= check_ring_offset(ring, half, step);
	}
	err |= check_ring_step(ring, 0, half - 64);

	/* And check unwrapped handling for good measure */
	err |= check_ring_offset(ring, 0, 2 * half + 64);
	err |= check_ring_offset(ring, 3 * half, 1);

	mock_ring_free(ring);
	return err;
}

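/*
 * Entry point for the mock ring selftests; runs the subtest list through
 * i915_subtests(), no live hardware required.
 */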
int intel_ring_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_ring_direction),
	};

	return i915_subtests(tests, NULL);
}