// SPDX-License-Identifier: GPL-2.0
/*
 * Test cases for compiler-based stack variable zeroing via future
 * compiler flags or CONFIG_GCC_PLUGIN_STRUCTLEAK*.
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

/* Exfiltration buffer. */
#define MAX_VAR_SIZE	128
static u8 check_buf[MAX_VAR_SIZE];

/* Character array to trigger stack protector in all functions. */
#define VAR_BUFFER	32

/* Volatile mask to convince compiler to copy memory with 0xff. */
static volatile u8 forced_mask = 0xff;

/* Location and size tracking to validate fill and test are colocated. */
static void *fill_start, *target_start;
static size_t fill_size, target_size;

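/* True only when the needle region lies entirely within the haystack. */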
static bool range_contains(char *haystack_start, size_t haystack_size,
			   char *needle_start, size_t needle_size)
{
	if (needle_start >= haystack_start &&
	    needle_start + needle_size <= haystack_start + haystack_size)
		return true;
	return false;
}

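/*
 * Each test kind (SCALAR, STRING, STRUCT) gets its own flavor of the
 * helper macros below. The "which" token from DEFINE_TEST() is pasted
 * onto these names so a single test body can declare, pass, and "use"
 * any kind of variable without the compiler optimizing it away.
 */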
#define DO_NOTHING_TYPE_SCALAR(var_type)	var_type
#define DO_NOTHING_TYPE_STRING(var_type)	void
#define DO_NOTHING_TYPE_STRUCT(var_type)	void

#define DO_NOTHING_RETURN_SCALAR(ptr)		*(ptr)
#define DO_NOTHING_RETURN_STRING(ptr)		/**/
#define DO_NOTHING_RETURN_STRUCT(ptr)		/**/

#define DO_NOTHING_CALL_SCALAR(var, name)	\
		(var) = do_nothing_ ## name(&(var))
#define DO_NOTHING_CALL_STRING(var, name)	\
		do_nothing_ ## name(var)
#define DO_NOTHING_CALL_STRUCT(var, name)	\
		do_nothing_ ## name(&(var))

#define FETCH_ARG_SCALAR(var)		&var
#define FETCH_ARG_STRING(var)		var
#define FETCH_ARG_STRUCT(var)		&var

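/*
 * INIT_<which>_<init_level> supplies the initializer (if any) that is
 * appended to the variable declaration in each leaf function. The
 * "runtime" variants begin with a ';' so they can terminate the bare
 * declaration and then emit ordinary assignment statements.
 */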
#define FILL_SIZE_STRING	16

#define INIT_CLONE_SCALAR	/**/
#define INIT_CLONE_STRING	[FILL_SIZE_STRING]
#define INIT_CLONE_STRUCT	/**/

#define INIT_SCALAR_none	/**/
#define INIT_SCALAR_zero	= 0

#define INIT_STRING_none	[FILL_SIZE_STRING] /**/
#define INIT_STRING_zero	[FILL_SIZE_STRING] = { }

#define INIT_STRUCT_none	/**/
#define INIT_STRUCT_zero	= { }
#define INIT_STRUCT_static_partial	= { .two = 0, }
#define INIT_STRUCT_static_all		= { .one = 0,		\
					    .two = 0,		\
					    .three = 0,		\
					    .four = 0,		\
					}
#define INIT_STRUCT_dynamic_partial	= { .two = arg->two, }
#define INIT_STRUCT_dynamic_all		= { .one = arg->one,	\
					    .two = arg->two,	\
					    .three = arg->three, \
					    .four = arg->four,	\
					}
#define INIT_STRUCT_runtime_partial	;			\
					var.two = 0
#define INIT_STRUCT_runtime_all		;			\
					var.one = 0;		\
					var.two = 0;		\
					var.three = 0;		\
					var.four = 0

/*
 * @name: unique string name for the test
 * @var_type: type to be tested for zeroing initialization
 * @which: is this a SCALAR, STRING, or STRUCT type?
 * @init_level: what kind of initialization is performed
 * @xfail: is this test expected to fail?
 */
#define DEFINE_TEST_DRIVER(name, var_type, which, xfail)	\
/* Returns 0 on success, 1 on failure. */			\
static noinline __init int test_ ## name (void)		\
{								\
	var_type zero INIT_CLONE_ ## which;			\
	int ignored;						\
	u8 sum = 0, i;						\
								\
	/* Notice when a new test is larger than expected. */	\
	BUILD_BUG_ON(sizeof(zero) > MAX_VAR_SIZE);		\
								\
	/* Fill clone type with zero for per-field init. */	\
	memset(&zero, 0x00, sizeof(zero));			\
	/* Clear entire check buffer for 0xFF overlap test. */	\
	memset(check_buf, 0x00, sizeof(check_buf));		\
	/* Fill stack with 0xFF. */				\
	ignored = leaf_ ## name((unsigned long)&ignored, 1,	\
				FETCH_ARG_ ## which(zero));	\
	/* Verify all bytes overwritten with 0xFF. */		\
	for (sum = 0, i = 0; i < target_size; i++)		\
		sum += (check_buf[i] != 0xFF);			\
	if (sum) {						\
		pr_err(#name ": leaf fill was not 0xFF!?\n");	\
		return 1;					\
	}							\
	/* Clear entire check buffer for later bit tests. */	\
	memset(check_buf, 0x00, sizeof(check_buf));		\
	/* Extract stack-defined variable contents. */		\
	ignored = leaf_ ## name((unsigned long)&ignored, 0,	\
				FETCH_ARG_ ## which(zero));	\
								\
	/* Validate that compiler lined up fill and target. */	\
	if (!range_contains(fill_start, fill_size,		\
			    target_start, target_size)) {	\
		pr_err(#name ": stack fill missed target!?\n");	\
		pr_err(#name ": fill %zu wide\n", fill_size);	\
		pr_err(#name ": target offset by %d\n",		\
			(int)((ssize_t)(uintptr_t)fill_start -	\
			(ssize_t)(uintptr_t)target_start));	\
		return 1;					\
	}							\
								\
	/* Look for any bytes still 0xFF in check region. */	\
	for (sum = 0, i = 0; i < target_size; i++)		\
		sum += (check_buf[i] == 0xFF);			\
								\
	if (sum == 0) {						\
		pr_info(#name " ok\n");				\
		return 0;					\
	} else {						\
		pr_warn(#name " %sFAIL (uninit bytes: %d)\n",	\
			(xfail) ? "X" : "", sum);		\
		return (xfail) ? 0 : 1;				\
	}							\
}
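/*
 * DEFINE_TEST() below always instantiates the driver above with xfail=0;
 * the switch-based tests near the end of this file use it directly with
 * xfail=1, since compilers are not expected to initialize those variables.
 */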
#define DEFINE_TEST(name, var_type, which, init_level)		\
/* no-op to force compiler into ignoring "uninitialized" vars */\
static noinline __init DO_NOTHING_TYPE_ ## which(var_type)	\
do_nothing_ ## name(var_type *ptr)				\
{								\
	/* Will always be true, but compiler doesn't know. */	\
	if ((unsigned long)ptr > 0x2)				\
		return DO_NOTHING_RETURN_ ## which(ptr);	\
	else							\
		return DO_NOTHING_RETURN_ ## which(ptr + 1);	\
}								\
static noinline __init int leaf_ ## name(unsigned long sp,	\
					 bool fill,		\
					 var_type *arg)		\
{								\
	char buf[VAR_BUFFER];					\
	var_type var INIT_ ## which ## _ ## init_level;		\
								\
	target_start = &var;					\
	target_size = sizeof(var);				\
	/*							\
	 * Keep this buffer around to make sure we've got a	\
	 * stack frame of SOME kind...				\
	 */							\
	memset(buf, (char)(sp & 0xff), sizeof(buf));		\
	/* Fill variable with 0xFF. */				\
	if (fill) {						\
		fill_start = &var;				\
		fill_size = sizeof(var);			\
		memset(fill_start,				\
		       (char)((sp & 0xff) | forced_mask),	\
		       fill_size);				\
	}							\
								\
	/* Silence "never initialized" warnings. */		\
	DO_NOTHING_CALL_ ## which(var, name);			\
								\
	/* Exfiltrate "var". */					\
	memcpy(check_buf, target_start, target_size);		\
								\
	return (int)buf[0] | (int)buf[sizeof(buf) - 1];		\
}								\
DEFINE_TEST_DRIVER(name, var_type, which, 0)
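
/*
 * As an illustration, DEFINE_TEST(u8_none, u8, SCALAR, none) expands
 * (roughly) into:
 *
 *	static noinline __init u8 do_nothing_u8_none(u8 *ptr)
 *	{ ... return *(ptr); ... }
 *
 *	static noinline __init int leaf_u8_none(unsigned long sp,
 *						bool fill, u8 *arg)
 *	{
 *		char buf[VAR_BUFFER];
 *		u8 var;		(note: no initializer at all)
 *		...
 *	}
 *
 * plus a test_u8_none() driver from DEFINE_TEST_DRIVER().
 */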

/* Structure with no padding. */
struct test_packed {
	unsigned long one;
	unsigned long two;
	unsigned long three;
	unsigned long four;
};

/* Simple structure with padding likely to be covered by compiler. */
struct test_small_hole {
	size_t one;
	char two;
	/* 3 byte padding hole here. */
	int three;
	unsigned long four;
};

/* Trigger unhandled padding in a structure. */
struct test_big_hole {
	u8 one;
	u8 two;
	u8 three;
	/* 61 byte padding hole here. */
	u8 four __aligned(64);
} __aligned(64);

struct test_trailing_hole {
	char *one;
	char *two;
	char *three;
	char four;
	/* "sizeof(unsigned long) - 1" byte padding hole here. */
};

/* Test if STRUCTLEAK is clearing structs with __user fields. */
struct test_user {
	u8 one;
	unsigned long two;
	char __user *three;
	unsigned long four;
};

#define DEFINE_SCALAR_TEST(name, init)		\
	DEFINE_TEST(name ## _ ## init, name, SCALAR, init)

#define DEFINE_SCALAR_TESTS(init)		\
	DEFINE_SCALAR_TEST(u8, init);		\
	DEFINE_SCALAR_TEST(u16, init);		\
	DEFINE_SCALAR_TEST(u32, init);		\
	DEFINE_SCALAR_TEST(u64, init);		\
	DEFINE_TEST(char_array_ ## init, unsigned char, STRING, init)

#define DEFINE_STRUCT_TEST(name, init)		\
	DEFINE_TEST(name ## _ ## init,		\
		    struct test_ ## name, STRUCT, init)

#define DEFINE_STRUCT_TESTS(init)		\
	DEFINE_STRUCT_TEST(small_hole, init);	\
	DEFINE_STRUCT_TEST(big_hole, init);	\
	DEFINE_STRUCT_TEST(trailing_hole, init);\
	DEFINE_STRUCT_TEST(packed, init)

/* These should be fully initialized all the time! */
DEFINE_SCALAR_TESTS(zero);
DEFINE_STRUCT_TESTS(zero);
/* Static initialization: padding may be left uninitialized. */
DEFINE_STRUCT_TESTS(static_partial);
DEFINE_STRUCT_TESTS(static_all);
/* Dynamic initialization: padding may be left uninitialized. */
DEFINE_STRUCT_TESTS(dynamic_partial);
DEFINE_STRUCT_TESTS(dynamic_all);
/* Runtime initialization: padding may be left uninitialized. */
DEFINE_STRUCT_TESTS(runtime_partial);
DEFINE_STRUCT_TESTS(runtime_all);
/* No initialization without compiler instrumentation. */
DEFINE_SCALAR_TESTS(none);
DEFINE_STRUCT_TESTS(none);
DEFINE_TEST(user, struct test_user, STRUCT, none);

/*
 * Check two uses through a variable declaration outside either path,
 * which was noticed as a special case in porting earlier stack init
 * compiler logic.
 */
static int noinline __leaf_switch_none(int path, bool fill)
{
	switch (path) {
		uint64_t var;

	case 1:
		target_start = &var;
		target_size = sizeof(var);
		if (fill) {
			fill_start = &var;
			fill_size = sizeof(var);

			memset(fill_start, forced_mask | 0x55, fill_size);
		}
		memcpy(check_buf, target_start, target_size);
		break;
	case 2:
		target_start = &var;
		target_size = sizeof(var);
		if (fill) {
			fill_start = &var;
			fill_size = sizeof(var);

			memset(fill_start, forced_mask | 0xaa, fill_size);
		}
		memcpy(check_buf, target_start, target_size);
		break;
	default:
		var = 5;
		return var & forced_mask;
	}
	return 0;
}

static noinline __init int leaf_switch_1_none(unsigned long sp, bool fill,
					      uint64_t *arg)
{
	return __leaf_switch_none(1, fill);
}

static noinline __init int leaf_switch_2_none(unsigned long sp, bool fill,
					      uint64_t *arg)
{
	return __leaf_switch_none(2, fill);
}

/*
 * These are expected to fail for most configurations because neither
 * GCC nor Clang has a way to perform initialization of variables in
 * non-code areas (i.e. in a switch statement before the first "case").
 * https://bugs.llvm.org/show_bug.cgi?id=44916
 */
DEFINE_TEST_DRIVER(switch_1_none, uint64_t, SCALAR, 1);
DEFINE_TEST_DRIVER(switch_2_none, uint64_t, SCALAR, 1);

static int __init test_stackinit_init(void)
{
	unsigned int failures = 0;

#define test_scalars(init)	do {				\
		failures += test_u8_ ## init ();		\
		failures += test_u16_ ## init ();		\
		failures += test_u32_ ## init ();		\
		failures += test_u64_ ## init ();		\
		failures += test_char_array_ ## init ();	\
	} while (0)

#define test_structs(init)	do {				\
		failures += test_small_hole_ ## init ();	\
		failures += test_big_hole_ ## init ();		\
		failures += test_trailing_hole_ ## init ();	\
		failures += test_packed_ ## init ();		\
	} while (0)

	/* These are explicitly initialized and should always pass. */
	test_scalars(zero);
	test_structs(zero);
	/* Padding here appears to be accidentally always initialized? */
	test_structs(dynamic_partial);
	/* Padding initialization depends on compiler behaviors. */
	test_structs(static_partial);
	test_structs(static_all);
	test_structs(dynamic_all);
	test_structs(runtime_partial);
	test_structs(runtime_all);

	/* STRUCTLEAK_BYREF_ALL should cover everything from here down. */
	test_scalars(none);
	failures += test_switch_1_none();
	failures += test_switch_2_none();

	/* STRUCTLEAK_BYREF should cover from here down. */
	test_structs(none);

	/* STRUCTLEAK will only cover this. */
	failures += test_user();

	if (failures == 0)
		pr_info("all tests passed!\n");
	else
		pr_err("failures: %u\n", failures);

	return failures ? -EINVAL : 0;
}
module_init(test_stackinit_init);

static void __exit test_stackinit_exit(void)
{ }
module_exit(test_stackinit_exit);

MODULE_LICENSE("GPL");