1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun #ifndef __ASM_PREEMPT_H
3*4882a593Smuzhiyun #define __ASM_PREEMPT_H
4*4882a593Smuzhiyun
5*4882a593Smuzhiyun #include <asm/current.h>
6*4882a593Smuzhiyun #include <linux/thread_info.h>
7*4882a593Smuzhiyun #include <asm/atomic_ops.h>
8*4882a593Smuzhiyun
9*4882a593Smuzhiyun #ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
10*4882a593Smuzhiyun
/* We use the MSB mostly because it's available */
12*4882a593Smuzhiyun #define PREEMPT_NEED_RESCHED 0x80000000
13*4882a593Smuzhiyun #define PREEMPT_ENABLED (0 + PREEMPT_NEED_RESCHED)
14*4882a593Smuzhiyun
preempt_count(void)15*4882a593Smuzhiyun static inline int preempt_count(void)
16*4882a593Smuzhiyun {
17*4882a593Smuzhiyun return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;
18*4882a593Smuzhiyun }
19*4882a593Smuzhiyun
preempt_count_set(int pc)20*4882a593Smuzhiyun static inline void preempt_count_set(int pc)
21*4882a593Smuzhiyun {
22*4882a593Smuzhiyun int old, new;
23*4882a593Smuzhiyun
24*4882a593Smuzhiyun do {
25*4882a593Smuzhiyun old = READ_ONCE(S390_lowcore.preempt_count);
26*4882a593Smuzhiyun new = (old & PREEMPT_NEED_RESCHED) |
27*4882a593Smuzhiyun (pc & ~PREEMPT_NEED_RESCHED);
28*4882a593Smuzhiyun } while (__atomic_cmpxchg(&S390_lowcore.preempt_count,
29*4882a593Smuzhiyun old, new) != old);
30*4882a593Smuzhiyun }
31*4882a593Smuzhiyun
set_preempt_need_resched(void)32*4882a593Smuzhiyun static inline void set_preempt_need_resched(void)
33*4882a593Smuzhiyun {
34*4882a593Smuzhiyun __atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);
35*4882a593Smuzhiyun }
36*4882a593Smuzhiyun
clear_preempt_need_resched(void)37*4882a593Smuzhiyun static inline void clear_preempt_need_resched(void)
38*4882a593Smuzhiyun {
39*4882a593Smuzhiyun __atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);
40*4882a593Smuzhiyun }
41*4882a593Smuzhiyun
test_preempt_need_resched(void)42*4882a593Smuzhiyun static inline bool test_preempt_need_resched(void)
43*4882a593Smuzhiyun {
44*4882a593Smuzhiyun return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);
45*4882a593Smuzhiyun }
46*4882a593Smuzhiyun
__preempt_count_add(int val)47*4882a593Smuzhiyun static inline void __preempt_count_add(int val)
48*4882a593Smuzhiyun {
49*4882a593Smuzhiyun /*
50*4882a593Smuzhiyun * With some obscure config options and CONFIG_PROFILE_ALL_BRANCHES
51*4882a593Smuzhiyun * enabled, gcc 12 fails to handle __builtin_constant_p().
52*4882a593Smuzhiyun */
53*4882a593Smuzhiyun if (!IS_ENABLED(CONFIG_PROFILE_ALL_BRANCHES)) {
54*4882a593Smuzhiyun if (__builtin_constant_p(val) && (val >= -128) && (val <= 127)) {
55*4882a593Smuzhiyun __atomic_add_const(val, &S390_lowcore.preempt_count);
56*4882a593Smuzhiyun return;
57*4882a593Smuzhiyun }
58*4882a593Smuzhiyun }
59*4882a593Smuzhiyun __atomic_add(val, &S390_lowcore.preempt_count);
60*4882a593Smuzhiyun }
61*4882a593Smuzhiyun
__preempt_count_sub(int val)62*4882a593Smuzhiyun static inline void __preempt_count_sub(int val)
63*4882a593Smuzhiyun {
64*4882a593Smuzhiyun __preempt_count_add(-val);
65*4882a593Smuzhiyun }
66*4882a593Smuzhiyun
__preempt_count_dec_and_test(void)67*4882a593Smuzhiyun static inline bool __preempt_count_dec_and_test(void)
68*4882a593Smuzhiyun {
69*4882a593Smuzhiyun return __atomic_add(-1, &S390_lowcore.preempt_count) == 1;
70*4882a593Smuzhiyun }
71*4882a593Smuzhiyun
should_resched(int preempt_offset)72*4882a593Smuzhiyun static inline bool should_resched(int preempt_offset)
73*4882a593Smuzhiyun {
74*4882a593Smuzhiyun return unlikely(READ_ONCE(S390_lowcore.preempt_count) ==
75*4882a593Smuzhiyun preempt_offset);
76*4882a593Smuzhiyun }
77*4882a593Smuzhiyun
78*4882a593Smuzhiyun #else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
79*4882a593Smuzhiyun
80*4882a593Smuzhiyun #define PREEMPT_ENABLED (0)
81*4882a593Smuzhiyun
preempt_count(void)82*4882a593Smuzhiyun static inline int preempt_count(void)
83*4882a593Smuzhiyun {
84*4882a593Smuzhiyun return READ_ONCE(S390_lowcore.preempt_count);
85*4882a593Smuzhiyun }
86*4882a593Smuzhiyun
preempt_count_set(int pc)87*4882a593Smuzhiyun static inline void preempt_count_set(int pc)
88*4882a593Smuzhiyun {
89*4882a593Smuzhiyun S390_lowcore.preempt_count = pc;
90*4882a593Smuzhiyun }
91*4882a593Smuzhiyun
set_preempt_need_resched(void)92*4882a593Smuzhiyun static inline void set_preempt_need_resched(void)
93*4882a593Smuzhiyun {
94*4882a593Smuzhiyun }
95*4882a593Smuzhiyun
clear_preempt_need_resched(void)96*4882a593Smuzhiyun static inline void clear_preempt_need_resched(void)
97*4882a593Smuzhiyun {
98*4882a593Smuzhiyun }
99*4882a593Smuzhiyun
test_preempt_need_resched(void)100*4882a593Smuzhiyun static inline bool test_preempt_need_resched(void)
101*4882a593Smuzhiyun {
102*4882a593Smuzhiyun return false;
103*4882a593Smuzhiyun }
104*4882a593Smuzhiyun
__preempt_count_add(int val)105*4882a593Smuzhiyun static inline void __preempt_count_add(int val)
106*4882a593Smuzhiyun {
107*4882a593Smuzhiyun S390_lowcore.preempt_count += val;
108*4882a593Smuzhiyun }
109*4882a593Smuzhiyun
__preempt_count_sub(int val)110*4882a593Smuzhiyun static inline void __preempt_count_sub(int val)
111*4882a593Smuzhiyun {
112*4882a593Smuzhiyun S390_lowcore.preempt_count -= val;
113*4882a593Smuzhiyun }
114*4882a593Smuzhiyun
__preempt_count_dec_and_test(void)115*4882a593Smuzhiyun static inline bool __preempt_count_dec_and_test(void)
116*4882a593Smuzhiyun {
117*4882a593Smuzhiyun return !--S390_lowcore.preempt_count && tif_need_resched();
118*4882a593Smuzhiyun }
119*4882a593Smuzhiyun
should_resched(int preempt_offset)120*4882a593Smuzhiyun static inline bool should_resched(int preempt_offset)
121*4882a593Smuzhiyun {
122*4882a593Smuzhiyun return unlikely(preempt_count() == preempt_offset &&
123*4882a593Smuzhiyun tif_need_resched());
124*4882a593Smuzhiyun }
125*4882a593Smuzhiyun
126*4882a593Smuzhiyun #endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
127*4882a593Smuzhiyun
128*4882a593Smuzhiyun #define init_task_preempt_count(p) do { } while (0)
129*4882a593Smuzhiyun /* Deferred to CPU bringup time */
130*4882a593Smuzhiyun #define init_idle_preempt_count(p, cpu) do { } while (0)
131*4882a593Smuzhiyun
132*4882a593Smuzhiyun #ifdef CONFIG_PREEMPTION
133*4882a593Smuzhiyun extern asmlinkage void preempt_schedule(void);
134*4882a593Smuzhiyun #define __preempt_schedule() preempt_schedule()
135*4882a593Smuzhiyun extern asmlinkage void preempt_schedule_notrace(void);
136*4882a593Smuzhiyun #define __preempt_schedule_notrace() preempt_schedule_notrace()
137*4882a593Smuzhiyun #endif /* CONFIG_PREEMPTION */
138*4882a593Smuzhiyun
139*4882a593Smuzhiyun #endif /* __ASM_PREEMPT_H */
140