/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_JUMP_LABEL_H
#define _ASM_X86_JUMP_LABEL_H

#define HAVE_JUMP_LABEL_BATCH

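/*
 * Both the initial NOP and the patched-in "jmp rel32" (0xe9 plus a 32-bit
 * displacement) occupy 5 bytes, so enabling or disabling a key swaps
 * equally sized instructions in place.
 */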
#define JUMP_LABEL_NOP_SIZE 5

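/*
 * The initial instruction must be a 5-byte NOP that is a single
 * instruction (hence the _ATOMIC variants), so the branch site can be
 * live-patched without leaving a partially overwritten NOP sequence.
 */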
#ifdef CONFIG_X86_64
# define STATIC_KEY_INIT_NOP P6_NOP5_ATOMIC
#else
# define STATIC_KEY_INIT_NOP GENERIC_NOP5_ATOMIC
#endif

#include <asm/asm.h>
#include <asm/nops.h>

#ifndef __ASSEMBLY__

#include <linux/stringify.h>
#include <linux/types.h>

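/*
 * Emit a 5-byte NOP at the branch site and record a __jump_table entry
 * describing it.  The entry is three relative values: the address of the
 * instruction, the address of the out-of-line label, and the address of
 * the static key with the "branch" argument folded into bit 0.  The
 * patching code combines that bit with the key's enabled state to decide
 * whether the site should currently be a NOP or a jump to %l[l_yes].
 */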
static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
{
	asm_volatile_goto("1:"
		".byte " __stringify(STATIC_KEY_INIT_NOP) "\n\t"
		".pushsection __jump_table, \"aw\" \n\t"
		_ASM_ALIGN "\n\t"
		".long 1b - ., %l[l_yes] - . \n\t"
		_ASM_PTR "%c0 + %c1 - .\n\t"
		".popsection \n\t"
		: : "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}

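/*
 * Same as arch_static_branch(), except the site starts life as an
 * unconditional "jmp rel32" to %l[l_yes] instead of a NOP.  The
 * __jump_table entry has the identical layout.
 */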
static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
{
	asm_volatile_goto("1:"
		".byte 0xe9\n\t .long %l[l_yes] - 2f\n\t"
		"2:\n\t"
		".pushsection __jump_table, \"aw\" \n\t"
		_ASM_ALIGN "\n\t"
		".long 1b - ., %l[l_yes] - . \n\t"
		_ASM_PTR "%c0 + %c1 - .\n\t"
		".popsection \n\t"
		: : "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}
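
/*
 * Typical use goes through the generic wrappers in <linux/jump_label.h>;
 * roughly (an illustrative sketch with made-up names, not the exact
 * macro expansion):
 *
 *	DEFINE_STATIC_KEY_FALSE(my_key);
 *
 *	if (static_branch_unlikely(&my_key))
 *		do_unlikely_work();
 *
 * While my_key is disabled, the branch site is the 5-byte NOP emitted by
 * arch_static_branch(); static_branch_enable(&my_key) live-patches it
 * into a jump to the out-of-line block.
 */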

#else /* __ASSEMBLY__ */

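/*
 * Assembly-side counterparts of the C helpers above, for use in .S files:
 * emit either the initial NOP or the "jmp.d32" depending on \def (the
 * key's build-time default) and record the matching __jump_table entry.
 */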
.macro STATIC_JUMP_IF_TRUE target, key, def
.Lstatic_jump_\@:
	.if \def
	/* Equivalent to "jmp.d32 \target" */
	.byte		0xe9
	.long		\target - .Lstatic_jump_after_\@
.Lstatic_jump_after_\@:
	.else
	.byte		STATIC_KEY_INIT_NOP
	.endif
	.pushsection __jump_table, "aw"
	_ASM_ALIGN
	.long		.Lstatic_jump_\@ - ., \target - .
	_ASM_PTR	\key - .
	.popsection
.endm

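/*
 * Same as STATIC_JUMP_IF_TRUE, but the jump is taken when the key is
 * false; the "+ 1" below sets bit 0 of the stored key reference so the
 * patching code knows this site's branch sense is inverted.
 */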
.macro STATIC_JUMP_IF_FALSE target, key, def
.Lstatic_jump_\@:
	.if \def
	.byte		STATIC_KEY_INIT_NOP
	.else
	/* Equivalent to "jmp.d32 \target" */
	.byte		0xe9
	.long		\target - .Lstatic_jump_after_\@
.Lstatic_jump_after_\@:
	.endif
	.pushsection __jump_table, "aw"
	_ASM_ALIGN
	.long		.Lstatic_jump_\@ - ., \target - .
	_ASM_PTR	\key + 1 - .
	.popsection
.endm

#endif /* __ASSEMBLY__ */

#endif