/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * arch/arm/include/asm/opcodes.h
 */

#ifndef __ASM_ARM_OPCODES_H
#define __ASM_ARM_OPCODES_H

#ifndef __ASSEMBLY__
#include <linux/linkage.h>
extern asmlinkage unsigned int arm_check_condition(u32 opcode, u32 psr);
#endif

#define ARM_OPCODE_CONDTEST_FAIL   0
#define ARM_OPCODE_CONDTEST_PASS   1
#define ARM_OPCODE_CONDTEST_UNCOND 2


/*
 * Assembler opcode byteswap helpers.
 * These are only intended for use by this header: don't use them directly,
 * because they will be suboptimal in most cases.
 */
#define ___asm_opcode_swab32(x) (	\
	  (((x) << 24) & 0xFF000000)	\
	| (((x) <<  8) & 0x00FF0000)	\
	| (((x) >>  8) & 0x0000FF00)	\
	| (((x) >> 24) & 0x000000FF)	\
)
#define ___asm_opcode_swab16(x) (	\
	  (((x) << 8) & 0xFF00)		\
	| (((x) >> 8) & 0x00FF)		\
)
#define ___asm_opcode_swahb32(x) (	\
	  (((x) << 8) & 0xFF00FF00)	\
	| (((x) >> 8) & 0x00FF00FF)	\
)
#define ___asm_opcode_swahw32(x) (	\
	  (((x) << 16) & 0xFFFF0000)	\
	| (((x) >> 16) & 0x0000FFFF)	\
)
#define ___asm_opcode_identity32(x) ((x) & 0xFFFFFFFF)
#define ___asm_opcode_identity16(x) ((x) & 0xFFFF)


/*
 * Opcode byteswap helpers
 *
 * These macros help with converting instructions between a canonical integer
 * format and in-memory representation, in an endianness-agnostic manner.
 *
 * __mem_to_opcode_*() convert from in-memory representation to canonical form.
 * __opcode_to_mem_*() convert from canonical form to in-memory representation.
 *
 *
 * Canonical instruction representation:
 *
 *	ARM:		0xKKLLMMNN
 *	Thumb 16-bit:	0x0000KKLL, where KK < 0xE8
 *	Thumb 32-bit:	0xKKLLMMNN, where KK >= 0xE8
 *
 * There is no way to distinguish an ARM instruction in canonical representation
 * from a Thumb instruction (just as these cannot be distinguished in memory).
 * Where this distinction is important, it needs to be tracked separately.
 *
 * Note that values in the range 0x0000E800..0xE7FFFFFF intentionally do not
 * represent any valid Thumb-2 instruction.  For this range,
 * __opcode_is_thumb32() and __opcode_is_thumb16() will both be false.
 *
 * The ___asm variants are intended only for use by this header, in situations
 * involving inline assembler.  For .S files, the normal __opcode_*() macros
 * should do the right thing.
 */
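/*
 * Worked example (an illustrative sketch, not an exhaustive reference):
 * taking the well-known ARM "mov r0, r0" encoding 0xE1A00000 and the
 * Thumb-2 "nop.w" encoding 0xF3AF8000 as sample canonical values:
 *
 *	__opcode_to_mem_arm(0xE1A00000) is the identity on a little-endian
 *	kernel, but byte-swaps to 0x0000A0E1 under CONFIG_CPU_ENDIAN_BE8.
 *
 *	__opcode_is_thumb32(0xF3AF8000) is true (the leading byte 0xF3 is
 *	>= 0xE8), while __opcode_is_thumb16(0xF3AF8000) is false.
 */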
#ifdef __ASSEMBLY__

#define ___opcode_swab32(x) ___asm_opcode_swab32(x)
#define ___opcode_swab16(x) ___asm_opcode_swab16(x)
#define ___opcode_swahb32(x) ___asm_opcode_swahb32(x)
#define ___opcode_swahw32(x) ___asm_opcode_swahw32(x)
#define ___opcode_identity32(x) ___asm_opcode_identity32(x)
#define ___opcode_identity16(x) ___asm_opcode_identity16(x)

#else /* ! __ASSEMBLY__ */

#include <linux/types.h>
#include <linux/swab.h>

#define ___opcode_swab32(x) swab32(x)
#define ___opcode_swab16(x) swab16(x)
#define ___opcode_swahb32(x) swahb32(x)
#define ___opcode_swahw32(x) swahw32(x)
#define ___opcode_identity32(x) ((u32)(x))
#define ___opcode_identity16(x) ((u16)(x))

#endif /* ! __ASSEMBLY__ */


#ifdef CONFIG_CPU_ENDIAN_BE8

#define __opcode_to_mem_arm(x) ___opcode_swab32(x)
#define __opcode_to_mem_thumb16(x) ___opcode_swab16(x)
#define __opcode_to_mem_thumb32(x) ___opcode_swahb32(x)
#define ___asm_opcode_to_mem_arm(x) ___asm_opcode_swab32(x)
#define ___asm_opcode_to_mem_thumb16(x) ___asm_opcode_swab16(x)
#define ___asm_opcode_to_mem_thumb32(x) ___asm_opcode_swahb32(x)

#else /* ! CONFIG_CPU_ENDIAN_BE8 */

#define __opcode_to_mem_arm(x) ___opcode_identity32(x)
#define __opcode_to_mem_thumb16(x) ___opcode_identity16(x)
#define ___asm_opcode_to_mem_arm(x) ___asm_opcode_identity32(x)
#define ___asm_opcode_to_mem_thumb16(x) ___asm_opcode_identity16(x)
#ifndef CONFIG_CPU_ENDIAN_BE32
/*
 * On BE32 systems, using 32-bit accesses to store Thumb instructions will not
 * work in all cases, due to alignment constraints.  For now, a correct
 * version is not provided for BE32.
 */
#define __opcode_to_mem_thumb32(x) ___opcode_swahw32(x)
#define ___asm_opcode_to_mem_thumb32(x) ___asm_opcode_swahw32(x)
#endif

#endif /* ! CONFIG_CPU_ENDIAN_BE8 */

#define __mem_to_opcode_arm(x) __opcode_to_mem_arm(x)
#define __mem_to_opcode_thumb16(x) __opcode_to_mem_thumb16(x)
#ifndef CONFIG_CPU_ENDIAN_BE32
#define __mem_to_opcode_thumb32(x) __opcode_to_mem_thumb32(x)
#endif

/* Operations specific to Thumb opcodes */

/* Instruction size checks: */
#define __opcode_is_thumb32(x) (		\
	   ((x) & 0xF8000000) == 0xE8000000	\
	|| ((x) & 0xF0000000) == 0xF0000000	\
)
#define __opcode_is_thumb16(x) (					\
	   ((x) & 0xFFFF0000) == 0					\
	&& !(((x) & 0xF800) == 0xE800 || ((x) & 0xF000) == 0xF000)	\
)

/* Operations to construct or split 32-bit Thumb instructions: */
#define __opcode_thumb32_first(x) (___opcode_identity16((x) >> 16))
#define __opcode_thumb32_second(x) (___opcode_identity16(x))
#define __opcode_thumb32_compose(first, second) (			\
	  (___opcode_identity32(___opcode_identity16(first)) << 16)	\
	| ___opcode_identity32(___opcode_identity16(second))		\
)
#define ___asm_opcode_thumb32_first(x) (___asm_opcode_identity16((x) >> 16))
#define ___asm_opcode_thumb32_second(x) (___asm_opcode_identity16(x))
#define ___asm_opcode_thumb32_compose(first, second) (			     \
	  (___asm_opcode_identity32(___asm_opcode_identity16(first)) << 16) \
	| ___asm_opcode_identity32(___asm_opcode_identity16(second))	     \
)
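/*
 * Illustrative sketch of splitting and recombining a 32-bit Thumb opcode,
 * again using the Thumb-2 "nop.w" encoding 0xF3AF8000 as the sample value:
 *
 *	__opcode_thumb32_first(0xF3AF8000)	 == 0xF3AF
 *	__opcode_thumb32_second(0xF3AF8000)	 == 0x8000
 *	__opcode_thumb32_compose(0xF3AF, 0x8000) == 0xF3AF8000
 *
 * When writing such an instruction to memory, one common pattern is to
 * convert each halfword with __opcode_to_mem_thumb16() and store the two
 * halfwords separately; __opcode_to_mem_thumb32() handles the whole word
 * at once where a 32-bit access is suitable.
 */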
/*
 * Opcode injection helpers
 *
 * In rare cases it is necessary to assemble an opcode which the
 * assembler does not support directly, or which would normally be
 * rejected because of the CFLAGS or AFLAGS used to build the affected
 * file.
 *
 * Before using these macros, consider carefully whether it is feasible
 * instead to change the build flags for your file, or whether it really
 * makes sense to support old assembler versions when building that
 * particular kernel feature.
 *
 * The macros defined here should only be used where there is no viable
 * alternative.
 *
 *
 * __inst_arm(x): emit the specified ARM opcode
 * __inst_thumb16(x): emit the specified 16-bit Thumb opcode
 * __inst_thumb32(x): emit the specified 32-bit Thumb opcode
 *
 * __inst_arm_thumb16(arm, thumb): emit either the specified arm or
 *	16-bit Thumb opcode, depending on whether an ARM or Thumb-2
 *	kernel is being built
 *
 * __inst_arm_thumb32(arm, thumb): emit either the specified arm or
 *	32-bit Thumb opcode, depending on whether an ARM or Thumb-2
 *	kernel is being built
 *
 *
 * Note that using these macros directly is poor practice.  Instead, you
 * should use them to define human-readable wrapper macros to encode the
 * instructions that you care about.  In code which might run on ARMv7 or
 * above, you can usually use the __inst_arm_thumb{16,32} macros to
 * specify the ARM and Thumb alternatives at the same time.  This ensures
 * that the correct opcode gets emitted depending on the instruction set
 * used for the kernel build.
 *
 * Look at opcodes-virt.h for an example of how to use these macros.
 */
#include <linux/stringify.h>

#define __inst_arm(x) ___inst_arm(___asm_opcode_to_mem_arm(x))
#define __inst_thumb32(x) ___inst_thumb32(				\
	___asm_opcode_to_mem_thumb16(___asm_opcode_thumb32_first(x)),	\
	___asm_opcode_to_mem_thumb16(___asm_opcode_thumb32_second(x))	\
)
#define __inst_thumb16(x) ___inst_thumb16(___asm_opcode_to_mem_thumb16(x))

#ifdef CONFIG_THUMB2_KERNEL
#define __inst_arm_thumb16(arm_opcode, thumb_opcode) \
	__inst_thumb16(thumb_opcode)
#define __inst_arm_thumb32(arm_opcode, thumb_opcode) \
	__inst_thumb32(thumb_opcode)
#else
#define __inst_arm_thumb16(arm_opcode, thumb_opcode) __inst_arm(arm_opcode)
#define __inst_arm_thumb32(arm_opcode, thumb_opcode) __inst_arm(arm_opcode)
#endif
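/*
 * A minimal sketch of the recommended wrapping pattern.  The macro name
 * below is hypothetical; the encodings are the traditional ARM NOP
 * ("mov r0, r0", 0xE1A00000) and the 16-bit Thumb NOP (0xBF00):
 *
 *	#define __EXAMPLE_NOP	__inst_arm_thumb16(0xE1A00000, 0xBF00)
 *
 * Code using __EXAMPLE_NOP then emits whichever encoding matches the
 * instruction set of the kernel build; see opcodes-virt.h for real uses
 * of this pattern.
 */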
/* Helpers for the helpers.  Don't use these directly. */
#ifdef __ASSEMBLY__
#define ___inst_arm(x) .long x
#define ___inst_thumb16(x) .short x
#define ___inst_thumb32(first, second) .short first, second
#else
#define ___inst_arm(x) ".long " __stringify(x) "\n\t"
#define ___inst_thumb16(x) ".short " __stringify(x) "\n\t"
#define ___inst_thumb32(first, second) \
	".short " __stringify(first) ", " __stringify(second) "\n\t"
#endif

#endif /* __ASM_ARM_OPCODES_H */