/*
 * arch/arm/include/asm/opcodes.h
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#ifndef __ASM_ARM_OPCODES_H
#define __ASM_ARM_OPCODES_H

#ifndef __ASSEMBLY__
#include <linux/linkage.h>
extern asmlinkage unsigned int arm_check_condition(u32 opcode, u32 psr);
#endif

#define ARM_OPCODE_CONDTEST_FAIL   0
#define ARM_OPCODE_CONDTEST_PASS   1
#define ARM_OPCODE_CONDTEST_UNCOND 2


/*
 * Assembler opcode byteswap helpers.
 * These are only intended for use by this header: don't use them directly,
 * because they will be suboptimal in most cases.
 */
#define ___asm_opcode_swab32(x) (	\
	  (((x) << 24) & 0xFF000000)	\
	| (((x) <<  8) & 0x00FF0000)	\
	| (((x) >>  8) & 0x0000FF00)	\
	| (((x) >> 24) & 0x000000FF)	\
)
#define ___asm_opcode_swab16(x) (	\
	  (((x) << 8) & 0xFF00)		\
	| (((x) >> 8) & 0x00FF)		\
)
#define ___asm_opcode_swahb32(x) (	\
	  (((x) << 8) & 0xFF00FF00)	\
	| (((x) >> 8) & 0x00FF00FF)	\
)
#define ___asm_opcode_swahw32(x) (	\
	  (((x) << 16) & 0xFFFF0000)	\
	| (((x) >> 16) & 0x0000FFFF)	\
)
#define ___asm_opcode_identity32(x) ((x) & 0xFFFFFFFF)
#define ___asm_opcode_identity16(x) ((x) & 0xFFFF)


/*
 * Opcode byteswap helpers
 *
 * These macros help with converting instructions between a canonical integer
 * format and in-memory representation, in an endianness-agnostic manner.
 *
 * __mem_to_opcode_*() convert from in-memory representation to canonical form.
 * __opcode_to_mem_*() convert from canonical form to in-memory representation.
 *
 *
 * Canonical instruction representation:
 *
 *	ARM:		0xKKLLMMNN
 *	Thumb 16-bit:	0x0000KKLL, where KK < 0xE8
 *	Thumb 32-bit:	0xKKLLMMNN, where KK >= 0xE8
 *
 * There is no way to distinguish an ARM instruction in canonical representation
 * from a Thumb instruction (just as these cannot be distinguished in memory).
 * Where this distinction is important, it needs to be tracked separately.
 *
 * Note that values in the range 0x0000E800..0xE7FFFFFF intentionally do not
 * represent any valid Thumb-2 instruction.  For this range,
 * __opcode_is_thumb32() and __opcode_is_thumb16() will both be false.
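 *
 * For illustration (a sketch using the architecturally defined NOP
 * encodings):
 *
 *	ARM NOP:	0xE320F000	(always one 32-bit word)
 *	Thumb NOP:	0x0000BF00	(0xBF < 0xE8, so __opcode_is_thumb16())
 *	Thumb NOP.W:	0xF3AF8000	(0xF3 >= 0xE8, so __opcode_is_thumb32())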
 *
 * The ___asm variants are intended only for use by this header, in situations
 * involving inline assembler.  For .S files, the normal __opcode_*() macros
 * should do the right thing.
 */

#ifdef __ASSEMBLY__

#define ___opcode_swab32(x) ___asm_opcode_swab32(x)
#define ___opcode_swab16(x) ___asm_opcode_swab16(x)
#define ___opcode_swahb32(x) ___asm_opcode_swahb32(x)
#define ___opcode_swahw32(x) ___asm_opcode_swahw32(x)
#define ___opcode_identity32(x) ___asm_opcode_identity32(x)
#define ___opcode_identity16(x) ___asm_opcode_identity16(x)

#else /* ! __ASSEMBLY__ */

#include <linux/types.h>
#include <linux/swab.h>

#define ___opcode_swab32(x) swab32(x)
#define ___opcode_swab16(x) swab16(x)
#define ___opcode_swahb32(x) swahb32(x)
#define ___opcode_swahw32(x) swahw32(x)
#define ___opcode_identity32(x) ((u32)(x))
#define ___opcode_identity16(x) ((u16)(x))

#endif /* ! __ASSEMBLY__ */


#ifdef CONFIG_CPU_ENDIAN_BE8

#define __opcode_to_mem_arm(x) ___opcode_swab32(x)
#define __opcode_to_mem_thumb16(x) ___opcode_swab16(x)
#define __opcode_to_mem_thumb32(x) ___opcode_swahb32(x)
#define ___asm_opcode_to_mem_arm(x) ___asm_opcode_swab32(x)
#define ___asm_opcode_to_mem_thumb16(x) ___asm_opcode_swab16(x)
#define ___asm_opcode_to_mem_thumb32(x) ___asm_opcode_swahb32(x)

#else /* ! CONFIG_CPU_ENDIAN_BE8 */

#define __opcode_to_mem_arm(x) ___opcode_identity32(x)
#define __opcode_to_mem_thumb16(x) ___opcode_identity16(x)
#define ___asm_opcode_to_mem_arm(x) ___asm_opcode_identity32(x)
#define ___asm_opcode_to_mem_thumb16(x) ___asm_opcode_identity16(x)
#ifndef CONFIG_CPU_ENDIAN_BE32
/*
 * On BE32 systems, using 32-bit accesses to store Thumb instructions will not
 * work in all cases, due to alignment constraints.  For now, a correct
 * version is not provided for BE32.
 */
#define __opcode_to_mem_thumb32(x) ___opcode_swahw32(x)
#define ___asm_opcode_to_mem_thumb32(x) ___asm_opcode_swahw32(x)
#endif

#endif /* ! CONFIG_CPU_ENDIAN_BE8 */
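
/*
 * Worked example (illustrative; the opcode is the Thumb-2 NOP.W encoding):
 * for the canonical value 0xF3AF8000, __opcode_to_mem_thumb32() yields
 * 0x8000F3AF on a little-endian kernel (halfwords swapped, so that a single
 * 32-bit store places the 0xF3AF halfword at the lower address) and
 * 0xAFF30080 on a BE8 kernel (bytes swapped within each halfword).
 */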

#define __mem_to_opcode_arm(x) __opcode_to_mem_arm(x)
#define __mem_to_opcode_thumb16(x) __opcode_to_mem_thumb16(x)
#ifndef CONFIG_CPU_ENDIAN_BE32
#define __mem_to_opcode_thumb32(x) __opcode_to_mem_thumb32(x)
#endif

/* Operations specific to Thumb opcodes */

/* Instruction size checks: */
#define __opcode_is_thumb32(x) (		\
	   ((x) & 0xF8000000) == 0xE8000000	\
	|| ((x) & 0xF0000000) == 0xF0000000	\
)
#define __opcode_is_thumb16(x) (					\
	   ((x) & 0xFFFF0000) == 0					\
	&& !(((x) & 0xF800) == 0xE800 || ((x) & 0xF000) == 0xF000)	\
)

/* Operations to construct or split 32-bit Thumb instructions: */
#define __opcode_thumb32_first(x) (___opcode_identity16((x) >> 16))
#define __opcode_thumb32_second(x) (___opcode_identity16(x))
#define __opcode_thumb32_compose(first, second) (			\
	  (___opcode_identity32(___opcode_identity16(first)) << 16)	\
	| ___opcode_identity32(___opcode_identity16(second))		\
)
#define ___asm_opcode_thumb32_first(x) (___asm_opcode_identity16((x) >> 16))
#define ___asm_opcode_thumb32_second(x) (___asm_opcode_identity16(x))
#define ___asm_opcode_thumb32_compose(first, second) (			      \
	  (___asm_opcode_identity32(___asm_opcode_identity16(first)) << 16)  \
	| ___asm_opcode_identity32(___asm_opcode_identity16(second))	      \
)
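
/*
 * A minimal usage sketch (C code; "addr" and the variable names are
 * illustrative and assume that the address is known to hold a 32-bit
 * Thumb instruction):
 *
 *	u16 first  = __mem_to_opcode_thumb16(*(u16 *)addr);
 *	u16 second = __mem_to_opcode_thumb16(*((u16 *)addr + 1));
 *	u32 insn   = __opcode_thumb32_compose(first, second);
 *
 *	if (__opcode_is_thumb32(insn))
 *		...decode the canonical 32-bit opcode in insn...
 */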

/*
 * Opcode injection helpers
 *
 * In rare cases it is necessary to assemble an opcode which the
 * assembler does not support directly, or which would normally be
 * rejected because of the CFLAGS or AFLAGS used to build the affected
 * file.
 *
 * Before using these macros, consider carefully whether it is feasible
 * instead to change the build flags for your file, or whether it really
 * makes sense to support old assembler versions when building that
 * particular kernel feature.
 *
 * The macros defined here should only be used where there is no viable
 * alternative.
 *
 *
 * __inst_arm(x): emit the specified ARM opcode
 * __inst_thumb16(x): emit the specified 16-bit Thumb opcode
 * __inst_thumb32(x): emit the specified 32-bit Thumb opcode
 *
 * __inst_arm_thumb16(arm, thumb): emit either the specified arm or
 *	16-bit Thumb opcode, depending on whether an ARM or Thumb-2
 *	kernel is being built
 *
 * __inst_arm_thumb32(arm, thumb): emit either the specified arm or
 *	32-bit Thumb opcode, depending on whether an ARM or Thumb-2
 *	kernel is being built
 *
 *
 * Note that using these macros directly is poor practice.  Instead, you
 * should use them to define human-readable wrapper macros to encode the
 * instructions that you care about.  In code which might run on ARMv7 or
 * above, you can usually use the __inst_arm_thumb{16,32} macros to
 * specify the ARM and Thumb alternatives at the same time.  This ensures
 * that the correct opcode gets emitted depending on the instruction set
 * used for the kernel build.
 *
 * Look at opcodes-virt.h for an example of how to use these macros.
 */
#include <linux/stringify.h>

#define __inst_arm(x) ___inst_arm(___asm_opcode_to_mem_arm(x))
#define __inst_thumb32(x) ___inst_thumb32(				\
	___asm_opcode_to_mem_thumb16(___asm_opcode_thumb32_first(x)),	\
	___asm_opcode_to_mem_thumb16(___asm_opcode_thumb32_second(x))	\
)
#define __inst_thumb16(x) ___inst_thumb16(___asm_opcode_to_mem_thumb16(x))

#ifdef CONFIG_THUMB2_KERNEL
#define __inst_arm_thumb16(arm_opcode, thumb_opcode) \
	__inst_thumb16(thumb_opcode)
#define __inst_arm_thumb32(arm_opcode, thumb_opcode) \
	__inst_thumb32(thumb_opcode)
#else
#define __inst_arm_thumb16(arm_opcode, thumb_opcode) __inst_arm(arm_opcode)
#define __inst_arm_thumb32(arm_opcode, thumb_opcode) __inst_arm(arm_opcode)
#endif

/* Helpers for the helpers.  Don't use these directly. */
#ifdef __ASSEMBLY__
#define ___inst_arm(x) .long x
#define ___inst_thumb16(x) .short x
#define ___inst_thumb32(first, second) .short first, second
#else
#define ___inst_arm(x) ".long " __stringify(x) "\n\t"
#define ___inst_thumb16(x) ".short " __stringify(x) "\n\t"
#define ___inst_thumb32(first, second) \
	".short " __stringify(first) ", " __stringify(second) "\n\t"
#endif

#endif /* __ASM_ARM_OPCODES_H */
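
/*
 * Typical usage pattern, as a sketch (the wrapper name and the encoding
 * placeholders below are hypothetical; opcodes-virt.h contains real
 * examples): wrap the raw encodings in a named macro and let
 * __inst_arm_thumb32() emit whichever form matches the kernel build:
 *
 *	#define MY_WIDGET_INSN						\
 *		__inst_arm_thumb32(<32-bit ARM encoding>,		\
 *				   <32-bit Thumb-2 encoding>)
 */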