/*
 * Copyright (C) 1995-1999 Gary Thomas, Paul Mackerras, Cort Dougan.
 */
#ifndef _ASM_POWERPC_PPC_ASM_H
#define _ASM_POWERPC_PPC_ASM_H

#include <linux/stringify.h>
#include <asm/asm-compat.h>
#include <asm/processor.h>
#include <asm/ppc-opcode.h>
#include <asm/firmware.h>
#include <asm/feature-fixups.h>

#ifdef __ASSEMBLY__

#define SZL			(BITS_PER_LONG/8)

/*
 * Macros for accurate CPU time accounting.
 * These macros handle transitions between user and system state
 * in exception entry and exit and accumulate time to the
 * user_time and system_time fields in the paca.
 */

#ifndef CONFIG_VIRT_CPU_ACCOUNTING_NATIVE
#define ACCOUNT_CPU_USER_ENTRY(ptr, ra, rb)
#define ACCOUNT_CPU_USER_EXIT(ptr, ra, rb)
#define ACCOUNT_STOLEN_TIME
#else
#define ACCOUNT_CPU_USER_ENTRY(ptr, ra, rb)				\
	MFTB(ra);			/* get timebase */		\
	PPC_LL	rb, ACCOUNT_STARTTIME_USER(ptr);			\
	PPC_STL	ra, ACCOUNT_STARTTIME(ptr);				\
	subf	rb,rb,ra;		/* subtract start value */	\
	PPC_LL	ra, ACCOUNT_USER_TIME(ptr);				\
	add	ra,ra,rb;		/* add on to user time */	\
	PPC_STL	ra, ACCOUNT_USER_TIME(ptr)

#define ACCOUNT_CPU_USER_EXIT(ptr, ra, rb)				\
	MFTB(ra);			/* get timebase */		\
	PPC_LL	rb, ACCOUNT_STARTTIME(ptr);				\
	PPC_STL	ra, ACCOUNT_STARTTIME_USER(ptr);			\
	subf	rb,rb,ra;		/* subtract start value */	\
	PPC_LL	ra, ACCOUNT_SYSTEM_TIME(ptr);				\
	add	ra,ra,rb;		/* add on to system time */	\
	PPC_STL	ra, ACCOUNT_SYSTEM_TIME(ptr)

#ifdef CONFIG_PPC_SPLPAR
#define ACCOUNT_STOLEN_TIME						\
BEGIN_FW_FTR_SECTION;							\
	beq	33f;							\
	/* from user - see if there are any DTL entries to process */	\
	ld	r10,PACALPPACAPTR(r13);	/* get ptr to VPA */		\
	ld	r11,PACA_DTL_RIDX(r13);	/* get log read index */	\
	addi	r10,r10,LPPACA_DTLIDX;					\
	LDX_BE	r10,0,r10;		/* get log write index */	\
	cmpd	cr1,r11,r10;						\
	beq+	cr1,33f;						\
	bl	accumulate_stolen_time;					\
	ld	r12,_MSR(r1);						\
	andi.	r10,r12,MSR_PR;	/* Restore cr0 (coming from user) */	\
33:									\
END_FW_FTR_SECTION_IFSET(FW_FEATURE_SPLPAR)

#else	/* CONFIG_PPC_SPLPAR */
#define ACCOUNT_STOLEN_TIME

#endif /* CONFIG_PPC_SPLPAR */

#endif /* CONFIG_VIRT_CPU_ACCOUNTING_NATIVE */
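/*
 * Illustrative sketch only, assuming the 64-bit convention that the
 * paca pointer lives in r13: exception entry/exit code would invoke
 * these with two scratch GPRs, e.g.
 *
 *	ACCOUNT_CPU_USER_ENTRY(r13, r11, r12)
 *	...
 *	ACCOUNT_CPU_USER_EXIT(r13, r11, r12)
 */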
/*
 * Macros for storing registers into and loading registers from
 * exception frames.
 */
#ifdef __powerpc64__
#define SAVE_GPR(n, base)	std	n,GPR0+8*(n)(base)
#define REST_GPR(n, base)	ld	n,GPR0+8*(n)(base)
#define SAVE_NVGPRS(base)	SAVE_8GPRS(14, base); SAVE_10GPRS(22, base)
#define REST_NVGPRS(base)	REST_8GPRS(14, base); REST_10GPRS(22, base)
#else
#define SAVE_GPR(n, base)	stw	n,GPR0+4*(n)(base)
#define REST_GPR(n, base)	lwz	n,GPR0+4*(n)(base)
#define SAVE_NVGPRS(base)	stmw	13, GPR0+4*13(base)
#define REST_NVGPRS(base)	lmw	13, GPR0+4*13(base)
#endif

#define SAVE_2GPRS(n, base)	SAVE_GPR(n, base); SAVE_GPR(n+1, base)
#define SAVE_4GPRS(n, base)	SAVE_2GPRS(n, base); SAVE_2GPRS(n+2, base)
#define SAVE_8GPRS(n, base)	SAVE_4GPRS(n, base); SAVE_4GPRS(n+4, base)
#define SAVE_10GPRS(n, base)	SAVE_8GPRS(n, base); SAVE_2GPRS(n+8, base)
#define REST_2GPRS(n, base)	REST_GPR(n, base); REST_GPR(n+1, base)
#define REST_4GPRS(n, base)	REST_2GPRS(n, base); REST_2GPRS(n+2, base)
#define REST_8GPRS(n, base)	REST_4GPRS(n, base); REST_4GPRS(n+4, base)
#define REST_10GPRS(n, base)	REST_8GPRS(n, base); REST_2GPRS(n+8, base)
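/*
 * Illustrative sketch only: a handler that may clobber the
 * non-volatile GPRs (r14-r31, or r13-r31 on 32-bit) saves and
 * restores them around the work, with r1 pointing at the frame:
 *
 *	SAVE_NVGPRS(r1)
 *	...
 *	REST_NVGPRS(r1)
 */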
#define SAVE_FPR(n, base)	stfd	n,8*TS_FPRWIDTH*(n)(base)
#define SAVE_2FPRS(n, base)	SAVE_FPR(n, base); SAVE_FPR(n+1, base)
#define SAVE_4FPRS(n, base)	SAVE_2FPRS(n, base); SAVE_2FPRS(n+2, base)
#define SAVE_8FPRS(n, base)	SAVE_4FPRS(n, base); SAVE_4FPRS(n+4, base)
#define SAVE_16FPRS(n, base)	SAVE_8FPRS(n, base); SAVE_8FPRS(n+8, base)
#define SAVE_32FPRS(n, base)	SAVE_16FPRS(n, base); SAVE_16FPRS(n+16, base)
#define REST_FPR(n, base)	lfd	n,8*TS_FPRWIDTH*(n)(base)
#define REST_2FPRS(n, base)	REST_FPR(n, base); REST_FPR(n+1, base)
#define REST_4FPRS(n, base)	REST_2FPRS(n, base); REST_2FPRS(n+2, base)
#define REST_8FPRS(n, base)	REST_4FPRS(n, base); REST_4FPRS(n+4, base)
#define REST_16FPRS(n, base)	REST_8FPRS(n, base); REST_8FPRS(n+8, base)
#define REST_32FPRS(n, base)	REST_16FPRS(n, base); REST_16FPRS(n+16, base)

#define SAVE_VR(n,b,base)	li b,16*(n);  stvx n,base,b
#define SAVE_2VRS(n,b,base)	SAVE_VR(n,b,base); SAVE_VR(n+1,b,base)
#define SAVE_4VRS(n,b,base)	SAVE_2VRS(n,b,base); SAVE_2VRS(n+2,b,base)
#define SAVE_8VRS(n,b,base)	SAVE_4VRS(n,b,base); SAVE_4VRS(n+4,b,base)
#define SAVE_16VRS(n,b,base)	SAVE_8VRS(n,b,base); SAVE_8VRS(n+8,b,base)
#define SAVE_32VRS(n,b,base)	SAVE_16VRS(n,b,base); SAVE_16VRS(n+16,b,base)
#define REST_VR(n,b,base)	li b,16*(n); lvx n,base,b
#define REST_2VRS(n,b,base)	REST_VR(n,b,base); REST_VR(n+1,b,base)
#define REST_4VRS(n,b,base)	REST_2VRS(n,b,base); REST_2VRS(n+2,b,base)
#define REST_8VRS(n,b,base)	REST_4VRS(n,b,base); REST_4VRS(n+4,b,base)
#define REST_16VRS(n,b,base)	REST_8VRS(n,b,base); REST_8VRS(n+8,b,base)
#define REST_32VRS(n,b,base)	REST_16VRS(n,b,base); REST_16VRS(n+16,b,base)

#ifdef __BIG_ENDIAN__
#define STXVD2X_ROT(n,b,base)		STXVD2X(n,b,base)
#define LXVD2X_ROT(n,b,base)		LXVD2X(n,b,base)
#else
/*
 * On little-endian, swap the doublewords around the access so the
 * in-memory image always has big-endian doubleword order.
 */
#define STXVD2X_ROT(n,b,base)		XXSWAPD(n,n);		\
					STXVD2X(n,b,base);	\
					XXSWAPD(n,n)

#define LXVD2X_ROT(n,b,base)		LXVD2X(n,b,base);	\
					XXSWAPD(n,n)
#endif
/* Save the lower 32 VSRs in the thread VSR region */
#define SAVE_VSR(n,b,base)	li b,16*(n);  STXVD2X_ROT(n,R##base,R##b)
#define SAVE_2VSRS(n,b,base)	SAVE_VSR(n,b,base); SAVE_VSR(n+1,b,base)
#define SAVE_4VSRS(n,b,base)	SAVE_2VSRS(n,b,base); SAVE_2VSRS(n+2,b,base)
#define SAVE_8VSRS(n,b,base)	SAVE_4VSRS(n,b,base); SAVE_4VSRS(n+4,b,base)
#define SAVE_16VSRS(n,b,base)	SAVE_8VSRS(n,b,base); SAVE_8VSRS(n+8,b,base)
#define SAVE_32VSRS(n,b,base)	SAVE_16VSRS(n,b,base); SAVE_16VSRS(n+16,b,base)
#define REST_VSR(n,b,base)	li b,16*(n); LXVD2X_ROT(n,R##base,R##b)
#define REST_2VSRS(n,b,base)	REST_VSR(n,b,base); REST_VSR(n+1,b,base)
#define REST_4VSRS(n,b,base)	REST_2VSRS(n,b,base); REST_2VSRS(n+2,b,base)
#define REST_8VSRS(n,b,base)	REST_4VSRS(n,b,base); REST_4VSRS(n+4,b,base)
#define REST_16VSRS(n,b,base)	REST_8VSRS(n,b,base); REST_8VSRS(n+8,b,base)
#define REST_32VSRS(n,b,base)	REST_16VSRS(n,b,base); REST_16VSRS(n+16,b,base)

/*
 * b = base register for addressing, o = base offset from register of 1st EVR
 * n = first EVR, s = scratch
 */
#define SAVE_EVR(n,s,b,o)	evmergehi s,s,n; stw s,o+4*(n)(b)
#define SAVE_2EVRS(n,s,b,o)	SAVE_EVR(n,s,b,o); SAVE_EVR(n+1,s,b,o)
#define SAVE_4EVRS(n,s,b,o)	SAVE_2EVRS(n,s,b,o); SAVE_2EVRS(n+2,s,b,o)
#define SAVE_8EVRS(n,s,b,o)	SAVE_4EVRS(n,s,b,o); SAVE_4EVRS(n+4,s,b,o)
#define SAVE_16EVRS(n,s,b,o)	SAVE_8EVRS(n,s,b,o); SAVE_8EVRS(n+8,s,b,o)
#define SAVE_32EVRS(n,s,b,o)	SAVE_16EVRS(n,s,b,o); SAVE_16EVRS(n+16,s,b,o)
#define REST_EVR(n,s,b,o)	lwz s,o+4*(n)(b); evmergelo n,s,n
#define REST_2EVRS(n,s,b,o)	REST_EVR(n,s,b,o); REST_EVR(n+1,s,b,o)
#define REST_4EVRS(n,s,b,o)	REST_2EVRS(n,s,b,o); REST_2EVRS(n+2,s,b,o)
#define REST_8EVRS(n,s,b,o)	REST_4EVRS(n,s,b,o); REST_4EVRS(n+4,s,b,o)
#define REST_16EVRS(n,s,b,o)	REST_8EVRS(n,s,b,o); REST_8EVRS(n+8,s,b,o)
#define REST_32EVRS(n,s,b,o)	REST_16EVRS(n,s,b,o); REST_16EVRS(n+16,s,b,o)
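/*
 * Illustrative sketch only: saving the upper halves of all 32 EVRs,
 * with r5 as the scratch register, r4 as the base register, and an
 * asm-offsets constant (THREAD_EVR0 here, used as a placeholder) as
 * the offset of the first entry:
 *
 *	SAVE_32EVRS(0, r5, r4, THREAD_EVR0)
 */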
/* Macros to adjust thread priority for hardware multithreading */
#define HMT_VERY_LOW	or	31,31,31	# very low priority
#define HMT_LOW		or	1,1,1
#define HMT_MEDIUM_LOW	or	6,6,6		# medium low priority
#define HMT_MEDIUM	or	2,2,2
#define HMT_MEDIUM_HIGH	or	5,5,5		# medium high priority
#define HMT_HIGH	or	3,3,3
#define HMT_EXTRA_HIGH	or	7,7,7		# power7 only

#ifdef CONFIG_PPC64
#define ULONG_SIZE	8
#else
#define ULONG_SIZE	4
#endif
#define __VCPU_GPR(n)	(VCPU_GPRS + (n * ULONG_SIZE))
#define VCPU_GPR(n)	__VCPU_GPR(__REG_##n)

#ifdef __KERNEL__
#ifdef CONFIG_PPC64

#define STACKFRAMESIZE	256
#define __STK_REG(i)	(112 + ((i)-14)*8)
#define STK_REG(i)	__STK_REG(__REG_##i)

#ifdef PPC64_ELF_ABI_v2
#define STK_GOT		24
#define __STK_PARAM(i)	(32 + ((i)-3)*8)
#else
#define STK_GOT		40
#define __STK_PARAM(i)	(48 + ((i)-3)*8)
#endif
#define STK_PARAM(i)	__STK_PARAM(__REG_##i)

#ifdef PPC64_ELF_ABI_v2

#define _GLOBAL(name) \
	.align 2 ; \
	.type name,@function; \
	.globl name; \
name:

#define _GLOBAL_TOC(name) \
	.align 2 ; \
	.type name,@function; \
	.globl name; \
name: \
0:	addis r2,r12,(.TOC.-0b)@ha; \
	addi r2,r2,(.TOC.-0b)@l; \
	.localentry name,.-name

#define DOTSYM(a)	a

#else

#define XGLUE(a,b) a##b
#define GLUE(a,b) XGLUE(a,b)

#define _GLOBAL(name) \
	.align 2 ; \
	.globl name; \
	.globl GLUE(.,name); \
	.pushsection ".opd","aw"; \
name: \
	.quad GLUE(.,name); \
	.quad .TOC.@tocbase; \
	.quad 0; \
	.popsection; \
	.type GLUE(.,name),@function; \
GLUE(.,name):

#define _GLOBAL_TOC(name) _GLOBAL(name)

#define DOTSYM(a)	GLUE(.,a)

#endif

#else /* 32-bit */

#define _ENTRY(n)	\
	.globl n;	\
n:

#define _GLOBAL(n)	\
	.stabs __stringify(n:F-1),N_FUN,0,0,n;\
	.globl n;	\
n:

#define _GLOBAL_TOC(name) _GLOBAL(name)

#endif
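/*
 * Illustrative sketch only ('my_func' is a placeholder name): a
 * minimal assembly function callable from C is written as
 *
 * _GLOBAL(my_func)
 *	...
 *	blr
 *
 * On ELFv2, use _GLOBAL_TOC() instead when the function needs r2
 * (the TOC pointer) set up, e.g. because it accesses global data.
 */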
/*
 * __kprobes (the C annotation) puts the symbol into the .kprobes.text
 * section, which gets emitted at the end of regular text.
 *
 * _ASM_NOKPROBE_SYMBOL and NOKPROBE_SYMBOL just add the symbol to
 * a blacklist.  The former is for core kprobe functions/data, the
 * latter is for those that incidentally must be excluded from probing
 * and allows them to be linked at a more optimal location within text.
 */
#ifdef CONFIG_KPROBES
#define _ASM_NOKPROBE_SYMBOL(entry)			\
	.pushsection "_kprobe_blacklist","aw";		\
	PPC_LONG (entry) ;				\
	.popsection
#else
#define _ASM_NOKPROBE_SYMBOL(entry)
#endif

#define FUNC_START(name)	_GLOBAL(name)
#define FUNC_END(name)

/*
 * LOAD_REG_IMMEDIATE(rn, expr)
 *   Loads the value of the constant expression 'expr' into register 'rn'
 *   using immediate instructions only.  Use this when it's important not
 *   to reference other data (i.e. on ppc64 when the TOC pointer is not
 *   valid) and when 'expr' is a constant or absolute address.
 *
 * LOAD_REG_ADDR(rn, name)
 *   Loads the address of label 'name' into register 'rn'.  Use this when
 *   you don't particularly need immediate instructions only, but you need
 *   the whole address in one register (e.g. it's a structure address and
 *   you want to access various offsets within it).  On ppc32 this is
 *   identical to LOAD_REG_IMMEDIATE.
 *
 * LOAD_REG_ADDR_PIC(rn, name)
 *   Loads the address of label 'name' into register 'rn'.  Use this when
 *   the kernel doesn't run at the linked or relocated address.  Please
 *   note that this macro will clobber the lr register.
 *
 * LOAD_REG_ADDRBASE(rn, name)
 * ADDROFF(name)
 *   LOAD_REG_ADDRBASE loads part of the address of label 'name' into
 *   register 'rn'.  ADDROFF(name) returns the remainder of the address as
 *   a constant expression.  ADDROFF(name) is a signed expression < 16 bits
 *   in size, so is suitable for use directly as an offset in load and store
 *   instructions.  Use this when loading/storing a single word or less as:
 *      LOAD_REG_ADDRBASE(rX, name)
 *      ld	rY,ADDROFF(name)(rX)
 */
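/*
 * Illustrative sketch only ('my_var' is a placeholder symbol):
 *
 *	LOAD_REG_ADDR(r3, my_var)	r3 = address of my_var
 *	PPC_LL	r4, 0(r3)		r4 = value of my_var
 *
 * If the kernel is not yet running at its linked/relocated address,
 * use the PIC variant instead (remembering that it clobbers lr):
 *
 *	LOAD_REG_ADDR_PIC(r3, my_var)
 */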
/* Be careful, this will clobber the lr register. */
#define LOAD_REG_ADDR_PIC(reg, name)		\
	bl	0f;				\
0:	mflr	reg;				\
	addis	reg,reg,(name - 0b)@ha;		\
	addi	reg,reg,(name - 0b)@l;

#if defined(__powerpc64__) && defined(HAVE_AS_ATHIGH)
#define __AS_ATHIGH high
#else
#define __AS_ATHIGH h
#endif

.macro __LOAD_REG_IMMEDIATE_32 r, x
	.if (\x) >= 0x8000 || (\x) < -0x8000
		lis \r, (\x)@__AS_ATHIGH
		.if (\x) & 0xffff != 0
			ori \r, \r, (\x)@l
		.endif
	.else
		li \r, (\x)@l
	.endif
.endm

.macro __LOAD_REG_IMMEDIATE r, x
	.if (\x) >= 0x80000000 || (\x) < -0x80000000
		__LOAD_REG_IMMEDIATE_32 \r, (\x) >> 32
		sldi	\r, \r, 32
		.if (\x) & 0xffff0000 != 0
			oris \r, \r, (\x)@__AS_ATHIGH
		.endif
		.if (\x) & 0xffff != 0
			ori \r, \r, (\x)@l
		.endif
	.else
		__LOAD_REG_IMMEDIATE_32 \r, \x
	.endif
.endm

#ifdef __powerpc64__

#define LOAD_REG_IMMEDIATE(reg, expr) __LOAD_REG_IMMEDIATE reg, expr

#define LOAD_REG_IMMEDIATE_SYM(reg, tmp, expr)	\
	lis	tmp, (expr)@highest;		\
	lis	reg, (expr)@__AS_ATHIGH;	\
	ori	tmp, tmp, (expr)@higher;	\
	ori	reg, reg, (expr)@l;		\
	rldimi	reg, tmp, 32, 0

#define LOAD_REG_ADDR(reg,name)			\
	ld	reg,name@got(r2)

#define LOAD_REG_ADDRBASE(reg,name)	LOAD_REG_ADDR(reg,name)
#define ADDROFF(name)			0

/* offsets for stack frame layout */
#define LRSAVE	16

#else /* 32-bit */

#define LOAD_REG_IMMEDIATE(reg, expr) __LOAD_REG_IMMEDIATE_32 reg, expr

#define LOAD_REG_IMMEDIATE_SYM(reg,expr)	\
	lis	reg,(expr)@ha;			\
	addi	reg,reg,(expr)@l;

#define LOAD_REG_ADDR(reg,name)		LOAD_REG_IMMEDIATE_SYM(reg, name)

#define LOAD_REG_ADDRBASE(reg, name)	lis	reg,name@ha
#define ADDROFF(name)			name@l

/* offsets for stack frame layout */
#define LRSAVE	4

#endif

/* various errata or part fixups */
#if defined(CONFIG_PPC_CELL) || defined(CONFIG_PPC_FSL_BOOK3E)
#define MFTB(dest)			\
90:	mfspr dest, SPRN_TBRL;		\
BEGIN_FTR_SECTION_NESTED(96);		\
	cmpwi dest,0;			\
	beq-  90b;			\
END_FTR_SECTION_NESTED(CPU_FTR_CELL_TB_BUG, CPU_FTR_CELL_TB_BUG, 96)
#else
#define MFTB(dest)			MFTBL(dest)
#endif

#ifdef CONFIG_PPC_8xx
#define MFTBL(dest)			mftb dest
#define MFTBU(dest)			mftbu dest
#else
#define MFTBL(dest)			mfspr dest, SPRN_TBRL
#define MFTBU(dest)			mfspr dest, SPRN_TBRU
#endif
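/*
 * Illustrative sketch only: on 32-bit, a consistent 64-bit timebase
 * read needs the classic upper/lower/upper loop, retrying if the
 * upper half changed under us:
 *
 * 1:	MFTBU(r5)
 *	MFTBL(r4)
 *	MFTBU(r6)
 *	cmpw	r6,r5
 *	bne	1b
 */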
#ifndef CONFIG_SMP
#define TLBSYNC
#else
#define TLBSYNC		tlbsync; sync
#endif

#ifdef CONFIG_PPC64
#define MTOCRF(FXM, RS)			\
	BEGIN_FTR_SECTION_NESTED(848);	\
	mtcrf	(FXM), RS;		\
	FTR_SECTION_ELSE_NESTED(848);	\
	mtocrf	(FXM), RS;		\
	ALT_FTR_SECTION_END_NESTED_IFCLR(CPU_FTR_NOEXECUTE, 848)
#endif

/*
 * This instruction is not implemented on the PPC 603 or 601; however, on
 * the 403GCX and 405GP tlbia IS defined and tlbie is not.
 * All of these instructions exist in the 8xx, they have magical powers,
 * and they must be used.
 */

#if !defined(CONFIG_4xx) && !defined(CONFIG_PPC_8xx)
#define tlbia					\
	li	r4,1024;			\
	mtctr	r4;				\
	lis	r4,KERNELBASE@h;		\
	.machine push;				\
	.machine "power4";			\
0:	tlbie	r4;				\
	.machine pop;				\
	addi	r4,r4,0x1000;			\
	bdnz	0b
#endif


#ifdef CONFIG_IBM440EP_ERR42
#define PPC440EP_ERR42 isync
#else
#define PPC440EP_ERR42
#endif

/* The following stops all load and store data streams associated with stream
 * ID (ie. streams created explicitly).  The embedded and server mnemonics for
 * dcbt are different so this must only be used for server.
 */
#define DCBT_BOOK3S_STOP_ALL_STREAM_IDS(scratch)	\
	lis	scratch,0x60000000@h;			\
	dcbt	0,scratch,0b01010

/*
 * toreal/fromreal/tophys/tovirt macros. 32-bit BookE makes them
 * keep the address intact to be compatible with code shared with
 * 32-bit classic.
 *
 * On the other hand, I find it useful to have them behave as expected
 * by their name (ie always do the addition) on 64-bit BookE.
 */
#if defined(CONFIG_BOOKE) && !defined(CONFIG_PPC64)
#define toreal(rd)
#define fromreal(rd)

/*
 * We use addis to ensure compatibility with the "classic" ppc versions of
 * these macros, which use rs = 0 to get the tophys offset in rd, rather than
 * converting the address in r0, and so this version has to do that too
 * (i.e. set register rd to 0 when rs == 0).
 */
#define tophys(rd,rs)				\
	addis	rd,rs,0

#define tovirt(rd,rs)				\
	addis	rd,rs,0

#elif defined(CONFIG_PPC64)
#define toreal(rd)		/* we can access c000... in real mode */
#define fromreal(rd)

#define tophys(rd,rs)				\
	clrldi	rd,rs,2

#define tovirt(rd,rs)				\
	rotldi	rd,rs,16;			\
	ori	rd,rd,((KERNELBASE>>48)&0xFFFF);\
	rotldi	rd,rd,48
#else
#define toreal(rd)	tophys(rd,rd)
#define fromreal(rd)	tovirt(rd,rd)

#define tophys(rd, rs)	addis	rd, rs, -PAGE_OFFSET@h
#define tovirt(rd, rs)	addis	rd, rs, PAGE_OFFSET@h
#endif
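/*
 * Illustrative sketch only: 32-bit boot code that is about to run
 * with the MMU off first converts its stack pointer to a physical
 * address, and converts back once translation is re-enabled:
 *
 *	tophys(r11,r1)		r11 = physical view of the stack
 *	...
 *	tovirt(r1,r11)		back to the virtual mapping
 */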
#ifdef CONFIG_PPC_BOOK3S_64
#define RFI		rfid
#define MTMSRD(r)	mtmsrd	r
#define MTMSR_EERI(reg)	mtmsrd	reg,1
#else
#ifndef CONFIG_40x
#define	RFI		rfi
#else
#define RFI		rfi; b .	/* Prevent prefetch past rfi */
#endif
#define MTMSRD(r)	mtmsr	r
#define MTMSR_EERI(reg)	mtmsr	reg
#endif

#endif /* __KERNEL__ */

/* The boring bits... */

/* Condition Register Bit Fields */

#define	cr0	0
#define	cr1	1
#define	cr2	2
#define	cr3	3
#define	cr4	4
#define	cr5	5
#define	cr6	6
#define	cr7	7


/*
 * General Purpose Registers (GPRs)
 *
 * The lower case r0-r31 should be used in preference to the upper
 * case R0-R31 as they provide more error checking in the assembler.
 * Use R0-R31 only when really necessary.
 */
#define	r0	%r0
#define	r1	%r1
#define	r2	%r2
#define	r3	%r3
#define	r4	%r4
#define	r5	%r5
#define	r6	%r6
#define	r7	%r7
#define	r8	%r8
#define	r9	%r9
#define	r10	%r10
#define	r11	%r11
#define	r12	%r12
#define	r13	%r13
#define	r14	%r14
#define	r15	%r15
#define	r16	%r16
#define	r17	%r17
#define	r18	%r18
#define	r19	%r19
#define	r20	%r20
#define	r21	%r21
#define	r22	%r22
#define	r23	%r23
#define	r24	%r24
#define	r25	%r25
#define	r26	%r26
#define	r27	%r27
#define	r28	%r28
#define	r29	%r29
#define	r30	%r30
#define	r31	%r31


/* Floating Point Registers (FPRs) */

#define	fr0	0
#define	fr1	1
#define	fr2	2
#define	fr3	3
#define	fr4	4
#define	fr5	5
#define	fr6	6
#define	fr7	7
#define	fr8	8
#define	fr9	9
#define	fr10	10
#define	fr11	11
#define	fr12	12
#define	fr13	13
#define	fr14	14
#define	fr15	15
#define	fr16	16
#define	fr17	17
#define	fr18	18
#define	fr19	19
#define	fr20	20
#define	fr21	21
#define	fr22	22
#define	fr23	23
#define	fr24	24
#define	fr25	25
#define	fr26	26
#define	fr27	27
#define	fr28	28
#define	fr29	29
#define	fr30	30
#define	fr31	31

/* AltiVec Registers (VPRs) */

#define	v0	0
#define	v1	1
#define	v2	2
#define	v3	3
#define	v4	4
#define	v5	5
#define	v6	6
#define	v7	7
#define	v8	8
#define	v9	9
#define	v10	10
#define	v11	11
#define	v12	12
#define	v13	13
#define	v14	14
#define	v15	15
#define	v16	16
#define	v17	17
#define	v18	18
#define	v19	19
#define	v20	20
#define	v21	21
#define	v22	22
#define	v23	23
#define	v24	24
#define	v25	25
#define	v26	26
#define	v27	27
#define	v28	28
#define	v29	29
#define	v30	30
#define	v31	31

/* VSX Registers (VSRs) */

#define	vs0	0
#define	vs1	1
#define	vs2	2
#define	vs3	3
#define	vs4	4
#define	vs5	5
#define	vs6	6
#define	vs7	7
#define	vs8	8
#define	vs9	9
#define	vs10	10
#define	vs11	11
#define	vs12	12
#define	vs13	13
#define	vs14	14
#define	vs15	15
#define	vs16	16
#define	vs17	17
#define	vs18	18
#define	vs19	19
#define	vs20	20
#define	vs21	21
#define	vs22	22
#define	vs23	23
#define	vs24	24
#define	vs25	25
#define	vs26	26
#define	vs27	27
#define	vs28	28
#define	vs29	29
#define	vs30	30
#define	vs31	31
#define	vs32	32
#define	vs33	33
#define	vs34	34
#define	vs35	35
#define	vs36	36
#define	vs37	37
#define	vs38	38
#define	vs39	39
#define	vs40	40
#define	vs41	41
#define	vs42	42
#define	vs43	43
#define	vs44	44
#define	vs45	45
#define	vs46	46
#define	vs47	47
#define	vs48	48
#define	vs49	49
#define	vs50	50
#define	vs51	51
#define	vs52	52
#define	vs53	53
#define	vs54	54
#define	vs55	55
#define	vs56	56
#define	vs57	57
#define	vs58	58
#define	vs59	59
#define	vs60	60
#define	vs61	61
#define	vs62	62
#define	vs63	63

/* SPE Registers (EVPRs) */

#define	evr0	0
#define	evr1	1
#define	evr2	2
#define	evr3	3
#define	evr4	4
#define	evr5	5
#define	evr6	6
#define	evr7	7
#define	evr8	8
#define	evr9	9
#define	evr10	10
#define	evr11	11
#define	evr12	12
#define	evr13	13
#define	evr14	14
#define	evr15	15
#define	evr16	16
#define	evr17	17
#define	evr18	18
#define	evr19	19
#define	evr20	20
#define	evr21	21
#define	evr22	22
#define	evr23	23
#define	evr24	24
#define	evr25	25
#define	evr26	26
#define	evr27	27
#define	evr28	28
#define	evr29	29
#define	evr30	30
#define	evr31	31

/* some stab codes */
#define N_FUN	36
#define N_RSYM	64
#define N_SLINE	68
#define N_SO	100

#define RFSCV	.long 0x4c0000a4

/*
 * Create an endian fixup trampoline
 *
 * This starts with a "tdi 0,0,0x48" instruction which is
 * essentially a "trap never", and thus akin to a nop.
 *
 * The opcode for this instruction, read with the wrong endianness,
 * however results in a b . + 8
 *
 * So essentially we use that trick to execute the following
 * trampoline in "reverse endian" if we are running with the
 * MSR_LE bit set the "wrong" way for whatever endianness the
 * kernel is built for.
 */

#ifdef CONFIG_PPC_BOOK3E
#define FIXUP_ENDIAN
#else
/*
 * This version may be used in HV or non-HV context.
 * MSR[EE] must be disabled.
 */
#define FIXUP_ENDIAN						   \
	tdi   0,0,0x48;	  /* Reverse endian of b . + 8		*/ \
	b     191f;	  /* Skip trampoline if endian is good	*/ \
	.long 0xa600607d; /* mfmsr r11				*/ \
	.long 0x01006b69; /* xori r11,r11,1			*/ \
	.long 0x00004039; /* li r10,0				*/ \
	.long 0x6401417d; /* mtmsrd r10,1			*/ \
	.long 0x05009f42; /* bcl 20,31,$+4			*/ \
	.long 0xa602487d; /* mflr r10				*/ \
	.long 0x14004a39; /* addi r10,r10,20			*/ \
	.long 0xa6035a7d; /* mtsrr0 r10				*/ \
	.long 0xa6037b7d; /* mtsrr1 r11				*/ \
	.long 0x2400004c; /* rfid				*/ \
191:

/*
 * This version may only be used with MSR[HV]=1.
 * - Does not clear MSR[RI], so more robust.
 * - Slightly smaller and faster.
 */
#define FIXUP_ENDIAN_HV						   \
	tdi   0,0,0x48;	  /* Reverse endian of b . + 8		*/ \
	b     191f;	  /* Skip trampoline if endian is good	*/ \
	.long 0xa600607d; /* mfmsr r11				*/ \
	.long 0x01006b69; /* xori r11,r11,1			*/ \
	.long 0x05009f42; /* bcl 20,31,$+4			*/ \
	.long 0xa602487d; /* mflr r10				*/ \
	.long 0x14004a39; /* addi r10,r10,20			*/ \
	.long 0xa64b5a7d; /* mthsrr0 r10			*/ \
	.long 0xa64b7b7d; /* mthsrr1 r11			*/ \
	.long 0x2402004c; /* hrfid				*/ \
191:

#endif /* !CONFIG_PPC_BOOK3E */

#endif /*  __ASSEMBLY__ */

/*
 * Helper macro for exception table entries
 */
#define EX_TABLE(_fault, _target)		\
	stringify_in_c(.section __ex_table,"a";)\
	stringify_in_c(.balign 4;)		\
	stringify_in_c(.long (_fault) - . ;)	\
	stringify_in_c(.long (_target) - . ;)	\
	stringify_in_c(.previous)
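/*
 * Illustrative sketch only: in an assembly file, mark an instruction
 * that may fault and name the local fixup code the fault should be
 * redirected to:
 *
 * 1:	lwz	r3,0(r4)	may fault
 *	...
 * 99:	li	r3,-1		fixup path (placeholder return value)
 *
 *	EX_TABLE(1b, 99b)
 */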
#ifdef CONFIG_PPC_FSL_BOOK3E
#define BTB_FLUSH(reg)			\
	lis reg,BUCSR_INIT@h;		\
	ori reg,reg,BUCSR_INIT@l;	\
	mtspr SPRN_BUCSR,reg;		\
	isync;
#else
#define BTB_FLUSH(reg)
#endif /* CONFIG_PPC_FSL_BOOK3E */

#endif /* _ASM_POWERPC_PPC_ASM_H */