/*
 * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_HELPERS_H
#define ARCH_HELPERS_H

#include <cdefs.h>
#include <stdint.h>
#include <string.h>

#include <arch.h>

/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

/*
 * Generate write_<_name>(): moves a core register value into the given
 * coprocessor register via MCR.
 */
#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline void write_## _name(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Generate read_<_name>(): reads the given coprocessor register via MRC
 * and returns its value.
 */
#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v; \
}

/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the below 64 bit `mrrc` and `mcrr` instructions (they select
 * the low/high halves of a 64-bit operand in a register pair).
 */

#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm) \
static inline void write64_## _name(uint64_t v) \
{ \
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm) \
static inline uint64_t read64_## _name(void) \
{	uint64_t v; \
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v; \
}

/* Generate read_<_name>() for a banked/special register accessed via MRS. */
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v)); \
	return v; \
}

/* Generate write_<_name>() for a banked/special register accessed via MSR. */
#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name) \
static inline void write_ ## _name(u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v)); \
}

/*
 * As above, but the "i" constraint requires the written value to be a
 * compile-time immediate.
 */
#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name) \
static inline void write_ ## _name(const u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v)); \
}

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _name) \
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)

/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/

/* Generate tlbi<_op>(): TLB maintenance op, written value is ignored (0). */
#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Generate bpi<_op>(): branch predictor maintenance op, value ignored (0). */
#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void bpi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Generate tlbi<_op>(v): TLB maintenance op taking an address/ASID operand. */
#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...) \
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...) \
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...) \
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/
/* Generate dc<_op>(v): data cache maintenance op taking a VA operand. */
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void dc##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...) \
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/* Define function for simple system instruction */
#define DEFINE_SYSOP_FUNC(_op) \
static inline void _op(void) \
{ \
	__asm__ (#_op); \
}

/* Define function for system instruction with type specifier */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type) \
static inline void _op ## _type(void) \
{ \
	__asm__ (#_op " " #_type); \
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type) \
static inline void _op ## _type(u_register_t v) \
{ \
	__asm__ (#_op " " #_type ", %0" : : "r" (v)); \
}

/* Cache maintenance by VA range; implemented in assembly elsewhere. */
void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);

/* Data cache maintenance by set/way; op_type selects clean/invalidate. */
void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);

DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_FUNC(isb)

/* Issue an SMC call with up to eight 32-bit arguments; does not return. */
void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);

DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)

/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)

DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)

/*
 * Helpers to extract individual fields from a CNTP_CTL register value held
 * in x (shift/mask constants come from <arch.h>).
 */
#define get_cntp_ctl_enable(x)	(((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)	(((x) >> CNTP_CTL_IMASK_SHIFT) & \
					CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x)	(((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					CNTP_CTL_ISTATUS_MASK)

/* Helpers to set/clear individual CNTP_CTL fields in-place (x is modified). */
#define set_cntp_ctl_enable(x)	((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)	((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

#define clr_cntp_ctl_enable(x)	((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)	((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))

/* GICv3 CPU interface (ICC_*) accessors, named after AArch64 equivalents. */
DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)

DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)

DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)

/*
 * Address translation
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

/* Activity Monitor (AMU) counter enable/clear and event counter accessors. */
DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)

/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)

/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define dmbld()			dmb()
#endif

/* True when SCR.NS is clear, i.e. the core is in the Secure state. */
#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

/* Current-mode tests based on the CPSR.M mode field. */
#define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()	IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) || \
	 (IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))

/*
 * Map the current AArch32 mode onto an AArch64-style exception level
 * number (3, 2 or 1), using the IS_IN_EL* tests above.
 */
static inline unsigned int get_current_el(void)
{
	if (IS_IN_EL3()) {
		return 3U;
	} else if (IS_IN_EL2()) {
		return 2U;
	} else {
		return 1U;
	}
}

/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)

#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()

/* Helper functions to manipulate CPSR */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier will prevent the compiler from
	 * scheduling non-volatile memory access after the write to the
	 * register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, in the assumption
	 * that it is safe as the interrupts are disabled at the time it does
	 * that (according to program order). However, non-volatile accesses
	 * are not necessarily in program order relatively with volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie i");
	isb();
}

/* Unmask asynchronous aborts (SError); see barrier note in enable_irq(). */
static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie a");
	isb();
}

/* Unmask FIQ interrupts; see barrier note in enable_irq(). */
static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie f");
	isb();
}

/* Mask IRQ interrupts; see barrier note in enable_irq(). */
static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid i");
	isb();
}

/* Mask asynchronous aborts (SError); see barrier note in enable_irq(). */
static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid a");
	isb();
}

/* Mask FIQ interrupts; see barrier note in enable_irq(). */
static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid f");
	isb();
}

#endif /* ARCH_HELPERS_H */