/*
 * Copyright (c) 2016-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_HELPERS_H
#define ARCH_HELPERS_H

#include <cdefs.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#include <arch.h>

/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

/*
 * Define write_<_name>(v): move the 32-bit value v into the AArch32
 * coprocessor register selected by (coproc, opc1, CRn, CRm, opc2)
 * with an MCR instruction. The parameters are stringized directly
 * into the asm template, so they must be literal coproc/opcode tokens
 * (e.g. the encodings defined in arch.h).
 */
#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline void write_## _name(u_register_t v)			\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Define read_<_name>(): return the 32-bit value of the AArch32
 * coprocessor register selected by (coproc, opc1, CRn, CRm, opc2)
 * with an MRC instruction.
 */
#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline u_register_t read_ ## _name(void)				\
{									\
	u_register_t v;							\
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v;							\
}

/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the below 64 bit `mrrc` and `mcrr` instructions: they select
 * the registers holding the two halves of the 64-bit operand.
 */

/*
 * Define write64_<_name>(v): move the 64-bit value v into the
 * coprocessor register pair selected by (coproc, opc1, CRm) with an
 * MCRR instruction.
 */
#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm)		\
static inline void write64_## _name(uint64_t v)				\
{									\
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

/*
 * Define read64_<_name>(): return the 64-bit value of the coprocessor
 * register pair selected by (coproc, opc1, CRm) with an MRRC
 * instruction.
 */
#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm)		\
static inline uint64_t read64_## _name(void)				\
{	uint64_t v;							\
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v;							\
}

/* Define read_<_name>(): read the named banked/special register via MRS. */
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name)		\
static inline u_register_t read_ ## _name(void)			\
{								\
	u_register_t v;						\
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v));	\
	return v;						\
}

/* Define write_<_name>(v): write v to the named register via MSR. */
#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)		\
static inline void write_ ## _name(u_register_t v)		\
{								\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v));	\
}

/*
 * As _DEFINE_SYSREG_WRITE_FUNC, but the value must be a compile-time
 * constant (the "i" immediate asm constraint).
 */
#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name)	\
static inline void write_ ## _name(const u_register_t v)	\
{								\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v));	\
}

/*
 * The public DEFINE_* wrappers below add one level of indirection so
 * that any macros passed in the argument list (e.g. register encodings
 * from arch.h) are fully expanded before being stringized by the
 * _DEFINE_* helpers.
 */

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) 				\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)			\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) 			\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) 			\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)		\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name)					\
	_DEFINE_SYSREG_READ_FUNC(_name, _name)				\
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)

/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/

/*
 * Define tlbi<_op>(): issue a TLB maintenance operation that takes no
 * argument. A zero value is still moved to the maintenance register,
 * as the MCR encoding requires a source register.
 */
#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void tlbi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define bpi<_op>(): branch-predictor maintenance, no argument. */
#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void bpi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Define tlbi<_op>(v): TLB maintenance operation taking a register
 * argument (e.g. a VA or ASID, depending on the operation).
 */
#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void tlbi##_op(u_register_t v)				\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...)					\
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...)				\
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...)					\
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/

/* Define dc<_op>(v): data-cache maintenance by VA (v is the address). */
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void dc##_op(u_register_t v)				\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...)				\
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/* Define function for simple system instruction */
#define DEFINE_SYSOP_FUNC(_op)				\
static inline void _op(void)				\
{							\
	__asm__ (#_op);					\
}


/*
 * Define function for system instruction with type specifier, e.g.
 * DEFINE_SYSOP_TYPE_FUNC(dsb, sy) creates dsbsy() emitting "dsb sy".
 * The "memory" clobber makes the barrier ordering visible to the
 * compiler as well.
 */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type)			\
static inline void _op ## _type(void)				\
{								\
	__asm__ (#_op " " #_type : : : "memory");		\
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type)		\
static inline void _op ## _type(u_register_t v)			\
{								\
	__asm__ (#_op " " #_type ", %0" : : "r" (v));		\
}

/* Cache maintenance over a VA range; implementations live outside this header. */
void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);
bool is_dcache_enabled(void);

/* Data cache maintenance by set/way; op_type selects the operation. */
void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);

/* wfi(), wfe() and sev() wrappers around the bare instructions. */
DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
/* Barrier helpers: dsbsy(), dmbsy(), dmbst(), ... */
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_FUNC(isb)

/*
 * Issue an SMC with arguments in r0-r7. Marked __dead2 (does not
 * return to the caller); defined outside this header.
 */
void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);

/* SPSR/CPSR accessors via MRS/MSR. */
DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)

/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
/* Identification and status registers (read-only accessors). */
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
/* 64-bit physical counter: read64_cntpct(). */
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

/* Control, translation and timer registers (read/write accessors). */
DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
/* TTBR0 has both a 32-bit and a 64-bit (LPAE) accessor. */
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
/* Virtualization registers. */
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
/* Hyp physical timer; the _el2 suffix matches the AArch64 naming. */
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)

/*
 * Field extract helpers for a CNTP_CTL-format timer control value; the
 * shift/mask constants come from arch.h.
 */
#define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
					CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					CNTP_CTL_ISTATUS_MASK)

/* In-place field set/clear helpers; x is modified. */
#define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

#define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))

/*
 * GIC CPU interface registers (AArch32 system register view); the _el1/
 * _el2/_el3 suffixes mirror the equivalent AArch64 register names.
 */
DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
/* SGI generation registers are 64-bit; SGI1R is write-only here. */
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)

DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)

/*
 * Address translation
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
/* Translation result is read back from the 64-bit PAR. */
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

/* Coproc registers for 32bit AMU support */
DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
DEFINE_COPROCR_RW_FUNCS(amcr, AMCR)

DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

/* Coproc registers for 64bit AMU support */
DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)

/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
/*
 * Cortex-A53 errata workaround: when any of these errata builds is
 * enabled, dccvac() uses the DCCIMVAC encoding instead of DCCMVAC.
 */
#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
#else
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
#endif

/*
 * DynamIQ Shared Unit power management
 */
DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)

/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define dmbld()			dmb()
#endif

/* True when the NS bit of SCR is clear. */
#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

/* Current-mode predicates derived from the CPSR mode field. */
#define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()	IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
	 (IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))

/*
 * Map the current AArch32 execution mode onto the equivalent exception
 * level number (3, 2 or 1), matching the AArch64 API of the same name.
 */
static inline unsigned int get_current_el(void)
{
	if (IS_IN_EL3()) {
		return 3U;
	} else if (IS_IN_EL2()) {
		return 2U;
	} else {
		return 1U;
	}
}

/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

/*
 * NOTE(review): read_cpacr()/write_cpacr() are not defined in this
 * header — presumably provided elsewhere; verify before relying on
 * these aliases.
 */
#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)

/* On AArch32 the DAIF-style interrupt masks live in the CPSR. */
#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()

/* Helper functions to manipulate CPSR */

/* Unmask IRQs (clear the CPSR I bit). */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier will prevent the compiler from
	 * scheduling non-volatile memory access after the write to the
	 * register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, in the assumption
	 * that it is safe as the interrupts are disabled at the time it does
	 * that (according to program order). However, non-volatile accesses
	 * are not necessarily in program order relatively with volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	i");
	isb();
}

/*
 * Unmask asynchronous aborts (clear the CPSR A bit). See the barrier
 * rationale in enable_irq() above; it applies to all of these helpers.
 */
static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	a");
	isb();
}

/* Unmask FIQs (clear the CPSR F bit). */
static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	f");
	isb();
}

/* Mask IRQs (set the CPSR I bit). */
static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	i");
	isb();
}

/* Mask asynchronous aborts (set the CPSR A bit). */
static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	a");
	isb();
}

/* Mask FIQs (set the CPSR F bit). */
static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	f");
	isb();
}

#endif /* ARCH_HELPERS_H */