/*
 * Copyright (c) 2016-2021, ARM Limited and Contributors. All rights reserved.
 * Portions copyright (c) 2021-2022, ProvenRun S.A.S. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_HELPERS_H
#define ARCH_HELPERS_H

#include <cdefs.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#include <arch.h>

/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

/*
 * Emit "static inline void write_<name>(v)" which moves a core register
 * into the coprocessor register identified by the (coproc, opc1, CRn,
 * CRm, opc2) encoding, using the MCR instruction.
 */
#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline void write_## _name(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Emit "static inline u_register_t read_<name>(void)" which reads the
 * coprocessor register identified by the (coproc, opc1, CRn, CRm, opc2)
 * encoding into a core register, using the MRC instruction.
 */
#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v; \
}

/*
 * The undocumented %Q and %R extended asm operand modifiers (which name
 * the two 32-bit halves of a 64-bit operand) are used to implement the
 * below 64 bit `mrrc` and `mcrr` instructions.
 */

/* Emit "write64_<name>()" writing a 64-bit value to a coproc register via MCRR. */
#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm) \
static inline void write64_## _name(uint64_t v) \
{ \
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

/* Emit "read64_<name>()" reading a 64-bit value from a coproc register via MRRC. */
#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm) \
static inline uint64_t read64_## _name(void) \
{	uint64_t v; \
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v; \
}

/* Emit "read_<name>()" for a special/banked register accessed via MRS. */
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v)); \
	return v; \
}

/* Emit "write_<name>()" for a special/banked register accessed via MSR. */
#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name) \
static inline void write_ ## _name(u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v)); \
}

/*
 * As above, but with an immediate ("i") asm constraint: the value must
 * be a compile-time constant.
 */
#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name) \
static inline void write_ ## _name(const u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v)); \
}

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _name) \
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)

/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/

/* TLB maintenance operation with no argument (the operand register is 0). */
#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Branch predictor maintenance operation with no argument. */
#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void bpi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* TLB maintenance operation taking an operand (e.g. a VA or ASID value). */
#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...) \
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...) \
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...) \
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/

/* Data cache maintenance operation taking an operand (typically a VA). */
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void dc##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...) \
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/* Define function for simple system instruction */
#define DEFINE_SYSOP_FUNC(_op) \
static inline void _op(void) \
{ \
	__asm__ (#_op); \
}

/*
 * Define function for system instruction with type specifier.
 * NOTE: unlike DEFINE_SYSOP_FUNC, this variant declares a "memory"
 * clobber, making the barrier also a compiler reordering barrier.
 */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type) \
static inline void _op ## _type(void) \
{ \
	__asm__ (#_op " " #_type : : : "memory"); \
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type) \
static inline void _op ## _type(u_register_t v) \
{ \
	__asm__ (#_op " " #_type ", %0" : : "r" (v)); \
}

/*
 * Data cache maintenance over the VA range [addr, addr + size).
 * Implemented outside this header; "flush" presumably means
 * clean + invalidate — confirm against the implementation.
 */
void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);
bool is_dcache_enabled(void);

/* Data cache maintenance by set/way; op_type selects the operation. */
void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

/* Secure-world MMU/cache disable helpers; implemented outside this header. */
void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);

/* Wait/event hint instructions */
DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
/* Barriers with an explicit type/domain qualifier (dsbsy(), dmbish(), ...) */
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_FUNC(isb)

/*
 * Secure Monitor Call taking eight register arguments. Declared
 * __dead2 (does not return to the caller); implemented outside this
 * header.
 */
void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);

/* SPSR/CPSR accessors generated via MRS/MSR */
DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)

/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
/* Read-only identification and status registers */
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

/* Control, translation and timer registers (read/write accessors) */
DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)

/* Field extractors for a CNTP_CTL register value (shift/mask from arch.h) */
#define get_cntp_ctl_enable(x)	(((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)	(((x) >> CNTP_CTL_IMASK_SHIFT) & \
					CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x)	(((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					CNTP_CTL_ISTATUS_MASK)

/* In-place bit setters for a CNTP_CTL value held in variable x */
#define set_cntp_ctl_enable(x)	((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)	((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

/* In-place bit clearers for a CNTP_CTL value held in variable x */
#define clr_cntp_ctl_enable(x)	((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)	((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))

/* Interrupt controller CPU interface (ICC_*) register accessors */
DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_asgi1r, ICC_ASGI1R_EL1_64)

DEFINE_COPROCR_RW_FUNCS(sdcr, SDCR)
DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)

/*
 * Address translation
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

/* Coproc registers for 32bit AMU support */
DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
DEFINE_COPROCR_RW_FUNCS(amcr, AMCR)

DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

/* Coproc registers for 64bit AMU support */
DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)

/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
/*
 * For the listed Cortex-A53 errata, the clean-by-VA operation is
 * replaced with clean+invalidate (DCCIMVAC) as a workaround.
 */
#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
#else
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
#endif

/*
 * DynamIQ Shared Unit power management
 */
DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)

/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define	dmbld()			dmb()
#endif

/* True when the SCR.NS bit is clear, i.e. executing in the Secure world */
#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

/* Current-mode tests based on the CPSR mode field */
#define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()	IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
	 (IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))

/*
 * Return the current exception level (3, 2 or 1) inferred from the CPSR
 * mode bits and the SCR.NS bit, per the IS_IN_EL*() macros above.
 */
static inline unsigned int get_current_el(void)
{
	if (IS_IN_EL3()) {
		return 3U;
	} else if (IS_IN_EL2()) {
		return 2U;
	} else {
		return 1U;
	}
}

/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)
#define write_icc_sgi1r(_v)	write64_icc_sgi1r(_v)
#define write_icc_asgi1r(_v)	write64_icc_asgi1r(_v)

/* AArch32 has no DAIF register; the CPSR carries the equivalent mask bits */
#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define	read_amcntenset0_el0()	read_amcntenset0()
#define	read_amcntenset1_el0()	read_amcntenset1()

/* Helper functions to manipulate CPSR */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier will prevent the compiler from
	 * scheduling non-volatile memory access after the write to the
	 * register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, in the assumption
	 * that it is safe as the interrupts are disabled at the time it does
	 * that (according to program order). However, non-volatile accesses
	 * are not necessarily in program order relatively with volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	i");
	isb();
}

/* Unmask asynchronous aborts; see the barrier rationale in enable_irq(). */
static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	a");
	isb();
}

/* Unmask FIQs; see the barrier rationale in enable_irq(). */
static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	f");
	isb();
}

/* Mask IRQs; see the barrier rationale in enable_irq(). */
static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	i");
	isb();
}

/* Mask asynchronous aborts; see the barrier rationale in enable_irq(). */
static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	a");
	isb();
}

/* Mask FIQs; see the barrier rationale in enable_irq(). */
static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	f");
	isb();
}

#endif /* ARCH_HELPERS_H */