/*
 * Copyright (c) 2016, Linaro Limited
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef ARM32_H
#define ARM32_H

#include <sys/cdefs.h>
#include <stdint.h>
#include <util.h>

#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2

#define SCR_NS		BIT32(0)
#define SCR_IRQ		BIT32(1)
#define SCR_FIQ		BIT32(2)
#define SCR_EA		BIT32(3)
#define SCR_FW		BIT32(4)
#define SCR_AW		BIT32(5)
#define SCR_NET		BIT32(6)
#define SCR_SCD		BIT32(7)
#define SCR_HCE		BIT32(8)
#define SCR_SIF		BIT32(9)

#define SCTLR_M		BIT32(0)
#define SCTLR_A		BIT32(1)
#define SCTLR_C		BIT32(2)
#define SCTLR_CP15BEN	BIT32(5)
#define SCTLR_SW	BIT32(10)
#define SCTLR_Z		BIT32(11)
#define SCTLR_I		BIT32(12)
#define SCTLR_V		BIT32(13)
#define SCTLR_RR	BIT32(14)
#define SCTLR_HA	BIT32(17)
#define SCTLR_WXN	BIT32(19)
#define SCTLR_UWXN	BIT32(20)
#define SCTLR_FI	BIT32(21)
#define SCTLR_VE	BIT32(24)
#define SCTLR_EE	BIT32(25)
#define SCTLR_NMFI	BIT32(26)
#define SCTLR_TRE	BIT32(28)
#define SCTLR_AFE	BIT32(29)
#define SCTLR_TE	BIT32(30)

#define ACTLR_SMP	BIT32(6)
#define ACTLR_DODMBS	BIT32(10)
#define ACTLR_L2RADIS	BIT32(11)
#define ACTLR_L1RADIS	BIT32(12)
#define ACTLR_L1PCTL	BIT32(13)
#define ACTLR_DDVM	BIT32(15)
#define ACTLR_DDI	BIT32(28)

#define NSACR_CP10	BIT32(10)
#define NSACR_CP11	BIT32(11)
#define NSACR_NSD32DIS	BIT32(14)
#define NSACR_NSASEDIS	BIT32(15)
#define NSACR_NS_L2ERR	BIT32(17)
#define NSACR_NS_SMP	BIT32(18)
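
/*
 * Usage sketch (illustrative only, not part of this header's API): a common
 * reason to touch NSACR is to let the non-secure world use CP10/CP11
 * (VFP/Advanced SIMD). Assuming the read_nsacr()/write_nsacr() and isb()
 * accessors declared further down in this file, that could look like:
 *
 *	write_nsacr(read_nsacr() | NSACR_CP10 | NSACR_CP11);
 *	isb();
 */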

#define CPACR_ASEDIS	BIT32(31)
#define CPACR_D32DIS	BIT32(30)
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3

#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3

#define PAR_F		BIT32(0)
#define PAR_SS		BIT32(1)
#define PAR_LPAE	BIT32(11)
#define PAR_PA_SHIFT	12
#define PAR32_PA_MASK	(BIT32(20) - 1)
#define PAR64_PA_MASK	(BIT64(28) - 1)

/*
 * TTBCR has different register layout if LPAE is enabled or not.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	BIT32(4)
#define TTBCR_PD1	BIT32(5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		BIT32(7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		BIT32(22)
#define TTBCR_EPD1		BIT32(23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3

#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48

#define FSR_LPAE		BIT32(9)
#define FSR_WNR			BIT32(11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))

#ifndef ASM
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}

static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}

static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}
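
/*
 * Usage sketch (illustrative only): with LPAE (TTBCR.EAE == 1) the 64-bit
 * TTBR0 layout carries the ASID in bits [55:48], so installing a translation
 * table base together with an ASID could look like the following, where 'pa'
 * is the physical address of the table and 'asid' the address space id
 * (assuming TTBCR.A1 == 0 so the ASID is taken from TTBR0):
 *
 *	uint64_t ttbr = pa | ((uint64_t)(asid & TTBR_ASID_MASK) <<
 *			      TTBR_ASID_SHIFT);
 *
 *	write_ttbr0_64bit(ttbr);
 *	isb();
 */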

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}

static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}

static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}

static inline void write_scr(uint32_t scr)
{
	asm volatile ("mcr p15, 0, %[scr], c1, c1, 0"
			: : [scr] "r" (scr)
	);
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb");
}

static inline void dmb(void)
{
	asm volatile ("dmb");
}

static inline void sev(void)
{
	asm volatile ("sev");
}

static inline void wfe(void)
{
	asm volatile ("wfe");
}

/* Address translate privileged write translation (current state secure PL1) */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 1" : : "r" (va));
}

/* Address translate privileged read translation (current state secure PL1) */
static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 0" : : "r" (va));
}

/* Address translate unprivileged write translation (current state secure PL1) */
static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 3" : : "r" (va));
}

/* Address translate unprivileged read translation (current state secure PL1) */
static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 2" : : "r" (va));
}

static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}
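
/*
 * Usage sketch (illustrative only): the ATS1CP* operations above together
 * with PAR give a VA-to-PA lookup through the current translation regime.
 * With the short-descriptor format (32-bit PAR) that could look like:
 *
 *	write_ats1cpr(va);
 *	isb();
 *	par = read_par32();
 *	if (par & PAR_F)
 *		return error;	// translation aborted
 *	pa = (par & (PAR32_PA_MASK << PAR_PA_SHIFT)) |
 *	     (va & (BIT32(PAR_PA_SHIFT) - 1));
 */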

#ifdef CFG_WITH_LPAE
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif

static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}

static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}

static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs %[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs %[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}

static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}
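
/*
 * Usage sketch (illustrative only): CNTPCT and CNTFRQ can be combined to
 * measure elapsed time, e.g. in microseconds (beware of the intermediate
 * multiplication overflowing for very long intervals):
 *
 *	uint64_t t0 = read_cntpct();
 *	// ... work ...
 *	uint64_t us = ((read_cntpct() - t0) * 1000000) / read_cntfrq();
 */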

static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}

/* Register read/write functions for GICC registers by using system interface */
static inline uint32_t read_icc_ctlr(void)
{
	uint32_t v;

	asm volatile ("mrc p15, 0, %0, c12, c12, 4" : "=r" (v));
	return v;
}

static inline void write_icc_ctlr(uint32_t v)
{
	asm volatile ("mcr p15, 0, %0, c12, c12, 4" : : "r" (v));
}

static inline void write_icc_pmr(uint32_t v)
{
	asm volatile ("mcr p15, 0, %0, c4, c6, 0" : : "r" (v));
}

static inline uint32_t read_icc_iar0(void)
{
	uint32_t v;

	asm volatile ("mrc p15, 0, %0, c12, c8, 0" : "=r" (v));
	return v;
}

static inline void write_icc_eoir0(uint32_t v)
{
	asm volatile ("mcr p15, 0, %0, c12, c8, 1" : : "r" (v));
}
#endif /*ASM*/

#endif /*ARM32_H*/