/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2016, Linaro Limited
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef ARM32_H
#define ARM32_H

#include <sys/cdefs.h>
#include <stdint.h>
#include <util.h>

#define CORTEX_A7_PART_NUM 0xC07
#define CORTEX_A9_PART_NUM 0xC09

#define MIDR_PRIMARY_PART_NUM_SHIFT 4
#define MIDR_PRIMARY_PART_NUM_WIDTH 12
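
/*
 * Illustrative helper, not part of the original header: extract the
 * primary part number field from a MIDR value so it can be compared
 * against CORTEX_A7_PART_NUM/CORTEX_A9_PART_NUM. The macro name is an
 * assumption introduced for this example.
 */
#define MIDR_PRIMARY_PART_NUM(midr) \
	(((midr) >> MIDR_PRIMARY_PART_NUM_SHIFT) & \
	 (BIT32(MIDR_PRIMARY_PART_NUM_WIDTH) - 1))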

#define CPSR_MODE_MASK ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS ARM32_CPSR_MODE_SYS

#define CPSR_T ARM32_CPSR_T
#define CPSR_F_SHIFT ARM32_CPSR_F_SHIFT
#define CPSR_F ARM32_CPSR_F
#define CPSR_I ARM32_CPSR_I
#define CPSR_A ARM32_CPSR_A
#define CPSR_FIA ARM32_CPSR_FIA
#define CPSR_IT_MASK ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1 ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2 ARM32_CPSR_IT_MASK2

#define SCR_NS BIT32(0)
#define SCR_IRQ BIT32(1)
#define SCR_FIQ BIT32(2)
#define SCR_EA BIT32(3)
#define SCR_FW BIT32(4)
#define SCR_AW BIT32(5)
#define SCR_NET BIT32(6)
#define SCR_SCD BIT32(7)
#define SCR_HCE BIT32(8)
#define SCR_SIF BIT32(9)

#define SCTLR_M BIT32(0)
#define SCTLR_A BIT32(1)
#define SCTLR_C BIT32(2)
#define SCTLR_CP15BEN BIT32(5)
#define SCTLR_SW BIT32(10)
#define SCTLR_Z BIT32(11)
#define SCTLR_I BIT32(12)
#define SCTLR_V BIT32(13)
#define SCTLR_RR BIT32(14)
#define SCTLR_HA BIT32(17)
#define SCTLR_WXN BIT32(19)
#define SCTLR_UWXN BIT32(20)
#define SCTLR_FI BIT32(21)
#define SCTLR_VE BIT32(24)
#define SCTLR_EE BIT32(25)
#define SCTLR_NMFI BIT32(26)
#define SCTLR_TRE BIT32(28)
#define SCTLR_AFE BIT32(29)
#define SCTLR_TE BIT32(30)

#define ACTLR_SMP BIT32(6)
#define ACTLR_DODMBS BIT32(10)
#define ACTLR_L2RADIS BIT32(11)
#define ACTLR_L1RADIS BIT32(12)
#define ACTLR_L1PCTL BIT32(13)
#define ACTLR_DDVM BIT32(15)
#define ACTLR_DDI BIT32(28)

#define NSACR_CP10 BIT32(10)
#define NSACR_CP11 BIT32(11)
#define NSACR_NSD32DIS BIT32(14)
#define NSACR_NSASEDIS BIT32(15)
#define NSACR_NS_L2ERR BIT32(17)
#define NSACR_NS_SMP BIT32(18)

#define CPACR_ASEDIS BIT32(31)
#define CPACR_D32DIS BIT32(30)
#define CPACR_CP(co_proc, access) SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED 0x0
#define CPACR_CP_ACCESS_PL1_ONLY 0x1
#define CPACR_CP_ACCESS_FULL 0x3


#define DACR_DOMAIN(num, perm) SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS 0x0
#define DACR_DOMAIN_PERM_CLIENT 0x1
#define DACR_DOMAIN_PERM_MANAGER 0x3

#define PAR_F BIT32(0)
#define PAR_SS BIT32(1)
#define PAR_LPAE BIT32(11)
#define PAR_PA_SHIFT 12
#define PAR32_PA_MASK (BIT32(20) - 1)
#define PAR64_PA_MASK (BIT64(28) - 1)

/*
 * TTBCR has different register layout if LPAE is enabled or not.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0 BIT32(4)
#define TTBCR_PD1 BIT32(5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT 0
#define TTBCR_EPD0 BIT32(7)
#define TTBCR_IRGN0_SHIFT 8
#define TTBCR_ORGN0_SHIFT 10
#define TTBCR_SH0_SHIFT 12
#define TTBCR_T1SZ_SHIFT 16
#define TTBCR_A1 BIT32(22)
#define TTBCR_EPD1 BIT32(23)
#define TTBCR_IRGN1_SHIFT 24
#define TTBCR_ORGN1_SHIFT 26
#define TTBCR_SH1_SHIFT 28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC 0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB 0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT 0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA 0x3

/* Non-shareable */
#define TTBCR_SHX_NSH 0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH 0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH 0x3

#define TTBR_ASID_MASK 0xff
#define TTBR_ASID_SHIFT 48


#define FSR_LPAE BIT32(9)
#define FSR_WNR BIT32(11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK (BIT32(6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK (BIT32(10) | (BIT32(4) - 1))

#ifndef ASM
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}

static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}
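
/*
 * Illustrative sketch, not part of the original header: grant full
 * PL0/PL1 access to CP10 and CP11 (the VFP/Advanced SIMD coprocessors)
 * using the CPACR accessors and bit-field helpers above. The function
 * name is an assumption introduced for this example; a real caller
 * would typically follow the write with an isb().
 */
static inline void cpacr_enable_cp10_cp11(void)
{
	uint32_t cpacr = read_cpacr();

	cpacr |= CPACR_CP(10, CPACR_CP_ACCESS_FULL);
	cpacr |= CPACR_CP(11, CPACR_CP_ACCESS_FULL);
	write_cpacr(cpacr);
}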

static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}


static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}

static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}

static inline void write_scr(uint32_t scr)
{
	asm volatile ("mcr p15, 0, %[scr], c1, c1, 0"
			: : [scr] "r" (scr)
	);
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb");
}

static inline void dsb_ish(void)
{
	asm volatile ("dsb ish");
}

static inline void dsb_ishst(void)
{
	asm volatile ("dsb ishst");
}

static inline void dmb(void)
{
	asm volatile ("dmb");
}

static inline void sev(void)
{
	asm volatile ("sev");
}

static inline void wfe(void)
{
	asm volatile ("wfe");
}

/* Address translate privileged write translation (current state secure PL1) */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 1" : : "r" (va));
}

static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 0" : : "r" (va));
}

static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 3" : : "r" (va));
}

static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr p15, 0, %0, c7, c8, 2" : : "r" (va));
}

static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}

#ifdef CFG_WITH_LPAE
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif
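
/*
 * Illustrative sketch, not part of the original header: translate a
 * virtual address for a privileged read (ATS1CPR) and decode the
 * 32-bit PAR, assuming the short-descriptor (non-LPAE) PAR layout.
 * The function name is an assumption introduced for this example and
 * the fault case is simplified to returning 0.
 */
static inline uint32_t va_to_pa_pl1_read(uint32_t va)
{
	uint32_t par;

	write_ats1cpr(va);
	isb();
	par = read_par32();
	if (par & PAR_F)
		return 0;
	return ((((par >> PAR_PA_SHIFT) & PAR32_PA_MASK) << PAR_PA_SHIFT) |
		(va & (BIT32(PAR_PA_SHIFT) - 1)));
}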

static inline void write_tlbimvaais(uint32_t mva)
{
	asm volatile ("mcr p15, 0, %[mva], c8, c3, 3"
			: : [mva] "r" (mva)
	);
}

static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}

static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}

static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs %[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs %[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}

static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}
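
/*
 * Illustrative sketch, not part of the original header: convert a
 * number of physical counter ticks (as returned by read_cntpct()) to
 * microseconds using the counter frequency. The function name is an
 * assumption introduced for this example; it assumes firmware has
 * programmed CNTFRQ to a non-zero value.
 */
static inline uint64_t cnt_ticks_to_us(uint64_t ticks)
{
	uint64_t frq = read_cntfrq();

	/* Split the computation to limit 64-bit overflow in the multiply */
	return (ticks / frq) * 1000000 + ((ticks % frq) * 1000000) / frq;
}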

static inline uint32_t read_cntkctl(void)
{
	uint32_t cntkctl;

	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}

static inline void write_cntkctl(uint32_t cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}

static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}

/* Register read/write functions for GICC registers by using system interface */
static inline uint32_t read_icc_ctlr(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c12,4" : "=r" (v));
	return v;
}

static inline void write_icc_ctlr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c12,4" : : "r" (v));
}

static inline void write_icc_pmr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c4,c6,0" : : "r" (v));
}

static inline uint32_t read_icc_iar0(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c8,0" : "=r" (v));
	return v;
}

static inline void write_icc_eoir0(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c8,1" : : "r" (v));
}

static inline uint64_t read_pmu_ccnt(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(val));
	return val;
}

static inline void wfi(void)
{
	asm volatile("wfi");
}
#endif /*ASM*/

#endif /*ARM32_H*/