/*
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef ARM32_H
#define ARM32_H

#ifndef ASM
#include <stdint.h>
#endif

#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2

#define SCR_NS		(1 << 0)
#define SCR_IRQ		(1 << 1)
#define SCR_FIQ		(1 << 2)
#define SCR_EA		(1 << 3)
#define SCR_FW		(1 << 4)
#define SCR_AW		(1 << 5)
#define SCR_NET		(1 << 6)
#define SCR_SCD		(1 << 7)
#define SCR_HCE		(1 << 8)
#define SCR_SIF		(1 << 9)

#define SCTLR_M		(1 << 0)
#define SCTLR_A		(1 << 1)
#define SCTLR_C		(1 << 2)
#define SCTLR_CP15BEN	(1 << 5)
#define SCTLR_SW	(1 << 10)
#define SCTLR_Z		(1 << 11)
#define SCTLR_I		(1 << 12)
#define SCTLR_V		(1 << 13)
#define SCTLR_RR	(1 << 14)
#define SCTLR_HA	(1 << 17)
#define SCTLR_WXN	(1 << 19)
#define SCTLR_UWXN	(1 << 20)
#define SCTLR_FI	(1 << 21)
#define SCTLR_VE	(1 << 24)
#define SCTLR_EE	(1 << 25)
#define SCTLR_NMFI	(1 << 27)
#define SCTLR_TRE	(1 << 28)
#define SCTLR_AFE	(1 << 29)
#define SCTLR_TE	(1 << 30)

#define ACTLR_SMP	(1 << 6)
#define ACTLR_DODMBS	(1 << 10)
#define ACTLR_L2RADIS	(1 << 11)
#define ACTLR_L1RADIS	(1 << 12)
#define ACTLR_L1PCTL	(1 << 13)
#define ACTLR_DDVM	(1 << 15)
#define ACTLR_DDI	(1 << 28)

#define NSACR_CP10	(1 << 10)
#define NSACR_CP11	(1 << 11)
#define NSACR_NSD32DIS	(1 << 14)
#define NSACR_NSASEDIS	(1 << 15)
#define NSACR_NS_L2ERR	(1 << 17)
#define NSACR_NS_SMP	(1 << 18)
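
/*
 * Illustrative note (not part of the original header): NSACR_CP10 and
 * NSACR_CP11 gate non-secure access to coprocessors 10 and 11, i.e. to
 * VFP/Advanced SIMD. A hedged usage sketch, using the read_nsacr() and
 * write_nsacr() accessors defined further down in this file:
 *
 *	write_nsacr(read_nsacr() | NSACR_CP10 | NSACR_CP11);
 */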

#define CPACR_CP(co_proc, access)	((access) << ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3

#define DACR_DOMAIN(num, perm)		((perm) << ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3

/*
 * TTBCR has a different register layout depending on whether LPAE is
 * enabled or not:
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	(1 << 31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	(1 << 4)
#define TTBCR_PD1	(1 << 5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		(1 << 7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		(1 << 22)
#define TTBCR_EPD1		(1 << 23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3

#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48

#define FSR_LPAE		(1 << 9)
#define FSR_WNR			(1 << 11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		((1 << 6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK		((1 << 10) | ((1 << 4) - 1))

#ifndef ASM
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}

static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}

static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}
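
/*
 * Illustrative sketch (not part of the original header): one way the
 * LPAE-related defines above could be combined. example_ttbcr_lpae()
 * and example_ttbr0_with_asid() are hypothetical helpers, not OP-TEE
 * API; the cacheability/shareability choices (write-back, inner
 * shareable) are assumptions made for the example only.
 */
static inline uint32_t example_ttbcr_lpae(uint32_t t0sz)
{
	/* Enable LPAE, set TTBR0 walk attributes, disable TTBR1 walks */
	return TTBCR_EAE |
	       (t0sz << TTBCR_T0SZ_SHIFT) |
	       (TTBCR_XRGNX_WB << TTBCR_IRGN0_SHIFT) |
	       (TTBCR_XRGNX_WB << TTBCR_ORGN0_SHIFT) |
	       (TTBCR_SHX_ISH << TTBCR_SH0_SHIFT) |
	       TTBCR_EPD1;
}

static inline uint64_t example_ttbr0_with_asid(uint64_t table_pa, uint32_t asid)
{
	/* 64-bit TTBR0 layout: table base address plus ASID in bits [55:48] */
	return table_pa |
	       ((uint64_t)(asid & TTBR_ASID_MASK) << TTBR_ASID_SHIFT);
}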

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}

static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}

static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb");
}

static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr p15, 0, %[va], c7, c8, 1"
			: : [va] "r" (va)
	);
}

static inline uint32_t read_par(void)
{
	uint32_t par;

	asm volatile ("mrc p15, 0, %[par], c7, c4, 0"
			: [par] "=r" (par)
	);
	return par;
}

static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}
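
/*
 * Illustrative sketch (not part of the original header): how the
 * ATS1CPW/PAR accessors above might be combined to translate a virtual
 * address as a privileged write access. example_va2pa_priv_write() is a
 * hypothetical helper; it assumes the short-descriptor PAR layout
 * (PAR.F in bit 0, physical address in bits [31:12]) and ignores the
 * supersection case.
 */
static inline uint32_t example_va2pa_priv_write(uint32_t va)
{
	uint32_t par;

	write_ats1cpw(va);
	/* The PAR result is only guaranteed to be visible after an ISB */
	isb();
	par = read_par();
	if (par & 1)
		return 0;	/* Translation aborted, PAR holds fault info */

	return (par & 0xfffff000) | (va & 0xfff);
}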

static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}

static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs %[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs %[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}

static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}
#endif /*ASM*/

#endif /*ARM32_H*/