/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
/*
 *
 * (C) COPYRIGHT 2010-2022 ARM Limited. All rights reserved.
 *
 * This program is free software and is provided to you under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation, and any use by you of this program is subject to the terms
 * of such GNU license.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, you can access it online at
 * http://www.gnu.org/licenses/gpl-2.0.html.
 *
 */

#ifndef _KBASE_GPU_REGMAP_H_
#define _KBASE_GPU_REGMAP_H_

#include <uapi/gpu/arm/bifrost/gpu/mali_kbase_gpu_regmap.h>
#include <uapi/gpu/arm/bifrost/gpu/mali_kbase_gpu_coherency.h>
#include <uapi/gpu/arm/bifrost/gpu/mali_kbase_gpu_id.h>
#if MALI_USE_CSF
#include "backend/mali_kbase_gpu_regmap_csf.h"
#else
#include "backend/mali_kbase_gpu_regmap_jm.h"
#endif

/* GPU_U definition */
#ifdef __ASSEMBLER__
#define GPU_U(x) x
#define GPU_UL(x) x
#define GPU_ULL(x) x
#else
#define GPU_U(x) x##u
#define GPU_UL(x) x##ul
#define GPU_ULL(x) x##ull
#endif /* __ASSEMBLER__ */

/* Begin Register Offsets */
/* GPU control registers */

#define L2_FEATURES 0x004 /* (RO) Level 2 cache features */
#define TILER_FEATURES 0x00C /* (RO) Tiler Features */
#define MEM_FEATURES 0x010 /* (RO) Memory system features */
#define MMU_FEATURES 0x014 /* (RO) MMU features */
#define AS_PRESENT 0x018 /* (RO) Address space slots present */
#define GPU_IRQ_RAWSTAT 0x020 /* (RW) */
#define GPU_IRQ_MASK 0x028 /* (RW) */

#define GPU_COMMAND 0x030 /* (WO) */
#define GPU_STATUS 0x034 /* (RO) */

#define GPU_DBGEN (1 << 8) /* DBGEN wire status */

#define GPU_FAULTSTATUS 0x03C /* (RO) GPU exception type and fault status */
#define GPU_FAULTADDRESS_LO 0x040 /* (RO) GPU exception fault address, low word */
#define GPU_FAULTADDRESS_HI 0x044 /* (RO) GPU exception fault address, high word */

#define L2_CONFIG 0x048 /* (RW) Level 2 cache configuration */

#define GROUPS_L2_COHERENT (1 << 0) /* Core groups are L2 coherent */
#define SUPER_L2_COHERENT (1 << 1) /* Shader cores within a core
                                    * supergroup are L2 coherent
                                    */

#define PWR_KEY 0x050 /* (WO) Power manager key register */
#define PWR_OVERRIDE0 0x054 /* (RW) Power manager override settings */
#define PWR_OVERRIDE1 0x058 /* (RW) Power manager override settings */
#define GPU_FEATURES_LO 0x060 /* (RO) GPU features, low word */
#define GPU_FEATURES_HI 0x064 /* (RO) GPU features, high word */
#define PRFCNT_FEATURES 0x068 /* (RO) Performance counter features */
#define TIMESTAMP_OFFSET_LO 0x088 /* (RW) Global time stamp offset, low word */
#define TIMESTAMP_OFFSET_HI 0x08C /* (RW) Global time stamp offset, high word */
#define CYCLE_COUNT_LO 0x090 /* (RO) Cycle counter, low word */
#define CYCLE_COUNT_HI 0x094 /* (RO) Cycle counter, high word */
#define TIMESTAMP_LO 0x098 /* (RO) Global time stamp counter, low word */
#define TIMESTAMP_HI 0x09C /* (RO) Global time stamp counter, high word */
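
/*
 * Usage sketch (illustrative only, not part of the register map): the cycle
 * counter and the global timestamp are 64-bit values split across _LO/_HI
 * register pairs. Assuming a hypothetical helper read32(offset) that returns
 * the 32-bit value of a GPU control register, a 64-bit reading could be
 * assembled as:
 *
 *   u64 cycles = ((u64)read32(CYCLE_COUNT_HI) << 32) | read32(CYCLE_COUNT_LO);
 *
 * A real driver must also guard against the low word wrapping between the two
 * reads, e.g. by re-reading the high word and retrying if it changed.
 */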

#define THREAD_MAX_THREADS 0x0A0 /* (RO) Maximum number of threads per core */
#define THREAD_MAX_WORKGROUP_SIZE 0x0A4 /* (RO) Maximum workgroup size */
#define THREAD_MAX_BARRIER_SIZE 0x0A8 /* (RO) Maximum threads waiting at a barrier */
#define THREAD_FEATURES 0x0AC /* (RO) Thread features */
#define THREAD_TLS_ALLOC 0x310 /* (RO) Number of threads per core that TLS must be allocated for */

#define TEXTURE_FEATURES_0 0x0B0 /* (RO) Support flags for indexed texture formats 0..31 */
#define TEXTURE_FEATURES_1 0x0B4 /* (RO) Support flags for indexed texture formats 32..63 */
#define TEXTURE_FEATURES_2 0x0B8 /* (RO) Support flags for indexed texture formats 64..95 */
#define TEXTURE_FEATURES_3 0x0BC /* (RO) Support flags for texture order */

#define TEXTURE_FEATURES_REG(n) GPU_CONTROL_REG(TEXTURE_FEATURES_0 + ((n) << 2))

#define GPU_COMMAND_ARG0_LO 0x0D0 /* (RW) Additional parameter 0 for GPU commands, low word */
#define GPU_COMMAND_ARG0_HI 0x0D4 /* (RW) Additional parameter 0 for GPU commands, high word */
#define GPU_COMMAND_ARG1_LO 0x0D8 /* (RW) Additional parameter 1 for GPU commands, low word */
#define GPU_COMMAND_ARG1_HI 0x0DC /* (RW) Additional parameter 1 for GPU commands, high word */

#define SHADER_PRESENT_LO 0x100 /* (RO) Shader core present bitmap, low word */
#define SHADER_PRESENT_HI 0x104 /* (RO) Shader core present bitmap, high word */

#define TILER_PRESENT_LO 0x110 /* (RO) Tiler core present bitmap, low word */
#define TILER_PRESENT_HI 0x114 /* (RO) Tiler core present bitmap, high word */

#define L2_PRESENT_LO 0x120 /* (RO) Level 2 cache present bitmap, low word */
#define L2_PRESENT_HI 0x124 /* (RO) Level 2 cache present bitmap, high word */

#define STACK_PRESENT_LO 0xE00 /* (RO) Core stack present bitmap, low word */
#define STACK_PRESENT_HI 0xE04 /* (RO) Core stack present bitmap, high word */

#define STACK_READY_LO 0xE10 /* (RO) Core stack ready bitmap, low word */
#define STACK_READY_HI 0xE14 /* (RO) Core stack ready bitmap, high word */

#define SHADER_PWRFEATURES 0x188 /* (RW) Shader core power features */

#define STACK_PWRON_LO 0xE20 /* (RO) Core stack power on bitmap, low word */
#define STACK_PWRON_HI 0xE24 /* (RO) Core stack power on bitmap, high word */

#define SHADER_PWROFF_LO 0x1C0 /* (WO) Shader core power off bitmap, low word */
#define SHADER_PWROFF_HI 0x1C4 /* (WO) Shader core power off bitmap, high word */

#define TILER_PWROFF_LO 0x1D0 /* (WO) Tiler core power off bitmap, low word */
#define TILER_PWROFF_HI 0x1D4 /* (WO) Tiler core power off bitmap, high word */

#define L2_PWROFF_LO 0x1E0 /* (WO) Level 2 cache power off bitmap, low word */
#define L2_PWROFF_HI 0x1E4 /* (WO) Level 2 cache power off bitmap, high word */

#define STACK_PWROFF_LO 0xE30 /* (RO) Core stack power off bitmap, low word */
#define STACK_PWROFF_HI 0xE34 /* (RO) Core stack power off bitmap, high word */

#define SHADER_PWRTRANS_LO 0x200 /* (RO) Shader core power transition bitmap, low word */
#define SHADER_PWRTRANS_HI 0x204 /* (RO) Shader core power transition bitmap, high word */

#define TILER_PWRTRANS_LO 0x210 /* (RO) Tiler core power transition bitmap, low word */
#define TILER_PWRTRANS_HI 0x214 /* (RO) Tiler core power transition bitmap, high word */

#define L2_PWRTRANS_LO 0x220 /* (RO) Level 2 cache power transition bitmap, low word */
#define L2_PWRTRANS_HI 0x224 /* (RO) Level 2 cache power transition bitmap, high word */

#define ASN_HASH_0 0x02C0 /* (RW) ASN hash function argument 0 */
#define ASN_HASH(n) (ASN_HASH_0 + (n)*4)
#define ASN_HASH_COUNT 3

#define SYSC_ALLOC0 0x0340 /* (RW) System cache allocation hint from source ID */
#define SYSC_ALLOC(n) (SYSC_ALLOC0 + (n)*4)
#define SYSC_ALLOC_COUNT 8

#define STACK_PWRTRANS_LO 0xE40 /* (RO) Core stack power transition bitmap, low word */
#define STACK_PWRTRANS_HI 0xE44 /* (RO) Core stack power transition bitmap, high word */

#define SHADER_PWRACTIVE_LO 0x240 /* (RO) Shader core active bitmap, low word */
#define SHADER_PWRACTIVE_HI 0x244 /* (RO) Shader core active bitmap, high word */

#define TILER_PWRACTIVE_LO 0x250 /* (RO) Tiler core active bitmap, low word */
#define TILER_PWRACTIVE_HI 0x254 /* (RO) Tiler core active bitmap, high word */

#define L2_PWRACTIVE_LO 0x260 /* (RO) Level 2 cache active bitmap, low word */
#define L2_PWRACTIVE_HI 0x264 /* (RO) Level 2 cache active bitmap, high word */

#define COHERENCY_FEATURES 0x300 /* (RO) Coherency features present */
#define COHERENCY_ENABLE 0x304 /* (RW) Coherency enable */

#define AMBA_FEATURES 0x300 /* (RO) AMBA bus supported features */
#define AMBA_ENABLE 0x304 /* (RW) AMBA features enable */

#define SHADER_CONFIG 0xF04 /* (RW) Shader core configuration (implementation-specific) */
#define TILER_CONFIG 0xF08 /* (RW) Tiler core configuration (implementation-specific) */
#define L2_MMU_CONFIG 0xF0C /* (RW) L2 cache and MMU configuration (implementation-specific) */

/* Job control registers */

#define JOB_IRQ_RAWSTAT 0x000 /* Raw interrupt status register */

/* MMU control registers */

#define MMU_AS1 0x440 /* Configuration registers for address space 1 */
#define MMU_AS2 0x480 /* Configuration registers for address space 2 */
#define MMU_AS3 0x4C0 /* Configuration registers for address space 3 */
#define MMU_AS4 0x500 /* Configuration registers for address space 4 */
#define MMU_AS5 0x540 /* Configuration registers for address space 5 */
#define MMU_AS6 0x580 /* Configuration registers for address space 6 */
#define MMU_AS7 0x5C0 /* Configuration registers for address space 7 */
#define MMU_AS8 0x600 /* Configuration registers for address space 8 */
#define MMU_AS9 0x640 /* Configuration registers for address space 9 */
#define MMU_AS10 0x680 /* Configuration registers for address space 10 */
#define MMU_AS11 0x6C0 /* Configuration registers for address space 11 */
#define MMU_AS12 0x700 /* Configuration registers for address space 12 */
#define MMU_AS13 0x740 /* Configuration registers for address space 13 */
#define MMU_AS14 0x780 /* Configuration registers for address space 14 */
#define MMU_AS15 0x7C0 /* Configuration registers for address space 15 */

/* MMU address space control registers */
#define AS_LOCKADDR_LO 0x10 /* (RW) Lock region address for address space n, low word */
#define AS_LOCKADDR_HI 0x14 /* (RW) Lock region address for address space n, high word */
#define AS_FAULTSTATUS 0x1C /* (RO) MMU fault status register for address space n */
#define AS_FAULTADDRESS_LO 0x20 /* (RO) Fault Address for address space n, low word */
#define AS_FAULTADDRESS_HI 0x24 /* (RO) Fault Address for address space n, high word */
#define AS_STATUS 0x28 /* (RO) Status flags for address space n */

/* (RO) Secondary fault address for address space n, low word */
#define AS_FAULTEXTRA_LO 0x38
/* (RO) Secondary fault address for address space n, high word */
#define AS_FAULTEXTRA_HI 0x3C
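
/*
 * Usage sketch (illustrative only): the AS_* offsets above are relative to a
 * per-address-space register block, and the MMU_AS<n> bases are spaced 0x40
 * apart within the MMU control region. For example, the fault status of
 * address space 3 would be read at MMU_AS3 + AS_FAULTSTATUS (the register
 * access helper itself is assumed, not defined in this file).
 */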

/* End Register Offsets */

#define GPU_IRQ_REG_ALL (GPU_IRQ_REG_COMMON)

/*
 * MMU_IRQ_RAWSTAT register values. Values are valid also for
 * MMU_IRQ_CLEAR, MMU_IRQ_MASK, MMU_IRQ_STATUS registers.
 */

#define MMU_PAGE_FAULT_FLAGS 16

/* Macros returning a bitmask to retrieve page fault or bus error flags from
 * MMU registers
 */
#define MMU_PAGE_FAULT(n) (1UL << (n))
#define MMU_BUS_ERROR(n) (1UL << ((n) + MMU_PAGE_FAULT_FLAGS))
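
/*
 * Usage sketch (illustrative only): given a value read from MMU_IRQ_STATUS
 * (or MMU_IRQ_RAWSTAT), the per-address-space flags can be tested with the
 * macros above, e.g.:
 *
 *   if (irq_status & MMU_PAGE_FAULT(as_no))
 *           ... address space 'as_no' reported a page fault ...
 *   if (irq_status & MMU_BUS_ERROR(as_no))
 *           ... address space 'as_no' reported a bus error ...
 */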

/*
 * Begin AARCH64 MMU TRANSTAB register values
 */
#define MMU_HW_OUTA_BITS 40
#define AS_TRANSTAB_BASE_MASK ((1ULL << MMU_HW_OUTA_BITS) - (1ULL << 4))

/*
 * Begin MMU STATUS register values
 */
#define AS_STATUS_AS_ACTIVE 0x01

#define AS_FAULTSTATUS_EXCEPTION_CODE_MASK (0x7<<3)
#define AS_FAULTSTATUS_EXCEPTION_CODE_TRANSLATION_FAULT (0x0<<3)
#define AS_FAULTSTATUS_EXCEPTION_CODE_PERMISSION_FAULT (0x1<<3)
#define AS_FAULTSTATUS_EXCEPTION_CODE_TRANSTAB_BUS_FAULT (0x2<<3)
#define AS_FAULTSTATUS_EXCEPTION_CODE_ACCESS_FLAG (0x3<<3)
#define AS_FAULTSTATUS_EXCEPTION_CODE_ADDRESS_SIZE_FAULT (0x4<<3)
#define AS_FAULTSTATUS_EXCEPTION_CODE_MEMORY_ATTRIBUTES_FAULT (0x5<<3)

#define AS_FAULTSTATUS_EXCEPTION_TYPE_SHIFT 0
#define AS_FAULTSTATUS_EXCEPTION_TYPE_MASK (0xFF << AS_FAULTSTATUS_EXCEPTION_TYPE_SHIFT)
#define AS_FAULTSTATUS_EXCEPTION_TYPE_GET(reg_val) \
        (((reg_val)&AS_FAULTSTATUS_EXCEPTION_TYPE_MASK) >> AS_FAULTSTATUS_EXCEPTION_TYPE_SHIFT)
#define AS_FAULTSTATUS_EXCEPTION_TYPE_TRANSLATION_FAULT_0 0xC0

#define AS_FAULTSTATUS_ACCESS_TYPE_SHIFT 8
#define AS_FAULTSTATUS_ACCESS_TYPE_MASK (0x3 << AS_FAULTSTATUS_ACCESS_TYPE_SHIFT)
#define AS_FAULTSTATUS_ACCESS_TYPE_GET(reg_val) \
        (((reg_val)&AS_FAULTSTATUS_ACCESS_TYPE_MASK) >> AS_FAULTSTATUS_ACCESS_TYPE_SHIFT)

#define AS_FAULTSTATUS_ACCESS_TYPE_ATOMIC (0x0)
#define AS_FAULTSTATUS_ACCESS_TYPE_EX (0x1)
#define AS_FAULTSTATUS_ACCESS_TYPE_READ (0x2)
#define AS_FAULTSTATUS_ACCESS_TYPE_WRITE (0x3)

#define AS_FAULTSTATUS_SOURCE_ID_SHIFT 16
#define AS_FAULTSTATUS_SOURCE_ID_MASK (0xFFFF << AS_FAULTSTATUS_SOURCE_ID_SHIFT)
#define AS_FAULTSTATUS_SOURCE_ID_GET(reg_val) \
        (((reg_val)&AS_FAULTSTATUS_SOURCE_ID_MASK) >> AS_FAULTSTATUS_SOURCE_ID_SHIFT)
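
/*
 * Usage sketch (illustrative only): the _SHIFT/_MASK/_GET macros above follow
 * the field-accessor pattern used throughout this file. Decoding an
 * AS_FAULTSTATUS value read from the hardware could look like:
 *
 *   u32 exception_type = AS_FAULTSTATUS_EXCEPTION_TYPE_GET(fault_status);
 *   u32 access_type = AS_FAULTSTATUS_ACCESS_TYPE_GET(fault_status);
 *   u32 source_id = AS_FAULTSTATUS_SOURCE_ID_GET(fault_status);
 *
 * access_type can then be compared against the AS_FAULTSTATUS_ACCESS_TYPE_*
 * values (ATOMIC, EX, READ, WRITE) above.
 */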

#define PRFCNT_FEATURES_COUNTER_BLOCK_SIZE_SHIFT (0)
#define PRFCNT_FEATURES_COUNTER_BLOCK_SIZE_MASK \
        ((0xFF) << PRFCNT_FEATURES_COUNTER_BLOCK_SIZE_SHIFT)
#define PRFCNT_FEATURES_COUNTER_BLOCK_SIZE_GET(reg_val) \
        (((reg_val)&PRFCNT_FEATURES_COUNTER_BLOCK_SIZE_MASK) >> \
         PRFCNT_FEATURES_COUNTER_BLOCK_SIZE_SHIFT)

/*
 * Begin MMU TRANSCFG register values
 */
#define AS_TRANSCFG_ADRMODE_LEGACY 0
#define AS_TRANSCFG_ADRMODE_UNMAPPED 1
#define AS_TRANSCFG_ADRMODE_IDENTITY 2
#define AS_TRANSCFG_ADRMODE_AARCH64_4K 6
#define AS_TRANSCFG_ADRMODE_AARCH64_64K 8

#define AS_TRANSCFG_ADRMODE_MASK 0xF

/*
 * Begin TRANSCFG register values
 */
#define AS_TRANSCFG_PTW_MEMATTR_MASK (3ull << 24)
#define AS_TRANSCFG_PTW_MEMATTR_NON_CACHEABLE (1ull << 24)
#define AS_TRANSCFG_PTW_MEMATTR_WRITE_BACK (2ull << 24)

#define AS_TRANSCFG_PTW_SH_MASK ((3ull << 28))
#define AS_TRANSCFG_PTW_SH_OS (2ull << 28)
#define AS_TRANSCFG_PTW_SH_IS (3ull << 28)
#define AS_TRANSCFG_R_ALLOCATE (1ull << 30)
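
/*
 * Usage sketch (illustrative only; the exact field semantics are defined by
 * the hardware specification, and the real configuration is built elsewhere
 * in the driver): an AArch64 4KB-page translation configuration might combine
 * the values above roughly as:
 *
 *   u64 transcfg = AS_TRANSCFG_ADRMODE_AARCH64_4K |
 *                  AS_TRANSCFG_PTW_MEMATTR_WRITE_BACK |
 *                  AS_TRANSCFG_PTW_SH_IS;
 */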

/*
 * Begin Command Values
 */

/* AS_COMMAND register commands */
#define AS_COMMAND_NOP 0x00 /* NOP Operation */
#define AS_COMMAND_UPDATE 0x01 /* Broadcasts the values in AS_TRANSTAB and ASn_MEMATTR to all MMUs */
#define AS_COMMAND_LOCK 0x02 /* Issue a lock region command to all MMUs */
#define AS_COMMAND_UNLOCK 0x03 /* Issue a flush region command to all MMUs */
/* Flush all L2 caches then issue a flush region command to all MMUs */
#define AS_COMMAND_FLUSH_PT 0x04
/* Wait for memory accesses to complete, flush all the L1 caches, then flush
 * all L2 caches, then issue a flush region command to all MMUs
 */
#define AS_COMMAND_FLUSH_MEM 0x05

/* AS_LOCKADDR register */
#define AS_LOCKADDR_LOCKADDR_SIZE_SHIFT GPU_U(0)
#define AS_LOCKADDR_LOCKADDR_SIZE_MASK \
        (GPU_U(0x3F) << AS_LOCKADDR_LOCKADDR_SIZE_SHIFT)
#define AS_LOCKADDR_LOCKADDR_SIZE_GET(reg_val) \
        (((reg_val)&AS_LOCKADDR_LOCKADDR_SIZE_MASK) >> \
         AS_LOCKADDR_LOCKADDR_SIZE_SHIFT)
#define AS_LOCKADDR_LOCKADDR_SIZE_SET(reg_val, value) \
        (((reg_val) & ~AS_LOCKADDR_LOCKADDR_SIZE_MASK) | \
         (((value) << AS_LOCKADDR_LOCKADDR_SIZE_SHIFT) & \
          AS_LOCKADDR_LOCKADDR_SIZE_MASK))
#define AS_LOCKADDR_LOCKADDR_BASE_SHIFT GPU_U(12)
#define AS_LOCKADDR_LOCKADDR_BASE_MASK \
        (GPU_ULL(0xFFFFFFFFFFFFF) << AS_LOCKADDR_LOCKADDR_BASE_SHIFT)
#define AS_LOCKADDR_LOCKADDR_BASE_GET(reg_val) \
        (((reg_val)&AS_LOCKADDR_LOCKADDR_BASE_MASK) >> \
         AS_LOCKADDR_LOCKADDR_BASE_SHIFT)
#define AS_LOCKADDR_LOCKADDR_BASE_SET(reg_val, value) \
        (((reg_val) & ~AS_LOCKADDR_LOCKADDR_BASE_MASK) | \
         (((value) << AS_LOCKADDR_LOCKADDR_BASE_SHIFT) & \
          AS_LOCKADDR_LOCKADDR_BASE_MASK))
#define AS_LOCKADDR_FLUSH_SKIP_LEVELS_SHIFT (6)
#define AS_LOCKADDR_FLUSH_SKIP_LEVELS_MASK ((0xF) << AS_LOCKADDR_FLUSH_SKIP_LEVELS_SHIFT)
#define AS_LOCKADDR_FLUSH_SKIP_LEVELS_SET(reg_val, value) \
        (((reg_val) & ~AS_LOCKADDR_FLUSH_SKIP_LEVELS_MASK) | \
         ((value << AS_LOCKADDR_FLUSH_SKIP_LEVELS_SHIFT) & AS_LOCKADDR_FLUSH_SKIP_LEVELS_MASK))
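
/*
 * Usage sketch (illustrative only): the _SET macros above (and the similar
 * families later in this file) update a single field of a register value
 * without disturbing the other bits. Building up a lock-address value could
 * look like the following, where base_field, size_field and skip_levels are
 * placeholders for values encoded by the caller according to the hardware
 * specification:
 *
 *   u64 lockaddr = 0;
 *   lockaddr = AS_LOCKADDR_LOCKADDR_BASE_SET(lockaddr, base_field);
 *   lockaddr = AS_LOCKADDR_LOCKADDR_SIZE_SET(lockaddr, size_field);
 *   lockaddr = AS_LOCKADDR_FLUSH_SKIP_LEVELS_SET(lockaddr, skip_levels);
 */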

/* GPU_STATUS values */
#define GPU_STATUS_PRFCNT_ACTIVE (1 << 2) /* Set if the performance counters are active. */
#define GPU_STATUS_CYCLE_COUNT_ACTIVE (1 << 6) /* Set if the cycle counter is active. */
#define GPU_STATUS_PROTECTED_MODE_ACTIVE (1 << 7) /* Set if protected mode is active */

/* PRFCNT_CONFIG register values */
#define PRFCNT_CONFIG_MODE_SHIFT 0 /* Counter mode position. */
#define PRFCNT_CONFIG_AS_SHIFT 4 /* Address space bitmap position. */
#define PRFCNT_CONFIG_SETSELECT_SHIFT 8 /* Set select position. */

/* The performance counters are disabled. */
#define PRFCNT_CONFIG_MODE_OFF 0
/* The performance counters are enabled, but are only written out when a
 * PRFCNT_SAMPLE command is issued using the GPU_COMMAND register.
 */
#define PRFCNT_CONFIG_MODE_MANUAL 1
/* The performance counters are enabled, and are written out each time a tile
 * finishes rendering.
 */
#define PRFCNT_CONFIG_MODE_TILE 2

/* AS<n>_MEMATTR values from MMU_MEMATTR_STAGE1: */
/* Use GPU implementation-defined caching policy. */
#define AS_MEMATTR_IMPL_DEF_CACHE_POLICY 0x88ull
/* The attribute set to force all resources to be cached. */
#define AS_MEMATTR_FORCE_TO_CACHE_ALL 0x8Full
/* Inner write-alloc cache setup, no outer caching */
#define AS_MEMATTR_WRITE_ALLOC 0x8Dull

/* Use GPU implementation-defined caching policy. */
#define AS_MEMATTR_LPAE_IMPL_DEF_CACHE_POLICY 0x48ull
/* The attribute set to force all resources to be cached. */
#define AS_MEMATTR_LPAE_FORCE_TO_CACHE_ALL 0x4Full
/* Inner write-alloc cache setup, no outer caching */
#define AS_MEMATTR_LPAE_WRITE_ALLOC 0x4Dull
/* Set to implementation defined, outer caching */
#define AS_MEMATTR_LPAE_OUTER_IMPL_DEF 0x88ull
/* Set to write back memory, outer caching */
#define AS_MEMATTR_LPAE_OUTER_WA 0x8Dull
/* There is no LPAE support for non-cacheable memory, since the memory type is
 * always write-back; this setting is therefore marked as reserved for LPAE.
 */
#define AS_MEMATTR_LPAE_NON_CACHEABLE_RESERVED

/* L2_MMU_CONFIG register */
#define L2_MMU_CONFIG_ALLOW_SNOOP_DISPARITY_SHIFT (23)
#define L2_MMU_CONFIG_ALLOW_SNOOP_DISPARITY (0x1 << L2_MMU_CONFIG_ALLOW_SNOOP_DISPARITY_SHIFT)

/* End L2_MMU_CONFIG register */

/* THREAD_* registers */

/* THREAD_FEATURES IMPLEMENTATION_TECHNOLOGY values */
#define IMPLEMENTATION_UNSPECIFIED 0
#define IMPLEMENTATION_SILICON 1
#define IMPLEMENTATION_FPGA 2
#define IMPLEMENTATION_MODEL 3

/* Default values when registers are not supported by the implemented hardware */
#define THREAD_MT_DEFAULT 256
#define THREAD_MWS_DEFAULT 256
#define THREAD_MBS_DEFAULT 256
#define THREAD_MR_DEFAULT 1024
#define THREAD_MTQ_DEFAULT 4
#define THREAD_MTGS_DEFAULT 10

/* End THREAD_* registers */

/* SHADER_CONFIG register */
#define SC_LS_ALLOW_ATTR_TYPES (1ul << 16)
#define SC_TLS_HASH_ENABLE (1ul << 17)
#define SC_LS_ATTR_CHECK_DISABLE (1ul << 18)
#define SC_VAR_ALGORITHM (1ul << 29)
/* End SHADER_CONFIG register */

/* TILER_CONFIG register */
#define TC_CLOCK_GATE_OVERRIDE (1ul << 0)
/* End TILER_CONFIG register */

/* L2_CONFIG register */
#define L2_CONFIG_SIZE_SHIFT 16
#define L2_CONFIG_SIZE_MASK (0xFFul << L2_CONFIG_SIZE_SHIFT)
#define L2_CONFIG_HASH_SHIFT 24
#define L2_CONFIG_HASH_MASK (0xFFul << L2_CONFIG_HASH_SHIFT)
#define L2_CONFIG_ASN_HASH_ENABLE_SHIFT 24
#define L2_CONFIG_ASN_HASH_ENABLE_MASK (1ul << L2_CONFIG_ASN_HASH_ENABLE_SHIFT)
/* End L2_CONFIG register */

/* AMBA_FEATURES register */
#define AMBA_FEATURES_ACE_LITE_SHIFT GPU_U(0)
#define AMBA_FEATURES_ACE_LITE_MASK (GPU_U(0x1) << AMBA_FEATURES_ACE_LITE_SHIFT)
#define AMBA_FEATURES_ACE_LITE_GET(reg_val) \
        (((reg_val)&AMBA_FEATURES_ACE_LITE_MASK) >> \
         AMBA_FEATURES_ACE_LITE_SHIFT)
#define AMBA_FEATURES_ACE_LITE_SET(reg_val, value) \
        (((reg_val) & ~AMBA_FEATURES_ACE_LITE_MASK) | \
         (((value) << AMBA_FEATURES_ACE_LITE_SHIFT) & \
          AMBA_FEATURES_ACE_LITE_MASK))
#define AMBA_FEATURES_ACE_SHIFT GPU_U(1)
#define AMBA_FEATURES_ACE_MASK (GPU_U(0x1) << AMBA_FEATURES_ACE_SHIFT)
#define AMBA_FEATURES_ACE_GET(reg_val) \
        (((reg_val)&AMBA_FEATURES_ACE_MASK) >> AMBA_FEATURES_ACE_SHIFT)
#define AMBA_FEATURES_ACE_SET(reg_val, value) \
        (((reg_val) & ~AMBA_FEATURES_ACE_MASK) | \
         (((value) << AMBA_FEATURES_ACE_SHIFT) & AMBA_FEATURES_ACE_MASK))
#define AMBA_FEATURES_MEMORY_CACHE_SUPPORT_SHIFT GPU_U(5)
#define AMBA_FEATURES_MEMORY_CACHE_SUPPORT_MASK \
        (GPU_U(0x1) << AMBA_FEATURES_MEMORY_CACHE_SUPPORT_SHIFT)
#define AMBA_FEATURES_MEMORY_CACHE_SUPPORT_GET(reg_val) \
        (((reg_val)&AMBA_FEATURES_MEMORY_CACHE_SUPPORT_MASK) >> \
         AMBA_FEATURES_MEMORY_CACHE_SUPPORT_SHIFT)
#define AMBA_FEATURES_MEMORY_CACHE_SUPPORT_SET(reg_val, value) \
        (((reg_val) & ~AMBA_FEATURES_MEMORY_CACHE_SUPPORT_MASK) | \
         (((value) << AMBA_FEATURES_MEMORY_CACHE_SUPPORT_SHIFT) & \
          AMBA_FEATURES_MEMORY_CACHE_SUPPORT_MASK))
#define AMBA_FEATURES_INVALIDATE_HINT_SHIFT GPU_U(6)
#define AMBA_FEATURES_INVALIDATE_HINT_MASK \
        (GPU_U(0x1) << AMBA_FEATURES_INVALIDATE_HINT_SHIFT)
#define AMBA_FEATURES_INVALIDATE_HINT_GET(reg_val) \
        (((reg_val)&AMBA_FEATURES_INVALIDATE_HINT_MASK) >> \
         AMBA_FEATURES_INVALIDATE_HINT_SHIFT)
#define AMBA_FEATURES_INVALIDATE_HINT_SET(reg_val, value) \
        (((reg_val) & ~AMBA_FEATURES_INVALIDATE_HINT_MASK) | \
         (((value) << AMBA_FEATURES_INVALIDATE_HINT_SHIFT) & \
          AMBA_FEATURES_INVALIDATE_HINT_MASK))

/* AMBA_ENABLE register */
#define AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT GPU_U(0)
#define AMBA_ENABLE_COHERENCY_PROTOCOL_MASK \
        (GPU_U(0x1F) << AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT)
#define AMBA_ENABLE_COHERENCY_PROTOCOL_GET(reg_val) \
        (((reg_val)&AMBA_ENABLE_COHERENCY_PROTOCOL_MASK) >> \
         AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT)
#define AMBA_ENABLE_COHERENCY_PROTOCOL_SET(reg_val, value) \
        (((reg_val) & ~AMBA_ENABLE_COHERENCY_PROTOCOL_MASK) | \
         (((value) << AMBA_ENABLE_COHERENCY_PROTOCOL_SHIFT) & \
          AMBA_ENABLE_COHERENCY_PROTOCOL_MASK))
/* AMBA_ENABLE_coherency_protocol values */
#define AMBA_ENABLE_COHERENCY_PROTOCOL_ACE_LITE 0x0
#define AMBA_ENABLE_COHERENCY_PROTOCOL_ACE 0x1
#define AMBA_ENABLE_COHERENCY_PROTOCOL_NO_COHERENCY 0x1F
/* End of AMBA_ENABLE_coherency_protocol values */
#define AMBA_ENABLE_MEMORY_CACHE_SUPPORT_SHIFT GPU_U(5)
#define AMBA_ENABLE_MEMORY_CACHE_SUPPORT_MASK \
        (GPU_U(0x1) << AMBA_ENABLE_MEMORY_CACHE_SUPPORT_SHIFT)
#define AMBA_ENABLE_MEMORY_CACHE_SUPPORT_GET(reg_val) \
        (((reg_val)&AMBA_ENABLE_MEMORY_CACHE_SUPPORT_MASK) >> \
         AMBA_ENABLE_MEMORY_CACHE_SUPPORT_SHIFT)
#define AMBA_ENABLE_MEMORY_CACHE_SUPPORT_SET(reg_val, value) \
        (((reg_val) & ~AMBA_ENABLE_MEMORY_CACHE_SUPPORT_MASK) | \
         (((value) << AMBA_ENABLE_MEMORY_CACHE_SUPPORT_SHIFT) & \
          AMBA_ENABLE_MEMORY_CACHE_SUPPORT_MASK))
#define AMBA_ENABLE_INVALIDATE_HINT_SHIFT GPU_U(6)
#define AMBA_ENABLE_INVALIDATE_HINT_MASK \
        (GPU_U(0x1) << AMBA_ENABLE_INVALIDATE_HINT_SHIFT)
#define AMBA_ENABLE_INVALIDATE_HINT_GET(reg_val) \
        (((reg_val)&AMBA_ENABLE_INVALIDATE_HINT_MASK) >> \
         AMBA_ENABLE_INVALIDATE_HINT_SHIFT)
#define AMBA_ENABLE_INVALIDATE_HINT_SET(reg_val, value) \
        (((reg_val) & ~AMBA_ENABLE_INVALIDATE_HINT_MASK) | \
         (((value) << AMBA_ENABLE_INVALIDATE_HINT_SHIFT) & \
          AMBA_ENABLE_INVALIDATE_HINT_MASK))
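
/*
 * Usage sketch (illustrative only): AMBA_FEATURES/AMBA_ENABLE share offsets
 * 0x300/0x304 with COHERENCY_FEATURES/COHERENCY_ENABLE above; which
 * interpretation applies is GPU-dependent. Selecting a bus coherency protocol
 * could look like:
 *
 *   amba_enable = AMBA_ENABLE_COHERENCY_PROTOCOL_SET(
 *           amba_enable, AMBA_ENABLE_COHERENCY_PROTOCOL_ACE_LITE);
 *
 * or, to disable coherency, pass AMBA_ENABLE_COHERENCY_PROTOCOL_NO_COHERENCY
 * instead.
 */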

/* IDVS_GROUP register */
#define IDVS_GROUP_SIZE_SHIFT (16)
#define IDVS_GROUP_MAX_SIZE (0x3F)

/* SYSC_ALLOC read IDs */
#define SYSC_ALLOC_ID_R_OTHER 0x00
#define SYSC_ALLOC_ID_R_CSF 0x02
#define SYSC_ALLOC_ID_R_MMU 0x04
#define SYSC_ALLOC_ID_R_TILER_VERT 0x08
#define SYSC_ALLOC_ID_R_TILER_PTR 0x09
#define SYSC_ALLOC_ID_R_TILER_INDEX 0x0A
#define SYSC_ALLOC_ID_R_TILER_OTHER 0x0B
#define SYSC_ALLOC_ID_R_IC 0x10
#define SYSC_ALLOC_ID_R_ATTR 0x11
#define SYSC_ALLOC_ID_R_SCM 0x12
#define SYSC_ALLOC_ID_R_FSDC 0x13
#define SYSC_ALLOC_ID_R_VL 0x14
#define SYSC_ALLOC_ID_R_PLR 0x15
#define SYSC_ALLOC_ID_R_TEX 0x18
#define SYSC_ALLOC_ID_R_LSC 0x1c

/* SYSC_ALLOC write IDs */
#define SYSC_ALLOC_ID_W_OTHER 0x00
#define SYSC_ALLOC_ID_W_CSF 0x02
#define SYSC_ALLOC_ID_W_PCB 0x07
#define SYSC_ALLOC_ID_W_TILER_PTR 0x09
#define SYSC_ALLOC_ID_W_TILER_VERT_PLIST 0x0A
#define SYSC_ALLOC_ID_W_TILER_OTHER 0x0B
#define SYSC_ALLOC_ID_W_L2_EVICT 0x0C
#define SYSC_ALLOC_ID_W_L2_FLUSH 0x0D
#define SYSC_ALLOC_ID_W_TIB_COLOR 0x10
#define SYSC_ALLOC_ID_W_TIB_COLOR_AFBCH 0x11
#define SYSC_ALLOC_ID_W_TIB_COLOR_AFBCB 0x12
#define SYSC_ALLOC_ID_W_TIB_CRC 0x13
#define SYSC_ALLOC_ID_W_TIB_DS 0x14
#define SYSC_ALLOC_ID_W_TIB_DS_AFBCH 0x15
#define SYSC_ALLOC_ID_W_TIB_DS_AFBCB 0x16
#define SYSC_ALLOC_ID_W_LSC 0x1C

/* SYSC_ALLOC values */
#define SYSC_ALLOC_L2_ALLOC 0x0
#define SYSC_ALLOC_NEVER_ALLOC 0x2
#define SYSC_ALLOC_ALWAYS_ALLOC 0x3
#define SYSC_ALLOC_PTL_ALLOC 0x4
#define SYSC_ALLOC_L2_PTL_ALLOC 0x5

/* SYSC_ALLOC register */
#define SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT (0)
#define SYSC_ALLOC_R_SYSC_ALLOC0_MASK ((0xF) << SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC0_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC0_MASK) >> \
         SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC0_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC0_MASK) | \
         (((value) << SYSC_ALLOC_R_SYSC_ALLOC0_SHIFT) & \
          SYSC_ALLOC_R_SYSC_ALLOC0_MASK))
/* End of SYSC_ALLOC_R_SYSC_ALLOC0 values */
#define SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT (4)
#define SYSC_ALLOC_W_SYSC_ALLOC0_MASK ((0xF) << SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC0_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC0_MASK) >> \
         SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC0_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC0_MASK) | \
         (((value) << SYSC_ALLOC_W_SYSC_ALLOC0_SHIFT) & \
          SYSC_ALLOC_W_SYSC_ALLOC0_MASK))
/* End of SYSC_ALLOC_W_SYSC_ALLOC0 values */
#define SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT (8)
#define SYSC_ALLOC_R_SYSC_ALLOC1_MASK ((0xF) << SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC1_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC1_MASK) >> \
         SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC1_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC1_MASK) | \
         (((value) << SYSC_ALLOC_R_SYSC_ALLOC1_SHIFT) & \
          SYSC_ALLOC_R_SYSC_ALLOC1_MASK))
/* End of SYSC_ALLOC_R_SYSC_ALLOC1 values */
#define SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT (12)
#define SYSC_ALLOC_W_SYSC_ALLOC1_MASK ((0xF) << SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC1_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC1_MASK) >> \
         SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC1_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC1_MASK) | \
         (((value) << SYSC_ALLOC_W_SYSC_ALLOC1_SHIFT) & \
          SYSC_ALLOC_W_SYSC_ALLOC1_MASK))
/* End of SYSC_ALLOC_W_SYSC_ALLOC1 values */
#define SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT (16)
#define SYSC_ALLOC_R_SYSC_ALLOC2_MASK ((0xF) << SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC2_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC2_MASK) >> \
         SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC2_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC2_MASK) | \
         (((value) << SYSC_ALLOC_R_SYSC_ALLOC2_SHIFT) & \
          SYSC_ALLOC_R_SYSC_ALLOC2_MASK))
/* End of SYSC_ALLOC_R_SYSC_ALLOC2 values */
#define SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT (20)
#define SYSC_ALLOC_W_SYSC_ALLOC2_MASK ((0xF) << SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC2_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC2_MASK) >> \
         SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC2_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC2_MASK) | \
         (((value) << SYSC_ALLOC_W_SYSC_ALLOC2_SHIFT) & \
          SYSC_ALLOC_W_SYSC_ALLOC2_MASK))
/* End of SYSC_ALLOC_W_SYSC_ALLOC2 values */
#define SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT (24)
#define SYSC_ALLOC_R_SYSC_ALLOC3_MASK ((0xF) << SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC3_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_R_SYSC_ALLOC3_MASK) >> \
         SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT)
#define SYSC_ALLOC_R_SYSC_ALLOC3_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_R_SYSC_ALLOC3_MASK) | \
         (((value) << SYSC_ALLOC_R_SYSC_ALLOC3_SHIFT) & \
          SYSC_ALLOC_R_SYSC_ALLOC3_MASK))
/* End of SYSC_ALLOC_R_SYSC_ALLOC3 values */
#define SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT (28)
#define SYSC_ALLOC_W_SYSC_ALLOC3_MASK ((0xF) << SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC3_GET(reg_val) \
        (((reg_val)&SYSC_ALLOC_W_SYSC_ALLOC3_MASK) >> \
         SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT)
#define SYSC_ALLOC_W_SYSC_ALLOC3_SET(reg_val, value) \
        (((reg_val) & ~SYSC_ALLOC_W_SYSC_ALLOC3_MASK) | \
         (((value) << SYSC_ALLOC_W_SYSC_ALLOC3_SHIFT) & \
          SYSC_ALLOC_W_SYSC_ALLOC3_MASK))
/* End of SYSC_ALLOC_W_SYSC_ALLOC3 values */
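
/*
 * Usage sketch (illustrative only): each SYSC_ALLOC register packs four read
 * and four write allocation-hint nibbles, set with the read/write
 * _SYSC_ALLOC<i>_SET macros above; how the SYSC_ALLOC_ID_R_* and
 * SYSC_ALLOC_ID_W_* source IDs map onto SYSC_ALLOC(n) and its slots is
 * assumed to follow the hardware specification and is not defined in this
 * file. Programming one register value so that its first read slot never
 * allocates and its first write slot always allocates could look like:
 *
 *   u32 sysc = 0;
 *   sysc = SYSC_ALLOC_R_SYSC_ALLOC0_SET(sysc, SYSC_ALLOC_NEVER_ALLOC);
 *   sysc = SYSC_ALLOC_W_SYSC_ALLOC0_SET(sysc, SYSC_ALLOC_ALWAYS_ALLOC);
 */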

/* Include POWER_CHANGED_SINGLE in debug builds for use in irq latency test. */
#ifdef CONFIG_MALI_BIFROST_DEBUG
#undef GPU_IRQ_REG_ALL
#define GPU_IRQ_REG_ALL (GPU_IRQ_REG_COMMON | POWER_CHANGED_SINGLE)
#endif /* CONFIG_MALI_BIFROST_DEBUG */

#endif /* _KBASE_GPU_REGMAP_H_ */