/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
/*
 *
 * (C) COPYRIGHT 2019-2022 ARM Limited. All rights reserved.
 *
 * This program is free software and is provided to you under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation, and any use by you of this program is subject to the terms
 * of such GNU license.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, you can access it online at
 * http://www.gnu.org/licenses/gpl-2.0.html.
 *
 */

#ifndef _KBASE_GPU_REGMAP_CSF_H_
#define _KBASE_GPU_REGMAP_CSF_H_

#include <linux/types.h>

#if !MALI_USE_CSF && defined(__KERNEL__)
#error "Cannot be compiled with JM"
#endif

/* GPU_CONTROL_MCU base address */
#define GPU_CONTROL_MCU_BASE 0x3000

/* MCU_SUBSYSTEM base address */
#define MCU_SUBSYSTEM_BASE 0x20000

/* IPA control registers */
#define COMMAND                0x000 /* (WO) Command register */
#define TIMER                  0x008 /* (RW) Timer control register */

#define SELECT_CSHW_LO         0x010 /* (RW) Counter select for CS hardware, low word */
#define SELECT_CSHW_HI         0x014 /* (RW) Counter select for CS hardware, high word */
#define SELECT_MEMSYS_LO       0x018 /* (RW) Counter select for Memory system, low word */
#define SELECT_MEMSYS_HI       0x01C /* (RW) Counter select for Memory system, high word */
#define SELECT_TILER_LO        0x020 /* (RW) Counter select for Tiler cores, low word */
#define SELECT_TILER_HI        0x024 /* (RW) Counter select for Tiler cores, high word */
#define SELECT_SHADER_LO       0x028 /* (RW) Counter select for Shader cores, low word */
#define SELECT_SHADER_HI       0x02C /* (RW) Counter select for Shader cores, high word */

/* Accumulated counter values for CS hardware */
#define VALUE_CSHW_BASE        0x100
#define VALUE_CSHW_REG_LO(n)   (VALUE_CSHW_BASE + ((n) << 3))       /* (RO) Counter value #n, low word */
#define VALUE_CSHW_REG_HI(n)   (VALUE_CSHW_BASE + ((n) << 3) + 4)   /* (RO) Counter value #n, high word */

/* Accumulated counter values for memory system */
#define VALUE_MEMSYS_BASE      0x140
#define VALUE_MEMSYS_REG_LO(n) (VALUE_MEMSYS_BASE + ((n) << 3))     /* (RO) Counter value #n, low word */
#define VALUE_MEMSYS_REG_HI(n) (VALUE_MEMSYS_BASE + ((n) << 3) + 4) /* (RO) Counter value #n, high word */

#define VALUE_TILER_BASE       0x180
#define VALUE_TILER_REG_LO(n)  (VALUE_TILER_BASE + ((n) << 3))      /* (RO) Counter value #n, low word */
#define VALUE_TILER_REG_HI(n)  (VALUE_TILER_BASE + ((n) << 3) + 4)  /* (RO) Counter value #n, high word */

#define VALUE_SHADER_BASE      0x1C0
#define VALUE_SHADER_REG_LO(n) (VALUE_SHADER_BASE + ((n) << 3))     /* (RO) Counter value #n, low word */
#define VALUE_SHADER_REG_HI(n) (VALUE_SHADER_BASE + ((n) << 3) + 4) /* (RO) Counter value #n, high word */
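
/* Illustrative example (not part of the register map): the accumulated
 * counter values are 64-bit quantities at an 8-byte stride, so e.g. memory
 * system counter #2 is read from:
 *
 *   VALUE_MEMSYS_REG_LO(2) == 0x140 + (2 << 3)     == 0x150
 *   VALUE_MEMSYS_REG_HI(2) == 0x140 + (2 << 3) + 4 == 0x154
 *
 * with the low and high words together forming the 64-bit value.
 */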

#define AS_STATUS_AS_ACTIVE_INT 0x2

/* Set to implementation defined, outer caching */
#define AS_MEMATTR_AARCH64_OUTER_IMPL_DEF 0x88ull
/* Set to write back memory, outer caching */
#define AS_MEMATTR_AARCH64_OUTER_WA       0x8Dull
/* Set to inner non-cacheable, outer non-cacheable.
 * The setting defined by the alloc bits is ignored, but is set to a valid encoding:
 * - no alloc on read
 * - no alloc on write
 */
#define AS_MEMATTR_AARCH64_NON_CACHEABLE  0x4Cull
/* Set to shared memory: inner cacheable on ACE when inner or outer shared,
 * otherwise inner non-cacheable.
 * Outer cacheable when inner or outer shared, otherwise outer non-cacheable.
 */
#define AS_MEMATTR_AARCH64_SHARED         0x8ull

/* Symbols for the default MEMATTR to use.
 * The default is HW implementation-defined caching.
 */
#define AS_MEMATTR_INDEX_DEFAULT               0
#define AS_MEMATTR_INDEX_DEFAULT_ACE           3

/* HW implementation defined caching */
#define AS_MEMATTR_INDEX_IMPL_DEF_CACHE_POLICY 0
/* Force cache on */
#define AS_MEMATTR_INDEX_FORCE_TO_CACHE_ALL    1
/* Write-alloc */
#define AS_MEMATTR_INDEX_WRITE_ALLOC           2
/* Outer coherent, inner implementation defined policy */
#define AS_MEMATTR_INDEX_OUTER_IMPL_DEF        3
/* Outer coherent, write alloc inner */
#define AS_MEMATTR_INDEX_OUTER_WA              4
/* Normal memory, inner non-cacheable, outer non-cacheable (ARMv8 mode only) */
#define AS_MEMATTR_INDEX_NON_CACHEABLE         5
/* Normal memory, shared between MCU and Host */
#define AS_MEMATTR_INDEX_SHARED                6
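
/* Illustrative sketch (not part of the register map), assuming the AS_MEMATTR
 * register holds 8-bit attribute entries selected by the indices above: an
 * attribute word can then be assembled by shifting each AS_MEMATTR_AARCH64_*
 * encoding to its index, e.g.
 *
 *   memattr = (AS_MEMATTR_AARCH64_OUTER_IMPL_DEF
 *              << (AS_MEMATTR_INDEX_OUTER_IMPL_DEF * 8)) |
 *             (AS_MEMATTR_AARCH64_NON_CACHEABLE
 *              << (AS_MEMATTR_INDEX_NON_CACHEABLE * 8));
 *
 * The variable name is hypothetical; the real assembly is done by the MMU
 * setup code, not by this header.
 */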

/* Configuration bits for the CSF. */
#define CSF_CONFIG 0xF00

/* CSF_CONFIG register */
#define CSF_CONFIG_FORCE_COHERENCY_FEATURES_SHIFT 2

/* GPU control registers */
#define CORE_FEATURES           0x008   /* () Shader Core Features */
#define MCU_CONTROL             0x700
#define MCU_STATUS              0x704

#define MCU_CNTRL_ENABLE        (1 << 0)
#define MCU_CNTRL_AUTO          (1 << 1)
#define MCU_CNTRL_DISABLE       (0)

#define MCU_CNTRL_DOORBELL_DISABLE_SHIFT (31)
#define MCU_CNTRL_DOORBELL_DISABLE_MASK (1 << MCU_CNTRL_DOORBELL_DISABLE_SHIFT)

#define MCU_STATUS_HALTED        (1 << 1)

#define L2_CONFIG_PBHA_HWU_SHIFT GPU_U(12)
#define L2_CONFIG_PBHA_HWU_MASK (GPU_U(0xF) << L2_CONFIG_PBHA_HWU_SHIFT)
#define L2_CONFIG_PBHA_HWU_GET(reg_val)                                                            \
	(((reg_val)&L2_CONFIG_PBHA_HWU_MASK) >> L2_CONFIG_PBHA_HWU_SHIFT)
#define L2_CONFIG_PBHA_HWU_SET(reg_val, value)                                                     \
	(((reg_val) & ~L2_CONFIG_PBHA_HWU_MASK) |                                                  \
	 (((value) << L2_CONFIG_PBHA_HWU_SHIFT) & L2_CONFIG_PBHA_HWU_MASK))
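
/* Illustrative example (not part of the register map): reading and updating
 * the PBHA_HWU field of a hypothetical L2_CONFIG snapshot with the accessors
 * above.
 *
 *   hwu     = L2_CONFIG_PBHA_HWU_GET(reg_val);      // extracts bits [15:12]
 *   reg_val = L2_CONFIG_PBHA_HWU_SET(reg_val, 0x3); // replaces the field
 *
 * SET clears the field before inserting the new value, so repeated updates
 * do not accumulate stale bits.
 */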

/* JOB IRQ flags */
#define JOB_IRQ_GLOBAL_IF (1u << 31) /* Global interface interrupt received */

/* GPU_COMMAND codes */
#define GPU_COMMAND_CODE_NOP                0x00 /* No operation, nothing happens */
#define GPU_COMMAND_CODE_RESET              0x01 /* Reset the GPU */
#define GPU_COMMAND_CODE_TIME               0x03 /* Configure time sources */
#define GPU_COMMAND_CODE_FLUSH_CACHES       0x04 /* Flush caches */
#define GPU_COMMAND_CODE_SET_PROTECTED_MODE 0x05 /* Places the GPU in protected mode */
#define GPU_COMMAND_CODE_FINISH_HALT        0x06 /* Halt CSF */
#define GPU_COMMAND_CODE_CLEAR_FAULT        0x07 /* Clear GPU_FAULTSTATUS and GPU_FAULTADDRESS, TODX */
#define GPU_COMMAND_CODE_FLUSH_PA_RANGE 0x08 /* Flush the GPU caches for a physical range, TITX */

/* GPU_COMMAND_RESET payloads */

/* This will leave the state of active jobs UNDEFINED, but will leave the external bus in a defined and idle state.
 * Power domains will remain powered on.
 */
#define GPU_COMMAND_RESET_PAYLOAD_FAST_RESET 0x00

/* This will leave the state of active CSs UNDEFINED, but will leave the external bus in a defined and
 * idle state.
 */
#define GPU_COMMAND_RESET_PAYLOAD_SOFT_RESET 0x01

/* This reset will leave the state of currently active streams UNDEFINED, will likely lose data, and may leave
 * the system bus in an inconsistent state. Use only as a last resort when nothing else works.
 */
#define GPU_COMMAND_RESET_PAYLOAD_HARD_RESET 0x02

/* GPU_COMMAND_TIME payloads */
#define GPU_COMMAND_TIME_DISABLE 0x00 /* Disable cycle counter */
#define GPU_COMMAND_TIME_ENABLE  0x01 /* Enable cycle counter */

/* GPU_COMMAND_FLUSH_CACHES payload bits for L2 caches */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_L2_NONE 0x000 /* No flush */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_L2_CLEAN 0x001 /* CLN only */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_L2_CLEAN_INVALIDATE 0x003 /* CLN + INV */

/* GPU_COMMAND_FLUSH_CACHES payload bits for Load-store caches */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_LSC_NONE 0x000 /* No flush */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_LSC_CLEAN 0x010 /* CLN only */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_LSC_CLEAN_INVALIDATE 0x030 /* CLN + INV */

/* GPU_COMMAND_FLUSH_CACHES payload bits for Other caches */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_OTHER_NONE 0x000 /* No flush */
#define GPU_COMMAND_FLUSH_CACHES_PAYLOAD_OTHER_INVALIDATE 0x200 /* INV only */

/* GPU_COMMAND_FLUSH_PA_RANGE payload bits for flush modes */
#define GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_MODE_NONE 0x00 /* No flush */
#define GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_MODE_CLEAN 0x01 /* CLN only */
#define GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_MODE_INVALIDATE 0x02 /* INV only */
#define GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_MODE_CLEAN_INVALIDATE 0x03 /* CLN + INV */

/* GPU_COMMAND_FLUSH_PA_RANGE payload bits for which caches should be the target of the command */
#define GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_OTHER_CACHE 0x10 /* Other caches */
#define GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_LSC_CACHE 0x20 /* Load-store caches */
#define GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_L2_CACHE 0x40 /* L2 caches */

/* GPU_COMMAND command + payload */
#define GPU_COMMAND_CODE_PAYLOAD(opcode, payload) \
	((__u32)(opcode) | ((__u32)(payload) << 8))
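
/* Illustrative example (not part of the register map): a GPU_COMMAND word
 * carries the 8-bit opcode in bits [7:0] and the payload from bit 8 upwards,
 * e.g.
 *
 *   GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_TIME, GPU_COMMAND_TIME_ENABLE)
 *       == 0x03 | (0x01 << 8) == 0x0103
 *
 * which is the value used by GPU_COMMAND_CYCLE_COUNT_START below.
 */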

/* Final GPU_COMMAND form */
/* No operation, nothing happens */
#define GPU_COMMAND_NOP \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_NOP, 0)

/* Stop all external bus interfaces, and then reset the entire GPU. */
#define GPU_COMMAND_SOFT_RESET \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_RESET, GPU_COMMAND_RESET_PAYLOAD_SOFT_RESET)

/* Immediately reset the entire GPU. */
#define GPU_COMMAND_HARD_RESET \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_RESET, GPU_COMMAND_RESET_PAYLOAD_HARD_RESET)

/* Starts the cycle counter, and system timestamp propagation */
#define GPU_COMMAND_CYCLE_COUNT_START \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_TIME, GPU_COMMAND_TIME_ENABLE)

/* Stops the cycle counter, and system timestamp propagation */
#define GPU_COMMAND_CYCLE_COUNT_STOP \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_TIME, GPU_COMMAND_TIME_DISABLE)

/* Clean and invalidate L2 cache (Equivalent to FLUSH_PT) */
#define GPU_COMMAND_CACHE_CLN_INV_L2                                                               \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FLUSH_CACHES,                                    \
				 (GPU_COMMAND_FLUSH_CACHES_PAYLOAD_L2_CLEAN_INVALIDATE |           \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_LSC_NONE |                      \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_OTHER_NONE))

/* Clean and invalidate L2 and LSC caches (Equivalent to FLUSH_MEM) */
#define GPU_COMMAND_CACHE_CLN_INV_L2_LSC                                                           \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FLUSH_CACHES,                                    \
				 (GPU_COMMAND_FLUSH_CACHES_PAYLOAD_L2_CLEAN_INVALIDATE |           \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_LSC_CLEAN_INVALIDATE |          \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_OTHER_NONE))

/* Clean and invalidate L2, LSC, and Other caches */
#define GPU_COMMAND_CACHE_CLN_INV_FULL                                                             \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FLUSH_CACHES,                                    \
				 (GPU_COMMAND_FLUSH_CACHES_PAYLOAD_L2_CLEAN_INVALIDATE |           \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_LSC_CLEAN_INVALIDATE |          \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_OTHER_INVALIDATE))

/* Clean and invalidate only LSC cache */
#define GPU_COMMAND_CACHE_CLN_INV_LSC                                                              \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FLUSH_CACHES,                                    \
				 (GPU_COMMAND_FLUSH_CACHES_PAYLOAD_L2_NONE |                       \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_LSC_CLEAN_INVALIDATE |          \
				  GPU_COMMAND_FLUSH_CACHES_PAYLOAD_OTHER_NONE))
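
/* Illustrative example (not part of the register map): the composite flush
 * commands above OR the per-cache payload bits into bits [15:8] of the
 * command word, e.g.
 *
 *   GPU_COMMAND_CACHE_CLN_INV_L2_LSC
 *       == 0x04 | ((0x003 | 0x030 | 0x000) << 8) == 0x3304
 */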

/* Clean and invalidate physical range L2 cache (equivalent to FLUSH_PT) */
#define GPU_COMMAND_FLUSH_PA_RANGE_CLN_INV_L2                                                      \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FLUSH_PA_RANGE,                                  \
				 (GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_MODE_CLEAN_INVALIDATE |       \
				  GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_L2_CACHE))

/* Clean and invalidate physical range L2 and LSC cache (equivalent to FLUSH_MEM) */
#define GPU_COMMAND_FLUSH_PA_RANGE_CLN_INV_L2_LSC                                                  \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FLUSH_PA_RANGE,                                  \
				 (GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_MODE_CLEAN_INVALIDATE |       \
				  GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_LSC_CACHE |                   \
				  GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_L2_CACHE))

/* Clean and invalidate physical range L2, LSC and Other caches */
#define GPU_COMMAND_FLUSH_PA_RANGE_CLN_INV_FULL                                                    \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FLUSH_PA_RANGE,                                  \
				 (GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_MODE_CLEAN_INVALIDATE |       \
				  GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_OTHER_CACHE |                 \
				  GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_LSC_CACHE |                   \
				  GPU_COMMAND_FLUSH_PA_RANGE_PAYLOAD_L2_CACHE))

/* Merge cache flush commands */
#define GPU_COMMAND_FLUSH_CACHE_MERGE(cmd1, cmd2) ((cmd1) | (cmd2))
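
/* Illustrative example (not part of the register map): merging two flush
 * commands that use the same opcode (e.g. the FLUSH_CACHES variants above)
 * is a plain OR of their payload bits, so
 *
 *   GPU_COMMAND_FLUSH_CACHE_MERGE(GPU_COMMAND_CACHE_CLN_INV_L2,
 *                                 GPU_COMMAND_CACHE_CLN_INV_LSC)
 *
 * is equivalent to GPU_COMMAND_CACHE_CLN_INV_L2_LSC.
 */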

/* Places the GPU in protected mode */
#define GPU_COMMAND_SET_PROTECTED_MODE \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_SET_PROTECTED_MODE, 0)

/* Halt CSF */
#define GPU_COMMAND_FINISH_HALT \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_FINISH_HALT, 0)

/* Clear GPU faults */
#define GPU_COMMAND_CLEAR_FAULT \
	GPU_COMMAND_CODE_PAYLOAD(GPU_COMMAND_CODE_CLEAR_FAULT, 0)

/* End Command Values */

/* GPU_FAULTSTATUS register */
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_SHIFT 0
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_MASK (0xFFul)
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_GET(reg_val) \
	(((reg_val)&GPU_FAULTSTATUS_EXCEPTION_TYPE_MASK) \
	 >> GPU_FAULTSTATUS_EXCEPTION_TYPE_SHIFT)
#define GPU_FAULTSTATUS_ACCESS_TYPE_SHIFT 8
#define GPU_FAULTSTATUS_ACCESS_TYPE_MASK \
	(0x3ul << GPU_FAULTSTATUS_ACCESS_TYPE_SHIFT)

#define GPU_FAULTSTATUS_ADDR_VALID_SHIFT 10
#define GPU_FAULTSTATUS_ADDR_VALID_FLAG \
	(1ul << GPU_FAULTSTATUS_ADDR_VALID_SHIFT)

#define GPU_FAULTSTATUS_JASID_VALID_SHIFT 11
#define GPU_FAULTSTATUS_JASID_VALID_FLAG \
	(1ul << GPU_FAULTSTATUS_JASID_VALID_SHIFT)

#define GPU_FAULTSTATUS_JASID_SHIFT 12
#define GPU_FAULTSTATUS_JASID_MASK (0xF << GPU_FAULTSTATUS_JASID_SHIFT)
#define GPU_FAULTSTATUS_JASID_GET(reg_val) \
	(((reg_val)&GPU_FAULTSTATUS_JASID_MASK) >> GPU_FAULTSTATUS_JASID_SHIFT)
#define GPU_FAULTSTATUS_JASID_SET(reg_val, value) \
	(((reg_val) & ~GPU_FAULTSTATUS_JASID_MASK) |  \
	(((value) << GPU_FAULTSTATUS_JASID_SHIFT) & GPU_FAULTSTATUS_JASID_MASK))

#define GPU_FAULTSTATUS_SOURCE_ID_SHIFT 16
#define GPU_FAULTSTATUS_SOURCE_ID_MASK \
	(0xFFFFul << GPU_FAULTSTATUS_SOURCE_ID_SHIFT)
/* End GPU_FAULTSTATUS register */
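
/* Illustrative example (not part of the register map): decoding a
 * hypothetical GPU_FAULTSTATUS snapshot with the helpers above (variable
 * names are illustrative only).
 *
 *   exception  = GPU_FAULTSTATUS_EXCEPTION_TYPE_GET(fault_status);
 *   jasid      = GPU_FAULTSTATUS_JASID_GET(fault_status);
 *   addr_valid = !!(fault_status & GPU_FAULTSTATUS_ADDR_VALID_FLAG);
 *
 * The driver's fault handlers perform the real decoding.
 */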

/* GPU_FAULTSTATUS_ACCESS_TYPE values */
#define GPU_FAULTSTATUS_ACCESS_TYPE_ATOMIC 0x0
#define GPU_FAULTSTATUS_ACCESS_TYPE_EXECUTE 0x1
#define GPU_FAULTSTATUS_ACCESS_TYPE_READ 0x2
#define GPU_FAULTSTATUS_ACCESS_TYPE_WRITE 0x3
/* End of GPU_FAULTSTATUS_ACCESS_TYPE values */

/* Implementation-dependent exception codes used to indicate CSG
 * and CS errors that are not specified in the specs.
 */
#define GPU_EXCEPTION_TYPE_SW_FAULT_0 ((__u8)0x70)
#define GPU_EXCEPTION_TYPE_SW_FAULT_1 ((__u8)0x71)
#define GPU_EXCEPTION_TYPE_SW_FAULT_2 ((__u8)0x72)

/* GPU_FAULTSTATUS_EXCEPTION_TYPE values */
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_OK 0x00
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_GPU_BUS_FAULT 0x80
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_GPU_SHAREABILITY_FAULT 0x88
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_SYSTEM_SHAREABILITY_FAULT 0x89
#define GPU_FAULTSTATUS_EXCEPTION_TYPE_GPU_CACHEABILITY_FAULT 0x8A
/* End of GPU_FAULTSTATUS_EXCEPTION_TYPE values */

#define GPU_FAULTSTATUS_ADDRESS_VALID_SHIFT GPU_U(10)
#define GPU_FAULTSTATUS_ADDRESS_VALID_MASK (GPU_U(0x1) << GPU_FAULTSTATUS_ADDRESS_VALID_SHIFT)
#define GPU_FAULTSTATUS_ADDRESS_VALID_GET(reg_val) \
	(((reg_val)&GPU_FAULTSTATUS_ADDRESS_VALID_MASK) >> GPU_FAULTSTATUS_ADDRESS_VALID_SHIFT)
#define GPU_FAULTSTATUS_ADDRESS_VALID_SET(reg_val, value) \
	(((reg_val) & ~GPU_FAULTSTATUS_ADDRESS_VALID_MASK) |  \
	(((value) << GPU_FAULTSTATUS_ADDRESS_VALID_SHIFT) & GPU_FAULTSTATUS_ADDRESS_VALID_MASK))

/* IRQ flags */
#define GPU_FAULT (1 << 0) /* A GPU Fault has occurred */
#define GPU_PROTECTED_FAULT (1 << 1) /* A GPU fault has occurred in protected mode */
#define RESET_COMPLETED (1 << 8) /* Set when a reset has completed. */
#define POWER_CHANGED_SINGLE (1 << 9) /* Set when a single core has finished powering up or down. */
#define POWER_CHANGED_ALL (1 << 10) /* Set when all cores have finished powering up or down. */
#define CLEAN_CACHES_COMPLETED (1 << 17) /* Set when a cache clean operation has completed. */
#define DOORBELL_MIRROR (1 << 18) /* Mirrors the doorbell interrupt line to the CPU */
#define MCU_STATUS_GPU_IRQ (1 << 19) /* MCU requires attention */
#define FLUSH_PA_RANGE_COMPLETED                                                                   \
	(1 << 20) /* Set when a physical range cache clean operation has completed. */

/*
 * In Debug builds,
 * GPU_IRQ_REG_COMMON | POWER_CHANGED_SINGLE is used to clear and unmask interrupt sources of GPU_IRQ
 * by writing it to the GPU_IRQ_CLEAR/MASK registers.
 *
 * In Release builds,
 * GPU_IRQ_REG_COMMON is used.
 *
 * Note:
 * CLEAN_CACHES_COMPLETED - Used separately for cache operations.
 * DOORBELL_MIRROR - Not included in GPU_IRQ_REG_COMMON because it cannot be
 *                   cleared by GPU_IRQ_CLEAR; including it could cause an interrupt storm.
 */
#define GPU_IRQ_REG_COMMON (GPU_FAULT | GPU_PROTECTED_FAULT | RESET_COMPLETED \
			| POWER_CHANGED_ALL | MCU_STATUS_GPU_IRQ)
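
/* Illustrative note (not part of the register map): following the comment
 * above, the value written to GPU_IRQ_CLEAR/GPU_IRQ_MASK would be
 *
 *   GPU_IRQ_REG_COMMON | POWER_CHANGED_SINGLE   in Debug builds
 *   GPU_IRQ_REG_COMMON                          in Release builds
 *
 * with CLEAN_CACHES_COMPLETED handled separately around cache operations.
 */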

/* GPU_FEATURES register */
#define GPU_FEATURES_RAY_TRACING_SHIFT GPU_U(2)
#define GPU_FEATURES_RAY_TRACING_MASK (GPU_U(0x1) << GPU_FEATURES_RAY_TRACING_SHIFT)
#define GPU_FEATURES_RAY_TRACING_GET(reg_val) \
	(((reg_val)&GPU_FEATURES_RAY_TRACING_MASK) >> GPU_FEATURES_RAY_TRACING_SHIFT)
/* End of GPU_FEATURES register */

#endif /* _KBASE_GPU_REGMAP_CSF_H_ */