/*
 * Copyright (c) 2024, Altera Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#include <arch.h>
#include <asm_macros.S>
#include <cpu_macros.S>
#include <plat_macros.S>

	.globl invalidate_dcache_all
	.globl invalidate_cache_low_el
	/* --------------------------------------------------------
	 * Invalidate the TLBs for NS EL2 and EL1
	 * --------------------------------------------------------
	 */
func invalidate_cache_low_el
	mrs	x0, SCR_EL3
	orr	x1, x0, #SCR_NS_BIT
	msr	SCR_EL3, x1		/* switch to the Non-secure world */
	isb
	tlbi	ALLE2			/* invalidate all NS EL2 TLB entries */
	dsb	sy
	tlbi	ALLE1			/* invalidate all NS EL1&0 TLB entries */
	dsb	sy
	msr	SCR_EL3, x0		/* restore the original SCR_EL3 */
	isb
	ret
endfunc invalidate_cache_low_el

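/*
 * Usage sketch (hypothetical caller, not defined in this file): EL3 C
 * code would declare the routine as
 *	extern void invalidate_cache_low_el(void);
 * Note that, despite its name, the routine operates on TLBs, not data
 * caches: it briefly sets SCR_EL3.NS so that the TLBI ALLE2/ALLE1
 * operations target the non-secure EL2 and EL1&0 translation regimes.
 */
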
.pushsection .text.asm_dcache_level, "ax"
func asm_dcache_level
	lsl	x12, x0, #1
	msr	csselr_el1, x12		/* select cache level */
	isb				/* sync change of csselr_el1 */
	mrs	x6, ccsidr_el1		/* read the new ccsidr_el1 */
	ubfx	x2, x6,  #0,  #3	/* x2 <- log2(cache line size)-4 */
	ubfx	x3, x6,  #3, #10	/* x3 <- number of cache ways - 1 */
	ubfx	x4, x6, #13, #15	/* x4 <- number of cache sets - 1 */
	add	x2, x2, #4		/* x2 <- log2(cache line size) */
	clz	w5, w3			/* bit position of #ways */
	/* x12 <- cache level << 1 */
	/* x2 <- line length offset */
	/* x3 <- number of cache ways - 1 */
	/* x4 <- number of cache sets - 1 */
	/* x5 <- bit position of #ways */

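	/*
	 * Worked example (an illustrative assumption, not a property of
	 * this file): for a 32KB, 4-way, 64-byte-line cache at level 0,
	 * CCSIDR_EL1 yields x2 = 6 (log2(64)), x3 = 3 (ways - 1) and
	 * x4 = 127 (sets - 1); clz(3) = 30, so the way index occupies
	 * bits [31:30]. Way 2 of set 5 then encodes to
	 * x9 = (0 << 1) | (2 << 30) | (5 << 6) = 0x80000140, the
	 * set/way operand consumed by DC ISW/CISW below.
	 */
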
loop_set:
	mov	x6, x3			/* x6 <- working copy of #ways */
loop_way:
	lsl	x7, x6, x5
	orr	x9, x12, x7		/* map way and level to cisw value */
	lsl	x7, x4, x2
	orr	x9, x9, x7		/* map set number to cisw value */
	tbz	w1, #0, 1f		/* x1 bit 0 clear: clean & invalidate */
	dc	isw, x9			/* invalidate by set/way */
	b	2f
1:	dc	cisw, x9		/* clean & invalidate by set/way */
2:	subs	x6, x6, #1		/* decrement the way */
	b.ge	loop_way
	subs	x4, x4, #1		/* decrement the set */
	b.ge	loop_set

	ret
endfunc asm_dcache_level
.popsection

/*
 * void asm_dcache_all(int invalidate_only)
 *
 * x0: 0 clean & invalidate, 1 invalidate only
 *
 * Flush or invalidate all data caches by set/way.
 */
.pushsection .text.asm_dcache_all, "ax"
func asm_dcache_all
	mov	x1, x0			/* x1 <- invalidate_only flag */
	dsb	sy
	mrs	x10, clidr_el1		/* read clidr_el1 */
	ubfx	x11, x10, #24, #3	/* x11 <- loc */
	cbz	x11, finished		/* if loc is 0, exit */
	mov	x15, x30		/* preserve the return address */
	mov	x0, #0			/* start flush at cache level 0 */
	/* x0  <- cache level */
	/* x10 <- clidr_el1 */
	/* x11 <- loc */
	/* x15 <- return address */

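	/*
	 * Worked example (an illustrative assumption): with
	 * CLIDR_EL1 = 0x02000023 (Ctype1 = 3, separate I+D at L1;
	 * Ctype2 = 4, unified L2; LoC = 2), the loop below visits
	 * levels 0 and 1, both of which pass the "cache type >= 2"
	 * test, so the L1 data cache and the L2 cache are each handed
	 * to asm_dcache_level.
	 */
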
loop_level:
	add	x12, x0, x0, lsl #1	/* x12 <- tripled cache level */
	lsr	x12, x10, x12
	and	x12, x12, #7		/* x12 <- cache type */
	cmp	x12, #2
	b.lt	skip			/* skip if no cache or icache only */
	bl	asm_dcache_level	/* x1 = 0 flush, 1 invalidate */
skip:
	add	x0, x0, #1		/* increment cache level */
	cmp	x11, x0
	b.gt	loop_level

	mov	x0, #0
	msr	csselr_el1, x0		/* restore csselr_el1 */
	dsb	sy
	isb
	mov	x30, x15		/* restore the return address */

finished:
	ret
endfunc asm_dcache_all
.popsection

.pushsection .text.invalidate_dcache_all, "ax"
func invalidate_dcache_all
	mov	x0, #0x1		/* invalidate only, do not clean */
	b	asm_dcache_all		/* tail call; asm_dcache_all's ret returns to our caller */
endfunc invalidate_dcache_all
.popsection
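
/*
 * A minimal sketch (an assumption, not part of this file) of the
 * complementary clean-and-invalidate wrapper implied by the
 * asm_dcache_all contract above, shown only to illustrate the
 * x0 = 0 path:
 *
 *	.globl flush_dcache_all
 * func flush_dcache_all
 *	mov	x0, #0		// 0: clean & invalidate
 *	b	asm_dcache_all
 * endfunc flush_dcache_all
 */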