xref: /OK3568_Linux_fs/u-boot/arch/arm/cpu/armv7/cache_v7.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */
8 #include <linux/types.h>
9 #include <common.h>
10 #include <asm/armv7.h>
11 #include <asm/utils.h>
12 
13 #define ARMV7_DCACHE_INVAL_RANGE	1
14 #define ARMV7_DCACHE_CLEAN_INVAL_RANGE	2
15 
16 #ifndef CONFIG_SYS_DCACHE_OFF
17 
18 /* Asm functions from cache_v7_asm.S */
19 void v7_flush_dcache_all(void);
20 void v7_invalidate_dcache_all(void);
21 
get_ccsidr(void)22 static u32 get_ccsidr(void)
23 {
24 	u32 ccsidr;
25 
26 	/* Read current CP15 Cache Size ID Register */
27 	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
28 	return ccsidr;
29 }
30 
/*
 * Clean & invalidate the D-cache to PoC over [start, stop), one cache
 * line at a time.  start is rounded down to a line boundary; line_len
 * is the cache line size in bytes (a power of two).
 */
static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 addr = start & ~(line_len - 1);

	while (addr < stop) {
		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (addr));
		addr += line_len;
	}
}
42 
/*
 * Invalidate the D-cache to PoC over [start, stop).  Cache lines only
 * partially covered by the range are cleaned & invalidated instead of
 * being discarded, so data outside the range is not lost.
 */
static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 addr;

#ifdef DEBUG
	check_cache_range(start, stop);
#endif
	/*
	 * Unaligned start: the first line straddles the range boundary,
	 * so clean & invalidate it (DCCIMVAC) and advance start to the
	 * next full line.
	 */
	if (start & (line_len - 1)) {
		addr = start & ~(line_len - 1);
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (addr));
		start = addr + line_len;
	}

	/*
	 * Unaligned stop: likewise preserve the partial last line with a
	 * clean & invalidate and pull stop back to its line boundary.
	 */
	if (stop & (line_len - 1)) {
		addr = stop & ~(line_len - 1);
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (addr));
		stop = addr;
	}

	/* DCIMVAC - Invalidate data cache by MVA to PoC */
	for (addr = start; addr < stop; addr += line_len)
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (addr));
}
69 
/*
 * Dispatch a D-cache maintenance operation over [start, stop) using the
 * line size of the currently selected cache level.
 *
 * CCSIDR.LineSize encodes log2(words per line) - 2: adding 2 yields
 * log2(words), adding 2 more converts words (4 bytes each) to bytes.
 */
static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
	u32 line_len, ccsidr;

	ccsidr = get_ccsidr();
	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
			CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	line_len += 2;
	/* converting from log2(linelen) to linelen */
	line_len = 1 << line_len;

	switch (range_op) {
	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
		v7_dcache_clean_inval_range(start, stop, line_len);
		break;
	case ARMV7_DCACHE_INVAL_RANGE:
		v7_dcache_inval_range(start, stop, line_len);
		break;
	default:
		/* Unknown op: do nothing rather than touch the cache */
		break;
	}

	/* DSB to make sure the operation is complete */
	dsb();
}
94 
95 /* Invalidate TLB */
v7_inval_tlb(void)96 static void v7_inval_tlb(void)
97 {
98 	/* Invalidate entire unified TLB */
99 	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
100 	/* Invalidate entire data TLB */
101 	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
102 	/* Invalidate entire instruction TLB */
103 	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
104 	/* Full system DSB - make sure that the invalidation is complete */
105 	dsb();
106 	/* Full system ISB - make sure the instruction stream sees it */
107 	isb();
108 }
109 
/* Invalidate the D-cache at all inner levels, then the outer (L2) cache */
void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();
	v7_outer_cache_inval_all();
}
116 
/*
 * Clean & invalidate the entire data cache at all inner levels, then
 * flush the outer (L2) cache if one is present.
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();
	v7_outer_cache_flush_all();
}
127 
128 /*
129  * Invalidates range in all levels of D-cache/unified cache used:
130  * Affects the range [start, stop - 1]
131  */
invalidate_dcache_range(unsigned long start,unsigned long stop)132 void invalidate_dcache_range(unsigned long start, unsigned long stop)
133 {
134 #ifdef DEBUG
135 	check_cache_range(start, stop);
136 #endif
137 	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);
138 
139 	v7_outer_cache_inval_range(start, stop);
140 }
141 
142 /*
143  * Flush range(clean & invalidate) from all levels of D-cache/unified
144  * cache used:
145  * Affects the range [start, stop - 1]
146  */
flush_dcache_range(unsigned long start,unsigned long stop)147 void flush_dcache_range(unsigned long start, unsigned long stop)
148 {
149 #ifdef DEBUG
150 	check_cache_range(start, stop);
151 #endif
152 	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);
153 
154 	v7_outer_cache_flush_range(start, stop);
155 }
156 
/* Pre-MMU setup: enable the outer cache, start with clean caches/TLBs */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}
163 
/* Push page-table updates out to memory and drop stale TLB entries */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
169 #else /* #ifndef CONFIG_SYS_DCACHE_OFF */
/* D-cache disabled (CONFIG_SYS_DCACHE_OFF): all maintenance is a no-op */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
197 #endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
198 
199 #ifndef CONFIG_SYS_ICACHE_OFF
200 /* Invalidate entire I-cache and branch predictor array */
invalidate_icache_all(void)201 void invalidate_icache_all(void)
202 {
203 	/*
204 	 * Invalidate all instruction caches to PoU.
205 	 * Also flushes branch target cache.
206 	 */
207 	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
208 
209 	/* Invalidate entire branch predictor array */
210 	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
211 
212 	/* Full system DSB - make sure that the invalidation is complete */
213 	dsb();
214 
215 	/* ISB - make sure the instruction stream sees it */
216 	isb();
217 }
218 #else
/* I-cache disabled (CONFIG_SYS_ICACHE_OFF): nothing to invalidate */
void invalidate_icache_all(void)
{
}
222 #endif
223 
224 /*  Stub implementations for outer cache operations */
v7_outer_cache_enable(void)225 __weak void v7_outer_cache_enable(void) {}
v7_outer_cache_disable(void)226 __weak void v7_outer_cache_disable(void) {}
v7_outer_cache_flush_all(void)227 __weak void v7_outer_cache_flush_all(void) {}
v7_outer_cache_inval_all(void)228 __weak void v7_outer_cache_inval_all(void) {}
v7_outer_cache_flush_range(u32 start,u32 end)229 __weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
v7_outer_cache_inval_range(u32 start,u32 end)230 __weak void v7_outer_cache_inval_range(u32 start, u32 end) {}
231