/*
 * (C) Copyright 2002
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

/* for now: just dummy functions to satisfy the linker */
9*4882a593Smuzhiyun
10*4882a593Smuzhiyun #include <common.h>
11*4882a593Smuzhiyun #include <malloc.h>
12*4882a593Smuzhiyun
13*4882a593Smuzhiyun /*
14*4882a593Smuzhiyun * Flush range from all levels of d-cache/unified-cache.
15*4882a593Smuzhiyun * Affects the range [start, start + size - 1].
16*4882a593Smuzhiyun */
flush_cache(unsigned long start,unsigned long size)17*4882a593Smuzhiyun __weak void flush_cache(unsigned long start, unsigned long size)
18*4882a593Smuzhiyun {
19*4882a593Smuzhiyun flush_dcache_range(start, start + size);
20*4882a593Smuzhiyun }
21*4882a593Smuzhiyun
22*4882a593Smuzhiyun /*
23*4882a593Smuzhiyun * Default implementation:
24*4882a593Smuzhiyun * do a range flush for the entire range
25*4882a593Smuzhiyun */
flush_dcache_all(void)26*4882a593Smuzhiyun __weak void flush_dcache_all(void)
27*4882a593Smuzhiyun {
28*4882a593Smuzhiyun flush_cache(0, ~0);
29*4882a593Smuzhiyun }
30*4882a593Smuzhiyun
31*4882a593Smuzhiyun /*
32*4882a593Smuzhiyun * Default implementation of enable_caches()
33*4882a593Smuzhiyun * Real implementation should be in platform code
34*4882a593Smuzhiyun */
enable_caches(void)35*4882a593Smuzhiyun __weak void enable_caches(void)
36*4882a593Smuzhiyun {
37*4882a593Smuzhiyun puts("WARNING: Caches not enabled\n");
38*4882a593Smuzhiyun }
39*4882a593Smuzhiyun
/*
 * Invalidate the d-cache for the range [start, stop).
 *
 * Weak no-op stub provided only to satisfy the linker; platforms that
 * actually maintain caches must supply their own implementation.
 */
__weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub, real implementation should be in platform code */
}
/*
 * Flush (clean) the d-cache for the range [start, stop).
 *
 * Weak no-op stub provided only to satisfy the linker; platforms that
 * actually maintain caches must supply their own implementation.
 */
__weak void flush_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub, real implementation should be in platform code */
}
48*4882a593Smuzhiyun
check_cache_range(unsigned long start,unsigned long stop)49*4882a593Smuzhiyun int check_cache_range(unsigned long start, unsigned long stop)
50*4882a593Smuzhiyun {
51*4882a593Smuzhiyun int ok = 1;
52*4882a593Smuzhiyun
53*4882a593Smuzhiyun if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
54*4882a593Smuzhiyun ok = 0;
55*4882a593Smuzhiyun
56*4882a593Smuzhiyun if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
57*4882a593Smuzhiyun ok = 0;
58*4882a593Smuzhiyun
59*4882a593Smuzhiyun if (!ok) {
60*4882a593Smuzhiyun warn_non_spl("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
61*4882a593Smuzhiyun start, stop);
62*4882a593Smuzhiyun }
63*4882a593Smuzhiyun
64*4882a593Smuzhiyun return ok;
65*4882a593Smuzhiyun }
66*4882a593Smuzhiyun
#ifdef CONFIG_SYS_NONCACHED_MEMORY
/*
 * Reserve one MMU section worth of address space below the malloc() area that
 * will be mapped uncached.
 */
static unsigned long noncached_start;	/* base of the uncached window */
static unsigned long noncached_end;	/* first address past the window */
static unsigned long noncached_next;	/* bump-allocator cursor */

noncached_init(void)76*4882a593Smuzhiyun void noncached_init(void)
77*4882a593Smuzhiyun {
78*4882a593Smuzhiyun phys_addr_t start, end;
79*4882a593Smuzhiyun size_t size;
80*4882a593Smuzhiyun
81*4882a593Smuzhiyun end = ALIGN(mem_malloc_start, MMU_SECTION_SIZE) - MMU_SECTION_SIZE;
82*4882a593Smuzhiyun size = ALIGN(CONFIG_SYS_NONCACHED_MEMORY, MMU_SECTION_SIZE);
83*4882a593Smuzhiyun start = end - size;
84*4882a593Smuzhiyun
85*4882a593Smuzhiyun debug("mapping memory %pa-%pa non-cached\n", &start, &end);
86*4882a593Smuzhiyun
87*4882a593Smuzhiyun noncached_start = start;
88*4882a593Smuzhiyun noncached_end = end;
89*4882a593Smuzhiyun noncached_next = start;
90*4882a593Smuzhiyun
91*4882a593Smuzhiyun #ifndef CONFIG_SYS_DCACHE_OFF
92*4882a593Smuzhiyun mmu_set_region_dcache_behaviour(noncached_start, size, DCACHE_OFF);
93*4882a593Smuzhiyun #endif
94*4882a593Smuzhiyun }
95*4882a593Smuzhiyun
noncached_alloc(size_t size,size_t align)96*4882a593Smuzhiyun phys_addr_t noncached_alloc(size_t size, size_t align)
97*4882a593Smuzhiyun {
98*4882a593Smuzhiyun phys_addr_t next = ALIGN(noncached_next, align);
99*4882a593Smuzhiyun
100*4882a593Smuzhiyun if (next >= noncached_end || (noncached_end - next) < size)
101*4882a593Smuzhiyun return 0;
102*4882a593Smuzhiyun
103*4882a593Smuzhiyun debug("allocated %zu bytes of uncached memory @%pa\n", size, &next);
104*4882a593Smuzhiyun noncached_next = next + size;
105*4882a593Smuzhiyun
106*4882a593Smuzhiyun return next;
107*4882a593Smuzhiyun }
108*4882a593Smuzhiyun #endif /* CONFIG_SYS_NONCACHED_MEMORY */
109*4882a593Smuzhiyun
110*4882a593Smuzhiyun #if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
/*
 * Invalidate the L2 cache via a CP15 coprocessor write.
 *
 * NOTE(review): the MCR encoding (p15, op1=1, c15, c11, op2=0) is an
 * implementation-defined cache operation — confirm against the target
 * core's TRM; the written value appears to be ignored.
 */
void invalidate_l2_cache(void)
{
	unsigned int val = 0;

	asm volatile("mcr p15, 1, %0, c15, c11, 0 @ invl l2 cache"
		     : : "r" (val) : "cc");
	/* Barrier: make sure the maintenance op completes before continuing. */
	isb();
}
119*4882a593Smuzhiyun #endif
120