/*
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <config.h>
#include <linux/linkage.h>
#include <linux/sizes.h>
#include <asm/system.h>

/*
 * ARM()/THUMB() select instruction-set-specific lines: in a Thumb build
 * only THUMB() lines are assembled, otherwise only ARM() lines are.
 * Thumb-2 cannot encode a register-shifted register operand in orr, so
 * the Thumb variants below need an extra scratch register (r6) for the
 * shift.
 */
#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
#define ARM(x...)
#define THUMB(x...)	x
#else
#define ARM(x...)	x
#define THUMB(x...)
#endif

/*
 * v7_flush_dcache_all()
 *
 * Flush (clean & invalidate) the whole D-cache by set/way, walking every
 * cache level up to the Level of Coherency reported by CLIDR.
 *
 * Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
 *
 * Note: copied from arch/arm/mm/cache-v7.S of Linux 4.4
 */
ENTRY(__v7_flush_dcache_all)
	dmb					@ ensure ordering with previous memory accesses
	mrc	p15, 1, r0, c0, c0, 1		@ read clidr
	mov	r3, r0, lsr #23			@ move LoC into position
	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
	beq	finished			@ if loc is 0, then no need to clean
start_flush_levels:
	mov	r10, #0				@ start clean at cache level 0
flush_levels:
	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
	and	r1, r1, #7			@ mask off the bits for current cache only
	cmp	r1, #2				@ see what cache we have at this level
	blt	skip				@ skip if no cache, or just i-cache
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	isb					@ isb to sync the new cssr&csidr
	mrc	p15, 1, r1, c0, c0, 0		@ read the new csidr
	and	r2, r1, #7			@ extract the length of the cache lines
	add	r2, r2, #4			@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3		@ find maximum number of the way size
	clz	r5, r4				@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13		@ extract max number of the index size
loop1:
	mov	r9, r7				@ create working copy of max index
loop2:
	ARM(	orr	r11, r10, r4, lsl r5	)	@ factor way and cache number into r11
	THUMB(	lsl	r6, r4, r5		)
	THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
	ARM(	orr	r11, r11, r9, lsl r2	)	@ factor index number into r11
	THUMB(	lsl	r6, r9, r2		)
	THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
	mcr	p15, 0, r11, c7, c14, 2		@ clean & invalidate by set/way
	subs	r9, r9, #1			@ decrement the index
	bge	loop2
	subs	r4, r4, #1			@ decrement the way
	bge	loop1
skip:
	add	r10, r10, #2			@ increment cache number
	cmp	r3, r10
#ifdef CONFIG_ARM_ERRATA_814220
	@ erratum 814220 workaround: DSB between maintenance of different
	@ cache levels to enforce completion ordering
	dsb
#endif
	bgt	flush_levels
finished:
	mov	r10, #0				@ switch back to cache level 0
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	dsb	st
	isb
	bx	lr
ENDPROC(__v7_flush_dcache_all)

/*
 * Public entry point: preserves the callee-saved registers clobbered by
 * __v7_flush_dcache_all (register set differs per instruction set, since
 * the Thumb path additionally uses r6 as scratch).
 */
ENTRY(v7_flush_dcache_all)
	ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
	bl	__v7_flush_dcache_all
	ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
	bx	lr
ENDPROC(v7_flush_dcache_all)

/*
 * v7_invalidate_dcache_all()
 *
 * Invalidate the whole D-cache.
 *
 * Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
 *
 * Note: copied from __v7_flush_dcache_all above with
 *	mcr     p15, 0, r11, c7, c14, 2
 * Replaced with:
 *	mcr     p15, 0, r11, c7, c6, 2
 */
ENTRY(__v7_invalidate_dcache_all)
	dmb					@ ensure ordering with previous memory accesses
	mrc	p15, 1, r0, c0, c0, 1		@ read clidr
	mov	r3, r0, lsr #23			@ move LoC into position
	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
	beq	inval_finished			@ if loc is 0, then no need to clean
	mov	r10, #0				@ start clean at cache level 0
inval_levels:
	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
	and	r1, r1, #7			@ mask off the bits for current cache only
	cmp	r1, #2				@ see what cache we have at this level
	blt	inval_skip			@ skip if no cache, or just i-cache
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	isb					@ isb to sync the new cssr&csidr
	mrc	p15, 1, r1, c0, c0, 0		@ read the new csidr
	and	r2, r1, #7			@ extract the length of the cache lines
	add	r2, r2, #4			@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3		@ find maximum number of the way size
	clz	r5, r4				@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13		@ extract max number of the index size
inval_loop1:
	mov	r9, r7				@ create working copy of max index
inval_loop2:
	ARM(	orr	r11, r10, r4, lsl r5	)	@ factor way and cache number into r11
	THUMB(	lsl	r6, r4, r5		)
	THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
	ARM(	orr	r11, r11, r9, lsl r2	)	@ factor index number into r11
	THUMB(	lsl	r6, r9, r2		)
	THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
	mcr	p15, 0, r11, c7, c6, 2		@ invalidate by set/way
	subs	r9, r9, #1			@ decrement the index
	bge	inval_loop2
	subs	r4, r4, #1			@ decrement the way
	bge	inval_loop1
inval_skip:
	add	r10, r10, #2			@ increment cache number
	cmp	r3, r10
	bgt	inval_levels
inval_finished:
	mov	r10, #0				@ switch back to cache level 0
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	dsb	st
	isb
	bx	lr
ENDPROC(__v7_invalidate_dcache_all)

/*
 * Public entry point: preserves the callee-saved registers clobbered by
 * __v7_invalidate_dcache_all (register set differs per instruction set,
 * since the Thumb path additionally uses r6 as scratch).
 */
ENTRY(v7_invalidate_dcache_all)
	ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
	bl	__v7_invalidate_dcache_all
	ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
	bx	lr
ENDPROC(v7_invalidate_dcache_all)