/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Bit definitions for the MCF54xx ACR and CACR registers.
 */

#ifndef m54xxacr_h
#define m54xxacr_h

/*
 * Define the Cache register flags.
 */
#define CACR_DEC	0x80000000	/* Enable data cache */
#define CACR_DWP	0x40000000	/* Data write protection */
#define CACR_DESB	0x20000000	/* Enable data store buffer */
#define CACR_DDPI	0x10000000	/* Disable invalidation by CPUSHL */
#define CACR_DHCLK	0x08000000	/* Half data cache lock mode */
#define CACR_DDCM_WT	0x00000000	/* Write through cache */
#define CACR_DDCM_CP	0x02000000	/* Copyback cache */
#define CACR_DDCM_P	0x04000000	/* No cache, precise */
#define CACR_DDCM_IMP	0x06000000	/* No cache, imprecise */
#define CACR_DCINVA	0x01000000	/* Invalidate data cache */
#define CACR_BEC	0x00080000	/* Enable branch cache */
#define CACR_BCINVA	0x00040000	/* Invalidate branch cache */
#define CACR_IEC	0x00008000	/* Enable instruction cache */
#define CACR_DNFB	0x00002000	/* Inhibited fill buffer */
#define CACR_IDPI	0x00001000	/* Disable CPUSHL */
#define CACR_IHLCK	0x00000800	/* Instruction cache half lock */
#define CACR_IDCM	0x00000400	/* Instruction cache inhibit */
#define CACR_ICINVA	0x00000100	/* Invalidate instr cache */
#define CACR_EUSP	0x00000020	/* Enable separate user a7 */

#define ACR_BASE_POS	24		/* Address Base */
#define ACR_MASK_POS	16		/* Address Mask */
#define ACR_ENABLE	0x00008000	/* Enable address */
#define ACR_USER	0x00000000	/* User mode access only */
#define ACR_SUPER	0x00002000	/* Supervisor mode only */
#define ACR_ANY		0x00004000	/* Match any access mode */
#define ACR_CM_WT	0x00000000	/* Write through mode */
#define ACR_CM_CP	0x00000020	/* Copyback mode */
#define ACR_CM_OFF_PRE	0x00000040	/* No cache, precise */
#define ACR_CM_OFF_IMP	0x00000060	/* No cache, imprecise */
#define ACR_CM		0x00000060	/* Cache mode mask */
#define ACR_SP		0x00000008	/* Supervisor protect */
#define ACR_WPROTECT	0x00000004	/* Write protect */

#define ACR_BA(x)	((x) & 0xff000000)
#define ACR_ADMSK(x)	((((x) - 1) & 0xff000000) >> 8)
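/*
 * Worked example (illustrative values only, not used by the kernel):
 * a 256MB region based at 0xf0000000 would be encoded as
 *
 *	ACR_BA(0xf0000000)    = 0xf0000000
 *	ACR_ADMSK(0x10000000) = ((0x10000000 - 1) & 0xff000000) >> 8
 *	                      = 0x000f0000
 *
 * The mask byte 0x0f marks address bits 27-24 as "don't care", so the
 * ACR matches the whole 0xf0000000-0xffffffff range.
 */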
#if defined(CONFIG_M5407)

#define ICACHE_SIZE 0x4000	/* instruction - 16k */
#define DCACHE_SIZE 0x2000	/* data - 8k */

#elif defined(CONFIG_M54xx)

#define ICACHE_SIZE 0x8000	/* instruction - 32k */
#define DCACHE_SIZE 0x8000	/* data - 32k */

#elif defined(CONFIG_M5441x)

#define ICACHE_SIZE 0x2000	/* instruction - 8k */
#define DCACHE_SIZE 0x2000	/* data - 8k */
#endif

#define CACHE_LINE_SIZE 0x0010	/* 16 bytes */
#define CACHE_WAYS 4		/* 4 ways */

/*
 * Each cache is 4-way with 16-byte lines, so it holds SIZE/64 sets.
 * The set index sits at address bits 4 and up; the shift count reuses
 * CACHE_WAYS, which happens to equal log2(CACHE_LINE_SIZE).
 */
#define ICACHE_SET_MASK	((ICACHE_SIZE / 64 - 1) << CACHE_WAYS)
#define DCACHE_SET_MASK	((DCACHE_SIZE / 64 - 1) << CACHE_WAYS)
#define ICACHE_MAX_ADDR	ICACHE_SET_MASK
#define DCACHE_MAX_ADDR	DCACHE_SET_MASK

/*
 * Version 4 cores have a true Harvard-style separate instruction
 * and data cache. Enable data and instruction caches, also enable write
 * buffers and branch accelerator.
 */
/* attention: enabling CACR_DESB requires a "nop" to flush the store buffer */
/* use '+' instead of '|' for assembler's sake */

/* Enable data cache */
/* Enable data store buffer */
/* outside ACRs: No cache, precise */
/* Enable instruction+branch caches */
#if defined(CONFIG_M5407)
#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC)
#else
#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP)
#endif
#define CACHE_INIT (CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)
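/*
 * Typical bring-up sequence (illustrative sketch, not part of this
 * header): ColdFire's movec can only write control registers, so boot
 * code loads CACHE_INIT first to invalidate everything, then
 * CACHE_MODE to enable the caches, with a "nop" around the CACR write
 * as required by the CACR_DESB note above:
 *
 *	move.l	#CACHE_INIT,%d0		invalidate all caches first
 *	movec	%d0,%cacr
 *	move.l	#CACHE_MODE,%d0		then enable them
 *	movec	%d0,%cacr
 *	nop				flush the store buffer
 */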

#if defined(CONFIG_MMU)
/*
 * If running with the MMU enabled then we need to map the internal
 * register region as non-cacheable. And then we map all our RAM as
 * cacheable and supervisor access only.
 */
#define ACR0_MODE	(ACR_BA(IOMEMBASE)+ACR_ADMSK(IOMEMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_CM_OFF_PRE+ACR_SP)
#if defined(CONFIG_CACHE_COPYBACK)
#define ACR1_MODE	(ACR_BA(CONFIG_RAMBASE)+ACR_ADMSK(CONFIG_RAMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_SP+ACR_CM_CP)
#else
#define ACR1_MODE	(ACR_BA(CONFIG_RAMBASE)+ACR_ADMSK(CONFIG_RAMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_SP+ACR_CM_WT)
#endif
#define ACR2_MODE	0
#define ACR3_MODE	(ACR_BA(CONFIG_RAMBASE)+ACR_ADMSK(CONFIG_RAMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_SP)

#else

/*
 * For the non-MMU enabled case we map all of RAM as cacheable.
 */
#if defined(CONFIG_CACHE_COPYBACK)
#define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_CP)
#else
#define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_WT)
#endif
#define INSN_CACHE_MODE (ACR_ENABLE+ACR_ANY)

#define CACHE_INVALIDATE  (CACHE_MODE+CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)
#define CACHE_INVALIDATEI (CACHE_MODE+CACR_BCINVA+CACR_ICINVA)
#define CACHE_INVALIDATED (CACHE_MODE+CACR_DCINVA)
/* base 0x00000000, mask byte 0x0f: match the first 256MB of the address space */
#define ACR0_MODE	(0x000f0000+DATA_CACHE_MODE)
#define ACR1_MODE	0
#define ACR2_MODE	(0x000f0000+INSN_CACHE_MODE)
#define ACR3_MODE	0

#if ((DATA_CACHE_MODE & ACR_CM) == ACR_CM_CP)
/* Copyback cache mode must push dirty cache lines first */
#define CACHE_PUSH
#endif

#endif /* CONFIG_MMU */
#endif /* m54xxacr_h */
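/*
 * Illustrative sketch (not part of the original header): when
 * CACHE_PUSH is defined, flush code must write back dirty lines with
 * CPUSHL before invalidating. Assuming the usual ColdFire CPUSHL
 * set/way addressing (way number in the low address bits, set index
 * from bit 4 up), a data cache push loop could look like:
 *
 *	static inline void example_dcache_push(void)
 *	{
 *		unsigned long way, set;
 *
 *		for (way = 0; way < CACHE_WAYS; way++)
 *			for (set = 0; set <= DCACHE_MAX_ADDR;
 *			     set += CACHE_LINE_SIZE)
 *				__asm__ __volatile__ (
 *					"cpushl %%dc,(%0)"
 *					: : "a" (set + way));
 *	}
 *
 * The kernel does this from assembler in practice; the function name
 * above is hypothetical.
 */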