/* SPDX-License-Identifier: GPL-2.0 */
/****************************************************************************/

/*
 * m53xxacr.h -- ColdFire version 3 core cache support
 *
 * (C) Copyright 2010, Greg Ungerer <gerg@snapgear.com>
 */

/****************************************************************************/
#ifndef m53xxacr_h
#define m53xxacr_h
/****************************************************************************/

/*
 * All variants of the ColdFire using version 3 cores have a similar
 * cache setup. They have a unified instruction and data cache, with
 * configurable write-through or copy-back operation.
 */

/*
 * Define the Cache Control register (CACR) flags.
 */
#define CACR_EC		0x80000000	/* Enable cache */
#define CACR_ESB	0x20000000	/* Enable store buffer */
#define CACR_DPI	0x10000000	/* Disable invalidation by CPUSHL */
#define CACR_HLCK	0x08000000	/* Half cache lock mode */
#define CACR_CINVA	0x01000000	/* Invalidate cache */
#define CACR_DNFB	0x00000400	/* Inhibited fill buffer */
#define CACR_DCM_WT	0x00000000	/* Cacheable write-through */
#define CACR_DCM_CB	0x00000100	/* Cacheable copy-back */
#define CACR_DCM_PRE	0x00000200	/* Cache inhibited, precise */
#define CACR_DCM_IMPRE	0x00000300	/* Cache inhibited, imprecise */
#define CACR_WPROTECT	0x00000020	/* Write protect */
#define CACR_EUSP	0x00000010	/* Enable separate user a7 */

/*
 * Define the Access Control register (ACR) flags.
 */
#define ACR_BASE_POS	24		/* Address Base (upper 8 bits) */
#define ACR_MASK_POS	16		/* Address Mask (next 8 bits) */
#define ACR_ENABLE	0x00008000	/* Enable this ACR */
#define ACR_USER	0x00000000	/* Allow only user accesses */
#define ACR_SUPER	0x00002000	/* Allow supervisor access only */
#define ACR_ANY		0x00004000	/* Allow any access type */
#define ACR_CM_WT	0x00000000	/* Cacheable, write-through */
#define ACR_CM_CB	0x00000020	/* Cacheable, copy-back */
#define ACR_CM_PRE	0x00000040	/* Cache inhibited, precise */
#define ACR_CM_IMPRE	0x00000060	/* Cache inhibited, imprecise */
#define ACR_WPROTECT	0x00000004	/* Write protect region */

/*
 * Define the cache type and arrangement (needed for pushes).
 */
#if defined(CONFIG_M5307)
#define CACHE_SIZE	0x2000		/* 8k of unified cache */
#define ICACHE_SIZE	CACHE_SIZE
#define DCACHE_SIZE	CACHE_SIZE
#elif defined(CONFIG_M53xx)
#define CACHE_SIZE	0x4000		/* 16k of unified cache */
#define ICACHE_SIZE	CACHE_SIZE
#define DCACHE_SIZE	CACHE_SIZE
#endif

#define CACHE_LINE_SIZE	16		/* 16 byte line size */
#define CACHE_WAYS	4		/* 4 ways - set associative */

/*
 * Set the cache controller settings we will use. The default in the
 * CACR is cache inhibited; we use the ACR register to set caching
 * enabled on the regions we want (eg RAM).
 */
#if defined(CONFIG_CACHE_COPYBACK)
#define CACHE_TYPE	ACR_CM_CB
#define CACHE_PUSH
#else
#define CACHE_TYPE	ACR_CM_WT
#endif

#ifdef CONFIG_COLDFIRE_SW_A7
#define CACHE_MODE	(CACR_EC + CACR_ESB + CACR_DCM_PRE)
#else
#define CACHE_MODE	(CACR_EC + CACR_ESB + CACR_DCM_PRE + CACR_EUSP)
#endif

/*
 * Unified cache means we will never need to flush for coherency of
 * instruction fetch. We will need to flush to maintain memory/DMA
 * coherency though in all cases. And for copyback caches we will need
 * to push cached data as well.
 */
#define CACHE_INIT	(CACHE_MODE + CACR_CINVA - CACR_EC)
#define CACHE_INVALIDATE (CACHE_MODE + CACR_CINVA)
#define CACHE_INVALIDATED (CACHE_MODE + CACR_CINVA)

/* ACR0 caches RAM: base from CONFIG_RAMBASE, 16MB mask, any access type */
#define ACR0_MODE	((CONFIG_RAMBASE & 0xff000000) + \
			 (0x000f0000) + \
			 (ACR_ENABLE + ACR_ANY + CACHE_TYPE))
#define ACR1_MODE	0

/****************************************************************************/
#endif	/* m53xxacr_h */