12c451f78SAneesh V /*
22c451f78SAneesh V * (C) Copyright 2010
32c451f78SAneesh V * Texas Instruments, <www.ti.com>
42c451f78SAneesh V * Aneesh V <aneesh@ti.com>
52c451f78SAneesh V *
61a459660SWolfgang Denk * SPDX-License-Identifier: GPL-2.0+
72c451f78SAneesh V */
82c451f78SAneesh V #ifndef ARMV7_H
92c451f78SAneesh V #define ARMV7_H
102c451f78SAneesh V
/*
 * Cortex-A9 revisions: full Main ID Register (MIDR) values,
 * encoding implementer (0x41 = ARM), variant, part number and revision.
 */
#define MIDR_CORTEX_A9_R0P1	0x410FC091
#define MIDR_CORTEX_A9_R1P2	0x411FC092
#define MIDR_CORTEX_A9_R1P3	0x411FC093
#define MIDR_CORTEX_A9_R2P10	0x412FC09A

/* Cortex-A15 revisions (MIDR values) */
#define MIDR_CORTEX_A15_R0P0	0x410FC0F0
#define MIDR_CORTEX_A15_R2P2	0x412FC0F2

/* Cortex-A7 revisions (MIDR values) */
#define MIDR_CORTEX_A7_R0P0	0x410FC070

/*
 * Mask that keeps the implementer, architecture and primary part number
 * fields of MIDR while discarding the variant and revision fields.
 */
#define MIDR_PRIMARY_PART_MASK	0xFF0FFFF0

/*
 * ID_PFR1 feature fields: 4-bit nibbles describing support for the
 * Security extensions, Virtualization extensions and the Generic Timer.
 */
#define CPUID_ARM_SEC_SHIFT		4
#define CPUID_ARM_SEC_MASK		(0xF << CPUID_ARM_SEC_SHIFT)
#define CPUID_ARM_VIRT_SHIFT		12
#define CPUID_ARM_VIRT_MASK		(0xF << CPUID_ARM_VIRT_SHIFT)
#define CPUID_ARM_GENTIMER_SHIFT	16
#define CPUID_ARM_GENTIMER_MASK		(0xF << CPUID_ARM_GENTIMER_SHIFT)

/* valid bits in CBAR register / PERIPHBASE value (bits [31:15]) */
#define CBAR_MASK			0xFFFF8000

/* CCSIDR (Cache Size ID Register) field offsets and masks */
#define CCSIDR_LINE_SIZE_OFFSET		0
#define CCSIDR_LINE_SIZE_MASK		0x7
#define CCSIDR_ASSOCIATIVITY_OFFSET	3
#define CCSIDR_ASSOCIATIVITY_MASK	(0x3FF << 3)
#define CCSIDR_NUM_SETS_OFFSET		13
#define CCSIDR_NUM_SETS_MASK		(0x7FFF << 13)

/*
 * Values for InD field in CSSELR
 * Selects the type of cache
 */
#define ARMV7_CSSELR_IND_DATA_UNIFIED	0
#define ARMV7_CSSELR_IND_INSTRUCTION	1

/* Values for Ctype fields in CLIDR (cache type present at each level) */
#define ARMV7_CLIDR_CTYPE_NO_CACHE		0
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_ONLY	1
#define ARMV7_CLIDR_CTYPE_DATA_ONLY		2
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA	3
#define ARMV7_CLIDR_CTYPE_UNIFIED		4
582c451f78SAneesh V
59d75ba503SAndre Przywara #ifndef __ASSEMBLY__
60d75ba503SAndre Przywara #include <linux/types.h>
61301c1283STom Rini #include <asm/io.h>
621ea4fac5SAndre Przywara #include <asm/barriers.h>
639ba379adSValentine Barshak
64*2bae3f50SJagan Teki /* read L2 control register (L2CTLR) */
read_l2ctlr(void)65*2bae3f50SJagan Teki static inline uint32_t read_l2ctlr(void)
66*2bae3f50SJagan Teki {
67*2bae3f50SJagan Teki uint32_t val = 0;
68*2bae3f50SJagan Teki
69*2bae3f50SJagan Teki asm volatile ("mrc p15, 1, %0, c9, c0, 2" : "=r" (val));
70*2bae3f50SJagan Teki
71*2bae3f50SJagan Teki return val;
72*2bae3f50SJagan Teki }
73*2bae3f50SJagan Teki
/*
 * Write the L2 Control Register (L2CTLR) via CP15, followed by an
 * instruction barrier so the new value takes effect before any
 * subsequent instruction.
 *
 * @val: value to program into L2CTLR
 */
static inline void write_l2ctlr(uint32_t val)
{
	/*
	 * Note: L2CTLR can only be written when the L2 memory system
	 * is idle, ie before the MMU is enabled.
	 */
	asm volatile("mcr p15, 1, %0, c9, c0, 2" : : "r" (val) : "memory");
	isb();
}
84*2bae3f50SJagan Teki
850c08baf0SAkshay Saraswat /*
860c08baf0SAkshay Saraswat * Workaround for ARM errata # 798870
870c08baf0SAkshay Saraswat * Set L2ACTLR[7] to reissue any memory transaction in the L2 that has been
880c08baf0SAkshay Saraswat * stalled for 1024 cycles to verify that its hazard condition still exists.
890c08baf0SAkshay Saraswat */
v7_enable_l2_hazard_detect(void)900c08baf0SAkshay Saraswat static inline void v7_enable_l2_hazard_detect(void)
910c08baf0SAkshay Saraswat {
920c08baf0SAkshay Saraswat uint32_t val;
930c08baf0SAkshay Saraswat
940c08baf0SAkshay Saraswat /* L2ACTLR[7]: Enable hazard detect timeout */
950c08baf0SAkshay Saraswat asm volatile ("mrc p15, 1, %0, c15, c0, 0\n\t" : "=r"(val));
960c08baf0SAkshay Saraswat val |= (1 << 7);
970c08baf0SAkshay Saraswat asm volatile ("mcr p15, 1, %0, c15, c0, 0\n\t" : : "r"(val));
980c08baf0SAkshay Saraswat }
990c08baf0SAkshay Saraswat
100a3895314SAkshay Saraswat /*
101a3895314SAkshay Saraswat * Workaround for ARM errata # 799270
102a3895314SAkshay Saraswat * Ensure that the L2 logic has been used within the previous 256 cycles
103a3895314SAkshay Saraswat * before modifying the ACTLR.SMP bit. This is required during boot before
104a3895314SAkshay Saraswat * MMU has been enabled, or during a specified reset or power down sequence.
105a3895314SAkshay Saraswat */
v7_enable_smp(uint32_t address)106a3895314SAkshay Saraswat static inline void v7_enable_smp(uint32_t address)
107a3895314SAkshay Saraswat {
108a3895314SAkshay Saraswat uint32_t temp, val;
109a3895314SAkshay Saraswat
110a3895314SAkshay Saraswat /* Read auxiliary control register */
111a3895314SAkshay Saraswat asm volatile ("mrc p15, 0, %0, c1, c0, 1\n\t" : "=r"(val));
112a3895314SAkshay Saraswat
113a3895314SAkshay Saraswat /* Enable SMP */
114a3895314SAkshay Saraswat val |= (1 << 6);
115a3895314SAkshay Saraswat
116a3895314SAkshay Saraswat /* Dummy read to assure L2 access */
117a3895314SAkshay Saraswat temp = readl(address);
118a3895314SAkshay Saraswat temp &= 0;
119a3895314SAkshay Saraswat val |= temp;
120a3895314SAkshay Saraswat
121a3895314SAkshay Saraswat /* Write auxiliary control register */
122a3895314SAkshay Saraswat asm volatile ("mcr p15, 0, %0, c1, c0, 1\n\t" : : "r"(val));
123a3895314SAkshay Saraswat
124a3895314SAkshay Saraswat CP15DSB;
125a3895314SAkshay Saraswat CP15ISB;
126a3895314SAkshay Saraswat }
127a3895314SAkshay Saraswat
/*
 * Outer (L2) cache operations. NOTE(review): these look like hooks
 * implemented per-SoC elsewhere in the tree — confirm against the
 * cache maintenance code that calls them.
 */
void v7_en_l2_hazard_detect(void);
void v7_outer_cache_enable(void);
void v7_outer_cache_disable(void);
void v7_outer_cache_flush_all(void);
void v7_outer_cache_inval_all(void);
/* Range operations take [start, end) physical/virtual bounds in bytes */
void v7_outer_cache_flush_range(u32 start, u32 end);
void v7_outer_cache_inval_range(u32 start, u32 end);
1352c451f78SAneesh V
#ifdef CONFIG_ARMV7_NONSEC

/* Set up the non-secure world (returns 0 on success per U-Boot convention
 * — NOTE(review): confirm error value against the implementation) */
int armv7_init_nonsec(void);
/* Reserve secure-world memory out of [*start, *size) */
int armv7_apply_memory_carveout(u64 *start, u64 *size);
/* Whether the OS should be entered in the non-secure world */
bool armv7_boot_nonsec(void);

/* defined in assembly file */
unsigned int _nonsec_init(void);
/* Jump to target_pc in non-secure state with r0-r2 preloaded */
void _do_nonsec_entry(void *target_pc, unsigned long r0,
		      unsigned long r1, unsigned long r2);
void _smp_pen(void);

/* Linker-provided bounds of the secure text/stack sections */
extern char __secure_start[];
extern char __secure_end[];
extern char __secure_stack_start[];
extern char __secure_stack_end[];

#endif /* CONFIG_ARMV7_NONSEC */

/*
 * Apply an L2 auxiliary control / auxiliary control register value only
 * when the running CPU matches the given MIDR/variant/revision — used for
 * revision-specific errata. NOTE(review): matching semantics inferred from
 * parameter names; verify against the definitions.
 */
void v7_arch_cp15_set_l2aux_ctrl(u32 l2auxctrl, u32 cpu_midr,
				 u32 cpu_rev_comb, u32 cpu_variant,
				 u32 cpu_rev);
void v7_arch_cp15_set_acr(u32 acr, u32 cpu_midr, u32 cpu_rev_comb,
			  u32 cpu_variant, u32 cpu_rev);
160d75ba503SAndre Przywara #endif /* ! __ASSEMBLY__ */
161d75ba503SAndre Przywara
1622c451f78SAneesh V #endif
163