/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014-15 Synopsys, Inc. (www.synopsys.com)
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef __ASM_IRQFLAGS_ARCOMPACT_H
#define __ASM_IRQFLAGS_ARCOMPACT_H

/* vineetg: March 2010 : local_irq_save( ) optimisation
 * -Remove explicit mov of current status32 into reg, that is not needed
 * -Use BIC insn instead of INVERTED + AND
 * -Conditionally disable interrupts (if they are not enabled, don't disable)
 */
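/*
 * Net effect of the above, as a pseudo-C sketch of arch_local_irq_save()
 * further down (illustration only, the real code is the inline asm):
 *
 *	flags = status32;
 *	temp  = flags & ~(STATUS_E1_MASK | STATUS_E2_MASK);	// BIC
 *	if (flags & (STATUS_E1_MASK | STATUS_E2_MASK))		// and.f
 *		status32 = temp;				// flag.nz
 *	return flags;
 */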

#include <asm/arcregs.h>

/* status32 Reg bits related to Interrupt Handling */
#define STATUS_E1_BIT		1	/* Int 1 enable */
#define STATUS_E2_BIT		2	/* Int 2 enable */
#define STATUS_A1_BIT		3	/* Int 1 active */
#define STATUS_A2_BIT		4	/* Int 2 active */
#define STATUS_AE_BIT		5	/* Exception active */

#define STATUS_E1_MASK		(1<<STATUS_E1_BIT)
#define STATUS_E2_MASK		(1<<STATUS_E2_BIT)
#define STATUS_A1_MASK		(1<<STATUS_A1_BIT)
#define STATUS_A2_MASK		(1<<STATUS_A2_BIT)
#define STATUS_AE_MASK		(1<<STATUS_AE_BIT)
#define STATUS_IE_MASK		(STATUS_E1_MASK | STATUS_E2_MASK)

/* Other Interrupt Handling related Aux regs */
#define AUX_IRQ_LEV		0x200	/* IRQ Priority: L1 or L2 */
#define AUX_IRQ_HINT		0x201	/* For generating Soft Interrupts */
#define AUX_IRQ_LV12		0x43	/* interrupt level register */

#define AUX_IENABLE		0x40c
#define AUX_ITRIGGER		0x40d
#define AUX_IPULSE		0x415

#define ISA_INIT_STATUS_BITS	STATUS_IE_MASK

#ifndef __ASSEMBLY__

/******************************************************************
 * IRQ Control Macros
 *
 * All of them have "memory" clobber (compiler barrier) which is needed to
 * ensure that LD/ST requiring irq safety (R-M-W when LLSC is not available)
 * are redone after IRQs are re-enabled (and gcc doesn't reuse stale register),
 * see the illustrative sketch below.
 *
 * Noted at the time of Abilis Timer List corruption
 * Orig Bug + Rejected solution : https://lkml.org/lkml/2013/3/29/67
 * Reasoning : https://lkml.org/lkml/2013/4/8/15
 *
 ******************************************************************/
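/*
 * Sketch of the kind of sequence the clobber protects (illustration only,
 * the names here are made up):
 *
 *	local_irq_save(flags);
 *	val = *ptr;			// must be (re)loaded here ...
 *	*ptr = val | mask;		// ... not reused from a stale register
 *	local_irq_restore(flags);
 *
 * Without the barrier the compiler could keep *ptr cached in a register
 * across the irq disable/enable boundary and write back a stale value.
 */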

/*
 * Save IRQ state and disable IRQs
 */
static inline long arch_local_irq_save(void)
{
	unsigned long temp, flags;

	__asm__ __volatile__(
	"	lr  %1, [status32]	\n"	/* flags = current status32 */
	"	bic %0, %1, %2		\n"	/* temp = flags with E1/E2 cleared */
	"	and.f 0, %1, %2		\n"	/* were E1/E2 actually set ? */
	"	flag.nz %0		\n"	/* write status32 only if they were */
	: "=r"(temp), "=r"(flags)
	: "n"((STATUS_E1_MASK | STATUS_E2_MASK))
	: "memory", "cc");

	return flags;
}

/*
 * restore saved IRQ state
 */
static inline void arch_local_irq_restore(unsigned long flags)
{

	__asm__ __volatile__(
	"	flag %0			\n"	/* write saved flags back to status32 */
	:
	: "r"(flags)
	: "memory");
}

/*
 * Unconditionally Enable IRQs
 */
#ifdef CONFIG_ARC_COMPACT_IRQ_LEVELS
extern void arch_local_irq_enable(void);
#else
static inline void arch_local_irq_enable(void)
{
	unsigned long temp;

	__asm__ __volatile__(
	"	lr   %0, [status32]	\n"	/* read current status32 */
	"	or   %0, %0, %1		\n"	/* set both E1 and E2 enable bits */
	"	flag %0			\n"	/* write it back */
	: "=&r"(temp)
	: "n"((STATUS_E1_MASK | STATUS_E2_MASK))
	: "cc", "memory");
}
#endif

/*
 * Unconditionally Disable IRQs
 */
static inline void arch_local_irq_disable(void)
{
	unsigned long temp;

	__asm__ __volatile__(
	"	lr  %0, [status32]	\n"	/* read current status32 */
	"	and %0, %0, %1		\n"	/* clear both E1 and E2 enable bits */
	"	flag %0			\n"	/* write it back */
	: "=&r"(temp)
	: "n"(~(STATUS_E1_MASK | STATUS_E2_MASK))
	: "memory");
}

/*
 * save IRQ state
 */
static inline long arch_local_save_flags(void)
{
	unsigned long temp;

	__asm__ __volatile__(
	"	lr  %0, [status32]	\n"
	: "=&r"(temp)
	:
	: "memory");

	return temp;
}

/*
 * Query IRQ state
 */
static inline int arch_irqs_disabled_flags(unsigned long flags)
{
	return !(flags & (STATUS_E1_MASK
#ifdef CONFIG_ARC_COMPACT_IRQ_LEVELS
			| STATUS_E2_MASK
#endif
			));
}

static inline int arch_irqs_disabled(void)
{
	return arch_irqs_disabled_flags(arch_local_save_flags());
}

#else

#ifdef CONFIG_TRACE_IRQFLAGS

.macro TRACE_ASM_IRQ_DISABLE
	bl	trace_hardirqs_off
.endm

.macro TRACE_ASM_IRQ_ENABLE
	bl	trace_hardirqs_on
.endm

#else

.macro TRACE_ASM_IRQ_DISABLE
.endm

.macro TRACE_ASM_IRQ_ENABLE
.endm

#endif

.macro IRQ_DISABLE  scratch
	lr	\scratch, [status32]
	bic	\scratch, \scratch, (STATUS_E1_MASK | STATUS_E2_MASK)
	flag	\scratch
	TRACE_ASM_IRQ_DISABLE
.endm

.macro IRQ_ENABLE  scratch
	TRACE_ASM_IRQ_ENABLE
	lr	\scratch, [status32]
	or	\scratch, \scratch, (STATUS_E1_MASK | STATUS_E2_MASK)
	flag	\scratch
.endm
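/*
 * Illustrative use from assembly (the scratch register is picked by the
 * caller, r9 here is just an example):
 *
 *	IRQ_DISABLE  r9		; mask both interrupt levels, clobbers r9
 *	; ... critical section ...
 *	IRQ_ENABLE   r9
 */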

#endif	/* __ASSEMBLY__ */

#endif