/*
 * Copyright (C) 1994 - 1999 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1994 - 1999 by Ralf Baechle
 *
 * Changed set_except_vector declaration to allow return of previous
 * vector address value - necessary for "borrowing" vectors.
 *
 * Kevin D. Kissell, kevink@mips.org and Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0
 */
14819833afSPeter Tyser #ifndef _ASM_SYSTEM_H
15819833afSPeter Tyser #define _ASM_SYSTEM_H
16819833afSPeter Tyser
17819833afSPeter Tyser #include <asm/sgidefs.h>
18819833afSPeter Tyser #include <asm/ptrace.h>
19819833afSPeter Tyser #if 0
20819833afSPeter Tyser #include <linux/kernel.h>
21819833afSPeter Tyser #endif
22819833afSPeter Tyser
/*
 * __sti(): enable interrupts.
 *
 * Reads CP0 Status ($12) and rewrites it using an ori/xori pair on
 * bits 0-4: ori 0x1f sets bits 0-4, xori 0x1e clears bits 1-4 again,
 * so the net effect is IE (bit 0) set and EXL/ERL/KSU (bits 1-4)
 * cleared.  $1 (the assembler temporary) is used as scratch, hence
 * .set noat and the "$1" clobber.
 */
static __inline__ void
__sti(void)
{
	__asm__ __volatile__(
		".set\tpush\n\t"
		".set\treorder\n\t"
		".set\tnoat\n\t"
		"mfc0\t$1,$12\n\t"		/* $1 = CP0 Status */
		"ori\t$1,0x1f\n\t"		/* set bits 0-4 */
		"xori\t$1,0x1e\n\t"		/* clear bits 1-4, leaving IE set */
		"mtc0\t$1,$12\n\t"		/* write Status back */
		".set\tpop\n\t"
		: /* no outputs */
		: /* no inputs */
		: "$1", "memory");
}
39819833afSPeter Tyser
/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
 * no nops at all.
 */
/*
 * __cli(): disable interrupts by clearing IE (bit 0) of CP0 Status.
 *
 * The ori 1 / xori 1 pair clears bit 0 without disturbing any other
 * Status bit.  The three nops after mtc0 cover the CP0 write hazard
 * described in the comment above; .set noreorder keeps the assembler
 * from moving or filling them.
 */
static __inline__ void
__cli(void)
{
	__asm__ __volatile__(
		".set\tpush\n\t"
		".set\treorder\n\t"
		".set\tnoat\n\t"
		"mfc0\t$1,$12\n\t"		/* $1 = CP0 Status */
		"ori\t$1,1\n\t"			/* force bit 0 set ... */
		"xori\t$1,1\n\t"		/* ... then clear it (IE = 0) */
		".set\tnoreorder\n\t"
		"mtc0\t$1,$12\n\t"		/* write Status back */
		"nop\n\t"			/* CP0 write hazard padding */
		"nop\n\t"
		"nop\n\t"
		".set\tpop\n\t"
		: /* no outputs */
		: /* no inputs */
		: "$1", "memory");
}
67819833afSPeter Tyser
/*
 * __save_flags(x): capture the whole current CP0 Status register
 * into x (not just the IE bit).
 */
#define __save_flags(x)						\
__asm__ __volatile__(						\
	".set\tpush\n\t"					\
	".set\treorder\n\t"					\
	"mfc0\t%0,$12\n\t"					\
	".set\tpop\n\t"						\
	: "=r" (x))
75819833afSPeter Tyser
/*
 * __save_and_cli(x): save the current CP0 Status into x and disable
 * interrupts in one sequence.  %0 receives the old Status; $1 holds
 * the new value with IE (bit 0) cleared via the ori/xori pair.  The
 * three nops cover the mtc0 write hazard (see comment above __cli).
 */
#define __save_and_cli(x)					\
__asm__ __volatile__(						\
	".set\tpush\n\t"					\
	".set\treorder\n\t"					\
	".set\tnoat\n\t"					\
	"mfc0\t%0,$12\n\t"					\
	"ori\t$1,%0,1\n\t"					\
	"xori\t$1,1\n\t"					\
	".set\tnoreorder\n\t"					\
	"mtc0\t$1,$12\n\t"					\
	"nop\n\t"						\
	"nop\n\t"						\
	"nop\n\t"						\
	".set\tpop\n\t"						\
	: "=r" (x)						\
	: /* no inputs */					\
	: "$1", "memory")
93819833afSPeter Tyser
/*
 * __restore_flags(flags): restore ONLY the IE bit (bit 0) from
 * 'flags' into CP0 Status, keeping every other bit of the current
 * Status.  andi isolates IE from the saved flags, ori/xori clears
 * IE in the live Status, and 'or' merges the saved IE back in.
 * The nops cover the mtc0 write hazard.
 */
#define __restore_flags(flags)					\
do {								\
	unsigned long __tmp1;					\
								\
	__asm__ __volatile__(					\
		".set\tnoreorder\t\t\t# __restore_flags\n\t"	\
		".set\tnoat\n\t"				\
		"mfc0\t$1, $12\n\t"				\
		"andi\t%0, 1\n\t"				\
		"ori\t$1, 1\n\t"				\
		"xori\t$1, 1\n\t"				\
		"or\t%0, $1\n\t"				\
		"mtc0\t%0, $12\n\t"				\
		"nop\n\t"					\
		"nop\n\t"					\
		"nop\n\t"					\
		".set\tat\n\t"					\
		".set\treorder"					\
		: "=r" (__tmp1)					\
		: "0" (flags)					\
		: "$1", "memory");				\
} while(0)
116819833afSPeter Tyser
#ifdef CONFIG_SMP

/*
 * On SMP the interrupt-control primitives must act across all CPUs;
 * the implementations live out of line.
 */
extern void __global_sti(void);
extern void __global_cli(void);
extern unsigned long __global_save_flags(void);
extern void __global_restore_flags(unsigned long);
# define sti() __global_sti()
# define cli() __global_cli()
# define save_flags(x) do { x = __global_save_flags(); } while (0)
# define restore_flags(x) __global_restore_flags(x)
# define save_and_cli(x) do { save_flags(x); cli(); } while(0)

#else /* Single processor */

/* UP: the local CP0-based primitives above are sufficient. */
# define sti() __sti()
# define cli() __cli()
# define save_flags(x) __save_flags(x)
# define save_and_cli(x) __save_and_cli(x)
# define restore_flags(x) __restore_flags(x)

#endif /* SMP */
138819833afSPeter Tyser
/* For spinlocks etc */
/*
 * Note: these must NOT carry a trailing semicolon inside the macro
 * body.  With the ';' baked in, a call such as
 *     if (cond) local_irq_disable(); else ...
 * expands to two statements and detaches the 'else' (syntax error).
 * The caller supplies the semicolon, as with any function-like macro.
 */
#define local_irq_save(x)	__save_and_cli(x)
#define local_irq_restore(x)	__restore_flags(x)
#define local_irq_disable()	__cli()
#define local_irq_enable()	__sti()
144819833afSPeter Tyser
/*
 * These are probably defined overly paranoid ...
 */
#ifdef CONFIG_CPU_HAS_WB

/*
 * CPUs with a write buffer: a write barrier must drain the buffer
 * via wbflush(); reads need no ordering help on these parts.
 */
#include <asm/wbflush.h>
#define rmb() do { } while(0)
#define wmb() wbflush()
#define mb() wbflush()

#else /* CONFIG_CPU_HAS_WB */

/*
 * No write buffer: a compiler barrier plus eight nops (enough to
 * drain the R4400 pipeline, per the comment in the asm) serves as
 * the full barrier.
 */
#define mb()						\
__asm__ __volatile__(					\
	"# prevent instructions being moved around\n\t"	\
	".set\tnoreorder\n\t"				\
	"# 8 nops to fool the R4400 pipeline\n\t"	\
	"nop;nop;nop;nop;nop;nop;nop;nop\n\t"		\
	".set\treorder"					\
	: /* no output */				\
	: /* no input */				\
	: "memory")
#define rmb() mb()
#define wmb() mb()

#endif /* CONFIG_CPU_HAS_WB */
171819833afSPeter Tyser
/*
 * smp_*() barriers are real hardware barriers only on SMP; on UP
 * just a compiler barrier is needed, since only the compiler can
 * reorder accesses as seen by interrupt handlers on the same CPU.
 * (barrier() is assumed to be defined elsewhere -- TODO confirm.)
 */
#ifdef CONFIG_SMP
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#else
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#endif

/* Store 'value' into 'var', then issue a full / write barrier. */
#define set_mb(var, value) \
do { var = value; mb(); } while (0)

#define set_wmb(var, value) \
do { var = value; wmb(); } while (0)
187819833afSPeter Tyser
#if !defined (_LANGUAGE_ASSEMBLY)
/*
 * switch_to(n) should switch tasks to task nr n, first
 * checking that n isn't the current task, in which case it does nothing.
 */
#if 0
extern asmlinkage void *resume(void *last, void *next);
#endif
#endif /* !defined (_LANGUAGE_ASSEMBLY) */

#define prepare_to_switch()	do { } while(0)
/*
 * NOTE(review): resume() is only declared inside the '#if 0' block
 * above, so users of switch_to() must get a declaration from
 * elsewhere -- confirm before enabling callers.
 */
#define switch_to(prev,next,last) \
do { \
	(last) = resume(prev, next); \
} while(0)
203819833afSPeter Tyser
/*
 * For 32 and 64 bit operands we can take advantage of ll and sc.
 * FIXME: This doesn't work for R3000 machines.
 */
/*
 * xchg_u32(): atomically store val into *m and return the previous
 * value.
 *
 * With CONFIG_CPU_HAS_LLSC this is a load-linked/store-conditional
 * retry loop: beqzl re-runs the sequence whenever sc fails, with the
 * reloading ll placed in the branch-likely delay slot (executed only
 * when the branch is taken).  Without LL/SC the exchange is made
 * atomic by disabling interrupts, which protects only against other
 * code on the same CPU.
 */
static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
{
#ifdef CONFIG_CPU_HAS_LLSC
	unsigned long dummy;

	__asm__ __volatile__(
		".set\tnoreorder\t\t\t# xchg_u32\n\t"
		".set\tnoat\n\t"
		"ll\t%0, %3\n"			/* %0 = *m (load-linked) */
		"1:\tmove\t$1, %2\n\t"		/* $1 = new value */
		"sc\t$1, %1\n\t"		/* try store; $1 = 1 on success */
		"beqzl\t$1, 1b\n\t"		/* retry if sc failed */
		" ll\t%0, %3\n\t"		/* delay slot: reload old value */
		".set\tat\n\t"
		".set\treorder"
		: "=r" (val), "=o" (*m), "=r" (dummy)
		: "o" (*m), "2" (val)
		: "memory");

	return val;
#else
	unsigned long flags, retval;

	save_flags(flags);
	cli();
	retval = *m;
	*m = val;
	restore_flags(flags);
	return retval;
#endif /* Processor-dependent optimization */
}
239819833afSPeter Tyser
/*
 * xchg(ptr, x): type-generic atomic exchange, dispatched on
 * sizeof(*ptr) through __xchg().  tas() is the classic test-and-set:
 * exchange with 1, returning the old value.
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))
242819833afSPeter Tyser
/*
 * __xchg(): dispatch an atomic exchange according to operand size.
 * Only 4-byte operands are supported; any other size performs no
 * exchange and simply hands the value straight back.
 */
static __inline__ unsigned long
__xchg(unsigned long x, volatile void * ptr, int size)
{
	if (size == 4)
		return xchg_u32(ptr, x);

	/* Unsupported width: memory untouched. */
	return x;
}
252819833afSPeter Tyser
/* Install exception vector n; returns the previous vector address. */
extern void *set_except_vector(int n, void *addr);

/* Fatal-trap reporting; __die() does not return. */
extern void __die(const char *, struct pt_regs *, const char *where,
	unsigned long line) __attribute__((noreturn));
extern void __die_if_kernel(const char *, struct pt_regs *, const char *where,
	unsigned long line);

/*
 * NOTE(review): pasting __FUNCTION__ onto a string literal is a
 * pre-GCC-3.4 extension; modern GCC rejects it because __FUNCTION__
 * is not a string literal.  Confirm which toolchain builds this.
 */
#define die(msg, regs) \
	__die(msg, regs, __FILE__ ":"__FUNCTION__, __LINE__)
#define die_if_kernel(msg, regs) \
	__die_if_kernel(msg, regs, __FILE__ ":"__FUNCTION__, __LINE__)
264819833afSPeter Tyser
execution_hazard_barrier(void)265*6c593630SDaniel Schwierzeck static inline void execution_hazard_barrier(void)
266*6c593630SDaniel Schwierzeck {
267*6c593630SDaniel Schwierzeck __asm__ __volatile__(
268*6c593630SDaniel Schwierzeck ".set noreorder\n"
269*6c593630SDaniel Schwierzeck "ehb\n"
270*6c593630SDaniel Schwierzeck ".set reorder");
271*6c593630SDaniel Schwierzeck }
272*6c593630SDaniel Schwierzeck
273819833afSPeter Tyser #endif /* _ASM_SYSTEM_H */
274