xref: /OK3568_Linux_fs/kernel/arch/xtensa/include/asm/vectors.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * arch/xtensa/include/asm/vectors.h
 *
 * Xtensa macros for MMU V3 Support. Deals with re-mapping the Virtual
 * Memory Addresses from "Virtual == Physical" to their previous V2 MMU
 * mappings (KSEG at 0xD0000000 and KIO at 0xF0000000).
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica Inc.
 *
 * Pete Delaney <piet@tensilica.com>
 * Marc Gauthier <marc@tensilica.com>
 */
17*4882a593Smuzhiyun 
18*4882a593Smuzhiyun #ifndef _XTENSA_VECTORS_H
19*4882a593Smuzhiyun #define _XTENSA_VECTORS_H
20*4882a593Smuzhiyun 
21*4882a593Smuzhiyun #include <asm/core.h>
22*4882a593Smuzhiyun #include <asm/kmem_layout.h>
23*4882a593Smuzhiyun 
/*
 * KERNELOFFSET: virtual address the kernel image is linked to run at.
 *
 * With a V3 PTP MMU that has a spanning way, the kernel either runs at
 * an explicitly configured virtual address, or at the load address
 * translated from physical KSEG into the cached KSEG virtual mapping.
 * Otherwise (no MMU remapping), virtual == physical and the load
 * address is used directly.
 */
#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
#ifdef CONFIG_KERNEL_VIRTUAL_ADDRESS
#define KERNELOFFSET			CONFIG_KERNEL_VIRTUAL_ADDRESS
#else
#define KERNELOFFSET			(CONFIG_KERNEL_LOAD_ADDRESS + \
					 XCHAL_KSEG_CACHED_VADDR - \
					 XCHAL_KSEG_PADDR)
#endif
#else
#define KERNELOFFSET			CONFIG_KERNEL_LOAD_ADDRESS
#endif
35*4882a593Smuzhiyun 
#define RESET_VECTOR1_VADDR		(XCHAL_RESET_VECTOR1_VADDR)
/*
 * Base address of the exception vector group: either fixed by Kconfig
 * (CONFIG_VECTORS_ADDR) or the linker-provided symbol _vecbase when the
 * vectors are placed by the linker script.
 */
#ifdef CONFIG_VECTORS_ADDR
#define VECBASE_VADDR			(CONFIG_VECTORS_ADDR)
#else
#define VECBASE_VADDR			_vecbase
#endif
42*4882a593Smuzhiyun 
#if XCHAL_HAVE_VECBASE

/*
 * The core has a relocatable vector base (VECBASE register): each vector
 * lives at a fixed XCHAL_*_VECOFS offset from VECBASE_VADDR.
 */
#define VECTOR_VADDR(offset)		(VECBASE_VADDR + offset)

#define USER_VECTOR_VADDR		VECTOR_VADDR(XCHAL_USER_VECOFS)
#define KERNEL_VECTOR_VADDR		VECTOR_VADDR(XCHAL_KERNEL_VECOFS)
#define DOUBLEEXC_VECTOR_VADDR		VECTOR_VADDR(XCHAL_DOUBLEEXC_VECOFS)
#define WINDOW_VECTORS_VADDR		VECTOR_VADDR(XCHAL_WINDOW_OF4_VECOFS)
#define INTLEVEL2_VECTOR_VADDR		VECTOR_VADDR(XCHAL_INTLEVEL2_VECOFS)
#define INTLEVEL3_VECTOR_VADDR		VECTOR_VADDR(XCHAL_INTLEVEL3_VECOFS)
#define INTLEVEL4_VECTOR_VADDR		VECTOR_VADDR(XCHAL_INTLEVEL4_VECOFS)
#define INTLEVEL5_VECTOR_VADDR		VECTOR_VADDR(XCHAL_INTLEVEL5_VECOFS)
#define INTLEVEL6_VECTOR_VADDR		VECTOR_VADDR(XCHAL_INTLEVEL6_VECOFS)
#define INTLEVEL7_VECTOR_VADDR		VECTOR_VADDR(XCHAL_INTLEVEL7_VECOFS)
#define DEBUG_VECTOR_VADDR		VECTOR_VADDR(XCHAL_DEBUG_VECOFS)

/*
 * These XCHAL_* #defines from variant/core.h
 * are not valid to use with V3 MMU. Non-XCHAL
 * constants are defined above and should be used.
 */
#undef  XCHAL_VECBASE_RESET_VADDR
#undef  XCHAL_USER_VECTOR_VADDR
#undef  XCHAL_KERNEL_VECTOR_VADDR
#undef  XCHAL_DOUBLEEXC_VECTOR_VADDR
#undef  XCHAL_WINDOW_VECTORS_VADDR
#undef  XCHAL_INTLEVEL2_VECTOR_VADDR
#undef  XCHAL_INTLEVEL3_VECTOR_VADDR
#undef  XCHAL_INTLEVEL4_VECTOR_VADDR
#undef  XCHAL_INTLEVEL5_VECTOR_VADDR
#undef  XCHAL_INTLEVEL6_VECTOR_VADDR
#undef  XCHAL_INTLEVEL7_VECTOR_VADDR
#undef  XCHAL_DEBUG_VECTOR_VADDR

#else

/*
 * No VECBASE: vector addresses are hard-wired in the core configuration;
 * use the XCHAL_* absolute addresses directly.
 */
#define USER_VECTOR_VADDR		XCHAL_USER_VECTOR_VADDR
#define KERNEL_VECTOR_VADDR		XCHAL_KERNEL_VECTOR_VADDR
#define DOUBLEEXC_VECTOR_VADDR		XCHAL_DOUBLEEXC_VECTOR_VADDR
#define WINDOW_VECTORS_VADDR		XCHAL_WINDOW_VECTORS_VADDR
#define INTLEVEL2_VECTOR_VADDR		XCHAL_INTLEVEL2_VECTOR_VADDR
#define INTLEVEL3_VECTOR_VADDR		XCHAL_INTLEVEL3_VECTOR_VADDR
#define INTLEVEL4_VECTOR_VADDR		XCHAL_INTLEVEL4_VECTOR_VADDR
#define INTLEVEL5_VECTOR_VADDR		XCHAL_INTLEVEL5_VECTOR_VADDR
#define INTLEVEL6_VECTOR_VADDR		XCHAL_INTLEVEL6_VECTOR_VADDR
/*
 * Fix copy-paste bug: level-7 vector previously aliased the level-6
 * address (XCHAL_INTLEVEL6_VECTOR_VADDR), which would misroute level-7
 * interrupts on non-VECBASE cores with 7 interrupt levels.
 */
#define INTLEVEL7_VECTOR_VADDR		XCHAL_INTLEVEL7_VECTOR_VADDR
#define DEBUG_VECTOR_VADDR		XCHAL_DEBUG_VECTOR_VADDR

#endif
92*4882a593Smuzhiyun 
93*4882a593Smuzhiyun #endif /* _XTENSA_VECTORS_H */
94