• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * arch/xtensa/include/asm/xchal_vaddr_remap.h
3  *
4  * Xtensa macros for MMU V3 Support. Deals with re-mapping the Virtual
5  * Memory Addresses from "Virtual == Physical" to their previous V2 MMU
6  * mappings (KSEG at 0xD0000000 and KIO at 0XF0000000).
7  *
8  * This file is subject to the terms and conditions of the GNU General Public
9  * License.  See the file "COPYING" in the main directory of this archive
10  * for more details.
11  *
12  * Copyright (C) 2008 - 2012 Tensilica Inc.
13  *
14  * Pete Delaney <piet@tensilica.com>
15  * Marc Gauthier <marc@tensilica.com>
16  */
17 
18 #ifndef _XTENSA_VECTORS_H
19 #define _XTENSA_VECTORS_H
20 
21 #include <variant/core.h>
22 
/*
 * KIO (kernel I/O) region: cached and bypass (uncached) virtual mappings,
 * the default physical base, and the region size (0x10000000 = 256 MB).
 */
#define XCHAL_KIO_CACHED_VADDR		0xe0000000
#define XCHAL_KIO_BYPASS_VADDR		0xf0000000
#define XCHAL_KIO_DEFAULT_PADDR		0xf0000000
#define XCHAL_KIO_SIZE			0x10000000

#if XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY && defined(CONFIG_OF)
/*
 * MMU v3 with a device tree: the KIO physical base is discovered at run
 * time (xtensa_get_kio_paddr() is provided elsewhere in arch code).
 */
#define XCHAL_KIO_PADDR			xtensa_get_kio_paddr()
#else
/* Otherwise the KIO physical base is fixed at the default. */
#define XCHAL_KIO_PADDR			XCHAL_KIO_DEFAULT_PADDR
#endif
33 
#if defined(CONFIG_MMU)

/* Will become VECBASE: base virtual address the vectors are placed from */
#define VIRTUAL_MEMORY_ADDRESS		0xD0000000

/* Virtual start address of the kernel image */
#define KERNELOFFSET			0xD0003000

#if defined(XCHAL_HAVE_PTP_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
  /* MMU v3 (XCHAL_HAVE_PTP_MMU == 1): image loaded at its physical address */
  #define LOAD_MEMORY_ADDRESS		0x00003000
#else
  /* MMU v2 (XCHAL_HAVE_PTP_MMU == 0): image loaded at its virtual address */
  #define LOAD_MEMORY_ADDRESS		0xD0003000
#endif

#else /* !defined(CONFIG_MMU) */
  /* MMU not being used - Virtual == Physical */

  /* VECBASE */
  #define VIRTUAL_MEMORY_ADDRESS	0x00002000

  /* Location of the start of the kernel text, _start */
  #define KERNELOFFSET			0x00003000

  /* Loaded just above possibly live vectors */
  #define LOAD_MEMORY_ADDRESS		0x00003000

#endif /* CONFIG_MMU */
63 
/*
 * Translate a vector offset (relative to VECBASE) into a virtual address.
 * The argument is parenthesized so that expression arguments expand safely.
 */
#define XC_VADDR(offset)		(VIRTUAL_MEMORY_ADDRESS + (offset))

/* Used to set VECBASE register */
#define VECBASE_RESET_VADDR		VIRTUAL_MEMORY_ADDRESS

/* Reset vector offsets/addresses, derived from the variant's fixed VADDRs */
#define RESET_VECTOR_VECOFS		(XCHAL_RESET_VECTOR_VADDR - \
						VECBASE_RESET_VADDR)
#define RESET_VECTOR_VADDR		XC_VADDR(RESET_VECTOR_VECOFS)

#define RESET_VECTOR1_VECOFS		(XCHAL_RESET_VECTOR1_VADDR - \
						VECBASE_RESET_VADDR)
#define RESET_VECTOR1_VADDR		XC_VADDR(RESET_VECTOR1_VECOFS)
76 
#if defined(XCHAL_HAVE_VECBASE) && XCHAL_HAVE_VECBASE

/* Relocatable vectors: each vector lives at VECBASE + its XCHAL_*_VECOFS */
#define USER_VECTOR_VADDR		XC_VADDR(XCHAL_USER_VECOFS)
#define KERNEL_VECTOR_VADDR		XC_VADDR(XCHAL_KERNEL_VECOFS)
#define DOUBLEEXC_VECTOR_VADDR		XC_VADDR(XCHAL_DOUBLEEXC_VECOFS)
#define WINDOW_VECTORS_VADDR		XC_VADDR(XCHAL_WINDOW_OF4_VECOFS)
#define INTLEVEL2_VECTOR_VADDR		XC_VADDR(XCHAL_INTLEVEL2_VECOFS)
#define INTLEVEL3_VECTOR_VADDR		XC_VADDR(XCHAL_INTLEVEL3_VECOFS)
#define INTLEVEL4_VECTOR_VADDR		XC_VADDR(XCHAL_INTLEVEL4_VECOFS)
#define INTLEVEL5_VECTOR_VADDR		XC_VADDR(XCHAL_INTLEVEL5_VECOFS)
#define INTLEVEL6_VECTOR_VADDR		XC_VADDR(XCHAL_INTLEVEL6_VECOFS)

#define DEBUG_VECTOR_VADDR		XC_VADDR(XCHAL_DEBUG_VECOFS)

#define NMI_VECTOR_VADDR		XC_VADDR(XCHAL_NMI_VECOFS)

#define INTLEVEL7_VECTOR_VADDR		XC_VADDR(XCHAL_INTLEVEL7_VECOFS)

/*
 * These XCHAL_* #defines from variant/core.h
 * are not valid to use with V3 MMU. Non-XCHAL
 * constants are defined above and should be used.
 */
#undef  XCHAL_VECBASE_RESET_VADDR
#undef  XCHAL_RESET_VECTOR0_VADDR
#undef  XCHAL_USER_VECTOR_VADDR
#undef  XCHAL_KERNEL_VECTOR_VADDR
#undef  XCHAL_DOUBLEEXC_VECTOR_VADDR
#undef  XCHAL_WINDOW_VECTORS_VADDR
#undef  XCHAL_INTLEVEL2_VECTOR_VADDR
#undef  XCHAL_INTLEVEL3_VECTOR_VADDR
#undef  XCHAL_INTLEVEL4_VECTOR_VADDR
#undef  XCHAL_INTLEVEL5_VECTOR_VADDR
#undef  XCHAL_INTLEVEL6_VECTOR_VADDR
#undef  XCHAL_DEBUG_VECTOR_VADDR
#undef  XCHAL_NMI_VECTOR_VADDR
#undef  XCHAL_INTLEVEL7_VECTOR_VADDR

#else

/* No VECBASE: use the fixed vector addresses from variant/core.h directly */
#define USER_VECTOR_VADDR		XCHAL_USER_VECTOR_VADDR
#define KERNEL_VECTOR_VADDR		XCHAL_KERNEL_VECTOR_VADDR
#define DOUBLEEXC_VECTOR_VADDR		XCHAL_DOUBLEEXC_VECTOR_VADDR
#define WINDOW_VECTORS_VADDR		XCHAL_WINDOW_VECTORS_VADDR
#define INTLEVEL2_VECTOR_VADDR		XCHAL_INTLEVEL2_VECTOR_VADDR
#define INTLEVEL3_VECTOR_VADDR		XCHAL_INTLEVEL3_VECTOR_VADDR
#define INTLEVEL4_VECTOR_VADDR		XCHAL_INTLEVEL4_VECTOR_VADDR
#define INTLEVEL5_VECTOR_VADDR		XCHAL_INTLEVEL5_VECTOR_VADDR
#define INTLEVEL6_VECTOR_VADDR		XCHAL_INTLEVEL6_VECTOR_VADDR
#define DEBUG_VECTOR_VADDR		XCHAL_DEBUG_VECTOR_VADDR

#endif
129 
130 #endif /* _XTENSA_VECTORS_H */
131