• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2003-2011 NetLogic Microsystems, Inc. (NetLogic). All rights
3  * reserved.
4  *
5  * This software is available to you under a choice of one of two
6  * licenses.  You may choose to be licensed under the terms of the GNU
7  * General Public License (GPL) Version 2, available from the file
8  * COPYING in the main directory of this source tree, or the NetLogic
9  * license below:
10  *
11  * Redistribution and use in source and binary forms, with or without
12  * modification, are permitted provided that the following conditions
13  * are met:
14  *
15  * 1. Redistributions of source code must retain the above copyright
16  *    notice, this list of conditions and the following disclaimer.
17  * 2. Redistributions in binary form must reproduce the above copyright
18  *    notice, this list of conditions and the following disclaimer in
19  *    the documentation and/or other materials provided with the
20  *    distribution.
21  *
22  * THIS SOFTWARE IS PROVIDED BY NETLOGIC ``AS IS'' AND ANY EXPRESS OR
23  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
24  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
25  * ARE DISCLAIMED. IN NO EVENT SHALL NETLOGIC OR CONTRIBUTORS BE LIABLE
26  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
27  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
28  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
29  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
30  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
31  * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
32  * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33  */
34 
35 #ifndef _ASM_NLM_MIPS_EXTS_H
36 #define _ASM_NLM_MIPS_EXTS_H
37 
38 /*
39  * XLR and XLP interrupt request and interrupt mask registers
40  */
41 /*
42  * NOTE: Do not save/restore flags around write_c0_eimr().
43  * On non-R2 platforms the flags has part of EIMR that is shadowed in STATUS
44  * register. Restoring flags will overwrite the lower 8 bits of EIMR.
45  *
46  * Call with interrupts disabled.
47  */
/*
 * Write the 64-bit EIMR (CP0 $9, select 7).
 *
 * On a 32-bit kernel the 64-bit value arrives as a register pair
 * (%L0 = low word, %M0 = high word): zero-extend the low half
 * (dsll/dsrl by 32), shift the high half up, OR them into one 64-bit
 * register and write it with dmtc0.  On a 64-bit kernel a plain
 * 64-bit coprocessor write is sufficient.
 */
#define write_c0_eimr(val)						\
do {									\
	if (sizeof(unsigned long) == 4) {				\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dsll\t%L0, %L0, 32\n\t"			\
			"dsrl\t%L0, %L0, 32\n\t"			\
			"dsll\t%M0, %M0, 32\n\t"			\
			"or\t%L0, %L0, %M0\n\t"				\
			"dmtc0\t%L0, $9, 7\n\t"				\
			".set\tmips0"					\
			: : "r" (val));					\
	} else								\
		__write_64bit_c0_register($9, 7, (val));		\
} while (0)
63 
64 /*
65  * Handling the 64 bit EIMR and EIRR registers in 32-bit mode with
66  * standard functions will be very inefficient. This provides
67  * optimized functions for the normal operations on the registers.
68  *
69  * Call with interrupts disabled.
70  */
/*
 * Acknowledge interrupt @irq by writing the single bit (1 << irq) to
 * the EIRR (CP0 $9, select 6).  The mask is built in $1 (the assembler
 * temporary), hence the .set noat.  Call with interrupts disabled.
 */
static inline void ack_c0_eirr(int irq)
{
	__asm__ __volatile__(
		".set	push\n\t"
		".set	mips64\n\t"
		".set	noat\n\t"
		"li	$1, 1\n\t"
		"dsllv	$1, $1, %0\n\t"
		"dmtc0	$1, $9, 6\n\t"
		".set	pop"
		: : "r" (irq));
}
83 
/*
 * Unmask interrupt @irq: read-modify-write the EIMR (CP0 $9, select 7)
 * to OR in bit (1 << irq).  The shifted mask is built in @irq's own
 * register, which is why the operand is "+r" (read-write/clobbered).
 * Call with interrupts disabled.
 */
static inline void set_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set	push\n\t"
		".set	mips64\n\t"
		".set	noat\n\t"
		"li	$1, 1\n\t"
		"dsllv	%0, $1, %0\n\t"
		"dmfc0	$1, $9, 7\n\t"
		"or	$1, %0\n\t"
		"dmtc0	$1, $9, 7\n\t"
		".set	pop"
		: "+r" (irq));
}
98 
/*
 * Mask interrupt @irq: clear bit (1 << irq) in the EIMR (CP0 $9,
 * select 7).  The "or then xor" pair implements an AND-NOT without
 * needing an extra register: (eimr | mask) ^ mask == eimr & ~mask.
 * As in set_c0_eimr(), @irq's register is reused for the mask ("+r").
 * Call with interrupts disabled.
 */
static inline void clear_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set	push\n\t"
		".set	mips64\n\t"
		".set	noat\n\t"
		"li	$1, 1\n\t"
		"dsllv	%0, $1, %0\n\t"
		"dmfc0	$1, $9, 7\n\t"
		"or	$1, %0\n\t"
		"xor	$1, %0\n\t"
		"dmtc0	$1, $9, 7\n\t"
		".set	pop"
		: "+r" (irq));
}
114 
115 /*
116  * Read c0 eimr and c0 eirr, do AND of the two values, the result is
117  * the interrupts which are raised and are not masked.
118  */
/*
 * Return EIRR & EIMR: the set of interrupts that are both raised and
 * not masked.
 *
 * On 64-bit, two plain reads and an AND.  On 32-bit, both 64-bit
 * registers are read (dmfc0) and ANDed while still in a single 64-bit
 * register, then the result is split into the %M0 (high) / %L0 (low)
 * pair that represents the uint64_t return value; the dsll/dsra-by-32
 * sequence sign-extends each 32-bit half as the ABI expects.
 */
static inline uint64_t read_c0_eirr_and_eimr(void)
{
	uint64_t val;

#ifdef CONFIG_64BIT
	val = __read_64bit_c0_register($9, 6) & __read_64bit_c0_register($9, 7);
#else
	__asm__ __volatile__(
		".set	push\n\t"
		".set	mips64\n\t"
		".set	noat\n\t"
		"dmfc0	%M0, $9, 6\n\t"
		"dmfc0	%L0, $9, 7\n\t"
		"and	%M0, %L0\n\t"
		"dsll	%L0, %M0, 32\n\t"
		"dsra	%M0, %M0, 32\n\t"
		"dsra	%L0, %L0, 32\n\t"
		".set	pop"
		: "=r" (val));
#endif
	return val;
}
141 
hard_smp_processor_id(void)142 static inline int hard_smp_processor_id(void)
143 {
144 	return __read_32bit_c0_register($15, 1) & 0x3ff;
145 }
146 
nlm_nodeid(void)147 static inline int nlm_nodeid(void)
148 {
149 	return (__read_32bit_c0_register($15, 1) >> 5) & 0x3;
150 }
151 
/*
 * Core number within the node: bits [4:2] of the EBASE CPU number.
 */
static inline unsigned int nlm_core_id(void)
{
	return (read_c0_ebase() >> 2) & 0x7;
}
156 
/*
 * Hardware thread number within the core: bits [1:0] of EBASE.
 */
static inline unsigned int nlm_thread_id(void)
{
	unsigned int ebase = read_c0_ebase();

	return ebase & 0x3;
}
161 
/*
 * Read a 64-bit coprocessor-2 register on a 32-bit kernel: dmfc2 into
 * one 64-bit register, then split it into the %M0 (high) / %L0 (low)
 * pair with sign-extending 32-bit shifts.  Interrupts are disabled
 * around the sequence, presumably so an IRQ cannot clobber the upper
 * halves of the registers mid-split — confirm against the IRQ paths.
 * The sel == 0 case emits the two-operand "dmfc2 reg, src" form
 * instead of the three-operand form with an explicit selector.
 */
#define __read_64bit_c2_split(source, sel)				\
({									\
	unsigned long long __val;					\
	unsigned long __flags;						\
									\
	local_irq_save(__flags);					\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%M0, " #source "\n\t"			\
			"dsll\t%L0, %M0, 32\n\t"			\
			"dsra\t%M0, %M0, 32\n\t"			\
			"dsra\t%L0, %L0, 32\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__val));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%M0, " #source ", " #sel "\n\t"		\
			"dsll\t%L0, %M0, 32\n\t"			\
			"dsra\t%M0, %M0, 32\n\t"			\
			"dsra\t%L0, %L0, 32\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__val));				\
	local_irq_restore(__flags);					\
									\
	__val;								\
})
190 
/*
 * Write a 64-bit coprocessor-2 register on a 32-bit kernel: glue the
 * %L0 (low) / %M0 (high) register pair into one 64-bit register
 * (zero-extend low, shift high up, OR) and write it with dmtc2.
 * Interrupts are disabled across the sequence, mirroring the read
 * side in __read_64bit_c2_split().  The sel == 0 case emits the
 * two-operand "dmtc2 reg, dst" assembler form.
 */
#define __write_64bit_c2_split(source, sel, val)			\
do {									\
	unsigned long __flags;						\
									\
	local_irq_save(__flags);					\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dsll\t%L0, %L0, 32\n\t"			\
			"dsrl\t%L0, %L0, 32\n\t"			\
			"dsll\t%M0, %M0, 32\n\t"			\
			"or\t%L0, %L0, %M0\n\t"				\
			"dmtc2\t%L0, " #source "\n\t"			\
			".set\tmips0\n\t"				\
			: : "r" (val));					\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dsll\t%L0, %L0, 32\n\t"			\
			"dsrl\t%L0, %L0, 32\n\t"			\
			"dsll\t%M0, %M0, 32\n\t"			\
			"or\t%L0, %L0, %M0\n\t"				\
			"dmtc2\t%L0, " #source ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: : "r" (val));					\
	local_irq_restore(__flags);					\
} while (0)
218 
/*
 * Read a 32-bit coprocessor-2 register with mfc2.  The sel == 0
 * branch uses the two-operand assembler form; non-zero selectors use
 * the explicit three-operand form.
 */
#define __read_32bit_c2_register(source, sel)				\
({ uint32_t __res;							\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mfc2\t%0, " #source "\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mfc2\t%0, " #source ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	__res;								\
})
235 
/*
 * Read a 64-bit coprocessor-2 register.  On a 32-bit kernel
 * (sizeof(unsigned long) == 4) delegate to __read_64bit_c2_split();
 * on 64-bit do a direct dmfc2, choosing the two- or three-operand
 * assembler form depending on whether sel is zero.
 */
#define __read_64bit_c2_register(source, sel)				\
({ unsigned long long __res;						\
	if (sizeof(unsigned long) == 4)					\
		__res = __read_64bit_c2_split(source, sel);		\
	else if (sel == 0)						\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%0, " #source "\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%0, " #source ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	__res;								\
})
254 
/*
 * Write a 64-bit coprocessor-2 register.  On a 32-bit kernel delegate
 * to __write_64bit_c2_split(); on 64-bit do a direct dmtc2.  The "Jr"
 * constraint plus the %z0 modifier lets the assembler substitute $0
 * (the zero register) when @value is the constant 0, avoiding a load.
 */
#define __write_64bit_c2_register(register, sel, value)			\
do {									\
	if (sizeof(unsigned long) == 4)					\
		__write_64bit_c2_split(register, sel, value);		\
	else if (sel == 0)						\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmtc2\t%z0, " #register "\n\t"			\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmtc2\t%z0, " #register ", " #sel "\n\t"	\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
} while (0)
272 
/*
 * Write a 32-bit coprocessor-2 register with mtc2.  As above, "Jr"
 * with %z0 emits $0 directly for a constant-zero @value, and the
 * sel == 0 branch uses the two-operand assembler form.
 */
#define __write_32bit_c2_register(reg, sel, value)			\
({									\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mtc2\t%z0, " #reg "\n\t"			\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mtc2\t%z0, " #reg ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
})
288 
289 #endif /*_ASM_NLM_MIPS_EXTS_H */
290