/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * Exception entry code.  This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */
.macro EXCEPTION_PROLOG handle_dar_dsisr=0
	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2	handle_dar_dsisr=\handle_dar_dsisr
.endm

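/*
 * Step 0 of the prolog: free r10/r11 via the SPRG scratch registers
 * and test SRR1[PR], so cr0.eq is set iff we come from the kernel.
 * With CONFIG_VMAP_STACK, SRR0/SRR1 (and DAR/DSISR when requested)
 * are saved in the thread_struct while translation is still off.
 */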
.macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
	mtspr	SPRN_SPRG_SCRATCH0,r10
	mtspr	SPRN_SPRG_SCRATCH1,r11
#ifdef CONFIG_VMAP_STACK
	mfspr	r10, SPRN_SPRG_THREAD
	.if	\handle_dar_dsisr
	mfspr	r11, SPRN_DAR
	stw	r11, DAR(r10)
	mfspr	r11, SPRN_DSISR
	stw	r11, DSISR(r10)
	.endif
	mfspr	r11, SPRN_SRR0
	stw	r11, SRR0(r10)
#endif
	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
#ifdef CONFIG_VMAP_STACK
	stw	r11, SRR1(r10)
#endif
	mfcr	r10
	andi.	r11, r11, MSR_PR
.endm

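/*
 * Step 1: select the stack.  If the exception came from the kernel
 * (cr0.eq set by EXCEPTION_PROLOG_0), carve a frame out of r1;
 * otherwise fetch the task's kernel stack from the thread_struct.
 * With CONFIG_VMAP_STACK, a THREAD_ALIGN bit of the new stack
 * pointer is tested to catch stack overflow.
 */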
.macro EXCEPTION_PROLOG_1 for_rtas=0
#ifdef CONFIG_VMAP_STACK
	mr	r11, r1
	subi	r1, r1, INT_FRAME_SIZE		/* use r1 if kernel */
	beq	1f
	mfspr	r1,SPRN_SPRG_THREAD
	lwz	r1,TASK_STACK-THREAD(r1)
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
#else
	subi	r11, r1, INT_FRAME_SIZE		/* use r1 if kernel */
	beq	1f
	mfspr	r11,SPRN_SPRG_THREAD
	lwz	r11,TASK_STACK-THREAD(r11)
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
#endif
1:
	tophys_novmstack r11, r11
#ifdef CONFIG_VMAP_STACK
	mtcrf	0x3f, r1
	bt	32 - THREAD_ALIGN_SHIFT, stack_overflow
#endif
.endm

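/*
 * Step 2: build the exception frame on the stack chosen in step 1:
 * the volatile GPRs, CR, LR, the interrupted SRR0/SRR1 (left in
 * r12/r9 for the caller) and the STACK_FRAME_REGS_MARKER are saved,
 * and the MSR is switched so TLB misses can be taken again.
 */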
.macro EXCEPTION_PROLOG_2 handle_dar_dsisr=0
#ifdef CONFIG_VMAP_STACK
	mtcr	r10
	li	r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r10
	isync
#else
	stw	r10,_CCR(r11)		/* save registers */
#endif
	mfspr	r10, SPRN_SPRG_SCRATCH0
#ifdef CONFIG_VMAP_STACK
	stw	r11,GPR1(r1)
	stw	r11,0(r1)
	mr	r11, r1
#else
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt(r1, r11)		/* set new kernel sp */
#endif
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	stw	r10,GPR10(r11)
#ifdef CONFIG_VMAP_STACK
	mfcr	r10
	stw	r10, _CCR(r11)
#endif
	mfspr	r12,SPRN_SPRG_SCRATCH1
	stw	r12,GPR11(r11)
	mflr	r10
	stw	r10,_LINK(r11)
#ifdef CONFIG_VMAP_STACK
	mfspr	r12, SPRN_SPRG_THREAD
	tovirt(r12, r12)
	.if	\handle_dar_dsisr
	lwz	r10, DAR(r12)
	stw	r10, _DAR(r11)
	lwz	r10, DSISR(r12)
	stw	r10, _DSISR(r11)
	.endif
	lwz	r9, SRR1(r12)
	andi.	r10, r9, MSR_PR
	lwz	r12, SRR0(r12)
#else
	mfspr	r12,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
#endif
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	li	r10, MSR_KERNEL & ~MSR_IR /* can take exceptions */
#else
	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	stw	r0,GPR0(r11)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,8(r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
.endm

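/*
 * System call entry.  An open-coded variant of the prolog above:
 * switch to the task's kernel stack, build a pt_regs frame with
 * \trapno + 1 as the trap number, then RFI to transfer_to_syscall
 * with the MMU back on.  A syscall issued from kernel mode takes
 * the 99: branch to ret_from_kernel_syscall instead.
 */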
.macro SYSCALL_ENTRY trapno
	mfspr	r12,SPRN_SPRG_THREAD
	mfspr	r9, SPRN_SRR1
#ifdef CONFIG_VMAP_STACK
	mfspr	r11, SPRN_SRR0
	mtctr	r11
	andi.	r11, r9, MSR_PR
	mr	r11, r1
	lwz	r1,TASK_STACK-THREAD(r12)
	beq-	99f
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
	li	r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r10
	isync
	tovirt(r12, r12)
	stw	r11,GPR1(r1)
	stw	r11,0(r1)
	mr	r11, r1
#else
	andi.	r11, r9, MSR_PR
	lwz	r11,TASK_STACK-THREAD(r12)
	beq-	99f
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
	tophys(r11, r11)
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt(r1, r11)		/* set new kernel sp */
#endif
	mflr	r10
	stw	r10, _LINK(r11)
#ifdef CONFIG_VMAP_STACK
	mfctr	r10
#else
	mfspr	r10,SPRN_SRR0
#endif
	stw	r10,_NIP(r11)
	mfcr	r10
	rlwinm	r10,r10,0,4,2	/* Clear SO bit in CR */
	stw	r10,_CCR(r11)		/* save registers */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r9,_MSR(r11)
	li	r2, \trapno + 1
	stw	r10,8(r11)
	stw	r2,_TRAP(r11)
	SAVE_GPR(0, r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
	addi	r11,r1,STACK_FRAME_OVERHEAD
	addi	r2,r12,-THREAD
	stw	r11,PT_REGS(r12)
#if defined(CONFIG_40x)
	/* Check to see if the dbcr0 register is set up to debug.  Use the
	   internal debug mode bit to do this. */
	lwz	r12,THREAD_DBCR0(r12)
	andis.	r12,r12,DBCR0_IDM@h
#endif
	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
#if defined(CONFIG_40x)
	beq+	3f
	/* From user and task is ptraced - load up global dbcr0 */
	li	r12,-1			/* clear all pending debug events */
	mtspr	SPRN_DBSR,r12
	lis	r11,global_dbcr0@ha
	tophys(r11,r11)
	addi	r11,r11,global_dbcr0@l
	lwz	r12,0(r11)
	mtspr	SPRN_DBCR0,r12
	lwz	r12,4(r11)
	addi	r12,r12,-1
	stw	r12,4(r11)
#endif

3:
	tovirt_novmstack r2, r2 	/* set r2 to current */
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this point
	 * otherwise we might risk taking an interrupt before we tell lockdep
	 * they are enabled.
	 */
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
	rlwimi	r10, r9, 0, MSR_EE
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	RFI				/* jump to handler, enable MMU */
99:	b	ret_from_kernel_syscall
.endm

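/*
 * Without CONFIG_VMAP_STACK, DAR/DSISR are read from the SPRs here;
 * with it they were already copied to the thread_struct by
 * EXCEPTION_PROLOG_0 and into the frame by EXCEPTION_PROLOG_2, so
 * this is a no-op.
 */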
.macro save_dar_dsisr_on_stack reg1, reg2, sp
#ifndef CONFIG_VMAP_STACK
	mfspr	\reg1, SPRN_DAR
	mfspr	\reg2, SPRN_DSISR
	stw	\reg1, _DAR(\sp)
	stw	\reg2, _DSISR(\sp)
#endif
.endm

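/*
 * Same as above, but also leaves DAR/DSISR in \reg1/\reg2 for the
 * caller: read back from the frame with CONFIG_VMAP_STACK, straight
 * from the SPRs otherwise.
 */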
.macro get_and_save_dar_dsisr_on_stack reg1, reg2, sp
#ifdef CONFIG_VMAP_STACK
	lwz	\reg1, _DAR(\sp)
	lwz	\reg2, _DSISR(\sp)
#else
	save_dar_dsisr_on_stack \reg1, \reg2, \sp
#endif
.endm

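/*
 * Phys/virt translation helpers: translate only in the configuration
 * that needs it, otherwise degrade to a register move (elided when
 * \dst and \src are the same register).
 */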
.macro tovirt_vmstack dst, src
#ifdef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tovirt_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tophys_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tophys(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
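/*
 * START_EXCEPTION places a vector at fixed address n; on Book3S a
 * DO_KVM hook is emitted first so KVM can intercept the exception.
 */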
#ifdef CONFIG_PPC_BOOK3S
#define	START_EXCEPTION(n, label)		\
	. = n;					\
	DO_KVM n;				\
label:

#else
#define	START_EXCEPTION(n, label)		\
	. = n;					\
label:

#endif

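/*
 * EXC_XFER_TEMPLATE saves the trap number, loads the MSR the handler
 * will run under, and calls the transfer routine, which finds the
 * handler and return addresses in the two .long words following the
 * bl (via LR).
 */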
#define EXCEPTION(n, label, hdlr, xfer)		\
	START_EXCEPTION(n, label)		\
	EXCEPTION_PROLOG;			\
	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
	xfer(n, hdlr)

#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)		\
	li	r10,trap;					\
	stw	r10,_TRAP(r11);					\
	LOAD_REG_IMMEDIATE(r10, msr);				\
	bl	tfer;						\
	.long	hdlr;						\
	.long	ret

#define EXC_XFER_STD(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
			  ret_from_except_full)

#define EXC_XFER_LITE(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
			  ret_from_except)

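/*
 * Branched to from EXCEPTION_PROLOG_1 when the stack pointer falls
 * outside the task's stack: point r1 at this CPU's emergency_ctx
 * stack, then build a frame and call stack_overflow_exception.
 */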
.macro vmap_stack_overflow_exception
#ifdef CONFIG_VMAP_STACK
#ifdef CONFIG_SMP
	mfspr	r1, SPRN_SPRG_THREAD
	lwz	r1, TASK_CPU - THREAD(r1)
	slwi	r1, r1, 3
	addis	r1, r1, emergency_ctx-PAGE_OFFSET@ha
#else
	lis	r1, emergency_ctx-PAGE_OFFSET@ha
#endif
	lwz	r1, emergency_ctx-PAGE_OFFSET@l(r1)
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
	EXCEPTION_PROLOG_2
	SAVE_NVGPRS(r11)
	addi	r3, r1, STACK_FRAME_OVERHEAD
	EXC_XFER_STD(0, stack_overflow_exception)
#endif
.endm

#endif /* __HEAD_32_H__ */