/*
 * arch/arm/kernel/kprobes-common.c
 *
 * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
 *
 * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is
 * Copyright (C) 2006, 2007 Motorola Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/kernel.h>
#include <linux/kprobes.h>
#include <asm/system_info.h>

#include "kprobes.h"


#ifndef find_str_pc_offset

/*
 * For STR and STM instructions, an ARM core may choose to use either
 * a +8 or a +12 displacement from the current instruction's address.
 * Whichever value is chosen for a given core, it must be the same for
 * both instructions and may not change.  This function measures it.
 */

int str_pc_offset;

void __init find_str_pc_offset(void)
{
	int addr, scratch, ret;

	__asm__ (
		"sub	%[ret], pc, #4		\n\t"
		"str	pc, %[addr]		\n\t"
		"ldr	%[scr], %[addr]		\n\t"
		"sub	%[ret], %[scr], %[ret]	\n\t"
		: [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));

	str_pc_offset = ret;
}
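
/*
 * Reading aid for the asm above: in ARM state the PC reads as the address
 * of the current instruction plus 8, so after the first "sub", ret holds
 * the address of the "str" instruction. The "str" stores PC as that
 * address plus either 8 or 12 (whichever the core implements), so the
 * final subtraction leaves the chosen displacement in ret.
 */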

#endif /* !find_str_pc_offset */


#ifndef test_load_write_pc_interworking

bool load_write_pc_interworks;

void __init test_load_write_pc_interworking(void)
{
	int arch = cpu_architecture();
	BUG_ON(arch == CPU_ARCH_UNKNOWN);
	load_write_pc_interworks = arch >= CPU_ARCH_ARMv5T;
}

#endif /* !test_load_write_pc_interworking */


#ifndef test_alu_write_pc_interworking

bool alu_write_pc_interworks;

void __init test_alu_write_pc_interworking(void)
{
	int arch = cpu_architecture();
	BUG_ON(arch == CPU_ARCH_UNKNOWN);
	alu_write_pc_interworks = arch >= CPU_ARCH_ARMv7;
}

#endif /* !test_alu_write_pc_interworking */


void __init arm_kprobe_decode_init(void)
{
	find_str_pc_offset();
	test_load_write_pc_interworking();
	test_alu_write_pc_interworking();
}


static unsigned long __kprobes __check_eq(unsigned long cpsr)
{
	return cpsr & PSR_Z_BIT;
}

static unsigned long __kprobes __check_ne(unsigned long cpsr)
{
	return (~cpsr) & PSR_Z_BIT;
}

static unsigned long __kprobes __check_cs(unsigned long cpsr)
{
	return cpsr & PSR_C_BIT;
}

static unsigned long __kprobes __check_cc(unsigned long cpsr)
{
	return (~cpsr) & PSR_C_BIT;
}

static unsigned long __kprobes __check_mi(unsigned long cpsr)
{
	return cpsr & PSR_N_BIT;
}

static unsigned long __kprobes __check_pl(unsigned long cpsr)
{
	return (~cpsr) & PSR_N_BIT;
}

static unsigned long __kprobes __check_vs(unsigned long cpsr)
{
	return cpsr & PSR_V_BIT;
}

static unsigned long __kprobes __check_vc(unsigned long cpsr)
{
	return (~cpsr) & PSR_V_BIT;
}

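/*
 * The checks below combine two flags by shifting the CPSR so that one
 * flag lines up with another: N is bit 31, Z bit 30, C bit 29 and V
 * bit 28, so "cpsr >> 1" moves Z into the C position and "cpsr << 3"
 * moves V into the N position.
 */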
static unsigned long __kprobes __check_hi(unsigned long cpsr)
{
	cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
	return cpsr & PSR_C_BIT;
}

static unsigned long __kprobes __check_ls(unsigned long cpsr)
{
	cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
	return (~cpsr) & PSR_C_BIT;
}

static unsigned long __kprobes __check_ge(unsigned long cpsr)
{
	cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	return (~cpsr) & PSR_N_BIT;
}

static unsigned long __kprobes __check_lt(unsigned long cpsr)
{
	cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	return cpsr & PSR_N_BIT;
}

static unsigned long __kprobes __check_gt(unsigned long cpsr)
{
	unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	temp |= (cpsr << 1);			 /* PSR_N_BIT |= PSR_Z_BIT */
	return (~temp) & PSR_N_BIT;
}

static unsigned long __kprobes __check_le(unsigned long cpsr)
{
	unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
	temp |= (cpsr << 1);			 /* PSR_N_BIT |= PSR_Z_BIT */
	return temp & PSR_N_BIT;
}

static unsigned long __kprobes __check_al(unsigned long cpsr)
{
	return true;
}

kprobe_check_cc * const kprobe_condition_checks[16] = {
	&__check_eq, &__check_ne, &__check_cs, &__check_cc,
	&__check_mi, &__check_pl, &__check_vs, &__check_vc,
	&__check_hi, &__check_ls, &__check_ge, &__check_lt,
	&__check_gt, &__check_le, &__check_al, &__check_al
};
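
/*
 * Illustrative use (a sketch, not code from this file): callers index
 * this table with the condition field in bits 31-28 of the probed
 * instruction, along the lines of
 *
 *	if (!kprobe_condition_checks[insn >> 28](regs->ARM_cpsr))
 *		return;		// condition failed, skip the handler
 */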


void __kprobes kprobe_simulate_nop(struct kprobe *p, struct pt_regs *regs)
{
}

void __kprobes kprobe_emulate_none(struct kprobe *p, struct pt_regs *regs)
{
	p->ainsn.insn_fn();
}

static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
{
	kprobe_opcode_t insn = p->opcode;
	int rn = (insn >> 16) & 0xf;
	int lbit = insn & (1 << 20);
	int wbit = insn & (1 << 21);
	int ubit = insn & (1 << 23);
	int pbit = insn & (1 << 24);
	long *addr = (long *)regs->uregs[rn];
	int reg_bit_vector;
	int reg_count;

	reg_count = 0;
	reg_bit_vector = insn & 0xffff;
	while (reg_bit_vector) {
		reg_bit_vector &= (reg_bit_vector - 1);
		++reg_count;
	}

	if (!ubit)
		addr -= reg_count;
	addr += (!pbit == !ubit);

	reg_bit_vector = insn & 0xffff;
	while (reg_bit_vector) {
		int reg = __ffs(reg_bit_vector);
		reg_bit_vector &= (reg_bit_vector - 1);
		if (lbit)
			regs->uregs[reg] = *addr++;
		else
			*addr++ = regs->uregs[reg];
	}

	if (wbit) {
		if (!ubit)
			addr -= reg_count;
		addr -= (!pbit == !ubit);
		regs->uregs[rn] = (long)addr;
	}
}
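
/*
 * The address adjustment in simulate_ldm1stm1() covers the four block
 * transfer addressing modes; with n registers in the list, the first
 * word transferred is at:
 *
 *	IA (P=0, U=1):	Rn
 *	IB (P=1, U=1):	Rn + 4
 *	DA (P=0, U=0):	Rn - 4*n + 4
 *	DB (P=1, U=0):	Rn - 4*n
 */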

static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
{
	regs->ARM_pc = (long)p->addr + str_pc_offset;
	simulate_ldm1stm1(p, regs);
	regs->ARM_pc = (long)p->addr + 4;
}

static void __kprobes simulate_ldm1_pc(struct kprobe *p, struct pt_regs *regs)
{
	simulate_ldm1stm1(p, regs);
	load_write_pc(regs->ARM_pc, regs);
}

static void __kprobes
emulate_generic_r0_12_noflags(struct kprobe *p, struct pt_regs *regs)
{
	register void *rregs asm("r1") = regs;
	register void *rfn asm("lr") = p->ainsn.insn_fn;

	__asm__ __volatile__ (
		"stmdb	sp!, {%[regs], r11}	\n\t"
		"ldmia	%[regs], {r0-r12}	\n\t"
#if __LINUX_ARM_ARCH__ >= 6
		"blx	%[fn]			\n\t"
#else
		"str	%[fn], [sp, #-4]!	\n\t"
		"adr	lr, 1f			\n\t"
		"ldr	pc, [sp], #4		\n\t"
		"1:				\n\t"
#endif
		"ldr	lr, [sp], #4		\n\t" /* lr = regs */
		"stmia	lr, {r0-r12}		\n\t"
		"ldr	r11, [sp], #4		\n\t"
		: [regs] "=r" (rregs), [fn] "=r" (rfn)
		: "0" (rregs), "1" (rfn)
		: "r0", "r2", "r3", "r4", "r5", "r6", "r7",
		  "r8", "r9", "r10", "r12", "memory", "cc"
		);
}
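
/*
 * Note on the asm above: the regs pointer (in r1) and r11 are saved on
 * the stack because the ldmia/stmia sequences overwrite r0-r12 with the
 * probed task's register values, and r11 is not in the clobber list (it
 * may be in use as the frame pointer). The pre-ARMv6 path performs the
 * call to insn_fn without using the blx instruction.
 */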

static void __kprobes
emulate_generic_r2_14_noflags(struct kprobe *p, struct pt_regs *regs)
{
	emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+2));
}

static void __kprobes
emulate_ldm_r3_15(struct kprobe *p, struct pt_regs *regs)
{
	emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+3));
	load_write_pc(regs->ARM_pc, regs);
}

enum kprobe_insn __kprobes
kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
	kprobe_insn_handler_t *handler = 0;
	unsigned reglist = insn & 0xffff;
	int is_ldm = insn & 0x100000;
	int rn = (insn >> 16) & 0xf;

	if (rn <= 12 && (reglist & 0xe000) == 0) {
		/* Instruction only uses registers in the range R0..R12 */
		handler = emulate_generic_r0_12_noflags;

	} else if (rn >= 2 && (reglist & 0x8003) == 0) {
		/* Instruction only uses registers in the range R2..R14 */
		rn -= 2;
		reglist >>= 2;
		handler = emulate_generic_r2_14_noflags;

	} else if (rn >= 3 && (reglist & 0x0007) == 0) {
		/* Instruction only uses registers in the range R3..R15 */
		if (is_ldm && (reglist & 0x8000)) {
			rn -= 3;
			reglist >>= 3;
			handler = emulate_ldm_r3_15;
		}
	}

	if (handler) {
		/* We can emulate the instruction in (possibly) modified form */
		asi->insn[0] = (insn & 0xfff00000) | (rn << 16) | reglist;
		asi->insn_handler = handler;
		return INSN_GOOD;
	}

	/* Fallback to slower simulation... */
	if (reglist & 0x8000)
		handler = is_ldm ? simulate_ldm1_pc : simulate_stm1_pc;
	else
		handler = simulate_ldm1stm1;
	asi->insn_handler = handler;
	return INSN_GOOD_NO_SLOT;
}
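
/*
 * Worked example of the register shifting above: "ldmia sp, {r4, lr}"
 * has rn = 13 and reglist = 0x4010, so the R2..R14 case rewrites it as
 * "ldmia r11, {r2, r12}" with emulate_generic_r2_14_noflags() as the
 * handler. That handler runs the rewritten instruction against
 * regs->uregs + 2, so its r11/r2/r12 map back onto the probed task's
 * sp/r4/lr.
 */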


/*
 * Prepare an instruction slot to receive an instruction for emulating.
 * This is done by placing a subroutine return after the location where the
 * instruction will be placed. We also modify ARM instructions to be
 * unconditional as the condition code will already be checked before any
 * emulation handler is called.
 */
static kprobe_opcode_t __kprobes
prepare_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
								bool thumb)
{
#ifdef CONFIG_THUMB2_KERNEL
	if (thumb) {
		u16 *thumb_insn = (u16 *)asi->insn;
		thumb_insn[1] = 0x4770; /* Thumb bx lr */
		thumb_insn[2] = 0x4770; /* Thumb bx lr */
		return insn;
	}
	asi->insn[1] = 0xe12fff1e; /* ARM bx lr */
#else
	asi->insn[1] = 0xe1a0f00e; /* mov pc, lr */
#endif
	/* Make an ARM instruction unconditional */
	if (insn < 0xe0000000)
		insn = (insn | 0xe0000000) & ~0x10000000;
	return insn;
}
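
/*
 * Example of the conditional-to-unconditional rewrite above:
 * "addne r0, r0, #1" (0x12800001) becomes "add r0, r0, #1" (0xe2800001).
 * The original NE condition is still honoured, because it is evaluated
 * via kprobe_condition_checks[] before the emulation handler is called.
 */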

/*
 * Write a (probably modified) instruction into the slot previously prepared by
 * prepare_emulated_insn
 */
static void  __kprobes
set_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
								bool thumb)
{
#ifdef CONFIG_THUMB2_KERNEL
	if (thumb) {
		u16 *ip = (u16 *)asi->insn;
		if (is_wide_instruction(insn))
			*ip++ = insn >> 16;
		*ip++ = insn;
		return;
	}
#endif
	asi->insn[0] = insn;
}

/*
 * When we modify the register numbers encoded in an instruction to be emulated,
 * the new values come from this define. For ARM and 32-bit Thumb instructions
 * this gives...
 *
 *	bit position	  16  12   8   4   0
 *	---------------+---+---+---+---+---+
 *	register	 r2  r0  r1  --  r3
 */
#define INSN_NEW_BITS		0x00020103

/* Each nibble has same value as that at INSN_NEW_BITS bit 16 */
#define INSN_SAMEAS16_BITS	0x22222222
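
/*
 * Worked example: "ldr r4, [r9]" (0xe5994000) decoded with REG_TYPE_ANY
 * in the Rn (bit 16) and Rd (bit 12) nibbles is rewritten by
 * decode_regs() below as "ldr r0, [r2]" (0xe5920000), so emulation
 * handlers only ever see the canonical registers listed above.
 */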

/*
 * Validate and modify each of the registers encoded in an instruction.
 *
 * Each nibble in regs contains a value from enum decode_reg_type. For each
 * non-zero value, the corresponding nibble in pinsn is validated and modified
 * according to the type.
 */
static bool __kprobes decode_regs(kprobe_opcode_t* pinsn, u32 regs)
{
	kprobe_opcode_t insn = *pinsn;
	kprobe_opcode_t mask = 0xf; /* Start at least significant nibble */

	for (; regs != 0; regs >>= 4, mask <<= 4) {

		kprobe_opcode_t new_bits = INSN_NEW_BITS;

		switch (regs & 0xf) {

		case REG_TYPE_NONE:
			/* Nibble not a register, skip to next */
			continue;

		case REG_TYPE_ANY:
			/* Any register is allowed */
			break;

		case REG_TYPE_SAMEAS16:
			/* Replace register with same as at bit position 16 */
			new_bits = INSN_SAMEAS16_BITS;
			break;

		case REG_TYPE_SP:
			/* Only allow SP (R13) */
			if ((insn ^ 0xdddddddd) & mask)
				goto reject;
			break;

		case REG_TYPE_PC:
			/* Only allow PC (R15) */
			if ((insn ^ 0xffffffff) & mask)
				goto reject;
			break;

		case REG_TYPE_NOSP:
			/* Reject SP (R13) */
			if (((insn ^ 0xdddddddd) & mask) == 0)
				goto reject;
			break;

		case REG_TYPE_NOSPPC:
		case REG_TYPE_NOSPPCX:
			/* Reject SP and PC (R13 and R15) */
			if (((insn ^ 0xdddddddd) & 0xdddddddd & mask) == 0)
				goto reject;
			break;

		case REG_TYPE_NOPCWB:
			if (!is_writeback(insn))
				break; /* No writeback, so any register is OK */
			/* fall through... */
		case REG_TYPE_NOPC:
		case REG_TYPE_NOPCX:
			/* Reject PC (R15) */
			if (((insn ^ 0xffffffff) & mask) == 0)
				goto reject;
			break;
		}

		/* Replace value of nibble with new register number... */
		insn &= ~mask;
		insn |= new_bits & mask;
	}

	*pinsn = insn;
	return true;

reject:
	return false;
}
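
/*
 * The 0xdddddddd and 0xffffffff constants above replicate the SP (13)
 * and PC (15) register numbers into every nibble, so for example
 * "(insn ^ 0xdddddddd) & mask" is zero exactly when the nibble selected
 * by mask encodes SP.
 */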

static const int decode_struct_sizes[NUM_DECODE_TYPES] = {
	[DECODE_TYPE_TABLE]	= sizeof(struct decode_table),
	[DECODE_TYPE_CUSTOM]	= sizeof(struct decode_custom),
	[DECODE_TYPE_SIMULATE]	= sizeof(struct decode_simulate),
	[DECODE_TYPE_EMULATE]	= sizeof(struct decode_emulate),
	[DECODE_TYPE_OR]	= sizeof(struct decode_or),
	[DECODE_TYPE_REJECT]	= sizeof(struct decode_reject)
};

/*
 * kprobe_decode_insn operates on data tables in order to decode an ARM
 * architecture instruction onto which a kprobe has been placed.
 *
 * These instruction decoding tables are a concatenation of entries, each
 * of which consists of one of the following structs:
 *
 *	decode_table
 *	decode_custom
 *	decode_simulate
 *	decode_emulate
 *	decode_or
 *	decode_reject
 *
 * Each of these starts with a struct decode_header which has the following
 * fields:
 *
 *	type_regs
 *	mask
 *	value
 *
 * The least significant DECODE_TYPE_BITS of type_regs contains a value
 * from enum decode_type; this indicates which of the decode_* structs
 * the entry contains. The value DECODE_TYPE_END indicates the end of the
 * table.
 *
 * When the table is parsed, each entry is checked in turn to see if it
 * matches the instruction to be decoded using the test:
 *
 *	(insn & mask) == value
 *
 * If no match is found before the end of the table is reached then decoding
 * fails with INSN_REJECTED.
 *
 * When a match is found, decode_regs() is called to validate and modify each
 * of the registers encoded in the instruction; the data it uses to do this
 * is (type_regs >> DECODE_TYPE_BITS). A validation failure will cause decoding
 * to fail with INSN_REJECTED.
 *
 * Once the instruction has passed the above tests, further processing
 * depends on the type of the table entry's decode struct.
 *
 */
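/*
 * For example (illustrative numbers, not an entry copied from the real
 * tables): a header with mask 0x0e100000 and value 0x08100000 matches
 * every ARM LDM encoding, because block transfers have 100 in bits
 * 27-25 and LDM sets the L bit (bit 20).
 */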
int __kprobes
kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
				const union decode_item *table, bool thumb)
{
	const struct decode_header *h = (struct decode_header *)table;
	const struct decode_header *next;
	bool matched = false;

	insn = prepare_emulated_insn(insn, asi, thumb);

	for (;; h = next) {
		enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK;
		u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS;

		if (type == DECODE_TYPE_END)
			return INSN_REJECTED;

		next = (struct decode_header *)
				((uintptr_t)h + decode_struct_sizes[type]);

		if (!matched && (insn & h->mask.bits) != h->value.bits)
			continue;

		if (!decode_regs(&insn, regs))
			return INSN_REJECTED;

		switch (type) {

		case DECODE_TYPE_TABLE: {
			struct decode_table *d = (struct decode_table *)h;
			next = (struct decode_header *)d->table.table;
			break;
		}

		case DECODE_TYPE_CUSTOM: {
			struct decode_custom *d = (struct decode_custom *)h;
			return (*d->decoder.decoder)(insn, asi);
		}

		case DECODE_TYPE_SIMULATE: {
			struct decode_simulate *d = (struct decode_simulate *)h;
			asi->insn_handler = d->handler.handler;
			return INSN_GOOD_NO_SLOT;
		}

		case DECODE_TYPE_EMULATE: {
			struct decode_emulate *d = (struct decode_emulate *)h;
			asi->insn_handler = d->handler.handler;
			set_emulated_insn(insn, asi, thumb);
			return INSN_GOOD;
		}

		case DECODE_TYPE_OR:
			matched = true;
			break;

		case DECODE_TYPE_REJECT:
		default:
			return INSN_REJECTED;
		}
	}
}