// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <linux/bug.h>
#include <asm/text-patching.h>

enum insn_type {
	CALL = 0, /* site call */
	NOP = 1,  /* site cond-call */
	JMP = 2,  /* tramp / site tail-call */
	RET = 3,  /* tramp / site cond-tail-call */
};

/*
 * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
 * that there is no false-positive trampoline identification while also being a
 * speculation stop.
 */
static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };

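/*
 * 'ret; int3; int3; int3; int3' - a bare RET padded with INT3 bytes out to
 * the 5-byte CALL_INSN_SIZE that every patch site occupies.
 */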
static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };

static void __ref __static_call_transform(void *insn, enum insn_type type,
					  void *func, bool modinit)
{
	int size = CALL_INSN_SIZE;
	const void *code;

	switch (type) {
	case CALL:
		code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
		break;

	case NOP:
		code = ideal_nops[NOP_ATOMIC5];
		break;

	case JMP:
		code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
		break;

	case RET:
		if (cpu_feature_enabled(X86_FEATURE_RETHUNK))
			code = text_gen_insn(JMP32_INSN_OPCODE, insn, x86_return_thunk);
		else
			code = &retinsn;
		break;
	}

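	/* Nothing to do if the site already contains the desired instruction. */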
	if (memcmp(insn, code, size) == 0)
		return;

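	/*
	 * While booting, or while a module is still being initialized, the
	 * target text is not yet executed by other CPUs and can be patched
	 * directly; otherwise go through the INT3 breakpoint based
	 * text_poke_bp() machinery.
	 */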
	if (system_state == SYSTEM_BOOTING || modinit)
		return text_poke_early(insn, code, size);

	text_poke_bp(insn, code, size, NULL);
}

static void __static_call_validate(void *insn, bool tail)
{
	u8 opcode = *(u8 *)insn;

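	/*
	 * Tail-call sites must hold a JMP32 or a bare RET; regular call sites
	 * must hold a CALL or the 5-byte atomic NOP used for a NULL target.
	 */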
	if (tail) {
		if (opcode == JMP32_INSN_OPCODE ||
		    opcode == RET_INSN_OPCODE)
			return;
	} else {
		if (opcode == CALL_INSN_OPCODE ||
		    !memcmp(insn, ideal_nops[NOP_ATOMIC5], 5))
			return;
	}

	/*
	 * If we ever trigger this, our text is corrupt, we'll probably not live long.
	 */
	WARN_ONCE(1, "unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
}

static inline enum insn_type __sc_insn(bool null, bool tail)
{
	/*
	 * Encode the following table without branches:
	 *
	 *	tail	null	insn
	 *	-----+-------+------
	 *	  0  |   0   |  CALL
	 *	  0  |   1   |  NOP
	 *	  1  |   0   |  JMP
	 *	  1  |   1   |  RET
	 */
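	/* Relies on the enum values above: CALL=0, NOP=1, JMP=2, RET=3. */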
	return 2*tail + null;
}

void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
	mutex_lock(&text_mutex);

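	/* Patch the out-of-line trampoline, if there is one. */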
	if (tramp) {
		__static_call_validate(tramp, true);
		__static_call_transform(tramp, __sc_insn(!func, true), func, false);
	}

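	/* With inline static calls, also patch the call site itself. */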
	if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
		__static_call_validate(site, tail);
		__static_call_transform(site, __sc_insn(!func, tail), func, false);
	}

	mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);

#ifdef CONFIG_RETHUNK
/*
 * This is called by apply_returns() to fix up static call trampolines,
 * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
 * having a return trampoline.
 *
 * The problem is that static_call() is available before determining
 * X86_FEATURE_RETHUNK and, by implication, running alternatives.
 *
 * This means that __static_call_transform() above can have overwritten the
 * return trampoline and we now need to fix things up to be consistent.
 */
bool __static_call_fixup(void *tramp, u8 op, void *dest)
{
	unsigned long addr = (unsigned long)tramp;
	/*
	 * Not all .return_sites are a static_call trampoline (most are not).
	 * Check if the 3 bytes after the return are still kernel text, if not,
	 * then this definitely is not a trampoline and we need not worry
	 * further.
	 *
	 * This avoids the memcmp() below tripping over pagefaults etc..
	 */
	if (((addr >> PAGE_SHIFT) != ((addr + 7) >> PAGE_SHIFT)) &&
	    !kernel_text_address(addr + 7))
		return false;

	if (memcmp(tramp+5, tramp_ud, 3)) {
		/* Not a trampoline site, not our problem. */
		return false;
	}

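	/*
	 * Re-emit the RET if the trampoline still holds a bare RET or jumps
	 * to the default __x86_return_thunk, so that it ends up using the
	 * selected return thunk.
	 */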
	mutex_lock(&text_mutex);
	if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
		__static_call_transform(tramp, RET, NULL, true);
	mutex_unlock(&text_mutex);

	return true;
}
#endif