/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>

	.code64
	.section .entry.text, "ax"

#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
EXPORT_SYMBOL(__fentry__)
#else
# define function_hook	mcount
EXPORT_SYMBOL(mcount)
#endif

/* All cases save the original rbp (8 bytes) */
#ifdef CONFIG_FRAME_POINTER
# ifdef CC_USING_FENTRY
/* Save parent and function stack frames (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16*2)
# else
/* Save just function stack frame (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16)
# endif
#else
/* No need to save a stack frame */
# define MCOUNT_FRAME_SIZE	8
#endif /* CONFIG_FRAME_POINTER */

/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE		(SS+8 + MCOUNT_FRAME_SIZE)

/*
 * The gcc -pg option adds a call to 'mcount' in most functions.
 * When -mfentry is used, the call is to '__fentry__' instead of 'mcount'
 * and is made before the function's stack frame is set up.
 * Both require a set of regs to be saved before calling
 * any C code and restored before returning back to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, a pt_regs-sized area is allocated on the
 * stack and the required mcount registers are saved in the locations
 * that pt_regs has them in.
 */

/*
 * @added: the amount of stack added before calling this
 *
 * After this is called, the following registers contain:
 *
 *  %rdi - holds the address that called the trampoline
 *  %rsi - holds the parent function (traced function's return address)
 *  %rdx - holds the original %rbp
 */
.macro save_mcount_regs added=0

	/* Always save the original rbp */
	pushq %rbp

#ifdef CONFIG_FRAME_POINTER
	/*
	 * Stack traces will stop at the ftrace trampoline if the frame pointer
	 * is not set up properly. If fentry is used, we need to save a frame
	 * pointer for the parent as well as the function traced, because
	 * fentry is called before the stack frame is set up, whereas mcount
	 * is called afterward.
	 */
#ifdef CC_USING_FENTRY
	/* Save the parent pointer (skip orig rbp and our return address) */
	pushq \added+8*2(%rsp)
	pushq %rbp
	movq %rsp, %rbp
	/* Save the return address (now skip orig rbp, rbp and parent) */
	pushq \added+8*3(%rsp)
#else
	/* Can't assume that rip is before this (unless added was zero) */
	pushq \added+8(%rsp)
#endif
	pushq %rbp
	movq %rsp, %rbp
#endif /* CONFIG_FRAME_POINTER */

	/*
	 * We add enough stack to save all regs.
	 */
	subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/*
	 * Save the original RBP. Even though the mcount ABI does not
	 * require this, it helps out callers.
	 */
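	/*
	 * Rough sketch of the stack at this point (the frame-pointer
	 * frames built above under CONFIG_FRAME_POINTER sit between the
	 * original rbp slot and the pt_regs area and are already covered
	 * by MCOUNT_REG_SIZE):
	 *
	 *   MCOUNT_REG_SIZE+8+\added(%rsp): parent return address (fentry case)
	 *   MCOUNT_REG_SIZE+\added(%rsp):   return address into the traced function
	 *   MCOUNT_REG_SIZE(%rsp):          \added bytes pushed by our caller (e.g. flags)
	 *   MCOUNT_REG_SIZE-8(%rsp):        original %rbp saved by the first pushq
	 *   0(%rsp) .. SS(%rsp):            pt_regs area allocated by the subq above
	 */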
	movq MCOUNT_REG_SIZE-8(%rsp), %rdx
	movq %rdx, RBP(%rsp)

	/* Copy the parent address into %rsi (second parameter) */
#ifdef CC_USING_FENTRY
	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
#else
	/* %rdx contains original %rbp */
	movq 8(%rdx), %rsi
#endif

	/* Move RIP to its proper location */
	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
	movq %rdi, RIP(%rsp)

	/*
	 * Now %rdi (the first parameter) has the return address of
	 * where ftrace_call returns. But the callbacks expect the
	 * address of the call itself.
	 */
	subq $MCOUNT_INSN_SIZE, %rdi
	.endm

.macro restore_mcount_regs
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax

	/* ftrace_regs_caller can modify %rbp */
	movq RBP(%rsp), %rbp

	addq $MCOUNT_REG_SIZE, %rsp

	.endm

#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(function_hook)
	retq
END(function_hook)

ENTRY(ftrace_caller)
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

GLOBAL(ftrace_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

GLOBAL(ftrace_call)
	call ftrace_stub

	restore_mcount_regs

	/*
	 * The copied trampoline must call ftrace_epilogue as it
	 * still may need to call the function graph tracer.
	 *
	 * The code up to this label is copied into trampolines so
	 * think twice before adding any new code or changing the
	 * layout here.
	 */
GLOBAL(ftrace_epilogue)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

/* This is weak to keep gas from relaxing the jumps */
WEAK(ftrace_stub)
	retq
END(ftrace_caller)

ENTRY(ftrace_regs_caller)
	/* Save the current flags before any operations that can change them */
	pushfq

	/* added 8 bytes to save flags */
	save_mcount_regs 8
	/* save_mcount_regs fills in first two parameters */

GLOBAL(ftrace_regs_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq MCOUNT_REG_SIZE(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address and flags */
	leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

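	/*
	 * The call below is patched at runtime by ftrace to point at the
	 * registered callback. As a sketch (not verbatim), the arguments
	 * now line up with the ftrace_func_t prototype from
	 * include/linux/ftrace.h:
	 *
	 *	void func(unsigned long ip, unsigned long parent_ip,
	 *		  struct ftrace_ops *op, struct pt_regs *regs);
	 *
	 *	%rdi = ip, %rsi = parent_ip, %rdx = op, %rcx = regs
	 */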
GLOBAL(ftrace_regs_call)
	call ftrace_stub

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE+8(%rsp)

	/* Restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBX(%rsp), %rbx

	restore_mcount_regs

	/* Restore flags */
	popfq

	/*
	 * As this jmp to ftrace_epilogue can be a short jump,
	 * it must not be copied into the trampoline.
	 * The trampoline will add the code to jump
	 * to the return.
	 */
GLOBAL(ftrace_regs_caller_end)

	jmp ftrace_epilogue

END(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

trace:
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

	/*
	 * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
	 * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
	 * ip and parent ip are used and the list function is called when
	 * function tracing is enabled.
	 */
	movq ftrace_trace_function, %r8
	CALL_NOSPEC %r8
	restore_mcount_regs

	jmp fgraph_trace
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
	/* Saves rbp into %rdx and fills first parameter */
	save_mcount_regs

#ifdef CC_USING_FENTRY
	leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
	movq $0, %rdx			/* No frame pointers needed */
#else
	/* Save address of the return address of traced function */
	leaq 8(%rdx), %rsi
	/* ftrace does sanity checks against frame pointers */
	movq (%rdx), %rdx
#endif
	call prepare_ftrace_return

	restore_mcount_regs

	retq
END(ftrace_graph_caller)

GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	JMP_NOSPEC %rdi
#endif
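/*
 * Illustrative only, not part of the kernel sources: a rough sketch of how a
 * callback ends up routed through ftrace_regs_caller above. Registering an
 * ftrace_ops with FTRACE_OPS_FL_SAVE_REGS asks ftrace to provide the pt_regs
 * that this trampoline fills in; 'regs' in the callback then points at that
 * area. my_callback/my_ops are made-up names; the types and flag come from
 * include/linux/ftrace.h of this era.
 *
 *	static void my_callback(unsigned long ip, unsigned long parent_ip,
 *				struct ftrace_ops *op, struct pt_regs *regs)
 *	{
 *	}
 *
 *	static struct ftrace_ops my_ops = {
 *		.func	= my_callback,
 *		.flags	= FTRACE_OPS_FL_SAVE_REGS,
 *	};
 *
 *	register_ftrace_function(&my_ops);
 */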