// SPDX-License-Identifier: GPL-2.0
/*
 * preemptoff and irqoff tracepoints
 *
 * Copyright (C) Joel Fernandes (Google) <joel@joelfernandes.org>
 */

#include <linux/kallsyms.h>
#include <linux/uaccess.h>
#include <linux/module.h>
#include <linux/ftrace.h>
#include <linux/kprobes.h>
#include "trace.h"

#define CREATE_TRACE_POINTS
#include <trace/events/preemptirq.h>
#undef CREATE_TRACE_POINTS
#include <trace/hooks/preemptirq.h>

/*
 * Use regular trace points on architectures that implement noinstr
 * tooling: these calls will only happen with RCU enabled, which can
 * use a regular tracepoint.
 *
 * On older architectures, use the rcuidle tracing methods (which
 * aren't NMI-safe - so exclude NMI contexts):
 */
#ifdef CONFIG_ARCH_WANTS_NO_INSTR
#define trace(point)	trace_##point
#else
#define trace(point)	if (!in_nmi()) trace_##point##_rcuidle
#endif

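/*
 * Illustrative expansion of the trace() wrapper above (added for clarity,
 * not part of the original source): trace(irq_enable)(CALLER_ADDR0,
 * CALLER_ADDR1) pastes into trace_irq_enable(CALLER_ADDR0, CALLER_ADDR1)
 * when CONFIG_ARCH_WANTS_NO_INSTR is set, and otherwise into
 * "if (!in_nmi()) trace_irq_enable_rcuidle(CALLER_ADDR0, CALLER_ADDR1)",
 * i.e. the rcuidle event is simply skipped in NMI context.
 */
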
#ifdef CONFIG_TRACE_IRQFLAGS
/* Per-cpu variable to prevent redundant calls when IRQs already off */
static DEFINE_PER_CPU(int, tracing_irq_cpu);

/*
 * Like trace_hardirqs_on() but without the lockdep invocation. This is
 * used in the low level entry code where the ordering vs. RCU is important
 * and lockdep uses a staged approach which splits the lockdep hardirq
 * tracking into a RCU on and a RCU off section.
 */
void trace_hardirqs_on_prepare(void)
{
	if (this_cpu_read(tracing_irq_cpu)) {
		trace(irq_enable)(CALLER_ADDR0, CALLER_ADDR1);
		if (!in_nmi()) {
			trace_android_rvh_irqs_enable(CALLER_ADDR0,
						      CALLER_ADDR1);
		}
		tracer_hardirqs_on(CALLER_ADDR0, CALLER_ADDR1);
		this_cpu_write(tracing_irq_cpu, 0);
	}
}
EXPORT_SYMBOL(trace_hardirqs_on_prepare);
NOKPROBE_SYMBOL(trace_hardirqs_on_prepare);

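/*
 * Rough usage sketch (an assumption about the generic entry code, not
 * something defined in this file): on the return-to-user path the tracing
 * half runs first, while instrumentation is still permitted, and the final
 * lockdep state flip happens only after the switch towards user context:
 *
 *	instrumentation_begin();
 *	trace_hardirqs_on_prepare();
 *	lockdep_hardirqs_on_prepare();
 *	instrumentation_end();
 *	...
 *	lockdep_hardirqs_on(CALLER_ADDR0);
 */
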
void trace_hardirqs_on(void)
{
	if (this_cpu_read(tracing_irq_cpu)) {
		trace(irq_enable)(CALLER_ADDR0, CALLER_ADDR1);
		if (!in_nmi()) {
			trace_android_rvh_irqs_enable(CALLER_ADDR0,
						      CALLER_ADDR1);
		}
		tracer_hardirqs_on(CALLER_ADDR0, CALLER_ADDR1);
		this_cpu_write(tracing_irq_cpu, 0);
	}

	lockdep_hardirqs_on_prepare();
	lockdep_hardirqs_on(CALLER_ADDR0);
}
EXPORT_SYMBOL(trace_hardirqs_on);
NOKPROBE_SYMBOL(trace_hardirqs_on);

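/*
 * Illustrative call sites (an assumption based on the standard
 * CONFIG_TRACE_IRQFLAGS wrappers, not something defined in this file):
 * the local_irq_enable()/local_irq_disable() macros invoke these hooks
 * around the raw arch operations, roughly:
 *
 *	#define local_irq_enable() \
 *		do { trace_hardirqs_on(); raw_local_irq_enable(); } while (0)
 *	#define local_irq_disable() \
 *		do { raw_local_irq_disable(); trace_hardirqs_off(); } while (0)
 */
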
/*
 * Like trace_hardirqs_off() but without the lockdep invocation. This is
 * used in the low level entry code where the ordering vs. RCU is important
 * and lockdep uses a staged approach which splits the lockdep hardirq
 * tracking into a RCU on and a RCU off section.
 */
void trace_hardirqs_off_finish(void)
{
	if (!this_cpu_read(tracing_irq_cpu)) {
		this_cpu_write(tracing_irq_cpu, 1);
		tracer_hardirqs_off(CALLER_ADDR0, CALLER_ADDR1);
		trace(irq_disable)(CALLER_ADDR0, CALLER_ADDR1);
		if (!in_nmi()) {
			trace_android_rvh_irqs_disable(CALLER_ADDR0,
						       CALLER_ADDR1);
		}
	}
}
EXPORT_SYMBOL(trace_hardirqs_off_finish);
NOKPROBE_SYMBOL(trace_hardirqs_off_finish);

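/*
 * Rough usage sketch (an assumption about the generic entry code, not
 * something defined in this file): on entry from user space lockdep is
 * told about the disabled interrupts first, and the tracing half runs
 * later, once instrumentation is allowed again:
 *
 *	lockdep_hardirqs_off(CALLER_ADDR0);
 *	...
 *	instrumentation_begin();
 *	trace_hardirqs_off_finish();
 *	instrumentation_end();
 */
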
void trace_hardirqs_off(void)
{
	lockdep_hardirqs_off(CALLER_ADDR0);

	if (!this_cpu_read(tracing_irq_cpu)) {
		this_cpu_write(tracing_irq_cpu, 1);
		tracer_hardirqs_off(CALLER_ADDR0, CALLER_ADDR1);
		trace(irq_disable)(CALLER_ADDR0, CALLER_ADDR1);
		if (!in_nmi()) {
			trace_android_rvh_irqs_disable(CALLER_ADDR0,
						       CALLER_ADDR1);
		}
	}
}
EXPORT_SYMBOL(trace_hardirqs_off);
NOKPROBE_SYMBOL(trace_hardirqs_off);
#endif /* CONFIG_TRACE_IRQFLAGS */

#ifdef CONFIG_TRACE_PREEMPT_TOGGLE

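/*
 * Note (added for clarity, not in the original source): these two helpers
 * are expected to be called from the scheduler's preempt count bookkeeping
 * when preemption is actually enabled or disabled; a0 and a1 carry the
 * caller and parent return addresses that end up as the caller/parent
 * fields of the preempt_enable and preempt_disable tracepoints.
 */
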
void trace_preempt_on(unsigned long a0, unsigned long a1)
{
	trace(preempt_enable)(a0, a1);
	if (!in_nmi()) {
		trace_android_rvh_preempt_enable(a0, a1);
	}
	tracer_preempt_on(a0, a1);
}

void trace_preempt_off(unsigned long a0, unsigned long a1)
{
	trace(preempt_disable)(a0, a1);
	if (!in_nmi()) {
		trace_android_rvh_preempt_disable(a0, a1);
	}
	tracer_preempt_off(a0, a1);
}
#endif