Lines matching refs: op
All uses of struct optimized_kprobe *op in the ARM optimized-kprobe (optprobes) code, grouped by function below; "..." marks gaps between matched source lines.
In arch_check_optimized_kprobe():
    int arch_check_optimized_kprobe(struct optimized_kprobe *op)
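For context: the ARM port performs no extra per-probe validation at this hook. A minimal sketch of the full function, assuming the mainline behaviour:

    /* ARM imposes no additional arch-specific constraints on an
     * already-prepared optimized probe, so this check is a no-op. */
    int arch_check_optimized_kprobe(struct optimized_kprobe *op)
    {
            return 0;
    }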
In __arch_remove_optimized_kprobe():
    __arch_remove_optimized_kprobe(struct optimized_kprobe *op, int dirty)
    ...
    if (op->optinsn.insn) {
            free_optinsn_slot(op->optinsn.insn, dirty);
            op->optinsn.insn = NULL;
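These fragments free the probe's detour buffer. A sketch of the complete helper, assuming the usual static linkage; the comment on "dirty" reflects the slot allocator's deferred-reclaim behaviour, paraphrased rather than quoted:

    static void
    __arch_remove_optimized_kprobe(struct optimized_kprobe *op, int dirty)
    {
            /* Release the detour (optinsn) slot, if one was allocated.
             * "dirty" marks a slot that was published and may still be
             * executing, so the allocator defers reclaiming it. */
            if (op->optinsn.insn) {
                    free_optinsn_slot(op->optinsn.insn, dirty);
                    op->optinsn.insn = NULL;
            }
    }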
In optimized_callback():
    optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
    ...
    struct kprobe *p = &op->kp;
    ...
    regs->ARM_pc = (unsigned long)op->kp.addr;
    ...
    kprobes_inc_nmissed_count(&op->kp);
    ...
    __this_cpu_write(current_kprobe, &op->kp);
    ...
    opt_pre_handler(&op->kp, regs);
    ...
    op->kp.ainsn.insn_singlestep(p->opcode, &p->ainsn, regs);
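Together these lines form the out-of-line handler that the detour buffer calls into. A condensed sketch of the control flow, assuming the surrounding code follows the usual pattern; IRQ masking and kprobe_ctlblk status bookkeeping are omitted here:

    static void
    optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
    {
            struct kprobe *p = &op->kp;

            /* Make the saved register state look like a normal kprobe
             * hit: point the saved PC back at the probed instruction. */
            regs->ARM_pc = (unsigned long)op->kp.addr;

            if (kprobe_running()) {
                    /* Reentrant hit (a handler tripped another probe):
                     * just count the miss. */
                    kprobes_inc_nmissed_count(&op->kp);
            } else {
                    __this_cpu_write(current_kprobe, &op->kp);
                    opt_pre_handler(&op->kp, regs); /* run pre-handlers */
                    __this_cpu_write(current_kprobe, NULL);
            }

            /* Execute the displaced original instruction out of line. */
            op->kp.ainsn.insn_singlestep(p->opcode, &p->ainsn, regs);
    }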
In arch_prepare_optimized_kprobe():
    int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *orig)
    ...
    val = (unsigned long)op;
    ...
                    (unsigned long)(op->kp.addr) + 4);
    ...
    op->optinsn.insn = code;
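The prepare step builds the detour buffer that the jump will target. A hedged sketch of the overall shape; the template-patching details (TMPL_VAL_IDX, the cache-flush bounds) are abbreviated and should be read as illustrative of the mainline layout, not verbatim source:

    int arch_prepare_optimized_kprobe(struct optimized_kprobe *op,
                                      struct kprobe *orig)
    {
            kprobe_opcode_t *code;
            unsigned long val;

            code = get_optinsn_slot();      /* allocate a detour buffer */
            if (!code)
                    return -ENOMEM;

            /* After copying the assembler template into the slot, patch
             * it: stash the op pointer so the detour can hand it to
             * optimized_callback() at runtime. */
            val = (unsigned long)op;
            memcpy(code + TMPL_VAL_IDX, &val, sizeof(val));

            /* The detour ends with a branch back past the 4-byte jump it
             * replaces, hence "(unsigned long)(op->kp.addr) + 4" in the
             * fragment above. */

            flush_icache_range((unsigned long)code,
                               (unsigned long)code + MAX_OPTINSN_SIZE);

            op->optinsn.insn = code;        /* non-NULL means "prepared" */
            return 0;
    }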
In arch_optimize_kprobes():
    struct optimized_kprobe *op, *tmp;
    ...
    list_for_each_entry_safe(op, tmp, oplist, list) {
            ...
            WARN_ON(kprobe_disabled(&op->kp));
            ...
            memcpy(op->optinsn.copied_insn, op->kp.addr,
            ...
            insn = arm_gen_branch((unsigned long)op->kp.addr,
                                  (unsigned long)op->optinsn.insn);
            ...
                    op->optinsn.copied_insn[0]) & 0xf0000000) |
            ...
            kprobes_remove_breakpoint(op->kp.addr, insn);
            ...
            list_del_init(&op->list);
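This loop is where live breakpoints are swapped for relative jumps into each probe's detour buffer. A sketch of the whole function with the conditional-branch fix-up spelled out; the constants and helpers are those visible in the fragments above, the guard condition and surrounding structure are assumptions based on the usual implementation:

    void arch_optimize_kprobes(struct list_head *oplist)
    {
            struct optimized_kprobe *op, *tmp;

            list_for_each_entry_safe(op, tmp, oplist, list) {
                    unsigned long insn;

                    WARN_ON(kprobe_disabled(&op->kp));

                    /* Back up the bytes the jump will overwrite, so they
                     * can be restored on unoptimize and single-stepped
                     * by the detour. */
                    memcpy(op->optinsn.copied_insn, op->kp.addr,
                           RELATIVEJUMP_SIZE);

                    /* Encode a relative branch from the probe site to
                     * the detour buffer. */
                    insn = arm_gen_branch((unsigned long)op->kp.addr,
                                          (unsigned long)op->optinsn.insn);

                    /* If the displaced instruction was conditional,
                     * carry its condition field (top four bits) over
                     * onto the new branch. */
                    if ((__mem_to_opcode_arm(op->optinsn.copied_insn[0]) &
                         0xf0000000) != 0xf0000000)
                            insn = (__mem_to_opcode_arm(
                                    op->optinsn.copied_insn[0]) & 0xf0000000) |
                                    (insn & 0x0fffffff);

                    /* Swap the breakpoint for the jump; like disarming,
                     * this is serialized against other CPUs. */
                    kprobes_remove_breakpoint(op->kp.addr, insn);

                    list_del_init(&op->list);
            }
    }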
In arch_unoptimize_kprobe():
    void arch_unoptimize_kprobe(struct optimized_kprobe *op)
    ...
    arch_arm_kprobe(&op->kp);
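Unoptimizing is the simple direction: re-arming the regular kprobe writes the breakpoint instruction back over the relative jump. A sketch of the full function under that assumption:

    void arch_unoptimize_kprobe(struct optimized_kprobe *op)
    {
            /* Re-arm the ordinary breakpoint, overwriting the relative
             * jump at op->kp.addr. The detour buffer itself is freed
             * later via arch_remove_optimized_kprobe(). */
            arch_arm_kprobe(&op->kp);
    }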
In arch_unoptimize_kprobes():
    struct optimized_kprobe *op, *tmp;
    ...
    list_for_each_entry_safe(op, tmp, oplist, list) {
            arch_unoptimize_kprobe(op);
            list_move(&op->list, done_list);
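The batch variant walks oplist with the _safe iterator, because each entry is unlinked mid-iteration, and hands every probe to done_list for the caller to reclaim. A sketch, assuming the conventional two-list signature:

    void arch_unoptimize_kprobes(struct list_head *oplist,
                                 struct list_head *done_list)
    {
            struct optimized_kprobe *op, *tmp;

            /* _safe iteration: list_move() unlinks op from oplist. */
            list_for_each_entry_safe(op, tmp, oplist, list) {
                    arch_unoptimize_kprobe(op);
                    list_move(&op->list, done_list);
            }
    }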
In arch_within_optimized_kprobe():
    int arch_within_optimized_kprobe(struct optimized_kprobe *op,
    ...
    return ((unsigned long)op->kp.addr <= addr &&
            (unsigned long)op->kp.addr + RELATIVEJUMP_SIZE > addr);
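This predicate answers whether addr falls inside the bytes the jump overwrote, i.e. the half-open window [op->kp.addr, op->kp.addr + RELATIVEJUMP_SIZE). A sketch; the unsigned long type of the second parameter is inferred from the comparison above:

    int arch_within_optimized_kprobe(struct optimized_kprobe *op,
                                     unsigned long addr)
    {
            /* True iff addr points into the instruction bytes that the
             * relative jump replaced at the probe site. */
            return ((unsigned long)op->kp.addr <= addr &&
                    (unsigned long)op->kp.addr + RELATIVEJUMP_SIZE > addr);
    }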
In arch_remove_optimized_kprobe():
    void arch_remove_optimized_kprobe(struct optimized_kprobe *op)
    ...
    __arch_remove_optimized_kprobe(op, 1);
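Removal delegates to the helper shown earlier with dirty = 1, since the detour slot was live. A sketch of the full function:

    void arch_remove_optimized_kprobe(struct optimized_kprobe *op)
    {
            /* dirty = 1: the slot was published, so let the allocator
             * defer reclaiming it until it is safe to do so. */
            __arch_remove_optimized_kprobe(op, 1);
    }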