
Lines Matching refs:op

36 struct optimized_kprobe *op; in __recover_optprobed_insn() local
45 op = container_of(kp, struct optimized_kprobe, kp); in __recover_optprobed_insn()
47 if (list_empty(&op->list)) in __recover_optprobed_insn()
65 memcpy(buf + 1, op->optinsn.copied_insn, RELATIVE_ADDR_SIZE); in __recover_optprobed_insn()
68 memcpy(buf, op->optinsn.copied_insn + offs, RELATIVE_ADDR_SIZE - offs); in __recover_optprobed_insn()
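
Judging by the function names, this listing appears to come from arch/x86/kernel/kprobes/opt.c, the x86 optimized-kprobe (optprobe) implementation. The lines above are from __recover_optprobed_insn(), which reconstructs the original instruction bytes when a lookup address falls inside the 5-byte relative jump an optprobe patched over the probe point: the displaced first opcode is kept in kp->opcode and the four tail bytes in op->optinsn.copied_insn. A minimal user-space sketch of the stitching, assuming that layout (recover_bytes and its parameters are illustrative stand-ins, not the kernel API):

    #include <stdint.h>
    #include <string.h>

    #define RELATIVE_ADDR_SIZE 4   /* rel32 operand bytes of a 5-byte jmp */

    /*
     * Illustrative mock, not the kernel function: rebuild the original
     * bytes for an address inside the 5-byte jump an optprobe wrote.
     * 'saved_opcode' is the displaced first byte, 'copied_insn' the
     * four displaced tail bytes.
     */
    static void recover_bytes(uint8_t *buf, unsigned long addr,
                              unsigned long probe_addr, uint8_t saved_opcode,
                              const uint8_t *copied_insn)
    {
        if (addr == probe_addr) {
            /* asked about the probe point itself: first byte + tail */
            buf[0] = saved_opcode;
            memcpy(buf + 1, copied_insn, RELATIVE_ADDR_SIZE);
        } else {
            /* asked about a byte inside the displaced tail */
            size_t offs = addr - probe_addr - 1;
            memcpy(buf, copied_insn + offs, RELATIVE_ADDR_SIZE - offs);
        }
    }
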
148 optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs) in optimized_callback() argument
151 if (kprobe_disabled(&op->kp)) in optimized_callback()
156 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
165 regs->ip = (unsigned long)op->kp.addr + INT3_SIZE; in optimized_callback()
168 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
170 opt_pre_handler(&op->kp, regs); in optimized_callback()
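
The optimized_callback() lines above are the detour template's replacement for the int3 trap: skip disabled probes, count a miss on reentry, otherwise fake the register state a real breakpoint would have produced (ip = kp.addr + INT3_SIZE, i.e. just past a one-byte int3), publish current_kprobe, and run the shared pre-handler. A mock of that flow; fake_regs and pre_handler stand in for pt_regs and opt_pre_handler():

    #define INT3_SIZE 1   /* one-byte 0xcc breakpoint */

    struct fake_regs { unsigned long ip; };   /* stand-in for pt_regs */

    /* Illustrative mock of the optimized_callback() flow. */
    static void mock_optimized_callback(unsigned long probe_addr, int disabled,
                                        struct fake_regs *regs,
                                        void (*pre_handler)(struct fake_regs *))
    {
        if (disabled)
            return;                        /* kprobe_disabled(): do nothing */

        /* make it look as if the int3 at probe_addr had just fired */
        regs->ip = probe_addr + INT3_SIZE;
        pre_handler(regs);                 /* opt_pre_handler() equivalent */
    }
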
310 int arch_check_optimized_kprobe(struct optimized_kprobe *op) in arch_check_optimized_kprobe() argument
315 for (i = 1; i < op->optinsn.size; i++) { in arch_check_optimized_kprobe()
316 p = get_kprobe(op->kp.addr + i); in arch_check_optimized_kprobe()
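
arch_check_optimized_kprobe() refuses to optimize when another active kprobe already sits inside the region the jump would displace; it checks every offset from 1 up to optinsn.size. The shape of that scan, with a hypothetical has_probe_at() standing in for get_kprobe() (the kernel additionally tolerates disabled probes):

    #include <stdbool.h>

    /* Illustrative: reject optimization if any displaced byte other
     * than offset 0 (the probe itself) hosts another probe. */
    static int check_no_overlapping_probe(unsigned long addr, int size,
                                          bool (*has_probe_at)(unsigned long))
    {
        for (int i = 1; i < size; i++)
            if (has_probe_at(addr + i))
                return -1;   /* the kernel returns -EEXIST here */
        return 0;
    }
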
325 int arch_within_optimized_kprobe(struct optimized_kprobe *op, in arch_within_optimized_kprobe() argument
328 return ((unsigned long)op->kp.addr <= addr && in arch_within_optimized_kprobe()
329 (unsigned long)op->kp.addr + op->optinsn.size > addr); in arch_within_optimized_kprobe()
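
arch_within_optimized_kprobe() is a half-open interval test: an address belongs to this optprobe if it lies in [kp.addr, kp.addr + optinsn.size). Restated directly:

    #include <stdbool.h>

    /* Illustrative: true if addr falls inside [base, base + size),
     * the bytes the optimized probe's jump displaced. */
    static bool within_optimized_range(unsigned long base, unsigned long size,
                                       unsigned long addr)
    {
        return base <= addr && base + size > addr;
    }
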
334 void __arch_remove_optimized_kprobe(struct optimized_kprobe *op, int dirty) in __arch_remove_optimized_kprobe() argument
336 if (op->optinsn.insn) { in __arch_remove_optimized_kprobe()
337 free_optinsn_slot(op->optinsn.insn, dirty); in __arch_remove_optimized_kprobe()
338 op->optinsn.insn = NULL; in __arch_remove_optimized_kprobe()
339 op->optinsn.size = 0; in __arch_remove_optimized_kprobe()
343 void arch_remove_optimized_kprobe(struct optimized_kprobe *op) in arch_remove_optimized_kprobe() argument
345 __arch_remove_optimized_kprobe(op, 1); in arch_remove_optimized_kprobe()
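
Teardown is symmetric: __arch_remove_optimized_kprobe() returns the detour slot to the insn-slot allocator and clears the bookkeeping so a second call is harmless; arch_remove_optimized_kprobe() passes dirty = 1 for a slot that may have been live, while the error path of arch_prepare_optimized_kprobe() (further down) passes 0 for a never-used one. The ownership pattern in miniature, with free() standing in for free_optinsn_slot():

    #include <stdlib.h>

    struct opt_insn { unsigned char *insn; int size; };

    /* Illustrative: release the detour buffer and reset state so a
     * double-remove is a no-op; 'dirty' would tell the kernel's slot
     * allocator whether the slot was ever executed. */
    static void remove_optinsn(struct opt_insn *oi, int dirty)
    {
        (void)dirty;                /* user-space stand-in ignores it */
        if (oi->insn) {
            free(oi->insn);         /* free_optinsn_slot() in the kernel */
            oi->insn = NULL;
            oi->size = 0;
        }
    }
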
353 int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, in arch_prepare_optimized_kprobe() argument
360 if (!can_optimize((unsigned long)op->kp.addr)) in arch_prepare_optimized_kprobe()
367 op->optinsn.insn = slot = get_optinsn_slot(); in arch_prepare_optimized_kprobe()
377 rel = (long)slot - (long)op->kp.addr + RELATIVEJUMP_SIZE; in arch_prepare_optimized_kprobe()
387 ret = copy_optimized_instructions(buf + TMPL_END_IDX, op->kp.addr, in arch_prepare_optimized_kprobe()
391 op->optinsn.size = ret; in arch_prepare_optimized_kprobe()
392 len = TMPL_END_IDX + op->optinsn.size; in arch_prepare_optimized_kprobe()
395 synthesize_set_arg1(buf + TMPL_MOVE_IDX, (unsigned long)op); in arch_prepare_optimized_kprobe()
403 (u8 *)op->kp.addr + op->optinsn.size); in arch_prepare_optimized_kprobe()
414 __arch_remove_optimized_kprobe(op, 0); in arch_prepare_optimized_kprobe()
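
arch_prepare_optimized_kprobe() builds the detour buffer: allocate a slot, check the slot is reachable from the probe with a rel32 jump (the displacement must fit in a signed 32 bits, roughly a ±2GB span), copy the displaced instructions after the template, and store op as the callback argument. A sketch of the reachability test; the kernel's own check compares abs(rel) against 0x7fffffff, this variant spells out the signed bounds:

    #include <stdint.h>
    #include <stdbool.h>

    #define RELATIVEJUMP_SIZE 5   /* e9 + rel32 */

    /* Illustrative: is 'slot' reachable from 'addr' with a rel32 jump?
     * The CPU measures the displacement from the end of the 5-byte jmp. */
    static bool rel32_reachable(long slot, long addr)
    {
        long rel = slot - (addr + RELATIVEJUMP_SIZE);

        return rel >= INT32_MIN && rel <= INT32_MAX;
    }
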
424 struct optimized_kprobe *op, *tmp; in arch_optimize_kprobes() local
427 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_optimize_kprobes()
428 s32 rel = (s32)((long)op->optinsn.insn - in arch_optimize_kprobes()
429 ((long)op->kp.addr + RELATIVEJUMP_SIZE)); in arch_optimize_kprobes()
431 WARN_ON(kprobe_disabled(&op->kp)); in arch_optimize_kprobes()
434 memcpy(op->optinsn.copied_insn, op->kp.addr + INT3_SIZE, in arch_optimize_kprobes()
440 text_poke_bp(op->kp.addr, insn_buff, RELATIVEJUMP_SIZE, in arch_optimize_kprobes()
441 op->optinsn.insn); in arch_optimize_kprobes()
443 list_del_init(&op->list); in arch_optimize_kprobes()
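
arch_optimize_kprobes() performs the live patch: for each prepared probe it computes the rel32 displacement from the probe point to its detour buffer, saves the four bytes about to be clobbered into copied_insn, assembles the 5-byte jmp, and hands it to text_poke_bp(), which patches kernel text atomically through a temporary int3. Assembling such a jump in user-space terms (0xe9 is the x86 near-jmp opcode, matching the kernel's RELATIVEJUMP_OPCODE):

    #include <stdint.h>
    #include <string.h>

    #define RELATIVEJUMP_OPCODE 0xe9
    #define RELATIVEJUMP_SIZE   5

    /* Illustrative: build the 5-byte "jmp rel32" an optimized kprobe
     * writes over the probe point. */
    static void synthesize_reljump(uint8_t buf[RELATIVEJUMP_SIZE],
                                   unsigned long from, unsigned long to)
    {
        int32_t rel = (int32_t)(to - (from + RELATIVEJUMP_SIZE));

        buf[0] = RELATIVEJUMP_OPCODE;
        memcpy(buf + 1, &rel, sizeof(rel));   /* little-endian rel32 */
    }
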
448 void arch_unoptimize_kprobe(struct optimized_kprobe *op) in arch_unoptimize_kprobe() argument
454 memcpy(insn_buff + 1, op->optinsn.copied_insn, RELATIVE_ADDR_SIZE); in arch_unoptimize_kprobe()
455 text_poke_bp(op->kp.addr, insn_buff, RELATIVEJUMP_SIZE, in arch_unoptimize_kprobe()
456 op->optinsn.insn); in arch_unoptimize_kprobe()
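
arch_unoptimize_kprobe() is the reverse patch: byte 0 becomes the int3 breakpoint again and the four displaced bytes are restored behind it, again via text_poke_bp(). Building that buffer; INT3_OPCODE here is an illustrative name for the 0xcc breakpoint byte:

    #include <stdint.h>
    #include <string.h>

    #define INT3_OPCODE        0xcc
    #define RELATIVE_ADDR_SIZE 4

    /* Illustrative: rebuild "int3 + original tail bytes", the buffer
     * arch_unoptimize_kprobe() patches back over the 5-byte jump. */
    static void build_unoptimize_buf(uint8_t buf[1 + RELATIVE_ADDR_SIZE],
                                     const uint8_t *copied_insn)
    {
        buf[0] = INT3_OPCODE;
        memcpy(buf + 1, copied_insn, RELATIVE_ADDR_SIZE);
    }
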
466 struct optimized_kprobe *op, *tmp; in arch_unoptimize_kprobes() local
468 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_unoptimize_kprobes()
469 arch_unoptimize_kprobe(op); in arch_unoptimize_kprobes()
470 list_move(&op->list, done_list); in arch_unoptimize_kprobes()
476 struct optimized_kprobe *op; in setup_detour_execution() local
480 op = container_of(p, struct optimized_kprobe, kp); in setup_detour_execution()
482 regs->ip = (unsigned long)op->optinsn.insn + TMPL_END_IDX; in setup_detour_execution()
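
Finally, setup_detour_execution() is the int3-handler fast path: when the hit kprobe is optimized, the saved ip is redirected into the detour buffer at TMPL_END_IDX, where the copied original instructions begin, so the trap return resumes through the copy instead of single-stepping. A mock of the redirection:

    struct fake_regs { unsigned long ip; };   /* stand-in for pt_regs */

    /* Illustrative: divert the trap return into the detour buffer.
     * 'tmpl_end' plays the role of TMPL_END_IDX, the offset where the
     * copied original instructions start after the template code. */
    static int mock_setup_detour(struct fake_regs *regs,
                                 unsigned long detour_buf,
                                 unsigned long tmpl_end)
    {
        regs->ip = detour_buf + tmpl_end;
        return 1;   /* tell the caller the trap was handled */
    }
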