Lines Matching refs:op
66 struct instruction_op op; in can_optimize() local
103 analyse_instr(&op, &regs, *p->ainsn.insn) == 1) { in can_optimize()
104 emulate_update_regs(&regs, &op); in can_optimize()
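These two calls are the heart of can_optimize(): the probed instruction is analysed against a scratch register set, and the probe is only treated as optimizable when analyse_instr() reports it can be fully emulated (return value 1), after which emulate_update_regs() applies its effects to the scratch regs. A minimal stand-alone sketch of that calling pattern, with stubs standing in for the real kernel helpers:

#include <stdbool.h>

struct scratch_regs { unsigned long nip; };
struct insn_op { int type; };

/* Stubs in place of the kernel's analyse_instr()/emulate_update_regs(). */
static int analyse_instr_stub(struct insn_op *op, struct scratch_regs *regs,
			      unsigned int insn) { return 1; }
static void emulate_update_regs_stub(struct scratch_regs *regs,
				     struct insn_op *op) { regs->nip += 4; }

static bool can_emulate(unsigned long probe_addr, unsigned int insn)
{
	struct scratch_regs regs = { .nip = probe_addr };
	struct insn_op op;

	if (analyse_instr_stub(&op, &regs, insn) != 1)	/* 1 == fully emulated */
		return false;
	emulate_update_regs_stub(&regs, &op);		/* apply side effects */
	return true;
}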
111 static void optimized_callback(struct optimized_kprobe *op, in optimized_callback() argument
115 if (kprobe_disabled(&op->kp)) in optimized_callback()
121 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
123 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
124 regs->nip = (unsigned long)op->kp.addr; in optimized_callback()
126 opt_pre_handler(&op->kp, regs); in optimized_callback()
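Taken together, these lines show the dispatch logic of optimized_callback(): a disabled probe is ignored, a re-entrant hit only bumps the missed counter, and otherwise the probe is recorded as the current kprobe, regs->nip is pointed back at the probed address, and the pre-handler runs. A simplified userspace model of that flow; the types and helpers below are placeholders, not the kernel API:

#include <stdio.h>
#include <stdbool.h>

struct fake_regs { unsigned long nip; };
struct fake_kprobe { unsigned long addr; bool disabled; unsigned long nmissed; };

static struct fake_kprobe *current_kprobe;	/* per-CPU in the real kernel */

static void pre_handler(struct fake_kprobe *kp, struct fake_regs *regs)
{
	printf("probe hit at 0x%lx\n", kp->addr);
}

static void optimized_callback_model(struct fake_kprobe *kp, struct fake_regs *regs)
{
	if (kp->disabled)
		return;
	if (current_kprobe) {		/* re-entrant hit: just count it */
		kp->nmissed++;
		return;
	}
	current_kprobe = kp;
	regs->nip = kp->addr;		/* report the probe address as nip */
	pre_handler(kp, regs);
	current_kprobe = NULL;
}

The real callback also disables preemption around this sequence; the model keeps only the control flow visible in the listed lines.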
134 void arch_remove_optimized_kprobe(struct optimized_kprobe *op) in arch_remove_optimized_kprobe() argument
136 if (op->optinsn.insn) { in arch_remove_optimized_kprobe()
137 free_ppc_optinsn_slot(op->optinsn.insn, 1); in arch_remove_optimized_kprobe()
138 op->optinsn.insn = NULL; in arch_remove_optimized_kprobe()
190 int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p) in arch_prepare_optimized_kprobe() argument
242 patch_imm64_load_insns((unsigned long)op, buff + TMPL_OP_IDX); in arch_prepare_optimized_kprobe()
281 op->optinsn.insn = buff; in arch_prepare_optimized_kprobe()
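Line 242 is what ties the detour buffer back to its kprobe: patch_imm64_load_insns() embeds the 64-bit op pointer into the instruction template at TMPL_OP_IDX so the emitted code can load it into a register before calling optimized_callback(); line 281 then records the finished buffer. As a rough model only (not the real ppc64 instruction encoding), the pointer can be split into 16-bit chunks and written into reserved slots of a template:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical 4-slot template region; the real template holds ppc64
 * load-immediate instructions whose 16-bit fields are patched in place. */
static void patch_imm64_model(uint64_t val, uint16_t slot[4])
{
	for (int i = 0; i < 4; i++)
		slot[i] = (uint16_t)(val >> (48 - 16 * i));
}

int main(void)
{
	uint16_t slot[4];

	patch_imm64_model(0x1122334455667788ull, slot);
	printf("%04x %04x %04x %04x\n", slot[0], slot[1], slot[2], slot[3]);
	return 0;	/* prints: 1122 3344 5566 7788 */
}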
301 int arch_check_optimized_kprobe(struct optimized_kprobe *op) in arch_check_optimized_kprobe() argument
308 struct optimized_kprobe *op; in arch_optimize_kprobes() local
311 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_optimize_kprobes()
316 memcpy(op->optinsn.copied_insn, op->kp.addr, in arch_optimize_kprobes()
318 patch_instruction(op->kp.addr, in arch_optimize_kprobes()
319 create_branch((unsigned int *)op->kp.addr, in arch_optimize_kprobes()
320 (unsigned long)op->optinsn.insn, 0)); in arch_optimize_kprobes()
321 list_del_init(&op->list); in arch_optimize_kprobes()
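Lines 316-320 are the actual optimization step: the original instruction at the probe site is backed up into op->optinsn.copied_insn, and patch_instruction() then overwrites the site with a branch (built by create_branch()) into the detour buffer. A stand-alone model of that back-up-then-patch sequence; make_branch() below only mimics the ppc I-form branch encoding and is not the kernel helper:

#include <stdint.h>
#include <string.h>

#define RELATIVEJUMP_SIZE 4	/* one 32-bit ppc instruction */

struct opt_probe_model {
	uint32_t *addr;			/* probed instruction */
	uint32_t copied_insn[1];	/* backup of the original word */
	uint32_t *detour;		/* optinsn.insn buffer */
};

/* Rough stand-in for create_branch(): "b target", AA=0, LK=0, no range check. */
static uint32_t make_branch(uint32_t *site, uint32_t *target)
{
	long off = (char *)target - (char *)site;

	return 0x48000000u | ((uint32_t)off & 0x03fffffcu);
}

static void optimize_one(struct opt_probe_model *op)
{
	memcpy(op->copied_insn, op->addr, RELATIVEJUMP_SIZE);	/* back up */
	*op->addr = make_branch(op->addr, op->detour);		/* patch site */
}

Keeping the backup first is what lets arch_unoptimize_kprobe() later fall back to the regular trap-based kprobe without losing the original instruction.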
325 void arch_unoptimize_kprobe(struct optimized_kprobe *op) in arch_unoptimize_kprobe() argument
327 arch_arm_kprobe(&op->kp); in arch_unoptimize_kprobe()
333 struct optimized_kprobe *op; in arch_unoptimize_kprobes() local
336 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_unoptimize_kprobes()
337 arch_unoptimize_kprobe(op); in arch_unoptimize_kprobes()
338 list_move(&op->list, done_list); in arch_unoptimize_kprobes()
342 int arch_within_optimized_kprobe(struct optimized_kprobe *op, in arch_within_optimized_kprobe() argument
345 return ((unsigned long)op->kp.addr <= addr && in arch_within_optimized_kprobe()
346 (unsigned long)op->kp.addr + RELATIVEJUMP_SIZE > addr); in arch_within_optimized_kprobe()
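The final check simply tests whether addr falls inside the half-open range [kp.addr, kp.addr + RELATIVEJUMP_SIZE); since a single 4-byte instruction is replaced here, the window is one word. A trivial stand-alone version of that test (RELATIVEJUMP_SIZE assumed to be 4):

#include <stdbool.h>
#include <stdio.h>

#define RELATIVEJUMP_SIZE 4	/* assumed: one 32-bit instruction */

static bool within_optimized_kprobe(unsigned long kp_addr, unsigned long addr)
{
	return kp_addr <= addr && kp_addr + RELATIVEJUMP_SIZE > addr;
}

int main(void)
{
	printf("%d\n", within_optimized_kprobe(0x1000, 0x1002));	/* 1: inside */
	printf("%d\n", within_optimized_kprobe(0x1000, 0x1004));	/* 0: past end */
	return 0;
}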