Lines Matching refs:buff (arch/powerpc/kernel/optprobes.c); a condensed reconstruction of the full function follows the listing.
206 kprobe_opcode_t *buff, branch_op_callback, branch_emulate_step; in arch_prepare_optimized_kprobe() local
219 buff = get_ppc_optinsn_slot(); in arch_prepare_optimized_kprobe()
220 if (!buff) in arch_prepare_optimized_kprobe()
232 b_offset = (unsigned long)buff - (unsigned long)p->addr; in arch_prepare_optimized_kprobe()
237 b_offset = (unsigned long)(buff + TMPL_RET_IDX) - in arch_prepare_optimized_kprobe()
245 pr_devel("Copying template to %p, size %lu\n", buff, size); in arch_prepare_optimized_kprobe()
247 rc = patch_instruction(buff + i, *(optprobe_template_entry + i)); in arch_prepare_optimized_kprobe()
256 patch_imm64_load_insns((unsigned long)op, buff + TMPL_OP_IDX); in arch_prepare_optimized_kprobe()
268 branch_op_callback = create_branch((unsigned int *)buff + TMPL_CALL_HDLR_IDX, in arch_prepare_optimized_kprobe()
272 branch_emulate_step = create_branch((unsigned int *)buff + TMPL_EMULATE_IDX, in arch_prepare_optimized_kprobe()
279 patch_instruction(buff + TMPL_CALL_HDLR_IDX, branch_op_callback); in arch_prepare_optimized_kprobe()
280 patch_instruction(buff + TMPL_EMULATE_IDX, branch_emulate_step); in arch_prepare_optimized_kprobe()
285 patch_imm32_load_insns(*p->ainsn.insn, buff + TMPL_INSN_IDX); in arch_prepare_optimized_kprobe()
290 patch_branch(buff + TMPL_RET_IDX, (unsigned long)nip, 0); in arch_prepare_optimized_kprobe()
292 flush_icache_range((unsigned long)buff, in arch_prepare_optimized_kprobe()
293 (unsigned long)(&buff[TMPL_END_IDX])); in arch_prepare_optimized_kprobe()
295 op->optinsn.insn = buff; in arch_prepare_optimized_kprobe()
300 free_ppc_optinsn_slot(buff, 0); in arch_prepare_optimized_kprobe()
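
Taken in order, these references trace the full lifecycle of buff inside arch_prepare_optimized_kprobe(): allocate a detour-buffer slot, verify that both branches are within relative-branch range, copy the optprobe template into the buffer, patch in the immediate loads and the two call branches, flush the icache, and then either publish the buffer via op->optinsn.insn or free it on failure. The sketch below reconstructs that flow; the control flow between the cited lines is filled in from the upstream source (circa v4.11) and simplified, so treat it as illustrative rather than verbatim kernel code.

/*
 * Condensed reconstruction of arch_prepare_optimized_kprobe() from
 * arch/powerpc/kernel/optprobes.c (circa v4.11), showing the lifecycle
 * of buff across the references listed above. Comments cite the line
 * numbers from the listing; connective logic is simplified.
 */
int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p)
{
	kprobe_opcode_t *buff, branch_op_callback, branch_emulate_step; /* line 206 */
	kprobe_opcode_t *op_callback_addr, *emulate_step_addr;
	unsigned long nip, size;
	long b_offset;
	int rc, i;

	/* Bail out early if the probed instruction cannot be optimized. */
	nip = can_optimize(p);
	if (!nip)
		return -EILSEQ;

	/* Allocate an instruction slot for the detour buffer. */
	buff = get_ppc_optinsn_slot();                                  /* line 219 */
	if (!buff)                                                      /* line 220 */
		return -ENOMEM;

	/*
	 * The branch from the probe point into buff must be reachable
	 * with a relative branch (+/- 32MB)...
	 */
	b_offset = (unsigned long)buff - (unsigned long)p->addr;       /* line 232 */
	if (!is_offset_in_branch_range(b_offset))
		goto error;

	/* ...and so must the branch back out of the trampoline. */
	b_offset = (unsigned long)(buff + TMPL_RET_IDX) -               /* line 237 */
			(unsigned long)nip;
	if (!is_offset_in_branch_range(b_offset))
		goto error;

	/* Copy the optprobe template into the detour buffer. */
	size = (TMPL_END_IDX * sizeof(kprobe_opcode_t)) / sizeof(int);
	pr_devel("Copying template to %p, size %lu\n", buff, size);     /* line 245 */
	for (i = 0; i < size; i++) {
		rc = patch_instruction(buff + i, *(optprobe_template_entry + i)); /* line 247 */
		if (rc < 0)
			goto error;
	}

	/* 1. Load the address of this optimized_kprobe into a register. */
	patch_imm64_load_insns((unsigned long)op, buff + TMPL_OP_IDX); /* line 256 */

	/*
	 * 2. Resolve optimized_callback() and emulate_step(), then patch
	 * branch-and-link instructions to them. The lookup helper differs
	 * across kernel versions; this is the v4.11 form.
	 */
	kprobe_lookup_name("optimized_callback", op_callback_addr);
	kprobe_lookup_name("emulate_step", emulate_step_addr);
	if (!op_callback_addr || !emulate_step_addr)
		goto error;

	branch_op_callback = create_branch((unsigned int *)buff + TMPL_CALL_HDLR_IDX, /* line 268 */
				(unsigned long)op_callback_addr,
				BRANCH_SET_LINK);
	branch_emulate_step = create_branch((unsigned int *)buff + TMPL_EMULATE_IDX,  /* line 272 */
				(unsigned long)emulate_step_addr,
				BRANCH_SET_LINK);
	if (!branch_op_callback || !branch_emulate_step)
		goto error;

	patch_instruction(buff + TMPL_CALL_HDLR_IDX, branch_op_callback); /* line 279 */
	patch_instruction(buff + TMPL_EMULATE_IDX, branch_emulate_step);  /* line 280 */

	/* 3. Load the probed instruction for emulate_step() to emulate. */
	patch_imm32_load_insns(*p->ainsn.insn, buff + TMPL_INSN_IDX);  /* line 285 */

	/* 4. Branch back from the trampoline to the instruction after the probe. */
	patch_branch(buff + TMPL_RET_IDX, (unsigned long)nip, 0);      /* line 290 */

	/* Make the freshly written trampoline visible to instruction fetch. */
	flush_icache_range((unsigned long)buff,                         /* line 292 */
			(unsigned long)(&buff[TMPL_END_IDX]));          /* line 293 */

	op->optinsn.insn = buff;                                        /* line 295 */
	return 0;

error:
	/* Any failure path releases the slot so it can be reused. */
	free_ppc_optinsn_slot(buff, 0);                                 /* line 300 */
	return -ERANGE;
}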