Lines Matching +full:reg +full:-addr in arch/powerpc/kernel/optprobes.c
1 // SPDX-License-Identifier: GPL-2.0-or-later
16 #include <asm/code-patching.h>
18 #include <asm/ppc-opcode.h>
21 #define TMPL_CALL_HDLR_IDX (optprobe_template_call_handler - optprobe_template_entry)
22 #define TMPL_EMULATE_IDX (optprobe_template_call_emulate - optprobe_template_entry)
23 #define TMPL_RET_IDX (optprobe_template_ret - optprobe_template_entry)
24 #define TMPL_OP_IDX (optprobe_template_op_address - optprobe_template_entry)
25 #define TMPL_INSN_IDX (optprobe_template_insn - optprobe_template_entry)
26 #define TMPL_END_IDX (optprobe_template_end - optprobe_template_entry)
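These TMPL_*_IDX values are offsets, in instruction slots, of the fixup points inside the assembly detour template, measured from optprobe_template_entry. A minimal sketch of how such offsets are typically consumed once the template has been copied into a detour buffer (the memcpy shorthand and the choice of r3 as the argument register are assumptions here, not verbatim kernel code):

    /* Copy the whole template into the detour buffer... */
    memcpy(buff, optprobe_template_entry, TMPL_END_IDX * sizeof(kprobe_opcode_t));

    /* ...then fix up the copy: load &op into r3 (the first argument
     * register in the powerpc ELF ABI) at the reserved slot, so the
     * template can call optimized_callback(op, regs). */
    patch_imm_load_insns((unsigned long)op, 3, buff + TMPL_OP_IDX);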
44 * Check if we can optimize this probe. Returns NIP post-emulation if this can be optimized and 0 otherwise.
52 unsigned long addr = (unsigned long)p->addr; in can_optimize() local
59 if (p->addr == (kprobe_opcode_t *)&__kretprobe_trampoline) in can_optimize()
60 return addr + sizeof(kprobe_opcode_t); in can_optimize()
68 if (!is_kernel_addr(addr)) in can_optimize()
72 regs.nip = addr; in can_optimize()
87 if (!is_conditional_branch(ppc_inst_read(p->ainsn.insn)) && in can_optimize()
88 analyse_instr(&op, &regs, ppc_inst_read(p->ainsn.insn)) == 1) { in can_optimize()
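Together these fragments show the core optimizability test: the probed instruction must not be a conditional branch (the post-emulation NIP cannot be predicted from a dummy register set), and analyse_instr() must fully emulate it, signalled by a return value of 1. A sketch of the surrounding logic, assuming the usual shape of can_optimize():

    struct pt_regs regs;
    struct instruction_op op;

    memset(&regs, 0, sizeof(regs));
    regs.nip = addr;
    regs.msr = MSR_KERNEL;    /* assumed: emulate in a kernel MSR context */

    if (!is_conditional_branch(ppc_inst_read(p->ainsn.insn)) &&
        analyse_instr(&op, &regs, ppc_inst_read(p->ainsn.insn)) == 1)
        return regs.nip;    /* NIP after full emulation */

    return 0;    /* not optimizable */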
100 if (kprobe_disabled(&op->kp)) in optimized_callback()
106 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
108 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
109 regs_set_return_ip(regs, (unsigned long)op->kp.addr); in optimized_callback()
110 get_kprobe_ctlblk()->kprobe_status = KPROBE_HIT_ACTIVE; in optimized_callback()
111 opt_pre_handler(&op->kp, regs); in optimized_callback()
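A plausible reassembly of optimized_callback() from the fragments above; the preempt bracketing and the current_kprobe reset are assumptions based on the standard optprobe callback shape:

    static void optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
    {
        /* Possible if op is under delayed unoptimization. */
        if (kprobe_disabled(&op->kp))
            return;

        preempt_disable();
        if (kprobe_running()) {
            /* Hit while another kprobe is active: just count a miss. */
            kprobes_inc_nmissed_count(&op->kp);
        } else {
            __this_cpu_write(current_kprobe, &op->kp);
            /* Let the handler see the probed address as NIP. */
            regs_set_return_ip(regs, (unsigned long)op->kp.addr);
            get_kprobe_ctlblk()->kprobe_status = KPROBE_HIT_ACTIVE;
            opt_pre_handler(&op->kp, regs);
            __this_cpu_write(current_kprobe, NULL);
        }
        preempt_enable();
    }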
121 if (op->optinsn.insn) { in arch_remove_optimized_kprobe()
122 free_optinsn_slot(op->optinsn.insn, 1); in arch_remove_optimized_kprobe()
123 op->optinsn.insn = NULL; in arch_remove_optimized_kprobe()
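The removal path simply returns the detour buffer to the optinsn slot cache. A sketch of the presumed full helper:

    void arch_remove_optimized_kprobe(struct optimized_kprobe *op)
    {
        if (op->optinsn.insn) {
            /* The second argument marks the slot dirty for safe recycling. */
            free_optinsn_slot(op->optinsn.insn, 1);
            op->optinsn.insn = NULL;
        }
    }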
127 static void patch_imm32_load_insns(unsigned long val, int reg, kprobe_opcode_t *addr) in patch_imm32_load_insns() argument
129 patch_instruction(addr++, ppc_inst(PPC_RAW_LIS(reg, PPC_HI(val)))); in patch_imm32_load_insns()
130 patch_instruction(addr, ppc_inst(PPC_RAW_ORI(reg, reg, PPC_LO(val)))); in patch_imm32_load_insns()
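As an illustration (not kernel output), patch_imm32_load_insns(0x12345678, 4, addr) patches in the two-instruction sequence:

    /*
     *   lis  r4, 0x1234        -> r4 = 0x12340000   (PPC_HI(val))
     *   ori  r4, r4, 0x5678    -> r4 = 0x12345678   (PPC_LO(val))
     */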
134 * Generate instructions to load the provided 64-bit immediate value
135 * into register 'reg' and patch these instructions at 'addr'.
137 static void patch_imm64_load_insns(unsigned long long val, int reg, kprobe_opcode_t *addr) in patch_imm64_load_insns() argument
139 patch_instruction(addr++, ppc_inst(PPC_RAW_LIS(reg, PPC_HIGHEST(val)))); in patch_imm64_load_insns()
140 patch_instruction(addr++, ppc_inst(PPC_RAW_ORI(reg, reg, PPC_HIGHER(val)))); in patch_imm64_load_insns()
141 patch_instruction(addr++, ppc_inst(PPC_RAW_SLDI(reg, reg, 32))); in patch_imm64_load_insns()
142 patch_instruction(addr++, ppc_inst(PPC_RAW_ORIS(reg, reg, PPC_HI(val)))); in patch_imm64_load_insns()
143 patch_instruction(addr, ppc_inst(PPC_RAW_ORI(reg, reg, PPC_LO(val)))); in patch_imm64_load_insns()
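Worked example (an illustration): patch_imm64_load_insns(0x1234567890abcdefULL, 4, addr) builds the value 16 bits at a time:

    /*
     *   lis  r4, 0x1234        -> r4 = 0x0000000012340000  (PPC_HIGHEST(val))
     *   ori  r4, r4, 0x5678    -> r4 = 0x0000000012345678  (PPC_HIGHER(val))
     *   sldi r4, r4, 32        -> r4 = 0x1234567800000000
     *   oris r4, r4, 0x90ab    -> r4 = 0x1234567890ab0000  (PPC_HI(val))
     *   ori  r4, r4, 0xcdef    -> r4 = 0x1234567890abcdef  (PPC_LO(val))
     */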
146 static void patch_imm_load_insns(unsigned long val, int reg, kprobe_opcode_t *addr) in patch_imm_load_insns() argument
149 patch_imm64_load_insns(val, reg, addr); in patch_imm_load_insns()
151 patch_imm32_load_insns(val, reg, addr); in patch_imm_load_insns()
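Lines 149 and 151 are evidently the two arms of a word-size dispatch; a sketch of the presumed full helper:

    static void patch_imm_load_insns(unsigned long val, int reg, kprobe_opcode_t *addr)
    {
        if (IS_ENABLED(CONFIG_PPC64))
            patch_imm64_load_insns(val, reg, addr);
        else
            patch_imm32_load_insns(val, reg, addr);
    }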
165 return -EILSEQ; in arch_prepare_optimized_kprobe()
170 return -ENOMEM; in arch_prepare_optimized_kprobe()
181 b_offset = (unsigned long)buff - (unsigned long)p->addr; in arch_prepare_optimized_kprobe()
186 b_offset = (unsigned long)(buff + TMPL_RET_IDX) - nip; in arch_prepare_optimized_kprobe()
231 temp = ppc_inst_read(p->ainsn.insn); in arch_prepare_optimized_kprobe()
241 op->optinsn.insn = buff; in arch_prepare_optimized_kprobe()
247 return -ERANGE; in arch_prepare_optimized_kprobe()
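The arch_prepare_optimized_kprobe() fragments trace the preparation flow: bail out if the instruction cannot be emulated, grab a detour buffer, verify that both direct branches fit in the +/-32MB branch range, then patch the buffer. A condensed sketch; the error label and the exact ordering are assumptions, and only the lines shown above are verbatim:

    nip = can_optimize(p);
    if (!nip)
        return -EILSEQ;        /* instruction cannot be emulated */

    buff = get_optinsn_slot();
    if (!buff)
        return -ENOMEM;        /* no detour buffer available */

    /* Branch from the probed address into the detour buffer... */
    b_offset = (unsigned long)buff - (unsigned long)p->addr;
    if (!is_offset_in_branch_range(b_offset))
        goto error;

    /* ...and branch back from the detour buffer to the post-probe NIP. */
    b_offset = (unsigned long)(buff + TMPL_RET_IDX) - nip;
    if (!is_offset_in_branch_range(b_offset))
        goto error;

    /* Copy the template, patch in &op and the probed instruction. */
    temp = ppc_inst_read(p->ainsn.insn);
    op->optinsn.insn = buff;
    return 0;

error:
    free_optinsn_slot(buff, 0);
    return -ERANGE;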
253 return optinsn->insn != NULL; in arch_prepared_optinsn()
277 memcpy(op->optinsn.copied_insn, op->kp.addr, RELATIVEJUMP_SIZE); in arch_optimize_kprobes()
278 create_branch(&instr, op->kp.addr, (unsigned long)op->optinsn.insn, 0); in arch_optimize_kprobes()
279 patch_instruction(op->kp.addr, instr); in arch_optimize_kprobes()
280 list_del_init(&op->list); in arch_optimize_kprobes()
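These four lines form the body of the optimization pass; a sketch of the presumed list walker around them:

    list_for_each_entry_safe(op, tmp, oplist, list) {
        /* Save the original instruction so it can be restored later. */
        memcpy(op->optinsn.copied_insn, op->kp.addr, RELATIVEJUMP_SIZE);
        /* Replace it with a direct branch into the detour buffer. */
        create_branch(&instr, op->kp.addr, (unsigned long)op->optinsn.insn, 0);
        patch_instruction(op->kp.addr, instr);
        list_del_init(&op->list);
    }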
286 arch_arm_kprobe(&op->kp); in arch_unoptimize_kprobe()
296 list_move(&op->list, done_list); in arch_unoptimize_kprobes()
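Unoptimization is the reverse: arch_unoptimize_kprobe() re-arms the normal trap-based kprobe over the branch, and the walker presumably moves each handled entry onto done_list:

    void arch_unoptimize_kprobes(struct list_head *oplist, struct list_head *done_list)
    {
        struct optimized_kprobe *op;
        struct optimized_kprobe *tmp;

        list_for_each_entry_safe(op, tmp, oplist, list) {
            arch_unoptimize_kprobe(op);    /* restores the trap instruction */
            list_move(&op->list, done_list);
        }
    }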
300 int arch_within_optimized_kprobe(struct optimized_kprobe *op, kprobe_opcode_t *addr) in arch_within_optimized_kprobe() argument
302 return (op->kp.addr <= addr && in arch_within_optimized_kprobe()
303 op->kp.addr + (RELATIVEJUMP_SIZE / sizeof(kprobe_opcode_t)) > addr); in arch_within_optimized_kprobe()
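A note on this range check, assuming RELATIVEJUMP_SIZE is a single 4-byte branch on powerpc:

    /* With RELATIVEJUMP_SIZE == sizeof(kprobe_opcode_t), the half-open
     * range [kp.addr, kp.addr + 1) makes the test equivalent to
     * addr == op->kp.addr. */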