Lines matching refs: insn (arch/x86/kernel/kprobes/core.c)
115 } __packed *insn; in __synthesize_relative_insn() local
117 insn = (struct __arch_relative_insn *)dest; in __synthesize_relative_insn()
118 insn->raddr = (s32)((long)(to) - ((long)(from) + 5)); in __synthesize_relative_insn()
119 insn->op = op; in __synthesize_relative_insn()
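
The raddr computation above is the standard rel32 encoding for a 5-byte jmp/call: the displacement is measured from the end of the instruction. Below is a minimal userspace sketch of the same math; the names rel_insn and synthesize_rel are illustrative, not the kernel's, and plain memcpy() stands in for text_poke().

#include <stdint.h>
#include <string.h>

struct __attribute__((packed)) rel_insn {
	uint8_t op;     /* 0xe9 = jmp rel32, 0xe8 = call rel32 */
	int32_t raddr;  /* signed displacement from the end of the 5-byte insn */
};

void synthesize_rel(void *dest, void *from, void *to, uint8_t op)
{
	struct rel_insn insn = {
		.op    = op,
		.raddr = (int32_t)((long)to - ((long)from + 5)),
	};

	/* The kernel patches text through text_poke(); memcpy() is only for the sketch. */
	memcpy(dest, &insn, sizeof(insn));
}
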
140 int can_boost(struct insn *insn, void *addr) in can_boost() argument
150 if (insn->opcode.nbytes == 2) in can_boost()
151 return test_bit(insn->opcode.bytes[1], in can_boost()
154 if (insn->opcode.nbytes != 1) in can_boost()
157 for_each_insn_prefix(insn, i, prefix) { in can_boost()
166 opcode = insn->opcode.bytes[0]; in can_boost()
187 return X86_MODRM_REG(insn->modrm.bytes[0]) == 4; in can_boost()
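
can_boost() classifies single-byte opcode 0xff (group 5) by its ModRM reg field; only the /4 form (indirect near jmp) is treated as boostable. A sketch of the field extraction follows, using macro names that are assumptions for the sketch rather than the kernel's X86_MODRM_* helpers.

#include <stdint.h>

#define MODRM_MOD(b)	(((b) >> 6) & 0x3)
#define MODRM_REG(b)	(((b) >> 3) & 0x7)
#define MODRM_RM(b)	((b) & 0x7)

/* Group 5 (opcode 0xff): reg == 4 selects "jmp r/m", the only boostable form. */
int ff_group_is_boostable(uint8_t modrm)
{
	return MODRM_REG(modrm) == 4;
}
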
259 struct insn insn; in can_probe() local
282 ret = insn_decode_kernel(&insn, (void *)__addr); in can_probe()
291 if (insn.opcode.bytes[0] == INT3_INSN_OPCODE && in can_probe()
295 addr += insn.length; in can_probe()
315 if (insn_decode_kernel(&insn, (void *)__addr) < 0) in can_probe()
318 if (insn.opcode.value == 0xBA) in can_probe()
320 else if (insn.opcode.value == 0x3) in can_probe()
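
can_probe() verifies that the probe address is an instruction boundary by decoding forward from the start of the function and stepping by each decoded length (the 0xBA/0x3 opcode checks above guard compiler-generated control-flow-integrity check sequences). A sketch of the boundary walk, with decode_len supplied by the caller as a stand-in for insn_decode_kernel():

#include <stdint.h>

/*
 * Walk from the start of the function; the probe address is acceptable only
 * if the walk lands exactly on it.  decode_len() must return the length of
 * the instruction at 'p', or <= 0 on failure.
 */
int on_insn_boundary(const uint8_t *func_start, const uint8_t *addr,
		     int (*decode_len)(const uint8_t *p))
{
	const uint8_t *p = func_start;

	while (p < addr) {
		int len = decode_len(p);

		if (len <= 0)
			return 0;	/* undecodable: refuse to probe */
		p += len;
	}
	return p == addr;		/* must hit the address exactly */
}
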
357 int __copy_instruction(u8 *dest, u8 *src, u8 *real, struct insn *insn) in __copy_instruction() argument
363 if (!recovered_insn || !insn) in __copy_instruction()
371 ret = insn_decode_kernel(insn, dest); in __copy_instruction()
376 if (insn_has_emulate_prefix(insn)) in __copy_instruction()
380 if (insn->opcode.bytes[0] == INT3_INSN_OPCODE) in __copy_instruction()
384 if (insn_masking_exception(insn)) in __copy_instruction()
389 if (insn_rip_relative(insn)) { in __copy_instruction()
404 newdisp = (u8 *) src + (s64) insn->displacement.value in __copy_instruction()
410 disp = (u8 *) dest + insn_offset_displacement(insn); in __copy_instruction()
414 return insn->length; in __copy_instruction()
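
For RIP-relative operands, __copy_instruction() rebases the 32-bit displacement so the copy, executing from the instruction slot, still reaches the original target. Both the original and the copy add their own length to RIP, so the length terms cancel and new_disp = old_disp + (src - real). A sketch of that arithmetic with illustrative names:

#include <stdint.h>

/*
 * real + len + new_disp must equal src + len + old_disp, hence
 * new_disp = old_disp + (src - real).  The kernel additionally rejects the
 * probe if the rebased value no longer fits in a signed 32-bit displacement.
 */
int32_t rebase_rip_disp(const uint8_t *src, const uint8_t *real, int32_t old_disp)
{
	int64_t new_disp = (int64_t)old_disp + (src - real);

	return (int32_t)new_disp;
}
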
419 struct insn *insn) in prepare_singlestep() argument
421 int len = insn->length; in prepare_singlestep()
424 !p->post_handler && can_boost(insn, p->addr) && in prepare_singlestep()
430 synthesize_reljump(buf + len, p->ainsn.insn + len, in prepare_singlestep()
431 p->addr + insn->length); in prepare_singlestep()
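
When the instruction is boostable, prepare_singlestep() appends a relative jump right after the copy so execution falls through to the next original instruction without a second trap. A sketch of that layout, reusing the synthesize_rel() sketch shown earlier; all names are illustrative.

#include <stdint.h>

void synthesize_rel(void *dest, void *from, void *to, uint8_t op); /* sketch above */

void lay_out_boosted_slot(uint8_t *buf, uint8_t *slot, uint8_t *probe_addr, int len)
{
	/* buf already holds the copied (and displacement-fixed) instruction. */
	synthesize_rel(buf + len,		/* written into the staging buffer   */
		       slot + len,		/* ...but will execute from the slot */
		       probe_addr + len,	/* resume at the next original insn  */
		       0xe9);			/* jmp rel32 */
}
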
591 static int prepare_emulation(struct kprobe *p, struct insn *insn) in prepare_emulation() argument
593 insn_byte_t opcode = insn->opcode.bytes[0]; in prepare_emulation()
621 if (insn->immediate.nbytes == 2) in prepare_emulation()
622 p->ainsn.rel32 = *(s16 *)&insn->immediate.value; in prepare_emulation()
624 p->ainsn.rel32 = *(s32 *)&insn->immediate.value; in prepare_emulation()
629 if (insn->immediate.nbytes == 1) in prepare_emulation()
630 p->ainsn.rel32 = *(s8 *)&insn->immediate.value; in prepare_emulation()
631 else if (insn->immediate.nbytes == 2) in prepare_emulation()
632 p->ainsn.rel32 = *(s16 *)&insn->immediate.value; in prepare_emulation()
634 p->ainsn.rel32 = *(s32 *)&insn->immediate.value; in prepare_emulation()
640 p->ainsn.rel32 = insn->immediate.value; in prepare_emulation()
643 opcode = insn->opcode.bytes[1]; in prepare_emulation()
648 if (insn->immediate.nbytes == 2) in prepare_emulation()
649 p->ainsn.rel32 = *(s16 *)&insn->immediate.value; in prepare_emulation()
651 p->ainsn.rel32 = *(s32 *)&insn->immediate.value; in prepare_emulation()
653 X86_MODRM_REG(insn->modrm.bytes[0]) == 0 && in prepare_emulation()
654 X86_MODRM_MOD(insn->modrm.bytes[0]) == 3) { in prepare_emulation()
665 p->ainsn.loop.asize = insn->addr_bytes * 8; in prepare_emulation()
666 p->ainsn.rel32 = *(s8 *)&insn->immediate.value; in prepare_emulation()
673 opcode = insn->modrm.bytes[0]; in prepare_emulation()
689 if (insn->addr_bytes != sizeof(unsigned long)) in prepare_emulation()
696 if (X86_REX_B(insn->rex_prefix.value)) in prepare_emulation()
703 p->ainsn.size = insn->length; in prepare_emulation()
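
The repeated immediate handling above is sign extension: the decoded branch immediate may be 1, 2, or 4 bytes wide, and each width must be widened into the cached rel32 with its sign preserved. A compact equivalent (names illustrative):

#include <stdint.h>

int32_t sext_immediate(uint32_t raw, int nbytes)
{
	switch (nbytes) {
	case 1:
		return (int8_t)raw;	/* rel8: short jcc, loop, jcxz */
	case 2:
		return (int16_t)raw;	/* rel16: 16-bit operand size  */
	default:
		return (int32_t)raw;	/* rel32 */
	}
}
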
710 struct insn insn; in arch_copy_kprobe() local
715 len = __copy_instruction(buf, p->addr, p->ainsn.insn, &insn); in arch_copy_kprobe()
720 ret = prepare_emulation(p, &insn); in arch_copy_kprobe()
725 len = prepare_singlestep(buf, p, &insn); in arch_copy_kprobe()
733 perf_event_text_poke(p->ainsn.insn, NULL, 0, buf, len); in arch_copy_kprobe()
736 text_poke(p->ainsn.insn, buf, len); in arch_copy_kprobe()
754 p->ainsn.insn = get_insn_slot(); in arch_prepare_kprobe()
755 if (!p->ainsn.insn) in arch_prepare_kprobe()
760 free_insn_slot(p->ainsn.insn, 0); in arch_prepare_kprobe()
761 p->ainsn.insn = NULL; in arch_prepare_kprobe()
787 if (p->ainsn.insn) { in arch_remove_kprobe()
789 perf_event_text_poke(p->ainsn.insn, p->ainsn.insn, in arch_remove_kprobe()
791 free_insn_slot(p->ainsn.insn, p->ainsn.boostable); in arch_remove_kprobe()
792 p->ainsn.insn = NULL; in arch_remove_kprobe()
859 regs->ip = (unsigned long)p->ainsn.insn; in setup_singlestep()
878 regs->ip = (unsigned long)p->ainsn.insn; in setup_singlestep()
903 unsigned long copy_ip = (unsigned long)p->ainsn.insn; in resume_singlestep()
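
setup_singlestep() redirects regs->ip into the copied-instruction slot; when the step completes, resume_singlestep() translates the trapped IP from slot coordinates back to the original text. A sketch of that rebase only (the kernel also compensates for how the step was trapped); names are illustrative.

unsigned long rebase_trapped_ip(unsigned long trapped_ip,
				unsigned long copy_ip, unsigned long orig_ip)
{
	/* Same offset into the instruction, expressed against the original address. */
	return trapped_ip - copy_ip + orig_ip;
}
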
1000 if ((unsigned long)p->ainsn.insn < regs->ip && in kprobe_int3_handler()
1001 (unsigned long)p->ainsn.insn + MAX_INSN_SIZE > regs->ip) { in kprobe_int3_handler()
1018 if (unlikely(regs->ip == (unsigned long)cur->ainsn.insn)) { in kprobe_fault_handler()
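
Both handlers key off where the faulting IP sits relative to the slot: kprobe_int3_handler() tests for an IP strictly inside (slot, slot + MAX_INSN_SIZE), while kprobe_fault_handler() compares against the slot start. A sketch of the interval test, assuming the x86 MAX_INSN_SIZE of 15 bytes:

#define MAX_INSN_SIZE 15

int ip_in_copied_slot(unsigned long ip, unsigned long slot)
{
	return ip > slot && ip < slot + MAX_INSN_SIZE;
}
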