Lines in kernel/kprobes.c matching references to 'kp'

360 static inline void set_kprobe_instance(struct kprobe *kp)  in set_kprobe_instance()  argument
362 __this_cpu_write(kprobe_instance, kp); in set_kprobe_instance()
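
For context, set_kprobe_instance() above writes a per-CPU slot that kprobe fault handling uses to identify which probe's handler was running; a sketch of the surrounding definitions, matching what recent kernels carry in kernel/kprobes.c:

	static DEFINE_PER_CPU(struct kprobe *, kprobe_instance);

	/* Record the probe whose handler is executing on this CPU ... */
	static inline void set_kprobe_instance(struct kprobe *kp)
	{
		__this_cpu_write(kprobe_instance, kp);
	}

	/* ... and clear the slot once the handler has returned. */
	static inline void reset_kprobe_instance(void)
	{
		__this_cpu_write(kprobe_instance, NULL);
	}
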
424 struct kprobe *kp; in opt_pre_handler() local
426 list_for_each_entry_rcu(kp, &p->list, list) { in opt_pre_handler()
427 if (kp->pre_handler && likely(!kprobe_disabled(kp))) { in opt_pre_handler()
428 set_kprobe_instance(kp); in opt_pre_handler()
429 kp->pre_handler(kp, regs); in opt_pre_handler()
441 op = container_of(p, struct optimized_kprobe, kp); in free_aggr_kprobe()
453 op = container_of(p, struct optimized_kprobe, kp); in kprobe_optready()
469 op = container_of(p, struct optimized_kprobe, kp); in kprobe_disarmed()
480 op = container_of(p, struct optimized_kprobe, kp); in kprobe_queued()
502 op = container_of(p, struct optimized_kprobe, kp); in get_optimized_kprobe()
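
All of the container_of() calls above depend on struct kprobe being embedded as the kp member of struct optimized_kprobe; a sketch of the pattern, with the struct excerpted from include/linux/kprobes.h (CONFIG_OPTPROBES builds):

	/* Excerpt: the optimized probe embeds a plain kprobe as its first member. */
	struct optimized_kprobe {
		struct kprobe kp;
		struct list_head list;	/* list for optimizing queue */
		struct arch_optimized_insn optinsn;
	};

	/* Given a 'struct kprobe *p' known to be such a kp member, container_of()
	 * subtracts the member offset to recover the enclosing object: */
	struct optimized_kprobe *op = container_of(p, struct optimized_kprobe, kp);
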
566 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in do_unoptimize_kprobes()
568 if (kprobe_disabled(&op->kp)) in do_unoptimize_kprobes()
569 arch_disarm_kprobe(&op->kp); in do_unoptimize_kprobes()
570 if (kprobe_unused(&op->kp)) { in do_unoptimize_kprobes()
576 hlist_del_rcu(&op->kp.hlist); in do_unoptimize_kprobes()
589 if (WARN_ON_ONCE(!kprobe_unused(&op->kp))) { in do_free_cleaned_kprobes()
596 free_aggr_kprobe(&op->kp); in do_free_cleaned_kprobes()
691 op = container_of(p, struct optimized_kprobe, kp); in optimize_kprobe()
698 if (op->kp.flags & KPROBE_FLAG_OPTIMIZED) { in optimize_kprobe()
705 op->kp.flags |= KPROBE_FLAG_OPTIMIZED; in optimize_kprobe()
723 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in force_unoptimize_kprobe()
734 op = container_of(p, struct optimized_kprobe, kp); in unoptimize_kprobe()
752 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in unoptimize_kprobe()
776 op = container_of(ap, struct optimized_kprobe, kp); in reuse_unused_kprobe()
793 op = container_of(p, struct optimized_kprobe, kp); in kill_optimized_kprobe()
797 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in kill_optimized_kprobe()
807 hlist_del_rcu(&op->kp.hlist); in kill_optimized_kprobe()
826 op = container_of(p, struct optimized_kprobe, kp); in prepare_optimized_kprobe()
840 op->kp.addr = p->addr; in alloc_aggr_kprobe()
843 return &op->kp; in alloc_aggr_kprobe()
870 op = container_of(ap, struct optimized_kprobe, kp); in try_to_optimize_kprobe()
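
The KPROBE_FLAG_OPTIMIZED bit set and cleared throughout these functions is tested with a one-line accessor; a sketch matching kprobe_optimized() in include/linux/kprobes.h:

	/* True while the probe is backed by a jump-optimized trampoline. */
	static inline int kprobe_optimized(struct kprobe *p)
	{
		return p->flags & KPROBE_FLAG_OPTIMIZED;
	}
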
1163 static int arm_kprobe(struct kprobe *kp) in arm_kprobe() argument
1165 if (unlikely(kprobe_ftrace(kp))) in arm_kprobe()
1166 return arm_kprobe_ftrace(kp); in arm_kprobe()
1170 __arm_kprobe(kp); in arm_kprobe()
1177 static int disarm_kprobe(struct kprobe *kp, bool reopt) in disarm_kprobe() argument
1179 if (unlikely(kprobe_ftrace(kp))) in disarm_kprobe()
1180 return disarm_kprobe_ftrace(kp); in disarm_kprobe()
1184 __disarm_kprobe(kp, reopt); in disarm_kprobe()
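
The lines elided from arm_kprobe() and disarm_kprobe() above (1167-1169 and 1181-1183) take the locks that kernel text patching requires; a hedged reconstruction of arm_kprobe() as recent kernels have it (disarm_kprobe() mirrors it around __disarm_kprobe(kp, reopt)):

	static int arm_kprobe(struct kprobe *kp)
	{
		/* Probes on ftrace entry sites are armed through ftrace instead. */
		if (unlikely(kprobe_ftrace(kp)))
			return arm_kprobe_ftrace(kp);

		cpus_read_lock();
		mutex_lock(&text_mutex);	/* serialize kernel text modification */
		__arm_kprobe(kp);
		mutex_unlock(&text_mutex);
		cpus_read_unlock();

		return 0;
	}
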
1197 struct kprobe *kp; in aggr_pre_handler() local
1199 list_for_each_entry_rcu(kp, &p->list, list) { in aggr_pre_handler()
1200 if (kp->pre_handler && likely(!kprobe_disabled(kp))) { in aggr_pre_handler()
1201 set_kprobe_instance(kp); in aggr_pre_handler()
1202 if (kp->pre_handler(kp, regs)) in aggr_pre_handler()
1214 struct kprobe *kp; in aggr_post_handler() local
1216 list_for_each_entry_rcu(kp, &p->list, list) { in aggr_post_handler()
1217 if (kp->post_handler && likely(!kprobe_disabled(kp))) { in aggr_post_handler()
1218 set_kprobe_instance(kp); in aggr_post_handler()
1219 kp->post_handler(kp, regs, flags); in aggr_post_handler()
1229 struct kprobe *kp; in kprobes_inc_nmissed_count() local
1234 list_for_each_entry_rcu(kp, &p->list, list) in kprobes_inc_nmissed_count()
1235 kp->nmissed++; in kprobes_inc_nmissed_count()
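
The aggregate handlers above fan out to the per-probe callbacks that a module supplies; a minimal self-contained sketch of such a module, modeled on samples/kprobes/kprobe_example.c (the probed symbol is an arbitrary example):

	#include <linux/kernel.h>
	#include <linux/module.h>
	#include <linux/kprobes.h>

	static struct kprobe kp = {
		.symbol_name = "kernel_clone",	/* arbitrary example target */
	};

	/* Runs before the probed instruction; return 0 to continue normally. */
	static int handler_pre(struct kprobe *p, struct pt_regs *regs)
	{
		pr_info("pre: hit %s at %p\n", p->symbol_name, p->addr);
		return 0;
	}

	/* Runs after the probed instruction has been single-stepped. */
	static void handler_post(struct kprobe *p, struct pt_regs *regs,
				 unsigned long flags)
	{
		pr_info("post: %s\n", p->symbol_name);
	}

	static int __init kp_example_init(void)
	{
		kp.pre_handler = handler_pre;
		kp.post_handler = handler_post;
		return register_kprobe(&kp);	/* arms the probe via arm_kprobe() */
	}

	static void __exit kp_example_exit(void)
	{
		unregister_kprobe(&kp);
	}

	module_init(kp_example_init);
	module_exit(kp_example_exit);
	MODULE_LICENSE("GPL");

If a second kprobe is registered at the same address, the core replaces the single probe with an aggregate whose handlers are the aggr_* functions listed above, which then iterate p->list exactly as shown.
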
1680 struct kprobe *kp; in aggr_kprobe_disabled() local
1684 list_for_each_entry(kp, &ap->list, list) in aggr_kprobe_disabled()
1685 if (!kprobe_disabled(kp)) in aggr_kprobe_disabled()
2044 __this_cpu_write(current_kprobe, &rp->kp); in __kretprobe_trampoline_handler()
2080 struct kretprobe *rp = container_of(p, struct kretprobe, kp); in pre_handler_kretprobe()
2111 struct kretprobe *rp = container_of(p, struct kretprobe, kp); in pre_handler_kretprobe()
2143 __this_cpu_write(current_kprobe, &rp->kp); in kretprobe_rethook_handler()
2190 ret = kprobe_on_func_entry(rp->kp.addr, rp->kp.symbol_name, rp->kp.offset); in register_kretprobe()
2195 if (rp->kp.addr && warn_kprobe_rereg(&rp->kp)) in register_kretprobe()
2199 addr = kprobe_addr(&rp->kp); in register_kretprobe()
2212 rp->kp.pre_handler = pre_handler_kretprobe; in register_kretprobe()
2213 rp->kp.post_handler = NULL; in register_kretprobe()
2240 ret = register_kprobe(&rp->kp); in register_kretprobe()
2267 ret = register_kprobe(&rp->kp); in register_kretprobe()
2307 if (__unregister_kprobe_top(&rps[i]->kp) < 0) in unregister_kretprobes()
2308 rps[i]->kp.addr = NULL; in unregister_kretprobes()
2319 if (rps[i]->kp.addr) { in unregister_kretprobes()
2320 __unregister_kprobe_bottom(&rps[i]->kp); in unregister_kretprobes()
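
From the caller's side, register_kretprobe() above fills in rp->kp.pre_handler and then registers the embedded kprobe; a minimal hedged sketch of a kretprobe module (the probe target is an arbitrary example):

	#include <linux/kernel.h>
	#include <linux/module.h>
	#include <linux/kprobes.h>
	#include <linux/ptrace.h>

	/* Runs when the probed function returns; report its return value. */
	static int ret_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
	{
		pr_info("kernel_clone returned %lu\n", regs_return_value(regs));
		return 0;
	}

	static struct kretprobe my_kretprobe = {
		.handler	 = ret_handler,
		.maxactive	 = 20,			/* concurrent return instances */
		.kp.symbol_name	 = "kernel_clone",	/* arbitrary example target */
	};

	static int __init krp_example_init(void)
	{
		return register_kretprobe(&my_kretprobe);
	}

	static void __exit krp_example_exit(void)
	{
		unregister_kretprobe(&my_kretprobe);
	}

	module_init(krp_example_init);
	module_exit(krp_example_exit);
	MODULE_LICENSE("GPL");
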
2363 struct kprobe *kp; in kill_kprobe() local
2373 list_for_each_entry(kp, &p->list, list) in kill_kprobe()
2374 kp->flags |= KPROBE_FLAG_GONE; in kill_kprobe()
2394 int disable_kprobe(struct kprobe *kp) in disable_kprobe() argument
2402 p = __disable_kprobe(kp); in disable_kprobe()
2412 int enable_kprobe(struct kprobe *kp) in enable_kprobe() argument
2420 p = __get_valid_kprobe(kp); in enable_kprobe()
2426 if (kprobe_gone(kp)) { in enable_kprobe()
2432 if (p != kp) in enable_kprobe()
2433 kp->flags &= ~KPROBE_FLAG_DISABLED; in enable_kprobe()
2440 if (p != kp) in enable_kprobe()
2441 kp->flags |= KPROBE_FLAG_DISABLED; in enable_kprobe()
2451 void dump_kprobe(struct kprobe *kp) in dump_kprobe() argument
2454 kp->symbol_name, kp->offset, kp->addr); in dump_kprobe()
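
A hedged usage sketch for the enable/disable pair, assuming the 'kp' probe from the module sketch above is already registered:

	/* Temporarily mute the probe without tearing it down: disable_kprobe()
	 * sets KPROBE_FLAG_DISABLED (disarming if no other user needs the slot),
	 * enable_kprobe() clears it and re-arms via arm_kprobe(). */
	static void quiesce_probe_window(void)
	{
		disable_kprobe(&kp);
		/* ... section where the handlers must not fire ... */
		enable_kprobe(&kp);
	}
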
2813 struct kprobe *p, *kp; in show_kprobe_addr() local
2825 list_for_each_entry_rcu(kp, &p->list, list) in show_kprobe_addr()
2826 report_probe(pi, kp, sym, offset, modname, p); in show_kprobe_addr()
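
show_kprobe_addr() is the seq_file '->show' callback behind /sys/kernel/debug/kprobes/list; for an aggregated probe it walks p->list and reports each chained kp through report_probe(), one line per registered probe.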