Lines Matching refs:op

387 struct optimized_kprobe *op; in free_aggr_kprobe() local
389 op = container_of(p, struct optimized_kprobe, kp); in free_aggr_kprobe()
390 arch_remove_optimized_kprobe(op); in free_aggr_kprobe()
392 kfree(op); in free_aggr_kprobe()
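Nearly every hit in this listing recovers the enclosing struct optimized_kprobe from an embedded struct kprobe member via container_of(), as at lines 389-392 above: recover the container, let the architecture tear down the optimized instruction, then kfree() the container. The following is a minimal, userspace-runnable sketch of that pattern; the struct members are placeholders and container_of is redefined locally so the example compiles outside the kernel.

#include <stddef.h>
#include <stdio.h>

/* Local stand-in for the kernel macro: recover the address of the
 * enclosing structure from a pointer to one of its members. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Skeletal stand-ins for the kernel types; only the embedding matters here. */
struct kprobe { void *addr; };
struct optimized_kprobe {
	struct kprobe kp;	/* embedded member, as in kprobes */
	int optinsn_prepared;	/* illustrative placeholder for op->optinsn state */
};

int main(void)
{
	struct optimized_kprobe real = {
		.kp = { .addr = (void *)0x1234 },
		.optinsn_prepared = 1,
	};
	struct kprobe *p = &real.kp;	/* callers only see the embedded kprobe */

	/* Same step as "op = container_of(p, struct optimized_kprobe, kp);" */
	struct optimized_kprobe *op = container_of(p, struct optimized_kprobe, kp);

	printf("op == &real? %s\n", op == &real ? "yes" : "no");
	return 0;
}

This is why free_aggr_kprobe() can take a plain struct kprobe pointer and still free the whole optimized_kprobe: container_of() recovers the container regardless of where the kp member sits inside it.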
398 struct optimized_kprobe *op; in kprobe_optready() local
401 op = container_of(p, struct optimized_kprobe, kp); in kprobe_optready()
402 return arch_prepared_optinsn(&op->optinsn); in kprobe_optready()
411 struct optimized_kprobe *op; in kprobe_disarmed() local
417 op = container_of(p, struct optimized_kprobe, kp); in kprobe_disarmed()
419 return kprobe_disabled(p) && list_empty(&op->list); in kprobe_disarmed()
425 struct optimized_kprobe *op; in kprobe_queued() local
428 op = container_of(p, struct optimized_kprobe, kp); in kprobe_queued()
429 if (!list_empty(&op->list)) in kprobe_queued()
443 struct optimized_kprobe *op; in get_optimized_kprobe() local
450 op = container_of(p, struct optimized_kprobe, kp); in get_optimized_kprobe()
451 if (arch_within_optimized_kprobe(op, addr)) in get_optimized_kprobe()
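In get_optimized_kprobe() (lines 443-451), arch_within_optimized_kprobe() asks whether an address falls inside the instruction range already covered by an optimized probe's jump. The sketch below is an illustrative userspace analogue of that containment test only; the region length and field names are hypothetical, not the real architecture implementation.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define OPT_REGION_LEN 5	/* hypothetical size of the patched region */

struct fake_probe {
	uintptr_t addr;		/* start of the optimized region */
};

static bool within_optimized_region(const struct fake_probe *op, uintptr_t addr)
{
	/* Half-open interval: [op->addr, op->addr + OPT_REGION_LEN) */
	return addr >= op->addr && addr < op->addr + OPT_REGION_LEN;
}

int main(void)
{
	struct fake_probe op = { .addr = 0x1000 };

	printf("%d %d %d\n",
	       within_optimized_region(&op, 0x1000),	/* 1: start is inside */
	       within_optimized_region(&op, 0x1004),	/* 1: last covered byte */
	       within_optimized_region(&op, 0x1005));	/* 0: just past the end */
	return 0;
}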
500 struct optimized_kprobe *op, *tmp; in do_unoptimize_kprobes() local
512 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_unoptimize_kprobes()
514 if (kprobe_disabled(&op->kp)) in do_unoptimize_kprobes()
515 arch_disarm_kprobe(&op->kp); in do_unoptimize_kprobes()
516 if (kprobe_unused(&op->kp)) { in do_unoptimize_kprobes()
522 hlist_del_rcu(&op->kp.hlist); in do_unoptimize_kprobes()
524 list_del_init(&op->list); in do_unoptimize_kprobes()
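do_unoptimize_kprobes() (lines 500-524) walks freeing_list with list_for_each_entry_safe(), which keeps a second cursor so the current entry can be unlinked with list_del_init() or freed without breaking the iteration. A minimal userspace analogue of that "save the next pointer before releasing the current node" pattern, with a hand-rolled singly linked list standing in for the kernel's list_head:

#include <stdio.h>
#include <stdlib.h>

struct node {
	int id;
	struct node *next;
};

int main(void)
{
	/* Build a small list: 2 -> 1 -> 0 */
	struct node *head = NULL;
	for (int i = 0; i < 3; i++) {
		struct node *n = malloc(sizeof(*n));
		n->id = i;
		n->next = head;
		head = n;
	}

	/* The "safe" walk: save ->next before the current node is freed,
	 * mirroring list_for_each_entry_safe(op, tmp, &freeing_list, list). */
	struct node *cur, *tmp;
	for (cur = head; cur; cur = tmp) {
		tmp = cur->next;	/* like the 'tmp' cursor in the kernel macro */
		printf("freeing node %d\n", cur->id);
		free(cur);		/* cur is gone; tmp keeps the walk alive */
	}
	return 0;
}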
531 struct optimized_kprobe *op, *tmp; in do_free_cleaned_kprobes() local
533 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_free_cleaned_kprobes()
534 list_del_init(&op->list); in do_free_cleaned_kprobes()
535 if (WARN_ON_ONCE(!kprobe_unused(&op->kp))) { in do_free_cleaned_kprobes()
542 free_aggr_kprobe(&op->kp); in do_free_cleaned_kprobes()
616 struct optimized_kprobe *op; in optimize_kprobe() local
627 op = container_of(p, struct optimized_kprobe, kp); in optimize_kprobe()
630 if (arch_check_optimized_kprobe(op) < 0) in optimize_kprobe()
634 if (op->kp.flags & KPROBE_FLAG_OPTIMIZED) in optimize_kprobe()
636 op->kp.flags |= KPROBE_FLAG_OPTIMIZED; in optimize_kprobe()
638 if (!list_empty(&op->list)) in optimize_kprobe()
640 list_del_init(&op->list); in optimize_kprobe()
642 list_add(&op->list, &optimizing_list); in optimize_kprobe()
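optimize_kprobe() (lines 616-642) combines two small idioms: a flag bit (KPROBE_FLAG_OPTIMIZED) that is tested and set here and cleared on the unoptimize/kill paths at lines 679 and 725, plus the list_del_init()/list_add() pair that keeps the probe on at most one pending queue before it lands on optimizing_list (the safe-iteration sketch above covers the list side). Below is a standalone sketch of the flag idiom only, using a hypothetical flag name in place of the kernel constant.

#include <stdio.h>

/* Illustrative flag bit; the kernel's KPROBE_FLAG_OPTIMIZED plays this role. */
#define FLAG_OPTIMIZED (1u << 0)

struct fake_kprobe {
	unsigned int flags;
};

int main(void)
{
	struct fake_kprobe kp = { .flags = 0 };

	/* optimize path: skip if already marked, otherwise set the bit,
	 * as at lines 634-636 above. */
	if (!(kp.flags & FLAG_OPTIMIZED))
		kp.flags |= FLAG_OPTIMIZED;
	printf("optimized? %s\n", (kp.flags & FLAG_OPTIMIZED) ? "yes" : "no");

	/* unoptimize/kill path: clear the bit, as at lines 679 and 725. */
	kp.flags &= ~FLAG_OPTIMIZED;
	printf("optimized? %s\n", (kp.flags & FLAG_OPTIMIZED) ? "yes" : "no");

	return 0;
}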
648 static void force_unoptimize_kprobe(struct optimized_kprobe *op) in force_unoptimize_kprobe() argument
651 arch_unoptimize_kprobe(op); in force_unoptimize_kprobe()
652 if (kprobe_disabled(&op->kp)) in force_unoptimize_kprobe()
653 arch_disarm_kprobe(&op->kp); in force_unoptimize_kprobe()
659 struct optimized_kprobe *op; in unoptimize_kprobe() local
664 op = container_of(p, struct optimized_kprobe, kp); in unoptimize_kprobe()
667 if (force && !list_empty(&op->list)) { in unoptimize_kprobe()
673 list_del_init(&op->list); in unoptimize_kprobe()
674 force_unoptimize_kprobe(op); in unoptimize_kprobe()
679 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in unoptimize_kprobe()
680 if (!list_empty(&op->list)) { in unoptimize_kprobe()
682 list_del_init(&op->list); in unoptimize_kprobe()
688 force_unoptimize_kprobe(op); in unoptimize_kprobe()
690 list_add(&op->list, &unoptimizing_list); in unoptimize_kprobe()
698 struct optimized_kprobe *op; in reuse_unused_kprobe() local
704 op = container_of(ap, struct optimized_kprobe, kp); in reuse_unused_kprobe()
705 WARN_ON_ONCE(list_empty(&op->list)); in reuse_unused_kprobe()
719 struct optimized_kprobe *op; in kill_optimized_kprobe() local
721 op = container_of(p, struct optimized_kprobe, kp); in kill_optimized_kprobe()
722 if (!list_empty(&op->list)) in kill_optimized_kprobe()
724 list_del_init(&op->list); in kill_optimized_kprobe()
725 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in kill_optimized_kprobe()
729 list_add(&op->list, &freeing_list); in kill_optimized_kprobe()
735 hlist_del_rcu(&op->kp.hlist); in kill_optimized_kprobe()
739 arch_remove_optimized_kprobe(op); in kill_optimized_kprobe()
743 void __prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p) in __prepare_optimized_kprobe() argument
746 arch_prepare_optimized_kprobe(op, p); in __prepare_optimized_kprobe()
752 struct optimized_kprobe *op; in prepare_optimized_kprobe() local
754 op = container_of(p, struct optimized_kprobe, kp); in prepare_optimized_kprobe()
755 __prepare_optimized_kprobe(op, p); in prepare_optimized_kprobe()
761 struct optimized_kprobe *op; in alloc_aggr_kprobe() local
763 op = kzalloc(sizeof(struct optimized_kprobe), GFP_KERNEL); in alloc_aggr_kprobe()
764 if (!op) in alloc_aggr_kprobe()
767 INIT_LIST_HEAD(&op->list); in alloc_aggr_kprobe()
768 op->kp.addr = p->addr; in alloc_aggr_kprobe()
769 __prepare_optimized_kprobe(op, p); in alloc_aggr_kprobe()
771 return &op->kp; in alloc_aggr_kprobe()
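alloc_aggr_kprobe() (lines 761-771) is the converse of the container_of sketch earlier: allocate and zero the container, initialize its bookkeeping, copy the probed address, and hand back a pointer to the embedded struct kprobe rather than to the container. A userspace sketch of that shape, with kzalloc replaced by calloc and the preparation step reduced to a placeholder field:

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

struct kprobe { void *addr; };
struct optimized_kprobe {
	struct kprobe kp;	/* embedded member handed back to the caller */
	int prepared;		/* placeholder for the list/optinsn bookkeeping */
};

/* Mirrors the shape of alloc_aggr_kprobe(): allocate the container, set it
 * up from the original probe, return the embedded kprobe (or NULL). */
static struct kprobe *alloc_wrapper(struct kprobe *p)
{
	struct optimized_kprobe *op = calloc(1, sizeof(*op));	/* kzalloc analogue */
	if (!op)
		return NULL;

	op->kp.addr = p->addr;	/* like "op->kp.addr = p->addr;" at line 768 */
	op->prepared = 1;	/* stands in for __prepare_optimized_kprobe() */

	return &op->kp;		/* caller sees only the embedded kprobe */
}

int main(void)
{
	struct kprobe orig = { .addr = (void *)0xbeef };
	struct kprobe *agg = alloc_wrapper(&orig);

	if (agg) {
		printf("aggregated probe at %p\n", agg->addr);
		/* Recover the container before freeing, as free_aggr_kprobe() does. */
		free((char *)agg - offsetof(struct optimized_kprobe, kp));
	}
	return 0;
}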
783 struct optimized_kprobe *op; in try_to_optimize_kprobe() local
798 op = container_of(ap, struct optimized_kprobe, kp); in try_to_optimize_kprobe()
799 if (!arch_prepared_optinsn(&op->optinsn)) { in try_to_optimize_kprobe()
801 arch_remove_optimized_kprobe(op); in try_to_optimize_kprobe()
802 kfree(op); in try_to_optimize_kprobe()