Lines Matching refs:optimized_kprobe

346 /* For optimized_kprobe buffer */
436 /* Free optimized instructions and optimized_kprobe */
439 struct optimized_kprobe *op;
441 op = container_of(p, struct optimized_kprobe, kp);
450 struct optimized_kprobe *op;
453 op = container_of(p, struct optimized_kprobe, kp);
463 struct optimized_kprobe *op;
469 op = container_of(p, struct optimized_kprobe, kp);
477 struct optimized_kprobe *op;
480 op = container_of(p, struct optimized_kprobe, kp);
495 struct optimized_kprobe *op;
502 op = container_of(p, struct optimized_kprobe, kp);
552 struct optimized_kprobe *op, *tmp;
585 struct optimized_kprobe *op, *tmp;
665 static bool optprobe_queued_unopt(struct optimized_kprobe *op)
667 struct optimized_kprobe *_op;
680 struct optimized_kprobe *op;
691 op = container_of(p, struct optimized_kprobe, kp);
719 static void force_unoptimize_kprobe(struct optimized_kprobe *op)
729 struct optimized_kprobe *op;
734 op = container_of(p, struct optimized_kprobe, kp);
770 struct optimized_kprobe *op;
776 op = container_of(ap, struct optimized_kprobe, kp);
791 struct optimized_kprobe *op;
793 op = container_of(p, struct optimized_kprobe, kp);
815 static void __prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p)
824 struct optimized_kprobe *op;
826 op = container_of(p, struct optimized_kprobe, kp);
830 /* Allocate new optimized_kprobe and try to prepare optimized instructions. */
833 struct optimized_kprobe *op;
835 op = kzalloc(sizeof(struct optimized_kprobe), GFP_KERNEL);
849 * Prepare an optimized_kprobe and optimize it.
855 struct optimized_kprobe *op;
870 op = container_of(ap, struct optimized_kprobe, kp);
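Most of the matches above repeat one pattern: struct optimized_kprobe embeds a struct kprobe as its kp member, and container_of() recovers the enclosing optimized_kprobe from a plain struct kprobe pointer. The sketch below is a minimal userspace illustration of that pattern, not the kernel code itself: the struct bodies and the optinsn_size field are placeholders (the real struct optimized_kprobe in include/linux/kprobes.h carries a list head and an arch_optimized_insn), and the kernel's container_of() additionally does type checking.

#include <stddef.h>
#include <stdio.h>

/* Simplified container_of(): recover a pointer to the enclosing structure
 * from a pointer to one of its members. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Illustrative stand-ins for the kernel structures: a struct kprobe is
 * embedded inside struct optimized_kprobe as the 'kp' member. */
struct kprobe {
	void *addr;
};

struct optimized_kprobe {
	struct kprobe kp;	/* embedded member named 'kp', as in the kernel */
	int optinsn_size;	/* placeholder for the extra per-optprobe state */
};

int main(void)
{
	struct optimized_kprobe op = { .optinsn_size = 32 };
	struct kprobe *p = &op.kp;	/* callers usually only hold the inner pointer */

	/* The recurring pattern in the listing:
	 * op = container_of(p, struct optimized_kprobe, kp); */
	struct optimized_kprobe *found = container_of(p, struct optimized_kprobe, kp);

	printf("recovered optinsn_size = %d\n", found->optinsn_size);
	return 0;
}

Because kp is the embedded member, subtracting its offset from the struct kprobe pointer yields the surrounding optimized_kprobe, which is why the listed call sites can pass a bare struct kprobe * around and still reach the optimization-specific data.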