
Searched refs:sched_in (Results 1 – 20 of 20) sorted by relevance

/linux-6.6.21/arch/x86/events/amd/
brs.c 384 void amd_pmu_brs_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in amd_pmu_brs_sched_task() argument
397 if (sched_in) in amd_pmu_brs_sched_task()
lbr.c 376 void amd_pmu_lbr_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in amd_pmu_lbr_sched_task() argument
385 if (cpuc->lbr_users && sched_in) in amd_pmu_lbr_sched_task()
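
The two AMD hits above show the arch-side shape of these hooks: a per-feature sched_task handler that receives the PMU context plus a bool sched_in flag and only acts on the switch-in edge, when branch records left by the previous task have to be discarded. A minimal sketch of that shape (illustrative only, not the in-tree bodies of amd_pmu_brs_sched_task()/amd_pmu_lbr_sched_task(); the reset helper is hypothetical):

static void example_pmu_sched_task(struct perf_event_pmu_context *pmu_ctx,
                                   bool sched_in)
{
        struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

        /* Nothing to do unless this feature has active users. */
        if (!cpuc->lbr_users)
                return;

        /*
         * On switch-in, hardware branch records still belong to the
         * previous task and must be cleared before the incoming task
         * starts sampling.
         */
        if (sched_in)
                example_reset_branch_records(cpuc);     /* hypothetical helper */
}
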
/linux-6.6.21/tools/perf/
builtin-sched.c 1137 struct thread *sched_out, *sched_in; in latency_switch_event() local
1157 sched_in = machine__findnew_thread(machine, -1, next_pid); in latency_switch_event()
1158 if (sched_out == NULL || sched_in == NULL) in latency_switch_event()
1174 in_events = thread_atoms_search(&sched->atom_root, sched_in, &sched->cmp_pid); in latency_switch_event()
1176 if (thread_atoms_insert(sched, sched_in)) in latency_switch_event()
1178 in_events = thread_atoms_search(&sched->atom_root, sched_in, &sched->cmp_pid); in latency_switch_event()
1194 thread__put(sched_in); in latency_switch_event()
1570 struct thread *sched_in; in map_switch_event() local
1610 sched_in = map__findnew_thread(sched, machine, -1, next_pid); in map_switch_event()
1611 if (sched_in == NULL) in map_switch_event()
[all …]
/linux-6.6.21/arch/x86/events/
perf_event.h 819 bool sched_in);
1312 void amd_pmu_lbr_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in);
1353 void amd_pmu_brs_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in);
1378 static inline void amd_pmu_brs_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in amd_pmu_brs_sched_task() argument
1540 void intel_pmu_pebs_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in);
1551 void intel_pmu_lbr_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in);
core.c 2623 static void x86_pmu_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in x86_pmu_sched_task() argument
2625 static_call_cond(x86_pmu_sched_task)(pmu_ctx, sched_in); in x86_pmu_sched_task()
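
In the generic x86 core (core.c hits), the call is routed through a static call, so a vendor handler runs without an indirect call and the whole thing collapses to a NOP when no handler is installed. Roughly, the mechanism behind the two core.c lines above is the standard static_call pattern (a sketch; only x86_pmu_sched_task and x86_pmu.sched_task appear in the hits, the rest is the generic API):

#include <linux/static_call.h>

/* Start with a NULL target: static_call_cond() is then a no-op until a
 * real handler is patched in. */
DEFINE_STATIC_CALL_NULL(x86_pmu_sched_task, *x86_pmu.sched_task);

static void x86_pmu_sched_task(struct perf_event_pmu_context *pmu_ctx,
                               bool sched_in)
{
        static_call_cond(x86_pmu_sched_task)(pmu_ctx, sched_in);
}

/* At PMU init, the vendor implementation (Intel/AMD) would be installed
 * with something like:
 *      static_call_update(x86_pmu_sched_task, x86_pmu.sched_task);
 */
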
/linux-6.6.21/arch/x86/include/asm/
kvm-x86-ops.h 105 KVM_X86_OP(sched_in)
kvm_host.h 1694 void (*sched_in)(struct kvm_vcpu *kvm, int cpu); member
/linux-6.6.21/include/linux/
preempt.h 338 void (*sched_in)(struct preempt_notifier *notifier, int cpu); member
perf_event.h 456 bool sched_in);
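
The preempt.h hit is a different sched_in from the perf one: it is the callback slot of a preempt notifier (CONFIG_PREEMPT_NOTIFIERS), fired by the scheduler (see the kernel/sched/core.c hit at the end of this list) whenever the task that registered the notifier is scheduled back onto a CPU. A hedged usage sketch with hypothetical names; the only in-tree registrant in this result set is KVM (virt/kvm/kvm_main.c):

#include <linux/preempt.h>
#include <linux/sched.h>

static void my_sched_in(struct preempt_notifier *pn, int cpu)
{
        /* current has just been scheduled in on 'cpu' */
}

static void my_sched_out(struct preempt_notifier *pn, struct task_struct *next)
{
        /* current is about to be switched out in favour of 'next' */
}

static struct preempt_ops my_preempt_ops = {
        .sched_in  = my_sched_in,
        .sched_out = my_sched_out,
};

static struct preempt_notifier my_notifier;

static void my_track_current_task(void)         /* hypothetical caller */
{
        preempt_notifier_inc();                 /* enable the notifier static key; KVM does this once at init */
        preempt_notifier_init(&my_notifier, &my_preempt_ops);
        preempt_notifier_register(&my_notifier);        /* KVM registers from vcpu_load() with preemption disabled */
}
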
/linux-6.6.21/arch/s390/kernel/
perf_pai_crypto.c 381 static void paicrypt_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in paicrypt_sched_task() argument
386 if (!sched_in) in paicrypt_sched_task()
perf_pai_ext.c 470 static void paiext_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in paiext_sched_task() argument
475 if (!sched_in) in paiext_sched_task()
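
Both s390 PAI hits use the same flag with inverted polarity: the visible guard tests the switch-out edge (!sched_in). A generic sketch of a handler keyed on switch-out only (illustrative, not the paicrypt/paiext bodies; the helper is hypothetical):

static void example_pai_sched_task(struct perf_event_pmu_context *pmu_ctx,
                                   bool sched_in)
{
        /* Only the switch-out edge is interesting here. */
        if (!sched_in)
                example_flush_task_counters();  /* hypothetical helper */
}
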
/linux-6.6.21/arch/x86/events/intel/
lbr.c 541 void intel_pmu_lbr_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in intel_pmu_lbr_sched_task() argument
556 if (sched_in) in intel_pmu_lbr_sched_task()
569 if (sched_in) in intel_pmu_lbr_sched_task()
ds.c 1128 void intel_pmu_pebs_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in intel_pmu_pebs_sched_task() argument
1132 if (!sched_in && pebs_needs_sched_cb(cpuc)) in intel_pmu_pebs_sched_task()
core.c 4820 bool sched_in) in intel_pmu_sched_task() argument
4822 intel_pmu_pebs_sched_task(pmu_ctx, sched_in); in intel_pmu_sched_task()
4823 intel_pmu_lbr_sched_task(pmu_ctx, sched_in); in intel_pmu_sched_task()
/linux-6.6.21/arch/powerpc/perf/
core-book3s.c 135 static void power_pmu_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) {} in power_pmu_sched_task() argument
454 static void power_pmu_sched_task(struct perf_event_pmu_context *pmu_ctx, bool sched_in) in power_pmu_sched_task() argument
459 if (sched_in) in power_pmu_sched_task()
/linux-6.6.21/kernel/events/
core.c 3479 static void perf_ctx_sched_task_cb(struct perf_event_context *ctx, bool sched_in) in perf_ctx_sched_task_cb() argument
3488 pmu_ctx->pmu->sched_task(pmu_ctx, sched_in); in perf_ctx_sched_task_cb()
3618 static void __perf_pmu_sched_task(struct perf_cpu_pmu_context *cpc, bool sched_in) in __perf_pmu_sched_task() argument
3632 pmu->sched_task(cpc->task_epc, sched_in); in __perf_pmu_sched_task()
3640 bool sched_in) in perf_pmu_sched_task() argument
3650 __perf_pmu_sched_task(cpc, sched_in); in perf_pmu_sched_task()
3654 struct task_struct *next_prev, bool sched_in);
9052 struct task_struct *next_prev, bool sched_in) in perf_event_switch() argument
9064 .misc = sched_in ? 0 : PERF_RECORD_MISC_SWITCH_OUT, in perf_event_switch()
9072 if (!sched_in && task->on_rq) { in perf_event_switch()
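
In perf_event_switch() the same boolean ends up in the misc field of the emitted context-switch side-band record: switch-out records carry PERF_RECORD_MISC_SWITCH_OUT, switch-in records do not. A small userspace-side sketch of how a ring-buffer consumer could recover the direction from a PERF_RECORD_SWITCH header (hypothetical helper; only uses the UAPI definitions in <linux/perf_event.h>):

#include <linux/perf_event.h>
#include <stdbool.h>

/* True for a switch-in record, false for a switch-out record. */
static bool switch_record_is_sched_in(const struct perf_event_header *hdr)
{
        return hdr->type == PERF_RECORD_SWITCH &&
               !(hdr->misc & PERF_RECORD_MISC_SWITCH_OUT);
}
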
/linux-6.6.21/arch/x86/kvm/svm/
svm.c 5010 .sched_in = svm_sched_in,
/linux-6.6.21/virt/kvm/
kvm_main.c 6099 kvm_preempt_ops.sched_in = kvm_sched_in; in kvm_init()
/linux-6.6.21/arch/x86/kvm/vmx/
vmx.c 8332 .sched_in = vmx_sched_in,
/linux-6.6.21/kernel/sched/
core.c 4939 notifier->ops->sched_in(notifier, raw_smp_processor_id()); in __fire_sched_in_preempt_notifiers()
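
The last four groups above fit together as one path: kernel/sched/core.c fires the registered preempt notifiers on switch-in, virt/kvm/kvm_main.c points kvm_preempt_ops.sched_in at kvm_sched_in, and on x86 the vendor modules supply the arch-level hook in their kvm_x86_ops tables (.sched_in = svm_sched_in / vmx_sched_in, with the slot declared via KVM_X86_OP(sched_in)). A hedged sketch of the vendor side only, using a hypothetical handler; the member's prototype is the one shown in the kvm_host.h hit:

static void example_vendor_sched_in(struct kvm_vcpu *vcpu, int cpu)
{
        /* vendor-specific work when this vCPU's task gets a CPU again,
         * e.g. refreshing per-CPU state that may have gone stale */
}

static struct kvm_x86_ops example_x86_ops = {
        /* ... other callbacks ... */
        .sched_in = example_vendor_sched_in,
};
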