Searched refs:vcpu_to_pmu (Results 1 – 7 of 7) sorted by relevance

/linux-6.1.9/arch/x86/kvm/svm/
pmu.c
89 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_is_valid_rdpmc_ecx()
100 return amd_pmc_idx_to_pmc(vcpu_to_pmu(vcpu), idx & ~(3u << 30)); in amd_rdpmc_ecx_to_pmc()
111 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_msr_idx_to_pmc()
122 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_get_msr()
144 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_set_msr()
172 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_refresh()
192 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_init()
208 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_reset()
/linux-6.1.9/arch/x86/kvm/vmx/
pmu_intel.c
118 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_is_valid_rdpmc_ecx()
130 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_rdpmc_ecx_to_pmc()
190 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_is_valid_msr()
225 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_msr_idx_to_pmc()
242 vcpu_to_pmu(vcpu)->event_count--; in intel_pmu_release_guest_lbr_event()
249 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_create_guest_lbr_event()
327 __set_bit(INTEL_PMC_IDX_FIXED_VLBR, vcpu_to_pmu(vcpu)->pmc_in_use); in intel_pmu_handle_lbr_msrs_access()
331 clear_bit(INTEL_PMC_IDX_FIXED_VLBR, vcpu_to_pmu(vcpu)->pmc_in_use); in intel_pmu_handle_lbr_msrs_access()
342 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_get_msr()
392 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_set_msr()
[all …]
nested.c
2623 intel_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu)) && in prepare_vmcs02()
2899 CC(!kvm_valid_perf_global_ctrl(vcpu_to_pmu(vcpu), in nested_vmx_check_host_state()
3024 CC(!kvm_valid_perf_global_ctrl(vcpu_to_pmu(vcpu), in nested_vmx_check_guest_state()
4487 intel_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu))) in load_vmcs12_host_state()
vmx.c
2341 if (data && !vcpu_to_pmu(vcpu)->version) in vmx_set_msr()
6975 struct kvm_pmu *pmu = vcpu_to_pmu(&vmx->vcpu); in atomic_switch_perf_msrs()
/linux-6.1.9/arch/x86/kvm/
pmu.c
342 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_handle_event()
407 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_rdpmc()
446 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_mark_pmc_in_use()
475 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_reset()
483 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_init()
496 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_cleanup()
560 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_trigger_event()
pmu.h
7 #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu) macro
x86.c
12349 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_arch_sched_in()
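
The pmu.h hit above shows that vcpu_to_pmu() is just a macro expanding to &(vcpu)->arch.pmu, and every other hit follows the same pattern: a callback takes a struct kvm_vcpu and derives its per-vCPU PMU state before touching counters or MSRs. Below is a minimal sketch of that pattern; it assumes the KVM x86 context of the files listed, and the function name and version check are illustrative only (the check mirrors the vmx.c hit at line 2341), not an actual kernel function.

/*
 * Sketch of the call-site pattern seen in the hits above; assumes the
 * kvm_vcpu/kvm_pmu definitions from arch/x86/kvm. Not a kernel function.
 */
#define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu)	/* as defined at pmu.h:7 above */

static bool example_vcpu_has_pmu(struct kvm_vcpu *vcpu)
{
	/* Resolve the per-vCPU PMU state embedded in vcpu->arch. */
	struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);

	/* A zero PMU version means no architectural PMU is exposed to the guest. */
	return pmu->version > 0;
}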