/linux-6.6.21/tools/testing/selftests/kvm/lib/x86_64/

  svm.c
     75  efer = rdmsr(MSR_EFER);                        in generic_svm_setup()
     76  wrmsr(MSR_EFER, efer | EFER_SVME);             in generic_svm_setup()
     90  save->efer = rdmsr(MSR_EFER);                  in generic_svm_setup()

  vmx.c
    266  vmwrite(HOST_IA32_EFER, rdmsr(MSR_EFER));      in init_vmcs_host_state()
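The svm.c hits all revolve around one prerequisite: a guest must set
EFER.SVME before it may execute VMRUN. A minimal sketch of that pattern,
using the selftest library's guest-side rdmsr()/wrmsr() wrappers seen
above (the helper name is illustrative):

    /* VMRUN raises #UD until EFER.SVME is set. */
    static void guest_enable_svme(void)
    {
            uint64_t efer = rdmsr(MSR_EFER);

            wrmsr(MSR_EFER, efer | EFER_SVME);
    }

The vmx.c hit is the host-side counterpart for VMX: the current EFER is
copied into the VMCS host-state area so it is reinstated on VM exit.
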
/linux-6.6.21/arch/x86/kernel/acpi/

  sleep.c
     84  if (!rdmsr_safe(MSR_EFER,                      in x86_acpi_suspend_lowlevel()
     87  !wrmsr_safe(MSR_EFER,                          in x86_acpi_suspend_lowlevel()
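Both hits use the fault-tolerant accessors: rdmsr_safe()/wrmsr_safe()
return nonzero instead of raising #GP on CPUs where the MSR is absent,
so the suspend path can treat the EFER stash as best-effort. A sketch of
the pattern (the wakeup-header name and its fields are illustrative):

    /* Only mark EFER as restorable on wakeup if it can be both read
     * and written without faulting. */
    if (!rdmsr_safe(MSR_EFER, &wakeup_hdr->efer_low, &wakeup_hdr->efer_high) &&
        !wrmsr_safe(MSR_EFER, wakeup_hdr->efer_low, wakeup_hdr->efer_high))
            wakeup_hdr->behavior |= WAKEUP_RESTORE_EFER;
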
/linux-6.6.21/arch/x86/boot/compressed/

  efi_mixed.S
    188  movl $MSR_EFER, %ecx
    214  movl $MSR_EFER, %ecx

  head_64.S
    239  movl $MSR_EFER, %ecx
    545  movl $MSR_EFER, %ecx
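In these boot-time assembly hits, loading $MSR_EFER into %ecx is the
setup for a rdmsr/wrmsr pair that turns on EFER.LME, so that the
subsequent CR0.PG write switches the CPU into long mode (the pvh/head.S
hit below is the same long-mode enable). The read-modify-write, rendered
as C purely for illustration (the real code must stay in assembly, since
it runs before any C environment exists; the function name is made up):

    static void enter_long_mode_prep(void)
    {
            u64 efer;

            rdmsrl(MSR_EFER, efer);              /* movl $MSR_EFER, %ecx; rdmsr */
            wrmsrl(MSR_EFER, efer | EFER_LME);   /* set LME; wrmsr              */
            /* Hardware sets EFER.LMA once CR0.PG is enabled on top of this. */
    }
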
/linux-6.6.21/arch/x86/realmode/rm/

  reboot.S
     39  movl $MSR_EFER, %ecx

  wakeup_asm.S
    123  movl $MSR_EFER, %ecx

  trampoline_64.S
    162  movl $MSR_EFER, %ecx
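trampoline_64.S and wakeup_asm.S write EFER while bringing a CPU up
(trampoline_64.S replays the value stashed by setup_real_mode(); see the
arch/x86/realmode/init.c hit below). reboot.S goes the other way: once
paging is off, it clears EFER.LME so the CPU can drop out of long mode
for a legacy reboot. The reboot direction in C terms (illustrative only,
again the real code is necessarily assembly):

    /* With CR0.PG already clear, dropping LME leaves long mode entirely. */
    static void leave_long_mode(void)
    {
            u64 efer;

            rdmsrl(MSR_EFER, efer);
            wrmsrl(MSR_EFER, efer & ~EFER_LME);
    }
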
/linux-6.6.21/arch/x86/power/

  cpu.c
    117  rdmsrl(MSR_EFER, ctxt->efer);                  in __save_processor_state()
    210  wrmsrl(MSR_EFER, ctxt->efer);                  in __restore_processor_state()
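A classic save/restore pairing across suspend: EFER is captured into the
saved context and replayed on resume. Restoring it matters before other
state because page tables with NX bits set are only architecturally
valid while EFER.NX=1. Sketch (the context structure here is
illustrative; the real one is struct saved_context):

    struct pm_ctxt { u64 efer; };

    static void efer_save(struct pm_ctxt *ctxt)
    {
            rdmsrl(MSR_EFER, ctxt->efer);    /* __save_processor_state()    */
    }

    static void efer_restore(struct pm_ctxt *ctxt)
    {
            wrmsrl(MSR_EFER, ctxt->efer);    /* __restore_processor_state() */
    }
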
/linux-6.6.21/arch/x86/platform/pvh/

  head.S
     80  mov $MSR_EFER, %ecx
/linux-6.6.21/arch/x86/realmode/

  init.c
    148  rdmsrl(MSR_EFER, efer);                        in setup_real_mode()
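This is the producer side of the trampoline_64.S hit above:
setup_real_mode() snapshots the boot CPU's EFER into the trampoline
header, and the trampoline replays it on each secondary CPU. A sketch of
the stash, following the 6.6 code:

    u64 efer;

    rdmsrl(MSR_EFER, efer);
    /* Hand the APs everything but LMA, which hardware sets by itself
     * once the AP actually enters long mode. */
    trampoline_header->efer = efer & ~EFER_LMA;
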
/linux-6.6.21/arch/x86/hyperv/

  hv_vtl.c
    110  input->vp_context.efer = __rdmsr(MSR_EFER);    in hv_vtl_bringup_vcpu()

  ivm.c
    321  vmsa->efer = native_read_msr(MSR_EFER);        in hv_snp_boot_ap()
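Both Hyper-V AP bring-up paths seed the new processor's initial register
state from the booting CPU's live EFER rather than a hard-coded value,
one into the hypercall's VP context and one into the SEV-SNP VMSA. The
two accessors are near-equivalents: __rdmsr() is the raw instruction
wrapper, while native_read_msr() additionally feeds the MSR tracepoint
in tracing builds. Sketch (struct and helper names illustrative):

    struct ap_boot_state { u64 efer; };

    static void seed_ap_efer(struct ap_boot_state *st)
    {
            st->efer = native_read_msr(MSR_EFER);
    }
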
/linux-6.6.21/arch/x86/kernel/

  head_32.S
    228  movl $MSR_EFER, %ecx

  head_64.S
    387  movl $MSR_EFER, %ecx
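The same %ecx-selector idiom as in the boot/compressed hits, but aimed
at different EFER bits: head_32.S uses it to enable EFER.NX for a PAE
32-bit kernel on NX-capable hardware, and head_64.S sets up each 64-bit
CPU's EFER (SYSCALL/SYSRET enable, plus NX where supported) during
startup. In C terms (illustrative rendering, made-up function name):

    static void cpu_setup_efer(void)
    {
            u64 efer;

            rdmsrl(MSR_EFER, efer);
            efer |= EFER_SCE;                 /* enable SYSCALL/SYSRET  */
            if (boot_cpu_has(X86_FEATURE_NX))
                    efer |= EFER_NX;          /* honour NX bits in PTEs */
            wrmsrl(MSR_EFER, efer);
    }
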
/linux-6.6.21/arch/x86/kvm/

  smm.c
     539  if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA))  in rsm_load_state_64()
     630  kvm_set_msr(vcpu, MSR_EFER, efer);            in emulator_leave_smm()

  emulate.c
     787  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in emulator_recalc_and_set_mode()
    1513  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in get_descriptor_ptr()
    1687  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in __load_segment_descriptor()
    2414  ops->get_msr(ctxt, MSR_EFER, &efer);          in em_syscall()
    2465  ops->get_msr(ctxt, MSR_EFER, &efer);          in em_sysenter()
    3915  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in check_svme()
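The emulate.c hits all read EFER to gate emulation decisions: long-mode
awareness when recalculating the emulator mode and loading segment
descriptors, SYSCALL/SYSENTER legality, and SVM instruction legality.
The last one, check_svme(), is compact enough to show nearly whole
(lightly paraphrased from the 6.6 source):

    /* SVM instructions such as VMRUN #UD unless the guest enabled
     * EFER.SVME first. */
    static int check_svme(struct x86_emulate_ctxt *ctxt)
    {
            u64 efer = 0;

            ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);

            if (!(efer & EFER_SVME))
                    return emulate_ud(ctxt);

            return X86EMUL_CONTINUE;
    }
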
/linux-6.6.21/arch/x86/kvm/vmx/

  vmx.c
     993  case MSR_EFER:                                in clear_atomic_switch_msr()
    1046  case MSR_EFER:                                in add_atomic_switch_msr()
    1136  add_atomic_switch_msr(vmx, MSR_EFER,          in update_transition_efer()
    1139  clear_atomic_switch_msr(vmx, MSR_EFER);       in update_transition_efer()
    1143  i = kvm_find_user_return_msr(MSR_EFER);       in update_transition_efer()
    1147  clear_atomic_switch_msr(vmx, MSR_EFER);       in update_transition_efer()
    1883  vmx_setup_uret_msr(vmx, MSR_EFER, update_transition_efer(vmx));  in vmx_setup_uret_msrs()
    2008  case MSR_EFER:                                in vmx_get_msr()
    2179  case MSR_EFER:                                in vmx_set_msr()
    3125  if (!vmx_find_uret_msr(vmx, MSR_EFER))        in vmx_set_efer()
    [all …]

  nested.c
    4602  if (vmx->msr_autoload.guest.val[i].index == MSR_EFER)  in nested_vmx_get_vmcs01_guest_efer()
    4606  efer_msr = vmx_find_uret_msr(vmx, MSR_EFER);  in nested_vmx_get_vmcs01_guest_efer()
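The update_transition_efer() cluster decides how guest and host EFER get
swapped around VM entry/exit: use the VMCS autoload lists when EFER must
be switched eagerly, otherwise defer to the cheaper user-return MSR slot
that is only restored on return to userspace. A much-simplified sketch
of that decision (the helper name and the reduced condition are
illustrative, not the kernel's):

    static bool choose_efer_switch(struct vcpu_vmx *vmx,
                                   u64 guest_efer, u64 host_efer)
    {
            if (cpu_has_load_ia32_efer()) {
                    /* Hardware swaps EFER on every entry/exit. */
                    add_atomic_switch_msr(vmx, MSR_EFER,
                                          guest_efer, host_efer, false);
                    return false;   /* no user-return slot needed */
            }

            /* Keep the guest value loaded across exits; the host value
             * comes back lazily via the user-return MSR machinery. */
            clear_atomic_switch_msr(vmx, MSR_EFER);
            return kvm_find_user_return_msr(MSR_EFER) >= 0;
    }
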
/linux-6.6.21/tools/arch/x86/include/asm/

  msr-index.h
     10  #define MSR_EFER 0xc0000080 /* extended feature register */

/linux-6.6.21/arch/x86/include/asm/

  msr-index.h
     10  #define MSR_EFER 0xc0000080 /* extended feature register */
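The tools/ copy mirrors the kernel header so selftests can build out of
tree. For reference, the same header also defines the EFER bits that the
hits above set and test (abridged):

    #define _EFER_SCE        0   /* SYSCALL/SYSRET */
    #define _EFER_LME        8   /* Long mode enable */
    #define _EFER_LMA        10  /* Long mode active (read-only) */
    #define _EFER_NX         11  /* No execute enable */
    #define _EFER_SVME       12  /* Virtualization enable */
    #define _EFER_AUTOIBRS   21  /* Enable Automatic IBRS */

    #define EFER_SCE         (1 << _EFER_SCE)
    #define EFER_LME         (1 << _EFER_LME)
    #define EFER_LMA         (1 << _EFER_LMA)
    #define EFER_NX          (1 << _EFER_NX)
    #define EFER_SVME        (1 << _EFER_SVME)
    #define EFER_AUTOIBRS    (1 << _EFER_AUTOIBRS)
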
/linux-6.6.21/arch/x86/kvm/svm/

  svm.c
     106  { .index = MSR_EFER, .always = false },
     594  rdmsrl(MSR_EFER, efer);                       in kvm_cpu_svm_disable()
     601  wrmsrl(MSR_EFER, efer & ~EFER_SVME);          in kvm_cpu_svm_disable()
     630  rdmsrl(MSR_EFER, efer);                       in svm_hardware_enable()
     640  wrmsrl(MSR_EFER, efer | EFER_SVME);           in svm_hardware_enable()
    2821  msr_info.index = MSR_EFER;                    in efer_trap()

  sev.c
    3047  set_msr_interception(vcpu, svm->msrpm, MSR_EFER, 1, 1);  in sev_es_init_vmcb()
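This is the host-side counterpart of the selftest pattern at the top:
KVM sets EFER.SVME per CPU before it can VMRUN, and clears it again on
shutdown. A condensed sketch of the enable path (simplified from
svm_hardware_enable(); the function name here is illustrative):

    static int enable_svm_on_cpu(void)
    {
            u64 efer;

            rdmsrl(MSR_EFER, efer);
            if (efer & EFER_SVME)
                    return -EBUSY;  /* someone else owns SVM on this CPU */

            wrmsrl(MSR_EFER, efer | EFER_SVME);
            return 0;
    }
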
/linux-6.6.21/arch/x86/kernel/cpu/

  amd.c
    1148  WARN_ON_ONCE(msr_set_bit(MSR_EFER, _EFER_AUTOIBRS));  in init_amd()

  bugs.c
    1613  msr_set_bit(MSR_EFER, _EFER_AUTOIBRS);        in spectre_v2_select_mitigation()
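Both hits go through msr_set_bit(), a read-modify-write helper that only
issues the wrmsr if the bit is currently clear (it returns a negative
errno on a faulting access, 0 if the bit was already set, and 1 if it
flipped it). bugs.c turns on AMD's Automatic IBRS when it is picked as
the spectre_v2 mitigation; amd.c re-applies it on each CPU in
init_amd(). Roughly (conditions simplified):

    if (boot_cpu_has(X86_FEATURE_AUTOIBRS))
            msr_set_bit(MSR_EFER, _EFER_AUTOIBRS);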