Searched refs: is_guest_mode (Results 1 – 19 of 19), sorted by relevance

/linux-6.6.21/arch/x86/kvm/
hyperv.h 182 bool is_guest_mode) in kvm_hv_get_tlb_flush_fifo() argument
185 int i = is_guest_mode ? HV_L2_TLB_FLUSH_FIFO : in kvm_hv_get_tlb_flush_fifo()
198 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_purge_flush_tlb()
irq.c 87 if (!is_guest_mode(v) && kvm_vcpu_apicv_active(v)) in kvm_cpu_has_injectable_intr()
kvm_cache_regs.h 226 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) in is_guest_mode() function
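For orientation, the predicate all of the KVM hits below call is the one-line inline helper declared at this location; a minimal sketch of it (reconstructed from kvm_cache_regs.h in this kernel series, not copied verbatim):

    /* arch/x86/kvm/kvm_cache_regs.h (sketch): true while the vCPU is running
     * a nested (L2) guest, i.e. HF_GUEST_MASK is set in vcpu->arch.hflags. */
    static inline bool is_guest_mode(struct kvm_vcpu *vcpu)
    {
            return vcpu->arch.hflags & HF_GUEST_MASK;
    }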
hyperv.c 1924 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_flush_tlb()
1987 if (!hc->fast && is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2009 is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2040 flush_ex.flags, is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2084 if (all_cpus && !is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2092 } else if (!is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2331 if (hv_result_success(result) && is_guest_mode(vcpu) && in kvm_hv_hypercall_complete()
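In the Hyper-V emulation above, the same L1/L2 distinction selects which per-vCPU TLB-flush FIFO a request is queued into; a sketch of the hyperv.h helper, reconstructed from the 182/185 hits with the body abridged:

    /* Sketch: Hyper-V TLB flush requests go into one of two per-vCPU FIFOs,
     * one for L1 and one for L2, chosen by the caller's is_guest_mode. */
    static inline struct kvm_vcpu_hv_tlb_flush_fifo *
    kvm_hv_get_tlb_flush_fifo(struct kvm_vcpu *vcpu, bool is_guest_mode)
    {
            int i = is_guest_mode ? HV_L2_TLB_FLUSH_FIFO :
                                    HV_L1_TLB_FLUSH_FIFO;

            return &to_hv_vcpu(vcpu)->tlb_flush_fifo[i];
    }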
kvm_emulate.h 223 bool (*is_guest_mode)(struct x86_emulate_ctxt *ctxt); member
x86.c 662 if (!reinject && is_guest_mode(vcpu) && in kvm_multiple_exception()
698 if (!is_guest_mode(vcpu)) in kvm_multiple_exception()
787 if (is_guest_mode(vcpu) && fault->async_page_fault) in kvm_inject_page_fault()
2625 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_offset()
2641 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_multiplier()
8296 return is_guest_mode(emul_to_vcpu(ctxt)); in emulator_is_guest_mode()
8365 .is_guest_mode = emulator_is_guest_mode,
8560 if (!is_guest_mode(vcpu) && static_call(kvm_x86_get_cpl)(vcpu) == 0) { in handle_emulation_failure()
8577 if (WARN_ON_ONCE(is_guest_mode(vcpu)) || in reexecute_instruction()
8669 if (WARN_ON_ONCE(is_guest_mode(vcpu)) || in retry_instruction()
[all …]
emulate.c 5142 bool is_guest_mode = ctxt->ops->is_guest_mode(ctxt); in x86_emulate_insn() local
5190 if (unlikely(is_guest_mode) && ctxt->intercept) { in x86_emulate_insn()
5219 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5273 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) { in x86_emulate_insn()
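The kvm_emulate.h, x86.c 8296/8365 and emulate.c hits show how the instruction emulator reaches the same predicate without touching struct kvm_vcpu: it goes through a callback in its ops table. A sketch pieced together from those snippets (callback body and member name taken from the hits, the surrounding initializer abridged):

    /* kvm_emulate.h: the emulator asks its host whether the vCPU is in L2 */
    bool (*is_guest_mode)(struct x86_emulate_ctxt *ctxt);

    /* x86.c: KVM's implementation simply forwards to is_guest_mode() */
    static bool emulator_is_guest_mode(struct x86_emulate_ctxt *ctxt)
    {
            return is_guest_mode(emul_to_vcpu(ctxt));
    }

    static const struct x86_emulate_ops emulate_ops = {
            /* ... other callbacks elided ... */
            .is_guest_mode = emulator_is_guest_mode,
    };

x86_emulate_insn() then caches ctxt->ops->is_guest_mode(ctxt) in a local and uses it to decide whether nested intercept checks apply (the emulate.c 5142/5190/5219/5273 hits).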
lapic.c 3269 if (is_guest_mode(vcpu)) { in kvm_apic_accept_events()
/linux-6.6.21/arch/x86/kvm/vmx/
vmx.c 909 if (is_guest_mode(vcpu)) in vmx_update_exception_bitmap()
1759 if (!is_guest_mode(vcpu)) in vmx_update_emulated_instruction()
2214 if (is_guest_mode(vcpu)) in vmx_set_msr()
2219 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2226 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2245 if (is_guest_mode(vcpu) && get_vmcs12(vcpu)->vm_exit_controls & in vmx_set_msr()
2264 if (is_guest_mode(vcpu) && in vmx_set_msr()
2321 if (is_guest_mode(vcpu) && in vmx_set_msr()
3085 WARN_ON_ONCE(is_guest_mode(vcpu)); in enter_rmode()
3194 if (is_guest_mode(vcpu)) in vmx_get_current_vpid()
[all …]
nested.c 3299 if (is_guest_mode(vcpu) && !nested_get_vmcs12_pages(vcpu)) in vmx_get_nested_state_pages()
3311 if (WARN_ON_ONCE(!is_guest_mode(vcpu))) in nested_vmx_write_pml_buffer()
5345 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread()
5369 (is_guest_mode(vcpu) && in handle_vmread()
5377 if (!is_guest_mode(vcpu) && is_vmcs12_ext_field(field)) in handle_vmread()
5393 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in handle_vmread()
5451 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite()
5479 (is_guest_mode(vcpu) && in handle_vmwrite()
5513 if (!is_guest_mode(vcpu) && !is_shadow_field_rw(field)) in handle_vmwrite()
5535 if (!is_guest_mode(vcpu) && !is_shadow_field_rw(field)) { in handle_vmwrite()
[all …]
sgx.c 503 if (!vmcs12 && is_guest_mode(vcpu)) in vmx_write_encls_bitmap()
vmx.h 725 return enable_unrestricted_guest && (!is_guest_mode(vcpu) || in is_unrestricted_guest()
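Across the vmx/ hits the common idiom is "pick the control structure that matches the current level": while L2 is active (is_guest_mode() is true), nested code operates on vmcs12 or the shadow vmcs12, otherwise on L1's state. A hypothetical condensation of the handle_vmread()/handle_vmwrite() selection above (example_pick_vmcs12 is an illustrative name, not a kernel helper):

    /* Illustrative only: when L1 executes VMREAD/VMWRITE while L2 is running,
     * the access is served from the shadow vmcs12 rather than the ordinary one. */
    static struct vmcs12 *example_pick_vmcs12(struct kvm_vcpu *vcpu)
    {
            return is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu)
                                       : get_vmcs12(vcpu);
    }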
/linux-6.6.21/arch/x86/kvm/svm/
avic.c 131 if (is_guest_mode(&svm->vcpu) && in avic_deactivate_vmcb()
544 if (is_guest_mode(vcpu)) in avic_vcpu_get_apicv_inhibit_reasons()
941 pi.is_guest_mode = true; in avic_pi_update_irte()
952 if (!ret && pi.is_guest_mode) in avic_pi_update_irte()
964 pi.is_guest_mode = false; in avic_pi_update_irte()
svm.c 827 msrpm = is_guest_mode(vcpu) ? to_svm(vcpu)->nested.msrpm: in msr_write_intercepted()
1021 if (is_guest_mode(vcpu)) in svm_enable_lbrv()
1039 if (is_guest_mode(vcpu)) in svm_disable_lbrv()
1059 (is_guest_mode(vcpu) && guest_can_use(vcpu, X86_FEATURE_LBRV) && in svm_update_lbrv()
1686 if (is_guest_mode(&svm->vcpu)) { in svm_clear_vintr()
2366 if (is_guest_mode(vcpu)) { in emulate_svm_instr()
2408 if (!is_guest_mode(vcpu)) in gp_interception()
2626 if (!is_guest_mode(vcpu) || in check_selective_cr0_intercepted()
3024 is_guest_mode(vcpu)) in svm_set_msr()
3034 if (is_guest_mode(vcpu)) in svm_set_msr()
[all …]
svm.h 455 if (is_guest_mode(&svm->vcpu) && !nested_vgif_enabled(svm)) in get_vgif_vmcb()
516 if (is_guest_mode(&svm->vcpu)) in get_vnmi_vmcb_l1()
571 return is_guest_mode(vcpu) && (svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK); in nested_svm_virtualize_tpr()
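The svm.h helpers above use the same predicate to decide which VMCB a caller should see: while L2 runs, L1's architectural state lives in vmcb01 and svm->vmcb points at the currently active control block. A hypothetical sketch of that idiom (example_get_l1_vmcb is illustrative, not an actual kernel helper):

    /* Illustrative only: helpers that need L1 state while a nested guest is
     * running fall back to vmcb01, as get_vnmi_vmcb_l1() does above. */
    static inline struct vmcb *example_get_l1_vmcb(struct vcpu_svm *svm)
    {
            return is_guest_mode(&svm->vcpu) ? svm->vmcb01.ptr : svm->vmcb;
    }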
nested.c 130 if (!is_guest_mode(&svm->vcpu)) in recalc_intercepts()
1231 if (is_guest_mode(vcpu)) { in svm_leave_nested()
1600 if (is_guest_mode(vcpu)) { in svm_get_nested_state()
1615 if (!is_guest_mode(vcpu)) in svm_get_nested_state()
1737 if (is_guest_mode(vcpu)) in svm_set_nested_state()
1782 if (WARN_ON(!is_guest_mode(vcpu))) in svm_get_nested_state_pages()
/linux-6.6.21/include/linux/
amd-iommu.h 24 bool is_guest_mode; member
/linux-6.6.21/drivers/iommu/amd/
iommu.c 3634 pi_data->is_guest_mode = false; in amd_ir_set_vcpu_affinity()
3638 if (pi_data->is_guest_mode) { in amd_ir_set_vcpu_affinity()
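These two groups are the only hits where is_guest_mode is a data member rather than the KVM predicate: KVM's AVIC code fills amd_iommu_pi_data.is_guest_mode to tell the AMD IOMMU driver whether a device interrupt may be posted directly into the running guest, and amd_ir_set_vcpu_affinity() branches on it when programming the IRTE. A hypothetical condensation of how the flag is used (not the actual avic.c control flow; error handling, locking and the vcpu bookkeeping are omitted):

    /* Hypothetical sketch: ask the IOMMU layer for guest (vAPIC) posting, and
     * fall back to ordinary remapped delivery if it clears the flag or fails. */
    static int example_program_posted_irq(int host_irq)
    {
            struct amd_iommu_pi_data pi = { 0 };
            int ret;

            pi.is_guest_mode = true;
            ret = irq_set_vcpu_affinity(host_irq, &pi);
            if (ret || !pi.is_guest_mode) {
                    pi.is_guest_mode = false;     /* legacy remapping path */
                    ret = irq_set_vcpu_affinity(host_irq, &pi);
            }
            return ret;
    }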
/linux-6.6.21/arch/x86/kvm/mmu/
mmu.c 4269 if (is_guest_mode(vcpu)) { in __kvm_faultin_pfn()
5117 role.base.guest_mode = is_guest_mode(vcpu); in kvm_calc_cpu_role()
5760 if (!mmio_info_in_cache(vcpu, cr2_or_gpa, direct) && !is_guest_mode(vcpu)) in kvm_mmu_page_fault()
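In the MMU, finally, the predicate is folded into the page role (the mmu.c 5117 hit), which keeps shadow/TDP pages created for L2 separate from L1's in the MMU caches. A minimal illustration, assuming the union kvm_cpu_role layout of this kernel series:

    /* Sketch: the guest_mode role bit is part of the cache key, so pages built
     * while is_guest_mode(vcpu) was true are never reused for L1, and vice versa. */
    union kvm_cpu_role role = { 0 };

    role.base.guest_mode = is_guest_mode(vcpu);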