Lines Matching refs: vmcs12
55 #define SHADOW_FIELD_RO(x, y) { x, offsetof(struct vmcs12, y) },
62 #define SHADOW_FIELD_RW(x, y) { x, offsetof(struct vmcs12, y) },
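The two macros above build the shadow-field tables: each entry pairs a hardware VMCS field encoding with the offset of that field's mirror inside struct vmcs12. A minimal standalone sketch of the same table-building pattern; struct vmcs12_sketch, the encodings and the field choices below are illustrative stand-ins, not the kernel's definitions:

    #include <stddef.h>   /* offsetof */
    #include <stdint.h>

    /* Trimmed-down stand-in for struct vmcs12; the real layout is far larger. */
    struct vmcs12_sketch {
            uint64_t io_bitmap_a;
            uint32_t exception_bitmap;
    };

    struct shadow_vmcs_field {
            uint16_t encoding;   /* hardware VMCS field encoding */
            uint16_t offset;     /* offset of the mirror field in the struct */
    };

    /* The macro expands to a full initializer, trailing comma included. */
    #define SHADOW_FIELD_RW(x, y) { x, offsetof(struct vmcs12_sketch, y) },

    static const struct shadow_vmcs_field shadow_read_write_fields[] = {
            SHADOW_FIELD_RW(0x2000, io_bitmap_a)
            SHADOW_FIELD_RW(0x4004, exception_bitmap)
    };

Keeping the table as {encoding, offset} pairs lets a copy loop read or write arbitrary shadowed fields generically instead of open-coding one accessor per field.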
370 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_ept_inject_page_fault() local
392 nested_ept_invalidate_addr(vcpu, vmcs12->ept_pointer, in nested_ept_inject_page_fault()
397 vmcs12->guest_physical_address = fault->address; in nested_ept_inject_page_fault()
430 static bool nested_vmx_is_page_fault_vmexit(struct vmcs12 *vmcs12, in nested_vmx_is_page_fault_vmexit() argument
435 bit = (vmcs12->exception_bitmap & (1u << PF_VECTOR)) != 0; in nested_vmx_is_page_fault_vmexit()
437 (error_code & vmcs12->page_fault_error_code_mask) != in nested_vmx_is_page_fault_vmexit()
438 vmcs12->page_fault_error_code_match; in nested_vmx_is_page_fault_vmexit()
445 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_is_exception_vmexit() local
455 return nested_vmx_is_page_fault_vmexit(vmcs12, (u16)error_code); in nested_vmx_is_exception_vmexit()
457 return (vmcs12->exception_bitmap & (1u << vector)); in nested_vmx_is_exception_vmexit()
461 struct vmcs12 *vmcs12) in nested_vmx_check_io_bitmap_controls() argument
463 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_check_io_bitmap_controls()
466 if (CC(!page_address_valid(vcpu, vmcs12->io_bitmap_a)) || in nested_vmx_check_io_bitmap_controls()
467 CC(!page_address_valid(vcpu, vmcs12->io_bitmap_b))) in nested_vmx_check_io_bitmap_controls()
474 struct vmcs12 *vmcs12) in nested_vmx_check_msr_bitmap_controls() argument
476 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_check_msr_bitmap_controls()
479 if (CC(!page_address_valid(vcpu, vmcs12->msr_bitmap))) in nested_vmx_check_msr_bitmap_controls()
486 struct vmcs12 *vmcs12) in nested_vmx_check_tpr_shadow_controls() argument
488 if (!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) in nested_vmx_check_tpr_shadow_controls()
491 if (CC(!page_address_valid(vcpu, vmcs12->virtual_apic_page_addr))) in nested_vmx_check_tpr_shadow_controls()
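The I/O-bitmap, MSR-bitmap and TPR-shadow checks above all follow the same pattern: if the controlling execution bit is clear the check passes trivially, otherwise the guest-physical address supplied in vmcs12 must be page-aligned and fit within the guest's physical-address width. A generic sketch of that pattern; every name and helper here is illustrative, not the kernel's:

    #include <stdbool.h>
    #include <stdint.h>

    #define PAGE_MASK_SKETCH (~0xfffull)

    static bool gpa_within_maxphyaddr(uint64_t gpa, unsigned int maxphyaddr)
    {
            return (gpa >> maxphyaddr) == 0;
    }

    static bool page_address_ok(uint64_t gpa, unsigned int maxphyaddr)
    {
            /* Must be 4 KiB aligned and representable in the guest's PA width. */
            return (gpa & ~PAGE_MASK_SKETCH) == 0 &&
                   gpa_within_maxphyaddr(gpa, maxphyaddr);
    }

    static int check_bitmap_control(uint32_t exec_controls, uint32_t control_bit,
                                    uint64_t bitmap_gpa, unsigned int maxphyaddr)
    {
            if (!(exec_controls & control_bit))
                    return 0;       /* control disabled: nothing to validate */

            if (!page_address_ok(bitmap_gpa, maxphyaddr))
                    return -1;      /* would fail the VM-entry consistency check */

            return 0;
    }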
558 struct vmcs12 *vmcs12) in nested_vmx_prepare_msr_bitmap() argument
569 !nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_prepare_msr_bitmap()
585 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->msr_bitmap), map)) in nested_vmx_prepare_msr_bitmap()
597 if (nested_cpu_has_virt_x2apic_mode(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
598 if (nested_cpu_has_apic_reg_virt(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
617 if (nested_cpu_has_vid(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
657 struct vmcs12 *vmcs12) in nested_cache_shadow_vmcs12() argument
662 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_cache_shadow_vmcs12()
663 vmcs12->vmcs_link_pointer == INVALID_GPA) in nested_cache_shadow_vmcs12()
666 if (ghc->gpa != vmcs12->vmcs_link_pointer && in nested_cache_shadow_vmcs12()
668 vmcs12->vmcs_link_pointer, VMCS12_SIZE)) in nested_cache_shadow_vmcs12()
676 struct vmcs12 *vmcs12) in nested_flush_cached_shadow_vmcs12() argument
681 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_flush_cached_shadow_vmcs12()
682 vmcs12->vmcs_link_pointer == INVALID_GPA) in nested_flush_cached_shadow_vmcs12()
685 if (ghc->gpa != vmcs12->vmcs_link_pointer && in nested_flush_cached_shadow_vmcs12()
687 vmcs12->vmcs_link_pointer, VMCS12_SIZE)) in nested_flush_cached_shadow_vmcs12()
705 struct vmcs12 *vmcs12) in nested_vmx_check_apic_access_controls() argument
707 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES) && in nested_vmx_check_apic_access_controls()
708 CC(!page_address_valid(vcpu, vmcs12->apic_access_addr))) in nested_vmx_check_apic_access_controls()
715 struct vmcs12 *vmcs12) in nested_vmx_check_apicv_controls() argument
717 if (!nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
718 !nested_cpu_has_apic_reg_virt(vmcs12) && in nested_vmx_check_apicv_controls()
719 !nested_cpu_has_vid(vmcs12) && in nested_vmx_check_apicv_controls()
720 !nested_cpu_has_posted_intr(vmcs12)) in nested_vmx_check_apicv_controls()
727 if (CC(nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
728 nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES))) in nested_vmx_check_apicv_controls()
735 if (CC(nested_cpu_has_vid(vmcs12) && !nested_exit_on_intr(vcpu))) in nested_vmx_check_apicv_controls()
745 if (nested_cpu_has_posted_intr(vmcs12) && in nested_vmx_check_apicv_controls()
746 (CC(!nested_cpu_has_vid(vmcs12)) || in nested_vmx_check_apicv_controls()
748 CC((vmcs12->posted_intr_nv & 0xff00)) || in nested_vmx_check_apicv_controls()
749 CC(!kvm_vcpu_is_legal_aligned_gpa(vcpu, vmcs12->posted_intr_desc_addr, 64)))) in nested_vmx_check_apicv_controls()
753 if (CC(!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW))) in nested_vmx_check_apicv_controls()
773 struct vmcs12 *vmcs12) in nested_vmx_check_exit_msr_switch_controls() argument
776 vmcs12->vm_exit_msr_load_count, in nested_vmx_check_exit_msr_switch_controls()
777 vmcs12->vm_exit_msr_load_addr)) || in nested_vmx_check_exit_msr_switch_controls()
779 vmcs12->vm_exit_msr_store_count, in nested_vmx_check_exit_msr_switch_controls()
780 vmcs12->vm_exit_msr_store_addr))) in nested_vmx_check_exit_msr_switch_controls()
787 struct vmcs12 *vmcs12) in nested_vmx_check_entry_msr_switch_controls() argument
790 vmcs12->vm_entry_msr_load_count, in nested_vmx_check_entry_msr_switch_controls()
791 vmcs12->vm_entry_msr_load_addr))) in nested_vmx_check_entry_msr_switch_controls()
798 struct vmcs12 *vmcs12) in nested_vmx_check_pml_controls() argument
800 if (!nested_cpu_has_pml(vmcs12)) in nested_vmx_check_pml_controls()
803 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_vmx_check_pml_controls()
804 CC(!page_address_valid(vcpu, vmcs12->pml_address))) in nested_vmx_check_pml_controls()
811 struct vmcs12 *vmcs12) in nested_vmx_check_unrestricted_guest_controls() argument
813 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST) && in nested_vmx_check_unrestricted_guest_controls()
814 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_unrestricted_guest_controls()
820 struct vmcs12 *vmcs12) in nested_vmx_check_mode_based_ept_exec_controls() argument
822 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_MODE_BASED_EPT_EXEC) && in nested_vmx_check_mode_based_ept_exec_controls()
823 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_mode_based_ept_exec_controls()
829 struct vmcs12 *vmcs12) in nested_vmx_check_shadow_vmcs_controls() argument
831 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_check_shadow_vmcs_controls()
834 if (CC(!page_address_valid(vcpu, vmcs12->vmread_bitmap)) || in nested_vmx_check_shadow_vmcs_controls()
835 CC(!page_address_valid(vcpu, vmcs12->vmwrite_bitmap))) in nested_vmx_check_shadow_vmcs_controls()
1012 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_msr_store_list_has_msr() local
1013 u32 count = vmcs12->vm_exit_msr_store_count; in nested_msr_store_list_has_msr()
1014 u64 gpa = vmcs12->vm_exit_msr_store_addr; in nested_msr_store_list_has_msr()
1116 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_has_guest_tlb_tag() local
1119 (nested_cpu_has_vpid(vmcs12) && to_vmx(vcpu)->nested.vpid02); in nested_has_guest_tlb_tag()
1123 struct vmcs12 *vmcs12, in nested_vmx_transition_tlb_flush() argument
1142 if (!nested_cpu_has_vpid(vmcs12)) { in nested_vmx_transition_tlb_flush()
1157 if (is_vmenter && vmcs12->virtual_processor_id != vmx->nested.last_vpid) { in nested_vmx_transition_tlb_flush()
1158 vmx->nested.last_vpid = vmcs12->virtual_processor_id; in nested_vmx_transition_tlb_flush()
1498 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_shadow_to_vmcs12() local
1513 vmcs12_write_any(vmcs12, field.encoding, field.offset, val); in copy_shadow_to_vmcs12()
1533 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_vmcs12_to_shadow() local
1546 val = vmcs12_read_any(vmcs12, field.encoding, in copy_vmcs12_to_shadow()
1558 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_enlightened_to_vmcs12() local
1562 vmcs12->tpr_threshold = evmcs->tpr_threshold; in copy_enlightened_to_vmcs12()
1563 vmcs12->guest_rip = evmcs->guest_rip; in copy_enlightened_to_vmcs12()
1567 vmcs12->guest_rsp = evmcs->guest_rsp; in copy_enlightened_to_vmcs12()
1568 vmcs12->guest_rflags = evmcs->guest_rflags; in copy_enlightened_to_vmcs12()
1569 vmcs12->guest_interruptibility_info = in copy_enlightened_to_vmcs12()
1579 vmcs12->cpu_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1585 vmcs12->exception_bitmap = evmcs->exception_bitmap; in copy_enlightened_to_vmcs12()
1590 vmcs12->vm_entry_controls = evmcs->vm_entry_controls; in copy_enlightened_to_vmcs12()
1595 vmcs12->vm_entry_intr_info_field = in copy_enlightened_to_vmcs12()
1597 vmcs12->vm_entry_exception_error_code = in copy_enlightened_to_vmcs12()
1599 vmcs12->vm_entry_instruction_len = in copy_enlightened_to_vmcs12()
1605 vmcs12->host_ia32_pat = evmcs->host_ia32_pat; in copy_enlightened_to_vmcs12()
1606 vmcs12->host_ia32_efer = evmcs->host_ia32_efer; in copy_enlightened_to_vmcs12()
1607 vmcs12->host_cr0 = evmcs->host_cr0; in copy_enlightened_to_vmcs12()
1608 vmcs12->host_cr3 = evmcs->host_cr3; in copy_enlightened_to_vmcs12()
1609 vmcs12->host_cr4 = evmcs->host_cr4; in copy_enlightened_to_vmcs12()
1610 vmcs12->host_ia32_sysenter_esp = evmcs->host_ia32_sysenter_esp; in copy_enlightened_to_vmcs12()
1611 vmcs12->host_ia32_sysenter_eip = evmcs->host_ia32_sysenter_eip; in copy_enlightened_to_vmcs12()
1612 vmcs12->host_rip = evmcs->host_rip; in copy_enlightened_to_vmcs12()
1613 vmcs12->host_ia32_sysenter_cs = evmcs->host_ia32_sysenter_cs; in copy_enlightened_to_vmcs12()
1614 vmcs12->host_es_selector = evmcs->host_es_selector; in copy_enlightened_to_vmcs12()
1615 vmcs12->host_cs_selector = evmcs->host_cs_selector; in copy_enlightened_to_vmcs12()
1616 vmcs12->host_ss_selector = evmcs->host_ss_selector; in copy_enlightened_to_vmcs12()
1617 vmcs12->host_ds_selector = evmcs->host_ds_selector; in copy_enlightened_to_vmcs12()
1618 vmcs12->host_fs_selector = evmcs->host_fs_selector; in copy_enlightened_to_vmcs12()
1619 vmcs12->host_gs_selector = evmcs->host_gs_selector; in copy_enlightened_to_vmcs12()
1620 vmcs12->host_tr_selector = evmcs->host_tr_selector; in copy_enlightened_to_vmcs12()
1621 vmcs12->host_ia32_perf_global_ctrl = evmcs->host_ia32_perf_global_ctrl; in copy_enlightened_to_vmcs12()
1632 vmcs12->pin_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1634 vmcs12->vm_exit_controls = evmcs->vm_exit_controls; in copy_enlightened_to_vmcs12()
1635 vmcs12->secondary_vm_exec_control = in copy_enlightened_to_vmcs12()
1641 vmcs12->io_bitmap_a = evmcs->io_bitmap_a; in copy_enlightened_to_vmcs12()
1642 vmcs12->io_bitmap_b = evmcs->io_bitmap_b; in copy_enlightened_to_vmcs12()
1647 vmcs12->msr_bitmap = evmcs->msr_bitmap; in copy_enlightened_to_vmcs12()
1652 vmcs12->guest_es_base = evmcs->guest_es_base; in copy_enlightened_to_vmcs12()
1653 vmcs12->guest_cs_base = evmcs->guest_cs_base; in copy_enlightened_to_vmcs12()
1654 vmcs12->guest_ss_base = evmcs->guest_ss_base; in copy_enlightened_to_vmcs12()
1655 vmcs12->guest_ds_base = evmcs->guest_ds_base; in copy_enlightened_to_vmcs12()
1656 vmcs12->guest_fs_base = evmcs->guest_fs_base; in copy_enlightened_to_vmcs12()
1657 vmcs12->guest_gs_base = evmcs->guest_gs_base; in copy_enlightened_to_vmcs12()
1658 vmcs12->guest_ldtr_base = evmcs->guest_ldtr_base; in copy_enlightened_to_vmcs12()
1659 vmcs12->guest_tr_base = evmcs->guest_tr_base; in copy_enlightened_to_vmcs12()
1660 vmcs12->guest_gdtr_base = evmcs->guest_gdtr_base; in copy_enlightened_to_vmcs12()
1661 vmcs12->guest_idtr_base = evmcs->guest_idtr_base; in copy_enlightened_to_vmcs12()
1662 vmcs12->guest_es_limit = evmcs->guest_es_limit; in copy_enlightened_to_vmcs12()
1663 vmcs12->guest_cs_limit = evmcs->guest_cs_limit; in copy_enlightened_to_vmcs12()
1664 vmcs12->guest_ss_limit = evmcs->guest_ss_limit; in copy_enlightened_to_vmcs12()
1665 vmcs12->guest_ds_limit = evmcs->guest_ds_limit; in copy_enlightened_to_vmcs12()
1666 vmcs12->guest_fs_limit = evmcs->guest_fs_limit; in copy_enlightened_to_vmcs12()
1667 vmcs12->guest_gs_limit = evmcs->guest_gs_limit; in copy_enlightened_to_vmcs12()
1668 vmcs12->guest_ldtr_limit = evmcs->guest_ldtr_limit; in copy_enlightened_to_vmcs12()
1669 vmcs12->guest_tr_limit = evmcs->guest_tr_limit; in copy_enlightened_to_vmcs12()
1670 vmcs12->guest_gdtr_limit = evmcs->guest_gdtr_limit; in copy_enlightened_to_vmcs12()
1671 vmcs12->guest_idtr_limit = evmcs->guest_idtr_limit; in copy_enlightened_to_vmcs12()
1672 vmcs12->guest_es_ar_bytes = evmcs->guest_es_ar_bytes; in copy_enlightened_to_vmcs12()
1673 vmcs12->guest_cs_ar_bytes = evmcs->guest_cs_ar_bytes; in copy_enlightened_to_vmcs12()
1674 vmcs12->guest_ss_ar_bytes = evmcs->guest_ss_ar_bytes; in copy_enlightened_to_vmcs12()
1675 vmcs12->guest_ds_ar_bytes = evmcs->guest_ds_ar_bytes; in copy_enlightened_to_vmcs12()
1676 vmcs12->guest_fs_ar_bytes = evmcs->guest_fs_ar_bytes; in copy_enlightened_to_vmcs12()
1677 vmcs12->guest_gs_ar_bytes = evmcs->guest_gs_ar_bytes; in copy_enlightened_to_vmcs12()
1678 vmcs12->guest_ldtr_ar_bytes = evmcs->guest_ldtr_ar_bytes; in copy_enlightened_to_vmcs12()
1679 vmcs12->guest_tr_ar_bytes = evmcs->guest_tr_ar_bytes; in copy_enlightened_to_vmcs12()
1680 vmcs12->guest_es_selector = evmcs->guest_es_selector; in copy_enlightened_to_vmcs12()
1681 vmcs12->guest_cs_selector = evmcs->guest_cs_selector; in copy_enlightened_to_vmcs12()
1682 vmcs12->guest_ss_selector = evmcs->guest_ss_selector; in copy_enlightened_to_vmcs12()
1683 vmcs12->guest_ds_selector = evmcs->guest_ds_selector; in copy_enlightened_to_vmcs12()
1684 vmcs12->guest_fs_selector = evmcs->guest_fs_selector; in copy_enlightened_to_vmcs12()
1685 vmcs12->guest_gs_selector = evmcs->guest_gs_selector; in copy_enlightened_to_vmcs12()
1686 vmcs12->guest_ldtr_selector = evmcs->guest_ldtr_selector; in copy_enlightened_to_vmcs12()
1687 vmcs12->guest_tr_selector = evmcs->guest_tr_selector; in copy_enlightened_to_vmcs12()
1692 vmcs12->tsc_offset = evmcs->tsc_offset; in copy_enlightened_to_vmcs12()
1693 vmcs12->virtual_apic_page_addr = evmcs->virtual_apic_page_addr; in copy_enlightened_to_vmcs12()
1694 vmcs12->xss_exit_bitmap = evmcs->xss_exit_bitmap; in copy_enlightened_to_vmcs12()
1695 vmcs12->encls_exiting_bitmap = evmcs->encls_exiting_bitmap; in copy_enlightened_to_vmcs12()
1696 vmcs12->tsc_multiplier = evmcs->tsc_multiplier; in copy_enlightened_to_vmcs12()
1701 vmcs12->cr0_guest_host_mask = evmcs->cr0_guest_host_mask; in copy_enlightened_to_vmcs12()
1702 vmcs12->cr4_guest_host_mask = evmcs->cr4_guest_host_mask; in copy_enlightened_to_vmcs12()
1703 vmcs12->cr0_read_shadow = evmcs->cr0_read_shadow; in copy_enlightened_to_vmcs12()
1704 vmcs12->cr4_read_shadow = evmcs->cr4_read_shadow; in copy_enlightened_to_vmcs12()
1705 vmcs12->guest_cr0 = evmcs->guest_cr0; in copy_enlightened_to_vmcs12()
1706 vmcs12->guest_cr3 = evmcs->guest_cr3; in copy_enlightened_to_vmcs12()
1707 vmcs12->guest_cr4 = evmcs->guest_cr4; in copy_enlightened_to_vmcs12()
1708 vmcs12->guest_dr7 = evmcs->guest_dr7; in copy_enlightened_to_vmcs12()
1713 vmcs12->host_fs_base = evmcs->host_fs_base; in copy_enlightened_to_vmcs12()
1714 vmcs12->host_gs_base = evmcs->host_gs_base; in copy_enlightened_to_vmcs12()
1715 vmcs12->host_tr_base = evmcs->host_tr_base; in copy_enlightened_to_vmcs12()
1716 vmcs12->host_gdtr_base = evmcs->host_gdtr_base; in copy_enlightened_to_vmcs12()
1717 vmcs12->host_idtr_base = evmcs->host_idtr_base; in copy_enlightened_to_vmcs12()
1718 vmcs12->host_rsp = evmcs->host_rsp; in copy_enlightened_to_vmcs12()
1723 vmcs12->ept_pointer = evmcs->ept_pointer; in copy_enlightened_to_vmcs12()
1724 vmcs12->virtual_processor_id = evmcs->virtual_processor_id; in copy_enlightened_to_vmcs12()
1729 vmcs12->vmcs_link_pointer = evmcs->vmcs_link_pointer; in copy_enlightened_to_vmcs12()
1730 vmcs12->guest_ia32_debugctl = evmcs->guest_ia32_debugctl; in copy_enlightened_to_vmcs12()
1731 vmcs12->guest_ia32_pat = evmcs->guest_ia32_pat; in copy_enlightened_to_vmcs12()
1732 vmcs12->guest_ia32_efer = evmcs->guest_ia32_efer; in copy_enlightened_to_vmcs12()
1733 vmcs12->guest_pdptr0 = evmcs->guest_pdptr0; in copy_enlightened_to_vmcs12()
1734 vmcs12->guest_pdptr1 = evmcs->guest_pdptr1; in copy_enlightened_to_vmcs12()
1735 vmcs12->guest_pdptr2 = evmcs->guest_pdptr2; in copy_enlightened_to_vmcs12()
1736 vmcs12->guest_pdptr3 = evmcs->guest_pdptr3; in copy_enlightened_to_vmcs12()
1737 vmcs12->guest_pending_dbg_exceptions = in copy_enlightened_to_vmcs12()
1739 vmcs12->guest_sysenter_esp = evmcs->guest_sysenter_esp; in copy_enlightened_to_vmcs12()
1740 vmcs12->guest_sysenter_eip = evmcs->guest_sysenter_eip; in copy_enlightened_to_vmcs12()
1741 vmcs12->guest_bndcfgs = evmcs->guest_bndcfgs; in copy_enlightened_to_vmcs12()
1742 vmcs12->guest_activity_state = evmcs->guest_activity_state; in copy_enlightened_to_vmcs12()
1743 vmcs12->guest_sysenter_cs = evmcs->guest_sysenter_cs; in copy_enlightened_to_vmcs12()
1744 vmcs12->guest_ia32_perf_global_ctrl = evmcs->guest_ia32_perf_global_ctrl; in copy_enlightened_to_vmcs12()
1794 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_vmcs12_to_enlightened() local
1872 evmcs->guest_es_selector = vmcs12->guest_es_selector; in copy_vmcs12_to_enlightened()
1873 evmcs->guest_cs_selector = vmcs12->guest_cs_selector; in copy_vmcs12_to_enlightened()
1874 evmcs->guest_ss_selector = vmcs12->guest_ss_selector; in copy_vmcs12_to_enlightened()
1875 evmcs->guest_ds_selector = vmcs12->guest_ds_selector; in copy_vmcs12_to_enlightened()
1876 evmcs->guest_fs_selector = vmcs12->guest_fs_selector; in copy_vmcs12_to_enlightened()
1877 evmcs->guest_gs_selector = vmcs12->guest_gs_selector; in copy_vmcs12_to_enlightened()
1878 evmcs->guest_ldtr_selector = vmcs12->guest_ldtr_selector; in copy_vmcs12_to_enlightened()
1879 evmcs->guest_tr_selector = vmcs12->guest_tr_selector; in copy_vmcs12_to_enlightened()
1881 evmcs->guest_es_limit = vmcs12->guest_es_limit; in copy_vmcs12_to_enlightened()
1882 evmcs->guest_cs_limit = vmcs12->guest_cs_limit; in copy_vmcs12_to_enlightened()
1883 evmcs->guest_ss_limit = vmcs12->guest_ss_limit; in copy_vmcs12_to_enlightened()
1884 evmcs->guest_ds_limit = vmcs12->guest_ds_limit; in copy_vmcs12_to_enlightened()
1885 evmcs->guest_fs_limit = vmcs12->guest_fs_limit; in copy_vmcs12_to_enlightened()
1886 evmcs->guest_gs_limit = vmcs12->guest_gs_limit; in copy_vmcs12_to_enlightened()
1887 evmcs->guest_ldtr_limit = vmcs12->guest_ldtr_limit; in copy_vmcs12_to_enlightened()
1888 evmcs->guest_tr_limit = vmcs12->guest_tr_limit; in copy_vmcs12_to_enlightened()
1889 evmcs->guest_gdtr_limit = vmcs12->guest_gdtr_limit; in copy_vmcs12_to_enlightened()
1890 evmcs->guest_idtr_limit = vmcs12->guest_idtr_limit; in copy_vmcs12_to_enlightened()
1892 evmcs->guest_es_ar_bytes = vmcs12->guest_es_ar_bytes; in copy_vmcs12_to_enlightened()
1893 evmcs->guest_cs_ar_bytes = vmcs12->guest_cs_ar_bytes; in copy_vmcs12_to_enlightened()
1894 evmcs->guest_ss_ar_bytes = vmcs12->guest_ss_ar_bytes; in copy_vmcs12_to_enlightened()
1895 evmcs->guest_ds_ar_bytes = vmcs12->guest_ds_ar_bytes; in copy_vmcs12_to_enlightened()
1896 evmcs->guest_fs_ar_bytes = vmcs12->guest_fs_ar_bytes; in copy_vmcs12_to_enlightened()
1897 evmcs->guest_gs_ar_bytes = vmcs12->guest_gs_ar_bytes; in copy_vmcs12_to_enlightened()
1898 evmcs->guest_ldtr_ar_bytes = vmcs12->guest_ldtr_ar_bytes; in copy_vmcs12_to_enlightened()
1899 evmcs->guest_tr_ar_bytes = vmcs12->guest_tr_ar_bytes; in copy_vmcs12_to_enlightened()
1901 evmcs->guest_es_base = vmcs12->guest_es_base; in copy_vmcs12_to_enlightened()
1902 evmcs->guest_cs_base = vmcs12->guest_cs_base; in copy_vmcs12_to_enlightened()
1903 evmcs->guest_ss_base = vmcs12->guest_ss_base; in copy_vmcs12_to_enlightened()
1904 evmcs->guest_ds_base = vmcs12->guest_ds_base; in copy_vmcs12_to_enlightened()
1905 evmcs->guest_fs_base = vmcs12->guest_fs_base; in copy_vmcs12_to_enlightened()
1906 evmcs->guest_gs_base = vmcs12->guest_gs_base; in copy_vmcs12_to_enlightened()
1907 evmcs->guest_ldtr_base = vmcs12->guest_ldtr_base; in copy_vmcs12_to_enlightened()
1908 evmcs->guest_tr_base = vmcs12->guest_tr_base; in copy_vmcs12_to_enlightened()
1909 evmcs->guest_gdtr_base = vmcs12->guest_gdtr_base; in copy_vmcs12_to_enlightened()
1910 evmcs->guest_idtr_base = vmcs12->guest_idtr_base; in copy_vmcs12_to_enlightened()
1912 evmcs->guest_ia32_pat = vmcs12->guest_ia32_pat; in copy_vmcs12_to_enlightened()
1913 evmcs->guest_ia32_efer = vmcs12->guest_ia32_efer; in copy_vmcs12_to_enlightened()
1915 evmcs->guest_pdptr0 = vmcs12->guest_pdptr0; in copy_vmcs12_to_enlightened()
1916 evmcs->guest_pdptr1 = vmcs12->guest_pdptr1; in copy_vmcs12_to_enlightened()
1917 evmcs->guest_pdptr2 = vmcs12->guest_pdptr2; in copy_vmcs12_to_enlightened()
1918 evmcs->guest_pdptr3 = vmcs12->guest_pdptr3; in copy_vmcs12_to_enlightened()
1921 vmcs12->guest_pending_dbg_exceptions; in copy_vmcs12_to_enlightened()
1922 evmcs->guest_sysenter_esp = vmcs12->guest_sysenter_esp; in copy_vmcs12_to_enlightened()
1923 evmcs->guest_sysenter_eip = vmcs12->guest_sysenter_eip; in copy_vmcs12_to_enlightened()
1925 evmcs->guest_activity_state = vmcs12->guest_activity_state; in copy_vmcs12_to_enlightened()
1926 evmcs->guest_sysenter_cs = vmcs12->guest_sysenter_cs; in copy_vmcs12_to_enlightened()
1928 evmcs->guest_cr0 = vmcs12->guest_cr0; in copy_vmcs12_to_enlightened()
1929 evmcs->guest_cr3 = vmcs12->guest_cr3; in copy_vmcs12_to_enlightened()
1930 evmcs->guest_cr4 = vmcs12->guest_cr4; in copy_vmcs12_to_enlightened()
1931 evmcs->guest_dr7 = vmcs12->guest_dr7; in copy_vmcs12_to_enlightened()
1933 evmcs->guest_physical_address = vmcs12->guest_physical_address; in copy_vmcs12_to_enlightened()
1935 evmcs->vm_instruction_error = vmcs12->vm_instruction_error; in copy_vmcs12_to_enlightened()
1936 evmcs->vm_exit_reason = vmcs12->vm_exit_reason; in copy_vmcs12_to_enlightened()
1937 evmcs->vm_exit_intr_info = vmcs12->vm_exit_intr_info; in copy_vmcs12_to_enlightened()
1938 evmcs->vm_exit_intr_error_code = vmcs12->vm_exit_intr_error_code; in copy_vmcs12_to_enlightened()
1939 evmcs->idt_vectoring_info_field = vmcs12->idt_vectoring_info_field; in copy_vmcs12_to_enlightened()
1940 evmcs->idt_vectoring_error_code = vmcs12->idt_vectoring_error_code; in copy_vmcs12_to_enlightened()
1941 evmcs->vm_exit_instruction_len = vmcs12->vm_exit_instruction_len; in copy_vmcs12_to_enlightened()
1942 evmcs->vmx_instruction_info = vmcs12->vmx_instruction_info; in copy_vmcs12_to_enlightened()
1944 evmcs->exit_qualification = vmcs12->exit_qualification; in copy_vmcs12_to_enlightened()
1946 evmcs->guest_linear_address = vmcs12->guest_linear_address; in copy_vmcs12_to_enlightened()
1947 evmcs->guest_rsp = vmcs12->guest_rsp; in copy_vmcs12_to_enlightened()
1948 evmcs->guest_rflags = vmcs12->guest_rflags; in copy_vmcs12_to_enlightened()
1951 vmcs12->guest_interruptibility_info; in copy_vmcs12_to_enlightened()
1952 evmcs->cpu_based_vm_exec_control = vmcs12->cpu_based_vm_exec_control; in copy_vmcs12_to_enlightened()
1953 evmcs->vm_entry_controls = vmcs12->vm_entry_controls; in copy_vmcs12_to_enlightened()
1954 evmcs->vm_entry_intr_info_field = vmcs12->vm_entry_intr_info_field; in copy_vmcs12_to_enlightened()
1956 vmcs12->vm_entry_exception_error_code; in copy_vmcs12_to_enlightened()
1957 evmcs->vm_entry_instruction_len = vmcs12->vm_entry_instruction_len; in copy_vmcs12_to_enlightened()
1959 evmcs->guest_rip = vmcs12->guest_rip; in copy_vmcs12_to_enlightened()
1961 evmcs->guest_bndcfgs = vmcs12->guest_bndcfgs; in copy_vmcs12_to_enlightened()
2034 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_handle_enlightened_vmptrld() local
2035 memset(vmcs12, 0, sizeof(*vmcs12)); in nested_vmx_handle_enlightened_vmptrld()
2036 vmcs12->hdr.revision_id = VMCS12_REVISION; in nested_vmx_handle_enlightened_vmptrld()
2082 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in vmx_calc_preemption_timer_value() local
2089 vmcs12->vmx_preemption_timer_value + l1_scaled_tsc; in vmx_calc_preemption_timer_value()
2120 static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in nested_vmx_calc_efer() argument
2123 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) in nested_vmx_calc_efer()
2124 return vmcs12->guest_ia32_efer; in nested_vmx_calc_efer()
2125 else if (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) in nested_vmx_calc_efer()
2193 struct vmcs12 *vmcs12) in prepare_vmcs02_early_rare() argument
2200 if (nested_cpu_has_vpid(vmcs12) && vmx->nested.vpid02) in prepare_vmcs02_early_rare()
2208 struct vmcs12 *vmcs12) in prepare_vmcs02_early() argument
2211 u64 guest_efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02_early()
2214 prepare_vmcs02_early_rare(vmx, vmcs12); in prepare_vmcs02_early()
2220 exec_control |= (vmcs12->pin_based_vm_exec_control & in prepare_vmcs02_early()
2225 if (nested_cpu_has_posted_intr(vmcs12)) in prepare_vmcs02_early()
2226 vmx->nested.posted_intr_nv = vmcs12->posted_intr_nv; in prepare_vmcs02_early()
2238 exec_control |= vmcs12->cpu_based_vm_exec_control; in prepare_vmcs02_early()
2242 vmcs_write32(TPR_THRESHOLD, vmcs12->tpr_threshold); in prepare_vmcs02_early()
2285 if (nested_cpu_has(vmcs12, in prepare_vmcs02_early()
2287 exec_control |= vmcs12->secondary_vm_exec_control; in prepare_vmcs02_early()
2300 (vmcs12->guest_cr4 & X86_CR4_UMIP)) in prepare_vmcs02_early()
2305 vmcs12->guest_intr_status); in prepare_vmcs02_early()
2307 if (!nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST)) in prepare_vmcs02_early()
2311 vmx_write_encls_bitmap(&vmx->vcpu, vmcs12); in prepare_vmcs02_early()
2329 exec_control |= (vmcs12->vm_entry_controls & in prepare_vmcs02_early()
2359 vmcs12->vm_entry_intr_info_field); in prepare_vmcs02_early()
2361 vmcs12->vm_entry_exception_error_code); in prepare_vmcs02_early()
2363 vmcs12->vm_entry_instruction_len); in prepare_vmcs02_early()
2365 vmcs12->guest_interruptibility_info); in prepare_vmcs02_early()
2367 !(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI); in prepare_vmcs02_early()
2373 static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_rare() argument
2379 vmcs_write16(GUEST_ES_SELECTOR, vmcs12->guest_es_selector); in prepare_vmcs02_rare()
2380 vmcs_write16(GUEST_CS_SELECTOR, vmcs12->guest_cs_selector); in prepare_vmcs02_rare()
2381 vmcs_write16(GUEST_SS_SELECTOR, vmcs12->guest_ss_selector); in prepare_vmcs02_rare()
2382 vmcs_write16(GUEST_DS_SELECTOR, vmcs12->guest_ds_selector); in prepare_vmcs02_rare()
2383 vmcs_write16(GUEST_FS_SELECTOR, vmcs12->guest_fs_selector); in prepare_vmcs02_rare()
2384 vmcs_write16(GUEST_GS_SELECTOR, vmcs12->guest_gs_selector); in prepare_vmcs02_rare()
2385 vmcs_write16(GUEST_LDTR_SELECTOR, vmcs12->guest_ldtr_selector); in prepare_vmcs02_rare()
2386 vmcs_write16(GUEST_TR_SELECTOR, vmcs12->guest_tr_selector); in prepare_vmcs02_rare()
2387 vmcs_write32(GUEST_ES_LIMIT, vmcs12->guest_es_limit); in prepare_vmcs02_rare()
2388 vmcs_write32(GUEST_CS_LIMIT, vmcs12->guest_cs_limit); in prepare_vmcs02_rare()
2389 vmcs_write32(GUEST_SS_LIMIT, vmcs12->guest_ss_limit); in prepare_vmcs02_rare()
2390 vmcs_write32(GUEST_DS_LIMIT, vmcs12->guest_ds_limit); in prepare_vmcs02_rare()
2391 vmcs_write32(GUEST_FS_LIMIT, vmcs12->guest_fs_limit); in prepare_vmcs02_rare()
2392 vmcs_write32(GUEST_GS_LIMIT, vmcs12->guest_gs_limit); in prepare_vmcs02_rare()
2393 vmcs_write32(GUEST_LDTR_LIMIT, vmcs12->guest_ldtr_limit); in prepare_vmcs02_rare()
2394 vmcs_write32(GUEST_TR_LIMIT, vmcs12->guest_tr_limit); in prepare_vmcs02_rare()
2395 vmcs_write32(GUEST_GDTR_LIMIT, vmcs12->guest_gdtr_limit); in prepare_vmcs02_rare()
2396 vmcs_write32(GUEST_IDTR_LIMIT, vmcs12->guest_idtr_limit); in prepare_vmcs02_rare()
2397 vmcs_write32(GUEST_CS_AR_BYTES, vmcs12->guest_cs_ar_bytes); in prepare_vmcs02_rare()
2398 vmcs_write32(GUEST_SS_AR_BYTES, vmcs12->guest_ss_ar_bytes); in prepare_vmcs02_rare()
2399 vmcs_write32(GUEST_ES_AR_BYTES, vmcs12->guest_es_ar_bytes); in prepare_vmcs02_rare()
2400 vmcs_write32(GUEST_DS_AR_BYTES, vmcs12->guest_ds_ar_bytes); in prepare_vmcs02_rare()
2401 vmcs_write32(GUEST_FS_AR_BYTES, vmcs12->guest_fs_ar_bytes); in prepare_vmcs02_rare()
2402 vmcs_write32(GUEST_GS_AR_BYTES, vmcs12->guest_gs_ar_bytes); in prepare_vmcs02_rare()
2403 vmcs_write32(GUEST_LDTR_AR_BYTES, vmcs12->guest_ldtr_ar_bytes); in prepare_vmcs02_rare()
2404 vmcs_write32(GUEST_TR_AR_BYTES, vmcs12->guest_tr_ar_bytes); in prepare_vmcs02_rare()
2405 vmcs_writel(GUEST_ES_BASE, vmcs12->guest_es_base); in prepare_vmcs02_rare()
2406 vmcs_writel(GUEST_CS_BASE, vmcs12->guest_cs_base); in prepare_vmcs02_rare()
2407 vmcs_writel(GUEST_SS_BASE, vmcs12->guest_ss_base); in prepare_vmcs02_rare()
2408 vmcs_writel(GUEST_DS_BASE, vmcs12->guest_ds_base); in prepare_vmcs02_rare()
2409 vmcs_writel(GUEST_FS_BASE, vmcs12->guest_fs_base); in prepare_vmcs02_rare()
2410 vmcs_writel(GUEST_GS_BASE, vmcs12->guest_gs_base); in prepare_vmcs02_rare()
2411 vmcs_writel(GUEST_LDTR_BASE, vmcs12->guest_ldtr_base); in prepare_vmcs02_rare()
2412 vmcs_writel(GUEST_TR_BASE, vmcs12->guest_tr_base); in prepare_vmcs02_rare()
2413 vmcs_writel(GUEST_GDTR_BASE, vmcs12->guest_gdtr_base); in prepare_vmcs02_rare()
2414 vmcs_writel(GUEST_IDTR_BASE, vmcs12->guest_idtr_base); in prepare_vmcs02_rare()
2421 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->guest_sysenter_cs); in prepare_vmcs02_rare()
2423 vmcs12->guest_pending_dbg_exceptions); in prepare_vmcs02_rare()
2424 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->guest_sysenter_esp); in prepare_vmcs02_rare()
2425 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->guest_sysenter_eip); in prepare_vmcs02_rare()
2432 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02_rare()
2433 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02_rare()
2434 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02_rare()
2435 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02_rare()
2439 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) in prepare_vmcs02_rare()
2440 vmcs_write64(GUEST_BNDCFGS, vmcs12->guest_bndcfgs); in prepare_vmcs02_rare()
2443 if (nested_cpu_has_xsaves(vmcs12)) in prepare_vmcs02_rare()
2444 vmcs_write64(XSS_EXIT_BITMAP, vmcs12->xss_exit_bitmap); in prepare_vmcs02_rare()
2467 vmcs_write32(PAGE_FAULT_ERROR_CODE_MASK, vmcs12->page_fault_error_code_mask); in prepare_vmcs02_rare()
2468 vmcs_write32(PAGE_FAULT_ERROR_CODE_MATCH, vmcs12->page_fault_error_code_match); in prepare_vmcs02_rare()
2472 vmcs_write64(EOI_EXIT_BITMAP0, vmcs12->eoi_exit_bitmap0); in prepare_vmcs02_rare()
2473 vmcs_write64(EOI_EXIT_BITMAP1, vmcs12->eoi_exit_bitmap1); in prepare_vmcs02_rare()
2474 vmcs_write64(EOI_EXIT_BITMAP2, vmcs12->eoi_exit_bitmap2); in prepare_vmcs02_rare()
2475 vmcs_write64(EOI_EXIT_BITMAP3, vmcs12->eoi_exit_bitmap3); in prepare_vmcs02_rare()
2502 static int prepare_vmcs02(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs02() argument
2510 prepare_vmcs02_rare(vmx, vmcs12); in prepare_vmcs02()
2519 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) { in prepare_vmcs02()
2520 kvm_set_dr(vcpu, 7, vmcs12->guest_dr7); in prepare_vmcs02()
2521 vmcs_write64(GUEST_IA32_DEBUGCTL, vmcs12->guest_ia32_debugctl); in prepare_vmcs02()
2527 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS))) in prepare_vmcs02()
2529 vmx_set_rflags(vcpu, vmcs12->guest_rflags); in prepare_vmcs02()
2536 vcpu->arch.cr0_guest_owned_bits &= ~vmcs12->cr0_guest_host_mask; in prepare_vmcs02()
2540 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT)) { in prepare_vmcs02()
2541 vmcs_write64(GUEST_IA32_PAT, vmcs12->guest_ia32_pat); in prepare_vmcs02()
2542 vcpu->arch.pat = vmcs12->guest_ia32_pat; in prepare_vmcs02()
2560 nested_vmx_transition_tlb_flush(vcpu, vmcs12, true); in prepare_vmcs02()
2562 if (nested_cpu_has_ept(vmcs12)) in prepare_vmcs02()
2573 vmx_set_cr0(vcpu, vmcs12->guest_cr0); in prepare_vmcs02()
2574 vmcs_writel(CR0_READ_SHADOW, nested_read_cr0(vmcs12)); in prepare_vmcs02()
2576 vmx_set_cr4(vcpu, vmcs12->guest_cr4); in prepare_vmcs02()
2577 vmcs_writel(CR4_READ_SHADOW, nested_read_cr4(vmcs12)); in prepare_vmcs02()
2579 vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02()
2599 if (nested_vmx_load_cr3(vcpu, vmcs12->guest_cr3, nested_cpu_has_ept(vmcs12), in prepare_vmcs02()
2611 vmcs_writel(GUEST_CR3, vmcs12->guest_cr3); in prepare_vmcs02()
2614 if (load_guest_pdptrs_vmcs12 && nested_cpu_has_ept(vmcs12) && in prepare_vmcs02()
2616 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02()
2617 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02()
2618 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02()
2619 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02()
2622 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PERF_GLOBAL_CTRL) && in prepare_vmcs02()
2625 vmcs12->guest_ia32_perf_global_ctrl))) { in prepare_vmcs02()
2630 kvm_rsp_write(vcpu, vmcs12->guest_rsp); in prepare_vmcs02()
2631 kvm_rip_write(vcpu, vmcs12->guest_rip); in prepare_vmcs02()
2646 static int nested_vmx_check_nmi_controls(struct vmcs12 *vmcs12) in nested_vmx_check_nmi_controls() argument
2648 if (CC(!nested_cpu_has_nmi_exiting(vmcs12) && in nested_vmx_check_nmi_controls()
2649 nested_cpu_has_virtual_nmis(vmcs12))) in nested_vmx_check_nmi_controls()
2652 if (CC(!nested_cpu_has_virtual_nmis(vmcs12) && in nested_vmx_check_nmi_controls()
2653 nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING))) in nested_vmx_check_nmi_controls()
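nested_vmx_check_nmi_controls() enforces the two consistency rules visible above: virtual NMIs require NMI exiting, and NMI-window exiting requires virtual NMIs. A tiny standalone predicate expressing the same rules (names are illustrative):

    #include <stdbool.h>

    /*
     * Consistency rules for the nested NMI controls:
     *   - virtual NMIs      => NMI exiting must also be enabled
     *   - NMI-window exiting => virtual NMIs must also be enabled
     */
    static bool nmi_controls_consistent(bool nmi_exiting, bool virtual_nmis,
                                        bool nmi_window_exiting)
    {
            if (virtual_nmis && !nmi_exiting)
                    return false;
            if (nmi_window_exiting && !virtual_nmis)
                    return false;
            return true;
    }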
2708 struct vmcs12 *vmcs12) in nested_check_vm_execution_controls() argument
2712 if (CC(!vmx_control_verify(vmcs12->pin_based_vm_exec_control, in nested_check_vm_execution_controls()
2715 CC(!vmx_control_verify(vmcs12->cpu_based_vm_exec_control, in nested_check_vm_execution_controls()
2720 if (nested_cpu_has(vmcs12, CPU_BASED_ACTIVATE_SECONDARY_CONTROLS) && in nested_check_vm_execution_controls()
2721 CC(!vmx_control_verify(vmcs12->secondary_vm_exec_control, in nested_check_vm_execution_controls()
2726 if (CC(vmcs12->cr3_target_count > nested_cpu_vmx_misc_cr3_count(vcpu)) || in nested_check_vm_execution_controls()
2727 nested_vmx_check_io_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2728 nested_vmx_check_msr_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2729 nested_vmx_check_tpr_shadow_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2730 nested_vmx_check_apic_access_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2731 nested_vmx_check_apicv_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2732 nested_vmx_check_nmi_controls(vmcs12) || in nested_check_vm_execution_controls()
2733 nested_vmx_check_pml_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2734 nested_vmx_check_unrestricted_guest_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2735 nested_vmx_check_mode_based_ept_exec_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2736 nested_vmx_check_shadow_vmcs_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2737 CC(nested_cpu_has_vpid(vmcs12) && !vmcs12->virtual_processor_id)) in nested_check_vm_execution_controls()
2740 if (!nested_cpu_has_preemption_timer(vmcs12) && in nested_check_vm_execution_controls()
2741 nested_cpu_has_save_preemption_timer(vmcs12)) in nested_check_vm_execution_controls()
2744 if (nested_cpu_has_ept(vmcs12) && in nested_check_vm_execution_controls()
2745 CC(!nested_vmx_check_eptp(vcpu, vmcs12->ept_pointer))) in nested_check_vm_execution_controls()
2748 if (nested_cpu_has_vmfunc(vmcs12)) { in nested_check_vm_execution_controls()
2749 if (CC(vmcs12->vm_function_control & in nested_check_vm_execution_controls()
2753 if (nested_cpu_has_eptp_switching(vmcs12)) { in nested_check_vm_execution_controls()
2754 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_check_vm_execution_controls()
2755 CC(!page_address_valid(vcpu, vmcs12->eptp_list_address))) in nested_check_vm_execution_controls()
2767 struct vmcs12 *vmcs12) in nested_check_vm_exit_controls() argument
2771 if (CC(!vmx_control_verify(vmcs12->vm_exit_controls, in nested_check_vm_exit_controls()
2774 CC(nested_vmx_check_exit_msr_switch_controls(vcpu, vmcs12))) in nested_check_vm_exit_controls()
2784 struct vmcs12 *vmcs12) in nested_check_vm_entry_controls() argument
2788 if (CC(!vmx_control_verify(vmcs12->vm_entry_controls, in nested_check_vm_entry_controls()
2799 if (vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) { in nested_check_vm_entry_controls()
2800 u32 intr_info = vmcs12->vm_entry_intr_info_field; in nested_check_vm_entry_controls()
2805 bool urg = nested_cpu_has2(vmcs12, in nested_check_vm_entry_controls()
2807 bool prot_mode = !urg || vmcs12->guest_cr0 & X86_CR0_PE; in nested_check_vm_entry_controls()
2830 vmcs12->vm_entry_exception_error_code & GENMASK(31, 16))) in nested_check_vm_entry_controls()
2842 if (CC(vmcs12->vm_entry_instruction_len > 15) || in nested_check_vm_entry_controls()
2843 CC(vmcs12->vm_entry_instruction_len == 0 && in nested_check_vm_entry_controls()
2849 if (nested_vmx_check_entry_msr_switch_controls(vcpu, vmcs12)) in nested_check_vm_entry_controls()
2856 struct vmcs12 *vmcs12) in nested_vmx_check_controls() argument
2858 if (nested_check_vm_execution_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2859 nested_check_vm_exit_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2860 nested_check_vm_entry_controls(vcpu, vmcs12)) in nested_vmx_check_controls()
2864 return nested_evmcs_check_controls(vmcs12); in nested_vmx_check_controls()
2870 struct vmcs12 *vmcs12) in nested_vmx_check_address_space_size() argument
2873 if (CC(!!(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) != in nested_vmx_check_address_space_size()
2881 struct vmcs12 *vmcs12) in nested_vmx_check_host_state() argument
2885 if (CC(!nested_host_cr0_valid(vcpu, vmcs12->host_cr0)) || in nested_vmx_check_host_state()
2886 CC(!nested_host_cr4_valid(vcpu, vmcs12->host_cr4)) || in nested_vmx_check_host_state()
2887 CC(kvm_vcpu_is_illegal_gpa(vcpu, vmcs12->host_cr3))) in nested_vmx_check_host_state()
2890 if (CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_esp, vcpu)) || in nested_vmx_check_host_state()
2891 CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_eip, vcpu))) in nested_vmx_check_host_state()
2894 if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) && in nested_vmx_check_host_state()
2895 CC(!kvm_pat_valid(vmcs12->host_ia32_pat))) in nested_vmx_check_host_state()
2898 if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PERF_GLOBAL_CTRL) && in nested_vmx_check_host_state()
2900 vmcs12->host_ia32_perf_global_ctrl))) in nested_vmx_check_host_state()
2904 ia32e = !!(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE); in nested_vmx_check_host_state()
2910 if (CC(!(vmcs12->host_cr4 & X86_CR4_PAE))) in nested_vmx_check_host_state()
2913 if (CC(vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) || in nested_vmx_check_host_state()
2914 CC(vmcs12->host_cr4 & X86_CR4_PCIDE) || in nested_vmx_check_host_state()
2915 CC((vmcs12->host_rip) >> 32)) in nested_vmx_check_host_state()
2919 if (CC(vmcs12->host_cs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2920 CC(vmcs12->host_ss_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2921 CC(vmcs12->host_ds_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2922 CC(vmcs12->host_es_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2923 CC(vmcs12->host_fs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2924 CC(vmcs12->host_gs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2925 CC(vmcs12->host_tr_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2926 CC(vmcs12->host_cs_selector == 0) || in nested_vmx_check_host_state()
2927 CC(vmcs12->host_tr_selector == 0) || in nested_vmx_check_host_state()
2928 CC(vmcs12->host_ss_selector == 0 && !ia32e)) in nested_vmx_check_host_state()
2931 if (CC(is_noncanonical_address(vmcs12->host_fs_base, vcpu)) || in nested_vmx_check_host_state()
2932 CC(is_noncanonical_address(vmcs12->host_gs_base, vcpu)) || in nested_vmx_check_host_state()
2933 CC(is_noncanonical_address(vmcs12->host_gdtr_base, vcpu)) || in nested_vmx_check_host_state()
2934 CC(is_noncanonical_address(vmcs12->host_idtr_base, vcpu)) || in nested_vmx_check_host_state()
2935 CC(is_noncanonical_address(vmcs12->host_tr_base, vcpu)) || in nested_vmx_check_host_state()
2936 CC(is_noncanonical_address(vmcs12->host_rip, vcpu))) in nested_vmx_check_host_state()
2945 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) { in nested_vmx_check_host_state()
2946 if (CC(!kvm_valid_efer(vcpu, vmcs12->host_ia32_efer)) || in nested_vmx_check_host_state()
2947 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state()
2948 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LME))) in nested_vmx_check_host_state()
2956 struct vmcs12 *vmcs12) in nested_vmx_check_vmcs_link_ptr() argument
2962 if (vmcs12->vmcs_link_pointer == INVALID_GPA) in nested_vmx_check_vmcs_link_ptr()
2965 if (CC(!page_address_valid(vcpu, vmcs12->vmcs_link_pointer))) in nested_vmx_check_vmcs_link_ptr()
2968 if (ghc->gpa != vmcs12->vmcs_link_pointer && in nested_vmx_check_vmcs_link_ptr()
2970 vmcs12->vmcs_link_pointer, VMCS12_SIZE))) in nested_vmx_check_vmcs_link_ptr()
2974 offsetof(struct vmcs12, hdr), in nested_vmx_check_vmcs_link_ptr()
2979 CC(hdr.shadow_vmcs != nested_cpu_has_shadow_vmcs(vmcs12))) in nested_vmx_check_vmcs_link_ptr()
2988 static int nested_check_guest_non_reg_state(struct vmcs12 *vmcs12) in nested_check_guest_non_reg_state() argument
2990 if (CC(vmcs12->guest_activity_state != GUEST_ACTIVITY_ACTIVE && in nested_check_guest_non_reg_state()
2991 vmcs12->guest_activity_state != GUEST_ACTIVITY_HLT && in nested_check_guest_non_reg_state()
2992 vmcs12->guest_activity_state != GUEST_ACTIVITY_WAIT_SIPI)) in nested_check_guest_non_reg_state()
2999 struct vmcs12 *vmcs12, in nested_vmx_check_guest_state() argument
3006 if (CC(!nested_guest_cr0_valid(vcpu, vmcs12->guest_cr0)) || in nested_vmx_check_guest_state()
3007 CC(!nested_guest_cr4_valid(vcpu, vmcs12->guest_cr4))) in nested_vmx_check_guest_state()
3010 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS) && in nested_vmx_check_guest_state()
3011 CC(!kvm_dr7_valid(vmcs12->guest_dr7))) in nested_vmx_check_guest_state()
3014 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT) && in nested_vmx_check_guest_state()
3015 CC(!kvm_pat_valid(vmcs12->guest_ia32_pat))) in nested_vmx_check_guest_state()
3018 if (nested_vmx_check_vmcs_link_ptr(vcpu, vmcs12)) { in nested_vmx_check_guest_state()
3023 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PERF_GLOBAL_CTRL) && in nested_vmx_check_guest_state()
3025 vmcs12->guest_ia32_perf_global_ctrl))) in nested_vmx_check_guest_state()
3038 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) { in nested_vmx_check_guest_state()
3039 ia32e = (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) != 0; in nested_vmx_check_guest_state()
3040 if (CC(!kvm_valid_efer(vcpu, vmcs12->guest_ia32_efer)) || in nested_vmx_check_guest_state()
3041 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state()
3042 CC(((vmcs12->guest_cr0 & X86_CR0_PG) && in nested_vmx_check_guest_state()
3043 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME)))) in nested_vmx_check_guest_state()
3047 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS) && in nested_vmx_check_guest_state()
3048 (CC(is_noncanonical_address(vmcs12->guest_bndcfgs & PAGE_MASK, vcpu)) || in nested_vmx_check_guest_state()
3049 CC((vmcs12->guest_bndcfgs & MSR_IA32_BNDCFGS_RSVD)))) in nested_vmx_check_guest_state()
3052 if (nested_check_guest_non_reg_state(vmcs12)) in nested_vmx_check_guest_state()
3166 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_get_vmcs12_pages() local
3171 !nested_cpu_has_ept(vmcs12) && is_pae_paging(vcpu)) { in nested_get_vmcs12_pages()
3182 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
3185 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->apic_access_addr), map)) { in nested_get_vmcs12_pages()
3198 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_get_vmcs12_pages()
3201 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->virtual_apic_page_addr), map)) { in nested_get_vmcs12_pages()
3203 } else if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING) && in nested_get_vmcs12_pages()
3204 nested_cpu_has(vmcs12, CPU_BASED_CR8_STORE_EXITING) && in nested_get_vmcs12_pages()
3205 !nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
3224 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_get_vmcs12_pages()
3227 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->posted_intr_desc_addr), map)) { in nested_get_vmcs12_pages()
3230 offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
3232 pfn_to_hpa(map->pfn) + offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
3244 if (nested_vmx_prepare_msr_bitmap(vcpu, vmcs12)) in nested_get_vmcs12_pages()
3273 struct vmcs12 *vmcs12; in nested_vmx_write_pml_buffer() local
3287 vmcs12 = get_vmcs12(vcpu); in nested_vmx_write_pml_buffer()
3288 if (!nested_cpu_has_pml(vmcs12)) in nested_vmx_write_pml_buffer()
3291 if (vmcs12->guest_pml_index >= PML_ENTITY_NUM) { in nested_vmx_write_pml_buffer()
3297 dst = vmcs12->pml_address + sizeof(u64) * vmcs12->guest_pml_index; in nested_vmx_write_pml_buffer()
3303 vmcs12->guest_pml_index--; in nested_vmx_write_pml_buffer()
3339 struct vmcs12 *vmcs12);
3355 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_enter_non_root_mode() local
3366 vmcs12->guest_rip, in nested_vmx_enter_non_root_mode()
3367 vmcs12->guest_intr_status, in nested_vmx_enter_non_root_mode()
3368 vmcs12->vm_entry_intr_info_field, in nested_vmx_enter_non_root_mode()
3369 vmcs12->secondary_vm_exec_control & SECONDARY_EXEC_ENABLE_EPT, in nested_vmx_enter_non_root_mode()
3370 vmcs12->ept_pointer, in nested_vmx_enter_non_root_mode()
3371 vmcs12->guest_cr3, in nested_vmx_enter_non_root_mode()
3384 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) in nested_vmx_enter_non_root_mode()
3388 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS))) in nested_vmx_enter_non_root_mode()
3412 prepare_vmcs02_early(vmx, &vmx->vmcs01, vmcs12); in nested_vmx_enter_non_root_mode()
3425 if (nested_vmx_check_guest_state(vcpu, vmcs12, in nested_vmx_enter_non_root_mode()
3428 vmcs12->exit_qualification = entry_failure_code; in nested_vmx_enter_non_root_mode()
3435 if (prepare_vmcs02(vcpu, vmcs12, from_vmentry, &entry_failure_code)) { in nested_vmx_enter_non_root_mode()
3437 vmcs12->exit_qualification = entry_failure_code; in nested_vmx_enter_non_root_mode()
3443 vmcs12->vm_entry_msr_load_addr, in nested_vmx_enter_non_root_mode()
3444 vmcs12->vm_entry_msr_load_count); in nested_vmx_enter_non_root_mode()
3447 vmcs12->exit_qualification = failed_index; in nested_vmx_enter_non_root_mode()
3476 if (nested_cpu_has_preemption_timer(vmcs12)) { in nested_vmx_enter_non_root_mode()
3495 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETTING) in nested_vmx_enter_non_root_mode()
3496 vcpu->arch.tsc_offset -= vmcs12->tsc_offset; in nested_vmx_enter_non_root_mode()
3505 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_enter_non_root_mode()
3506 vmcs12->vm_exit_reason = exit_reason.full; in nested_vmx_enter_non_root_mode()
3518 struct vmcs12 *vmcs12; in nested_vmx_run() local
3542 vmcs12 = get_vmcs12(vcpu); in nested_vmx_run()
3550 if (CC(vmcs12->hdr.shadow_vmcs)) in nested_vmx_run()
3556 vmcs12->launch_state = !launch; in nested_vmx_run()
3574 if (CC(vmcs12->launch_state == launch)) in nested_vmx_run()
3579 if (nested_vmx_check_controls(vcpu, vmcs12)) in nested_vmx_run()
3582 if (nested_vmx_check_address_space_size(vcpu, vmcs12)) in nested_vmx_run()
3585 if (nested_vmx_check_host_state(vcpu, vmcs12)) in nested_vmx_run()
3599 if (nested_cpu_has_posted_intr(vmcs12) && in nested_vmx_run()
3619 nested_cache_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_run()
3621 switch (vmcs12->guest_activity_state) { in nested_vmx_run()
3628 if (!(vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) && in nested_vmx_run()
3629 !nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING) && in nested_vmx_run()
3630 !(nested_cpu_has(vmcs12, CPU_BASED_INTR_WINDOW_EXITING) && in nested_vmx_run()
3631 (vmcs12->guest_rflags & X86_EFLAGS_IF))) { in nested_vmx_run()
3674 vmcs12_guest_cr0(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr0() argument
3678 /*2*/ (vmcs12->guest_cr0 & vmcs12->cr0_guest_host_mask) | in vmcs12_guest_cr0()
3679 /*3*/ (vmcs_readl(CR0_READ_SHADOW) & ~(vmcs12->cr0_guest_host_mask | in vmcs12_guest_cr0()
3684 vmcs12_guest_cr4(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr4() argument
3688 /*2*/ (vmcs12->guest_cr4 & vmcs12->cr4_guest_host_mask) | in vmcs12_guest_cr4()
3689 /*3*/ (vmcs_readl(CR4_READ_SHADOW) & ~(vmcs12->cr4_guest_host_mask | in vmcs12_guest_cr4()
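vmcs12_guest_cr0()/vmcs12_guest_cr4() rebuild the CR value L1 expects to read back: bits L1 asked to intercept come from vmcs12, the remaining bits come from the read shadow, and (in a term not shown in this listing) bits owned by L0 come from the hardware VMCS. A sketch of that combination for CR0, with the inputs passed explicitly and the first term treated as a reconstruction:

    #include <stdint.h>

    /*
     * Sketch: combine three sources into the CR0 value reported to L1.
     * Term 1 (hardware CR0 masked by the bits L0 lets the guest own) is
     * reconstructed; terms 2 and 3 mirror the listing above.
     */
    static uint64_t rebuild_guest_cr0(uint64_t hw_guest_cr0,
                                      uint64_t l0_guest_owned_bits,
                                      uint64_t vmcs12_guest_cr0,
                                      uint64_t cr0_guest_host_mask,
                                      uint64_t cr0_read_shadow)
    {
            return (hw_guest_cr0     &  l0_guest_owned_bits)   |   /* 1 */
                   (vmcs12_guest_cr0 &  cr0_guest_host_mask)   |   /* 2 */
                   (cr0_read_shadow  & ~(cr0_guest_host_mask |     /* 3 */
                                         l0_guest_owned_bits));
    }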
3694 struct vmcs12 *vmcs12, in vmcs12_save_pending_event() argument
3720 vmcs12->idt_vectoring_info_field = 0; in vmcs12_save_pending_event()
3726 vmcs12->vm_exit_instruction_len = in vmcs12_save_pending_event()
3734 vmcs12->idt_vectoring_error_code = in vmcs12_save_pending_event()
3738 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3740 vmcs12->idt_vectoring_info_field = in vmcs12_save_pending_event()
3748 vmcs12->vm_entry_instruction_len = in vmcs12_save_pending_event()
3753 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3755 vmcs12->idt_vectoring_info_field = 0; in vmcs12_save_pending_event()
3762 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_mark_vmcs12_pages_dirty() local
3770 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_mark_vmcs12_pages_dirty()
3771 gfn = vmcs12->virtual_apic_page_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3775 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_mark_vmcs12_pages_dirty()
3776 gfn = vmcs12->posted_intr_desc_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3827 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_inject_exception_vmexit() local
3852 vmcs12->vm_exit_intr_error_code = (u16)ex->error_code; in nested_vmx_inject_exception_vmexit()
3861 if (!(vmcs12->idt_vectoring_info_field & VECTORING_INFO_VALID_MASK) && in nested_vmx_inject_exception_vmexit()
4216 struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12_rare() argument
4220 vmcs12->guest_es_selector = vmcs_read16(GUEST_ES_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4221 vmcs12->guest_cs_selector = vmcs_read16(GUEST_CS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4222 vmcs12->guest_ss_selector = vmcs_read16(GUEST_SS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4223 vmcs12->guest_ds_selector = vmcs_read16(GUEST_DS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4224 vmcs12->guest_fs_selector = vmcs_read16(GUEST_FS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4225 vmcs12->guest_gs_selector = vmcs_read16(GUEST_GS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4226 vmcs12->guest_ldtr_selector = vmcs_read16(GUEST_LDTR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4227 vmcs12->guest_tr_selector = vmcs_read16(GUEST_TR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4228 vmcs12->guest_es_limit = vmcs_read32(GUEST_ES_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4229 vmcs12->guest_cs_limit = vmcs_read32(GUEST_CS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4230 vmcs12->guest_ss_limit = vmcs_read32(GUEST_SS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4231 vmcs12->guest_ds_limit = vmcs_read32(GUEST_DS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4232 vmcs12->guest_fs_limit = vmcs_read32(GUEST_FS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4233 vmcs12->guest_gs_limit = vmcs_read32(GUEST_GS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4234 vmcs12->guest_ldtr_limit = vmcs_read32(GUEST_LDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4235 vmcs12->guest_tr_limit = vmcs_read32(GUEST_TR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4236 vmcs12->guest_gdtr_limit = vmcs_read32(GUEST_GDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4237 vmcs12->guest_idtr_limit = vmcs_read32(GUEST_IDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4238 vmcs12->guest_es_ar_bytes = vmcs_read32(GUEST_ES_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4239 vmcs12->guest_ds_ar_bytes = vmcs_read32(GUEST_DS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4240 vmcs12->guest_fs_ar_bytes = vmcs_read32(GUEST_FS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4241 vmcs12->guest_gs_ar_bytes = vmcs_read32(GUEST_GS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4242 vmcs12->guest_ldtr_ar_bytes = vmcs_read32(GUEST_LDTR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4243 vmcs12->guest_tr_ar_bytes = vmcs_read32(GUEST_TR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4244 vmcs12->guest_es_base = vmcs_readl(GUEST_ES_BASE); in sync_vmcs02_to_vmcs12_rare()
4245 vmcs12->guest_cs_base = vmcs_readl(GUEST_CS_BASE); in sync_vmcs02_to_vmcs12_rare()
4246 vmcs12->guest_ss_base = vmcs_readl(GUEST_SS_BASE); in sync_vmcs02_to_vmcs12_rare()
4247 vmcs12->guest_ds_base = vmcs_readl(GUEST_DS_BASE); in sync_vmcs02_to_vmcs12_rare()
4248 vmcs12->guest_fs_base = vmcs_readl(GUEST_FS_BASE); in sync_vmcs02_to_vmcs12_rare()
4249 vmcs12->guest_gs_base = vmcs_readl(GUEST_GS_BASE); in sync_vmcs02_to_vmcs12_rare()
4250 vmcs12->guest_ldtr_base = vmcs_readl(GUEST_LDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
4251 vmcs12->guest_tr_base = vmcs_readl(GUEST_TR_BASE); in sync_vmcs02_to_vmcs12_rare()
4252 vmcs12->guest_gdtr_base = vmcs_readl(GUEST_GDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
4253 vmcs12->guest_idtr_base = vmcs_readl(GUEST_IDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
4254 vmcs12->guest_pending_dbg_exceptions = in sync_vmcs02_to_vmcs12_rare()
4261 struct vmcs12 *vmcs12) in copy_vmcs02_to_vmcs12_rare() argument
4276 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in copy_vmcs02_to_vmcs12_rare()
4289 static void sync_vmcs02_to_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12() argument
4294 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
4299 vmcs12->guest_cr0 = vmcs12_guest_cr0(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
4300 vmcs12->guest_cr4 = vmcs12_guest_cr4(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
4302 vmcs12->guest_rsp = kvm_rsp_read(vcpu); in sync_vmcs02_to_vmcs12()
4303 vmcs12->guest_rip = kvm_rip_read(vcpu); in sync_vmcs02_to_vmcs12()
4304 vmcs12->guest_rflags = vmcs_readl(GUEST_RFLAGS); in sync_vmcs02_to_vmcs12()
4306 vmcs12->guest_cs_ar_bytes = vmcs_read32(GUEST_CS_AR_BYTES); in sync_vmcs02_to_vmcs12()
4307 vmcs12->guest_ss_ar_bytes = vmcs_read32(GUEST_SS_AR_BYTES); in sync_vmcs02_to_vmcs12()
4309 vmcs12->guest_interruptibility_info = in sync_vmcs02_to_vmcs12()
4313 vmcs12->guest_activity_state = GUEST_ACTIVITY_HLT; in sync_vmcs02_to_vmcs12()
4315 vmcs12->guest_activity_state = GUEST_ACTIVITY_WAIT_SIPI; in sync_vmcs02_to_vmcs12()
4317 vmcs12->guest_activity_state = GUEST_ACTIVITY_ACTIVE; in sync_vmcs02_to_vmcs12()
4319 if (nested_cpu_has_preemption_timer(vmcs12) && in sync_vmcs02_to_vmcs12()
4320 vmcs12->vm_exit_controls & VM_EXIT_SAVE_VMX_PREEMPTION_TIMER && in sync_vmcs02_to_vmcs12()
4322 vmcs12->vmx_preemption_timer_value = in sync_vmcs02_to_vmcs12()
4334 vmcs12->guest_cr3 = vmcs_readl(GUEST_CR3); in sync_vmcs02_to_vmcs12()
4335 if (nested_cpu_has_ept(vmcs12) && is_pae_paging(vcpu)) { in sync_vmcs02_to_vmcs12()
4336 vmcs12->guest_pdptr0 = vmcs_read64(GUEST_PDPTR0); in sync_vmcs02_to_vmcs12()
4337 vmcs12->guest_pdptr1 = vmcs_read64(GUEST_PDPTR1); in sync_vmcs02_to_vmcs12()
4338 vmcs12->guest_pdptr2 = vmcs_read64(GUEST_PDPTR2); in sync_vmcs02_to_vmcs12()
4339 vmcs12->guest_pdptr3 = vmcs_read64(GUEST_PDPTR3); in sync_vmcs02_to_vmcs12()
4343 vmcs12->guest_linear_address = vmcs_readl(GUEST_LINEAR_ADDRESS); in sync_vmcs02_to_vmcs12()
4345 if (nested_cpu_has_vid(vmcs12)) in sync_vmcs02_to_vmcs12()
4346 vmcs12->guest_intr_status = vmcs_read16(GUEST_INTR_STATUS); in sync_vmcs02_to_vmcs12()
4348 vmcs12->vm_entry_controls = in sync_vmcs02_to_vmcs12()
4349 (vmcs12->vm_entry_controls & ~VM_ENTRY_IA32E_MODE) | in sync_vmcs02_to_vmcs12()
4352 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_DEBUG_CONTROLS) in sync_vmcs02_to_vmcs12()
4353 kvm_get_dr(vcpu, 7, (unsigned long *)&vmcs12->guest_dr7); in sync_vmcs02_to_vmcs12()
4355 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_IA32_EFER) in sync_vmcs02_to_vmcs12()
4356 vmcs12->guest_ia32_efer = vcpu->arch.efer; in sync_vmcs02_to_vmcs12()
4370 static void prepare_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs12() argument
4375 vmcs12->vm_exit_reason = vm_exit_reason; in prepare_vmcs12()
4377 vmcs12->vm_exit_reason |= VMX_EXIT_REASONS_SGX_ENCLAVE_MODE; in prepare_vmcs12()
4378 vmcs12->exit_qualification = exit_qualification; in prepare_vmcs12()
4385 if (!(vmcs12->vm_exit_reason & VMX_EXIT_REASONS_FAILED_VMENTRY)) { in prepare_vmcs12()
4386 vmcs12->launch_state = 1; in prepare_vmcs12()
4390 vmcs12->vm_entry_intr_info_field &= ~INTR_INFO_VALID_MASK; in prepare_vmcs12()
4396 vmcs12_save_pending_event(vcpu, vmcs12, in prepare_vmcs12()
4399 vmcs12->vm_exit_intr_info = exit_intr_info; in prepare_vmcs12()
4400 vmcs12->vm_exit_instruction_len = vmcs_read32(VM_EXIT_INSTRUCTION_LEN); in prepare_vmcs12()
4401 vmcs12->vmx_instruction_info = vmcs_read32(VMX_INSTRUCTION_INFO); in prepare_vmcs12()
4410 vmcs12->vm_exit_msr_store_addr, in prepare_vmcs12()
4411 vmcs12->vm_exit_msr_store_count)) in prepare_vmcs12()
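prepare_vmcs12() records the synthesized exit information for L1 to read. A small illustrative sketch, not the kernel's code, of how the exit reason, qualification and interruption info land in a vmcs12-like structure, and why launch_state flips to 1 only on a non-failing entry (the toy_* names are assumed):

/* Illustrative sketch only; toy_* names are assumptions, not KVM's types. */
#include <stdint.h>

#define TOY_EXIT_REASON_FAILED_VMENTRY  (1u << 31)  /* architectural "VM-entry failure" flag */

struct toy_exit_info {
	uint32_t vm_exit_reason;
	uint64_t exit_qualification;
	uint32_t vm_exit_intr_info;
	uint32_t launch_state;
};

static void toy_prepare_exit_info(struct toy_exit_info *info, uint32_t reason,
				  uint64_t qual, uint32_t intr_info)
{
	info->vm_exit_reason = reason;
	info->exit_qualification = qual;
	info->vm_exit_intr_info = intr_info;

	/* The VMCS counts as "launched" only after a non-failing VM-entry. */
	if (!(reason & TOY_EXIT_REASON_FAILED_VMENTRY))
		info->launch_state = 1;
}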
4427 struct vmcs12 *vmcs12) in load_vmcs12_host_state() argument
4432 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) in load_vmcs12_host_state()
4433 vcpu->arch.efer = vmcs12->host_ia32_efer; in load_vmcs12_host_state()
4434 else if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
4440 kvm_rsp_write(vcpu, vmcs12->host_rsp); in load_vmcs12_host_state()
4441 kvm_rip_write(vcpu, vmcs12->host_rip); in load_vmcs12_host_state()
4453 vmx_set_cr0(vcpu, vmcs12->host_cr0); in load_vmcs12_host_state()
4457 vmx_set_cr4(vcpu, vmcs12->host_cr4); in load_vmcs12_host_state()
4465 if (nested_vmx_load_cr3(vcpu, vmcs12->host_cr3, false, true, &ignored)) in load_vmcs12_host_state()
4468 nested_vmx_transition_tlb_flush(vcpu, vmcs12, false); in load_vmcs12_host_state()
4470 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->host_ia32_sysenter_cs); in load_vmcs12_host_state()
4471 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->host_ia32_sysenter_esp); in load_vmcs12_host_state()
4472 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->host_ia32_sysenter_eip); in load_vmcs12_host_state()
4473 vmcs_writel(GUEST_IDTR_BASE, vmcs12->host_idtr_base); in load_vmcs12_host_state()
4474 vmcs_writel(GUEST_GDTR_BASE, vmcs12->host_gdtr_base); in load_vmcs12_host_state()
4479 if (vmcs12->vm_exit_controls & VM_EXIT_CLEAR_BNDCFGS) in load_vmcs12_host_state()
4482 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) { in load_vmcs12_host_state()
4483 vmcs_write64(GUEST_IA32_PAT, vmcs12->host_ia32_pat); in load_vmcs12_host_state()
4484 vcpu->arch.pat = vmcs12->host_ia32_pat; in load_vmcs12_host_state()
4486 if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PERF_GLOBAL_CTRL) && in load_vmcs12_host_state()
4489 vmcs12->host_ia32_perf_global_ctrl)); in load_vmcs12_host_state()
4496 .selector = vmcs12->host_cs_selector, in load_vmcs12_host_state()
4502 if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
4516 seg.selector = vmcs12->host_ds_selector; in load_vmcs12_host_state()
4518 seg.selector = vmcs12->host_es_selector; in load_vmcs12_host_state()
4520 seg.selector = vmcs12->host_ss_selector; in load_vmcs12_host_state()
4522 seg.selector = vmcs12->host_fs_selector; in load_vmcs12_host_state()
4523 seg.base = vmcs12->host_fs_base; in load_vmcs12_host_state()
4525 seg.selector = vmcs12->host_gs_selector; in load_vmcs12_host_state()
4526 seg.base = vmcs12->host_gs_base; in load_vmcs12_host_state()
4529 .base = vmcs12->host_tr_base, in load_vmcs12_host_state()
4531 .selector = vmcs12->host_tr_selector, in load_vmcs12_host_state()
4544 if (nested_vmx_load_msr(vcpu, vmcs12->vm_exit_msr_load_addr, in load_vmcs12_host_state()
4545 vmcs12->vm_exit_msr_load_count)) in load_vmcs12_host_state()
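load_vmcs12_host_state() rebuilds L1 state from the host_* fields of vmcs12. One detail visible above is the EFER choice: use host_ia32_efer when VM_EXIT_LOAD_IA32_EFER is set, otherwise derive only the long-mode bits from VM_EXIT_HOST_ADDR_SPACE_SIZE. A hedged sketch under assumed toy_* constants:

/* Illustrative sketch; the toy_* bit values are assumed, not taken from KVM headers. */
#include <stdint.h>

#define TOY_EXIT_LOAD_IA32_EFER        (1u << 21)
#define TOY_EXIT_HOST_ADDR_SPACE_SIZE  (1u << 9)
#define TOY_EFER_LME                   (1ull << 8)
#define TOY_EFER_LMA                   (1ull << 10)

static uint64_t toy_host_efer(uint32_t vm_exit_controls, uint64_t host_ia32_efer,
			      uint64_t current_efer)
{
	if (vm_exit_controls & TOY_EXIT_LOAD_IA32_EFER)
		return host_ia32_efer;    /* L1 supplied an explicit EFER to load */

	/* Otherwise only the long-mode bits follow the host address-space-size control. */
	if (vm_exit_controls & TOY_EXIT_HOST_ADDR_SPACE_SIZE)
		return current_efer | TOY_EFER_LMA | TOY_EFER_LME;
	return current_efer & ~(TOY_EFER_LMA | TOY_EFER_LME);
}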
4576 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_restore_host_state() local
4584 if (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS) { in nested_vmx_restore_host_state()
4635 for (i = 0; i < vmcs12->vm_entry_msr_load_count; i++) { in nested_vmx_restore_host_state()
4636 gpa = vmcs12->vm_entry_msr_load_addr + (i * sizeof(g)); in nested_vmx_restore_host_state()
4644 for (j = 0; j < vmcs12->vm_exit_msr_load_count; j++) { in nested_vmx_restore_host_state()
4645 gpa = vmcs12->vm_exit_msr_load_addr + (j * sizeof(h)); in nested_vmx_restore_host_state()
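nested_vmx_restore_host_state() walks the VM-entry and VM-exit MSR autoload lists by guest physical address. The gpa arithmetic above assumes the architectural 16-byte {index, reserved, value} entry; a tiny sketch with hypothetical toy_* names:

/* Illustrative sketch of the 16-byte autoload-MSR entry; toy_* names are mine. */
#include <stddef.h>
#include <stdint.h>

struct toy_msr_entry {
	uint32_t index;      /* MSR number */
	uint32_t reserved;
	uint64_t value;      /* value to load */
};

/* Find the entry an MSR-load list would apply for @msr, or NULL if absent. */
static const struct toy_msr_entry *
toy_find_msr(const struct toy_msr_entry *list, uint32_t count, uint32_t msr)
{
	for (uint32_t i = 0; i < count; i++) {
		if (list[i].index == msr)
			return &list[i];
	}
	return NULL;
}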
4688 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_vmexit() local
4719 if (nested_cpu_has_preemption_timer(vmcs12)) in nested_vmx_vmexit()
4722 if (nested_cpu_has(vmcs12, CPU_BASED_USE_TSC_OFFSETTING)) { in nested_vmx_vmexit()
4724 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_TSC_SCALING)) in nested_vmx_vmexit()
4729 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4732 prepare_vmcs12(vcpu, vmcs12, vm_exit_reason, in nested_vmx_vmexit()
4744 nested_flush_cached_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4818 vmcs12->vm_exit_intr_info = irq | in nested_vmx_vmexit()
4823 trace_kvm_nested_vmexit_inject(vmcs12->vm_exit_reason, in nested_vmx_vmexit()
4824 vmcs12->exit_qualification, in nested_vmx_vmexit()
4825 vmcs12->idt_vectoring_info_field, in nested_vmx_vmexit()
4826 vmcs12->vm_exit_intr_info, in nested_vmx_vmexit()
4827 vmcs12->vm_exit_intr_error_code, in nested_vmx_vmexit()
4830 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_vmexit()
5262 vmptr + offsetof(struct vmcs12, in handle_vmclear()
5287 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread() local
5320 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmread()
5323 value = vmcs12_read_any(vmcs12, field, offset); in handle_vmread()
5393 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite() local
5456 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmwrite()
5469 vmcs12_write_any(vmcs12, field, offset, value); in handle_vmwrite()
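handle_vmread() and handle_vmwrite() access a field inside the (shadow) vmcs12 by its architectural encoding. As background, the field width is carried in bits 14:13 of that encoding; an illustrative decoder (the toy_* names are mine, not KVM's):

/* Illustrative sketch; enum and names are assumptions for this example. */
#include <stdint.h>

enum toy_field_width { TOY_WIDTH_U16, TOY_WIDTH_U64, TOY_WIDTH_U32, TOY_WIDTH_NATURAL };

static enum toy_field_width toy_vmcs_field_width(uint32_t encoding)
{
	switch ((encoding >> 13) & 0x3) {   /* width lives in encoding bits 14:13 */
	case 0:  return TOY_WIDTH_U16;
	case 1:  return TOY_WIDTH_U64;
	case 2:  return TOY_WIDTH_U32;
	default: return TOY_WIDTH_NATURAL;
	}
}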
5550 offsetof(struct vmcs12, hdr), in handle_vmptrld()
5780 struct vmcs12 *vmcs12) in nested_vmx_eptp_switching() argument
5785 if (WARN_ON_ONCE(!nested_cpu_has_ept(vmcs12))) in nested_vmx_eptp_switching()
5790 if (kvm_vcpu_read_guest_page(vcpu, vmcs12->eptp_list_address >> PAGE_SHIFT, in nested_vmx_eptp_switching()
5798 if (vmcs12->ept_pointer != new_eptp) { in nested_vmx_eptp_switching()
5802 vmcs12->ept_pointer = new_eptp; in nested_vmx_eptp_switching()
5805 if (!nested_cpu_has_vpid(vmcs12)) in nested_vmx_eptp_switching()
5815 struct vmcs12 *vmcs12; in handle_vmfunc() local
5828 vmcs12 = get_vmcs12(vcpu); in handle_vmfunc()
5834 if (WARN_ON_ONCE((function > 63) || !nested_cpu_has_vmfunc(vmcs12))) { in handle_vmfunc()
5839 if (!(vmcs12->vm_function_control & BIT_ULL(function))) in handle_vmfunc()
5844 if (nested_vmx_eptp_switching(vcpu, vmcs12)) in handle_vmfunc()
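handle_vmfunc() implements only leaf 0, EPTP switching: the leaf must be granted in vm_function_control and the new EPTP comes from the 512-entry list at eptp_list_address. A simplified sketch that takes the list as an in-memory array instead of reading guest memory (toy_* names assumed):

/* Illustrative sketch only; not KVM's implementation. */
#include <stdbool.h>
#include <stdint.h>

struct toy_vmfunc_state {
	uint64_t vm_function_control;    /* bitmap of VM functions L1 granted to L2 */
	uint64_t ept_pointer;            /* currently active EPTP for L2 */
};

/*
 * eptp_list stands in for the 512-entry EPTP list page that would normally
 * be read from guest memory at vmcs12->eptp_list_address.
 */
static bool toy_vmfunc_eptp_switch(struct toy_vmfunc_state *s,
				   const uint64_t eptp_list[512],
				   uint64_t function, uint64_t index)
{
	if (function > 63 || !(s->vm_function_control & (1ull << function)))
		return false;                 /* VM function not granted by L1 */
	if (function != 0 || index >= 512)
		return false;                 /* only leaf 0 (EPTP switching) is defined here */

	if (s->ept_pointer != eptp_list[index])
		s->ept_pointer = eptp_list[index];   /* real code would also reload the MMU */
	return true;
}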
5871 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_check_io_bitmaps() local
5880 bitmap = vmcs12->io_bitmap_a; in nested_vmx_check_io_bitmaps()
5882 bitmap = vmcs12->io_bitmap_b; in nested_vmx_check_io_bitmaps()
5902 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_io() argument
5908 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_exit_handled_io()
5909 return nested_cpu_has(vmcs12, CPU_BASED_UNCOND_IO_EXITING); in nested_vmx_exit_handled_io()
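nested_vmx_exit_handled_io() consults L1's I/O bitmaps: without CPU_BASED_USE_IO_BITMAPS the answer is simply CPU_BASED_UNCOND_IO_EXITING; otherwise one bit per port in bitmap A (ports 0x0000-0x7fff) or bitmap B (0x8000-0xffff) decides. A standalone sketch, with assumed toy_* control-bit values:

/* Illustrative sketch of the VMX I/O-bitmap test (one bit per port). */
#include <stdbool.h>
#include <stdint.h>

#define TOY_USE_IO_BITMAPS     (1u << 25)   /* assumed "use I/O bitmaps" bit */
#define TOY_UNCOND_IO_EXITING  (1u << 24)   /* assumed "unconditional I/O exiting" bit */

/* bitmap_a covers ports 0x0000-0x7fff, bitmap_b covers 0x8000-0xffff (4 KiB each). */
static bool toy_io_port_intercepted(uint32_t cpu_based_ctls,
				    const uint8_t bitmap_a[4096],
				    const uint8_t bitmap_b[4096],
				    uint16_t port, int size)
{
	if (!(cpu_based_ctls & TOY_USE_IO_BITMAPS))
		return cpu_based_ctls & TOY_UNCOND_IO_EXITING;

	/* An access exits if the bit of any byte it touches is set. */
	for (int i = 0; i < size; i++, port++) {
		const uint8_t *bm = (port < 0x8000) ? bitmap_a : bitmap_b;
		uint16_t p = port & 0x7fff;

		if (bm[p / 8] & (1u << (p % 8)))
			return true;
	}
	return false;
}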
5926 struct vmcs12 *vmcs12, in nested_vmx_exit_handled_msr() argument
5932 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_exit_handled_msr()
5940 bitmap = vmcs12->msr_bitmap; in nested_vmx_exit_handled_msr()
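nested_vmx_exit_handled_msr() tests the guest's 4 KiB MSR bitmap: read bitmaps for the low (0x0-0x1fff) and high (0xc0000000-0xc0001fff) MSR ranges occupy the first 2 KiB, the corresponding write bitmaps the second. A self-contained sketch of that lookup, not KVM's helper:

/* Illustrative sketch of the VMX MSR-bitmap lookup (4 KiB page, 1 bit per MSR). */
#include <stdbool.h>
#include <stdint.h>

static bool toy_msr_intercepted(const uint8_t bitmap[4096], uint32_t msr, bool write)
{
	uint32_t bit;

	if (msr <= 0x1fff)
		bit = msr;                          /* "low" MSR range, bytes 0x000-0x3ff */
	else if (msr >= 0xc0000000 && msr <= 0xc0001fff)
		bit = 0x2000 + (msr - 0xc0000000);  /* "high" range starts at byte 0x400 */
	else
		return true;                        /* out-of-range MSRs always intercept */

	if (write)
		bit += 0x4000;                      /* write bitmaps live in the second 2 KiB */

	return bitmap[bit / 8] & (1u << (bit % 8));
}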
5964 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_cr() argument
5977 if (vmcs12->cr0_guest_host_mask & in nested_vmx_exit_handled_cr()
5978 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
5982 if (nested_cpu_has(vmcs12, CPU_BASED_CR3_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5986 if (vmcs12->cr4_guest_host_mask & in nested_vmx_exit_handled_cr()
5987 (vmcs12->cr4_read_shadow ^ val)) in nested_vmx_exit_handled_cr()
5991 if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5997 if ((vmcs12->cr0_guest_host_mask & X86_CR0_TS) && in nested_vmx_exit_handled_cr()
5998 (vmcs12->cr0_read_shadow & X86_CR0_TS)) in nested_vmx_exit_handled_cr()
6004 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
6009 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
6021 if (vmcs12->cr0_guest_host_mask & 0xe & in nested_vmx_exit_handled_cr()
6022 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
6024 if ((vmcs12->cr0_guest_host_mask & 0x1) && in nested_vmx_exit_handled_cr()
6025 !(vmcs12->cr0_read_shadow & 0x1) && in nested_vmx_exit_handled_cr()
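The CR0/CR4 cases above hinge on the guest/host mask and read shadow: a MOV to a control register by L2 reflects to L1 only when it changes a bit L1 owns relative to the value L1 expects. A one-function sketch with hypothetical toy_* names:

/* Illustrative sketch; not the kernel's code. */
#include <stdbool.h>
#include <stdint.h>

struct toy_cr0_ctl {
	uint64_t cr0_guest_host_mask;   /* bits owned by L1 */
	uint64_t cr0_read_shadow;       /* value L1 expects for the bits it owns */
};

static bool toy_cr0_write_exits(const struct toy_cr0_ctl *c, uint64_t new_val)
{
	/* Exit only if an L1-owned bit would change from what L1 last established. */
	return (c->cr0_guest_host_mask & (new_val ^ c->cr0_read_shadow)) != 0;
}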
6034 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_encls() argument
6039 !nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENCLS_EXITING)) in nested_vmx_exit_handled_encls()
6045 return vmcs12->encls_exiting_bitmap & BIT_ULL(encls_leaf); in nested_vmx_exit_handled_encls()
6049 struct vmcs12 *vmcs12, gpa_t bitmap) in nested_vmx_exit_handled_vmcs_access() argument
6055 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_exit_handled_vmcs_access()
6072 static bool nested_vmx_exit_handled_mtf(struct vmcs12 *vmcs12) in nested_vmx_exit_handled_mtf() argument
6074 u32 entry_intr_info = vmcs12->vm_entry_intr_info_field; in nested_vmx_exit_handled_mtf()
6076 if (nested_cpu_has_mtf(vmcs12)) in nested_vmx_exit_handled_mtf()
6167 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_l1_wants_exit() local
6177 return vmcs12->exception_bitmap & in nested_vmx_l1_wants_exit()
6184 return nested_cpu_has(vmcs12, CPU_BASED_INTR_WINDOW_EXITING); in nested_vmx_l1_wants_exit()
6186 return nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING); in nested_vmx_l1_wants_exit()
6192 return nested_cpu_has(vmcs12, CPU_BASED_HLT_EXITING); in nested_vmx_l1_wants_exit()
6196 return nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_l1_wants_exit()
6198 return nested_cpu_has(vmcs12, CPU_BASED_RDPMC_EXITING); in nested_vmx_l1_wants_exit()
6200 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDRAND_EXITING); in nested_vmx_l1_wants_exit()
6202 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDSEED_EXITING); in nested_vmx_l1_wants_exit()
6204 return nested_cpu_has(vmcs12, CPU_BASED_RDTSC_EXITING); in nested_vmx_l1_wants_exit()
6206 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_l1_wants_exit()
6207 vmcs12->vmread_bitmap); in nested_vmx_l1_wants_exit()
6209 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_l1_wants_exit()
6210 vmcs12->vmwrite_bitmap); in nested_vmx_l1_wants_exit()
6222 return nested_vmx_exit_handled_cr(vcpu, vmcs12); in nested_vmx_l1_wants_exit()
6224 return nested_cpu_has(vmcs12, CPU_BASED_MOV_DR_EXITING); in nested_vmx_l1_wants_exit()
6226 return nested_vmx_exit_handled_io(vcpu, vmcs12); in nested_vmx_l1_wants_exit()
6228 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_DESC); in nested_vmx_l1_wants_exit()
6231 return nested_vmx_exit_handled_msr(vcpu, vmcs12, exit_reason); in nested_vmx_l1_wants_exit()
6235 return nested_cpu_has(vmcs12, CPU_BASED_MWAIT_EXITING); in nested_vmx_l1_wants_exit()
6237 return nested_vmx_exit_handled_mtf(vmcs12); in nested_vmx_l1_wants_exit()
6239 return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_EXITING); in nested_vmx_l1_wants_exit()
6241 return nested_cpu_has(vmcs12, CPU_BASED_PAUSE_EXITING) || in nested_vmx_l1_wants_exit()
6242 nested_cpu_has2(vmcs12, in nested_vmx_l1_wants_exit()
6247 return nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW); in nested_vmx_l1_wants_exit()
6259 nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_INVPCID) && in nested_vmx_l1_wants_exit()
6260 nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_l1_wants_exit()
6262 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_WBINVD_EXITING); in nested_vmx_l1_wants_exit()
6272 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_XSAVES); in nested_vmx_l1_wants_exit()
6275 return nested_cpu_has2(vmcs12, in nested_vmx_l1_wants_exit()
6278 return nested_vmx_exit_handled_encls(vcpu, vmcs12); in nested_vmx_l1_wants_exit()
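nested_vmx_l1_wants_exit() is essentially a map from exit reason to the L1 control bit that intercepts it. A toy dispatcher for three of the reasons listed above; the enum, names and bit positions are illustrative assumptions, not KVM definitions:

/* Illustrative sketch only. */
#include <stdbool.h>
#include <stdint.h>

enum toy_exit_reason { TOY_EXIT_HLT, TOY_EXIT_RDPMC, TOY_EXIT_MOV_DR };

#define TOY_HLT_EXITING     (1u << 7)
#define TOY_RDPMC_EXITING   (1u << 11)
#define TOY_MOV_DR_EXITING  (1u << 23)

static bool toy_l1_wants_exit(uint32_t cpu_based_ctls, enum toy_exit_reason reason)
{
	switch (reason) {
	case TOY_EXIT_HLT:
		return cpu_based_ctls & TOY_HLT_EXITING;
	case TOY_EXIT_RDPMC:
		return cpu_based_ctls & TOY_RDPMC_EXITING;
	case TOY_EXIT_MOV_DR:
		return cpu_based_ctls & TOY_MOV_DR_EXITING;
	default:
		return true;    /* unknown reasons are conservatively reflected to L1 */
	}
}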
6331 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_reflect_vmexit() local
6333 vmcs12->vm_exit_intr_error_code = in nested_vmx_reflect_vmexit()
6348 struct vmcs12 *vmcs12; in vmx_get_nested_state() local
6365 vmcs12 = get_vmcs12(vcpu); in vmx_get_nested_state()
6373 kvm_state.size += sizeof(user_vmx_nested_state->vmcs12); in vmx_get_nested_state()
6380 nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
6381 vmcs12->vmcs_link_pointer != INVALID_GPA) in vmx_get_nested_state()
6400 if (nested_cpu_has_preemption_timer(vmcs12) && in vmx_get_nested_state()
6427 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in vmx_get_nested_state()
6428 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in vmx_get_nested_state()
6446 BUILD_BUG_ON(sizeof(user_vmx_nested_state->vmcs12) < VMCS12_SIZE); in vmx_get_nested_state()
6453 if (copy_to_user(user_vmx_nested_state->vmcs12, vmcs12, VMCS12_SIZE)) in vmx_get_nested_state()
6456 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
6457 vmcs12->vmcs_link_pointer != INVALID_GPA) { in vmx_get_nested_state()
6480 struct vmcs12 *vmcs12; in vmx_set_nested_state() local
6556 if (kvm_state->size < sizeof(*kvm_state) + sizeof(*vmcs12)) { in vmx_set_nested_state()
6593 vmcs12 = get_vmcs12(vcpu); in vmx_set_nested_state()
6594 if (copy_from_user(vmcs12, user_vmx_nested_state->vmcs12, sizeof(*vmcs12))) in vmx_set_nested_state()
6597 if (vmcs12->hdr.revision_id != VMCS12_REVISION) in vmx_set_nested_state()
6610 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_set_nested_state()
6611 vmcs12->vmcs_link_pointer != INVALID_GPA) { in vmx_set_nested_state()
6612 struct vmcs12 *shadow_vmcs12 = get_shadow_vmcs12(vcpu); in vmx_set_nested_state()
6616 sizeof(user_vmx_nested_state->vmcs12) + sizeof(*shadow_vmcs12)) in vmx_set_nested_state()
6638 if (nested_vmx_check_controls(vcpu, vmcs12) || in vmx_set_nested_state()
6639 nested_vmx_check_host_state(vcpu, vmcs12) || in vmx_set_nested_state()
6640 nested_vmx_check_guest_state(vcpu, vmcs12, &ignored)) in vmx_set_nested_state()
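vmx_set_nested_state() copies the saved vmcs12 image from userspace, checks its header revision, and then re-runs the controls/host-state/guest-state checks. A rough sketch of that validation order, with toy_* placeholders standing in for the real constants and helpers:

/* Illustrative sketch; constants and helper are placeholders, not KVM's. */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define TOY_VMCS12_REVISION  0x11e57ed0u   /* illustrative revision constant */
#define TOY_VMCS12_SIZE      0x1000u

struct toy_vmcs12_hdr {
	uint32_t revision_id;
	uint32_t shadow_vmcs;
};

/* Returns 0 on success, -1 if the saved image cannot be accepted. */
static int toy_restore_vmcs12(void *dst, const void *user_image, size_t user_size)
{
	struct toy_vmcs12_hdr hdr;

	if (user_size < TOY_VMCS12_SIZE)
		return -1;                          /* short buffer */

	memcpy(&hdr, user_image, sizeof(hdr));
	if (hdr.revision_id != TOY_VMCS12_REVISION)
		return -1;                          /* unknown vmcs12 layout */

	memcpy(dst, user_image, TOY_VMCS12_SIZE);
	/* The caller still re-validates controls, host state and guest state. */
	return 0;
}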