Lines matching refs:vmcb01 (KVM nested SVM, arch/x86/kvm/svm/nested.c)

114 kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4, in nested_svm_init_mmu_context()
115         svm->vmcb01.ptr->save.efer, in nested_svm_init_mmu_context()
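The two matches above are nested_svm_init_mmu_context() configuring the shadow-NPT MMU from L1's own paging state, which lives in vmcb01's save area: the walk of L1's nested page tables must behave as if L1 performed it. A minimal sketch of the idea, using simplified stand-in types rather than the kernel's structures:

```c
#include <stdint.h>

struct vmcb_save { uint64_t cr4, efer; };
struct vmcb { struct vmcb_save save; };

struct shadow_npt {
	uint64_t host_cr4;   /* L1's CR4: paging features honored by the walk */
	uint64_t host_efer;  /* L1's EFER: long mode / NX for the NPT walk    */
	uint64_t nested_cr3; /* root of the NPT tables L1 built for L2        */
};

/* Hypothetical helper; the kernel's kvm_init_shadow_npt_mmu() consumes the
 * same vmcb01 fields, plus the nested CR3 taken from the vmcb12 controls. */
static void init_shadow_npt(struct shadow_npt *mmu, const struct vmcb *vmcb01,
			    uint64_t nested_cr3)
{
	mmu->host_cr4 = vmcb01->save.cr4;
	mmu->host_efer = vmcb01->save.efer;
	mmu->nested_cr3 = nested_cr3;
}
```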
155 h = &svm->vmcb01.ptr->control; in recalc_intercepts()
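recalc_intercepts() reads vmcb01's control area as the "host" intercept set: while L2 runs, the active VMCB must trap everything L0 wants (vmcb01's bits) in addition to everything L1 asked for in vmcb12. A sketch of that merge, with an illustrative intercept-word count:

```c
#include <stdint.h>

#define MAX_INTERCEPT 5 /* illustrative; not the kernel's enum value */

struct vmcb_ctrl { uint32_t intercepts[MAX_INTERCEPT]; };

/* c = active (vmcb02) controls, h = host (vmcb01), g = guest (vmcb12). */
static void recalc_intercepts_sketch(struct vmcb_ctrl *c,
				     const struct vmcb_ctrl *h,
				     const struct vmcb_ctrl *g)
{
	for (int i = 0; i < MAX_INTERCEPT; i++)
		c->intercepts[i] = h->intercepts[i] | g->intercepts[i];
}
```

An intercept can therefore fire for L0 even when L1 never requested it, which is the case nested_svm_exit_special() (line 1371 below) has to sort out.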
540 svm->nested.vmcb02.ptr->save.g_pat = svm->vmcb01.ptr->save.g_pat; in nested_vmcb02_compute_g_pat()
546 struct vmcb *vmcb01 = svm->vmcb01.ptr; in nested_vmcb02_prepare_save() local
607 } else if (unlikely(vmcb01->control.virt_ext & LBR_CTL_ENABLE_MASK)) { in nested_vmcb02_prepare_save()
608 svm_copy_lbrs(vmcb02, vmcb01); in nested_vmcb02_prepare_save()
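In nested_vmcb02_prepare_save(), vmcb02 inherits vmcb01's g_PAT, and the else-branch at line 607 covers the case where only L1's own LBR virtualization (LBR_CTL_ENABLE_MASK in vmcb01) is active: the LBR snapshot is carried over into vmcb02 so it keeps accumulating while L2 runs. A sketch of the svm_copy_lbrs() pattern, with the save area reduced to the usual LBR fields:

```c
#include <stdint.h>

struct vmcb_save {
	uint64_t dbgctl, br_from, br_to, last_excp_from, last_excp_to;
};
struct vmcb { struct vmcb_save save; };

/* Move the LBR snapshot so whichever VMCB is about to run carries it. */
static void copy_lbrs(struct vmcb *to, const struct vmcb *from)
{
	to->save.dbgctl         = from->save.dbgctl;
	to->save.br_from        = from->save.br_from;
	to->save.br_to          = from->save.br_to;
	to->save.last_excp_from = from->save.last_excp_from;
	to->save.last_excp_to   = from->save.last_excp_to;
}
```

The copy in the opposite direction appears at line 969, on the nested-VMEXIT path.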
618 struct vmcb *vmcb01 = svm->vmcb01.ptr; in nested_vmcb02_prepare_control() local
634 vmcb02->control.nested_ctl = vmcb01->control.nested_ctl; in nested_vmcb02_prepare_control()
635 vmcb02->control.iopm_base_pa = vmcb01->control.iopm_base_pa; in nested_vmcb02_prepare_control()
636 vmcb02->control.msrpm_base_pa = vmcb01->control.msrpm_base_pa; in nested_vmcb02_prepare_control()
661 (vmcb01->control.int_ctl & int_ctl_vmcb01_bits); in nested_vmcb02_prepare_control()
668 vmcb02->control.virt_ext = vmcb01->control.virt_ext & in nested_vmcb02_prepare_control()
686 vmcb02->control.pause_filter_count = vmcb01->control.pause_filter_count; in nested_vmcb02_prepare_control()
687 vmcb02->control.pause_filter_thresh = vmcb01->control.pause_filter_thresh; in nested_vmcb02_prepare_control()
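nested_vmcb02_prepare_control() splits vmcb02's control fields by owner: L0-owned plumbing (nested_ctl, iopm_base_pa, msrpm_base_pa, the pause-filter settings) is inherited from vmcb01, while int_ctl and virt_ext are bitwise merges of vmcb01 and vmcb12 contributions selected by per-source masks. A sketch of the int_ctl merge at line 661; the bit positions are illustrative:

```c
#include <stdint.h>

#define V_INTR_MASKING_MASK (1u << 24) /* illustrative bit positions */
#define V_GIF_ENABLE_MASK   (1u << 25)

/* Bits in vmcb01_bits are enforced by L0; bits in vmcb12_bits are under
 * L1's control. The two mask sets are expected to be disjoint. */
static uint32_t merge_int_ctl(uint32_t vmcb01_int_ctl, uint32_t vmcb12_int_ctl,
			      uint32_t vmcb01_bits, uint32_t vmcb12_bits)
{
	return (vmcb01_int_ctl & vmcb01_bits) |
	       (vmcb12_int_ctl & vmcb12_bits);
}
```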
746 nested_svm_copy_common_state(svm->vmcb01.ptr, svm->nested.vmcb02.ptr); in enter_svm_guest_mode()
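enter_svm_guest_mode() shuttles the handful of fields that live only in the active VMCB from vmcb01 into vmcb02 before switching; nested_svm_vmexit() (line 961 below) does the reverse. A sketch, assuming spec_ctrl is representative of such a field (the exact set varies by kernel version):

```c
#include <stdint.h>

struct vmcb_save { uint64_t spec_ctrl; };
struct vmcb { struct vmcb_save save; };

/* Fields tracked only in the running VMCB follow the active pointer:
 * vmcb01 -> vmcb02 on nested VMRUN, vmcb02 -> vmcb01 on nested VMEXIT. */
static void copy_common_state(const struct vmcb *from, struct vmcb *to)
{
	to->save.spec_ctrl = from->save.spec_ctrl;
}
```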
775 struct vmcb *vmcb01 = svm->vmcb01.ptr; in nested_svm_vmrun() local
819 vmcb01->save.efer = vcpu->arch.efer; in nested_svm_vmrun()
820 vmcb01->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmrun()
821 vmcb01->save.cr4 = vcpu->arch.cr4; in nested_svm_vmrun()
822 vmcb01->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmrun()
823 vmcb01->save.rip = kvm_rip_read(vcpu); in nested_svm_vmrun()
826 vmcb01->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmrun()
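These stores in nested_svm_vmrun() snapshot L1's live register state into vmcb01's save area just before vmcb02 takes over, so a later nested VMEXIT can rebuild L1 exactly. The CR3 store at line 826 sits apart because, with NPT enabled, L1's CR3 is already tracked in vmcb01. A sketch with the kvm_read_*() accessors replaced by a plain struct:

```c
#include <stdint.h>
#include <stdbool.h>

struct vmcb_save { uint64_t efer, cr0, cr3, cr4, rflags, rip; };
struct vmcb { struct vmcb_save save; };
struct l1_regs { uint64_t efer, cr0, cr3, cr4, rflags, rip; };

static void stash_l1_state(struct vmcb *vmcb01, const struct l1_regs *cur,
			   bool npt_enabled)
{
	vmcb01->save.efer   = cur->efer;
	vmcb01->save.cr0    = cur->cr0;
	vmcb01->save.cr4    = cur->cr4;
	vmcb01->save.rflags = cur->rflags;
	vmcb01->save.rip    = cur->rip;
	if (!npt_enabled) /* assumed guard; note the gap before line 826 */
		vmcb01->save.cr3 = cur->cr3;
}
```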
892 struct vmcb *vmcb01 = svm->vmcb01.ptr; in nested_svm_vmexit() local
956 vmcb01->control.pause_filter_count = vmcb02->control.pause_filter_count; in nested_svm_vmexit()
957 vmcb_mark_dirty(vmcb01, VMCB_INTERCEPTS); in nested_svm_vmexit()
961 nested_svm_copy_common_state(svm->nested.vmcb02.ptr, svm->vmcb01.ptr); in nested_svm_vmexit()
963 svm_switch_vmcb(svm, &svm->vmcb01); in nested_svm_vmexit()
968 } else if (unlikely(vmcb01->control.virt_ext & LBR_CTL_ENABLE_MASK)) { in nested_svm_vmexit()
969 svm_copy_lbrs(vmcb01, vmcb02); in nested_svm_vmexit()
978 vmcb01->control.exit_int_info = 0; in nested_svm_vmexit()
981 if (vmcb01->control.tsc_offset != svm->vcpu.arch.tsc_offset) { in nested_svm_vmexit()
982 vmcb01->control.tsc_offset = svm->vcpu.arch.tsc_offset; in nested_svm_vmexit()
983 vmcb_mark_dirty(vmcb01, VMCB_INTERCEPTS); in nested_svm_vmexit()
997 kvm_set_rflags(vcpu, vmcb01->save.rflags); in nested_svm_vmexit()
998 svm_set_efer(vcpu, vmcb01->save.efer); in nested_svm_vmexit()
999 svm_set_cr0(vcpu, vmcb01->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
1000 svm_set_cr4(vcpu, vmcb01->save.cr4); in nested_svm_vmexit()
1001 kvm_rax_write(vcpu, vmcb01->save.rax); in nested_svm_vmexit()
1002 kvm_rsp_write(vcpu, vmcb01->save.rsp); in nested_svm_vmexit()
1003 kvm_rip_write(vcpu, vmcb01->save.rip); in nested_svm_vmexit()
1021 rc = nested_svm_load_cr3(vcpu, vmcb01->save.cr3, false, true); in nested_svm_vmexit()
1039 if (unlikely(vmcb01->save.rflags & X86_EFLAGS_TF)) in nested_svm_vmexit()
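The nested_svm_vmexit() matches are the mirror image of VMRUN: results that accumulated in vmcb02 (pause-filter count, LBRs, the common state) are copied back, vmcb01 becomes the active VMCB again, and L1's registers are replayed from vmcb01's save area, with CR0.PE forced on since L1 always runs in protected mode. A condensed sketch, modeling the svm_set_*()/kvm_*_write() calls as plain stores:

```c
#include <stdint.h>

#define X86_CR0_PE 0x1ULL

struct vmcb_save { uint64_t efer, cr0, cr3, cr4, rflags, rax, rsp, rip; };
struct vmcb { struct vmcb_save save; };
struct vcpu_sketch { struct vmcb *active; struct vmcb_save regs; };

static void return_to_l1(struct vcpu_sketch *v, struct vmcb *vmcb01)
{
	v->active = vmcb01;     /* svm_switch_vmcb(svm, &svm->vmcb01)      */
	v->regs = vmcb01->save; /* EFER, CR0/CR4, RFLAGS, RAX/RSP/RIP, ... */
	v->regs.cr0 |= X86_CR0_PE;
}
```

The TF check at line 1039 queues a pending #DB when the VMRUN instruction itself was being single-stepped, so the trap is accounted for before L1's next instruction runs.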
1119 svm_switch_vmcb(svm, &svm->vmcb01); in svm_leave_nested()
1371 if (svm->vmcb01.ptr->control.intercepts[INTERCEPT_EXCEPTION] & in nested_svm_exit_special()
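svm_leave_nested() (line 1119) reuses the same svm_switch_vmcb() to fall back to vmcb01 when nesting is abandoned outside the normal VMEXIT path. nested_svm_exit_special() then consults vmcb01's exception intercepts to decide whether an exit taken while L2 runs belongs to L0 rather than L1, which is exactly the situation the OR-merge in recalc_intercepts() can create. A sketch of that test:

```c
#include <stdint.h>
#include <stdbool.h>

/* True if L0 (vmcb01's configuration) intercepts this exception vector
 * itself, so the exit is handled by the host and not reflected to L1. */
static bool host_wants_exception(uint32_t vmcb01_excp_intercepts, int vector)
{
	return vmcb01_excp_intercepts & (1u << vector);
}
```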
1492 if (copy_to_user(&user_vmcb->save, &svm->vmcb01.ptr->save, in svm_get_nested_state()
1596 svm->nested.vmcb02.ptr->save = svm->vmcb01.ptr->save; in svm_set_nested_state()
1605 svm_copy_vmrun_state(&svm->vmcb01.ptr->save, save); in svm_set_nested_state()
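The last three matches are the live-migration interface: svm_get_nested_state() exports vmcb01's save area (the stashed L1 state) to userspace, and svm_set_nested_state() first seeds vmcb02's save area from vmcb01 as a placeholder, then overwrites vmcb01 with the L1 state userspace provides. A sketch of the restore ordering:

```c
#include <stdint.h>

struct vmcb_save { uint64_t efer, cr0, cr3, cr4, rflags, rip; };
struct vmcb { struct vmcb_save save; };

static void restore_nested_state(struct vmcb *vmcb01, struct vmcb *vmcb02,
				 const struct vmcb_save *user_save)
{
	vmcb02->save = vmcb01->save; /* placeholder; rebuilt on next entry */
	vmcb01->save = *user_save;   /* L1 state captured before migration */
}
```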