Searched refs:walk_mmu (Results 1 – 7 of 7) sorted by relevance

/linux-2.6.39/arch/x86/kvm/
x86.h 55 return vcpu->arch.walk_mmu == &vcpu->arch.nested_mmu; in mmu_is_nested()
kvm_cache_regs.h 45 return vcpu->arch.walk_mmu->pdptrs[index]; in kvm_pdptr_read()
x86.c 420 return kvm_read_guest_page_mmu(vcpu, vcpu->arch.walk_mmu, gfn, in kvm_read_nested_guest_page()
464 u64 pdpte[ARRAY_SIZE(vcpu->arch.walk_mmu->pdptrs)]; in pdptrs_changed()
483 changed = memcmp(pdpte, vcpu->arch.walk_mmu->pdptrs, sizeof(pdpte)) != 0; in pdptrs_changed()
522 if (is_pae(vcpu) && !load_pdptrs(vcpu, vcpu->arch.walk_mmu, in kvm_set_cr0()
617 && !load_pdptrs(vcpu, vcpu->arch.walk_mmu, in kvm_set_cr4()
652 !load_pdptrs(vcpu, vcpu->arch.walk_mmu, cr3)) in kvm_set_cr3()
3649 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_read()
3657 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_fetch()
3665 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_write()
3672 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, 0, exception); in kvm_mmu_gva_to_gpa_system()
[all …]
mmu.c 2981 struct kvm_mmu *context = vcpu->arch.walk_mmu; in init_kvm_tdp_mmu()
3047 int r = kvm_init_shadow_mmu(vcpu, vcpu->arch.walk_mmu); in init_kvm_softmmu()
3049 vcpu->arch.walk_mmu->set_cr3 = kvm_x86_ops->set_cr3; in init_kvm_softmmu()
3050 vcpu->arch.walk_mmu->get_cr3 = get_cr3; in init_kvm_softmmu()
3051 vcpu->arch.walk_mmu->inject_page_fault = kvm_inject_page_fault; in init_kvm_softmmu()
paging_tmpl.h 281 walker->fault.nested_page_fault = mmu != vcpu->arch.walk_mmu; in FNAME()
svm.c 1181 load_pdptrs(vcpu, vcpu->arch.walk_mmu, kvm_read_cr3(vcpu)); in svm_cache_reg()
1801 vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu; in nested_svm_init_mmu_context()
1808 vcpu->arch.walk_mmu = &vcpu->arch.mmu; in nested_svm_uninit_mmu_context()
/linux-2.6.39/arch/x86/include/asm/
kvm_host.h 324 struct kvm_mmu *walk_mmu; member
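
The hits above all revolve around one pointer: vcpu->arch.walk_mmu selects which kvm_mmu context is used to walk guest page tables (the gva_to_gpa helpers, PDPTR reads, nested page-fault reporting), and the nested SVM code redirects it to nested_mmu while a nested guest runs. Below is a minimal, self-contained sketch of that pattern, not kernel code: the struct layouts, the main() driver, and the reduced function bodies are assumptions for illustration; only the walk_mmu assignments and the mmu_is_nested() comparison are taken from the lines shown.

/*
 * Sketch of the walk_mmu switching pattern shown by the hits above.
 * Struct layouts are simplified stand-ins, not the real kvm_host.h types.
 */
#include <stdbool.h>
#include <stdio.h>

struct kvm_mmu {
        const char *name;       /* placeholder for the real callbacks (gva_to_gpa, get_cr3, ...) */
};

struct kvm_vcpu_arch {
        struct kvm_mmu *walk_mmu;       /* kvm_host.h:324: struct kvm_mmu *walk_mmu; */
        struct kvm_mmu mmu;             /* regular guest MMU */
        struct kvm_mmu nested_mmu;      /* MMU seen while a nested (L2) guest runs */
};

struct kvm_vcpu {
        struct kvm_vcpu_arch arch;
};

/* x86.h:55 -- nested means walk_mmu points at the nested MMU */
static bool mmu_is_nested(struct kvm_vcpu *vcpu)
{
        return vcpu->arch.walk_mmu == &vcpu->arch.nested_mmu;
}

/* svm.c:1801 -- body reduced to the walk_mmu assignment shown in the hit */
static void nested_svm_init_mmu_context(struct kvm_vcpu *vcpu)
{
        vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu;
}

/* svm.c:1808 -- restore the regular MMU for guest walks */
static void nested_svm_uninit_mmu_context(struct kvm_vcpu *vcpu)
{
        vcpu->arch.walk_mmu = &vcpu->arch.mmu;
}

int main(void)
{
        struct kvm_vcpu vcpu = {
                .arch = { .mmu = { "mmu" }, .nested_mmu = { "nested_mmu" } }
        };

        vcpu.arch.walk_mmu = &vcpu.arch.mmu;            /* default: walk with the regular MMU */
        printf("nested: %d\n", mmu_is_nested(&vcpu));   /* 0 */

        nested_svm_init_mmu_context(&vcpu);             /* nested guest entered */
        printf("nested: %d\n", mmu_is_nested(&vcpu));   /* 1 */

        nested_svm_uninit_mmu_context(&vcpu);           /* nested guest left */
        printf("nested: %d\n", mmu_is_nested(&vcpu));   /* 0 */
        return 0;
}

In the real kernel the switch happens in nested_svm_init_mmu_context()/nested_svm_uninit_mmu_context() exactly as the svm.c hits show; everything else here is scaffolding so the snippet compiles on its own.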