
Searched refs:kvm_mmu (Results 1 – 9 of 9) sorted by relevance

/linux-5.19.10/arch/x86/kvm/mmu/
tdp_mmu.h:73 static inline bool is_tdp_mmu(struct kvm_mmu *mmu) in is_tdp_mmu()
93 static inline bool is_tdp_mmu(struct kvm_mmu *mmu) { return false; } in is_tdp_mmu()
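
The two tdp_mmu.h hits are the same predicate built two ways: on 64-bit kernels is_tdp_mmu() inspects the shadow page behind the MMU's current root, while the stub at line 93 (the !CONFIG_X86_64 build) simply returns false. A simplified sketch of that config-gated shape; the real 5.19 body additionally validates the root HPA and tolerates a NULL shadow page:

#ifdef CONFIG_X86_64
static inline bool is_tdp_mmu(struct kvm_mmu *mmu)
{
        /* Simplified: resolve the current root to its shadow page and ask
         * whether that page was installed by the TDP MMU. */
        struct kvm_mmu_page *sp = to_shadow_page(mmu->root.hpa);

        return sp && is_tdp_mmu_page(sp) && sp->root_count;
}
#else
static inline bool is_tdp_mmu(struct kvm_mmu *mmu) { return false; }
#endif
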
mmu.c:223 static inline bool __maybe_unused is_##reg##_##name(struct kvm_mmu *mmu) \
236 static inline bool is_cr0_pg(struct kvm_mmu *mmu) in is_cr0_pg()
241 static inline bool is_cr4_pae(struct kvm_mmu *mmu) in is_cr4_pae()
3281 void kvm_mmu_free_roots(struct kvm *kvm, struct kvm_mmu *mmu, in kvm_mmu_free_roots()
3333 void kvm_mmu_free_guest_mode_roots(struct kvm *kvm, struct kvm_mmu *mmu) in kvm_mmu_free_guest_mode_roots()
3385 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_direct_roots()
3493 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_shadow_roots()
3603 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_special_roots()
3762 static gpa_t nonpaging_gva_to_gpa(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in nonpaging_gva_to_gpa()
4171 static void nonpaging_init_context(struct kvm_mmu *context) in nonpaging_init_context()
[all …]
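
The mmu.c hit at line 223 sits inside a macro body: the is_##reg##_##name token pasting stamps out one small predicate per tracked register bit, which is where names of the is_cr0_pg()/is_cr4_pae() form come from (the two listed at lines 236 and 241 are defined separately, as the distinct hits show). A stand-alone illustration of the technique, using a hypothetical demo struct rather than the kernel's real cached-role layout:

/* Hypothetical stand-in for the role bits cached by struct kvm_mmu;
 * the real layout lives in kvm_host.h and differs from this demo. */
struct demo_mmu_role {
        unsigned int cr0_pg : 1;
        unsigned int cr4_pae : 1;
};

/* Token pasting mirrors the is_##reg##_##name pattern at mmu.c:223:
 * each invocation expands to one tiny accessor. */
#define BUILD_DEMO_ROLE_ACCESSOR(reg, name)                               \
static inline bool is_##reg##_##name(const struct demo_mmu_role *role)   \
{                                                                         \
        return !!(role->reg##_##name);                                    \
}

BUILD_DEMO_ROLE_ACCESSOR(cr0, pg);      /* expands to is_cr0_pg()  */
BUILD_DEMO_ROLE_ACCESSOR(cr4, pae);     /* expands to is_cr4_pae() */
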
paging_tmpl.h:105 static inline void FNAME(protect_clean_gpte)(struct kvm_mmu *mmu, unsigned *access, in FNAME()
141 static bool FNAME(is_rsvd_bits_set)(struct kvm_mmu *mmu, u64 gpte, int level) in FNAME()
194 struct kvm_mmu *mmu, in FNAME()
266 static inline bool FNAME(is_last_gpte)(struct kvm_mmu *mmu, in FNAME()
299 struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in FNAME()
956 static gpa_t FNAME(gva_to_gpa)(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in FNAME()
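
Every paging_tmpl.h hit is wrapped in FNAME() because the header is a template: it is included several times, once per guest paging flavour, and FNAME() pastes a per-inclusion prefix onto each function so the copies can coexist (paging32_*, paging64_*, ept_*). A trimmed sketch of that mechanism, reusing the is_rsvd_bits_set signature from the hit at line 141; the real header defines far more than shown here:

/* PTTYPE is defined by the file that includes this template before each
 * inclusion; only the naming trick is sketched. */
#if PTTYPE == 64
  #define FNAME(name) paging64_##name
#elif PTTYPE == 32
  #define FNAME(name) paging32_##name
#else
  #define FNAME(name) ept_##name
#endif

/* The same body is compiled once per inclusion, emitting
 * paging64_is_rsvd_bits_set(), paging32_is_rsvd_bits_set(), ... */
static bool FNAME(is_rsvd_bits_set)(struct kvm_mmu *mmu, u64 gpte, int level)
{
        /* ...mode-specific reserved-bit check elided... */
        return false;
}
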
tdp_mmu.c:1159 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_map()
1878 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_get_walk()
1907 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_fast_pf_get_last_sptep()
/linux-5.19.10/arch/x86/include/asm/
kvm_host.h:427 struct kvm_mmu { struct
433 gpa_t (*gva_to_gpa)(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, argument
673 struct kvm_mmu *mmu;
676 struct kvm_mmu root_mmu;
679 struct kvm_mmu guest_mmu;
689 struct kvm_mmu nested_mmu;
695 struct kvm_mmu *walk_mmu;
1860 void kvm_mmu_free_roots(struct kvm *kvm, struct kvm_mmu *mmu,
1862 void kvm_mmu_free_guest_mode_roots(struct kvm *kvm, struct kvm_mmu *mmu);
1897 void kvm_mmu_invalidate_gva(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
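
The kvm_host.h hits outline the data structure itself: struct kvm_mmu carries per-mode callbacks such as gva_to_gpa (line 433), and kvm_vcpu_arch keeps several MMU contexts plus two pointers selecting among them (lines 673-695). A condensed view assembled from those hits; field order, the full member list and the exact gva_to_gpa parameter types are abbreviated, not copied from the header:

struct kvm_mmu {
        /* Per-paging-mode translation hook, e.g. nonpaging_gva_to_gpa()
         * or an FNAME(gva_to_gpa) instantiation from paging_tmpl.h. */
        gpa_t (*gva_to_gpa)(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
                            gpa_t gva, u64 access,
                            struct x86_exception *exception);
        /* ...roots, cached role bits, page-fault handler, etc... */
};

struct kvm_vcpu_arch {
        /* ... */
        struct kvm_mmu *mmu;       /* active context: &root_mmu or &guest_mmu */
        struct kvm_mmu root_mmu;   /* ordinary (L1) translations */
        struct kvm_mmu guest_mmu;  /* shadows L1's nested paging for L2 */
        struct kvm_mmu nested_mmu; /* walks L2's own guest page tables */
        struct kvm_mmu *walk_mmu;  /* context used for guest-virtual walks */
        /* ... */
};
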
/linux-5.19.10/arch/x86/kvm/
mmu.h:174 static inline u8 permission_fault(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in permission_fault()
283 struct kvm_mmu *mmu, in kvm_translate_gpa()
x86.c:762 struct kvm_mmu *fault_mmu; in kvm_inject_emulated_page_fault()
852 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in load_pdptrs()
1193 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_invalidate_pcid()
6847 struct kvm_mmu *mmu = vcpu->arch.mmu; in translate_nested_gpa()
6862 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_mmu_gva_to_gpa_read()
6872 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_mmu_gva_to_gpa_fetch()
6882 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_mmu_gva_to_gpa_write()
6894 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_mmu_gva_to_gpa_system()
6903 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_read_guest_virt_helper()
6936 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_fetch_guest_virt()
[all …]
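
The x86.c hits share one idiom: pick the context that should perform the walk (walk_mmu for guest-virtual addresses, mmu for the active root) and call through its hooks. A minimal sketch of that pattern as the kvm_mmu_gva_to_gpa_* helpers use it; demo_gva_to_gpa_read() is an illustrative name, not the kernel function, and the real helpers also derive the access mask from the guest's CPL:

static gpa_t demo_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva,
                                  struct x86_exception *exception)
{
        /* Guest-virtual translations go through walk_mmu. */
        struct kvm_mmu *mmu = vcpu->arch.walk_mmu;
        u64 access = 0; /* e.g. PFERR_USER_MASK for a CPL-3 guest */

        return mmu->gva_to_gpa(vcpu, mmu, gva, access, exception);
}
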
/linux-5.19.10/arch/x86/kvm/vmx/
vmx.c:3020 struct kvm_mmu *mmu = vcpu->arch.mmu; in vmx_flush_tlb_current()
3057 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in vmx_ept_load_pdptrs()
3072 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in ept_save_pdptrs()
nested.c:5463 struct kvm_mmu *mmu; in handle_invept()