/linux-6.1.9/virt/kvm/

pfncache.c
      82  struct kvm_memslots *slots = kvm_memslots(kvm);  in kvm_gfn_to_pfn_cache_check()
     244  struct kvm_memslots *slots = kvm_memslots(kvm);  in kvm_gfn_to_pfn_cache_refresh()

kvm_main.c
     584  struct kvm_memslots *slots;  in __kvm_handle_hva_range()
     976  static void kvm_free_memslots(struct kvm *kvm, struct kvm_memslots *slots)  in kvm_free_memslots()
    1132  struct kvm_memslots *slots;  in kvm_create_vm()
    1394  static struct kvm_memslots *kvm_get_inactive_memslots(struct kvm *kvm, int as_id)  in kvm_get_inactive_memslots()
    1396  struct kvm_memslots *active = __kvm_memslots(kvm, as_id);  in kvm_get_inactive_memslots()
    1422  static void kvm_insert_gfn_node(struct kvm_memslots *slots,  in kvm_insert_gfn_node()
    1447  static void kvm_erase_gfn_node(struct kvm_memslots *slots,  in kvm_erase_gfn_node()
    1453  static void kvm_replace_gfn_node(struct kvm_memslots *slots,  in kvm_replace_gfn_node()
    1479  struct kvm_memslots *slots = kvm_get_inactive_memslots(kvm, as_id);  in kvm_replace_memslot()
    1543  struct kvm_memslots *slots = kvm_get_inactive_memslots(kvm, as_id);  in kvm_swap_active_memslots()
    [all …]

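The kvm_get_inactive_memslots() / kvm_swap_active_memslots() hits above reflect KVM's double-buffered memslot layout: two backing sets per address space plus one published pointer, visible in the kvm_host.h hits below. A minimal sketch of that publish-and-swap idea follows, in plain C11 rather than the kernel's RCU/SRCU machinery; the struct and function names here are illustrative, not the kernel's.

#include <stdatomic.h>

struct memslot_set {
	unsigned long long generation;	/* bumped on every layout change */
	/* ... slot storage ... */
};

struct vm_slots {
	struct memslot_set sets[2];		/* cf. __memslots[as_id][2] */
	_Atomic(struct memslot_set *) active;	/* cf. memslots[as_id] (__rcu) */
};

/* Writers edit the set that readers are not currently looking at ... */
static struct memslot_set *inactive_set(struct vm_slots *vs)
{
	struct memslot_set *cur = atomic_load(&vs->active);

	return (cur == &vs->sets[0]) ? &vs->sets[1] : &vs->sets[0];
}

/*
 * ... then publish it with a single pointer swap; the kernel additionally
 * waits for SRCU readers of the old set before reusing it.
 */
static void swap_active(struct vm_slots *vs)
{
	struct memslot_set *next = inactive_set(vs);

	next->generation = atomic_load(&vs->active)->generation + 1;
	atomic_store(&vs->active, next);
}
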
/linux-6.1.9/include/linux/

kvm_host.h
     672  struct kvm_memslots {  struct
     709  struct kvm_memslots __memslots[KVM_ADDRESS_SPACE_NUM][2];  argument
     711  struct kvm_memslots __rcu *memslots[KVM_ADDRESS_SPACE_NUM];
     950  static inline struct kvm_memslots *__kvm_memslots(struct kvm *kvm, int as_id)  in __kvm_memslots()
     958  static inline struct kvm_memslots *kvm_memslots(struct kvm *kvm)  in kvm_memslots()  function
     963  static inline struct kvm_memslots *kvm_vcpu_memslots(struct kvm_vcpu *vcpu)  in kvm_vcpu_memslots()
     970  static inline bool kvm_memslots_empty(struct kvm_memslots *slots)  in kvm_memslots_empty()
     981  struct kvm_memory_slot *id_to_memslot(struct kvm_memslots *slots, int id)  in id_to_memslot()
     996  struct kvm_memslots *slots;
    1011  struct kvm_memslots *slots,  in kvm_memslot_iter_start()
    [all …]

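The kvm_host.h hits above are the read-side API: kvm_memslots()/kvm_vcpu_memslots() return the active set, which the arch code below then iterates. A hedged sketch of the usual combination, assuming the kvm_for_each_memslot() iterator and the kvm->srcu read lock seen in the hits further down; this is illustrative kernel-style code, not copied from any indexed file.

#include <linux/kvm_host.h>

/*
 * Illustrative only: walk the active memslots of the default address
 * space while holding the SRCU read lock that protects the memslots
 * pointer.  Field and iterator names follow the hits in this listing.
 */
static void walk_active_memslots(struct kvm *kvm)
{
	struct kvm_memory_slot *memslot;
	struct kvm_memslots *slots;
	int bkt, idx;

	idx = srcu_read_lock(&kvm->srcu);
	slots = kvm_memslots(kvm);	/* active set, RCU-dereferenced */

	kvm_for_each_memslot(memslot, bkt, slots) {
		/* slot covers memslot->base_gfn .. base_gfn + npages - 1 */
	}

	srcu_read_unlock(&kvm->srcu, idx);
}
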
kvm_types.h
      17  struct kvm_memslots;

/linux-6.1.9/arch/powerpc/kvm/

book3s_hv_uvmem.c
     462  struct kvm_memslots *slots;  in kvmppc_h_svm_init_start()
     483  slots = kvm_memslots(kvm);  in kvmppc_h_svm_init_start()
     491  slots = kvm_memslots(kvm);  in kvmppc_h_svm_init_start()
     671  kvm_for_each_memslot(memslot, bkt, kvm_memslots(kvm))  in kvmppc_h_svm_init_abort()
     827  struct kvm_memslots *slots;  in kvmppc_h_svm_init_done()
     837  slots = kvm_memslots(kvm);  in kvmppc_h_svm_init_done()

book3s_64_mmu_hv.c
     743  struct kvm_memslots *slots;  in kvmppc_rmap_reset()
     748  slots = kvm_memslots(kvm);  in kvmppc_rmap_reset()
    1264  __gfn_to_memslot(kvm_memslots(kvm), gfn);  in resize_hpt_rehash_hpte()

book3s_64_vio.c
     360  memslot = __gfn_to_memslot(kvm_memslots(kvm), gfn);  in kvmppc_tce_to_ua()

book3s_hv_nested.c
     787  kvm_for_each_memslot(memslot, bkt, kvm_memslots(kvm))  in kvmhv_release_all_nested()

book3s_hv.c
    4946  struct kvm_memslots *slots;  in kvm_vm_ioctl_get_dirty_log_hv()
    4959  slots = kvm_memslots(kvm);  in kvm_vm_ioctl_get_dirty_log_hv()
    6069  struct kvm_memslots *slots = __kvm_memslots(kvm, i);  in kvmhv_svm_off()

/linux-6.1.9/arch/arm64/kvm/

mmu.c
     246  struct kvm_memslots *slots;  in stage2_flush_vm()
     253  slots = kvm_memslots(kvm);  in stage2_flush_vm()
     773  struct kvm_memslots *slots;  in stage2_unmap_vm()
     781  slots = kvm_memslots(kvm);  in stage2_unmap_vm()
     883  struct kvm_memslots *slots = kvm_memslots(kvm);  in kvm_mmu_wp_memory_region()

/linux-6.1.9/arch/x86/kvm/

x86.h
     206  u64 gen = kvm_memslots(vcpu->kvm)->generation;  in vcpu_cache_mmio_info()
     223  return vcpu->arch.mmio_gen == kvm_memslots(vcpu->kvm)->generation;  in vcpu_match_mmio_gen()

debugfs.c
      92  struct kvm_memslots *slots;  in kvm_mmu_rmaps_stat_show()

x86.c
    3447  struct kvm_memslots *slots;  in record_steal_time()
    3463  slots = kvm_memslots(vcpu->kvm);  in record_steal_time()
    4747  struct kvm_memslots *slots;  in kvm_steal_time_set_preempted()
    4774  slots = kvm_memslots(vcpu->kvm);  in kvm_steal_time_set_preempted()
   12488  struct kvm_memslots *slots = kvm_memslots(kvm);  in __x86_set_memory_region()

/linux-6.1.9/arch/riscv/kvm/

mmu.c
     335  struct kvm_memslots *slots = kvm_memslots(kvm);  in gstage_wp_memory_region()

/linux-6.1.9/Documentation/virt/kvm/x86/

mmu.rst
     197  determines which of the kvm_memslots array was used to build this
     451  kvm_memslots(kvm)->generation, and increased whenever guest memory info
     462  Unfortunately, a single memory access might access kvm_memslots(kvm) multiple
     468  returns; thus, bit 63 of kvm_memslots(kvm)->generation set to 1 only during a

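The mmu.rst lines above describe the memslots generation number; the vcpu_cache_mmio_info()/vcpu_match_mmio_gen() hits from arch/x86/kvm/x86.h earlier in this listing show its typical use. A hedged sketch of that pattern (illustrative, not the kernel's exact helper):

#include <linux/kvm_host.h>

/*
 * Illustrative only: a value cached from the memslot layout is trusted
 * while the generation it was recorded under is still current.  Any
 * memslot update bumps kvm_memslots(kvm)->generation, and bit 63 is set
 * while an update is in flight, so in-progress generations never match
 * a cached one.
 */
static bool cached_gen_still_valid(struct kvm_vcpu *vcpu, u64 cached_gen)
{
	return cached_gen == kvm_memslots(vcpu->kvm)->generation;
}
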
/linux-6.1.9/arch/powerpc/include/asm/

kvm_book3s_64.h
     538  static inline struct kvm_memslots *kvm_memslots_raw(struct kvm *kvm)  in kvm_memslots_raw()

/linux-6.1.9/arch/s390/kvm/

pv.c
     144  npages = kvm_s390_get_gfn_end(kvm_memslots(kvm));  in kvm_s390_pv_alloc_vm()

kvm-s390.h
     221  static inline unsigned long kvm_s390_get_gfn_end(struct kvm_memslots *slots)  in kvm_s390_get_gfn_end()

kvm-s390.c
    1127  struct kvm_memslots *slots;  in kvm_s390_vm_start_migration()
    1134  slots = kvm_memslots(kvm);  in kvm_s390_vm_start_migration()
    2128  static struct kvm_memory_slot *gfn_to_memslot_approx(struct kvm_memslots *slots,  in gfn_to_memslot_approx()
    2134  static unsigned long kvm_s390_next_dirty_cmma(struct kvm_memslots *slots,  in kvm_s390_next_dirty_cmma()
    2162  struct kvm_memslots *slots = kvm_memslots(kvm);  in kvm_s390_get_cmma()

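kvm_s390_get_gfn_end() in the kvm-s390.h hit above derives the guest's end frame number from the memslots. A naive hedged sketch of that computation, scanning every slot with the iterator used elsewhere in this listing (the in-tree helper can answer this more directly from the slots' gfn tree); the function name here is illustrative:

#include <linux/kvm_host.h>

/*
 * Illustrative only: the end GFN is the largest base_gfn + npages over
 * all memslots.
 */
static unsigned long sketch_get_gfn_end(struct kvm_memslots *slots)
{
	struct kvm_memory_slot *memslot;
	unsigned long end = 0;
	int bkt;

	if (kvm_memslots_empty(slots))
		return 0;

	kvm_for_each_memslot(memslot, bkt, slots) {
		unsigned long slot_end = memslot->base_gfn + memslot->npages;

		if (slot_end > end)
			end = slot_end;
	}

	return end;
}
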
/linux-6.1.9/arch/x86/kvm/mmu/

mmu.c
     785  struct kvm_memslots *slots;  in account_shadowed()
     818  struct kvm_memslots *slots;  in unaccount_shadowed()
    1035  struct kvm_memslots *slots;  in rmap_remove()
    3597  struct kvm_memslots *slots;  in mmu_first_shadow_root_alloc()
    6018  struct kvm_memslots *slots;  in kvm_rmap_zap_gfn_range()