Searched refs: X86_CR0_PG (results 1 – 22 of 22), sorted by relevance

/linux-6.1.9/arch/x86/platform/pvh/
head.S:88  mov $(X86_CR0_PG | X86_CR0_PE), %eax
119 or $(X86_CR0_PG | X86_CR0_PE), %eax
131 and $~X86_CR0_PG, %eax
/linux-6.1.9/arch/x86/include/uapi/asm/
processor-flags.h:72  #define X86_CR0_PG _BITUL(X86_CR0_PG_BIT)  [macro definition]
166 X86_CR0_PG)
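
The processor-flags.h hits are the definition site and the tail of a larger CR0 mask: X86_CR0_PG_BIT is 31, and _BITUL(x) expands to (1UL << (x)), so X86_CR0_PG is bit 31 of CR0, the paging-enable flag. Below is a minimal user-space sketch of that bit test, with the macros expanded by hand and a representative CR0 value chosen purely for illustration.

#include <stdio.h>

/* Hand-expanded from uapi/asm/processor-flags.h: _BITUL(31), i.e. CR0.PG. */
#define X86_CR0_PG_BIT 31
#define X86_CR0_PG     (1UL << X86_CR0_PG_BIT)

int main(void)
{
        /* Illustrative CR0 value with PE, MP, ET, NE, WP, AM and PG set. */
        unsigned long cr0 = 0x80050033UL;

        printf("X86_CR0_PG      = %#lx\n", X86_CR0_PG);
        printf("paging enabled?   %s\n", (cr0 & X86_CR0_PG) ? "yes" : "no");
        return 0;
}
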
/linux-6.1.9/arch/x86/realmode/rm/
trampoline_64.S:73  movl $(CR0_STATE & ~X86_CR0_PG), %eax
183 movl $(CR0_STATE & ~X86_CR0_PG), %eax
reboot.S:32  andl $~X86_CR0_PG, %eax
/linux-6.1.9/arch/x86/kernel/
relocate_kernel_32.S:117  andl $~(X86_CR0_PG | X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %eax
194 orl $X86_CR0_PG, %eax
relocate_kernel_64.S:140  orl $(X86_CR0_PG | X86_CR0_PE), %eax
head_32.S:180  movl $(CR0_STATE & ~X86_CR0_PG),%eax
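
The realmode and relocate_kernel hits above all follow one read-modify-write pattern around a paging transition: OR X86_CR0_PG (usually together with X86_CR0_PE) into CR0 to turn paging on, or AND it out to turn paging off, e.g. before dropping back toward real mode or switching page tables. The sketch below shows the same mask arithmetic on a plain variable; the kernel code operates on %cr0 directly in assembly, and the values here are illustrative.

#include <stdio.h>

#define X86_CR0_PE (1UL << 0)   /* protected mode enable */
#define X86_CR0_PG (1UL << 31)  /* paging enable */

/* Pattern of relocate_kernel_64.S:140 - force PE and PG on. */
static unsigned long cr0_enable_paging(unsigned long cr0)
{
        return cr0 | X86_CR0_PG | X86_CR0_PE;
}

/* Pattern of reboot.S:32 - clear PG before leaving paged protected mode. */
static unsigned long cr0_disable_paging(unsigned long cr0)
{
        return cr0 & ~X86_CR0_PG;
}

int main(void)
{
        unsigned long cr0 = X86_CR0_PE;   /* protected mode, paging off */

        cr0 = cr0_enable_paging(cr0);
        printf("paging on:  %#lx\n", cr0);

        cr0 = cr0_disable_paging(cr0);
        printf("paging off: %#lx\n", cr0);
        return 0;
}
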
/linux-6.1.9/arch/x86/kvm/
kvm_cache_regs.h:12  #define X86_CR0_PDPTR_BITS (X86_CR0_CD | X86_CR0_NW | X86_CR0_PG)
mmu.h:42  #define KVM_MMU_CR0_ROLE_BITS (X86_CR0_PG | X86_CR0_WP)
x86.h:185  return likely(kvm_read_cr0_bits(vcpu, X86_CR0_PG)); in is_paging()
x86.c:909  if ((cr0 ^ old_cr0) & X86_CR0_PG) { in kvm_post_set_cr0()
917 if (!(cr0 & X86_CR0_PG)) in kvm_post_set_cr0()
947 if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE)) in kvm_set_cr0()
952 (cr0 & X86_CR0_PG)) { in kvm_set_cr0()
962 if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) && in kvm_set_cr0()
967 if (!(cr0 & X86_CR0_PG) && in kvm_set_cr0()
10277 cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG); in enter_smm()
11462 if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) { in kvm_is_valid_sregs()
11585 bool pae = (sregs2->cr0 & X86_CR0_PG) && (sregs2->cr4 & X86_CR4_PAE) && in __set_sregs2()
12123 if (old_cr0 & X86_CR0_PG) { in kvm_vcpu_reset()
emulate.c:2626  ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE)); in em_rsm()
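
Several of the x86.c hits (e.g. line 909 in kvm_post_set_cr0()) use the XOR-then-mask idiom to ask whether a guest CR0 write toggled the PG bit, since a paging transition requires extra bookkeeping in KVM. A small sketch of that idiom in isolation; the function name and values are illustrative.

#include <stdbool.h>
#include <stdio.h>

#define X86_CR0_PG (1UL << 31)

/* The idiom from x86.c:909 - did this CR0 write flip the PG bit? */
static bool cr0_pg_toggled(unsigned long old_cr0, unsigned long new_cr0)
{
        return ((old_cr0 ^ new_cr0) & X86_CR0_PG) != 0;
}

int main(void)
{
        unsigned long old_cr0 = 0x11UL;               /* paging off */
        unsigned long new_cr0 = 0x11UL | X86_CR0_PG;  /* paging on  */

        printf("toggled: %d\n", cr0_pg_toggled(old_cr0, new_cr0));   /* 1 */
        printf("toggled: %d\n", cr0_pg_toggled(new_cr0, new_cr0));   /* 0 */
        return 0;
}
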
/linux-6.1.9/arch/x86/kvm/vmx/
nested.h:264  fixed0 &= ~(X86_CR0_PE | X86_CR0_PG); in nested_guest_cr0_valid()
vmx.c:146  (KVM_VM_CR0_ALWAYS_ON_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
3138 old_cr0_pg = kvm_read_cr0_bits(vcpu, X86_CR0_PG); in vmx_set_cr0()
3162 if (!old_cr0_pg && (cr0 & X86_CR0_PG)) in vmx_set_cr0()
3164 else if (old_cr0_pg && !(cr0 & X86_CR0_PG)) in vmx_set_cr0()
3194 if (!(cr0 & X86_CR0_PG)) { in vmx_set_cr0()
3206 if ((old_cr0_pg ^ cr0) & X86_CR0_PG) in vmx_set_cr0()
3213 if (!(old_cr0_pg & X86_CR0_PG) && (cr0 & X86_CR0_PG)) in vmx_set_cr0()
nested.c:3042  CC(((vmcs12->guest_cr0 & X86_CR0_PG) && in nested_vmx_check_guest_state()
6930 #define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE) in nested_vmx_setup_ctls_msrs()
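
The vmx.c and nested.c hits revolve around "always-on" CR0 masks: outside of unrestricted-guest mode, a CR0 value is only legal in VMX operation if bits such as PE, PG and NE are all set, and nested.h:264 relaxes exactly PE and PG for unrestricted guests. The sketch below shows the shape of that always-on test, reusing the VMXON_CR0_ALWAYSON mask from vmx/nested.c:6930; it is not KVM's full validation logic.

#include <stdbool.h>
#include <stdio.h>

#define X86_CR0_PE (1UL << 0)
#define X86_CR0_NE (1UL << 5)
#define X86_CR0_PG (1UL << 31)

/* Mirrors VMXON_CR0_ALWAYSON from vmx/nested.c:6930. */
#define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE)

/* A candidate CR0 passes only if every always-on bit is present. */
static bool cr0_has_alwayson_bits(unsigned long cr0)
{
        return (cr0 & VMXON_CR0_ALWAYSON) == VMXON_CR0_ALWAYSON;
}

int main(void)
{
        printf("%d\n", cr0_has_alwayson_bits(X86_CR0_PE | X86_CR0_NE));               /* 0: PG clear */
        printf("%d\n", cr0_has_alwayson_bits(X86_CR0_PE | X86_CR0_NE | X86_CR0_PG));  /* 1 */
        return 0;
}
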
/linux-6.1.9/arch/x86/kvm/svm/
nested.c:92  kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4, in nested_svm_init_mmu_context()
299 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in __nested_vmcb_check_save()
1638 if (!(save->cr0 & X86_CR0_PG) || in svm_set_nested_state()
svm.c:1774  if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) { in svm_set_cr0()
1779 if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) { in svm_set_cr0()
1788 hcr0 |= X86_CR0_PG | X86_CR0_WP; in svm_set_cr0()
/linux-6.1.9/arch/x86/boot/compressed/
head_64.S:958  movl $(X86_CR0_PG | X86_CR0_PE), %ecx /* Enable Paging and Protected mode */
/linux-6.1.9/tools/testing/selftests/kvm/include/x86_64/
processor.h:889  #define X86_CR0_PG (1UL<<31) /* Paging */  [macro definition]
/linux-6.1.9/tools/testing/selftests/kvm/lib/x86_64/
processor.c:589  sregs.cr0 = X86_CR0_PE | X86_CR0_NE | X86_CR0_PG; in vcpu_setup()
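
The KVM selftest header defines X86_CR0_PG on its own as (1UL<<31) rather than through _BITUL, and vcpu_setup() starts guests with PE, NE and PG already set. A compile-time check that the two spellings agree, with both written out by hand; the macro names here are ad hoc, not from the kernel.

/* uapi spelling: _BITUL(X86_CR0_PG_BIT), expanded by hand. */
#define X86_CR0_PG_BIT      31
#define X86_CR0_PG_UAPI     (1UL << X86_CR0_PG_BIT)

/* selftest spelling from the selftests processor.h:889. */
#define X86_CR0_PG_SELFTEST (1UL << 31)

_Static_assert(X86_CR0_PG_UAPI == X86_CR0_PG_SELFTEST,
               "CR0.PG must mean the same bit in both headers");

int main(void)
{
        return 0;
}
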
/linux-6.1.9/arch/x86/include/asm/
kvm_host.h:115  | X86_CR0_NW | X86_CR0_CD | X86_CR0_PG))
/linux-6.1.9/arch/x86/kvm/mmu/
mmu.c:183  BUILD_MMU_ROLE_REGS_ACCESSOR(cr0, pg, X86_CR0_PG);
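
The final hit passes X86_CR0_PG to BUILD_MMU_ROLE_REGS_ACCESSOR, a macro that stamps out a small predicate over KVM's snapshot of the guest control registers. A rough, hand-written sketch of what such a generated accessor amounts to; the struct layout and names below are illustrative, not KVM's exact definitions.

#include <stdbool.h>
#include <stdio.h>

#define X86_CR0_PG (1UL << 31)

/* Illustrative stand-in for KVM's register snapshot. */
struct mmu_role_regs {
        unsigned long cr0;
};

/* Roughly what BUILD_MMU_ROLE_REGS_ACCESSOR(cr0, pg, X86_CR0_PG) expands to:
 * a predicate testing one flag in the snapshot. */
static bool is_cr0_pg(const struct mmu_role_regs *regs)
{
        return !!(regs->cr0 & X86_CR0_PG);
}

int main(void)
{
        struct mmu_role_regs regs = { .cr0 = X86_CR0_PG };

        printf("%d\n", is_cr0_pg(&regs));   /* 1 */
        return 0;
}
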