Searched refs:X86_CR0_PG (Results 1 – 12 of 12) sorted by relevance
40 #define X86_CR0_PG 0x80000000 /* Paging */ macro
46 | X86_CR0_NW | X86_CR0_CD | X86_CR0_PG))
67 return kvm_read_cr0_bits(vcpu, X86_CR0_PG); in is_paging()
87 (KVM_GUEST_CR0_MASK_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
91 (KVM_VM_CR0_ALWAYS_ON_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
2043 #define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE) in vmx_get_vmx_msr()
2935 if (!(cr0 & X86_CR0_PG)) { in ept_update_paging_mode_cr0()
2976 if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) in vmx_set_cr0()
2978 if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) in vmx_set_cr0()
496 unsigned long update_bits = X86_CR0_PG | X86_CR0_WP | in kvm_set_cr0()
511 if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE)) in kvm_set_cr0()
514 if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) { in kvm_set_cr0()
533 if ((cr0 ^ old_cr0) & X86_CR0_PG) { in kvm_set_cr0()
1563 if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) { in svm_set_cr0()
1568 if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) { in svm_set_cr0()
1577 cr0 |= X86_CR0_PG | X86_CR0_WP; in svm_set_cr0()
3139 if (((new_val & X86_CR0_PG) && !(new_val & X86_CR0_PE)) || in check_cr_write()
3146 if ((new_val & X86_CR0_PG) && (efer & EFER_LME) && in check_cr_write()
3160 else if (ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PG) in check_cr_write()
119 orl $(X86_CR0_PG | X86_CR0_PE), %eax
115 andl $~(X86_CR0_PG | X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %eax
194 X86_CR0_PG)
351 orl $X86_CR0_PG,%eax
172 movl $(X86_CR0_PG | X86_CR0_PE), %eax /* Enable Paging and Protected mode */