Searched refs:X86_CR0_PE (Results 1 – 13 of 13) sorted by relevance
55 orb $X86_CR0_PE, %al
66 andb $~X86_CR0_PE, %al
30 #define X86_CR0_PE 0x00000001 /* Protection Enable */ macro
44 (~(unsigned long)(X86_CR0_PE | X86_CR0_MP | X86_CR0_EM | X86_CR0_TS \
40 orb $X86_CR0_PE, %dl # Protected mode
38 return kvm_read_cr0_bits(vcpu, X86_CR0_PE); in is_protmode()
87 (KVM_GUEST_CR0_MASK_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
91 (KVM_VM_CR0_ALWAYS_ON_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
2043 #define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE) in vmx_get_vmx_msr()
2968 if (vmx->rmode.vm86_active && (cr0 & X86_CR0_PE)) in vmx_set_cr0()
2971 if (!vmx->rmode.vm86_active && !(cr0 & X86_CR0_PE)) in vmx_set_cr0()
5029 !kvm_read_cr0_bits(vcpu, X86_CR0_PE) || in handle_vmon()
2375 svm_set_cr0(&svm->vcpu, hsave->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
4160 if (cr0 & X86_CR0_PE)
4161 val |= X86_CR0_PE;
3139 if (((new_val & X86_CR0_PG) && !(new_val & X86_CR0_PE)) || in check_cr_write()
511 if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE)) in kvm_set_cr0()
119 orl $(X86_CR0_PG | X86_CR0_PE), %eax
116 orl $(X86_CR0_PE), %eax
192 #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | \
172 movl $(X86_CR0_PG | X86_CR0_PE), %eax /* Enable Paging and Protected mode */