Searched refs:smsr (Results 1 – 3 of 3) sorted by relevance
110  ulong smsr = vcpu->arch.shared->msr;  in kvmppc_recalc_shadow_msr() local
113  smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_DE;  in kvmppc_recalc_shadow_msr()
115  smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;  in kvmppc_recalc_shadow_msr()
117  smsr |= (vcpu->arch.shared->msr & vcpu->arch.guest_owned_ext);  in kvmppc_recalc_shadow_msr()
120  smsr |= MSR_ISF | MSR_HV;  in kvmppc_recalc_shadow_msr()
122  vcpu->arch.shadow_msr = smsr;  in kvmppc_recalc_shadow_msr()
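These hits appear to be the PowerPC Book3S KVM shadow-MSR recalculation: the guest's MSR image is masked down to the bits the guest may control, the bits the host always needs are forced on, and facility bits the guest currently "owns" are passed through. Below is a minimal userspace sketch of that mask-then-force pattern, assuming illustrative MSR_* bit positions and a made-up vcpu_state struct; none of it is the kernel's actual layout or encoding.

/*
 * Sketch of the shadow-MSR recalculation pattern seen above.
 * The MSR_* values are placeholders, NOT the architected PowerPC bits.
 */
#include <stdio.h>

typedef unsigned long ulong;

/* Placeholder bit positions -- assumption, purely for illustration. */
#define MSR_EE   (1UL << 0)   /* external interrupts enabled            */
#define MSR_PR   (1UL << 1)   /* problem (user) state                   */
#define MSR_FP   (1UL << 2)   /* floating point available               */
#define MSR_ME   (1UL << 3)   /* machine check enable                   */
#define MSR_FE0  (1UL << 4)   /* FP exception mode 0                    */
#define MSR_FE1  (1UL << 5)   /* FP exception mode 1                    */
#define MSR_IR   (1UL << 6)   /* instruction relocate                   */
#define MSR_DR   (1UL << 7)   /* data relocate                          */

struct vcpu_state {
	ulong guest_msr;        /* MSR value the guest thinks it has      */
	ulong guest_owned_ext;  /* facility bits currently given to guest */
	ulong shadow_msr;       /* MSR actually in effect while guest runs */
};

static void recalc_shadow_msr(struct vcpu_state *v)
{
	ulong smsr = v->guest_msr;

	/* Keep only the guest-controllable bits ...                      */
	smsr &= MSR_FE0 | MSR_FE1;
	/* ... force what the host always needs ...                       */
	smsr |= MSR_ME | MSR_IR | MSR_DR | MSR_PR | MSR_EE;
	/* ... and pass through facilities the guest currently owns.      */
	smsr |= v->guest_msr & v->guest_owned_ext;

	v->shadow_msr = smsr;
}

int main(void)
{
	struct vcpu_state v = { .guest_msr = MSR_FE0 | MSR_FP,
				.guest_owned_ext = MSR_FP };
	recalc_shadow_msr(&v);
	printf("shadow_msr = %#lx\n", v.shadow_msr);
	return 0;
}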
187  struct kvm_shared_msrs *smsr;  in shared_msr_update() local
190  smsr = &__get_cpu_var(shared_msrs);  in shared_msr_update()
198  smsr->values[slot].host = value;  in shared_msr_update()
199  smsr->values[slot].curr = value;  in shared_msr_update()
222  struct kvm_shared_msrs *smsr = &__get_cpu_var(shared_msrs);  in kvm_set_shared_msr() local
224  if (((value ^ smsr->values[slot].curr) & mask) == 0)  in kvm_set_shared_msr()
226  smsr->values[slot].curr = value;  in kvm_set_shared_msr()
228  if (!smsr->registered) {  in kvm_set_shared_msr()
229  smsr->urn.on_user_return = kvm_on_user_return;  in kvm_set_shared_msr()
230  user_return_notifier_register(&smsr->urn);  in kvm_set_shared_msr()
[all …]
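This result shows the x86 KVM shared-MSR cache: each CPU remembers the host and current value of a few MSRs, kvm_set_shared_msr() skips the MSR write when the masked bits already match the cached value, and a user-return notifier is armed lazily so the host value can be restored before returning to userspace. The single-CPU sketch below illustrates only that caching pattern; wrmsr(), the struct names, and KVM_NR_SHARED_MSRS here are stand-ins I introduced, not the kernel's definitions.

/*
 * Userspace sketch of the kvm_set_shared_msr() caching pattern:
 * skip the write when the interesting bits are unchanged, and arm a
 * "restore on user return" hook only once.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define KVM_NR_SHARED_MSRS 4          /* illustrative size */

struct msr_value {
	uint64_t host;   /* value the host expects on return to userspace */
	uint64_t curr;   /* value currently programmed into the MSR       */
};

struct shared_msrs {
	bool registered;                 /* user-return hook armed?        */
	struct msr_value values[KVM_NR_SHARED_MSRS];
};

static struct shared_msrs shared_msrs;   /* per-CPU in the real code    */

static void wrmsr(int slot, uint64_t value)
{
	/* Stand-in for the privileged MSR write. */
	printf("wrmsr slot %d <- %#llx\n", slot, (unsigned long long)value);
}

static void set_shared_msr(int slot, uint64_t value, uint64_t mask)
{
	struct shared_msrs *smsr = &shared_msrs;

	/* Skip the expensive write if the masked bits are unchanged. */
	if (((value ^ smsr->values[slot].curr) & mask) == 0)
		return;

	smsr->values[slot].curr = value;
	wrmsr(slot, value);

	/* Arm the restore hook once; KVM registers a user-return
	 * notifier here so the host value comes back later.           */
	if (!smsr->registered)
		smsr->registered = true;
}

int main(void)
{
	shared_msrs.values[0].host = 0x10;
	shared_msrs.values[0].curr = 0x10;

	set_shared_msr(0, 0x20, ~0ULL);   /* differs -> write happens    */
	set_shared_msr(0, 0x20, ~0ULL);   /* same -> write skipped       */
	return 0;
}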
321 volatile unsigned int smsr; member