Searched refs:MSR_TS_MASK (Results 1 – 12 of 12) sorted by relevance
111	msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation()
157	(((msr & MSR_TS_MASK) >> MSR_TS_S_LG) << 29); in kvmhv_p9_tm_emulation()
161	msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation()
164	msr = (msr & ~MSR_TS_MASK) | MSR_TS_S; in kvmhv_p9_tm_emulation()
204	(((msr & MSR_TS_MASK) >> MSR_TS_S_LG) << 29); in kvmhv_p9_tm_emulation()
205	vcpu->arch.shregs.msr &= ~MSR_TS_MASK; in kvmhv_p9_tm_emulation()
237	(((msr & MSR_TS_MASK) >> MSR_TS_S_LG) << 29); in kvmhv_p9_tm_emulation()

67	msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation_early()
99	vcpu->arch.shregs.msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation_early()
115	vcpu->arch.shregs.msr &= ~MSR_TS_MASK; /* go to N state */ in kvmhv_emulate_tm_rollback()

511	if ((msr & MSR_TS_MASK) == MSR_TS_MASK) in kvmppc_set_msr_hv()
512	msr &= ~MSR_TS_MASK; in kvmppc_set_msr_hv()
531	new_msr |= msr & MSR_TS_MASK; in inject_interrupt()

130	(((guest_msr & MSR_TS_MASK) >> (MSR_TS_S_LG - 1)) in kvmppc_emulate_treclaim()
160	guest_msr &= ~(MSR_TS_MASK); in kvmppc_emulate_treclaim()
185	guest_msr &= ~(MSR_TS_MASK); in kvmppc_emulate_trchkpt()
209	(((guest_msr & MSR_TS_MASK) >> (MSR_TS_S_LG - 1)) in kvmppc_emulate_tabort()

123	new_msr |= msr & MSR_TS_MASK; in kvmppc_inject_interrupt_pr()
239	MSR_TM | MSR_TS_MASK; in kvmppc_recalc_shadow_msr()
327	(vcpu->arch.shadow_srr1 & (MSR_TS_MASK)) != in kvmppc_copy_from_svcpu()
328	(old_msr & (MSR_TS_MASK)))) { in kvmppc_copy_from_svcpu()
329	old_msr &= ~(MSR_TS_MASK); in kvmppc_copy_from_svcpu()
330	old_msr |= (vcpu->arch.shadow_srr1 & (MSR_TS_MASK)); in kvmppc_copy_from_svcpu()

337	if (l2_regs.msr & MSR_TS_MASK) in kvmhv_enter_nested_guest()
339	if (WARN_ON_ONCE(vcpu->arch.shregs.msr & MSR_TS_MASK)) in kvmhv_enter_nested_guest()
393	vcpu->arch.shregs.msr = saved_l1_regs.msr & ~MSR_TS_MASK; in kvmhv_enter_nested_guest()
395	if (l2_regs.msr & MSR_TS_MASK) in kvmhv_enter_nested_guest()
872 vcpu->arch.shregs.msr & MSR_TS_MASK) in kvmhv_vcpu_entry_p9()
119	#define MSR_TS_MASK (MSR_TS_T | MSR_TS_S) /* Transaction State bits */ macro
120	#define MSR_TM_RESV(x) (((x) & MSR_TS_MASK) == MSR_TS_MASK) /* Reserved */
121	#define MSR_TM_TRANSACTIONAL(x) (((x) & MSR_TS_MASK) == MSR_TS_T)
122	#define MSR_TM_SUSPENDED(x) (((x) & MSR_TS_MASK) == MSR_TS_S)
125	#define MSR_TM_ACTIVE(x) (((x) & MSR_TS_MASK) != 0) /* Transaction active? */
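The hits above are the defining group: MSR_TS_MASK covers the two Transaction State bits and the MSR_TM_* helpers classify them. A minimal userspace sketch of how those macros sort an MSR image into the four TS states, assuming the bit positions MSR_TS_S_LG = 33 and MSR_TS_T_LG = 34 from the same header; the ts_state() helper and the demo main() are illustrative, not kernel code:

#include <stdio.h>

/* Assumed TS bit positions from asm/reg.h (MSR_TS_S_LG = 33, MSR_TS_T_LG = 34). */
#define MSR_TS_S_LG	33			/* suspended */
#define MSR_TS_T_LG	34			/* transactional */
#define MSR_TS_S	(1ULL << MSR_TS_S_LG)
#define MSR_TS_T	(1ULL << MSR_TS_T_LG)
#define MSR_TS_MASK	(MSR_TS_T | MSR_TS_S)	/* Transaction State bits */
#define MSR_TM_RESV(x)		(((x) & MSR_TS_MASK) == MSR_TS_MASK)	/* Reserved */
#define MSR_TM_TRANSACTIONAL(x)	(((x) & MSR_TS_MASK) == MSR_TS_T)
#define MSR_TM_SUSPENDED(x)	(((x) & MSR_TS_MASK) == MSR_TS_S)
#define MSR_TM_ACTIVE(x)	(((x) & MSR_TS_MASK) != 0)

/* Illustrative helper (not kernel code): name the TS state in an MSR image. */
static const char *ts_state(unsigned long long msr)
{
	if (MSR_TM_RESV(msr))
		return "reserved (both TS bits set)";
	if (MSR_TM_TRANSACTIONAL(msr))
		return "transactional (T)";
	if (MSR_TM_SUSPENDED(msr))
		return "suspended (S)";
	return "non-transactional (N)";
}

int main(void)
{
	unsigned long long msr = 0;

	printf("%s\n", ts_state(msr));			/* N */
	printf("%s\n", ts_state(msr | MSR_TS_S));	/* S */
	printf("%s\n", ts_state(msr | MSR_TS_T));	/* T */
	/* Clearing the field, as many of the hits above do, returns to N. */
	msr = (msr | MSR_TS_T) & ~MSR_TS_MASK;
	printf("active = %d\n", MSR_TM_ACTIVE(msr));	/* active = 0 */
	return 0;
}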
576	regs_set_return_msr(regs, regs->msr | (msr & MSR_TS_MASK)); in restore_tm_sigcontexts()
798	regs_set_return_msr(regs, regs->msr & ~MSR_TS_MASK); in SYSCALL_DEFINE0()
831	current->thread.regs->msr & ~MSR_TS_MASK); in SYSCALL_DEFINE0()

685	regs_set_return_msr(regs, (regs->msr & ~MSR_TS_MASK) | (msr_hi & MSR_TS_MASK)); in restore_tm_user_regs()
1157	regs_set_return_msr(regs, regs->msr & ~MSR_TS_MASK); in COMPAT_SYSCALL_DEFINE0()
357 regs_set_return_msr(regs, regs->msr & ~MSR_TS_MASK); in get_tm_stackpointer()
470 li r5, (MSR_TS_MASK)@higher
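The last hit builds the mask in assembly. In the 64-bit powerpc assembler, CONST@higher selects bits 32-47 of a constant, and under the assumed bit positions above both TS bits sit inside that halfword, so the immediate loaded by li can be shifted up 32 bits to recover the whole mask. A small check of that arithmetic; at_higher() is an illustrative model of the operator, not an assembler or kernel facility:

#include <assert.h>
#include <stdio.h>

/* Assumed TS bit positions (MSR_TS_S_LG = 33, MSR_TS_T_LG = 34). */
#define MSR_TS_S	(1ULL << 33)
#define MSR_TS_T	(1ULL << 34)
#define MSR_TS_MASK	(MSR_TS_T | MSR_TS_S)

/* Illustrative model of the assembler's @higher operator: bits 32-47. */
static unsigned long long at_higher(unsigned long long v)
{
	return (v >> 32) & 0xffff;
}

int main(void)
{
	/* What "li r5, (MSR_TS_MASK)@higher" would leave in r5. */
	unsigned long long r5 = at_higher(MSR_TS_MASK);

	/* Shifting that halfword back up recovers the full mask, so no
	 * other @h/@l pieces of the constant are needed. */
	assert((r5 << 32) == MSR_TS_MASK);
	printf("r5 = 0x%llx, mask = 0x%llx\n", r5, r5 << 32);
	return 0;
}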