Searched refs:MSR_TM_ACTIVE (Results 1 – 14 of 14) sorted by relevance
63    if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cgpr_active()
97    if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cgpr_get()
144   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cgpr_set()
205   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cfpr_active()
238   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cfpr_get()
283   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cfpr_set()
317   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cvmx_active()
354   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cvmx_get()
404   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cvmx_set()
445   if (!MSR_TM_ACTIVE(target->thread.regs->msr))    in tm_cvsx_active()
[all …]
94    MSR_TM_ACTIVE(tsk->thread.regs->msr) &&    in check_if_tm_restore_required()
224   if (!MSR_TM_ACTIVE(cpumsr) &&    in enable_kernel_fp()
225   MSR_TM_ACTIVE(current->thread.regs->msr))    in enable_kernel_fp()
275   if (!MSR_TM_ACTIVE(cpumsr) &&    in enable_kernel_altivec()
276   MSR_TM_ACTIVE(current->thread.regs->msr))    in enable_kernel_altivec()
346   if (!MSR_TM_ACTIVE(cpumsr) &&    in enable_kernel_vsx()
347   MSR_TM_ACTIVE(current->thread.regs->msr))    in enable_kernel_vsx()
968   if (!MSR_TM_ACTIVE(thr->regs->msr))    in tm_reclaim_task()
1035  if (!MSR_TM_ACTIVE(new->thread.regs->msr)){    in tm_recheckpoint_new_task()
1067  if (!MSR_TM_ACTIVE(prev->thread.regs->msr) && prev->thread.load_tm == 0)    in __switch_to_tm()
[all …]
751   if (MSR_TM_ACTIVE(msr))    in handle_rt_signal32()
768   if (MSR_TM_ACTIVE(msr)) {    in handle_rt_signal32()
851   if (MSR_TM_ACTIVE(msr))    in handle_signal32()
873   if (MSR_TM_ACTIVE(msr))    in handle_signal32()
1139  if (MSR_TM_ACTIVE(msr_hi<<32)) {    in COMPAT_SYSCALL_DEFINE0()
1333  if (MSR_TM_ACTIVE(msr_hi<<32)) {    in COMPAT_SYSCALL_DEFINE0()
229   BUG_ON(!MSR_TM_ACTIVE(msr));    in setup_tm_sigcontexts()
804   if (IS_ENABLED(CONFIG_PPC_TRANSACTIONAL_MEM) && MSR_TM_ACTIVE(msr)) {    in SYSCALL_DEFINE0()
873   if (!MSR_TM_ACTIVE(msr))    in handle_rt_signal64()
886   if (MSR_TM_ACTIVE(msr)) {    in handle_rt_signal64()
344 if (MSR_TM_ACTIVE(regs->msr)) { in get_tm_stackpointer()
188   if (!MSR_TM_ACTIVE(msr)) {    in kvmhv_p9_tm_emulation()
228   if (MSR_TM_ACTIVE(msr) || !(vcpu->arch.texasr & TEXASR_FS)) {    in kvmhv_p9_tm_emulation()
218   MSR_TM_ACTIVE(guest_msr)) {    in kvmppc_emulate_tabort()
278   !MSR_TM_ACTIVE(srr1))    in kvmppc_core_emulate_op_pr()
569   if (!MSR_TM_ACTIVE(guest_msr)) {    in kvmppc_core_emulate_op_pr()
607   if (MSR_TM_ACTIVE(guest_msr) ||    in kvmppc_core_emulate_op_pr()
791   if (MSR_TM_ACTIVE(kvmppc_get_msr(vcpu)) &&    in kvmppc_core_emulate_mtspr_pr()
95    if (MSR_TM_ACTIVE(guest_msr)) {    in load_vcpu_state()
130   if (MSR_TM_ACTIVE(guest_msr)) {    in store_vcpu_state()
385   if (!(MSR_TM_ACTIVE(kvmppc_get_msr(vcpu)))) {    in kvmppc_save_tm_pr()
400   if (!MSR_TM_ACTIVE(kvmppc_get_msr(vcpu))) {    in kvmppc_restore_tm_pr()
334 if (!MSR_TM_ACTIVE(l2_regs.msr)) in kvmhv_enter_nested_guest()
4794 if (MSR_TM_ACTIVE(current->thread.regs->msr)) { in kvmppc_vcpu_run_hv()
125   #define MSR_TM_ACTIVE(x) (((x) & MSR_TS_MASK) != 0) /* Transaction active? */    macro
127   #define MSR_TM_ACTIVE(x) ((void)(x), 0)    macro
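The two hits above are the macro's definitions: the real test under CONFIG_PPC_TRANSACTIONAL_MEM (line 125) and the stub that always evaluates to 0 when TM support is compiled out (line 127). Below is a minimal standalone sketch of the same check as it appears at the call sites listed here; it assumes the TS bit positions from reg.h (bit 33 = suspended, bit 34 = transactional), and the sample MSR value and surrounding program are illustrative only, not kernel code.

/*
 * Minimal standalone sketch of the MSR_TM_ACTIVE() check, modelled on the
 * two reg.h definitions above.  The TS bit positions follow
 * arch/powerpc/include/asm/reg.h; everything else (sample MSR value,
 * the program itself) is illustrative.
 */
#include <stdio.h>
#include <stdint.h>

#define MSR_TS_S      (1ULL << 33)            /* TS field: transaction suspended     */
#define MSR_TS_T      (1ULL << 34)            /* TS field: transaction transactional */
#define MSR_TS_MASK   (MSR_TS_T | MSR_TS_S)

/* Same shape as the CONFIG_PPC_TRANSACTIONAL_MEM definition at line 125:
 * "active" means the TS field is anything other than non-transactional. */
#define MSR_TM_ACTIVE(x)  (((x) & MSR_TS_MASK) != 0)

int main(void)
{
	uint64_t msr_plain = 0x9000000000001033ULL;    /* illustrative MSR, TS = 00 */
	uint64_t msr_tm    = msr_plain | MSR_TS_T;     /* same MSR, TS = transactional */

	/* The call-site pattern seen throughout the hits above: take or skip
	 * a TM-specific path depending on the saved MSR. */
	printf("plain msr: %s\n", MSR_TM_ACTIVE(msr_plain) ? "TM active" : "no transaction");
	printf("tm msr:    %s\n", MSR_TM_ACTIVE(msr_tm) ? "TM active" : "no transaction");
	return 0;
}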
140 if (MSR_TM_ACTIVE(msr)) {
1827 MSR_TM_ACTIVE(current->thread.regs->msr)) { in tm_flush_hash_page()