Searched refs:MSR_LE (Results 1 – 22 of 22) sorted by relevance
/linux-6.1.9/arch/powerpc/kvm/ |
D | book3s_hv_tm_builtin.c | 82 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation_early()
|
D | book3s_hv_tm.c | 126 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation()
|
D | book3s_pr.c | 238 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE | in kvmppc_recalc_shadow_msr()
  |             | 241 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE; in kvmppc_recalc_shadow_msr()
  |             | 1553 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_get_one_reg_pr()
  |             | 1636 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr_pr()
  |             | 1638 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr_pr()
  |             | 1781 vcpu->arch.shadow_msr = MSR_USER64 & ~MSR_LE; in kvmppc_core_vcpu_create_pr()
|
D | book3s_emulate.c | 247 if ((kvmppc_get_msr(vcpu) & MSR_LE) && in kvmppc_core_emulate_op_pr()
|
D | powerpc.c | 187 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_kvm_pv()
|
D | book3s_hv.c | 2164 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr()
  |             | 2166 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr()
|
/linux-6.1.9/tools/testing/selftests/powerpc/tm/ |
D | tm-trap.c | 48 #define MSR_LE 1UL macro
  |           | 67 thread_endianness = MSR_LE & ucp->uc_mcontext.gp_regs[PT_MSR]; in trap_signal_handler()
|
/linux-6.1.9/arch/powerpc/kernel/ |
D | signal_32.c | 488 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (msr & MSR_LE)); in restore_user_regs()
  |             | 593 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (msr & MSR_LE)); in restore_tm_user_regs()
  |             | 816 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (MSR_KERNEL & MSR_LE)); in handle_rt_signal32()
  |             | 904 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (MSR_KERNEL & MSR_LE)); in handle_signal32()
|
D | signal_64.c | 362 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (msr & MSR_LE)); in __unsafe_restore_sigcontext()
  |             | 479 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (msr & MSR_LE)); in restore_tm_sigcontexts()
  |             | 955 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (MSR_KERNEL & MSR_LE)); in handle_rt_signal64()
|
D | align.c | 314 if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) { in fix_alignment()
|
D | syscalls.c | 116 current->thread.regs->msr ^ MSR_LE); in SYSCALL_DEFINE0()
|
D | entry_64.S | 300 LOAD_REG_IMMEDIATE(r12, MSR_SF | MSR_LE)
|
D | process.c | 1458 {MSR_LE, "LE"},
  |           | 2038 regs_set_return_msr(regs, regs->msr & ~MSR_LE); in set_endian()
  |           | 2040 regs_set_return_msr(regs, regs->msr | MSR_LE); in set_endian()
  |           | 2059 if (regs->msr & MSR_LE) { in get_endian()
|
D | traps.c | 919 swap = (msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in p9_hmi_special_emu()
|
D | exceptions-64s.S | 2008 xori r12,r12,MSR_LE
|
/linux-6.1.9/arch/powerpc/platforms/powernv/ |
D | opal-wrappers.S | 27 li r0,MSR_IR|MSR_DR|MSR_LE
|
/linux-6.1.9/arch/powerpc/boot/ |
D | ppc_asm.h | 63 #define MSR_LE 0x0000000000000001 macro
|
D | opal-calls.S | 43 li r11,MSR_LE
|
/linux-6.1.9/arch/powerpc/include/asm/ |
D | kvm_book3s.h | 398 return (kvmppc_get_msr(vcpu) & MSR_LE) != (MSR_KERNEL & MSR_LE); in kvmppc_need_byteswap()
|
D | reg.h | 113 #define MSR_LE __MASK(MSR_LE_LG) /* Little Endian */ macro
  |       | 139 #define MSR_ (__MSR | MSR_LE)
  |       | 140 #define MSR_IDLE (MSR_ME | MSR_SF | MSR_HV | MSR_LE)
|
/linux-6.1.9/arch/powerpc/platforms/pseries/ |
D | ras.c | 496 (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR| in pSeries_system_reset_exception()
|
/linux-6.1.9/arch/powerpc/lib/ |
D | sstep.c | 1745 op->val = 0xffffffff & ~(MSR_ME | MSR_LE); in analyse_instr()
  |         | 3325 cross_endian = (regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in emulate_loadstore()
|