/linux-3.4.99/arch/powerpc/kvm/
D | book3s_32_mmu.c |
    160  if (vcpu->arch.shared->msr & MSR_PR) {  in kvmppc_mmu_book3s_32_xlate_bat()
    241  if ((sr_kp(sre) && (vcpu->arch.shared->msr & MSR_PR)) ||  in kvmppc_mmu_book3s_32_xlate_pte()
    242  (sr_ks(sre) && !(vcpu->arch.shared->msr & MSR_PR))  in kvmppc_mmu_book3s_32_xlate_pte()
    315  !(vcpu->arch.shared->msr & MSR_PR)) {  in kvmppc_mmu_book3s_32_xlate()
    389  if (vcpu->arch.shared->msr & MSR_PR)  in kvmppc_mmu_book3s_32_esid_to_vsid()

D | book3s_64_mmu.c |
    177  !(vcpu->arch.shared->msr & MSR_PR)) {  in kvmppc_mmu_book3s_64_xlate()
    205  if ((vcpu->arch.shared->msr & MSR_PR) && slbe->Kp)  in kvmppc_mmu_book3s_64_xlate()
    207  else if (!(vcpu->arch.shared->msr & MSR_PR) && slbe->Ks)  in kvmppc_mmu_book3s_64_xlate()
    495  if (vcpu->arch.shared->msr & MSR_PR)  in kvmppc_mmu_book3s_64_esid_to_vsid()
    504  !(vcpu->arch.shared->msr & MSR_PR)) {  in kvmppc_mmu_book3s_64_esid_to_vsid()

D | book3s_pr.c |
    98   smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;  in kvmppc_recalc_shadow_msr()
    131  if ((vcpu->arch.shared->msr & (MSR_PR|MSR_IR|MSR_DR)) !=  in kvmppc_set_msr()
    132  (old_msr & (MSR_PR|MSR_IR|MSR_DR))) {  in kvmppc_set_msr()
    137  if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {  in kvmppc_set_msr()
    641  if (vcpu->arch.shared->msr & MSR_PR) {  in kvmppc_handle_exit()
    680  !(vcpu->arch.shared->msr & MSR_PR)) {  in kvmppc_handle_exit()
    712  } else if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()

D | book3s_64_mmu_host.c |
    61   if (vcpu->arch.shared->msr & MSR_PR)  in find_sid_vsid()
    181  if (vcpu->arch.shared->msr & MSR_PR)  in create_sid_map()

D | e500_tlb.h |
    121  return !!(vcpu->arch.shared->msr & MSR_PR);  in get_cur_pr()

D | book3s_32_mmu_host.c |
    95   if (vcpu->arch.shared->msr & MSR_PR)  in find_sid_vsid()
    265  if (vcpu->arch.shared->msr & MSR_PR)  in create_sid_map()

D | 44x_tlb.c |
    358  vcpu->arch.shared->msr & MSR_PR);  in kvmppc_mmu_map()
    392  int usermode = vcpu->arch.shared->msr & MSR_PR;  in kvmppc_mmu_msr_notify()

D | booke.c |
    219  crit = crit && !(vcpu->arch.shared->msr & MSR_PR);  in kvmppc_booke_irqprio_deliver()
    410  if (vcpu->arch.shared->msr & MSR_PR) {  in kvmppc_handle_exit()
    510  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()
    530  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()

D | book3s_emulate.c |
    80   if ((vcpu->arch.shared->msr & MSR_PR) && level > PRIV_PROBLEM)  in spr_allowed()

D | e500_tlb.c |
    602  vcpu_e500->vcpu.arch.shared->msr & MSR_PR);  in kvmppc_e500_setup_stlbe()
    605  vcpu_e500->vcpu.arch.shared->msr & MSR_PR);  in kvmppc_e500_setup_stlbe()

D | book3s_hv.c |
    314  if (vcpu->arch.shregs.msr & MSR_PR) {  in kvmppc_handle_exit()
    912  !(vcpu->arch.shregs.msr & MSR_PR)) {  in kvmppc_vcpu_run()

D | book3s_hv_rm_mmu.c |
    783  key = (vcpu->arch.shregs.msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS;  in kvmppc_hpte_hv_fault()

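Every kvm/ entry above tests the guest's MSR image for MSR_PR to decide whether the virtual CPU is in problem (user) state; e500_tlb.h line 121 wraps the test in get_cur_pr(). A minimal C sketch of that pattern, with the kernel's kvm_vcpu layout pared down to hypothetical stand-in structs:

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_PR (1UL << 14)  /* problem-state bit, per reg.h below */

    /* Hypothetical stand-ins for the kernel's kvm_vcpu layout. */
    struct kvmppc_shared { uint64_t msr; };
    struct vcpu_arch     { struct kvmppc_shared *shared; };
    struct kvm_vcpu      { struct vcpu_arch arch; };

    /* Mirrors e500_tlb.h's get_cur_pr(): a set MSR_PR bit means the
     * guest is in problem (user) state, so privileged guest operations
     * must be trapped and reflected rather than performed directly. */
    static bool guest_user_mode(struct kvm_vcpu *vcpu)
    {
            return !!(vcpu->arch.shared->msr & MSR_PR);
    }

The HV entries (book3s_hv.c, book3s_hv_rm_mmu.c) read the same bit from vcpu->arch.shregs.msr instead, since hypervisor-mode KVM keeps the guest MSR in hardware shadow registers.
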
/linux-3.4.99/arch/powerpc/include/asm/ |
D | reg_booke.h |
    34   #define MSR_USER32 MSR_ | MSR_PR | MSR_EE
    38   #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
    41   #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)

D | reg.h |
    82   #define MSR_PR __MASK(MSR_PR_LG) /* Problem State / Privilege Level */  (macro definition)
    107  #define MSR_USER32 MSR_ | MSR_PR | MSR_EE
    112  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)

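reg.h line 82 is the definition site: MSR_PR is built with __MASK() from the bit's log2 position. Assuming this tree's values (__MASK(X) is (1UL << (X)) in C code and MSR_PR_LG is 14), the mask works out as follows:

    /* Reconstruction from reg.h definitions in this tree. */
    #define MSR_PR_LG 14                  /* Problem State / Privilege Level */
    #define __MASK(X) (1UL << (X))
    #define MSR_PR    __MASK(MSR_PR_LG)   /* == 0x4000 */

The MSR_USER definitions on lines 107 and 112 then read naturally: a user-mode MSR is the kernel MSR with problem state and external-interrupt enable added.
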
D | ptrace.h |
    115  #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)

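ptrace.h line 115 is the canonical C-side test: user_mode(regs) is true when the saved MSR has MSR_PR set. A self-contained usage sketch; the handler below is hypothetical, not a real kernel entry point:

    #define MSR_PR 0x4000UL
    #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)

    struct pt_regs { unsigned long msr; /* other fields trimmed */ };

    static void handle_fault(struct pt_regs *regs)
    {
            if (user_mode(regs)) {
                    /* fault came from userspace: deliver a signal */
            } else {
                    /* fault in kernel context: fix up or oops */
            }
    }
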
D | exception-64s.h |
    161  andi. r10,r12,MSR_PR; /* See if coming from user */ \

D | kvm_book3s.h |
    350  crit = crit && !(vcpu->arch.shared->msr & MSR_PR);  in kvmppc_critical_section()

/linux-3.4.99/arch/powerpc/kernel/ |
D | entry_64.S |
    55   andi. r10,r12,MSR_PR
    219  andi. r6,r8,MSR_PR
    588  andi. r3,r3,MSR_PR
    686  andi. r0,r3,MSR_PR
    782  andi. r0,r3,MSR_PR /* Returning to user mode? */

D | exceptions-64e.S |
    49   andi. r10,r11,MSR_PR; /* save stack pointer */ \
    354  andi. r0,r12,MSR_PR;
    441  1: andi. r14,r11,MSR_PR; /* check for userspace again */
    506  1: andi. r14,r11,MSR_PR; /* check for userspace again */
    710  andi. r6,r10,MSR_PR

D | head_booke.h |
    38   andi. r11,r11,MSR_PR; \
    125  andi. r10,r10,MSR_PR; \

D | head_40x.S |
    113  andi. r11,r11,MSR_PR; \
    152  andi. r11,r11,MSR_PR; \
    708  andi. r10,r9,MSR_IR|MSR_PR /* check supervisor + MMU off */

D | entry_32.S |
    137  andi. r2,r9,MSR_PR
    229  andi. r12,r12,MSR_PR
    813  andi. r0,r3,MSR_PR
    1023 andi. r3,r3,MSR_PR; \

D | process.c |
    599  {MSR_PR, "PR"},
    746  if ((childregs->msr & MSR_PR) == 0) {  in copy_thread()

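The assembly entries above all use one idiom: andi. masks MSR_PR out of the saved MSR and, because of the dot suffix, sets CR0, so a beq/bne immediately afterwards picks the kernel or user path (stack selection, register restore, signal work). process.c line 746 applies the same test in C to recognize kernel threads in copy_thread(). A C rendering of both checks; the helper names are illustrative:

    #define MSR_PR 0x4000UL

    struct pt_regs { unsigned long msr; };

    /* C equivalent of "andi. rX,rY,MSR_PR; beq kernel_path". */
    static int came_from_user(const struct pt_regs *regs)
    {
            return (regs->msr & MSR_PR) != 0;
    }

    /* copy_thread()-style check: a child whose saved MSR has MSR_PR
     * clear never ran in user mode, i.e. it is a kernel thread. */
    static int is_kernel_thread(const struct pt_regs *childregs)
    {
            return (childregs->msr & MSR_PR) == 0;
    }
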
/linux-3.4.99/arch/powerpc/xmon/ |
D | xmon.c |
    400   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT))  in xmon_core()
    530   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {  in xmon_core()
    579   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_bpt()
    610   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_dabr_match()
    620   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_iabr_match()
    645   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {  in xmon_fault_handler()
    928   if ((regs->msr & (MSR_64BIT|MSR_PR|MSR_IR)) == (MSR_64BIT|MSR_IR)) {  in do_step()
    1393  if (regs->msr & MSR_PR)  in print_bug_trap()

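xmon keeps re-checking a single predicate: the debugger only trusts virtual-mode operation when the interrupted context was 64-bit, had instruction relocation on, and was not in problem state; print_bug_trap() (line 1393) uses the bare MSR_PR test to bail out on user-mode traps. A sketch of the predicate, assuming Book3S-64 where MSR_64BIT is MSR_SF (bit 63) and MSR_IR is bit 5:

    #define MSR_PR    (1UL << 14)
    #define MSR_IR    (1UL << 5)
    #define MSR_64BIT (1UL << 63)   /* MSR_SF on Book3S-64 */

    struct pt_regs { unsigned long msr; };

    /* True only when MSR_IR and MSR_64BIT are set while MSR_PR is
     * clear: 64-bit supervisor state with the MMU on. */
    static int in_64bit_kernel_virtual_mode(const struct pt_regs *regs)
    {
            unsigned long mask = MSR_IR | MSR_PR | MSR_64BIT;

            return (regs->msr & mask) == (MSR_IR | MSR_64BIT);
    }
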
/linux-3.4.99/arch/powerpc/lib/ |
D | sstep.c |
    837  if (regs->msr & MSR_PR)  in emulate_step()
    842  if (regs->msr & MSR_PR)  in emulate_step()
    854  if (regs->msr & MSR_PR)  in emulate_step()

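All three sstep.c hits guard privileged cases in emulate_step(): if the interrupted context had MSR_PR set, the emulator declines the instruction so normal exception handling can raise a privileged-instruction fault instead. A hedged sketch of that gate; the surrounding opcode dispatch is elided and the function name is illustrative:

    #define MSR_PR 0x4000UL

    struct pt_regs { unsigned long msr; };

    /* Returns nonzero if the privileged instruction was emulated,
     * zero to let normal exception handling take over. */
    static int emulate_privileged_op(struct pt_regs *regs)
    {
            if (regs->msr & MSR_PR)
                    return 0;   /* user mode: refuse to emulate */

            /* ... privileged emulation (e.g. mfmsr/mtmsr) here ... */
            return 1;
    }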