Searched refs:r21 (Results 1 – 25 of 105) sorted by relevance
189  movi MMUIR_FIRST, r21
192  putcfg r21, 0, ZERO        /* Clear MMUIR[n].PTEH.V */
193  addi r21, MMUIR_STEP, r21
194  bne r21, r22, tr1
198  movi MMUDR_FIRST, r21
201  putcfg r21, 0, ZERO        /* Clear MMUDR[n].PTEH.V */
202  addi r21, MMUDR_STEP, r21
203  bne r21, r22, tr1
206  movi MMUIR_FIRST, r21
209  putcfg r21, 1, r22         /* Set MMUIR[0].PTEL */
[all …]

65   movi ITLB_FIXED, r21
67   1: putcfg r21, 0, r63      /* Clear MMUIR[n].PTEH.V */
68   addi r21, TLB_STEP, r21
69   bne r21, r22, tr1
73   movi DTLB_FIXED, r21
75   1: putcfg r21, 0, r63      /* Clear MMUDR[n].PTEH.V */
76   addi r21, TLB_STEP, r21
77   bne r21, r22, tr1
80   movi ITLB_FIXED, r21
82   putcfg r21, 1, r22         /* Set MMUIR[0].PTEL */
[all …]

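Both SH-5 results walk the fixed ITLB/DTLB slots, clearing each entry's valid bit before reloading slot 0 (the /* Clear MMUIR[n].PTEH.V */ comments). A minimal C sketch of that loop shape, with hypothetical TLB_FIXED/TLB_STEP/TLB_LAST values and write_tlb_cfg() standing in for the putcfg instruction:

    #include <stdint.h>

    /* Hypothetical constants and accessor; the real code drives the
     * SH-5 MMUIR/MMUDR slots with the putcfg instruction. */
    #define TLB_FIXED 0x00u   /* first fixed slot */
    #define TLB_STEP  0x10u   /* distance between slots */
    #define TLB_LAST  0x80u   /* one past the last slot */

    static void write_tlb_cfg(uint32_t slot, int reg, uint64_t val)
    {
        (void)slot; (void)reg; (void)val;   /* placeholder for putcfg */
    }

    static void invalidate_fixed_tlb(void)
    {
        /* movi TLB_FIXED, r21 / putcfg / addi / bne r21, r22, tr1:
         * loop until the slot pointer reaches the end, clearing the
         * valid bit (config register 0) of every fixed entry. */
        for (uint32_t slot = TLB_FIXED; slot != TLB_LAST; slot += TLB_STEP)
            write_tlb_cfg(slot, 0, 0);
    }
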
18   shari r25, 58, r21       /* extract 5(6) bit index (s2.4 with hole -1..1) */
20   ldx.ub r20, r21, r19     /* u0.8 */
22   shlli r21, 1, r21
24   ldx.w r20, r21, r21      /* s2.14 */
27   sub r21, r19, r19        /* some 11 bit inverse in s1.14 */
28   muls.l r19, r19, r21     /* u0.28 */
31   muls.l r25, r21, r18     /* s2.58 */
45   xor r21, r0, r21         /* You could also use the constant 1 << 27. */
46   add r21, r25, r21
47   sub r21, r19, r21
[all …]

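This snippet has the shape of a seeded-inverse computation: a few high bits index a lookup table (the ldx.ub / ldx.w loads), and the muls.l squaring steps refine the estimate. A hedged sketch of the same idea in C, using one Newton-Raphson step x1 = x0*(2 - d*x0) in Q16 fixed point and a linear seed instead of the routine's table; the formats and constants here are illustrative only:

    #include <stdint.h>

    /* Reciprocal of d (Q16, 0.5 <= d < 1.0); illustrative sketch. */
    static uint32_t recip_q16(uint32_t d)
    {
        /* Linear seed x0 = 48/17 - 32/17*d, good to about 5 bits. */
        uint32_t x = 185043u - (32u * d) / 17u;          /* 48/17 in Q16 */

        /* One Newton step doubles the number of correct bits, much as
         * the snippet's squaring (muls.l r19,r19,r21) refines its seed. */
        uint32_t dx = (uint32_t)(((uint64_t)d * x) >> 16);        /* d*x0 */
        x = (uint32_t)(((uint64_t)x * ((2u << 16) - dx)) >> 16);  /* x0*(2 - d*x0) */
        return x;   /* ~1/d in Q16 */
    }
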
9    movi 0xffffffffffffbaf1,r21   /* .l shift count 17. */
10   sub r21,r5,r1
35   mshalds.l r1,r21,r1
42   shlri r2,22,r21
43   mulu.l r21,r1,r21
46   shlrd r21,r0,r21
47   mulu.l r21,r3,r5
48   add r8,r21,r8
49   mcmpgt.l r21,r63,r21   // See Note 1
51   mshfhi.l r63,r21,r21
[all …]

16   sub r20,r25,r21
17   mmulfx.w r21,r21,r19
18   mshflo.w r21,r63,r21
23   msub.w r21,r19,r19
30   addi r19,-2,r21
31   mulu.l r4,r21,r18
33   shlli r21,15,r21
36   mmacnfx.wl r25,r19,r21
40   mulu.l r25,r21,r19
49   mulu.l r25,r21,r19

35   sub r3, r2, r21
36   addi r21, 8, r20
37   ldx.q r0, r21, r5
87   ldx.q r0, r21, r5

29   mov r21=1
36   shl r21=r21,r20     // r21: stride size of the i-cache(s)
52   add r24=r21,r24     // we flush "stride size" bytes per iteration
81   mov r21=1
89   shl r21=r21,r20     // r21: stride size of the i-cache(s)
107  add r24=r21,r24     // we flush "stride size" bytes per iteration

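Here the ia64 flush routine computes the i-cache line stride as 1 << r20 and advances by that stride each iteration. A sketch of the same range-flush pattern, with flush_icache_line() as a no-op stand-in for the architecture's flush instruction (fc.i on ia64):

    #include <stdint.h>

    static void flush_icache_line(uintptr_t addr)   /* stand-in for fc.i */
    {
        (void)addr;
    }

    static void flush_icache_range(uintptr_t start, uintptr_t end,
                                   unsigned int log2_stride)
    {
        uintptr_t stride = (uintptr_t)1 << log2_stride;  /* mov r21=1; shl r21=r21,r20 */
        uintptr_t p = start & ~(stride - 1);             /* align to a line */

        for (; p < end; p += stride)       /* add r24=r21,r24 per iteration */
            flush_icache_line(p);
    }
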
43   (p7) ld4 r21=[r15],8
50   add r20=r20,r21
98   ld4 r21=[in1],4
108  add r16=r20,r21

41   #define src_pre_l2   r21
172  and r21=-8,tmp
178  add src0=src0,r21    // setting up src pointer
179  add dst0=dst0,r21    // setting up dest pointer
294  shr.u r21=in2,7      // this much cache line
299  cmp.lt p7,p8=1,r21
300  add cnt=-1,r21
362  (p6) or r21=r28,r27
392  EX(.ex_handler, (p6) st8 [dst1]=r21,8)   // more than 8 byte to copy
512  shrp r21=r22,r38,shift;   /* speculative work */ \
[all …]

32   ldw -52(%r30), %r21              ;! 5th argument
66   STREG %r21, TASK_PT_GR21(%r1)    /* 5th argument */
86   stw %r21, -52(%r30)              ;! 5th argument
91   ldil L%hpux_call_table, %r21
92   ldo R%hpux_call_table(%r21), %r21
95   LDREGX %r22(%r21), %r21
97   be 0(%sr7,%r21)

441  push_reg r21, r52
467  moveli r21, lo16(__per_cpu_offset)
470  auli r21, r21, ha16(__per_cpu_offset)
473  s2a r20, r20, r21
489  PTREGS_PTR(r21, PTREGS_OFFSET_FLAGS)
501  sw r21, r32
561  IRQ_DISABLE(r20, r21)
860  IRQ_DISABLE(r20, r21)
929  IRQ_DISABLE(r20,r21)
938  IRQ_ENABLE(r20, r21)
[all …]

430  push_reg r21, r52
457  moveli r21, hw2_last(__per_cpu_offset)
460  shl16insli r21, r21, hw1(__per_cpu_offset)
463  shl16insli r21, r21, hw0(__per_cpu_offset)
464  shl3add r20, r20, r21
478  PTREGS_PTR(r21, PTREGS_OFFSET_FLAGS)
490  st r21, r32
505  moveli r21, hw2_last(intvec_feedback)
506  shl16insli r21, r21, hw1(intvec_feedback)
507  shl16insli r21, r21, hw0(intvec_feedback)
[all …]

81   LDREG ITLB_SID_STRIDE(%r1), %r21
94   add %r21, %r20, %r20    /* increment space */
117  add %r21, %r20, %r20    /* increment space */
124  LDREG DTLB_SID_STRIDE(%r1), %r21
137  add %r21, %r20, %r20    /* increment space */
160  add %r21, %r20, %r20    /* increment space */
301  ldd 16(%r25), %r21
308  std %r21, 16(%r26)
311  ldd 48(%r25), %r21
318  std %r21, 48(%r26)
[all …]

109  depdi 0, 31, 32, %r21
151  STREG %r21, TASK_PT_GR21(%r1)
178  stw %r21, -56(%r30)                  /* 6th argument */
313  LDREG TASK_PT_GR21(%r1), %r21
452  LDREGX %r20(%sr2,r28), %r21          /* Scratch use of r21 */
455  be,n 0(%sr2,%r21)
458  ldo -ENOSYS(%r0),%r21                /* set errno */
551  mfctl %cr27, %r21                    /* Get current thread register */
552  cmpb,<>,n %r21, %r28, cas_lock       /* Called recursive? */
554  ldo -EDEADLOCK(%r0), %r21
[all …]

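The LDREGX/be pair here (and the hpux_call_table sequence in the earlier result) is a jump-table syscall dispatch with -ENOSYS as the fallback errno. A hypothetical C rendering of that control flow:

    #include <errno.h>

    typedef long (*syscall_fn)(long, long, long, long, long, long);

    static long sys_ni(long a0, long a1, long a2, long a3, long a4, long a5)
    {
        (void)a0; (void)a1; (void)a2; (void)a3; (void)a4; (void)a5;
        return -ENOSYS;
    }

    static const syscall_fn call_table[] = { sys_ni };   /* stand-in table */

    static long dispatch(unsigned long nr, long a0, long a1, long a2,
                         long a3, long a4, long a5)
    {
        if (nr >= sizeof(call_table) / sizeof(call_table[0]))
            return -ENOSYS;               /* ldo -ENOSYS(%r0),%r21 */
        /* LDREGX %r20(%sr2,r28), %r21 then be,n 0(%sr2,%r21) */
        return call_table[nr](a0, a1, a2, a3, a4, a5);
    }
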
28   add r16=VMM_VPD_BASE_OFFSET,r21; \
51   adds r29 = VMM_VCPU_VSA_BASE_OFFSET,r21
124  add r18=VMM_VCPU_ITC_OFS_OFFSET, r21
125  add r16=VMM_VCPU_LAST_ITC_OFFSET,r21
151  add r18=VMM_VCPU_ITC_OFS_OFFSET, r21
154  add r16=VMM_VCPU_LAST_ITC_OFFSET,r21
187  add r27=VMM_VCPU_VRR0_OFFSET,r21
218  add r27=VMM_VCPU_VRR0_OFFSET,r21
264  adds r16=VMM_VCPU_MODE_FLAGS_OFFSET,r21
265  (p6) adds r17=VMM_VCPU_META_SAVED_RR0_OFFSET,r21
[all …]

371  .mem.offset 8,0; st8.spill [r3]=r21,16
430  (p6) add r29 = VMM_VCPU_SAVED_GP_OFFSET,r21
606  adds r16 = VMM_VCPU_SAVED_GP_OFFSET,r21
609  adds r17 = VMM_VCPU_GP_OFFSET, r21
628  adds r16 = VMM_VCPU_SAVED_GP_OFFSET,r21
633  adds r16 = VMM_VCPU_CAUSE_OFFSET,r21
634  adds r17 = VMM_VCPU_OPCODE_OFFSET,r21
642  adds r18=VMM_VPD_BASE_OFFSET,r21
885  adds r16 = VMM_VCPU_CAUSE_OFFSET,r21
886  adds r17 = VMM_VCPU_OPCODE_OFFSET,r21
[all …]

56   add r25 = VMM_VPD_BASE_OFFSET, r21; \
69   #define KVM_MINSTATE_GET_CURRENT(reg)   mov reg=r21
164  mov r13 = r21;   /* establish `current' */ \
215  .mem.offset 8,0; st8.spill [r3] = r21,16; \

120  shl r21=r16,3        // shift bit 60 into sign bit
123  shr.u r22=r21,3
143  (p6) shr.u r21=r21,PGDIR_SHIFT+PAGE_SHIFT
144  (p7) shr.u r21=r21,PGDIR_SHIFT+PAGE_SHIFT-3
148  cmp.eq p7,p6=0,r21   // unused address bits all zeroes?
174  dep r21=r19,r20,3,(PAGE_SHIFT-3)   // r21=pte_offset(pmd,addr)
176  (p7) ld8 r18=[r21]   // read *pte
223  ld8 r25=[r21]        // read *pte again
341  MOV_FROM_IPSR(p0, r21)
356  extr.u r23=r21,IA64_PSR_CPL0_BIT,2   // extract psr.cpl
[all …]

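The dep r21=r19,r20,3,(PAGE_SHIFT-3) line builds the PTE address by depositing the PTE index into the PMD page address starting at bit 3 (8-byte PTEs). The same arithmetic in C, with an illustrative PAGE_SHIFT:

    #include <stdint.h>

    #define PAGE_SHIFT 14   /* illustrative; ia64 supports several page sizes */

    static uint64_t pte_addr(uint64_t pmd_page, uint64_t pte_index)
    {
        /* Insert (PAGE_SHIFT - 3) bits of the index at bit position 3,
         * mirroring the dep instruction above. */
        uint64_t mask = ((UINT64_C(1) << (PAGE_SHIFT - 3)) - 1) << 3;
        return (pmd_page & ~mask) | ((pte_index << 3) & mask);
    }
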
185  adds r21=IA64_TASK_THREAD_KSP_OFFSET,in0
199  ld8 sp=[r21]                      // load kernel stack pointer of new task
294  mov r21=b0
308  st8 [r14]=r21,SW(B1)-SW(B0)       // save b0
315  mov r21=ar.lc                     // I-unit
325  st8 [r15]=r21                     // save ar.lc
353  mov r21=pr
356  st8 [r3]=r21                      // save predicate registers
385  ld8 r21=[r2],16                   // restore b0
420  mov b0=r21
[all …]

248  add r21 = IA64_CLKSRC_MMIO_OFFSET,r20
265  ld8 r30 = [r21]      // clocksource->mmio_ptr
328  mov r21 = r8
347  (p14) shr.u r21 = r2, 4
350  EX(.fail_efault, st8 [r23] = r21)
613  mov r21=ar.fpsr
732  ld8 r21=[r17]        // cumulated utime
739  add r21=r21,r18      // sum utime
742  st8 [r17]=r21        // update utime

137  tns r21, ATOMIC_LOCK_REG_NAME
141  bzt r21, 1b          /* branch if lock acquired */
155  tns r21, ATOMIC_LOCK_REG_NAME
159  bzt r21, 1b          /* branch if lock acquired */

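tns ("test and set") atomically writes a nonzero value to the lock word and returns its old contents, so the loop retries until it observes the lock was free. The same acquire/release pattern with C11 atomics:

    #include <stdatomic.h>

    static void spin_lock(atomic_int *lock)
    {
        /* tns r21, LOCK: old value into r21, lock word set nonzero */
        while (atomic_exchange_explicit(lock, 1, memory_order_acquire))
            ;   /* old value nonzero: someone else holds it, retry */
    }

    static void spin_unlock(atomic_int *lock)
    {
        atomic_store_explicit(lock, 0, memory_order_release);
    }
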
144  stw r21, VCPU_GPR(r21)(r4)
228  lwz r21, VCPU_GPR(r21)(r4)
266  stw r21, VCPU_GPR(r21)(r4)
286  lwz r21, HOST_NV_GPR(r21)(r1)
331  stw r21, HOST_NV_GPR(r21)(r1)
351  lwz r21, VCPU_GPR(r21)(r4)

49   PPC_LL r21, VCPU_GPR(r21)(vcpu); \
141  PPC_STL r21, VCPU_GPR(r21)(r7)

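In these PowerPC sources the register names are plain numbers to the assembler, so VCPU_GPR(r21) folds the register name into the byte offset of that register's slot in the vcpu save area. A C analogue of the offset arithmetic (the struct layout is hypothetical):

    #include <stddef.h>

    struct vcpu_regs { unsigned long gpr[32]; };   /* hypothetical layout */

    #define r21 21   /* the assembler likewise treats "r21" as just 21 */
    #define VCPU_GPR(n) offsetof(struct vcpu_regs, gpr[(n)])

    /* VCPU_GPR(r21) == 21 * sizeof(unsigned long), the slot that
     * "stw r21, VCPU_GPR(r21)(r4)" stores into. */
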
111  4:  lwi r21, r6, 0x000C + offset; \
119  12: swi r21, r5, 0x000C + offset; \
199  swi r21, r1, 20
222  lwi r21, r1, 20
242  lwi r21, r1, 20

30   r20, r21, r22, r23, r24, r25, r26, r27, r28) \
52   .radio_rxtx4a = r21, \
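
This last result feeds registers positionally into a macro that maps them onto named struct fields via designated initializers. A reduced sketch of the pattern; apart from .radio_rxtx4a, the struct and field names are hypothetical:

    struct radio_regs {
        unsigned char radio_rxtx4a;
        unsigned char radio_rxtx5;   /* hypothetical second field */
    };

    #define RADIOREGS(r20, r21)     \
        .radio_rxtx5  = r20,        \
        .radio_rxtx4a = r21

    static const struct radio_regs example = { RADIOREGS(0x10, 0x2a) };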