Lines Matching refs:r16
74 # define DBG_FAULT(i) mov r16=ar.k2;; shl r16=r16,8;; add r16=(i),r16;; mov ar.k2=r16
114 MOV_FROM_IFA(r16) // get address that caused the TLB miss
123 shl r21=r16,3 // shift bit 60 into sign bit
124 shr.u r17=r16,61 // get the region number into r17
242 (p6) ptc.l r16,r27 // purge translation
260 MOV_FROM_IFA(r16) // get virtual address
287 (p7) ptc.l r16,r20
304 MOV_FROM_IFA(r16) // get virtual address
331 (p7) ptc.l r16,r20
342 MOV_FROM_IFA(r16) // get address that caused the TLB miss
349 shr.u r22=r16,61 // get the region number into r22
353 THASH(p8, r17, r16, r23)
360 and r19=r19,r16 // clear ed, reserved bits, and PTE control bits
361 shr.u r18=r16,57 // move address bit 61 to bit 4
380 MOV_FROM_IFA(r16) // get address that caused the TLB miss
389 shr.u r22=r16,61 // get the region number into r22
393 THASH(p8, r17, r16, r25)
399 cmp.ge p10,p11=r16,r24 // access to per_cpu_data?
400 tbit.z p12,p0=r16,61 // access to region 6?
407 (p11) and r19=r19,r16 // clear non-ppn fields
458 shl r21=r16,3 // shift bit 60 into sign bit
461 shr.u r17=r16,61 // get the region number into r17
468 shr.u r22=r16,r22
469 shr.u r18=r16,r18
541 MOV_FROM_IFA(r16) // get the address that caused the fault
544 THASH(p0, r17, r16, r18) // compute virtual address of L3 PTE
573 (p7) ptc.l r16,r24
584 ITC_D(p0, r18, r16) // install updated PTE
596 MOV_FROM_IFA(r16) // get the address that caused the fault
608 (p6) mov r16=r18 // if so, use cr.iip instead of cr.ifa
611 THASH(p0, r17, r16, r18) // compute virtual address of L3 PTE
639 (p7) ptc.l r16,r24
650 ITC_I(p0, r18, r16) // install updated PTE
662 MOV_FROM_IFA(r16) // get the address that caused the fault
665 THASH(p0, r17, r16, r18) // compute virtual address of L3 PTE
693 (p7) ptc.l r16,r24
702 ITC_D(p0, r18, r16) // install updated PTE
729 mov.m r16=IA64_KR(CURRENT) // M2 r16 <- current task (12 cyc)
760 mov r1=r16 // A move task-pointer to "addl"-addressable reg
761 mov r2=r16 // A setup r2 for ia64_syscall_setup
762 add r9=TI_FLAGS+IA64_TASK_SIZE,r16 // A r9 = &current_thread_info()->flags
764 adds r16=IA64_TASK_THREAD_ON_USTACK_OFFSET,r16
768 ld1.bias r17=[r16] // M0|1 r17 = current->thread.on_ustack flag
806 st1 [r16]=r0 // M2|3 clear current->thread.on_ustack flag
825 add r16=TI_AC_STAMP+IA64_TASK_SIZE,r13 // A
829 ld8 r18=[r16],TI_AC_STIME-TI_AC_STAMP // M get last stamp
832 ld8 r20=[r16],TI_AC_STAMP-TI_AC_STIME // M cumulated stime
836 st8 [r16]=r30,TI_AC_STIME-TI_AC_STAMP // M update stamp
842 st8 [r16]=r20 // M update stime
852 SSM_PSR_IC_AND_DEFAULT_BITS_AND_SRLZ_I(r3, r16) // M2 now it's safe to re-enable intr.-collection
859 SSM_PSR_I(p15, p15, r16) // M2 restore psr.i
948 add r16=PT(CR_IPSR),r1 // initialize first base pointer
952 st8 [r16]=r29,PT(AR_PFS)-PT(CR_IPSR) // save cr.ipsr
960 st8 [r16]=r26,PT(CR_IFS)-PT(AR_PFS) // save ar.pfs
970 st8 [r16]=r19,PT(AR_RNAT)-PT(CR_IFS) // store ar.pfs.pfm in cr.ifs
983 (pKStk) adds r16=PT(PR)-PT(AR_RNAT),r16 // skip over ar_rnat field
991 (pUStk) st8 [r16]=r24,PT(PR)-PT(AR_RNAT) // save ar.rnat
995 st8 [r16]=r31,PT(LOADRS)-PT(PR) // save predicates
999 st8 [r16]=r18,PT(R12)-PT(LOADRS) // save ar.rsc value for "loadrs"
1004 .mem.offset 0,0; st8.spill [r16]=r12,PT(AR_FPSR)-PT(R12) // save r12
1008 st8 [r16]=r21,PT(R8)-PT(AR_FPSR) // save ar.fpsr
1023 st8 [r16]=r8 // ensure pt_regs.r8 != 0 (see handle_syscall_error)
1059 add r16=TI_AC_STAMP+IA64_TASK_SIZE,r13
1062 ld8 r18=[r16],TI_AC_STIME-TI_AC_STAMP // time at last check in kernel
1065 ld8 r23=[r16],TI_AC_STAMP-TI_AC_STIME // cumulated stime
1069 st8 [r16]=r20,TI_AC_STIME-TI_AC_STAMP // update stamp
1075 st8 [r16]=r23 // update stime
1109 MOV_FROM_IFA(r16)
1117 ptc.l r16,r17
1129 MOV_FROM_IFA(r16)
1142 MOV_FROM_IFA(r16)
1155 MOV_FROM_IFA(r16)
1168 MOV_FROM_ISR(r16)
1171 cmp4.eq p6,p0=0,r16
1197 MOV_FROM_IPSR(p0, r16)
1205 dep r16=-1,r16,IA64_PSR_ED_BIT,1
1208 MOV_TO_IPSR(p0, r16, r18)
1239 MOV_FROM_IPSR(p0, r16)
1246 dep r16=0,r16,41,2 // clear EI
1249 MOV_TO_IPSR(p0, r16, r19)