
Searched refs:t1 (Results 1 – 25 of 333) sorted by relevance


/linux-6.1.9/arch/loongarch/mm/
tlbex.S
50 csrwr t1, EXCEPTION_KS1
58 csrrd t1, LOONGARCH_CSR_PGDL
63 alsl.d t1, ra, t1, 3
65 ld.d t1, t1, 0
67 alsl.d t1, ra, t1, 3
70 ld.d t1, t1, 0
72 alsl.d t1, ra, t1, 3
74 ld.d ra, t1, 0
86 alsl.d t1, t0, ra, _PTE_T_LOG2
90 ll.d t0, t1, 0
[all …]
/linux-6.1.9/arch/arm/crypto/
sha512-armv4.pl
74 $t1="r10";
99 mov $t1,$Ehi,lsr#14
103 eor $t1,$t1,$Elo,lsl#18
106 eor $t1,$t1,$Ehi,lsr#18
108 eor $t1,$t1,$Elo,lsl#14
110 eor $t1,$t1,$Elo,lsr#9
112 eor $t1,$t1,$Ehi,lsl#23 @ Sigma1(e)
115 adc $Thi,$Thi,$t1 @ T += Sigma1(e)
116 ldr $t1,[sp,#$Foff+4] @ f.hi
124 eor $t1,$t1,$t3
[all …]
sha256-armv4.pl
52 $len="r2"; $t1="r2";
76 @ ldr $t1,[$inp],#4 @ $i
84 rev $t1,$t1
87 @ ldrb $t1,[$inp,#3] @ $i
91 orr $t1,$t1,$t2,lsl#8
93 orr $t1,$t1,$t0,lsl#16
98 orr $t1,$t1,$t2,lsl#24
104 add $h,$h,$t1 @ h+=X[i]
105 str $t1,[sp,#`$i%16`*4]
106 eor $t1,$f,$g
[all …]
/linux-6.1.9/arch/mips/kernel/
cps-vec.S
178 1: PTR_L t1, VPEBOOTCFG_PC(v1)
181 jr t1
241 PTR_LA t1, 1f
242 jr.hb t1
272 sll t1, ta1, VPECONF0_XTC_SHIFT
273 or t0, t0, t1
310 li t1, COREBOOTCFG_SIZE
311 mul t0, t0, t1
312 PTR_LA t1, mips_cps_core_bootcfg
313 PTR_L t1, 0(t1)
[all …]
octeon_switch.S
27 mfc0 t1, CP0_STATUS
28 LONG_S t1, THREAD_STATUS(a0)
42 li t1, -32768 /* Base address of CVMSEG */
47 LONG_L t8, 0(t1) /* Load from CVMSEG */
49 LONG_L t9, LONGSIZE(t1)/* Load from CVMSEG */
50 LONG_ADDU t1, LONGSIZE*2 /* Increment loc in CVMSEG */
78 set_saved_sp t0, t1, t2
80 mfc0 t1, CP0_STATUS /* Do we really need this? */
82 and t1, a3
86 or a2, t1
[all …]
bmips_5xxx_init.S
30 addu t1, kva, size ; \
34 addiu t1, t1, -1 ; \
35 and t1, t2 ; \
37 bne t0, t1, 9b ; \
421 li t1, 0x4
422 or t0, t1
427 li t1, 0x4
428 or t0, t1
433 li t1, 0x4
434 or t0, t1
[all …]
/linux-6.1.9/include/crypto/
aria.h
343 static inline void aria_sbox_layer1_with_pre_diff(u32 *t0, u32 *t1, u32 *t2, in aria_sbox_layer1_with_pre_diff() argument
350 *t1 = s1[get_u8(*t1, 0)] ^ in aria_sbox_layer1_with_pre_diff()
351 s2[get_u8(*t1, 1)] ^ in aria_sbox_layer1_with_pre_diff()
352 x1[get_u8(*t1, 2)] ^ in aria_sbox_layer1_with_pre_diff()
353 x2[get_u8(*t1, 3)]; in aria_sbox_layer1_with_pre_diff()
365 static inline void aria_sbox_layer2_with_pre_diff(u32 *t0, u32 *t1, u32 *t2, in aria_sbox_layer2_with_pre_diff() argument
372 *t1 = x1[get_u8(*t1, 0)] ^ in aria_sbox_layer2_with_pre_diff()
373 x2[get_u8(*t1, 1)] ^ in aria_sbox_layer2_with_pre_diff()
374 s1[get_u8(*t1, 2)] ^ in aria_sbox_layer2_with_pre_diff()
375 s2[get_u8(*t1, 3)]; in aria_sbox_layer2_with_pre_diff()
[all …]
/linux-6.1.9/arch/csky/abiv2/
strcmp.S
13 andi t1, a0, 0x3
14 bnez t1, 5f
19 ldw t1, (a1, 0)
21 cmpne t0, t1
29 ldw t1, (a1, 4)
30 cmpne t0, t1
36 ldw t1, (a1, 8)
37 cmpne t0, t1
43 ldw t1, (a1, 12)
44 cmpne t0, t1
[all …]
/linux-6.1.9/arch/alpha/lib/
stxcpy.S
49 mskqh t1, a1, t3 # e0 :
50 ornot t1, t2, t2 # .. e1 :
53 or t0, t3, t1 # e0 :
61 stq_u t1, 0(a0) # e0 :
63 ldq_u t1, 0(a1) # e0 :
65 cmpbge zero, t1, t8 # e0 (stall)
85 zapnot t1, t6, t1 # e0 : clear src bytes >= null
88 or t0, t1, t1 # e1 :
90 1: stq_u t1, 0(a0) # e0 :
109 ldq_u t1, 0(a1) # e0 : load first src word
[all …]
ev6-stxcpy.S
60 mskqh t1, a1, t3 # U :
61 ornot t1, t2, t2 # E : (stall)
65 or t0, t3, t1 # E : (stall)
74 stq_u t1, 0(a0) # L :
79 ldq_u t1, 0(a1) # L : Latency=3
81 cmpbge zero, t1, t8 # E : (3 cycle stall)
100 zapnot t1, t6, t1 # U : clear src bytes >= null (stall)
104 or t0, t1, t1 # E : (stall)
108 1: stq_u t1, 0(a0) # L :
129 ldq_u t1, 0(a1) # L : load first src word
[all …]
stxncpy.S
57 mskqh t1, a1, t3 # e0 :
58 ornot t1, t2, t2 # .. e1 :
96 ldq_u t1, 0(a0) # e0 :
101 zap t1, t8, t1 # .. e1 : clear dst bytes <= null
102 or t0, t1, t0 # e1 :
122 xor a0, a1, t1 # e0 :
124 and t1, 7, t1 # e0 :
131 bne t1, $unaligned # .. e1 :
135 ldq_u t1, 0(a1) # e0 : load first src word
162 or t1, t4, t1 # e1 : first aligned src word complete
[all …]
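
The three Alpha copy routines above all locate the string terminator with cmpbge zero, t1, t8, which yields a per-byte mask of the zero bytes in the source word (hence comments such as "clear src bytes >= null"). Below is a minimal portable C sketch of the analogous check using the classic zero-byte bit trick rather than the Alpha instruction; the helper name is invented for illustration.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Sketch only: a portable equivalent of the cmpbge-against-zero test in the
 * stxcpy loops above, answering "does this 8-byte word contain a NUL byte?".
 * Uses the classic (v - 0x01..01) & ~v & 0x80..80 zero-byte trick. */
static int word_has_zero_byte(uint64_t v)
{
	return ((v - 0x0101010101010101ULL) & ~v & 0x8080808080808080ULL) != 0;
}

int main(void)
{
	uint64_t w;

	memcpy(&w, "abcdefgh", 8);	/* no terminator within the word */
	printf("%d\n", word_has_zero_byte(w));	/* prints 0 */

	memcpy(&w, "abc\0defg", 8);	/* NUL in byte 3 */
	printf("%d\n", word_has_zero_byte(w));	/* prints 1 */
	return 0;
}
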
/linux-6.1.9/arch/loongarch/kernel/
fpu.S
153 fpu_save_csr a0 t1
154 fpu_save_double a0 t1 # clobbers t1
155 fpu_save_cc a0 t1 t2 # clobbers t1, t2
164 fpu_restore_double a0 t1 # clobbers t1
165 fpu_restore_csr a0 t1
166 fpu_restore_cc a0 t1 t2 # clobbers t1, t2
179 li.w t1, CSR_EUEN_FPEN
180 csrxchg t1, t1, LOONGARCH_CSR_EUEN
184 li.w t1, -1 # SNaN
186 movgr2fr.d $f0, t1
[all …]
/linux-6.1.9/lib/mpi/
mpi-inv.c
33 MPI u, v, u1, u2 = NULL, u3, v1, v2 = NULL, v3, t1, t2 = NULL, t3; in mpi_invm() local
63 t1 = mpi_alloc_set_ui(0); in mpi_invm()
72 t1 = mpi_alloc_set_ui(1); in mpi_invm()
81 if (mpi_test_bit(t1, 0) || mpi_test_bit(t2, 0)) { in mpi_invm()
83 mpi_add(t1, t1, v); in mpi_invm()
86 mpi_rshift(t1, t1, 1); in mpi_invm()
90 if (mpi_test_bit(t1, 0)) in mpi_invm()
91 mpi_add(t1, t1, v); in mpi_invm()
92 mpi_rshift(t1, t1, 1); in mpi_invm()
100 mpi_set(u1, t1); in mpi_invm()
[all …]
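
In mpi_invm() the t1/t2/t3 triple holds one set of cofactors for a binary (halving) extended-GCD computation of a modular inverse; the lines above add v to t1 when it is odd so that the following right shift stays exact. As a much simpler illustration of the same end result, and explicitly not the kernel's halving algorithm, here is a small-integer modular inverse via the classic extended Euclidean algorithm (modinv is an invented name).

#include <stdint.h>
#include <stdio.h>

/* Sketch: the classic (non-binary) extended Euclidean algorithm for a
 * modular inverse on small integers, shown only to illustrate what
 * mpi_invm() computes with multi-precision values.  Returns -1 when no
 * inverse exists. */
static int64_t modinv(int64_t a, int64_t m)
{
	int64_t t = 0, newt = 1;	/* coefficient of a mod m, and its successor */
	int64_t r = m, newr = a % m;	/* remainders */

	while (newr != 0) {
		int64_t q = r / newr, tmp;

		tmp = t - q * newt; t = newt; newt = tmp;
		tmp = r - q * newr; r = newr; newr = tmp;
	}
	if (r != 1)
		return -1;		/* a and m are not coprime */
	return t < 0 ? t + m : t;
}

int main(void)
{
	/* 3 * 7 == 21 == 1 (mod 10), so the inverse of 3 mod 10 is 7 */
	printf("%lld\n", (long long)modinv(3, 10));
	return 0;
}
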
/linux-6.1.9/drivers/crypto/vmx/
ghashp8-ppc.pl
57 my ($zero,$t0,$t1,$t2,$xC2,$H,$Hh,$Hl,$lemask)=map("v$_",(4..12));
86 vsldoi $t1,$zero,$t0,1 # ...1
89 vor $xC2,$xC2,$t1 # 0xc2....01
90 vspltb $t1,$H,0 # most significant byte
92 vsrab $t1,$t1,$t2 # broadcast carry bit
93 vand $t1,$t1,$xC2
94 vxor $H,$H,$t1 # twisted H
139 vsldoi $t1,$zero,$Xm,8
141 vxor $Xh,$Xh,$t1
146 vsldoi $t1,$Xl,$Xl,8 # 2nd phase
[all …]
/linux-6.1.9/drivers/soc/bcm/brcmstb/pm/
s2-mips.S
52 addiu t1, s3, -1
53 not t1
56 and t0, t1
59 and t2, t1
68 2: move t1, s4
69 cache 0x1c, 0(t1)
70 addu t1, s3
89 li t1, ~(ST0_IM | ST0_IE)
90 and t0, t1
121 lw t1, TIMER_TIMER1_STAT(s2)
[all …]
s3-mips.S
40 mfc0 t1, CP0_STATUS
41 sw t1, 48(t0)
44 addiu t1, a1, -1
45 not t1
46 and t0, t1
50 and t2, t1
57 li t1, PM_WARM_CONFIG
60 sw t1, AON_CTRL_PM_CTRL(a0)
61 lw t1, AON_CTRL_PM_CTRL(a0)
63 li t1, (PM_WARM_CONFIG | PM_PWR_DOWN)
[all …]
/linux-6.1.9/arch/mips/dec/
int-handler.S
133 mfc0 t1,CP0_STATUS
138 and t0,t1 # isolate allowed ones
150 # open coded PTR_LA t1, cpu_mask_nr_tbl
152 # open coded la t1, cpu_mask_nr_tbl
153 lui t1, %hi(cpu_mask_nr_tbl)
154 addiu t1, %lo(cpu_mask_nr_tbl)
214 2: lw t2,(t1)
218 addu t1,2*PTRSIZE # delay slot
223 lw a0,%lo(-PTRSIZE)(t1)
239 li t1,CAUSEF_IP>>CAUSEB_IP # mask
[all …]
/linux-6.1.9/arch/riscv/lib/
memmove.S
89 andi t1, t0, (SZREG - 1)
90 beqz t1, coaligned_copy
135 REG_L t1, (1 * SZREG)(a1)
138 sll t2, t1, a7
146 srl t1, t1, a6
148 or t2, t1, t2
195 REG_L t1, ( 0 * SZREG)(a4)
199 sll t1, t1, a7
201 or t2, t1, t2
206 REG_L t1, (-2 * SZREG)(a4)
[all …]
/linux-6.1.9/arch/loongarch/net/
bpf_jit.c
212 u8 t1 = LOONGARCH_GPR_T1; in emit_bpf_tail_call() local
229 emit_insn(ctx, ldwu, t1, a1, off); in emit_bpf_tail_call()
231 if (emit_tailcall_jmp(ctx, BPF_JGE, a2, t1, jmp_offset) < 0) in emit_bpf_tail_call()
279 const u8 t1 = LOONGARCH_GPR_T1; in emit_atomic() local
289 move_imm(ctx, t1, off, false); in emit_atomic()
290 emit_insn(ctx, addd, t1, dst, t1); in emit_atomic()
297 emit_insn(ctx, amaddd, t2, t1, src); in emit_atomic()
299 emit_insn(ctx, amaddw, t2, t1, src); in emit_atomic()
303 emit_insn(ctx, amandd, t2, t1, src); in emit_atomic()
305 emit_insn(ctx, amandw, t2, t1, src); in emit_atomic()
[all …]
/linux-6.1.9/arch/mips/include/asm/mach-cavium-octeon/
kernel-entry-init.h
47 and t1, v1, 0xfff8
48 xor t1, t1, 0x9000 # 63-P1
49 beqz t1, 4f
50 and t1, v1, 0xfff8
51 xor t1, t1, 0x9008 # 63-P2
52 beqz t1, 4f
53 and t1, v1, 0xfff8
54 xor t1, t1, 0x9100 # 68-P1
55 beqz t1, 4f
56 and t1, v1, 0xff00
[all …]
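
The and/xor/beqz sequences above are the MIPS idiom for a masked equality test on the processor ID held in v1 (the "# 63-P1" style comments name the matched model/pass). A one-line C sketch of the same test; the function name and the interpretation of the constant are assumptions for illustration.

#include <stdint.h>
#include <stdio.h>

/* Sketch: "and t1, v1, 0xfff8; xor t1, t1, 0x9000; beqz t1, 4f" is just a
 * masked compare.  Name and model interpretation are illustrative only. */
static int prid_matches_63_p1(uint32_t prid)
{
	return (prid & 0xfff8) == 0x9000;	/* the "# 63-P1" case above */
}

int main(void)
{
	printf("%d\n", prid_matches_63_p1(0x9003));	/* 1: low bits masked off */
	printf("%d\n", prid_matches_63_p1(0x9100));	/* 0: different ID */
	return 0;
}
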
/linux-6.1.9/arch/parisc/lib/
lusercopy.S
93 t1 = r19 define
101 a1 = t1
119 extru t0,31,2,t1
120 cmpib,<>,n 0,t1,.Lunaligned_copy
124 extru t0,31,3,t1
125 cmpib,<>,n 0,t1,.Lalign_loop32
129 extru dst,31,3,t1
130 cmpib,=,n 0,t1,.Lcopy_loop_16_start
131 20: ldb,ma 1(srcspc,src),t1
132 21: stb,ma t1,1(dstspc,dst)
[all …]
/linux-6.1.9/crypto/
sha512_generic.c
101 u64 a, b, c, d, e, f, g, h, t1, t2; in sha512_transform() local
126 t1 = h + e1(e) + Ch(e,f,g) + sha512_K[i ] + W[(i & 15)]; in sha512_transform()
127 t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; in sha512_transform()
128 t1 = g + e1(d) + Ch(d,e,f) + sha512_K[i+1] + W[(i & 15) + 1]; in sha512_transform()
129 t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; in sha512_transform()
130 t1 = f + e1(c) + Ch(c,d,e) + sha512_K[i+2] + W[(i & 15) + 2]; in sha512_transform()
131 t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; in sha512_transform()
132 t1 = e + e1(b) + Ch(b,c,d) + sha512_K[i+3] + W[(i & 15) + 3]; in sha512_transform()
133 t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; in sha512_transform()
134 t1 = d + e1(a) + Ch(a,b,c) + sha512_K[i+4] + W[(i & 15) + 4]; in sha512_transform()
[all …]
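
In sha512_transform() above, t1 and t2 carry the two halves of the standard SHA-512 round step from FIPS 180-4; the kernel simply unrolls the rounds and renames the working variables instead of rotating them. A minimal, self-contained sketch of one round in plain C (not the kernel's unrolled code; e0/e1/Ch/Maj follow the snippet's naming):

#include <stdint.h>
#include <stdio.h>

/* Sketch of one SHA-512 round (FIPS 180-4), showing the role of the t1/t2
 * temporaries in sha512_transform(). */
static uint64_t ror64(uint64_t x, unsigned int n) { return (x >> n) | (x << (64 - n)); }
static uint64_t Ch(uint64_t x, uint64_t y, uint64_t z)  { return z ^ (x & (y ^ z)); }
static uint64_t Maj(uint64_t x, uint64_t y, uint64_t z) { return (x & y) | (z & (x | y)); }
static uint64_t e0(uint64_t x) { return ror64(x, 28) ^ ror64(x, 34) ^ ror64(x, 39); }
static uint64_t e1(uint64_t x) { return ror64(x, 14) ^ ror64(x, 18) ^ ror64(x, 41); }

/* s[0..7] = a..h; t1 feeds both the new e (d + t1) and the new a (t1 + t2). */
static void sha512_round(uint64_t s[8], uint64_t Kt, uint64_t Wt)
{
	uint64_t t1 = s[7] + e1(s[4]) + Ch(s[4], s[5], s[6]) + Kt + Wt;
	uint64_t t2 = e0(s[0]) + Maj(s[0], s[1], s[2]);

	s[7] = s[6]; s[6] = s[5]; s[5] = s[4];
	s[4] = s[3] + t1;
	s[3] = s[2]; s[2] = s[1]; s[1] = s[0];
	s[0] = t1 + t2;
}

int main(void)
{
	/* SHA-512 initial hash values (FIPS 180-4), K[0], and a zero message word */
	uint64_t s[8] = {
		0x6a09e667f3bcc908ULL, 0xbb67ae8584caa73bULL,
		0x3c6ef372fe94f82bULL, 0xa54ff53a5f1d36f1ULL,
		0x510e527fade682d1ULL, 0x9b05688c2b3e6c1fULL,
		0x1f83d9abfb41bd6bULL, 0x5be0cd19137e2179ULL,
	};

	sha512_round(s, 0x428a2f98d728ae22ULL, 0);
	printf("a after one round: %016llx\n", (unsigned long long)s[0]);
	return 0;
}
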
seed.c
313 t1 = X4 ^ ks[rbase+1]; \
314 t1 ^= t0; \
315 t1 = SS0[byte(t1, 0)] ^ SS1[byte(t1, 1)] ^ \
316 SS2[byte(t1, 2)] ^ SS3[byte(t1, 3)]; \
317 t0 += t1; \
320 t1 += t0; \
321 t1 = SS0[byte(t1, 0)] ^ SS1[byte(t1, 1)] ^ \
322 SS2[byte(t1, 2)] ^ SS3[byte(t1, 3)]; \
323 t0 += t1; \
325 X2 ^= t1
[all …]
/linux-6.1.9/arch/mips/include/asm/sibyte/
board.h
29 #define setleds(t0, t1, c0, c1, c2, c3) \
31 li t1, c0; \
32 sb t1, 0x18(t0); \
33 li t1, c1; \
34 sb t1, 0x10(t0); \
35 li t1, c2; \
36 sb t1, 0x08(t0); \
37 li t1, c3; \
38 sb t1, 0x00(t0)
40 #define setleds(t0, t1, c0, c1, c2, c3)
/linux-6.1.9/include/net/netfilter/
nf_conntrack_tuple.h
124 static inline bool __nf_ct_tuple_src_equal(const struct nf_conntrack_tuple *t1, in __nf_ct_tuple_src_equal() argument
127 return (nf_inet_addr_cmp(&t1->src.u3, &t2->src.u3) && in __nf_ct_tuple_src_equal()
128 t1->src.u.all == t2->src.u.all && in __nf_ct_tuple_src_equal()
129 t1->src.l3num == t2->src.l3num); in __nf_ct_tuple_src_equal()
132 static inline bool __nf_ct_tuple_dst_equal(const struct nf_conntrack_tuple *t1, in __nf_ct_tuple_dst_equal() argument
135 return (nf_inet_addr_cmp(&t1->dst.u3, &t2->dst.u3) && in __nf_ct_tuple_dst_equal()
136 t1->dst.u.all == t2->dst.u.all && in __nf_ct_tuple_dst_equal()
137 t1->dst.protonum == t2->dst.protonum); in __nf_ct_tuple_dst_equal()
140 static inline bool nf_ct_tuple_equal(const struct nf_conntrack_tuple *t1, in nf_ct_tuple_equal() argument
143 return __nf_ct_tuple_src_equal(t1, t2) && in nf_ct_tuple_equal()
[all …]
