Lines matching refs: %o0

45 ldd [%o0], %g2 ! load two
48 add %o0, 8, %o0 ! advance buf ptr
53 ld [%o0], %g2 ! load it
55 add %o0, 4, %o0 ! advance buf ptr
64 2: lduh [%o0], %o4 ! get hword
66 add %o0, 2, %o0 ! advance buf ptr either way
68 4: ldub [%o0], %o5 ! get final byte
73 addx %g0, %o2, %o0 ! add final carry into retval
79 andcc %o0, 0x2, %g0
81 andcc %o0, 0x4, %g0
82 lduh [%o0 + 0x00], %g2
84 add %o0, 2, %o0
92 andcc %o0, 0x4, %g0
96 ld [%o0 + 0x00], %g2
99 add %o0, 4, %o0
109 andcc %o0, 0x7, %g0 ! alignment problems?
115 5: CSUM_BIGCHUNK(%o0, 0x00, %o2, %o4, %o5, %g2, %g3, %g4, %g5)
116 CSUM_BIGCHUNK(%o0, 0x20, %o2, %o4, %o5, %g2, %g3, %g4, %g5)
117 CSUM_BIGCHUNK(%o0, 0x40, %o2, %o4, %o5, %g2, %g3, %g4, %g5)
118 CSUM_BIGCHUNK(%o0, 0x60, %o2, %o4, %o5, %g2, %g3, %g4, %g5)
122 add %o0, 128, %o0 ! advance buf ptr
130 add %o0, %g1, %o0 ! advance buf ptr
131 cptbl: CSUM_LASTCHUNK(%o0, 0x68, %o2, %g2, %g3, %g4, %g5)
132 CSUM_LASTCHUNK(%o0, 0x58, %o2, %g2, %g3, %g4, %g5)
133 CSUM_LASTCHUNK(%o0, 0x48, %o2, %g2, %g3, %g4, %g5)
134 CSUM_LASTCHUNK(%o0, 0x38, %o2, %g2, %g3, %g4, %g5)
135 CSUM_LASTCHUNK(%o0, 0x28, %o2, %g2, %g3, %g4, %g5)
136 CSUM_LASTCHUNK(%o0, 0x18, %o2, %g2, %g3, %g4, %g5)
137 CSUM_LASTCHUNK(%o0, 0x08, %o2, %g2, %g3, %g4, %g5)
143 mov %o2, %o0 ! return computed csum
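
The matches above (source lines 45-143) cover the plain checksum routine: it first handles 2- and 4-byte alignment of the buffer pointer, sums 128-byte blocks via CSUM_BIGCHUNK, dispatches into the CSUM_LASTCHUNK jump table for the remainder, folds in the final halfword/byte, and returns the accumulated value in %o0. A minimal C sketch of the arithmetic being accumulated (not the unrolled SPARC code; the function name is invented and a 2-byte-aligned buffer is assumed):

#include <stddef.h>
#include <stdint.h>

/* Hypothetical sketch of the accumulation: sum 16-bit words with
 * end-around carry.  The real routine works in unrolled 32- and
 * 128-byte chunks and returns an unfolded 32-bit partial sum. */
static uint32_t csum_partial_sketch(const void *buf, size_t len, uint32_t sum)
{
        const uint16_t *p = buf;

        while (len >= 2) {              /* add 16-bit words */
                sum += *p++;
                len -= 2;
        }
        if (len)                        /* trailing byte ("get final byte");
                                         * byte placement depends on
                                         * endianness, ignored here */
                sum += *(const uint8_t *)p;

        while (sum >> 16)               /* fold carries back in */
                sum = (sum & 0xffff) + (sum >> 16);
        return sum;
}
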
253 EX(ldd [%o0 + 0x00], %g2, and %o3, 0xf)
256 add %o0, 8, %o0
264 EX(ld [%o0 + 0x00], %g2, add %o3, 4)
270 add %o0, 4, %o0
277 2: EX(lduh [%o0 + 0x00], %o4, add %o3, 2)
278 add %o0, 2, %o0
283 4: EX(ldub [%o0 + 0x00], %o5, add %g0, 1)
289 addx %g0, %g7, %o0
296 andcc %o0, 0x1, %g0
298 andcc %o0, 0x2, %g0
300 andcc %o0, 0x4, %g0
301 EX(lduh [%o0 + 0x00], %g4, add %g1, 0)
304 add %o0, 2, %o0
313 andcc %o0, 0x4, %g0
317 EX(ld [%o0 + 0x00], %g4, add %g1, 0)
320 add %o0, 4, %o0
335 xor %o0, %o1, %o4 ! get changing bits
338 andcc %o0, 7, %g0 ! need to align things?
344 5: CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x00,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
345 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
346 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
347 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
352 add %o0, 128, %o0 ! advance src ptr
360 add %o0, %o2, %o0 ! advance src ptr
366 cctbl: CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x68,%g2,%g3,%g4,%g5)
367 CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x58,%g2,%g3,%g4,%g5)
368 CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x48,%g2,%g3,%g4,%g5)
369 CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x38,%g2,%g3,%g4,%g5)
370 CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x28,%g2,%g3,%g4,%g5)
371 CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x18,%g2,%g3,%g4,%g5)
372 CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x08,%g2,%g3,%g4,%g5)
379 mov %g7, %o0 ! give em the computed checksum
380 ccdbl: CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x00,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
381 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
382 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
383 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
388 add %o0, 128, %o0 ! advance src ptr
397 andcc %o0, 1, %o5
401 EX(ldub [%o0], %g5, add %g1, 1)
402 add %o0, 1, %o0
409 andcc %o0, 2, %g0
412 EX(lduh [%o0], %o4, add %g1, 0)
419 add %o0, 2, %o0
425 EX3(ld [%o0], %o4)
431 add %o0, 4, %o0
439 EX3(ld [%o0], %o4)
447 EX(lduh [%o0], %o4, and %g1, 3)
450 add %o0, 2, %o0
457 EX(ldub [%o0], %g2, add %g0, 1)
474 addx %g0, %g7, %o0
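
The second group of matches (source lines 253-474) is the copy-and-checksum path: every load that can fault is wrapped in EX()/EX3() so fixup code can take over, but the arithmetic is the same accumulate-as-you-go sum, now interleaved with the copy. A rough, hypothetical C sketch of the non-faulting case (names invented, alignment and fault handling ignored):

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical sketch: copy len bytes from src to dst while
 * accumulating the same partial sum.  The real routine interleaves
 * this with EX()-wrapped loads and stores so faults can be fixed up. */
static uint32_t csum_and_copy_sketch(void *dst, const void *src,
                                     size_t len, uint32_t sum)
{
        const uint8_t *s = src;
        uint8_t *d = dst;
        uint16_t w;

        while (len >= 2) {
                memcpy(&w, s, 2);       /* copy one 16-bit word... */
                memcpy(d, &w, 2);
                sum += w;               /* ...and add it to the running sum */
                s += 2;
                d += 2;
                len -= 2;
        }
        if (len) {                      /* odd trailing byte */
                *d = *s;
                sum += *s;
        }
        while (sum >> 16)               /* end-around carry fold */
                sum = (sum & 0xffff) + (sum >> 16);
        return sum;
}
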
517 sub %o0, 0x70, %o0
518 add %o0, 16, %o0
551 mov %i5, %o0
556 cmp %o0, 2
562 mov %i1, %o0
566 tst %o0
571 mov %i1, %o0
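
The last few matches (source lines 517-571) are in the fault-fixup code, which rebuilds the return value in %o0 from the saved input registers after a failed copy. In either routine, what comes back in %o0 is a 32-bit partial sum; producing the final 16-bit Internet checksum is a separate fold-and-complement step (csum_fold in the kernel). A hypothetical sketch of that fold:

#include <stdint.h>

/* Hypothetical sketch of the final fold: squash the 32-bit partial
 * sum to 16 bits and complement it to get the Internet checksum. */
static uint16_t csum_fold_sketch(uint32_t sum)
{
        sum = (sum & 0xffff) + (sum >> 16);     /* fold high half into low */
        sum = (sum & 0xffff) + (sum >> 16);     /* add any carry from the fold */
        return (uint16_t)~sum;
}
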