Lines Matching refs:r5
76 ld r5,0(r3) /* Load the first doubleword of s. */
78 cmpb r10,r7,r5 /* Check for null bytes in DWORD1. */
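The cmpb at line 78 is the scalar null-byte test: it compares its two source registers byte by byte and writes 0xFF into every result byte that matches, 0x00 elsewhere. With a register of zeros as one operand, a nonzero result means the doubleword holds a null. A minimal C sketch of that operation (cmpb_zero is a hypothetical name):

    #include <stdint.h>

    /* Emulate cmpb against zero: 0xFF per null byte, 0x00 elsewhere. */
    static uint64_t cmpb_zero(uint64_t dword)
    {
        uint64_t result = 0;
        for (int i = 0; i < 8; i++)
            if (((dword >> (8 * i)) & 0xFF) == 0)
                result |= (uint64_t)0xFF << (8 * i);
        return result;
    }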
88 addi r5,r3,16 /* Align up, or just add the 16B we already checked. */
91 and r7,r5,r0 /* Find the offset within the 16B block. */
92 andc r5,r5,r0 /* Align s up to the next quadword. */
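Lines 88-92 round the pointer up to the next quadword: add 16, record the misalignment, then clear the low four bits. If s was already 16B-aligned this simply skips past the 16B just checked. A sketch, assuming r0 holds the mask 15:

    #include <stdint.h>

    /* Round s up to the next 16B boundary, as addi + andc do above. */
    static uintptr_t align_up_16(uintptr_t s, uintptr_t *offset)
    {
        uintptr_t p = s + 16;              /* addi r5,r3,16 */
        *offset = p & 15;                  /* and  r7,r5,r0 */
        return p & ~(uintptr_t)15;         /* andc r5,r5,r0 */
    }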
113 andi. r7,r5,63 /* Check whether r5 is 64B-aligned. */
115 lvx v1,r5,r6 /* Load the quadword at r5. */
117 addi r5,r5,16 /* Advance to the next 16B block. */
122 andi. r7,r5,63 /* Check again for 64B alignment. */
124 lvx v1,r5,r6 /* Load the quadword at r5. */
126 addi r5,r5,16 /* Advance to the next 16B block. */
130 andi. r7,r5,63 /* Last check for 64B alignment. */
132 lvx v1,r5,r6 /* Load the quadword at r5. */
134 addi r5,r5,16 /* Advance to the next 16B block. */
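Lines 113-134 are the unrolled prologue: up to three 16B probes advance the pointer until it is 64B-aligned, so the main loop can issue four aligned vector loads per pass. The equivalent control flow in C, with quad_has_null as a hypothetical scalar stand-in for the lvx + vector-compare pair:

    #include <stdint.h>
    #include <string.h>

    /* Scalar stand-in for lvx + vector compare: any null in 16 bytes? */
    static int quad_has_null(const uint8_t *p)
    {
        return memchr(p, 0, 16) != NULL;
    }

    /* Advance in 16B steps (at most three) until p is 64B-aligned. */
    static const uint8_t *align_to_64(const uint8_t *p)
    {
        while (((uintptr_t)p & 63) != 0) { /* andi. r7,r5,63 */
            if (quad_has_null(p))          /* lvx + compare */
                break;                     /* null found: go to tail code */
            p += 16;                       /* addi r5,r5,16 */
        }
        return p;
    }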
159 lvx v1,r5,r6 /* r5 points to the current 64B block of s. */
160 lvx v2,r5,r7 /* Load the remaining quadwords of the block; */
161 lvx v3,r5,r10 /* the index registers hold the 16B, 32B and */
162 lvx v4,r5,r9 /* 48B offsets. */
169 addi r5,r5,64 /* Advance the pointer for the next iteration. */
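Lines 159-169 are the main loop body: four lvx loads cover a full 64B block, their null checks are combined, and the pointer advances by 64 only when no null was seen. The asm loads all four quadwords unconditionally; the sketch below short-circuits for clarity and reuses the hypothetical quad_has_null helper from the previous sketch:

    #include <stdint.h>
    #include <string.h>

    static int quad_has_null(const uint8_t *p)
    {
        return memchr(p, 0, 16) != NULL;
    }

    /* Scan 64B-aligned blocks; assumes the string does contain a null. */
    static const uint8_t *scan_64(const uint8_t *p)
    {
        for (;;) {
            if (quad_has_null(p)      || quad_has_null(p + 16) ||
                quad_has_null(p + 32) || quad_has_null(p + 48))
                return p;              /* the null lies in this block */
            p += 64;                   /* addi r5,r5,64 */
        }
    }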
200 lvx v1,r5,r6 /* Load quadword into vector register. */
201 addi r5,r5,16 /* Increment address to next 16B block. */
223 addi r5,r5,-16 /* Undo speculative bump. */
225 add r5,r5,r0 /* Add the offset of the null byte found. */
227 subf r3,r3,r5 /* Length is equal to the offset of the null byte minus the pointer to s. */
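Lines 223-227 compute the result: the pointer overshot the block containing the null, so back it up, add the byte offset of the null within the block, and subtract the start of the string. The same pattern recurs at lines 264-275 and 293-298 below, only with a 64B adjustment and the subtract/add order swapped. A sketch:

    #include <stddef.h>
    #include <stdint.h>

    /* Length = (block start + offset of the null) - start of string. */
    static size_t final_length(const uint8_t *s, const uint8_t *p,
                               uintptr_t offset)
    {
        p -= 16;                 /* addi r5,r5,-16: undo speculative bump */
        p += offset;             /* add  r5,r5,r0 */
        return (size_t)(p - s);  /* subf r3,r3,r5 */
    }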
264 addi r5,r5,-64 /* Adjust to the start of the 64B block. */
274 subf r5,r3,r5 /* Subtract the start of s to get the block offset. */
275 add r3,r5,r0 /* Compute the final length. */
293 addi r5,r5,-16 /* Adjust address to the offset of the last 16 bytes read. */
297 subf r5,r3,r5 /* Subtract the start of s to get the block offset. */
298 add r3,r5,r0 /* Compute the final length. */
305 bpermd r5,r8,r10 /* r8 contains the bit permute constants. */
307 sldi r5,r5,8 /* Make room for the second 8-bit mask. */
308 or r5,r5,r6 /* r5 should hold a 16B mask of possible null bytes. */
310 cntlzd r5,r5 /* Count leading zeros. */
311 addi r3,r5,-48 /* Deduct the 48 leading zeros always present. */
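Lines 305-311 turn the cmpb results for the matching quadword into a byte index. bpermd gathers one selected bit per byte (the constants in r8 pick a bit out of each 0xFF/0x00 byte), the two 8-bit results are packed into a 16-bit mask, and cntlzd locates the first set bit; since the top 48 bits of the 64-bit register are always zero, subtracting 48 yields the byte offset. A C sketch, using the GCC/Clang builtin __builtin_clzll for cntlzd (the byte ordering within the mask is fixed by the permute constants and is simplified here):

    #include <stdint.h>

    /* Compress each byte of a cmpb result (0xFF or 0x00) into one bit,
       most significant byte first, as bpermd does with suitable
       permute constants; any bit of a 0xFF byte works. */
    static uint64_t compress_bytes(uint64_t cmpb_result)
    {
        uint64_t bits = 0;
        for (int i = 0; i < 8; i++)
            bits = (bits << 1) | ((cmpb_result >> (56 - 8 * i)) & 1);
        return bits;
    }

    /* Byte offset of the first null in a 16B block, given the two
       cmpb results; the mask is nonzero because a null was detected. */
    static int first_null_offset(uint64_t cmpb_hi, uint64_t cmpb_lo)
    {
        uint64_t mask = compress_bytes(cmpb_hi); /* bpermd r5,r8,r10 */
        mask <<= 8;                              /* sldi r5,r5,8 */
        mask |= compress_bytes(cmpb_lo);         /* or   r5,r5,r6 */
        return __builtin_clzll(mask) - 48;       /* cntlzd + addi -48 */
    }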