References to ir in lzo1x_1_do_compress() -- the scan that measures a run of zero bytes and computes run_length:
const unsigned char *ir = ip + 4;
/* Fast path: OR four unaligned 64-bit loads per 32-byte step; stop on a non-zero byte. */
for (; (ir + 32) <= limit; ir += 32) {
        dv64 = get_unaligned((u64 *)ir);
        dv64 |= get_unaligned((u64 *)ir + 1);
        dv64 |= get_unaligned((u64 *)ir + 2);
        dv64 |= get_unaligned((u64 *)ir + 3);
        if (dv64)
                break;
}
/* Then 8 bytes per step; on a hit, skip that word's leading zero bytes. */
for (; (ir + 8) <= limit; ir += 8) {
        dv64 = get_unaligned((u64 *)ir);
        if (dv64) {
#if defined(__LITTLE_ENDIAN)
                ir += __builtin_ctzll(dv64) >> 3;
#else
                ir += __builtin_clzll(dv64) >> 3;
#endif
                break;
        }
}
/* Fallback, compiled instead of the two loops above when the kernel cannot
   do fast unaligned 64-bit loads: align ir to 4 bytes, then scan one aligned
   32-bit word per step. */
while ((ir < (const unsigned char *)ALIGN((uintptr_t)ir, 4)) &&
                (ir < limit) && (*ir == 0))
        ir++;
if (IS_ALIGNED((uintptr_t)ir, 4)) {
        for (; (ir + 4) <= limit; ir += 4) {
                dv = *((u32 *)ir);
                if (dv) {
#if defined(__LITTLE_ENDIAN)
                        ir += __builtin_ctz(dv) >> 3;
#else
                        ir += __builtin_clz(dv) >> 3;
#endif
                        break;
                }
        }
}
/* Finish byte-wise up to the first non-zero byte (or the limit). */
while (likely(ir < limit) && unlikely(*ir == 0))
        ir++;
run_length = ir - ip;
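For reference, a minimal userspace sketch of the same word-at-a-time trick, assuming a little-endian host; zero_run_length() is a hypothetical helper (not a kernel API), and memcpy() stands in for get_unaligned():

/* Return the number of leading zero bytes in p[0..len), little-endian only. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static size_t zero_run_length(const unsigned char *p, size_t len)
{
        const unsigned char *ir = p;
        const unsigned char *limit = p + len;
        uint64_t w;

        /* Word-at-a-time: stop at the first 8-byte window holding a non-zero byte. */
        while (ir + 8 <= limit) {
                memcpy(&w, ir, 8);              /* unaligned-safe load */
                if (w) {
                        /* Little-endian: trailing zero bits / 8 = leading zero bytes. */
                        return (size_t)(ir - p) + (__builtin_ctzll(w) >> 3);
                }
                ir += 8;
        }
        /* Byte-wise tail for the last few bytes. */
        while (ir < limit && *ir == 0)
                ir++;
        return (size_t)(ir - p);
}

int main(void)
{
        unsigned char buf[64] = { 0 };

        buf[37] = 0xab; /* first non-zero byte at offset 37 */
        printf("run length = %zu\n", zero_run_length(buf, sizeof(buf)));
        return 0;
}

Built with gcc or clang, this prints a run length of 37 for the buffer above; __builtin_ctzll() is only evaluated when the loaded word is non-zero, so its undefined-at-zero behaviour is never hit.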