Lines Matching refs:p_scr
72 #define p_scr p6 // scratch predicate register (macro)
177 cmp.eq p_scr, p0 = in2, r0 // if (len == 0)
184 (p_scr) br.cond.dpnt.few .restore_and_exit // Branch no. 1: return dest
194 cmp.eq p_scr, p0 = tmp4, r0 // is destination aligned?
196 (p_scr) br.cond.dptk.many .dest_aligned // yes: skip the byte-alignment prologue
203 cmp.ne p_scr, p0 = 0, loopcnt // avoid loading beyond end-point
209 (p_scr) ld1 tmp2 = [src], 1 // load one byte, src += 1
211 cmp.lt p_scr, p0 = 1, loopcnt // avoid load beyond end-point
222 cmp.ne p_scr, p0 = tmp4, r0 // is source also aligned? (p_scr set if not)
227 (p_scr) br.cond.dptk.many .src_not_aligned // no: handle misaligned source separately
235 (p_xtr) cmp.gt p_scr, p0 = ALIGN_UNROLL_no+1, elemcnt // Need N + 1 to qualify
236 (p_nxtr) cmp.gt p_scr, p0 = ALIGN_UNROLL_no, elemcnt // Need only N to qualify
239 (p_scr) br.cond.dpnt.many .copy_full_words // too few elements for the unrolled loop
317 cmp.gt p_scr, p0 = 8, len // fewer than 8 bytes left?
319 (p_scr) br.cond.dpnt.many .copy_bytes // yes: finish byte by byte
326 cmp.ne p_scr, p0 = 0, loopcnt // avoid load beyond end-point
333 (p_scr) load tempreg = [src], 8 // load one 8-byte word, src += 8
336 cmp.lt p_scr, p0 = 1, loopcnt // avoid load beyond end-point
343 cmp.eq p_scr, p0 = len, r0 // is len == 0 ?
345 (p_scr) br.cond.spnt .restore_and_exit // nothing left to copy
350 cmp.ne p_scr, p0 = 0, loopcnt // avoid load beyond end-point
356 (p_scr) ld1 tmp2 = [src], 1 // load one byte, src += 1
358 cmp.lt p_scr, p0 = 1, loopcnt // avoid load beyond end-point
375 cmp.gt p_scr, p0 = 16, len // fewer than 16 bytes?
381 (p_scr) br.cond.dpnt.many .copy_bytes // do byte by byte if too few
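
Taken together, these p_scr tests drive the routine's dispatch: exit early on len == 0, byte-copy until the destination is 8-byte aligned, run word (or unrolled) copies while enough bytes remain, and fall back to .copy_bytes for short tails. Below is a rough C sketch of that control flow; memcpy_sketch and its plain loops are illustrative assumptions only, since the real routine relies on IA-64 predication, software pipelining, and shift-merged loads for the .src_not_aligned case rather than ordinary branches.

    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative sketch, not the real routine.  Line numbers in the
       comments refer to the source lines listed above.  The word copy
       type-puns through uint64_t, which the assembly of course avoids. */
    void *
    memcpy_sketch (void *dest, const void *src, size_t len)
    {
      unsigned char *d = dest;
      const unsigned char *s = src;

      if (len == 0)                          /* line 177: len == 0?          */
        return dest;                         /* line 184: return dest        */

      while (((uintptr_t) d & 7) && len)     /* line 194: dest unaligned     */
        {
          *d++ = *s++;                       /* line 209: ld1/st1 byte copy  */
          len--;
        }

      if (((uintptr_t) s & 7) == 0)          /* line 222: source aligned too */
        while (len >= 8)                     /* line 317: a full word left?  */
          {
            *(uint64_t *) (void *) d =       /* line 333: 8-byte load/store  */
              *(const uint64_t *) (const void *) s;
            d += 8;
            s += 8;
            len -= 8;
          }
      /* else: line 227 branches to .src_not_aligned, which merges pairs of
         misaligned 8-byte loads; the tail loop below is .copy_bytes.       */

      while (len--)                          /* lines 350-358: .copy_bytes   */
        *d++ = *s++;

      return dest;
    }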