Lines Matching refs:XCC (a short usage sketch follows the listing)
21 #ifndef XCC
22 # define XCC xcc
73 bgeu,pn %XCC, .Lforcpy /* else use backward if ... */
76 bleu,pn %XCC, .Lforcpy /* if size is bigger, do overlapped copy */
82 blu,pn %XCC, 2f /* else continue */
88 bz,a,pn %XCC, .Ldbbck /* skip if dst is 8 byte aligned */
95 bgu,pt %XCC, 1b /* if not done keep copying */
98 bz,pn %XCC, 2f /* if size < 8, move to byte copy */
103 bz,a,pn %XCC, .Ldbcopybc /* if src is aligned do fast memmove */
121 bg,pt %XCC, 1b /* if size > 0 continue */
135 bgu,pt %XCC, 1b /* if size is bigger than 0 continue */
145 bgeu,a,pt %XCC, 1b /* if size >= 0 continue */
158 blt,pn %XCC, .Lmv_short /* merge with memcpy */
171 bgu,pt %XCC, 7b
182 bleu,pt %XCC, .Lmedlong
228 bgt,pt %XCC, .Lmv_align_loop
250 bgeu,pn %XCC, .Lmedium /* go to larger cases */
253 ble,pn %XCC, .Lsmallfin
256 bnz,pn %XCC, .Lsmallunalign /* branch to non-word aligned case */
259 ble,pn %XCC, .Lsmallwordx
264 bnz,pn %XCC, .Lsmallwords /* branch to word aligned case */
266 bge,a %XCC, .Lmedl64 /* if we branch */
271 blt,a,pn %XCC, .Lsmall_long_l
293 bgu,pn %XCC, .Lsmall_long_l /* loop until done */
296 bnz,pn %XCC, .Lsmall_long_x /* check for completion */
302 blt,pn %XCC, .Lsmallleft3 /* if not, go to finish up */
308 bnz,pn %XCC, .Lsmallleft3
318 bz,pt %XCC, .Lsmallexit
322 bz,pt %XCC, .Lsmallexit
326 bz,pt %XCC, .Lsmallexit
337 ble,pn %XCC, .Lsmallrest
339 bge,pt %XCC, .Lmedium_join
341 bz,pn %XCC, .Laldst
343 be,pt %XCC, .Ls2algn
350 bne,pt %XCC, .Ls2algn
367 bz,pn %XCC, .Lw4cp
369 be,pn %XCC, .Lw2cp
375 bne,pt %XCC, .Lw1cp
388 bnz,pt %XCC, 1b
409 bnz,pt %XCC, 2b
432 bnz,pt %XCC, 3b
445 bgu,pt %XCC, 1b
454 bz,pt %XCC, .Lsmallx
456 blt,pn %XCC, .Lsmallleft3
470 bgu,pt %XCC, .Lsmallnotalign4 /* loop until 3 or fewer bytes remain */
473 bz,pt %XCC, .Lsmallx
477 bz,pt %XCC, .Lsmallx
481 bz,pt %XCC, .Lsmallx
491 bnz,pn %XCC, .Lsmallleft3
504 bgu,pt %XCC, .Lsmallwords /* loop until done */
507 bz,pt %XCC, .Lsmallexit /* check for completion */
509 blt,pt %XCC, .Lsmallleft3 /* if not, go to finish up */
515 bnz,pn %XCC, .Lsmallleft3
534 bgu,pt %XCC, 7b
545 bgu,pn %XCC, .Llarge_align8_copy
553 ble,pn %XCC, .Lmedl63 /* skip big loop if < 64 bytes */
574 bgu,pt %XCC, .Lmedl64 /* repeat if at least 64 bytes left */
578 ble,pt %XCC, .Lmedl31 /* skip if 31 or fewer bytes left */
593 ble,pt %XCC, .Lmedl15 /* skip if 15 or fewer bytes left */
604 bz,pt %XCC, .Lsmallexit /* exit if finished */
606 blt,pt %XCC, .Lmedw7 /* skip if 7 or fewer bytes left */
612 bnz,pn %XCC, .Lmedw7
621 bnz,pt %XCC, .Lunalignsetup /* branch if not word aligned */
632 bge,pt %XCC, .Lunalignrejoin /* otherwise rejoin main loop */
637 ble,pt %XCC, .Lmedw31 /* skip big loop if less than 16 */
661 bgu,pt %XCC, .Lmedw32 /* repeat if at least 32 bytes left */
665 bz,pt %XCC, .Lsmallexit /* exit if finished */
667 blt,pt %XCC, .Lmedw15
683 bz,pt %XCC, .Lsmallexit /* exit if finished */
685 blt,pn %XCC, .Lmedw7 /* skip if 7 or fewer bytes left */
694 bz,pt %XCC, .Lsmallexit /* exit if finished */
697 blt,pn %XCC, .Lsmallleft3 /* skip if 3 or fewer bytes left */
703 bnz,pt %XCC, .Lsmallleft3
755 bge,pt %XCC, 1f
760 bgt,pt %XCC, .Lmv_aligned_on_64
768 bgt,pt %XCC, .Lmv_aligned_on_64
798 blu,pt %XCC, .Lalign_short
812 bgu,pt %XCC, .Lalign_loop_start
838 bgu,pt %XCC, .Lalign_loop_rest
843 bgu,pt %XCC, .Lalign_loop_start
847 beq,pt %XCC, .Lalign_done
858 bgu,pt %XCC, .Lalign_loop_short
882 bgu,pt %XCC, .Lalign_short_rest
890 bne,pt %XCC, .Lmedl63
905 bz,a %XCC, 1f
911 bgt,pt %XCC, .Lunalign_adjust /* end of source buffer */
948 bgu,pt %XCC, .Lunalign_loop
954 bleu,pt %XCC, .Lunalign_short
970 bgu,pt %XCC, .Lunalign_by8
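Every match above is either the fallback definition of the XCC macro (# define XCC xcc, i.e. the 64-bit integer condition codes) or a conditional branch qualified with %XCC, presumably so that a build targeting the 32-bit condition codes could redefine XCC (e.g. to icc) without touching the branch sites. The sketch below is a minimal, stand-alone illustration of that pattern, assuming the file is run through the C preprocessor as glibc .S sources are; the copy_words label, the register choices, and the loop itself are hypothetical and not taken from this file.

    #ifndef XCC
    # define XCC xcc              /* fallback from the listing: 64-bit integer condition codes */
    #endif

            .text
            .align  4
            .globl  copy_words            /* hypothetical helper, not part of the original file */
    copy_words:                           /* %o0 = dst, %o1 = src, %o2 = count (multiple of 8, > 0) */
    1:      ldx     [%o1], %o3            /* load one 64-bit word from the source            */
            stx     %o3, [%o0]            /* store it to the destination                     */
            add     %o1, 8, %o1
            add     %o0, 8, %o0
            subcc   %o2, 8, %o2           /* subcc updates the integer condition codes       */
            bgu,pt  %XCC, 1b              /* with the fallback this assembles as bgu,pt %xcc, 1b */
             nop                          /* branch delay slot                               */
            retl
             nop

With the default definition, every branch in the listing tests %xcc, the result of the comparison over the full 64-bit registers; redefining XCC before inclusion would retarget all of these branches at once, which is why the condition-code register is never spelled out literally at the branch sites.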