Lines Matching refs:m0
429 s_mov_b32 s_save_m0, m0
443 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
444 s_and_b32 m0, m0, 1
445 s_cmp_eq_u32 m0, 1
492 s_mov_b32 m0, 0x0 //Next lane of v2 to write to
546 s_mov_b32 m0, 0x0 //SGPR initial index value =0
572 s_add_u32 m0, m0, 16 //next sgpr index
573 s_cmp_lt_u32 m0, 96 //scc = (m0 < first 96 SGPR) ? 1 : 0
597 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
598 s_and_b32 m0, m0, 1
599 s_cmp_eq_u32 m0, 1
635 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
636 s_and_b32 m0, m0, 1
637 s_cmp_eq_u32 m0, 1
638 s_mov_b32 m0, 0x0
651 s_add_u32 m0, m0, s3 //every buffer_store_lds does 256 bytes
654 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
669 s_add_u32 m0, m0, s3 //every buffer_store_lds does 256 bytes
672 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
680 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
681 s_and_b32 m0, m0, 1
682 s_cmp_eq_u32 m0, 1
695 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
696 s_and_b32 m0, m0, 1
697 s_cmp_eq_u32 m0, 1
705 s_mov_b32 m0, 0x4 //VGPR initial index value =4
706 s_cmp_lt_u32 m0, s_save_alloc_size
720 s_add_u32 m0, m0, 4 //next vgpr index
722 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
731 s_mov_b32 m0, 0x4 //VGPR initial index value =4
732 s_cmp_lt_u32 m0, s_save_alloc_size
746 s_add_u32 m0, m0, 4 //next vgpr index
748 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
759 s_add_u32 s_save_alloc_size, s_save_alloc_size, m0
765 s_add_u32 m0, m0, 1 //next vgpr index
767 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
790 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
791 s_and_b32 m0, m0, 1
792 s_cmp_eq_u32 m0, 1
816 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
817 s_and_b32 m0, m0, 1
818 s_cmp_eq_u32 m0, 1
819 s_mov_b32 m0, 0x0
830 s_add_u32 m0, m0, 128 // 128 DW
832 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
844 s_add_u32 m0, m0, 256 // 256 DW
846 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
854 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
855 s_and_b32 m0, m0, 1
856 s_cmp_eq_u32 m0, 1
867 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
868 s_and_b32 m0, m0, 1
869 s_cmp_eq_u32 m0, 1
877 s_mov_b32 m0, 4 //VGPR initial index value = 4
878 s_cmp_lt_u32 m0, s_restore_alloc_size
891 s_add_u32 m0, m0, 4 //next vgpr index
893 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
911 s_mov_b32 m0, 4 //VGPR initial index value = 4
912 s_cmp_lt_u32 m0, s_restore_alloc_size
925 s_add_u32 m0, m0, 4 //next vgpr index
927 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
938 s_add_u32 s_restore_alloc_size, s_restore_alloc_size, m0
945 s_add_u32 m0, m0, 1 //next vgpr index
947 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
972 s_mov_b32 m0, s_sgpr_save_num
977 s_sub_u32 m0, m0, 4 // Restore from S[0] to S[104]
986 s_sub_u32 m0, m0, 8 // Restore from S[0] to S[96]
998 s_sub_u32 m0, m0, 16 // Restore from S[n] to S[0]
1010 s_cmp_eq_u32 m0, 0 //scc = (m0 == 0) ? 1 : 0
1047 s_mov_b32 m0, s_restore_m0
1107 v_writelane_b32 v2, s, m0
1108 s_add_u32 m0, m0, 0x1
1110 s_mov_b32 exec_lo, m0
1111 s_mov_b32 m0, s_mem_offset
1112 s_buffer_store_dword s, s_rsrc, m0 glc:1
1114 s_mov_b32 m0, exec_lo