Lines Matching +full:1 +full:v2
32 #define SINGLE_STEP_MISSED_WORKAROUND 1 //workaround for lost TRAP_AFTER_INST exception when SAVECTX raised
42 var SQ_WAVE_STATUS_WAVE64_SIZE = 1
291 SQ_WAVE_STATE_PRIV_POISON_ERR_SHIFT - SQ_WAVE_STATE_PRIV_SCC_SHIFT + 1), s_save_state_priv
306 s_setreg_b32 hwreg(HW_REG_WAVE_EXCP_FLAG_PRIV, SQ_WAVE_EXCP_FLAG_PRIV_SAVE_CONTEXT_SHIFT, 1), s_save_tmp
381 s_and_b32 m0, m0, 1
382 s_cmp_eq_u32 m0, 1
395 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset scope:SCOPE_SYS offset:128*2
405 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset scope:SCOPE_SYS offset:256*2
421 v_mov_b32 v2, 0x0 //Set of SGPRs for TCP store
435 v_writelane_b32 v2, s_save_m0, 0x0
436 v_writelane_b32 v2, s_save_pc_lo, 0x1
437 v_writelane_b32 v2, s_save_tmp, 0x2
438 v_writelane_b32 v2, s_save_exec_lo, 0x3
439 v_writelane_b32 v2, s_save_exec_hi, 0x4
440 v_writelane_b32 v2, s_save_state_priv, 0x5
441 v_writelane_b32 v2, s_save_xnack_mask, 0x7
445 v_writelane_b32 v2, s_save_tmp, 0x6
448 v_writelane_b32 v2, s_save_tmp, 0x8
451 v_writelane_b32 v2, s_save_tmp, 0x9
454 v_writelane_b32 v2, s_save_tmp, 0xA
457 v_writelane_b32 v2, s_save_tmp, 0xB
460 v_writelane_b32 v2, s_save_tmp, 0xC
463 v_writelane_b32 v2, s_save_tmp, 0xD
465 s_get_barrier_state s_save_tmp, -1
467 v_writelane_b32 v2, s_save_tmp, 0xE
473 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset scope:SCOPE_SYS
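The v_writelane_b32 sequence at lines 435-467 packs one saved scalar value (m0, PC, EXEC, STATE_PRIV, XNACK_MASK and the other HWREGs) per lane of v2; the single buffer_store_dword at line 473 then writes all of those lanes out in one transaction. A minimal C sketch of that pack-then-store idea, assuming wave32; vgpr, write_lane and store_vgpr are illustrative stand-ins, not driver API:

#include <stdint.h>
#include <string.h>

#define LANES 32u                              /* assume wave32 for the sketch */

typedef struct { uint32_t lane[LANES]; } vgpr; /* stand-in for v2 */

/* v_writelane_b32 v2, value, idx: place one scalar value into one lane. */
static void write_lane(vgpr *v, uint32_t value, unsigned idx)
{
    v->lane[idx] = value;
}

/* buffer_store_dword v2, ...: one dword per lane, lane 0 at the lowest
 * address, so the packed scalars land contiguously in the save area.   */
static void store_vgpr(uint8_t *save_area, size_t offset, const vgpr *v)
{
    memcpy(save_area + offset, v->lane, sizeof(v->lane));
}

The lane assignments in the real handler follow the indices shown above (0x0 = m0, 0x1 = pc_lo, 0x5 = state_priv, 0x7 = xnack_mask, and so on).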
493 s_movrels_b64 s0, s0 //s0 = s[0+m0], s1 = s[1+m0]
509 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset scope:SCOPE_SYS
512 v_mov_b32 v2, 0x0
516 s_cmp_lt_u32 m0, 96 //scc = (m0 < 96) ? 1 : 0, i.e. still within the first 96 SGPRs
520 s_movrels_b64 s0, s0 //s0 = s[0+m0], s1 = s[1+m0]
528 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset scope:SCOPE_SYS
536 s_and_b32 m0, m0, 1
537 s_cmp_eq_u32 m0, 1
567 v_mbcnt_lo_u32_b32 v0, -1, 0
568 v_mbcnt_hi_u32_b32 v0, -1, v0
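The v_mbcnt_lo_u32_b32 / v_mbcnt_hi_u32_b32 pair at lines 567-568 gives every lane its own index: with an all-ones mask, each lane counts the set bits strictly below its own position. A hedged C model of that behaviour (mbcnt is an illustrative helper, not a real intrinsic):

#include <stdint.h>

/* Model of the v_mbcnt_lo/hi pair: per-lane popcount of the mask bits
 * strictly below the lane's own position.                              */
static uint32_t mbcnt(uint64_t mask, unsigned lane)
{
    uint64_t below = (lane == 0) ? 0 : (mask & ((1ull << lane) - 1));
    return (uint32_t)__builtin_popcountll(below);   /* GCC/Clang builtin */
}

/* With mask = ~0ull every lane gets its own index: mbcnt(~0ull, 0) == 0,
 * mbcnt(~0ull, 31) == 31, which is how v0 becomes a per-lane offset for
 * the buffer stores that follow.                                        */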
572 s_and_b32 m0, m0, 1
573 s_cmp_eq_u32 m0, 1
590 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
608 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
617 s_and_b32 m0, m0, 1
618 s_cmp_eq_u32 m0, 1
628 s_add_u32 s_save_alloc_size, s_save_alloc_size, 1
629 s_lshl_b32 s_save_alloc_size, s_save_alloc_size, 2 //Number of VGPRs = (vgpr_size + 1) * 4 (non-zero value)
632 s_and_b32 m0, m0, 1
633 s_cmp_eq_u32 m0, 1
647 v_movrels_b32 v1, v1 //v1 = v[1+m0]
648 v_movrels_b32 v2, v2 //v2 = v[2+m0]
653 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset scope:SCOPE_SYS offset:128*2
658 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
673 v_movrels_b32 v1, v1 //v1 = v[1+m0]
674 v_movrels_b32 v2, v2 //v2 = v[2+m0]
679 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset scope:SCOPE_SYS offset:256*2
684 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
701 s_add_u32 m0, m0, 1 //next vgpr index
703 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
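Lines 647-703 are the body of the VGPR save loops: m0-relative moves stream v[m0], v[m0+1], ... through low VGPRs, each buffer_store_dword writes one register at a 128-byte (wave32) or 256-byte (wave64) stride, and m0 advances until it reaches s_save_alloc_size, with a one-register-at-a-time remainder at lines 701-703. A rough C outline of that control flow, under the assumption that the per-register stride is lanes * 4 bytes; all names are illustrative:

#include <stdint.h>
#include <stddef.h>
#include <string.h>

#define MAX_LANES 64u

typedef struct { uint32_t lane[MAX_LANES]; } vgpr;   /* one simulated VGPR */

/* buffer_store_dword stand-in: one dword per active lane, lane-contiguous. */
static void store_vgpr(uint8_t *buf, size_t offset, const vgpr *v, unsigned lanes)
{
    memcpy(buf + offset, v->lane, lanes * sizeof(uint32_t));
}

/* Outline of the save loops: stride = lanes * 4 bytes (128 for wave32,
 * 256 for wave64), four registers per main-loop pass, then a tail that
 * moves one register at a time, as the m0 updates in the listing suggest. */
static void save_vgprs(uint8_t *buf, const vgpr *file, uint32_t alloc_size, unsigned lanes)
{
    const size_t stride = lanes * sizeof(uint32_t);
    size_t mem_offset = 0;
    uint32_t m0 = 0;

    while (m0 + 4 <= alloc_size) {               /* main loop */
        for (unsigned i = 0; i < 4; i++)
            store_vgpr(buf, mem_offset + i * stride, &file[m0 + i], lanes);
        m0 += 4;
        mem_offset += 4 * stride;
    }
    while (m0 < alloc_size) {                    /* remainder, one VGPR at a time */
        store_vgpr(buf, mem_offset, &file[m0], lanes);
        m0 += 1;
        mem_offset += stride;
    }
}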
730 s_and_b32 m0, m0, 1
731 s_cmp_eq_u32 m0, 1
755 s_and_b32 m0, m0, 1
756 s_cmp_eq_u32 m0, 1
766 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
776 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
785 s_and_b32 m0, m0, 1
786 s_cmp_eq_u32 m0, 1
794 s_add_u32 s_restore_alloc_size, s_restore_alloc_size, 1
795 s_lshl_b32 s_restore_alloc_size, s_restore_alloc_size, 2 //Number of VGPRs = (vgpr_size + 1) * 4 (non-zero value)
798 s_and_b32 m0, m0, 1
799 s_cmp_eq_u32 m0, 1
814 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset scope:SCOPE_SYS offset:128*2
819 v_movreld_b32 v2, v2
823 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
829 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save scope:SCOPE_SYS offset:128*2
848 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset scope:SCOPE_SYS offset:256*2
853 v_movreld_b32 v2, v2
857 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
874 s_add_u32 m0, m0, 1 //next vgpr index
876 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
885 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save scope:SCOPE_SYS offset:256*2
939 s_cmp_eq_u32 m0, 0 //scc = (m0 == 0) ? 1 : 0, loop until m0 reaches 0
942 // s_barrier with STATE_PRIV.TRAP_AFTER_INST=1, STATUS.PRIV=1 incorrectly asserts debug exception.
1008 s_barrier_signal -1
1009 s_add_i32 s_restore_tmp, s_restore_tmp, -1
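The loop around s_barrier_signal -1 at lines 1008-1009 appears to replay barrier signals on restore, counting down a value recovered from the barrier state that s_get_barrier_state captured during save (line 465). A hedged C sketch of that replay loop; signal_barrier and the pending count are illustrative stand-ins:

/* Replay barrier signals after restore: issue one signal per pending count
 * recovered from the saved barrier state. signal_barrier() stands in for
 * s_barrier_signal -1; the count variable mirrors s_restore_tmp.          */
static void replay_barrier_signals(unsigned pending, void (*signal_barrier)(void))
{
    while (pending > 0) {
        signal_barrier();        /* s_barrier_signal -1                        */
        pending--;               /* s_add_i32 s_restore_tmp, s_restore_tmp, -1 */
    }
}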
1066 v_writelane_b32 v2, s[sgpr_idx], sgpr_idx + lane_offset
1075 v_writelane_b32 v2, s[sgpr_idx], sgpr_idx
1102 s_add_u32 s_vgpr_size_byte, s_vgpr_size_byte, 1
1105 s_lshl_b32 s_vgpr_size_byte, s_vgpr_size_byte, (2+7) //VGPR save bytes = (vgpr_size + 1) * 4 * 32 * 4 (non-zero value)
1108 s_lshl_b32 s_vgpr_size_byte, s_vgpr_size_byte, (2+8) //VGPR save bytes = (vgpr_size + 1) * 4 * 64 * 4 (non-zero value)
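The size arithmetic spelled out in the comments at lines 628-629, 794-795 and 1102-1108 reduces to two closed forms: VGPR count = (vgpr_size + 1) * 4, and VGPR save-area bytes = count * lanes * 4, which is what the (2+7) and (2+8) shift amounts encode (a 128-byte stride per register in wave32, 256-byte in wave64, matching the offset:128*2 and offset:256*2 immediates). A small C check of that arithmetic; the function names are illustrative:

#include <assert.h>
#include <stdint.h>

/* VGPR count: the encoded field counts granules of 4 registers. */
static uint32_t vgpr_count(uint32_t vgpr_size_field)
{
    return (vgpr_size_field + 1) * 4;               /* (field + 1) << 2 */
}

/* Save-area bytes: count * lanes * 4 bytes, i.e. the (2+7)/(2+8) shifts. */
static uint32_t vgpr_save_bytes(uint32_t vgpr_size_field, int wave64)
{
    return (vgpr_size_field + 1) << (2 + (wave64 ? 8 : 7));
}

int main(void)
{
    /* e.g. vgpr_size = 3 encodes 16 VGPRs; per-register stride is
     * 128 bytes in wave32 and 256 bytes in wave64.                */
    assert(vgpr_count(3) == 16);
    assert(vgpr_save_bytes(3, 0) == 16 * 128);
    assert(vgpr_save_bytes(3, 1) == 16 * 256);
    return 0;
}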