Lines Matching full:v0
380 // Save v0 by itself since it requires only two SGPRs.
385 global_store_dword_addtid v0, [s_save_ttmps_lo, s_save_ttmps_hi] slc:1 glc:1
386 v_mov_b32 v0, 0x0
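The addtid addressing mode does the work in the store above: each lane implicitly writes at base + 4*threadID, which is why only the two base-address SGPRs are needed, and v0 is then zeroed (source 386) so it can be reused to stage the ttmp values below. A minimal C model of the per-lane addressing (function and parameter names are mine, not from the source):

#include <stdint.h>

/* Hedged model of global_store_dword_addtid: lane l stores its 32-bit
 * value at byte address base + 4*l, so no offset VGPR is required. */
static void store_dword_addtid(uint32_t *base, const uint32_t v0[], int wave_size)
{
    for (int lane = 0; lane < wave_size; lane++)
        base[lane] = v0[lane];   /* byte address = base + 4*lane */
}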
403 v_writelane_b32 v0, ttmp4, 0x4
404 v_writelane_b32 v0, ttmp5, 0x5
405 v_writelane_b32 v0, ttmp6, 0x6
406 v_writelane_b32 v0, ttmp7, 0x7
407 v_writelane_b32 v0, ttmp8, 0x8
408 v_writelane_b32 v0, ttmp9, 0x9
409 v_writelane_b32 v0, ttmp10, 0xA
410 v_writelane_b32 v0, ttmp11, 0xB
411 v_writelane_b32 v0, ttmp13, 0xD
412 v_writelane_b32 v0, exec_lo, 0xE
413 v_writelane_b32 v0, exec_hi, 0xF
417 global_store_dword_addtid v0, [s_save_ttmps_lo, s_save_ttmps_hi] inst_offset:0x40 slc:1 glc:1
418 v_readlane_b32 ttmp14, v0, 0xE
419 v_readlane_b32 ttmp15, v0, 0xF
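v_writelane_b32 and v_readlane_b32 treat a VGPR as a lane-indexed array of 32-bit scalars. Lanes 0x4..0xF of v0 are packed with ttmp4..ttmp11, ttmp13, exec_lo and exec_hi, the packed register is stored 0x40 bytes into the ttmp save area (source 417), and lanes 0xE/0xF are read straight back, apparently so the saved exec mask survives in ttmp14/ttmp15. A sketch of the two primitives (the struct and names are mine):

#include <stdint.h>

typedef struct { uint32_t lane[64]; } vgpr_t;  /* one VGPR across a wave */

/* v_writelane_b32 vdst, ssrc, lane: deposit a scalar into one lane. */
static void writelane(vgpr_t *v, unsigned lane, uint32_t sval)
{
    v->lane[lane] = sval;
}

/* v_readlane_b32 sdst, vsrc, lane: pull one lane back out as a scalar. */
static uint32_t readlane(const vgpr_t *v, unsigned lane)
{
    return v->lane[lane];
}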
464 buffer_store_dword v0, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
466 buffer_store_dword v1, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:128
467 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:128*2
468 buffer_store_dword v3, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:128*3
477 buffer_store_dword v0, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
479 buffer_store_dword v1, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:256
480 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:256*2
481 buffer_store_dword v3, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:256*3
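The offsets make the save layout visible: VGPRs sit back to back, each occupying wave_size * 4 bytes, so the per-VGPR stride is 128 bytes in wave32 (offset:128, 128*2, 128*3 at sources 466-468) and 256 bytes in wave64 (sources 479-481). A sketch of the implied addressing, under that layout assumption:

#include <stdint.h>

/* Assumed save-area layout: lane l of VGPR i lives at byte offset
 * i*wave_size*4 + l*4 from the area base.  wave_size is 32 or 64. */
static uint32_t vgpr_save_offset(uint32_t vgpr, uint32_t lane, uint32_t wave_size)
{
    return vgpr * wave_size * 4 + lane * 4;
}

For example, vgpr_save_offset(1, 0, 32) is 128, matching the offset:128 on the v1 store at source 466.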
495 v_mov_b32 v0, 0x0 //Offset[31:0] from buffer resource
528 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
571 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
592 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
636 //load 0~63*4(byte address) to vgpr v0
637 v_mbcnt_lo_u32_b32 v0, -1, 0
638 v_mbcnt_hi_u32_b32 v0, -1, v0
639 v_mul_u32_u24 v0, 4, v0
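v_mbcnt_lo_u32_b32/v_mbcnt_hi_u32_b32 count the set bits of a 64-bit mask that lie below the executing lane; chained with an all-ones (-1) mask they yield the lane index, and the multiply by 4 turns that into a per-lane byte address, the "0~63*4" of the comment. A C model of the three instructions (function name is mine):

#include <stdint.h>

/* mbcnt with mask = -1: popcount of the mask bits strictly below this
 * lane, i.e. the lane index itself; then scale by 4 for a dword address. */
static uint32_t lds_lane_byte_addr(unsigned lane)            /* lane in 0..63 */
{
    uint64_t mask  = ~0ull;                                  /* the -1 operand */
    uint64_t below = (lane == 0) ? 0 : (mask >> (64 - lane));
    uint32_t v0    = (uint32_t)__builtin_popcountll(below);  /* mbcnt_lo + mbcnt_hi (GCC/Clang builtin) */
    return 4 * v0;                                           /* v_mul_u32_u24 v0, 4, v0 */
}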
653 ds_read_b32 v1, v0
655 buffer_store_dword v1, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
659 v_add_nc_u32 v0, v0, 128 //mem offset increased by 128 bytes
671 ds_read_b32 v1, v0
673 buffer_store_dword v1, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
677 v_add_nc_u32 v0, v0, 256 //mem offset increased by 256 bytes
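Each iteration moves one dword per lane from LDS to the save buffer, then bumps the per-lane LDS address by the whole wave's footprint: 128 bytes (32 lanes * 4) in wave32, 256 bytes in wave64. A sketch of the loop under the layout assumed earlier (names mine; the real code also advances s_save_mem_offset, which the matches above do not show):

#include <stdint.h>

/* LDS save loop: every lane copies one dword to the save buffer, then
 * the address advances by wave_size*4 bytes until LDS is covered. */
static void save_lds(const uint32_t *lds, uint32_t *buf,
                     uint32_t lds_bytes, uint32_t wave_size)
{
    for (uint32_t base = 0; base < lds_bytes; base += wave_size * 4)
        for (uint32_t lane = 0; lane < wave_size; lane++)    /* lanes run in parallel */
            buf[base / 4 + lane] = lds[base / 4 + lane];     /* ds_read_b32 + buffer_store_dword */
}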
716 v_movrels_b32 v0, v0 //v0 = v[0+m0]
721 buffer_store_dword v0, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
722 buffer_store_dword v1, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:128
723 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:128*2
724 buffer_store_dword v3, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:128*3
742 v_movrels_b32 v0, v0 //v0 = v[0+m0]
747 buffer_store_dword v0, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
748 buffer_store_dword v1, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:256
749 buffer_store_dword v2, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:256*2
750 buffer_store_dword v3, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1 offset:256*3
769 v_movrels_b32 v0, v0 //v0 = v[0+m0]
770 buffer_store_dword v0, v0, s_save_buf_rsrc0, s_save_mem_offset slc:1 glc:1
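v_movrels_b32 adds M0 to the source register index, so "v_movrels_b32 v0, v0" copies v[0+m0] into v0. With m0 stepping in fours, each pass stages four consecutive VGPRs into v0..v3 and the burst stores above write them out. A hedged model of the relative read (the 2-D array standing in for the register file is mine):

#include <stdint.h>

#define NUM_VGPRS 256

/* v_movrels_b32 vdst, vsrc: per-lane read of v[src_index + M0].  The
 * save loop uses src_index 0..3 with m0 += 4 per iteration. */
static uint32_t movrels(const uint32_t vgpr[NUM_VGPRS][64],
                        unsigned src_index, unsigned m0, unsigned lane)
{
    return vgpr[src_index + m0][lane];
}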
830 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset lds:1 // first 64DW
832 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset
834 ds_store_addtid_b32 v0
844 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset lds:1 // first 64DW
846 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset
848 ds_store_addtid_b32 v0
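Two restore paths appear at sources 830-848: a buffer load with lds:1, which drops the fetched dwords directly into LDS without touching a VGPR (used for the first 64 DW), and a fallback that loads into v0 and writes it out with ds_store_addtid_b32, a store at byte address 4*threadID. Functionally both reduce to the per-lane copy below (sketch, names mine):

#include <stdint.h>

/* One LDS restore step: each lane fetches its dword from the save buffer
 * and places it at lds[4*lane], either directly (lds:1) or via v0 and
 * ds_store_addtid_b32. */
static void restore_lds_chunk(const uint32_t *buf, uint32_t *lds,
                              uint32_t wave_size)
{
    for (uint32_t lane = 0; lane < wave_size; lane++) {
        uint32_t v0 = buf[lane];   /* buffer_load_dword v0 (or lds:1) */
        lds[lane] = v0;            /* ds_store_addtid_b32 v0          */
    }
}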
881 s_mov_b32 s_restore_mem_offset_save, s_restore_mem_offset // restore start with v1, v0 will be the last
888 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1
889 buffer_load_dword v1, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1 offset:128
890 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1 offset:128*2
891 buffer_load_dword v3, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1 offset:128*3
893 v_movreld_b32 v0, v0 //v[0+m0] = v0
900 s_cbranch_scc1 L_RESTORE_VGPR_WAVE32_LOOP //VGPR restore (except v0) is complete?
902 /* VGPR restore on v0 */
903 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1
904 buffer_load_dword v1, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1 offset:128
905 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1 offset:128*2
906 buffer_load_dword v3, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1 offset:128*3
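The restore order mirrors the save: v0..v3 are the staging registers for the v_movreld_b32 loop (v[0+m0] = v0), so their save-area offset is parked in s_restore_mem_offset_save first, the loop restores v4 upward in bursts of four, and only then are v0..v3 themselves reloaded. A sketch of that ordering, assuming the flat layout used earlier (the wave64 path at sources 915-963 is identical apart from the 256-byte stride):

#include <stdint.h>

#define NUM_VGPRS 256

/* VGPR restore: bursts of four via movreld starting at m0 = 4, with
 * v0..v3 restored last because the loop clobbers them as staging regs. */
static void restore_vgprs(uint32_t vgpr[NUM_VGPRS][64], const uint32_t *buf,
                          uint32_t alloc_vgprs, uint32_t wave_size)
{
    for (uint32_t m0 = 4; m0 < alloc_vgprs; m0 += 4)           /* the loop */
        for (uint32_t i = 0; i < 4; i++)                       /* dw burst */
            for (uint32_t lane = 0; lane < wave_size; lane++)
                vgpr[m0 + i][lane] = buf[(m0 + i) * wave_size + lane];

    for (uint32_t i = 0; i < 4; i++)                           /* v0..v3 last */
        for (uint32_t lane = 0; lane < wave_size; lane++)
            vgpr[i][lane] = buf[i * wave_size + lane];
}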
915 s_mov_b32 s_restore_mem_offset_save, s_restore_mem_offset // restore start with v4, v0 will be the last
922 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1
923 buffer_load_dword v1, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1 offset:256
924 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1 offset:256*2
925 buffer_load_dword v3, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1 offset:256*3
927 v_movreld_b32 v0, v0 //v[0+m0] = v0
934 s_cbranch_scc1 L_RESTORE_VGPR_WAVE64_LOOP //VGPR restore (except v0) is complete?
948 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset slc:1 glc:1
950 v_movreld_b32 v0, v0 //v[0+m0] = v0
954 s_cbranch_scc1 L_RESTORE_SHARED_VGPR_WAVE64_LOOP //VGPR restore (except v0) is complete?
956 s_mov_b32 exec_hi, 0xFFFFFFFF //restore back exec_hi before restoring V0!!
958 /* VGPR restore on v0 */
960 buffer_load_dword v0, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1
961 buffer_load_dword v1, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1 offset:256
962 buffer_load_dword v2, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1 offset:256*2
963 buffer_load_dword v3, v0, s_restore_buf_rsrc0, s_restore_mem_offset_save slc:1 glc:1 offset:256*3