Lines matching full:indirect

The matches below come from the amdgpu VCN 2.5 DPG (dynamic power gating) code and are grouped by containing function; the number at the left of each match is its line in the source file.

vcn_v2_5_mc_resume_dpg_mode() -- indirect is the function's last argument (declared at line 664) and is forwarded, as the final parameter, to every cache, BAR and size register write in the body:

  664      bool indirect)
  673    if (!indirect) {
  676      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect);
  679      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect);
  681      VCN, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
  684      VCN, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
  686      VCN, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
  688      VCN, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
  694      lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
  697      upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
  701      AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect);
  704    if (!indirect)
  706      VCN, 0, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
  709      VCN, 0, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
  712    if (!indirect) {
  715      lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
  718      upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
  720      VCN, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
  723      VCN, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
  725      VCN, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
  727      VCN, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
  730      VCN, 0, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
  735      lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
  738      upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
  740      VCN, 0, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
  742      VCN, 0, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
  747      lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
  750      upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
  752      VCN, 0, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
  755      AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_fw_shared)), 0, indirect);
  759      VCN, 0, mmUVD_GFX8_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
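
Every match above ends with the same trailing arguments -- a register value, a second argument that is 0 in most of these calls (the clock-gating function below passes its sram_sel there), and finally the indirect flag -- because each match is the tail of a multi-line call to the DPG-mode register-write helper (in the kernels I have looked at this is the WREG32_SOC15_DPG_MODE macro, but the macro name itself is cut off by the per-line matching). Below is a minimal userspace model of what the flag selects, assuming that behaviour; dpg_write, mmio and dpg_table are illustrative stand-ins, not the driver's API. Direct mode writes the register immediately, indirect mode only records an offset/value pair to be applied later in one batch.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t mmio[0x10000];   /* stand-in for the register file           */
static uint32_t dpg_table[512];  /* queued offset/value pairs (indirect path) */
static unsigned dpg_table_len;

/* Model of the DPG-mode write helper: !indirect writes the register now,
 * indirect only appends the (offset, value) pair to the table.             */
static void dpg_write(uint32_t offset, uint32_t value, bool indirect)
{
	if (!indirect) {
		mmio[offset] = value;
	} else {
		dpg_table[dpg_table_len++] = offset;
		dpg_table[dpg_table_len++] = value;
	}
}

int main(void)
{
	/* Same shape as the CACHE_OFFSET/CACHE_SIZE writes listed above;
	 * the offsets and values are made up for the example.             */
	dpg_write(0x20 /* CACHE_OFFSET0 */, 0,      false); /* lands immediately   */
	dpg_write(0x21 /* CACHE_SIZE0   */, 0x1000, true);  /* only queued for now */

	printf("direct:   mmio[0x20] = 0x%x\n", mmio[0x20]);
	printf("indirect: mmio[0x21] = 0x%x, queued pairs = %u\n",
	       mmio[0x21], dpg_table_len / 2);
	return 0;
}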

vcn_v2_5_clock_gating_dpg_mode() -- here indirect is declared as a uint8_t rather than a bool (line 879) and is again the last parameter of each clock-gating register write:

  879      uint8_t sram_sel, uint8_t indirect)
  913      VCN, 0, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect);
  917      VCN, 0, mmUVD_CGC_GATE), 0, sram_sel, indirect);
  921      VCN, 0, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);
  925      VCN, 0, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
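
This is the one function in the list that forwards something other than a literal 0 in the slot just before indirect: its own sram_sel argument (lines 913-925). The indirect flag itself still rides in the last position, exactly as in the other callers.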

vcn_v2_6_enable_ras() -- the RAS-enable helper for VCN 2.6 takes indirect as its last argument (line 990) and passes it on to its three register writes:

  990      bool indirect)
 1005      tmp, 0, indirect);
 1010      tmp, 0, indirect);
 1015      tmp, 0, indirect);
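
The three writes at lines 1005, 1010 and 1015 follow the same pattern as everything above; as the match at line 1099 below shows, this helper is called from vcn_v2_5_start_dpg_mode() with that function's own indirect flag, so RAS enablement goes through whichever write path (direct or queued) the caller chose.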

vcn_v2_5_start_dpg_mode() -- the only caller visible in these matches: it receives indirect from its own caller (line 1018), brackets the whole programming sequence with bare if (indirect) checks (lines 1035 and 1115), and threads the flag through its direct register writes as well as the three helpers above (lines 1039, 1088 and 1099):

 1018    static int vcn_v2_5_start_dpg_mode(struct amdgpu_vcn_inst *vinst, bool indirect)
 1035    if (indirect)
 1039    vcn_v2_5_clock_gating_dpg_mode(vinst, 0, indirect);
 1046      VCN, 0, mmUVD_VCPU_CNTL), tmp, 0, indirect);
 1050      VCN, 0, mmUVD_MASTINT_EN), 0, 0, indirect);
 1062      VCN, 0, mmUVD_LMI_CTRL), tmp, 0, indirect);
 1066      0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect);
 1073      (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect);
 1080      (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect);
 1086      (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect);
 1088    vcn_v2_5_mc_resume_dpg_mode(vinst, indirect);
 1091      VCN, 0, mmUVD_REG_XX_MASK), 0x10, 0, indirect);
 1093      VCN, 0, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect);
 1097      VCN, 0, mmUVD_LMI_CTRL2), 0, 0, indirect);
 1099    vcn_v2_6_enable_ras(vinst, indirect);
 1103      VCN, 0, mmUVD_RB_ARB_CTRL), 0, 0, indirect);
 1108      VCN, 0, mmUVD_VCPU_CNTL), tmp, 0, indirect);
 1113      UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect);
 1115    if (indirect)
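
The two bare if (indirect) matches at lines 1035 and 1115 bracket the whole start sequence. My understanding is that the first resets the write pointer of a CPU-visible SRAM/command buffer and the second hands the accumulated table to the PSP firmware (psp_update_vcn_sram() in the kernels I recall), but those calls are cut off by the per-line matching, so the skeleton below is an assumption-labeled sketch of the ordering rather than the actual function body; every name other than the function names visible above is a placeholder.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder per-instance state: a queue of offset/value pairs plus its length. */
struct vcn_inst_sketch {
	uint32_t table[512];
	unsigned len;
};

static void dpg_table_reset(struct vcn_inst_sketch *inst)  { inst->len = 0; }

/* Stand-in for handing the queued writes to PSP firmware for replay. */
static void dpg_table_submit(struct vcn_inst_sketch *inst)
{
	printf("submitting %u register writes in one batch\n", inst->len / 2);
}

static void dpg_write(struct vcn_inst_sketch *inst, uint32_t offset,
		      uint32_t value, bool indirect)
{
	if (indirect) {
		inst->table[inst->len++] = offset;
		inst->table[inst->len++] = value;
	}
	/* direct path (immediate MMIO write) omitted in this sketch */
}

/* Skeleton mirroring the order of the matches above: clock gating first,
 * then VCPU/LMI/MPC setup, mc_resume, RAS enable, and the final unmasking. */
static int start_dpg_mode_sketch(struct vcn_inst_sketch *inst, bool indirect)
{
	if (indirect)                        /* line 1035 in the listing */
		dpg_table_reset(inst);

	dpg_write(inst, 0x01 /* UVD_CGC_CTRL    */, 0, indirect); /* clock gating */
	dpg_write(inst, 0x02 /* UVD_VCPU_CNTL   */, 0, indirect);
	dpg_write(inst, 0x03 /* UVD_LMI_CTRL    */, 0, indirect);
	dpg_write(inst, 0x04 /* CACHE/BAR setup */, 0, indirect); /* mc_resume    */
	dpg_write(inst, 0x05 /* RAS control     */, 0, indirect); /* enable_ras   */
	dpg_write(inst, 0x06 /* UVD_MASTINT_EN  */, 0, indirect);

	if (indirect)                        /* line 1115 in the listing */
		dpg_table_submit(inst);

	return 0;
}

int main(void)
{
	struct vcn_inst_sketch inst = { 0 };

	start_dpg_mode_sketch(&inst, true);
	return 0;
}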