Lines Matching full:indirect (drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c)

384  * @indirect: indirectly write sram
388 static void vcn_v4_0_5_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in vcn_v4_0_5_mc_resume_dpg_mode() argument
398 if (!indirect) { in vcn_v4_0_5_mc_resume_dpg_mode()
402 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
406 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
408 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
411 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
413 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
415 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
421 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
424 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
428 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
431 if (!indirect) in vcn_v4_0_5_mc_resume_dpg_mode()
433 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), size, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
436 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
439 if (!indirect) { in vcn_v4_0_5_mc_resume_dpg_mode()
442 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
445 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
447 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
450 VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
452 VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
454 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
458 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
464 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
468 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
470 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
472 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
477 lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
480 upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
482 VCN, inst_idx, regUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
485 AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_vcn4_fw_shared)), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
489 VCN, 0, regUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
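Every match in this group funnels through the WREG32_SOC15_DPG_MODE macro, whose last two arguments are the mask-enable bit (the literal 0 above) and the indirect flag. A minimal sketch of the two paths it selects between, modeled on the macro in drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h; treat exact register and field names as assumptions if your tree differs:

#define WREG32_SOC15_DPG_MODE(inst_idx, offset, value, mask_en, indirect)         \
	do {                                                                       \
		if (!indirect) {                                                   \
			/* direct: program the target register through the */      \
			/* UVD_DPG_LMA_DATA/CTL local-memory-access port */        \
			WREG32_SOC15(VCN, inst_idx, regUVD_DPG_LMA_DATA, value);   \
			WREG32_SOC15(VCN, inst_idx, regUVD_DPG_LMA_CTL,            \
				(0x1 << UVD_DPG_LMA_CTL__READ_WRITE__SHIFT |       \
				 mask_en << UVD_DPG_LMA_CTL__MASK_EN__SHIFT |      \
				 offset << UVD_DPG_LMA_CTL__READ_WRITE_ADDR__SHIFT)); \
		} else {                                                           \
			/* indirect: stage an offset/value pair in the DPG */      \
			/* SRAM buffer; the PSP replays the list later */          \
			*adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ =           \
				offset << UVD_DPG_LMA_CTL__READ_WRITE_ADDR__SHIFT; \
			*adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ = value;    \
		}                                                                  \
	} while (0)

So "indirectly write sram" in the kernel-doc above means: with indirect set, vcn_v4_0_5_mc_resume_dpg_mode() touches no hardware at all; it only appends register writes to a command list in the DPG SRAM buffer.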
710 * @indirect: indirectly write sram
715 int inst_idx, uint8_t indirect) in vcn_v4_0_5_disable_clock_gating_dpg_mode() argument
746 VCN, inst_idx, regUVD_CGC_CTRL), reg_data, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
750 VCN, inst_idx, regUVD_CGC_GATE), 0, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
754 VCN, inst_idx, regUVD_SUVD_CGC_GATE), 1, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
758 VCN, inst_idx, regUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
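Unlike the resume path, these clock-gating matches pass sram_sel rather than a literal 0 in the mask-enable position, so the caller controls masking per invocation. A condensed sketch of the function the matches come from, with the long UVD_CGC_CTRL per-block bit list elided (the surviving calls correspond to source lines 746-758 above):

static void vcn_v4_0_5_disable_clock_gating_dpg_mode(struct amdgpu_device *adev,
		uint8_t sram_sel, int inst_idx, uint8_t indirect)
{
	uint32_t reg_data = 0;

	/* enable sw clock gating control */
	reg_data = 0 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
	reg_data |= 1 << UVD_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
	reg_data |= 4 << UVD_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
	/* ... per-block *_MODE_MASK bits cleared here ... */
	WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
		VCN, inst_idx, regUVD_CGC_CTRL), reg_data, sram_sel, indirect);

	/* turn off clock gating */
	WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
		VCN, inst_idx, regUVD_CGC_GATE), 0, sram_sel, indirect);

	/* turn on SUVD clock gating */
	WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
		VCN, inst_idx, regUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);

	/* turn on sw mode in UVD_SUVD_CGC_CTRL */
	WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
		VCN, inst_idx, regUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
}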
825 * @indirect: indirectly write sram
829 static int vcn_v4_0_5_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in vcn_v4_0_5_start_dpg_mode() argument
844 if (indirect) in vcn_v4_0_5_start_dpg_mode()
849 vcn_v4_0_5_disable_clock_gating_dpg_mode(adev, 0, inst_idx, indirect); in vcn_v4_0_5_start_dpg_mode()
855 VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
859 VCN, inst_idx, regUVD_MASTINT_EN), 0, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
871 VCN, inst_idx, regUVD_LMI_CTRL), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
875 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
882 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect); in vcn_v4_0_5_start_dpg_mode()
889 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect); in vcn_v4_0_5_start_dpg_mode()
895 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect); in vcn_v4_0_5_start_dpg_mode()
897 vcn_v4_0_5_mc_resume_dpg_mode(adev, inst_idx, indirect); in vcn_v4_0_5_start_dpg_mode()
902 VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
907 VCN, inst_idx, regUVD_LMI_CTRL2), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
912 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
915 if (indirect) in vcn_v4_0_5_start_dpg_mode()
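The two bare if (indirect) matches (source lines 844 and 915) bracket the whole start sequence. A sketch of what they guard, assuming the dpg_sram_* bookkeeping fields and the psp_update_vcn_sram() helper used by the other VCN DPG implementations (newer trees wrap the same call as amdgpu_vcn_psp_update_sram()):

	/* line 844: before any staged writes, rewind the SRAM cursor */
	if (indirect)
		adev->vcn.inst[inst_idx].dpg_sram_curr_addr =
			(uint32_t *)adev->vcn.inst[inst_idx].dpg_sram_cpu_addr;

	/* ... disable clock gating, program UVD_VCPU_CNTL/LMI/MPC,
	 * vcn_v4_0_5_mc_resume_dpg_mode(), enable VCPU interrupts ... */

	/* line 915: hand the accumulated offset/value list to the PSP,
	 * which replays it into the VCN SRAM as the block powers up */
	if (indirect)
		psp_update_vcn_sram(adev, inst_idx,
			adev->vcn.inst[inst_idx].dpg_sram_gpu_addr,
			(uint32_t)((uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_curr_addr -
				   (uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_cpu_addr));

This is also why the resume path above keeps its !indirect special cases: in the staged variant the firmware cache-window registers are queued as zeros, apparently left for the PSP to fill in with the secure firmware address during replay.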