Lines Matching full:indirect

58 				  int inst_idx, bool indirect);

In vcn_v4_0_3_mc_resume_dpg_mode():

401	 * @indirect: indirectly write sram
405	static void vcn_v4_0_3_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect)
415	if (!indirect) {
419	inst_idx].tmr_mc_addr_lo), 0, indirect);
423	inst_idx].tmr_mc_addr_hi), 0, indirect);
425	VCN, 0, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
428	VCN, 0, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
430	VCN, 0, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
432	VCN, 0, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
438	lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
441	upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
445	AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect);
448	if (!indirect)
450	VCN, 0, regUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
453	VCN, 0, regUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
456	if (!indirect) {
459	lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
462	upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
464	VCN, 0, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
467	VCN, 0, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
469	VCN, 0, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
471	VCN, 0, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
474	VCN, 0, regUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
480	AMDGPU_VCN_STACK_SIZE), 0, indirect);
484	AMDGPU_VCN_STACK_SIZE), 0, indirect);
486	VCN, 0, regUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
488	VCN, 0, regUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
493	lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
496	upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
498	VCN, 0, regUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
501	AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_vcn4_fw_shared)), 0, indirect);
505	VCN, 0, regUVD_GFX8_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
507	VCN, 0, regUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
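
All of the numbered writes above funnel through the driver's DPG-mode write helper, whose final two arguments are a mask-enable selector (the literal 0 in most calls here) and the indirect flag. Below is a minimal standalone C model of the two paths that flag selects; it is not the driver macro. The names (wreg_dpg_mode, mmio, dpg_sram) are illustrative, and in the real driver even the direct path goes through a DPG LMA data/control register pair rather than a plain store.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define DPG_SRAM_WORDS 64

static uint32_t mmio[256];                  /* stand-in register file */
static uint32_t dpg_sram[DPG_SRAM_WORDS];   /* stand-in DPG SRAM image */
static uint32_t *dpg_sram_curr = dpg_sram;  /* queue write cursor */

static void wreg_dpg_mode(uint32_t offset, uint32_t value,
			  uint32_t mask_en, bool indirect)
{
	(void)mask_en; /* forwarded to hardware in the driver; unused here */
	if (!indirect) {
		mmio[offset] = value;      /* direct: program the register now */
	} else {
		*dpg_sram_curr++ = offset; /* indirect: queue offset ...      */
		*dpg_sram_curr++ = value;  /* ... and value for a later flush */
	}
}

int main(void)
{
	bool indirect = true;

	wreg_dpg_mode(0x10, 0xCAFE, 0, indirect); /* like a CACHE_OFFSET0 write */
	wreg_dpg_mode(0x11, 0x0040, 0, indirect); /* like a CACHE_SIZE0 write   */
	printf("queued %td words for later flush\n", dpg_sram_curr - dpg_sram);
	return 0;
}

With indirect false, the same two calls would land in mmio[] immediately and nothing would be queued.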

In vcn_v4_0_3_disable_clock_gating_dpg_mode():

609	 * @indirect: indirectly write sram
614	int inst_idx, uint8_t indirect)
639	VCN, 0, regUVD_CGC_CTRL), reg_data, sram_sel, indirect);
643	VCN, 0, regUVD_CGC_GATE), 0, sram_sel, indirect);
647	VCN, 0, regUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);
651	VCN, 0, regUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
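
Two details in this helper differ from the resume path above: the slot that other call sites fill with a literal 0 is forwarded here as sram_sel (the mask-enable selector of the underlying write helper), and the indirect parameter itself is declared uint8_t rather than bool (line 614).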

In vcn_v4_0_3_start_dpg_mode():

710	 * @indirect: indirectly write sram
714	static int vcn_v4_0_3_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect)
732	if (indirect) {
743	vcn_v4_0_3_disable_clock_gating_dpg_mode(adev, 0, inst_idx, indirect);
751	VCN, 0, regUVD_VCPU_CNTL), tmp, 0, indirect);
755	VCN, 0, regUVD_MASTINT_EN), 0, 0, indirect);
767	VCN, 0, regUVD_LMI_CTRL), tmp, 0, indirect);
771	0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect);
778	(0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect);
785	(0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect);
791	(0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect);
793	vcn_v4_0_3_mc_resume_dpg_mode(adev, inst_idx, indirect);
798	VCN, 0, regUVD_VCPU_CNTL), tmp, 0, indirect);
803	VCN, 0, regUVD_LMI_CTRL2), tmp, 0, indirect);
805	vcn_v4_0_3_enable_ras(adev, inst_idx, indirect);
810	UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect);
812	if (indirect)
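
The guards at lines 732 and 812 bracket the whole indirect lifecycle: when indirect is set, the SRAM queue is prepared before any of the writes above, the helpers then queue their writes, and a final step hands the queued image off (in the driver, via a PSP call) to do the actual programming. A standalone sketch of that sequence, with hypothetical names dpg_begin/dpg_write/dpg_flush and printf standing in for hardware access:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define DPG_SRAM_WORDS 128

struct dpg_queue {
	uint32_t sram[DPG_SRAM_WORDS];
	uint32_t *curr;
};

static void dpg_begin(struct dpg_queue *q, bool indirect)
{
	if (indirect)
		q->curr = q->sram;  /* rewind the queue before any writes */
}

static void dpg_write(struct dpg_queue *q, uint32_t off, uint32_t val,
		      bool indirect)
{
	if (indirect) {
		*q->curr++ = off;   /* queue for the flush step */
		*q->curr++ = val;
	} else {
		printf("direct:  [%#x] = %#x\n", (unsigned)off, (unsigned)val);
	}
}

static void dpg_flush(struct dpg_queue *q, bool indirect)
{
	if (!indirect)
		return;             /* direct writes already landed */
	for (uint32_t *p = q->sram; p < q->curr; p += 2)
		printf("flushed: [%#x] = %#x\n", (unsigned)p[0], (unsigned)p[1]);
}

int main(void)
{
	struct dpg_queue q = {0};
	bool indirect = true;

	q.curr = q.sram;
	dpg_begin(&q, indirect);
	dpg_write(&q, 0x20, 0x1, indirect); /* like the VCPU_CNTL write */
	dpg_write(&q, 0x21, 0x0, indirect); /* like the MASTINT_EN write */
	dpg_flush(&q, indirect);
	return 0;
}

The direct path needs no flush step, which is why the if (indirect) checks appear only at the two ends of the function.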

In vcn_v4_0_3_enable_ras():

1748	int inst_idx, bool indirect)
1761	tmp, 0, indirect);
1766	tmp, 0, indirect);
1771	tmp, 0, indirect);