Lines Matching full:indirect

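All of the matches below come from three DPG (dynamic power gating) helpers in the amdgpu VCN 4.0.5 code (vcn_v4_0_5.c): vcn_v4_0_5_mc_resume_dpg_mode(), vcn_v4_0_5_disable_clock_gating_dpg_mode() and vcn_v4_0_5_start_dpg_mode(). In each of them the kernel-doc describes indirect as "indirectly write sram" (source lines 455, 787 and 905): when the flag is clear, register programming happens immediately; when it is set, the (offset, value) pairs are staged into DPG SRAM and committed in one batch at the end of the start sequence. A short sketch of the pattern follows each group of matches below.
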
455  * @indirect: indirectly write sram
460 bool indirect) in vcn_v4_0_5_mc_resume_dpg_mode() argument
472 if (!indirect) { in vcn_v4_0_5_mc_resume_dpg_mode()
476 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
480 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
482 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
485 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
487 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
489 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
495 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
498 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
502 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
505 if (!indirect) in vcn_v4_0_5_mc_resume_dpg_mode()
507 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), size, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
510 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
513 if (!indirect) { in vcn_v4_0_5_mc_resume_dpg_mode()
516 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
519 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
521 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
524 VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
526 VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
528 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
532 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
538 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
542 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
544 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
546 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
551 lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
554 upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
556 VCN, inst_idx, regUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
559 AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_vcn4_fw_shared)), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
564 adev->gfx.config.gb_addr_config, 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
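
Every hit in vcn_v4_0_5_mc_resume_dpg_mode() above is the tail of a register-write macro call whose final argument is indirect. Below is a minimal sketch of that direct-vs-indirect split, assuming a simplified context struct; dpg_ctx, dpg_write_reg and mmio_write are hypothetical stand-ins for the kernel's WREG32_SOC15_DPG_MODE machinery (whose real "direct" path goes through the UVD_DPG_LMA register window rather than the plain write shown here):

    /*
     * Sketch only, not the kernel macro. When "indirect" is false the
     * register is programmed right away; when it is true the (offset,
     * value) pair is appended to a DPG SRAM staging buffer that is
     * flushed after setup completes.
     */
    #include <stdbool.h>
    #include <stdint.h>

    struct dpg_ctx {
        uint32_t *sram_curr;    /* next free slot in the staging buffer */
        void (*mmio_write)(uint32_t offset, uint32_t value);
    };

    static void dpg_write_reg(struct dpg_ctx *ctx, uint32_t offset,
                              uint32_t value, bool indirect)
    {
        if (!indirect) {
            /* Direct path: program the register immediately. */
            ctx->mmio_write(offset, value);
        } else {
            /* Indirect path: stage the pair for a later batch commit. */
            *ctx->sram_curr++ = offset;
            *ctx->sram_curr++ = value;
        }
    }

The repeated "0, indirect)" tails visible in the matches above (source lines 476-564) are consistent with a mask argument of 0 followed by the indirect flag as the macro's last two parameters.
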
787 * @indirect: indirectly write sram
793 uint8_t indirect) in vcn_v4_0_5_disable_clock_gating_dpg_mode() argument
826 VCN, inst_idx, regUVD_CGC_CTRL), reg_data, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
830 VCN, inst_idx, regUVD_CGC_GATE), 0, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
834 VCN, inst_idx, regUVD_SUVD_CGC_GATE), 1, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
838 VCN, inst_idx, regUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect); in vcn_v4_0_5_disable_clock_gating_dpg_mode()
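
In vcn_v4_0_5_disable_clock_gating_dpg_mode(), indirect is a uint8_t (source line 793) and is forwarded together with sram_sel as the last two arguments of every write (source lines 826-838); vcn_v4_0_5_start_dpg_mode() calls the helper with sram_sel = 0 (source line 932, below). A sketch of that shape, building on the dpg_ctx/dpg_write_reg sketch above; dpg_write_reg_sel and the REG_* constants are hypothetical, and what sram_sel selects is not visible in these matches, so the sketch only forwards it:

    /* Hypothetical offsets; the driver derives the real ones through
     * the SOC15_DPG_MODE_OFFSET(VCN, inst_idx, reg...) macro. */
    enum {
        REG_UVD_CGC_CTRL,
        REG_UVD_CGC_GATE,
        REG_UVD_SUVD_CGC_GATE,
        REG_UVD_SUVD_CGC_CTRL,
    };

    static void dpg_write_reg_sel(struct dpg_ctx *ctx, uint32_t offset,
                                  uint32_t value, uint8_t sram_sel,
                                  uint8_t indirect)
    {
        (void)sram_sel;    /* forwarded by the driver; not modeled here */
        dpg_write_reg(ctx, offset, value, indirect);
    }

    /* Values (reg_data, 0, 1, 0) mirror the matches at 826-838 above. */
    static void sketch_disable_clock_gating_dpg(struct dpg_ctx *ctx,
                                                uint32_t reg_data,
                                                uint8_t sram_sel,
                                                uint8_t indirect)
    {
        dpg_write_reg_sel(ctx, REG_UVD_CGC_CTRL, reg_data, sram_sel, indirect);
        dpg_write_reg_sel(ctx, REG_UVD_CGC_GATE, 0, sram_sel, indirect);
        dpg_write_reg_sel(ctx, REG_UVD_SUVD_CGC_GATE, 1, sram_sel, indirect);
        dpg_write_reg_sel(ctx, REG_UVD_SUVD_CGC_CTRL, 0, sram_sel, indirect);
    }
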
905 * @indirect: indirectly write sram
910 bool indirect) in vcn_v4_0_5_start_dpg_mode() argument
927 if (indirect) in vcn_v4_0_5_start_dpg_mode()
932 vcn_v4_0_5_disable_clock_gating_dpg_mode(vinst, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
938 VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
942 VCN, inst_idx, regUVD_MASTINT_EN), 0, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
954 VCN, inst_idx, regUVD_LMI_CTRL), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
958 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
965 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect); in vcn_v4_0_5_start_dpg_mode()
972 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect); in vcn_v4_0_5_start_dpg_mode()
978 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect); in vcn_v4_0_5_start_dpg_mode()
980 vcn_v4_0_5_mc_resume_dpg_mode(vinst, indirect); in vcn_v4_0_5_start_dpg_mode()
985 VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
990 VCN, inst_idx, regUVD_LMI_CTRL2), tmp, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
995 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v4_0_5_start_dpg_mode()
997 if (indirect) in vcn_v4_0_5_start_dpg_mode()
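
Finally, vcn_v4_0_5_start_dpg_mode() brackets the whole sequence with two "if (indirect)" checks (source lines 927 and 997): the first rewinds the SRAM staging pointer before any programming, and the last commits the accumulated table once everything, including the mc_resume programming at source line 980, has been staged. A sketch of that bracketing, again on the hypothetical dpg_ctx; in the real driver the commit is a request to the PSP firmware, not the CPU replay shown here:

    /* Hypothetical commit: stand-in for the driver's PSP-mediated
     * update of the staged DPG SRAM table. */
    static void dpg_commit_sram(struct dpg_ctx *ctx,
                                const uint32_t *sram_base)
    {
        while (sram_base < ctx->sram_curr) {
            uint32_t offset = *sram_base++;
            uint32_t value = *sram_base++;
            ctx->mmio_write(offset, value);
        }
    }

    static void sketch_start_dpg_mode(struct dpg_ctx *ctx,
                                      uint32_t *sram_base, bool indirect)
    {
        if (indirect)    /* matches the hit at source line 927 */
            ctx->sram_curr = sram_base;    /* rewind staging buffer */

        /* ... clock-gating disable, UVD_VCPU_CNTL, LMI/MPC setup and
         * the mc_resume programming go here, each call forwarding
         * "indirect" unchanged ... */

        if (indirect)    /* matches the hit at source line 997 */
            dpg_commit_sram(ctx, sram_base);    /* flush staged writes */
    }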