Lines matching the full-word identifier "indirect" in the amdgpu VCN 5.0.0 driver (vcn_v5_0_0.c); the number at the start of each match is its source line. As the kernel-doc fragments below say, the flag means "indirectly write sram": the same DPG-mode programming sequence either writes each register directly or stages the writes into the instance's indirect SRAM buffer to be applied later. The matches fall into three spans of the file.
In vcn_v5_0_0_mc_resume_dpg_mode(), where indirect is a bool argument:

  433   * @indirect: indirectly write sram
  438   bool indirect)
  450   if (!indirect) {
  453   (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect);
  456   (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect);
  458   VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
  461   VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
  463   VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
  465   VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
  471   lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
  474   upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
  478   AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect);
  481   if (!indirect)
  483   VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
  486   VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
  489   if (!indirect) {
  492   lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
  495   upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
  497   VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
  500   VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
  502   VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
  504   VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
  507   VCN, inst_idx, regUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
  512   lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
  515   upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
  517   VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
  519   VCN, inst_idx, regUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
  524   lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
  527   upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
  529   VCN, inst_idx, regUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
  532   AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_vcn5_fw_shared)), 0, indirect);
  537   adev->gfx.config.gb_addr_config, 0, indirect);
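Every match in this function is the tail of one DPG-mode register write whose final arguments are a value, a mask-enable, and the indirect flag. Below is a minimal user-space model of that dual path; dpg_write, regs, sram and sram_curr are hypothetical names standing in for the driver's write macro and per-instance SRAM cursor, and the mask-enable argument is omitted for brevity.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for driver state: a register file and the
 * per-instance DPG indirect SRAM staging buffer. */
static uint32_t regs[1024];
static uint32_t sram[2048];
static uint32_t *sram_curr = sram;

/* One DPG-mode register write: direct MMIO when indirect is false,
 * otherwise append an (offset, value) pair to the staging buffer. */
static void dpg_write(uint32_t offset, uint32_t value, bool indirect)
{
	if (!indirect) {
		regs[offset] = value;
	} else {
		*sram_curr++ = offset;
		*sram_curr++ = value;
	}
}

int main(void)
{
	dpg_write(0x10, 0x100, false);	/* hits the register now */
	dpg_write(0x11, 0x200, true);	/* staged for later      */
	printf("staged %td dwords\n", sram_curr - sram);
	return 0;
}

The if (!indirect) branches in the listing fit this reading: the direct branch programs the real firmware addresses and cache size (lines 453/456, 483, 492/495), while the corresponding else branch stages zeros in their place (lines 461/463, 486, 500/502).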
In a helper between the two functions (the search output does not name it); note that indirect here is a uint8_t, not a bool:

  675   * @indirect: indirectly write sram
  681   uint8_t indirect)
In vcn_v5_0_0_start_dpg_mode(), where indirect is again a bool argument:

  703   * @indirect: indirectly write sram
  708   bool indirect)
  726   if (indirect)
  733   VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect);
  737   VCN, inst_idx, regUVD_MASTINT_EN), 0, 0, indirect);
  749   VCN, inst_idx, regUVD_LMI_CTRL), tmp, 0, indirect);
  751   vcn_v5_0_0_mc_resume_dpg_mode(vinst, indirect);
  756   VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect);
  761   VCN, inst_idx, regUVD_LMI_CTRL2), tmp, 0, indirect);
  766   UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect);
  768   if (indirect)
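The two bare if (indirect) matches at lines 726 and 768 bracket the whole start sequence: before any write is issued the staging cursor is rewound to the start of the SRAM buffer, and after the last write the finished buffer is handed off to be applied to the VCN in one batch (in current kernels this hand-off goes through the PSP, via amdgpu_vcn_psp_update_sram()). Note line 751: the MC-resume sequence from the first span runs between the two brackets, so its staged writes land in the same buffer. A self-contained sketch of that bracket, using the same hypothetical (offset, value) staging layout as the previous sketch; dpg_begin and dpg_end are invented names, and the CPU replay loop stands in for the firmware hand-off:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Same hypothetical staging layout as the previous sketch. */
static uint32_t regs[1024];
static uint32_t sram[2048];
static size_t sram_len;	/* dwords staged so far */

/* Models line 726: rewind the staging cursor before queueing writes. */
static void dpg_begin(bool indirect)
{
	if (indirect)
		sram_len = 0;
}

/* Models line 768: replay the staged (offset, value) pairs in order.
 * In the driver this is done by firmware, not a CPU loop. */
static void dpg_end(bool indirect)
{
	if (!indirect)
		return;
	for (size_t i = 0; i + 1 < sram_len; i += 2)
		regs[sram[i]] = sram[i + 1];
}

int main(void)
{
	dpg_begin(true);
	sram[sram_len++] = 0x2a;	/* staged offset */
	sram[sram_len++] = 0x1234;	/* staged value  */
	dpg_end(true);
	return regs[0x2a] == 0x1234 ? 0 : 1;
}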