Lines Matching full:indirect
In vcn_v5_0_1_mc_resume_dpg_mode():
  373   * @indirect: indirectly write sram
  378  bool indirect)    (argument)
  390  if (!indirect) {
  394  inst_idx].tmr_mc_addr_lo), 0, indirect);
  398  inst_idx].tmr_mc_addr_hi), 0, indirect);
  400  VCN, 0, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
  403  VCN, 0, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
  405  VCN, 0, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
  407  VCN, 0, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
  413  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
  416  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
  420  AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect);
  423  if (!indirect)
  425  VCN, 0, regUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
  428  VCN, 0, regUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
  431  if (!indirect) {
  434  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
  437  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
  439  VCN, 0, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
  442  VCN, 0, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
  444  VCN, 0, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
  446  VCN, 0, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
  449  VCN, 0, regUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
  455  AMDGPU_VCN_STACK_SIZE), 0, indirect);
  459  AMDGPU_VCN_STACK_SIZE), 0, indirect);
  461  VCN, 0, regUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
  463  VCN, 0, regUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
  468  lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
  471  upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
  473  VCN, 0, regUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
  476  AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_vcn5_fw_shared)), 0, indirect);
  480  VCN, 0, regUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
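The matches above are the tail ends of the driver's DPG-mode register write calls: each one passes the caller's indirect flag as the final argument, so the same code path either programs the memory-controller and VCPU cache registers directly (the "if (!indirect)" branches) or stages the register/value pairs for the indirect-SRAM path that the kernel-doc line 373 refers to, where firmware applies them later. Below is a minimal standalone sketch of that dispatch pattern; every name in it (dpg_ctx, dpg_write, mmio_write, the register offsets) is hypothetical and only stands in for the driver's real macros and per-instance bookkeeping.

#include <inttypes.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct dpg_ctx {
	uint32_t *sram_buf;     /* CPU view of the scratch buffer */
	uint32_t *sram_cur;     /* append cursor into that buffer */
};

/* Stand-in for a real MMIO register write. */
static void mmio_write(uint32_t reg, uint32_t val)
{
	printf("MMIO  reg 0x%04" PRIx32 " <- 0x%08" PRIx32 "\n", reg, val);
}

/*
 * Either program the register immediately, or queue the (reg, value)
 * pair so firmware can replay it later.  This mirrors the shape of the
 * calls above, where "indirect" is always the last argument.
 */
static void dpg_write(struct dpg_ctx *ctx, uint32_t reg, uint32_t val,
		      bool indirect)
{
	if (!indirect) {
		mmio_write(reg, val);
		return;
	}
	*ctx->sram_cur++ = reg;
	*ctx->sram_cur++ = val;
}

int main(void)
{
	uint32_t buf[64];
	struct dpg_ctx ctx = { .sram_buf = buf, .sram_cur = buf };

	/* Direct write: hits the register right away. */
	dpg_write(&ctx, 0x0100 /* hypothetical CACHE_OFFSET0 */, 0, false);

	/* Indirect writes: only land in the scratch buffer for now. */
	dpg_write(&ctx, 0x0104 /* hypothetical CACHE_SIZE0 */, 0x1000, true);
	dpg_write(&ctx, 0x0108 /* hypothetical 64BIT_BAR_LOW */, 0xdead0000, true);

	printf("queued %td dwords for firmware to apply\n",
	       ctx.sram_cur - ctx.sram_buf);
	return 0;
}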
In vcn_v5_0_1_start_dpg_mode():
  555   * @indirect: indirectly write sram
  560  bool indirect)    (argument)
  582  if (indirect) {
  594  VCN, 0, regUVD_VCPU_CNTL), tmp, 0, indirect);
  598  VCN, 0, regUVD_MASTINT_EN), 0, 0, indirect);
  610  VCN, 0, regUVD_LMI_CTRL), tmp, 0, indirect);
  612  vcn_v5_0_1_mc_resume_dpg_mode(vinst, indirect);
  617  VCN, 0, regUVD_VCPU_CNTL), tmp, 0, indirect);
  622  VCN, 0, regUVD_LMI_CTRL2), tmp, 0, indirect);
  627  UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect);
  629  if (indirect)
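In the start routine the flag also brackets the whole sequence: the "if (indirect) {" at line 582 comes before any of the register writes (in comparable VCN DPG code this is where the SRAM write cursor is prepared), the flag is then threaded through every write, including the call into vcn_v5_0_1_mc_resume_dpg_mode() at line 612, and the trailing "if (indirect)" at line 629 follows them, which is presumably where the queued writes are handed to firmware once programming is complete. A compact standalone sketch of that begin/append/commit shape, again with hypothetical names (dpg_begin, dpg_commit) rather than the driver's actual API:

#include <inttypes.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct dpg_seq {
	uint32_t buf[64];
	uint32_t count;         /* dwords queued so far */
};

/* Counterpart of the leading "if (indirect) {" (line 582): prepare the queue. */
static void dpg_begin(struct dpg_seq *seq, bool indirect)
{
	if (indirect)
		seq->count = 0;
}

/* Every register write in between threads the same flag through. */
static void dpg_write(struct dpg_seq *seq, uint32_t reg, uint32_t val,
		      bool indirect)
{
	if (!indirect) {
		printf("MMIO  reg 0x%04" PRIx32 " <- 0x%08" PRIx32 "\n", reg, val);
		return;
	}
	seq->buf[seq->count++] = reg;
	seq->buf[seq->count++] = val;
}

/* Counterpart of the trailing "if (indirect)" (line 629): hand the queue to firmware. */
static void dpg_commit(struct dpg_seq *seq, bool indirect)
{
	if (indirect)
		printf("submit %" PRIu32 " dwords to firmware\n", seq->count);
}

int main(void)
{
	struct dpg_seq seq = { { 0 }, 0 };
	bool indirect = true;

	dpg_begin(&seq, indirect);
	dpg_write(&seq, 0x0200 /* hypothetical VCPU_CNTL */, 0x1, indirect);
	dpg_write(&seq, 0x0204 /* hypothetical LMI_CTRL */, 0x8, indirect);
	dpg_commit(&seq, indirect);
	return 0;
}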