Lines matching full:indirect (whole-word matches for "indirect" in the amdgpu VCN 2.0 driver, vcn_v2_0.c)

434 bool indirect) in vcn_v2_0_mc_resume_dpg_mode() argument
442 if (!indirect) { in vcn_v2_0_mc_resume_dpg_mode()
445 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_lo), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
448 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_hi), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
450 UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
453 UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
455 UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
457 UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
463 lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
466 upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
470 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
473 if (!indirect) in vcn_v2_0_mc_resume_dpg_mode()
475 UVD, 0, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
478 UVD, 0, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
481 if (!indirect) { in vcn_v2_0_mc_resume_dpg_mode()
484 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
487 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
489 UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
492 UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
494 UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
496 UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
499 UVD, 0, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
504 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
507 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
509 UVD, 0, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
511 UVD, 0, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
516 lower_32_bits(adev->vcn.inst->fw_shared.gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
519 upper_32_bits(adev->vcn.inst->fw_shared.gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
521 UVD, 0, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
524 AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_fw_shared)), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
528 UVD, 0, mmUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
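
The fragments above are the tail ends of multi-line register-write macro calls; the final `indirect` argument decides, per write, whether the value is written to the register immediately or queued for the firmware to apply later. A minimal, self-contained sketch of that routing pattern, using hypothetical names (not the actual amdgpu macros):

/* Sketch only: "dpg_write" and "dpg_ctx" are made-up names illustrating
 * how a per-write "indirect" flag can route a register write either to
 * MMIO immediately or into a staging buffer of (offset, value) pairs. */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct dpg_ctx {
	uint32_t buf[64];      /* staged (offset, value) pairs */
	unsigned int count;
};

static void mmio_write(uint32_t offset, uint32_t value)
{
	printf("MMIO[0x%05x] <= 0x%08x\n", offset, value); /* stand-in */
}

static void dpg_write(struct dpg_ctx *ctx, uint32_t offset,
		      uint32_t value, bool indirect)
{
	if (!indirect) {
		mmio_write(offset, value);        /* direct register write */
	} else {
		ctx->buf[ctx->count++] = offset;  /* staged for later commit */
		ctx->buf[ctx->count++] = value;
	}
}
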
645 uint8_t sram_sel, uint8_t indirect) in vcn_v2_0_clock_gating_dpg_mode() argument
678 UVD, 0, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
682 UVD, 0, mmUVD_CGC_GATE), 0, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
686 UVD, 0, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
690 UVD, 0, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
848 static int vcn_v2_0_start_dpg_mode(struct amdgpu_vcn_inst *vinst, bool indirect) in vcn_v2_0_start_dpg_mode() argument
863 if (indirect) in vcn_v2_0_start_dpg_mode()
867 vcn_v2_0_clock_gating_dpg_mode(vinst, 0, indirect); in vcn_v2_0_start_dpg_mode()
874 UVD, 0, mmUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v2_0_start_dpg_mode()
878 UVD, 0, mmUVD_MASTINT_EN), 0, 0, indirect); in vcn_v2_0_start_dpg_mode()
890 UVD, 0, mmUVD_LMI_CTRL), tmp, 0, indirect); in vcn_v2_0_start_dpg_mode()
894 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect); in vcn_v2_0_start_dpg_mode()
901 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect); in vcn_v2_0_start_dpg_mode()
908 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect); in vcn_v2_0_start_dpg_mode()
914 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect); in vcn_v2_0_start_dpg_mode()
916 vcn_v2_0_mc_resume_dpg_mode(vinst, indirect); in vcn_v2_0_start_dpg_mode()
919 UVD, 0, mmUVD_REG_XX_MASK), 0x10, 0, indirect); in vcn_v2_0_start_dpg_mode()
921 UVD, 0, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect); in vcn_v2_0_start_dpg_mode()
925 UVD, 0, mmUVD_SOFT_RESET), 0, 0, indirect); in vcn_v2_0_start_dpg_mode()
930 0x1F << UVD_LMI_CTRL2__RE_OFLD_MIF_WR_REQ_NUM__SHIFT, 0, indirect); in vcn_v2_0_start_dpg_mode()
935 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v2_0_start_dpg_mode()
937 if (indirect) in vcn_v2_0_start_dpg_mode()
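
Note the bracketing in the fragments above: vcn_v2_0_start_dpg_mode() tests `indirect` once before programming anything and again after the last write, marking the start and the commit of the indirectly staged register programming. A self-contained sketch of the same shape, with hypothetical names (the real driver hands the staged writes to firmware rather than discarding them):

/* Sketch only: hypothetical types/functions showing the begin/commit
 * bracketing around a DPG start sequence driven by an "indirect" flag. */
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

struct dpg_stage {
	uint32_t entries[128];   /* staged (offset, value) pairs */
	unsigned int count;
};

static void dpg_begin(struct dpg_stage *st)
{
	memset(st, 0, sizeof(*st));   /* reset the staging buffer */
}

static void dpg_commit(struct dpg_stage *st)
{
	/* the real driver would pass st->entries to the firmware here */
	st->count = 0;
}

static int start_dpg_mode(struct dpg_stage *st, bool indirect)
{
	if (indirect)
		dpg_begin(st);

	/* ... program clock gating, VCPU control, LMI, MPC muxes and the
	 * memory-controller addresses, each call taking "indirect" ... */

	if (indirect)
		dpg_commit(st);
	return 0;
}
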
1523 * vcn_v2_0_dec_ring_emit_ib - execute indirect buffer
1527 * @ib: indirect buffer to execute
1530 * Write ring commands to execute the indirect buffer
1698 * vcn_v2_0_enc_ring_emit_ib - enc execute indirect buffer
1702 * @ib: indirect buffer to execute
1705 * Write enc ring commands to execute the indirect buffer
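
The two kernel-doc fragments above describe the decode/encode emit_ib callbacks: they do not copy the command buffer, they only write a handful of ring words that point the engine at the indirect buffer (IB). A self-contained sketch of that idea, with hypothetical types (the real callbacks write specific UVD registers through ring packets):

/* Sketch only: hypothetical ring/IB types showing what "emit an indirect
 * buffer" amounts to -- publishing the IB's GPU address and size. */
#include <stdint.h>

struct ring {
	uint32_t *wptr;          /* next free dword in the ring */
};

struct ib {
	uint64_t gpu_addr;       /* GPU virtual address of the command buffer */
	uint32_t length_dw;      /* IB length in dwords */
};

static void ring_write(struct ring *ring, uint32_t v)
{
	*ring->wptr++ = v;
}

static void ring_emit_ib(struct ring *ring, const struct ib *ib)
{
	ring_write(ring, (uint32_t)ib->gpu_addr);          /* IB base, low 32 bits  */
	ring_write(ring, (uint32_t)(ib->gpu_addr >> 32));  /* IB base, high 32 bits */
	ring_write(ring, ib->length_dw);                   /* IB size               */
}
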