Lines matching full:indirect (uses of the indirect flag in the VCN 4.0.5 DPG-mode MC-resume, clock-gating, and start paths)

467  * @indirect: indirectly write sram
472 bool indirect)
484 if (!indirect) {
488 0, indirect);
492 0, indirect);
494 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
497 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
499 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
501 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
507 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
510 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
514 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect);
517 if (!indirect)
519 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
522 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
525 if (!indirect) {
528 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
531 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
533 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
536 VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
538 VCN, inst_idx, regUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
540 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
544 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
550 0, indirect);
554 0, indirect);
556 VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
558 VCN, inst_idx, regUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
563 lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
566 upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);
568 VCN, inst_idx, regUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
571 AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_vcn4_fw_shared)), 0, indirect);
576 adev->gfx.config.gb_addr_config, 0, indirect);
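Every matched line above ends in the same indirect flag because the MC-resume writes all funnel through one DPG-mode write helper: with indirect false the VCPU cache base/offset/size registers are programmed over MMIO on the spot, and with indirect true each register offset/value pair is instead appended to a scratch SRAM table for firmware to apply later. A minimal sketch of that split follows, assuming a simplified dpg_write() helper and vcn_dpg_ctx buffer that stand in for the kernel's WREG32_SOC15_DPG_MODE macro and per-instance dpg_sram_curr_addr cursor; they are not the driver's real API.

#include <stdbool.h>
#include <stdint.h>

/* Sketch only: simplified stand-ins for the amdgpu DPG write path. */
struct vcn_dpg_ctx {
	uint32_t *sram_curr;                                 /* next free slot in the DPG scratch table */
	void (*mmio_write)(uint32_t offset, uint32_t value); /* direct register write path */
};

static void dpg_write(struct vcn_dpg_ctx *ctx, uint32_t offset,
		      uint32_t value, bool indirect)
{
	if (!indirect) {
		/* Program the register immediately over MMIO. */
		ctx->mmio_write(offset, value);
	} else {
		/* Queue an offset/value pair; firmware applies it later. */
		*ctx->sram_curr++ = offset;
		*ctx->sram_curr++ = value;
	}
}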
799 * @indirect: indirectly write sram
805 uint8_t indirect)
838 VCN, inst_idx, regUVD_CGC_CTRL), reg_data, sram_sel, indirect);
842 VCN, inst_idx, regUVD_CGC_GATE), 0, sram_sel, indirect);
846 VCN, inst_idx, regUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);
850 VCN, inst_idx, regUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
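The clock-gating fragments above follow the same calling shape, with sram_sel sitting in the slot where the MC-resume writes pass 0 (the mask-enable argument) and indirect again last. Reassembling one of the wrapped calls for readability; the leading WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(...)) portion is inferred from how other VCN DPG-mode code invokes the same helpers, so treat it as an illustration rather than a verbatim quote:

	/* Illustrative reconstruction of the wrapped call at line 838. */
	WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
		VCN, inst_idx, regUVD_CGC_CTRL), reg_data, sram_sel, indirect);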
917 * @indirect: indirectly write sram
922 bool indirect)
939 if (indirect)
944 vcn_v4_0_5_disable_clock_gating_dpg_mode(vinst, 0, indirect);
950 VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect);
954 VCN, inst_idx, regUVD_MASTINT_EN), 0, 0, indirect);
966 VCN, inst_idx, regUVD_LMI_CTRL), tmp, 0, indirect);
970 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect);
977 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect);
984 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect);
990 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect);
992 vcn_v4_0_5_mc_resume_dpg_mode(vinst, indirect);
997 VCN, inst_idx, regUVD_VCPU_CNTL), tmp, 0, indirect);
1002 VCN, inst_idx, regUVD_LMI_CTRL2), tmp, 0, indirect);
1007 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect);
1009 if (indirect)
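The start sequence is bracketed by the two if (indirect) checks visible at lines 939 and 1009: on entry the DPG scratch-SRAM write cursor is rewound so a fresh register/value table is built, and once the MASTINT enable has been queued the finished table is handed off for firmware (via the PSP) to program, instead of the driver writing the registers itself. A minimal sketch of that bracketing, with reset_sram_cursor(), program_start_sequence(), and submit_sram_to_fw() as hypothetical stand-ins for the kernel's dpg_sram_curr_addr reset and PSP SRAM-update call:

#include <stdbool.h>

/* Hypothetical stand-ins for the driver internals (not the real amdgpu API). */
void reset_sram_cursor(int inst_idx);                      /* rewind the DPG scratch table  */
void program_start_sequence(int inst_idx, bool indirect);  /* the writes listed above       */
void submit_sram_to_fw(int inst_idx);                      /* firmware applies the table    */

static int start_dpg_mode_sketch(int inst_idx, bool indirect)
{
	if (indirect)
		reset_sram_cursor(inst_idx);        /* build a fresh reg/value table */

	/* Clock gating off, VCPU/LMI/MPC setup, MC resume, interrupt enable. */
	program_start_sequence(inst_idx, indirect);

	if (indirect)
		submit_sram_to_fw(inst_idx);        /* hand the table to firmware    */

	return 0;
}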