/linux/drivers/gpu/drm/amd/amdgpu/
gfx_v9_4_3.c
    304  WREG32_SOC15(GC, GET_INST(GC, xcc_id), regCP_HQD_DEQUEUE_REQUEST, 0x2);  in gfx_v9_4_3_kiq_reset_hw_queue()
    305  WREG32_SOC15(GC, GET_INST(GC, xcc_id), regSPI_COMPUTE_QUEUE_RESET, 0x1);  in gfx_v9_4_3_kiq_reset_hw_queue()
    308  if (!(RREG32_SOC15(GC, GET_INST(GC, xcc_id), regCP_HQD_ACTIVE) & 1))  in gfx_v9_4_3_kiq_reset_hw_queue()
    353  dev_inst = GET_INST(GC, i);  in gfx_v9_4_3_init_golden_registers()
    426  scratch_reg0_offset = SOC15_REG_OFFSET(GC, GET_INST(GC, ring->xcc_id), regSCRATCH_REG0);  in gfx_v9_4_3_ring_test_ring()
    514  WREG32_SOC15(GC, GET_INST(GC, 0), regRLC_CAPTURE_GPU_CLOCK_COUNT, 1);  in gfx_v9_4_3_get_gpu_clock_counter()
    515  clock = (uint64_t)RREG32_SOC15(GC, GET_INST(GC, 0), regRLC_GPU_CLOCK_COUNT_LSB) |  in gfx_v9_4_3_get_gpu_clock_counter()
    516  ((uint64_t)RREG32_SOC15(GC, GET_INST(GC, 0), regRLC_GPU_CLOCK_COUNT_MSB) << 32ULL);  in gfx_v9_4_3_get_gpu_clock_counter()
    713  WREG32_SOC15_RLC_SHADOW_EX(reg, GC, GET_INST(GC, xcc_id), regGRBM_GFX_INDEX, data);  in gfx_v9_4_3_xcc_select_se_sh()
    718  WREG32_SOC15_RLC(GC, GET_INST(G  in wave_read_ind()
    [all...]
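The regRLC_*GPU_CLOCK_COUNT* matches above (514-516) show a small pattern worth spelling out: latch the free-running counter, then read its two 32-bit halves and merge them into a 64-bit value. The sketch below models only that step against a fake register file; the register names and the read32()/write32() helpers are hypothetical stand-ins, not the driver's RREG32_SOC15/WREG32_SOC15 macros.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical register file standing in for MMIO access. */
enum { REG_CAPTURE = 0, REG_CNT_LSB, REG_CNT_MSB, REG_MAX };
static uint32_t fake_regs[REG_MAX];

static void write32(int reg, uint32_t val)
{
	fake_regs[reg] = val;
	/* pretend the hardware latches the counter on the capture write */
	if (reg == REG_CAPTURE && val == 1) {
		fake_regs[REG_CNT_LSB] = 0x89abcdefu;
		fake_regs[REG_CNT_MSB] = 0x00004567u;
	}
}

static uint32_t read32(int reg)
{
	return fake_regs[reg];
}

/* Latch, read LSB and MSB, combine: the shape of the clock-counter read. */
static uint64_t get_gpu_clock_counter(void)
{
	write32(REG_CAPTURE, 1);
	return (uint64_t)read32(REG_CNT_LSB) |
	       ((uint64_t)read32(REG_CNT_MSB) << 32);
}

int main(void)
{
	printf("clock = 0x%llx\n", (unsigned long long)get_gpu_clock_counter());
	return 0;
}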
amdgpu_amdkfd_gfx_v9.c
     54  soc15_grbm_select(adev, mec, pipe, queue, vmid, GET_INST(GC, inst));  in kgd_gfx_v9_lock_srbm()
     59  soc15_grbm_select(adev, 0, 0, 0, 0, GET_INST(GC, inst));  in kgd_gfx_v9_unlock_srbm()
     94  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), mmSH_MEM_CONFIG, sh_mem_config);  in kgd_gfx_v9_program_sh_mem_settings()
     95  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), mmSH_MEM_BASES, sh_mem_bases);  in kgd_gfx_v9_program_sh_mem_settings()
    171  WREG32_SOC15(GC, GET_INST(GC, inst), mmCPC_INT_CNTL,  in kgd_gfx_v9_init_interrupts()
    238  hqd_base = SOC15_REG_OFFSET(GC, GET_INST(GC, inst), mmCP_MQD_BASE_ADDR);  in kgd_gfx_v9_hqd_load()
    241  reg <= SOC15_REG_OFFSET(GC, GET_INST(GC, inst), mmCP_HQD_PQ_WPTR_HI); reg++)  in kgd_gfx_v9_hqd_load()
    248  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), mmCP_HQD_PQ_DOORBELL_CONTROL, data);  in kgd_gfx_v9_hqd_load()
    277  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), mmCP_HQD_PQ_WPTR_LO,  in kgd_gfx_v9_hqd_load()
    279  WREG32_SOC15_RLC(GC, GET_INST(G  in kgd_gfx_v9_hqd_load()
    [all...]
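The kgd_gfx_v9_hqd_load() matches (238, 241, ...) hint at a loop that programs the contiguous mmCP_MQD_BASE_ADDR..mmCP_HQD_PQ_WPTR_HI register range of the GET_INST-resolved physical instance from the queue's MQD. Below is a hypothetical, simplified sketch of that "copy an array into a contiguous register window" idea; the write32() helper and the offsets are illustrative only, not WREG32_SOC15_RLC or real register values.

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical MMIO write helper; the real code uses WREG32_SOC15_RLC(). */
static void write32(uint32_t reg, uint32_t val)
{
	printf("reg 0x%03x <- 0x%08x\n", reg, val);
}

/*
 * Program the contiguous register window [base, end] from an array of
 * values, one write per register, walking the register offset and the
 * source array in lock step (the shape of an HQD/MQD restore loop).
 */
static void program_reg_window(uint32_t base, uint32_t end, const uint32_t *vals)
{
	size_t i = 0;

	for (uint32_t reg = base; reg <= end; reg++, i++)
		write32(reg, vals[i]);
}

int main(void)
{
	/* pretend the MQD contributes four consecutive register values */
	uint32_t mqd[4] = { 0x1000, 0x0, 0xdeadbeef, 0x1 };

	program_reg_window(0x200, 0x203, mqd);
	return 0;
}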
amdgpu_jpeg.h
     39  WREG32_SOC15(JPEG, GET_INST(JPEG, inst_idx), \
     42  JPEG, GET_INST(JPEG, inst_idx), \
     66  WREG32_SOC15(JPEG, GET_INST(JPEG, inst_idx), \
     68  WREG32_SOC15(JPEG, GET_INST(JPEG, inst_idx), \
     71  JPEG, GET_INST(JPEG, inst_idx), \
     80  WREG32_SOC15(JPEG, GET_INST(JPEG, inst_idx), \
     82  WREG32_SOC15(JPEG, GET_INST(JPEG, inst_idx), \
amdgpu_amdkfd_gc_9_4_3.c
     48  SOC15_REG_OFFSET(SDMA0, GET_INST(SDMA0, engine_id),  in get_sdma_rlc_reg_offset()
    228  unsigned int phy_inst = GET_INST(GC, xcc_inst);  in kgd_gfx_v9_4_3_set_pasid_vmid_mapping()
    299  hqd_base = SOC15_REG_OFFSET(GC, GET_INST(GC, inst), regCP_MQD_BASE_ADDR);  in kgd_gfx_v9_4_3_hqd_load()
    300  hqd_end = SOC15_REG_OFFSET(GC, GET_INST(GC, inst), regCP_HQD_AQL_DISPATCH_ID_HI);  in kgd_gfx_v9_4_3_hqd_load()
    309  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), regCP_HQD_PQ_DOORBELL_CONTROL, data);  in kgd_gfx_v9_4_3_hqd_load()
    338  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), regCP_HQD_PQ_WPTR_LO,  in kgd_gfx_v9_4_3_hqd_load()
    340  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), regCP_HQD_PQ_WPTR_HI,  in kgd_gfx_v9_4_3_hqd_load()
    342  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), regCP_HQD_PQ_WPTR_POLL_ADDR,  in kgd_gfx_v9_4_3_hqd_load()
    344  WREG32_SOC15_RLC(GC, GET_INST(GC, inst), regCP_HQD_PQ_WPTR_POLL_ADDR_HI,  in kgd_gfx_v9_4_3_hqd_load()
    346  WREG32_SOC15_RLC(GC, GET_INST(G  in kgd_gfx_v9_4_3_hqd_load()
    [all...]
jpeg_v4_0_3.c
    173  jpeg_inst = GET_INST(JPEG, i);  in jpeg_v4_0_3_sw_init()
    276  jpeg_inst = GET_INST(JPEG, i);  in jpeg_v4_0_3_start_sriov()
    392  if (RREG32_SOC15(VCN, GET_INST(VCN, 0), regVCN_RRMT_CNTL) &  in jpeg_v4_0_3_hw_init()
    397  jpeg_inst = GET_INST(JPEG, i);  in jpeg_v4_0_3_hw_init()
    412  VCN, GET_INST(VCN, i),  in jpeg_v4_0_3_hw_init()
    498  jpeg_inst = GET_INST(JPEG, inst_idx);  in jpeg_v4_0_3_disable_clock_gating()
    523  jpeg_inst = GET_INST(JPEG, inst_idx);  in jpeg_v4_0_3_enable_clock_gating()
    545  int jpeg_inst = GET_INST(JPEG, inst);  in jpeg_v4_0_3_start_inst()
    575  int jpeg_inst = GET_INST(JPEG, ring->me);  in jpeg_v4_0_3_start_jrbc()
    637  int jpeg_inst = GET_INST(JPE  in jpeg_v4_0_3_stop_inst()
    [all...]
jpeg_v5_0_1.c
    170  jpeg_inst = GET_INST(JPEG, i);  in jpeg_v5_0_1_sw_init()
    263  if (RREG32_SOC15(VCN, GET_INST(VCN, 0), regVCN_RRMT_CNTL) & 0x100)  in jpeg_v5_0_1_hw_init()
    267  jpeg_inst = GET_INST(JPEG, i);  in jpeg_v5_0_1_hw_init()
    277  WREG32_SOC15_OFFSET(VCN, GET_INST(VCN, i), regVCN_JPEG_DB_CTRL,  in jpeg_v5_0_1_hw_init()
    360  int jpeg_inst = GET_INST(JPEG, i);  in jpeg_v5_0_1_init_inst()
    381  int jpeg_inst = GET_INST(JPEG, i);  in jpeg_v5_0_1_deinit_inst()
    397  int jpeg_inst = GET_INST(JPEG, ring->me);  in jpeg_v5_0_1_init_jrbc()
    466  jpeg_inst = GET_INST(JPEG, i);  in jpeg_v5_0_1_start_sriov()
    605  return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me), regUVD_JRBC_RB_RPTR,  in jpeg_v5_0_1_dec_ring_get_rptr()
    623  return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPE  in jpeg_v5_0_1_dec_ring_get_wptr()
    [all...]
vcn_v5_0_1.c
    122  vcn_inst = GET_INST(VCN, i);  in vcn_v5_0_1_sw_init()
    243  if (RREG32_SOC15(VCN, GET_INST(VCN, 0), regVCN_RRMT_CNTL) & 0x100)  in vcn_v5_0_1_hw_init()
    246  vcn_inst = GET_INST(VCN, i);  in vcn_v5_0_1_hw_init()
    363  vcn_inst = GET_INST(VCN, inst);  in vcn_v5_0_1_mc_resume()
    560  vcn_inst = GET_INST(VCN, vinst->inst);  in vcn_v5_0_1_pause_dpg_mode()
    611  vcn_inst = GET_INST(VCN, inst_idx);  in vcn_v5_0_1_start_dpg_mode()
    745  vcn_inst = GET_INST(VCN, i);  in vcn_v5_0_1_start_sriov()
    920  vcn_inst = GET_INST(VCN, i);  in vcn_v5_0_1_start()
   1068  vcn_inst = GET_INST(VCN, inst_idx);  in vcn_v5_0_1_stop_dpg_mode()
   1106  vcn_inst = GET_INST(VC  in vcn_v5_0_1_stop()
    [all...]
vcn_v4_0_3.c
    188  vcn_inst = GET_INST(VCN, i);  in vcn_v4_0_3_sw_init()
    302  vcn_inst = GET_INST(VCN, inst_idx);  in vcn_v4_0_3_hw_init_inst()
    348  if (RREG32_SOC15(VCN, GET_INST(VCN, 0), regVCN_RRMT_CNTL) &  in vcn_v4_0_3_hw_init()
    465  vcn_inst = GET_INST(VCN, inst_idx);  in vcn_v4_0_3_mc_resume()
    656  vcn_inst = GET_INST(VCN, inst_idx);  in vcn_v4_0_3_disable_clock_gating()
    803  vcn_inst = GET_INST(VCN, inst_idx);  in vcn_v4_0_3_enable_clock_gating()
    857  vcn_inst = GET_INST(VCN, inst_idx);  in vcn_v4_0_3_start_dpg_mode()
   1023  vcn_inst = GET_INST(VCN, i);  in vcn_v4_0_3_start_sriov()
   1196  vcn_inst = GET_INST(VCN, i);  in vcn_v4_0_3_start()
   1362  vcn_inst = GET_INST(VC  in vcn_v4_0_3_stop_dpg_mode()
    [all...]
amdgpu_vcn.h
    146  WREG32_SOC15(VCN, GET_INST(VCN, inst_idx), \
    149  VCN, GET_INST(VCN, inst_idx), \
    198  WREG32_SOC15(VCN, GET_INST(VCN, inst_idx), \
    201  VCN, GET_INST(VCN, inst_idx), \
gmc_v9_0.c
    882  tmp = RREG32_SOC15_IP_NO_KIQ(MMHUB, sem, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
    884  tmp = RREG32_SOC15_IP_NO_KIQ(GC, sem, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
    895  WREG32_SOC15_IP_NO_KIQ(MMHUB, req, inv_req, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
    897  WREG32_SOC15_IP_NO_KIQ(GC, req, inv_req, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
    910  tmp = RREG32_SOC15_IP_NO_KIQ(MMHUB, ack, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
    912  tmp = RREG32_SOC15_IP_NO_KIQ(GC, ack, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
    925  WREG32_SOC15_IP_NO_KIQ(MMHUB, sem, 0, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
    927  WREG32_SOC15_IP_NO_KIQ(GC, sem, 0, GET_INST(GC, inst));  in gmc_v9_0_flush_gpu_tlb()
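The gmc_v9_0_flush_gpu_tlb() matches trace a recognisable sequence against either the GC or the MMHUB hub of the GET_INST-resolved instance: read the semaphore until it is granted, post the invalidation request, poll the acknowledge, then write the semaphore back to release it. Below is a hypothetical, self-contained model of that request/acknowledge handshake; the fake_hub registers, field layouts, and the instant "ack" are all invented for illustration and are not the driver's register interface.

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

/* Fake per-hub registers standing in for the real sem/req/ack offsets. */
struct fake_hub {
	uint32_t sem;   /* non-zero while the invalidation engine is owned */
	uint32_t req;   /* last invalidation request posted */
	uint32_t ack;   /* set by "hardware" when the flush has completed */
};

static bool hub_sem_acquired(struct fake_hub *hub)
{
	if (hub->sem)
		return false;           /* engine busy, try again */
	hub->sem = 1;
	return true;
}

static void hub_flush_tlb(struct fake_hub *hub, uint32_t inv_req)
{
	/* 1. acquire the semaphore to serialise use of the invalidation engine
	 *    (real code bounds this loop with a retry count / timeout) */
	while (!hub_sem_acquired(hub))
		;

	/* 2. post the invalidation request */
	hub->req = inv_req;

	/* 3. poll the acknowledge; this fake hardware acks immediately */
	hub->ack = inv_req;
	while ((hub->ack & inv_req) != inv_req)
		;

	/* 4. release the semaphore */
	hub->sem = 0;
}

int main(void)
{
	struct fake_hub gc_hub = { 0 };

	hub_flush_tlb(&gc_hub, 1u << 3);        /* e.g. invalidate VMID 3 */
	printf("flush done, sem=%u\n", gc_hub.sem);
	return 0;
}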
sdma_v4_4_2.c
    119  u32 dev_inst = GET_INST(SDMA0, instance);  in sdma_v4_4_2_get_reg_offset()
   1743  return amdgpu_dpm_reset_sdma(adev, 1 << GET_INST(SDMA0, instance_id));  in sdma_v4_4_2_soft_reset_engine()
   2040  data = RREG32(SOC15_REG_OFFSET(SDMA0, GET_INST(SDMA0, 0), regSDMA_CLK_CTRL));  in sdma_v4_4_2_get_clockgating_state()
   2045  data = RREG32(SOC15_REG_OFFSET(SDMA0, GET_INST(SDMA0, 0), regSDMA_POWER_CNTL));  in sdma_v4_4_2_get_clockgating_state()
   2187  dev_inst = GET_INST(SDMA0, i);  in sdma_v4_4_2_set_ring_funcs()
   2459  uint32_t sdma_dev_inst = GET_INST(SDMA0, sdma_inst);  in sdma_v4_4_2_inst_query_ras_error_count()
   2497  uint32_t sdma_dev_inst = GET_INST(SDMA0, sdma_inst);  in sdma_v4_4_2_inst_reset_ras_error_count()
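Two small patterns recur in the sdma_v4_4_2.c matches: a per-instance register offset computed from the physical SDMA engine index, and a reset mask built as "1 << physical_instance". A hypothetical sketch of both, with illustrative base/stride values and an identity logical-to-physical map standing in for GET_INST():

#include <stdint.h>
#include <stdio.h>

/* Illustrative values; the real driver derives these from register headers. */
#define SDMA_INST_STRIDE 0x400u   /* hypothetical MMIO distance between engines */
#define SDMA_REG_BASE    0x5000u  /* hypothetical base of engine 0 */

/* Hypothetical logical->physical map; GET_INST() does the real lookup. */
static uint32_t get_phys_inst(uint32_t logical)
{
	return logical;
}

/* Per-instance register offset: base of the physical engine plus the
 * register's offset within one engine's block. */
static uint32_t sdma_reg_offset(uint32_t logical_inst, uint32_t reg)
{
	uint32_t dev_inst = get_phys_inst(logical_inst);

	return SDMA_REG_BASE + dev_inst * SDMA_INST_STRIDE + reg;
}

/* Reset mask: one bit per physical engine, as in '1 << GET_INST(SDMA0, id)'. */
static uint32_t sdma_reset_mask(uint32_t logical_inst)
{
	return 1u << get_phys_inst(logical_inst);
}

int main(void)
{
	printf("offset = 0x%x, mask = 0x%x\n",
	       sdma_reg_offset(1, 0x1c), sdma_reset_mask(1));
	return 0;
}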
soc15_common.h
     27  /* GET_INST returns the physical instance corresponding to a logical instance */
     28  #define GET_INST(ip, inst) \   macro
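The macro body is cut off in this listing, so here is a minimal, self-contained model of what the comment describes. All names below (fake_adev, demo_map, the hwip enum) are illustrative stand-ins rather than the real amdgpu structures, and the fallback-to-identity behaviour is an assumption: the point is only that callers pass a logical instance and, when a per-IP mapping callback exists, it is translated to the physical instance before register offsets are computed.

#include <stdio.h>

/* Illustrative IP identifiers; the real driver uses *_HWIP enums. */
enum hwip { GC_HWIP, SDMA0_HWIP, VCN_HWIP, JPEG_HWIP };

struct fake_adev {
	/* logical -> physical instance map; identity when absent */
	int (*logical_to_dev_inst)(struct fake_adev *adev, enum hwip ip, int inst);
};

/*
 * Sketch of the expansion: like the real macro, it expects a local
 * variable named 'adev' to be in scope at the call site.
 */
#define GET_INST(ip, inst) \
	(adev->logical_to_dev_inst ? \
	 adev->logical_to_dev_inst(adev, ip##_HWIP, (inst)) : (inst))

/* Hypothetical mapping: logical GC instances are shifted by two XCCs. */
static int demo_map(struct fake_adev *adev, enum hwip ip, int inst)
{
	(void)adev;
	return (ip == GC_HWIP) ? inst + 2 : inst;
}

int main(void)
{
	struct fake_adev dev = { .logical_to_dev_inst = demo_map };
	struct fake_adev *adev = &dev;

	printf("GC  logical 0 -> physical %d\n", GET_INST(GC, 0));
	printf("VCN logical 1 -> physical %d\n", GET_INST(VCN, 1));
	return 0;
}

This is presumably why every WREG32_SOC15/RREG32_SOC15 call site in the listing resolves its instance argument through GET_INST: the same code path works whether or not the part partitions its instances.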
amdgpu_jpeg.c
    496  inst_id = GET_INST(JPEG, i);  in amdgpu_jpeg_dump_ip_state()
gmc_v12_0.c
    311  1 << vmid, GET_INST(GC, 0));  in gmc_v12_0_flush_gpu_tlb()
gmc_v11_0.c
    237  1 << vmid, GET_INST(GC, 0));  in gmc_v11_0_flush_gpu_tlb()
gmc_v10_0.c
    276  1 << vmid, GET_INST(GC, 0));  in gmc_v10_0_flush_gpu_tlb()
amdgpu_device.c
    792  ret = amdgpu_virt_rlcg_reg_rw(adev, reg, 0, rlcg_flag, GET_INST(GC, xcc_id));  in amdgpu_device_xcc_rreg()
    923  amdgpu_virt_rlcg_reg_rw(adev, reg, v, rlcg_flag, GET_INST(GC, xcc_id));  in amdgpu_device_xcc_wreg()
/linux/drivers/gpu/drm/amd/pm/swsmu/smu13/
smu_v13_0_12_ppt.c
    310  xcc_id = GET_INST(GC, 0);  in smu_v13_0_12_get_smu_metrics_data()
    378  inst = GET_INST(VCN, k);  in smu_v13_0_12_get_xcp_metrics()
    402  inst = GET_INST(GC, k);  in smu_v13_0_12_get_xcp_metrics()
    457  xcc_id = GET_INST(GC, i);  in smu_v13_0_12_get_gpu_metrics()
    465  inst = GET_INST(VCN, i);  in smu_v13_0_12_get_gpu_metrics()
    488  gpu_metrics->gfxclk_lock_status = metrics->GfxLockXCDMak >> GET_INST(GC, 0);  in smu_v13_0_12_get_gpu_metrics()
    531  inst = GET_INST(VCN, k);  in smu_v13_0_12_get_gpu_metrics()
    547  inst = GET_INST(GC, k);  in smu_v13_0_12_get_gpu_metrics()
smu_v13_0_6_ppt.c
   1213  xcc_id = GET_INST(GC, 0);  in smu_v13_0_6_get_smu_metrics_data()
   2611  inst = GET_INST(VCN, k);  in smu_v13_0_6_get_xcp_metrics()
   2639  inst = GET_INST(GC, k);  in smu_v13_0_6_get_xcp_metrics()
   2737  xcc_id = GET_INST(GC, i);  in smu_v13_0_6_get_gpu_metrics()
   2745  inst = GET_INST(VCN, i);  in smu_v13_0_6_get_gpu_metrics()
   2772  version) >> GET_INST(GC, 0);  in smu_v13_0_6_get_gpu_metrics()
   2844  inst = GET_INST(VCN, k);  in smu_v13_0_6_get_gpu_metrics()
   2862  inst = GET_INST(GC, k);  in smu_v13_0_6_get_gpu_metrics()
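The swsmu ppt matches all have the same shape: iterate over the logical VCN/GC instances of a partition, translate each to its physical index, and use the physical index to pick the right slot out of the firmware's metrics tables. A hypothetical sketch of that indexing; the fake_metrics layout and the hard-coded maps are invented for illustration and are not the SMU metrics structures.

#include <stdint.h>
#include <stdio.h>

#define MAX_XCC 8
#define MAX_VCN 4

/* Fake firmware metrics table, indexed by *physical* instance. */
struct fake_metrics {
	uint32_t gfxclk[MAX_XCC];
	uint32_t vclk[MAX_VCN];
};

/* Hypothetical logical->physical maps for one partition. */
static const int xcc_map[] = { 2, 3 };   /* logical GC 0,1 -> physical 2,3 */
static const int vcn_map[] = { 1 };      /* logical VCN 0  -> physical 1   */

static void dump_partition_clocks(const struct fake_metrics *m)
{
	/* Logical loop index, physical array index: the pattern behind the
	 * 'inst = GET_INST(GC, k)' / 'inst = GET_INST(VCN, k)' matches. */
	for (size_t k = 0; k < sizeof(xcc_map) / sizeof(xcc_map[0]); k++)
		printf("GC%zu  gfxclk=%u\n", k, m->gfxclk[xcc_map[k]]);

	for (size_t k = 0; k < sizeof(vcn_map) / sizeof(vcn_map[0]); k++)
		printf("VCN%zu vclk=%u\n", k, m->vclk[vcn_map[k]]);
}

int main(void)
{
	struct fake_metrics m = {
		.gfxclk = { 0, 0, 1800, 1805, 0, 0, 0, 0 },
		.vclk   = { 0, 1200, 0, 0 },
	};

	dump_partition_clocks(&m);
	return 0;
}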
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_device.c
    706  mapped_xcc = GET_INST(GC, xcc);  in kfd_setup_interrupt_bitmap()