Cross-references for "nbio" under /linux/drivers/gpu/drm/amd/amdgpu/:
soc24.c
      93  return adev->nbio.funcs->get_memsize(adev);  // in soc24_get_config_memsize()
     248  (adev->nbio.funcs->program_aspm))  // in soc24_program_aspm()
     249  adev->nbio.funcs->program_aspm(adev);  // in soc24_program_aspm()
     369  adev->nbio.funcs->set_reg_remap(adev);  // in soc24_common_early_init()
     449  if (adev->nbio.ras &&  // in soc24_common_late_init()
     450  adev->nbio.ras_err_event_athub_irq.funcs)  // in soc24_common_late_init()
     456  amdgpu_irq_get(adev, &adev->nbio.ras_err_event_athub_irq, 0);  // in soc24_common_late_init()
     462  adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, true);  // in soc24_common_late_init()
     483  /* setup nbio registers */  // in soc24_common_hw_init()
     484  adev->nbio ...  // in soc24_common_hw_init()
    [all ...]
soc21.c
     223  return adev->nbio.funcs->get_memsize(adev);  // in soc21_get_config_memsize()
     352  u32 memsize = adev->nbio.funcs->get_memsize(adev);
     440  if (adev->nbio.funcs->program_aspm)  // in soc21_program_aspm()
     441  adev->nbio.funcs->program_aspm(adev);  // in soc21_program_aspm()
     558  adev->nbio.funcs->set_reg_remap(adev);  // in soc21_common_early_init()
     841  if (adev->nbio.ras &&  // in soc21_common_late_init()
     842  adev->nbio.ras_err_event_athub_irq.funcs)  // in soc21_common_late_init()
     845  * nbio v4_3 only support fatal error hanlding  // in soc21_common_late_init()
     847  amdgpu_irq_get(adev, &adev->nbio.ras_err_event_athub_irq, 0);  // in soc21_common_late_init()
     853  adev->nbio ...  // in soc21_common_late_init()
    [all ...]
nv.c
     308  return adev->nbio.funcs->get_memsize(adev);  // in nv_get_config_memsize()
     430  u32 memsize = adev->nbio.funcs->get_memsize(adev);  // in nv_asic_mode2_reset()
     519  if (adev->nbio.funcs->program_aspm)  // in nv_program_aspm()
     520  adev->nbio.funcs->program_aspm(adev);  // in nv_program_aspm()
     611  if (adev->nbio.funcs->enable_aspm &&  // in nv_update_umd_stable_pstate()
     613  adev->nbio.funcs->enable_aspm(adev, !enter);  // in nv_update_umd_stable_pstate()
     642  adev->nbio.funcs->set_reg_remap(adev);  // in nv_common_early_init()
     972  adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, true);  // in nv_common_late_init()
     991  if (adev->nbio.funcs->apply_lc_spc_mode_wa)  // in nv_common_hw_init()
     992  adev->nbio ...  // in nv_common_hw_init()
    [all ...]
df_v3_6.c
      51  address = adev->nbio.funcs->get_pcie_index_offset(adev);  // in df_v3_6_get_fica()
      52  data = adev->nbio.funcs->get_pcie_data_offset(adev);  // in df_v3_6_get_fica()
      74  address = adev->nbio.funcs->get_pcie_index_offset(adev);  // in df_v3_6_set_fica()
      75  data = adev->nbio.funcs->get_pcie_data_offset(adev);  // in df_v3_6_set_fica()
     102  address = adev->nbio.funcs->get_pcie_index_offset(adev);  // in df_v3_6_perfmon_rreg()
     103  data = adev->nbio.funcs->get_pcie_data_offset(adev);  // in df_v3_6_perfmon_rreg()
     124  address = adev->nbio.funcs->get_pcie_index_offset(adev);  // in df_v3_6_perfmon_wreg()
     125  data = adev->nbio.funcs->get_pcie_data_offset(adev);  // in df_v3_6_perfmon_wreg()
     143  address = adev->nbio.funcs->get_pcie_index_offset(adev);  // in df_v3_6_perfmon_arm_with_status()
     144  data = adev->nbio ...  // in df_v3_6_perfmon_arm_with_status()
    [all ...]
amdgpu_hdp.c
      59  if (adev->nbio.funcs->get_memsize)  // in amdgpu_hdp_generic_flush()
      60  adev->nbio.funcs->get_memsize(adev);  // in amdgpu_hdp_generic_flush()
hdp_v5_2.c
      44  if (adev->nbio.funcs->get_memsize)  // in hdp_v5_2_flush_hdp()
      45  adev->nbio.funcs->get_memsize(adev);  // in hdp_v5_2_flush_hdp()
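In both amdgpu_hdp.c and hdp_v5_2.c the get_memsize() return value is discarded: the callback serves as a harmless readback through NBIO that forces the preceding HDP flush write to post. A minimal sketch of the idiom, assuming a placeholder flush register name (the real code writes the HDP memory-coherency flush control register for the IP in question):

#include "amdgpu.h"

#define HDP_FLUSH_REG 0 /* placeholder for the real flush register offset */

/* Sketch of hdp_v5_2_flush_hdp(): kick the flush, then post the
 * write with a dummy register read through the NBIO block. */
static void hdp_flush_sketch(struct amdgpu_device *adev)
{
        WREG32_NO_KIQ(HDP_FLUSH_REG, 1);
        if (adev->nbio.funcs->get_memsize)
                adev->nbio.funcs->get_memsize(adev); /* read posts the write */
}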
amdgpu_device.c
     953  pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev);  // in amdgpu_device_indirect_rreg()
     954  pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev);  // in amdgpu_device_indirect_rreg()
     977  if (unlikely(!adev->nbio.funcs)) {  // in amdgpu_device_indirect_rreg_ext()
     981  pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev);  // in amdgpu_device_indirect_rreg_ext()
     982  pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev);  // in amdgpu_device_indirect_rreg_ext()
     986  if (unlikely(!adev->nbio.funcs))  // in amdgpu_device_indirect_rreg_ext()
     989  pcie_index_hi = adev->nbio.funcs->get_pcie_index_hi_offset(adev);  // in amdgpu_device_indirect_rreg_ext()
    1036  pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev);  // in amdgpu_device_indirect_rreg64()
    1037  pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev);  // in amdgpu_device_indirect_rreg64()
    1066  pcie_index = adev->nbio ...  // in amdgpu_device_indirect_rreg64_ext()
    [all ...]
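df_v3_6.c and amdgpu_device.c open every accessor with the same pair of NBIO queries: the offsets of the PCIE index and data registers, which together form an indirect window onto registers outside the directly mapped MMIO range. A sketch of the plain 32-bit read path, closely following the hits above (the lock and rmmio field names are assumptions matching amdgpu_device.c's style; the _ext and 64-bit variants add a high-index register and a second data read):

#include "amdgpu.h"

/* Sketch of amdgpu_device_indirect_rreg(): select the target via the
 * PCIE index register, then fetch the value via the PCIE data register. */
static u32 indirect_rreg_sketch(struct amdgpu_device *adev, u32 reg_addr)
{
        void __iomem *pcie_index_offset, *pcie_data_offset;
        unsigned long flags;
        u32 pcie_index, pcie_data, r;

        pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev);
        pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev);

        spin_lock_irqsave(&adev->pcie_idx_lock, flags);
        pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4;
        pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4;

        writel(reg_addr, pcie_index_offset);    /* select target register */
        readl(pcie_index_offset);               /* post the index write */
        r = readl(pcie_data_offset);            /* read the value back */
        spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);

        return r;
}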
amdgpu_discovery.c
    2855  adev->nbio.funcs = &nbio_v6_1_funcs;  // in amdgpu_discovery_set_ip_blocks()
    2856  adev->nbio.hdp_flush_reg = &nbio_v6_1_hdp_flush_reg;  // in amdgpu_discovery_set_ip_blocks()
    2861  adev->nbio.funcs = &nbio_v7_0_funcs;  // in amdgpu_discovery_set_ip_blocks()
    2862  adev->nbio.hdp_flush_reg = &nbio_v7_0_hdp_flush_reg;  // in amdgpu_discovery_set_ip_blocks()
    2867  adev->nbio.funcs = &nbio_v7_4_funcs;  // in amdgpu_discovery_set_ip_blocks()
    2868  adev->nbio.hdp_flush_reg = &nbio_v7_4_hdp_flush_reg;  // in amdgpu_discovery_set_ip_blocks()
    2872  adev->nbio.funcs = &nbio_v7_9_funcs;  // in amdgpu_discovery_set_ip_blocks()
    2873  adev->nbio.hdp_flush_reg = &nbio_v7_9_hdp_flush_reg;  // in amdgpu_discovery_set_ip_blocks()
    2879  adev->nbio.funcs = &nbio_v7_11_funcs;  // in amdgpu_discovery_set_ip_blocks()
    2880  adev->nbio ...  // in amdgpu_discovery_set_ip_blocks()
    [all ...]
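The amdgpu_discovery.c hits are the central dispatch point: amdgpu_discovery_set_ip_blocks() reads the NBIO IP version from the IP discovery table and binds the matching callback table plus HDP flush register description to adev->nbio. A condensed sketch covering two of the versions visible above (per-IP headers elided; the version-to-table mapping shown is illustrative, not the full switch):

#include "amdgpu.h"

static int set_nbio_funcs_sketch(struct amdgpu_device *adev)
{
        switch (amdgpu_ip_version(adev, NBIO_HWIP, 0)) {
        case IP_VERSION(6, 1, 0):
                adev->nbio.funcs = &nbio_v6_1_funcs;
                adev->nbio.hdp_flush_reg = &nbio_v6_1_hdp_flush_reg;
                break;
        case IP_VERSION(7, 4, 0):
                adev->nbio.funcs = &nbio_v7_4_funcs;
                adev->nbio.hdp_flush_reg = &nbio_v7_4_hdp_flush_reg;
                break;
        default:
                return -EINVAL;
        }
        return 0;
}

Every other adev->nbio.funcs-> call in this listing relies on this assignment having happened first, which is why several callers (amdgpu_device.c:977, amdgpu_bios.c:560, gmc_v9_0.c:1380) defensively check adev->nbio.funcs for NULL.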
amdgpu_bios.c
     560  if (adev->nbio.funcs &&  // in amdgpu_soc15_read_bios_from_rom()
     561  adev->nbio.funcs->get_rom_offset) {  // in amdgpu_soc15_read_bios_from_rom()
     562  rom_offset = adev->nbio.funcs->get_rom_offset(adev);  // in amdgpu_soc15_read_bios_from_rom()
nbif_v6_3_1.c
      30  #include "ivsrcid/nbio/irqsrcs_nbif_7_4.h"
     535  adev->nbio.ras_err_event_athub_irq.funcs =  // in nbif_v6_3_1_init_ras_err_event_athub_interrupt()
     537  adev->nbio.ras_err_event_athub_irq.num_types = 1;  // in nbif_v6_3_1_init_ras_err_event_athub_interrupt()
     540  * nbif v6_3_1 uses the same irq source as nbio v7_4  // in nbif_v6_3_1_init_ras_err_event_athub_interrupt()
     544  &adev->nbio.ras_err_event_athub_irq);  // in nbif_v6_3_1_init_ras_err_event_athub_interrupt()
nbio_v4_3.c
      26  #include "nbio/nbio_4_3_0_offset.h"
      27  #include "nbio/nbio_4_3_0_sh_mask.h"
      28  #include "ivsrcid/nbio/irqsrcs_nbif_7_4.h"
     618  adev->nbio.ras_err_event_athub_irq.funcs =  // in nbio_v4_3_init_ras_err_event_athub_interrupt()
     620  adev->nbio.ras_err_event_athub_irq.num_types = 1;  // in nbio_v4_3_init_ras_err_event_athub_interrupt()
     623  * nbio v4_3 uses the same irq source as nbio v7_4 */  // in nbio_v4_3_init_ras_err_event_athub_interrupt()
     626  &adev->nbio.ras_err_event_athub_irq);  // in nbio_v4_3_init_ras_err_event_athub_interrupt()
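nbio_v4_3.c and nbif_v6_3_1.c register the fatal-error interrupt the same way: populate an amdgpu_irq_src with a funcs table and a single type, then attach it to the nbif 7.4 client/source IDs that both blocks reuse (hence the shared irqsrcs_nbif_7_4.h include). A sketch, with the irq-funcs table name assumed:

#include "amdgpu.h"
#include "soc15.h"
#include "ivsrcid/nbio/irqsrcs_nbif_7_4.h"

static int init_ras_athub_irq_sketch(struct amdgpu_device *adev)
{
        /* assumed table name, defined elsewhere in the real file */
        adev->nbio.ras_err_event_athub_irq.funcs =
                &nbio_v4_3_ras_err_event_athub_irq_funcs;
        adev->nbio.ras_err_event_athub_irq.num_types = 1;

        /* nbio v4_3 uses the same irq source as nbio v7_4 */
        return amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF,
                                 NBIF_7_4__SRCID__ERREVENT_ATHUB_INTERRUPT,
                                 &adev->nbio.ras_err_event_athub_irq);
}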
amdgpu_ras.c
      37  #include "ivsrcid/nbio/irqsrcs_nbif_7_4.h"
    2181  if (adev->nbio.ras &&  // in amdgpu_ras_interrupt_fatal_error_handler()
    2182  adev->nbio.ras->handle_ras_controller_intr_no_bifring)  // in amdgpu_ras_interrupt_fatal_error_handler()
    2183  adev->nbio.ras->handle_ras_controller_intr_no_bifring(adev);  // in amdgpu_ras_interrupt_fatal_error_handler()
    2185  if (adev->nbio.ras &&  // in amdgpu_ras_interrupt_fatal_error_handler()
    2186  adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring)  // in amdgpu_ras_interrupt_fatal_error_handler()
    2187  adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring(adev);  // in amdgpu_ras_interrupt_fatal_error_handler()
    4004  /* initialize nbio ras function ahead of any other  // in amdgpu_ras_init()
    4012  adev->nbio.ras = &nbio_v7_4_ras;  // in amdgpu_ras_init()
    4016  /* unlike other generation of nbio ra ...  // in amdgpu_ras_init()
    [all ...]
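Reassembled from the hits at amdgpu_ras.c:2181-2187, the fatal-error handler is a pair of guarded dispatches into the NBIO RAS hooks; the adev->nbio.ras check matters because the pointer is only assigned for NBIO versions with RAS support (line 4012 shows the v7_4 case):

#include "amdgpu.h"

/* Sketch of amdgpu_ras_interrupt_fatal_error_handler(): forward the
 * fatal interrupt to whichever NBIO RAS handlers are populated. */
static void fatal_error_handler_sketch(struct amdgpu_device *adev)
{
        if (adev->nbio.ras &&
            adev->nbio.ras->handle_ras_controller_intr_no_bifring)
                adev->nbio.ras->handle_ras_controller_intr_no_bifring(adev);

        if (adev->nbio.ras &&
            adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring)
                adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring(adev);
}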
vega10_ih.c
     273  adev->nbio.funcs->ih_control(adev);  // in vega10_ih_irq_init()
     293  adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,  // in vega10_ih_irq_init()
navi10_ih.c
     329  adev->nbio.funcs->ih_control(adev);  // in navi10_ih_irq_init()
     361  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  // in navi10_ih_irq_init()
vega20_ih.c
     319  adev->nbio.funcs->ih_control(adev);  // in vega20_ih_irq_init()
     357  adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,  // in vega20_ih_irq_init()
ih_v7_0.c
     306  adev->nbio.funcs->ih_control(adev);  // in ih_v7_0_irq_init()
     327  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  // in ih_v7_0_irq_init()
ih_v6_0.c
     334  adev->nbio.funcs->ih_control(adev);  // in ih_v6_0_irq_init()
     356  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  // in ih_v6_0_irq_init()
ih_v6_1.c
     306  adev->nbio.funcs->ih_control(adev);  // in ih_v6_1_irq_init()
     327  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  // in ih_v6_1_irq_init()
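All six interrupt-handler variants above (vega10, vega20, navi10, ih_v6_0, ih_v6_1, ih_v7_0) make the same two NBIO calls during IRQ init: ih_control() to enable interrupt delivery in the BIF, then ih_doorbell_range() to program the doorbell aperture for the IH ring. A sketch of the shared sequence (field names follow struct amdgpu_ih_ring as used in the hits):

#include "amdgpu.h"

static void ih_irq_init_nbio_sketch(struct amdgpu_device *adev)
{
        struct amdgpu_ih_ring *ih = &adev->irq.ih;

        /* let the BIF deliver interrupts to the IH ring */
        adev->nbio.funcs->ih_control(adev);
        /* carve out a doorbell range for the IH ring's write pointer */
        adev->nbio.funcs->ih_doorbell_range(adev, ih->use_doorbell,
                                            ih->doorbell_index);
}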
gmc_v9_0.c
    1380  if (adev->nbio.funcs && adev->nbio.funcs->is_nps_switch_requested &&  // in gmc_v9_0_need_reset_on_init()
    1381  adev->nbio.funcs->is_nps_switch_requested(adev)) {  // in gmc_v9_0_need_reset_on_init()
    1707  adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL;  // in gmc_v9_0_mc_init()
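The 1024ULL * 1024ULL factor at gmc_v9_0.c:1707 pins down the units: get_memsize() reports VRAM in megabytes, and mc_init scales it to bytes. A sketch of the sizing step (the real_vram_size assignment alongside it is an assumption about the surrounding code):

#include "amdgpu.h"

static void mc_vram_size_sketch(struct amdgpu_device *adev)
{
        /* get_memsize() returns MB; GMC bookkeeping wants bytes */
        adev->gmc.mc_vram_size =
                adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL;
        adev->gmc.real_vram_size = adev->gmc.mc_vram_size;
}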
sdma_v7_0.c
     332  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  // in sdma_v7_0_ring_emit_hdp_flush()
     339  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  // in sdma_v7_0_ring_emit_hdp_flush()
     340  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  // in sdma_v7_0_ring_emit_hdp_flush()
     561  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  // in sdma_v7_0_gfx_resume_instance()
sdma_v6_0.c
     328  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  // in sdma_v6_0_ring_emit_hdp_flush()
     335  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  // in sdma_v6_0_ring_emit_hdp_flush()
     336  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  // in sdma_v6_0_ring_emit_hdp_flush()
     565  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  // in sdma_v6_0_gfx_resume_instance()
amdgpu_gmc.c
    1476  if (adev->nbio.funcs &&  // in amdgpu_gmc_get_memory_partition()
    1477  adev->nbio.funcs->get_memory_partition_mode)  // in amdgpu_gmc_get_memory_partition()
    1478  mode = adev->nbio.funcs->get_memory_partition_mode(adev,  // in amdgpu_gmc_get_memory_partition()
aqua_vanjaram.c
     125  if (adev->nbio.funcs->get_compute_partition_mode) {  // in aqua_vanjaram_query_partition_mode()
     126  mode = adev->nbio.funcs->get_compute_partition_mode(adev);  // in aqua_vanjaram_query_partition_mode()
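amdgpu_bios.c, gmc_v9_0.c, amdgpu_gmc.c, and aqua_vanjaram.c all treat these NBIO queries as optional per-IP features: test adev->nbio.funcs and the specific hook before calling, and fall back to a default when the callback is absent. A sketch of the guard from amdgpu_gmc_get_memory_partition(); the supp_modes out-parameter completes the truncated hit at line 1478 and is an assumption, as is the UNKNOWN_MEMORY_PARTITION_MODE fallback:

#include "amdgpu.h"

static enum amdgpu_memory_partition
memory_partition_sketch(struct amdgpu_device *adev, u32 *supp_modes)
{
        enum amdgpu_memory_partition mode = UNKNOWN_MEMORY_PARTITION_MODE;

        if (adev->nbio.funcs &&
            adev->nbio.funcs->get_memory_partition_mode)
                mode = adev->nbio.funcs->get_memory_partition_mode(adev,
                                                                   supp_modes);
        return mode;
}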
sdma_v5_0.c
     493  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  // in sdma_v5_0_ring_emit_hdp_flush()
     503  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  // in sdma_v5_0_ring_emit_hdp_flush()
     504  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  // in sdma_v5_0_ring_emit_hdp_flush()
     782  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  // in sdma_v5_0_gfx_resume_instance()
sdma_v5_2.c
     342  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  // in sdma_v5_2_ring_emit_hdp_flush()
     352  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  // in sdma_v5_2_ring_emit_hdp_flush()
     353  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  // in sdma_v5_2_ring_emit_hdp_flush()
     626  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  // in sdma_v5_2_gfx_resume_instance()
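The four SDMA generations above emit the HDP flush identically: a poll-regmem style packet that targets the NBIO hdp_flush_done and hdp_flush_req registers (shifted left by 2 to convert dword offsets to byte addresses) with a per-instance ref/mask taken from adev->nbio.hdp_flush_reg. A compressed sketch; the packet header encoding and the ref_and_mask selection vary per SDMA generation, so both are placeholders here:

#include "amdgpu.h"

#define SDMA_POLL_REGMEM_HEADER 0 /* placeholder packet header encoding */

static void emit_hdp_flush_sketch(struct amdgpu_ring *ring)
{
        struct amdgpu_device *adev = ring->adev;
        const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;
        u32 ref_and_mask = nbio_hf_reg->ref_and_mask_sdma0 << ring->me;

        amdgpu_ring_write(ring, SDMA_POLL_REGMEM_HEADER);
        amdgpu_ring_write(ring,
                          (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);
        amdgpu_ring_write(ring,
                          (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);
        amdgpu_ring_write(ring, ref_and_mask); /* reference value */
        amdgpu_ring_write(ring, ref_and_mask); /* compare mask */
}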