Lines Matching full:sdma
530 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
562 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_destroy_inst_ctx()
563 release_firmware(adev->sdma.instance[i].fw); in sdma_v4_0_destroy_inst_ctx()
564 adev->sdma.instance[i].fw = NULL; in sdma_v4_0_destroy_inst_ctx()
567  all SDMA instances */ in sdma_v4_0_destroy_inst_ctx()
572 memset((void*)adev->sdma.instance, 0, in sdma_v4_0_destroy_inst_ctx()
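The destroy path above releases the firmware and then wipes the whole instance array; the comment fragment at line 567 explains why a single release can suffice: Arcturus shares one firmware image across all instances. A minimal standalone sketch of that pattern, using mock types (mock_sdma, mock_instance, and free() standing in for the real amdgpu structures and release_firmware()):

    #include <stdlib.h>
    #include <string.h>

    #define MAX_INSTANCES 8

    struct mock_instance {
        void *fw;                         /* stand-in for the firmware blob */
    };

    struct mock_sdma {
        int num_instances;
        struct mock_instance instance[MAX_INSTANCES];
    };

    /* Mirrors sdma_v4_0_destroy_inst_ctx(): release the firmware, honouring
     * the shared-image case, then wipe the array so aliased context
     * pointers cannot dangle. */
    static void destroy_inst_ctx(struct mock_sdma *sdma, int fw_is_shared)
    {
        int i;

        for (i = 0; i < sdma->num_instances; i++) {
            free(sdma->instance[i].fw);   /* release_firmware() analog */
            sdma->instance[i].fw = NULL;

            /* a shared image means every fw pointer aliases instance 0's,
             * so it must be released exactly once */
            if (fw_is_shared)
                break;
        }
        memset(sdma->instance, 0, sizeof(sdma->instance));
    }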
634 err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev); in sdma_v4_0_init_microcode()
638 err = sdma_v4_0_init_inst_ctx(&adev->sdma.instance[0]); in sdma_v4_0_init_microcode()
642 for (i = 1; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
645 for every SDMA instance */ in sdma_v4_0_init_microcode()
646 memcpy((void*)&adev->sdma.instance[i], in sdma_v4_0_init_microcode()
647 (void*)&adev->sdma.instance[0], in sdma_v4_0_init_microcode()
653 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v4_0_init_microcode()
657 err = sdma_v4_0_init_inst_ctx(&adev->sdma.instance[i]); in sdma_v4_0_init_microcode()
667 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
670 info->fw = adev->sdma.instance[i].fw; in sdma_v4_0_init_microcode()
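Read together, the init_microcode hits show the complementary load path: instance 0 always loads from disk, and the remaining instances either duplicate its context (shared image) or load their own. A hedged sketch reusing the mock types from above; mock_request_firmware and init_inst_ctx are illustrative stubs, not kernel API:

    /* Stub: pretend to load a firmware image from disk. */
    static int mock_request_firmware(void **fw, const char *name)
    {
        (void)name;
        *fw = calloc(1, 64);
        return *fw ? 0 : -1;
    }

    static int init_inst_ctx(struct mock_instance *inst)
    {
        return inst->fw ? 0 : -1;         /* parse headers, versions, ... */
    }

    /* Mirrors the sdma_v4_0_init_microcode() flow. */
    static int init_microcode(struct mock_sdma *sdma, int fw_is_shared)
    {
        int i, err;

        err = mock_request_firmware(&sdma->instance[0].fw, "sdma.bin");
        if (err)
            return err;
        err = init_inst_ctx(&sdma->instance[0]);
        if (err)
            return err;

        for (i = 1; i < sdma->num_instances; i++) {
            if (fw_is_shared) {
                /* same image everywhere: duplicate instance 0's context */
                memcpy(&sdma->instance[i], &sdma->instance[0],
                       sizeof(sdma->instance[0]));
            } else {
                err = mock_request_firmware(&sdma->instance[i].fw,
                                            "sdma_n.bin");
                if (err)
                    return err;
                err = init_inst_ctx(&sdma->instance[i]);
                if (err)
                    return err;
            }
        }
        return 0;
    }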
825 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_0_ring_insert_nop() local
829 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_0_ring_insert_nop()
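The insert_nop hit relies on the burst-NOP optimization: when the microcode supports it, the first NOP packet carries a count of the trailing NOPs so the engine can consume the padding in one fetch. A sketch of the shape of that loop (the opcode and count-field encodings below are illustrative, not the real SDMA packet format):

    /* Write 'count' NOP packets into a ring buffer; with burst_nop the
     * first packet encodes how many dummy dwords follow it. */
    static void ring_insert_nop(unsigned int *buf, unsigned int *wptr,
                                unsigned int count, int burst_nop)
    {
        const unsigned int NOP_PKT = 0x00000000;  /* illustrative opcode */
        unsigned int i;

        for (i = 0; i < count; i++) {
            if (burst_nop && i == 0)
                buf[(*wptr)++] = NOP_PKT | ((count - 1) << 16);
            else
                buf[(*wptr)++] = NOP_PKT;
        }
    }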
959 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; in sdma_v4_0_gfx_stop() local
963 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_gfx_stop()
964 sdma[i] = &adev->sdma.instance[i].ring; in sdma_v4_0_gfx_stop()
966 if ((adev->mman.buffer_funcs_ring == sdma[i]) && unset != 1) { in sdma_v4_0_gfx_stop()
1001 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; in sdma_v4_0_page_stop() local
1006 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_page_stop()
1007 sdma[i] = &adev->sdma.instance[i].page; in sdma_v4_0_page_stop()
1009 if ((adev->mman.buffer_funcs_ring == sdma[i]) && in sdma_v4_0_page_stop()
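The two stop routines share one shape: collect each instance's gfx ring (or page queue), and if TTM's buffer_funcs_ring is among them, tear down the buffer funcs exactly once before halting the queues. A compact sketch with the mock types from above (set_buffer_funcs stands in for amdgpu_ttm_set_buffer_funcs_status()):

    /* Mirrors the sdma_v4_0_gfx_stop()/sdma_v4_0_page_stop() shape. */
    static void queues_stop(struct mock_sdma *sdma, void *buffer_funcs_ring,
                            void (*set_buffer_funcs)(int enabled))
    {
        int unset = 0;
        int i;

        for (i = 0; i < sdma->num_instances; i++) {
            void *q = &sdma->instance[i];     /* ring or page queue */

            if (buffer_funcs_ring == q && !unset) {
                set_buffer_funcs(0);          /* disable TTM path once */
                unset = 1;
            }
            /* ... clear RB_ENABLE/IB_ENABLE for this instance ... */
        }
    }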
1063 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_ctx_switch_enable()
1075  * Enable SDMA utilization. It's only supported on in sdma_v4_0_ctx_switch_enable()
1080 adev->sdma.instance[i].fw_version >= 14) in sdma_v4_0_ctx_switch_enable()
1102 if (adev->sdma.has_page_queue) in sdma_v4_0_enable()
1106 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_enable()
1141 struct amdgpu_ring *ring = &adev->sdma.instance[i].ring; in sdma_v4_0_gfx_resume()
1231 struct amdgpu_ring *ring = &adev->sdma.instance[i].page; in sdma_v4_0_page_resume()
1391 * sdma_v4_0_load_microcode - load the sDMA ME ucode
1408 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_load_microcode()
1409 if (!adev->sdma.instance[i].fw) in sdma_v4_0_load_microcode()
1412 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v4_0_load_microcode()
1417 (adev->sdma.instance[i].fw->data + in sdma_v4_0_load_microcode()
1427 adev->sdma.instance[i].fw_version); in sdma_v4_0_load_microcode()
1459 /* enable sdma ring preemption */ in sdma_v4_0_start()
1464 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_start()
1469 if (adev->sdma.has_page_queue) in sdma_v4_0_start()
1494 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_start()
1495 ring = &adev->sdma.instance[i].ring; in sdma_v4_0_start()
1501 if (adev->sdma.has_page_queue) { in sdma_v4_0_start()
1502 struct amdgpu_ring *page = &adev->sdma.instance[i].page; in sdma_v4_0_start()
1648 * Update PTEs by copying them from the GART using sDMA (VEGA10).
1677 * Update PTEs by writing them manually using sDMA (VEGA10).
1698 * sdma_v4_0_vm_set_pte_pde - update the page tables using sDMA
1707 * Update the page tables using sDMA (VEGA10).
1735 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_0_ring_pad_ib() local
1741 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_0_ring_pad_ib()
1772 * sdma_v4_0_ring_emit_vm_flush - vm flush using sDMA
1778 * using sDMA (VEGA10).
1803 uint fw_version = adev->sdma.instance[0].fw_version; in sdma_v4_0_fw_support_paging_queue()
1824 adev->sdma.num_instances = 1; in sdma_v4_0_early_init()
1826 adev->sdma.num_instances = 8; in sdma_v4_0_early_init()
1828 adev->sdma.num_instances = 2; in sdma_v4_0_early_init()
1832 DRM_ERROR("Failed to load sdma firmware!\n"); in sdma_v4_0_early_init()
1838 adev->sdma.has_page_queue = false; in sdma_v4_0_early_init()
1840 adev->sdma.has_page_queue = true; in sdma_v4_0_early_init()
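The early_init hits set the topology: one SDMA instance on one path, eight on another, two by default, plus a page-queue capability flag gated on firmware support (cf. the fw_version check at line 1803). A sketch of that selection; the ASIC names are assumptions based on the SDMA 4.x family (APUs, Arcturus, Vega-class dGPUs), not verbatim from the hits above:

    enum mock_asic { ASIC_APU, ASIC_ARCTURUS, ASIC_VEGA };

    static int sdma_instance_count(enum mock_asic asic)
    {
        switch (asic) {
        case ASIC_APU:      return 1;    /* single-engine APU path */
        case ASIC_ARCTURUS: return 8;    /* eight engines (assumed) */
        default:            return 2;    /* dGPU default */
        }
    }

    static int sdma_has_page_queue(enum mock_asic asic, int fw_supports_paging)
    {
        /* abbreviated: the driver also folds in virtualization checks */
        return asic != ASIC_APU && fw_supports_paging;
    }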
1864 if (adev->sdma.funcs && adev->sdma.funcs->reset_ras_error_count) in sdma_v4_0_late_init()
1865 adev->sdma.funcs->reset_ras_error_count(adev); in sdma_v4_0_late_init()
1867 if (adev->sdma.funcs && adev->sdma.funcs->ras_late_init) in sdma_v4_0_late_init()
1868 return adev->sdma.funcs->ras_late_init(adev, &ih_info); in sdma_v4_0_late_init()
1879 /* SDMA trap event */ in sdma_v4_0_sw_init()
1880 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_sw_init()
1883 &adev->sdma.trap_irq); in sdma_v4_0_sw_init()
1888 /* SDMA SRAM ECC event */ in sdma_v4_0_sw_init()
1889 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_sw_init()
1892 &adev->sdma.ecc_irq); in sdma_v4_0_sw_init()
1897 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_sw_init()
1898 ring = &adev->sdma.instance[i].ring; in sdma_v4_0_sw_init()
1902 DRM_DEBUG("SDMA %d use_doorbell being set to: [%s]\n", i, in sdma_v4_0_sw_init()
1908 sprintf(ring->name, "sdma%d", i); in sdma_v4_0_sw_init()
1909 r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq, in sdma_v4_0_sw_init()
1915 if (adev->sdma.has_page_queue) { in sdma_v4_0_sw_init()
1916 ring = &adev->sdma.instance[i].page; in sdma_v4_0_sw_init()
1928 &adev->sdma.trap_irq, in sdma_v4_0_sw_init()
1944 if (adev->sdma.funcs && adev->sdma.funcs->ras_fini) in sdma_v4_0_sw_fini()
1945 adev->sdma.funcs->ras_fini(adev); in sdma_v4_0_sw_fini()
1947 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_sw_fini()
1948 amdgpu_ring_fini(&adev->sdma.instance[i].ring); in sdma_v4_0_sw_fini()
1949 if (adev->sdma.has_page_queue) in sdma_v4_0_sw_fini()
1950 amdgpu_ring_fini(&adev->sdma.instance[i].page); in sdma_v4_0_sw_fini()
1982 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_hw_fini()
1983 amdgpu_irq_put(adev, &adev->sdma.ecc_irq, in sdma_v4_0_hw_fini()
2015 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_is_idle()
2028 u32 sdma[AMDGPU_MAX_SDMA_INSTANCES]; in sdma_v4_0_wait_for_idle() local
2032 for (j = 0; j < adev->sdma.num_instances; j++) { in sdma_v4_0_wait_for_idle()
2033 sdma[j] = RREG32_SDMA(j, mmSDMA0_STATUS_REG); in sdma_v4_0_wait_for_idle()
2034 if (!(sdma[j] & SDMA0_STATUS_REG__IDLE_MASK)) in sdma_v4_0_wait_for_idle()
2037 if (j == adev->sdma.num_instances) in sdma_v4_0_wait_for_idle()
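wait_for_idle polls every instance's status register and only succeeds when the inner loop runs to completion, i.e. the loop index equals num_instances. A standalone sketch of that idiom (mock_read_status, mock_udelay, and IDLE_MASK are stand-ins for RREG32_SDMA, udelay(), and SDMA0_STATUS_REG__IDLE_MASK):

    #define IDLE_MASK 0x1u                /* illustrative bit, not the real mask */

    static unsigned int mock_read_status(int instance)
    {
        (void)instance;
        return IDLE_MASK;                 /* pretend the engine is idle */
    }

    static void mock_udelay(unsigned int usec) { (void)usec; }

    /* Mirrors sdma_v4_0_wait_for_idle(): success is detected by the inner
     * loop running to completion (j == num_instances). */
    static int wait_for_idle(int num_instances, int timeout_us)
    {
        int i, j;

        for (i = 0; i < timeout_us; i++) {
            for (j = 0; j < num_instances; j++) {
                if (!(mock_read_status(j) & IDLE_MASK))
                    break;                /* instance j is still busy */
            }
            if (j == num_instances)
                return 0;                 /* all idle in one pass */
            mock_udelay(1);
        }
        return -1;                        /* timed out */
    }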
2072 DRM_DEBUG("IH: SDMA trap\n"); in sdma_v4_0_process_trap_irq()
2076 amdgpu_fence_process(&adev->sdma.instance[instance].ring); in sdma_v4_0_process_trap_irq()
2080 amdgpu_fence_process(&adev->sdma.instance[instance].page); in sdma_v4_0_process_trap_irq()
2087 amdgpu_fence_process(&adev->sdma.instance[instance].page); in sdma_v4_0_process_trap_irq()
2122 DRM_ERROR("Illegal instruction in SDMA command stream\n"); in sdma_v4_0_process_illegal_inst_irq()
2130 drm_sched_fault(&adev->sdma.instance[instance].ring.sched); in sdma_v4_0_process_illegal_inst_irq()
2159 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_update_medium_grain_clock_gating()
2173 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_update_medium_grain_clock_gating()
2198 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_update_medium_grain_light_sleep()
2199 /* 1-not override: enable sdma mem light sleep */ in sdma_v4_0_update_medium_grain_light_sleep()
2206 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_update_medium_grain_light_sleep()
2207  /* 0-override: disable sdma mem light sleep */ in sdma_v4_0_update_medium_grain_light_sleep()
2330  * On Arcturus, SDMA instances 5~7 have a different vmhub type (AMDGPU_MMHUB_1).
2433 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_set_ring_funcs()
2435 adev->sdma.instance[i].ring.funcs = in sdma_v4_0_set_ring_funcs()
2438 adev->sdma.instance[i].ring.funcs = in sdma_v4_0_set_ring_funcs()
2440 adev->sdma.instance[i].ring.me = i; in sdma_v4_0_set_ring_funcs()
2441 if (adev->sdma.has_page_queue) { in sdma_v4_0_set_ring_funcs()
2443 adev->sdma.instance[i].page.funcs = in sdma_v4_0_set_ring_funcs()
2446 adev->sdma.instance[i].page.funcs = in sdma_v4_0_set_ring_funcs()
2448 adev->sdma.instance[i].page.me = i; in sdma_v4_0_set_ring_funcs()
2471 switch (adev->sdma.num_instances) { in sdma_v4_0_set_irq_funcs()
2473 adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_INSTANCE1; in sdma_v4_0_set_irq_funcs()
2474 adev->sdma.ecc_irq.num_types = AMDGPU_SDMA_IRQ_INSTANCE1; in sdma_v4_0_set_irq_funcs()
2477 adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_LAST; in sdma_v4_0_set_irq_funcs()
2478 adev->sdma.ecc_irq.num_types = AMDGPU_SDMA_IRQ_LAST; in sdma_v4_0_set_irq_funcs()
2482 adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_INSTANCE2; in sdma_v4_0_set_irq_funcs()
2483 adev->sdma.ecc_irq.num_types = AMDGPU_SDMA_IRQ_INSTANCE2; in sdma_v4_0_set_irq_funcs()
2486 adev->sdma.trap_irq.funcs = &sdma_v4_0_trap_irq_funcs; in sdma_v4_0_set_irq_funcs()
2487 adev->sdma.illegal_inst_irq.funcs = &sdma_v4_0_illegal_inst_irq_funcs; in sdma_v4_0_set_irq_funcs()
2488 adev->sdma.ecc_irq.funcs = &sdma_v4_0_ecc_irq_funcs; in sdma_v4_0_set_irq_funcs()
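set_irq_funcs sizes the trap and ECC IRQ sources from the instance count, using the per-instance enum values as counts. A sketch of the switch (the enum values are illustrative stand-ins for AMDGPU_SDMA_IRQ_INSTANCE1/INSTANCE2/LAST):

    enum { IRQ_INSTANCE1 = 1, IRQ_INSTANCE2 = 2, IRQ_LAST = 8 };

    /* Mirrors sdma_v4_0_set_irq_funcs(): num_types must cover one IRQ
     * type per instance. */
    static int irq_num_types(int num_instances)
    {
        switch (num_instances) {
        case 1:
            return IRQ_INSTANCE1;
        case 8:
            return IRQ_LAST;
        case 2:
        default:
            return IRQ_INSTANCE2;
        }
    }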
2492 * sdma_v4_0_emit_copy_buffer - copy buffer using the sDMA engine
2521 * sdma_v4_0_emit_fill_buffer - fill buffer using the sDMA engine
2555 if (adev->sdma.has_page_queue) in sdma_v4_0_set_buffer_funcs()
2556 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].page; in sdma_v4_0_set_buffer_funcs()
2558 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v4_0_set_buffer_funcs()
2575 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_set_vm_pte_funcs()
2576 if (adev->sdma.has_page_queue) in sdma_v4_0_set_vm_pte_funcs()
2577 sched = &adev->sdma.instance[i].page.sched; in sdma_v4_0_set_vm_pte_funcs()
2579 sched = &adev->sdma.instance[i].ring.sched; in sdma_v4_0_set_vm_pte_funcs()
2582 adev->vm_manager.vm_pte_num_scheds = adev->sdma.num_instances; in sdma_v4_0_set_vm_pte_funcs()
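The buffer-funcs and VM-PTE hooks apply the same rule: route work to the page queue when the hardware has one, otherwise to the gfx ring. A sketch of the scheduler selection (mock integer fields stand in for the drm_gpu_scheduler members of the real rings):

    struct mock_queue_pair {
        int ring_sched;                   /* stand-in for a scheduler */
        int page_sched;
    };

    /* Mirrors sdma_v4_0_set_vm_pte_funcs(): one scheduler per instance,
     * preferring the page queue when has_page_queue is set. */
    static void set_vm_pte_scheds(struct mock_queue_pair *inst, int n,
                                  const int **scheds, int has_page_queue)
    {
        int i;

        for (i = 0; i < n; i++)
            scheds[i] = has_page_queue ? &inst[i].page_sched
                                       : &inst[i].ring_sched;
    }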
2594 /* the SDMA_EDC_COUNTER register in each sdma instance in sdma_v4_0_get_ras_error_count()
2601 DRM_INFO("Detected %s in SDMA%d, SED %d\n", in sdma_v4_0_get_ras_error_count()
2637 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v4_0_reset_ras_error_count()
2654 adev->sdma.funcs = &sdma_v4_0_ras_funcs; in sdma_v4_0_set_ras_funcs()