Searched refs:num_compute_rings (Results 1 – 10 of 10) sorted by relevance
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c:
  196   if (adev->gfx.num_compute_rings > 1 &&   in amdgpu_gfx_is_high_priority_compute_queue()
  209   adev->gfx.num_compute_rings);   in amdgpu_gfx_compute_queue_acquire()
  444   for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in amdgpu_gfx_mqd_sw_init()
  445   j = i + xcc_id * adev->gfx.num_compute_rings;   in amdgpu_gfx_mqd_sw_init()
  485   for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in amdgpu_gfx_mqd_sw_fini()
  486   j = i + xcc_id * adev->gfx.num_compute_rings;   in amdgpu_gfx_mqd_sw_fini()
  509   for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in amdgpu_gfx_disable_kcq()
  510   j = i + xcc_id * adev->gfx.num_compute_rings;   in amdgpu_gfx_disable_kcq()
  526   adev->gfx.num_compute_rings)) {   in amdgpu_gfx_disable_kcq()
  531   for (i = 0; i < adev->gfx.num_compute_rings;   in amdgpu_gfx_disable_kcq()
  [all...]
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c:
  635   adev->gfx.num_compute_rings * num_xcc * GFX9_MEC_HPD_SIZE;   in gfx_v9_4_3_mec_init()
  978   ring = &adev->gfx.compute_ring[xcc_id * adev->gfx.num_compute_rings +   in gfx_v9_4_3_compute_ring_init()
  993   (ring_id + xcc_id * adev->gfx.num_compute_rings) *   in gfx_v9_4_3_compute_ring_init()
  1190  for (i = 0; i < adev->gfx.num_compute_rings * num_xcc; i++)   in gfx_v9_4_3_sw_fini()
  2198  for (j = 0; j < adev->gfx.num_compute_rings; j++) {   in gfx_v9_4_3_xcc_kcq_fini_register()
  2199  ring = &adev->gfx.compute_ring[j + xcc_id * adev->gfx.num_compute_rings];   in gfx_v9_4_3_xcc_kcq_fini_register()
  2227  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v9_4_3_xcc_kcq_resume()
  2229  adev->gfx.num_compute_rings];   in gfx_v9_4_3_xcc_kcq_resume()
  2264  for (j = 0; j < adev->gfx.num_compute_rings; j++) {   in gfx_v9_4_3_xcc_cp_resume()
  2266  [j + xcc_id * adev->gfx.num_compute_rings];   in gfx_v9_4_3_xcc_cp_resume()
  [all...]
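Note on the pattern: the amdgpu_gfx.c and gfx_v9_4_3.c hits above all index one flat compute_ring[] array, addressing a queue as its per-XCC index plus xcc_id * num_compute_rings. A minimal standalone sketch of that arithmetic, with made-up example counts (the real values come from the ASIC configuration):

    #include <stdio.h>

    int main(void)
    {
        /* Example values only, not taken from any specific ASIC. */
        const int num_compute_rings = 4;   /* kernel compute queues per XCC */
        const int num_xcc = 2;             /* graphics-core partitions */

        /* Flattened index used by the hits above:
         * j = i + xcc_id * num_compute_rings
         */
        for (int xcc_id = 0; xcc_id < num_xcc; xcc_id++)
            for (int i = 0; i < num_compute_rings; i++)
                printf("xcc %d, local queue %d -> compute_ring[%d]\n",
                       xcc_id, i, i + xcc_id * num_compute_rings);
        return 0;
    }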
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c:
  794   mec_hpd_size = adev->gfx.num_compute_rings * GFX12_MEC_HPD_SIZE;   in gfx_v12_0_mec_init()
  1395  unsigned num_compute_rings;   in gfx_v12_0_sw_init() (local)
  1452  if (adev->gfx.num_compute_rings) {   in gfx_v12_0_sw_init()
  1454  num_compute_rings = (adev->gfx.mec.num_pipe_per_mec *   in gfx_v12_0_sw_init()
  1456  adev->gfx.num_compute_rings = min(adev->gfx.num_compute_rings,   in gfx_v12_0_sw_init()
  1457  num_compute_rings);   in gfx_v12_0_sw_init()
  1522  if (adev->gfx.num_compute_rings) {   in gfx_v12_0_sw_init()
  1633  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v12_0_sw_fini()
  3433  for (i = 0; i < adev->gfx.num_compute_rings;   in gfx_v12_0_kcq_resume()
  [all...]
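The gfx_v12_0_sw_init() hits clamp the requested queue count to what the MEC can actually back; the truncated line 1454 multiplies num_pipe_per_mec by a second, per-pipe factor that the snippet cuts off. A hedged sketch of the clamp under that assumption (all numbers here are illustrative):

    #include <stdio.h>

    static unsigned int min_u(unsigned int a, unsigned int b)
    {
        return a < b ? a : b;
    }

    int main(void)
    {
        /* Illustrative values; real ones come from the MEC configuration. */
        unsigned int num_pipe_per_mec   = 4;
        unsigned int num_queue_per_pipe = 8;   /* assumed second factor */
        unsigned int requested          = 32;  /* adev->gfx.num_compute_rings */

        /* num_compute_rings = min(requested, pipes * queues-per-pipe) */
        unsigned int num_compute_rings =
            min_u(requested, num_pipe_per_mec * num_queue_per_pipe);

        printf("clamped num_compute_rings = %u\n", num_compute_rings);
        return 0;
    }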
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_arcturus.c:
  289   for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in suspend_resume_compute_scheduler()
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c:
  1299  mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE;   in gfx_v8_0_mec_init()
  2037  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v8_0_sw_fini()
  4321  r = amdgpu_ring_alloc(kiq_ring, (8 * adev->gfx.num_compute_rings) + 8);   in gfx_v8_0_kiq_kcq_enable()
  4335  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v8_0_kiq_kcq_enable()
  4671  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v8_0_kcq_resume()
  4698  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v8_0_cp_test_all_rings()
  4762  r = amdgpu_ring_alloc(kiq_ring, 6 * adev->gfx.num_compute_rings);   in gfx_v8_0_kcq_disable()
  4766  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v8_0_kcq_disable()
  4973  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v8_0_pre_soft_reset()
  5068  for (i = 0; i < adev->gfx.num_compute_rings;   in gfx_v8_0_post_soft_reset()
  [all...]
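Two gfx_v8_0.c hits size KIQ ring allocations directly from the queue count: line 4321 reserves (8 * num_compute_rings) + 8 dwords before mapping all KCQs (the trailing 8 presumably covering a one-off packet such as set_resources), and line 4762 reserves 6 dwords per queue before unmapping them. A toy calculation of those budgets (the per-queue dword costs are read off the two calls above; the queue count is an example):

    #include <stdio.h>

    int main(void)
    {
        /* Example count; the driver reads adev->gfx.num_compute_rings. */
        unsigned int num_compute_rings = 8;

        unsigned int map_dwords   = 8 * num_compute_rings + 8; /* kcq enable  */
        unsigned int unmap_dwords = 6 * num_compute_rings;     /* kcq disable */

        printf("KIQ budget: map %u dwords, unmap %u dwords\n",
               map_dwords, unmap_dwords);
        return 0;
    }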
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c:
  2721  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v7_0_cp_compute_fini()
  3024  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v7_0_cp_compute_resume()
  3034  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v7_0_cp_compute_resume()
  4107  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),   in gfx_v7_0_early_init()
  4413  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v7_0_sw_fini()
  4760  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v7_0_eop_irq()
  4785  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v7_0_fault()
  4976  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v7_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h:
  454   unsigned num_compute_rings;   (member)
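This hit is the field declaration itself. As rough orientation, a trimmed, hypothetical view of how the member sits in the gfx block state (the real struct amdgpu_gfx in amdgpu_gfx.h has far more fields, and its compute_ring is a fixed-size array rather than a pointer):

    /* Hypothetical trimmed sketch, not the real declaration. */
    struct amdgpu_ring;                    /* opaque for this sketch */

    struct amdgpu_gfx_sketch {
        struct amdgpu_ring *compute_ring;  /* flat array spanning all XCCs */
        unsigned num_compute_rings;        /* queues per XCC, per the
                                              indexing hits above */
    };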
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:
  3031  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),   in gfx_v6_0_early_init()
  3083  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v6_0_sw_init()
  3116  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v6_0_sw_fini()
  3508  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v6_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c:
  1884  mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE;   in gfx_v9_0_mec_init()
  2465  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v9_0_sw_fini()
  3926  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v9_0_kcq_resume()
  3981  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v9_0_cp_resume()
  4804  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),   in gfx_v9_0_early_init()
  6223  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v9_0_eop_irq()
  6253  for (i = 0; i < adev->gfx.num_compute_rings; i++) {   in gfx_v9_0_fault()
  7605  for (i = 0; i < adev->gfx.num_compute_rings; i++)   in gfx_v9_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c:
  426   for (i = 0; i < adev->gfx.num_compute_rings; i++)   in amdgpu_hw_ip_info()
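The last hit is the UAPI side: amdgpu_hw_ip_info() walks the compute rings when reporting queue availability to userspace. A hedged standalone sketch of that counting shape (the real loop checks each ring's scheduler state; the ready flags below are fabricated stand-ins):

    #include <stdbool.h>
    #include <stdio.h>

    int main(void)
    {
        /* Fabricated stand-ins for each compute ring's ready state. */
        bool ring_ready[] = { true, true, false, true };
        unsigned int num_compute_rings = 4;
        unsigned int num_rings = 0;

        /* Count only rings that are actually usable, as the hw_ip_info
         * query does before reporting to userspace. */
        for (unsigned int i = 0; i < num_compute_rings; i++)
            if (ring_ready[i])
                num_rings++;

        printf("available compute rings: %u\n", num_rings);
        return 0;
    }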