
Searched refs:tmp_adev (Results 1 – 8 of 8) sorted by relevance

/linux/drivers/gpu/drm/amd/amdgpu/
aldebaran.c
153 struct amdgpu_device *tmp_adev = NULL; in aldebaran_mode2_perform_reset() local
167 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
168 mutex_lock(&tmp_adev->reset_cntl->reset_lock); in aldebaran_mode2_perform_reset()
169 tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2; in aldebaran_mode2_perform_reset()
175 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
177 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in aldebaran_mode2_perform_reset()
179 &tmp_adev->reset_cntl->reset_work)) in aldebaran_mode2_perform_reset()
182 r = aldebaran_mode2_reset(tmp_adev); in aldebaran_mode2_perform_reset()
184 dev_err(tmp_adev->dev, in aldebaran_mode2_perform_reset()
186 r, adev_to_drm(tmp_adev)->unique); in aldebaran_mode2_perform_reset()
[all …]
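
The aldebaran.c hits above show a two-phase pattern: every device on the reset list is first marked for a MODE2 reset under its reset_lock, then the reset is launched per device, queued to reset_work for multi-node XGMI hives or performed directly otherwise. A minimal sketch of that flow follows, assuming the work is queued to a generic workqueue and abbreviating the log message; the lock release and the wait for queued resets happen in later stages of the driver and are only noted here.

static int example_mode2_perform_reset(struct list_head *reset_device_list)
{
	struct amdgpu_device *tmp_adev = NULL;
	int r = 0;

	/* Phase 1: mark every device as undergoing a MODE2 reset. */
	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		mutex_lock(&tmp_adev->reset_cntl->reset_lock);
		tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2;
	}

	/* Phase 2: launch the resets, queued for multi-node XGMI hives. */
	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
			/* Assumed workqueue; the hit only shows reset_work. */
			if (!queue_work(system_unbound_wq,
					&tmp_adev->reset_cntl->reset_work))
				r = -EALREADY;
		} else {
			r = aldebaran_mode2_reset(tmp_adev);
		}
		if (r) {
			dev_err(tmp_adev->dev,
				"ASIC reset failed (%d) for drm dev %s\n",
				r, adev_to_drm(tmp_adev)->unique);
			break;
		}
	}

	/* reset_lock stays held here; the driver releases it in a later stage. */
	return r;
}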
amdgpu_reset.c
61 struct amdgpu_device *tmp_adev; in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt() local
64 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt()
65 amdgpu_unregister_gpu_instance(tmp_adev); in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt()
66 r = amdgpu_reset_xgmi_reset_on_init_suspend(tmp_adev); in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt()
68 dev_err(tmp_adev->dev, in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt()
82 struct amdgpu_device *tmp_adev = NULL; in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt() local
88 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt()
89 if (!tmp_adev->kfd.init_complete) { in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt()
90 kgd2kfd_init_zone_device(tmp_adev); in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt()
91 amdgpu_amdkfd_device_init(tmp_adev); in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt()
[all …]
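
The amdgpu_reset.c hits show the prep/restore pair used for XGMI reset-on-init: prep unregisters each GPU instance and suspends it, restore finishes KFD bring-up on devices whose KFD init had not completed. A hedged sketch of both walks, with the surrounding context handling and exact error strings abbreviated:

static int example_xgmi_on_init_prep(struct list_head *reset_device_list)
{
	struct amdgpu_device *tmp_adev;
	int r;

	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		amdgpu_unregister_gpu_instance(tmp_adev);
		r = amdgpu_reset_xgmi_reset_on_init_suspend(tmp_adev);
		if (r) {
			dev_err(tmp_adev->dev,
				"xgmi reset on init: suspend failed (%d)\n", r);
			return r;
		}
	}
	return 0;
}

static void example_xgmi_on_init_restore(struct list_head *reset_device_list)
{
	struct amdgpu_device *tmp_adev = NULL;

	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		/* Devices that never finished KFD init get a full KFD bring-up. */
		if (!tmp_adev->kfd.init_complete) {
			kgd2kfd_init_zone_device(tmp_adev);
			amdgpu_amdkfd_device_init(tmp_adev);
		}
	}
}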
sienna_cichlid.c
221 struct amdgpu_device *tmp_adev = (struct amdgpu_device *)reset_ctl->handle; in sienna_cichlid_mode2_restore_hwcontext() local
223 amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_RESET_RECOVERY); in sienna_cichlid_mode2_restore_hwcontext()
224 dev_info(tmp_adev->dev, in sienna_cichlid_mode2_restore_hwcontext()
226 r = sienna_cichlid_mode2_restore_ip(tmp_adev); in sienna_cichlid_mode2_restore_hwcontext()
234 amdgpu_register_gpu_instance(tmp_adev); in sienna_cichlid_mode2_restore_hwcontext()
237 amdgpu_ras_resume(tmp_adev); in sienna_cichlid_mode2_restore_hwcontext()
239 amdgpu_irq_gpu_reset_resume_helper(tmp_adev); in sienna_cichlid_mode2_restore_hwcontext()
241 amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT); in sienna_cichlid_mode2_restore_hwcontext()
242 r = amdgpu_ib_ring_tests(tmp_adev); in sienna_cichlid_mode2_restore_hwcontext()
244 dev_err(tmp_adev->dev, in sienna_cichlid_mode2_restore_hwcontext()
smu_v13_0_10.c
222 struct amdgpu_device *tmp_adev = (struct amdgpu_device *)reset_ctl->handle; in smu_v13_0_10_mode2_restore_hwcontext() local
224 amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_RESET_RECOVERY); in smu_v13_0_10_mode2_restore_hwcontext()
225 dev_info(tmp_adev->dev, in smu_v13_0_10_mode2_restore_hwcontext()
227 r = smu_v13_0_10_mode2_restore_ip(tmp_adev); in smu_v13_0_10_mode2_restore_hwcontext()
231 amdgpu_register_gpu_instance(tmp_adev); in smu_v13_0_10_mode2_restore_hwcontext()
234 amdgpu_ras_resume(tmp_adev); in smu_v13_0_10_mode2_restore_hwcontext()
236 amdgpu_irq_gpu_reset_resume_helper(tmp_adev); in smu_v13_0_10_mode2_restore_hwcontext()
238 amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT); in smu_v13_0_10_mode2_restore_hwcontext()
239 r = amdgpu_ib_ring_tests(tmp_adev); in smu_v13_0_10_mode2_restore_hwcontext()
241 dev_err(tmp_adev->dev, in smu_v13_0_10_mode2_restore_hwcontext()
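
sienna_cichlid.c and smu_v13_0_10.c share the same mode2 restore_hwcontext sequence: the single device comes out of reset_ctl->handle, its IP blocks are restored at the reset-recovery init level, then the GPU instance, RAS and interrupts are resumed, the init level drops back to default, and the IB ring tests validate the result. A sketch of that shared flow, with the ASIC-specific restore helper passed in as a callback (an assumption for illustration) and log messages abbreviated:

static int example_mode2_restore_hwcontext(struct amdgpu_reset_control *reset_ctl,
					   int (*restore_ip)(struct amdgpu_device *))
{
	struct amdgpu_device *tmp_adev = (struct amdgpu_device *)reset_ctl->handle;
	int r;

	amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_RESET_RECOVERY);
	dev_info(tmp_adev->dev, "GPU reset succeeded, trying to resume\n");

	/* sienna_cichlid_mode2_restore_ip() or smu_v13_0_10_mode2_restore_ip() */
	r = restore_ip(tmp_adev);
	if (r)
		return r;

	amdgpu_register_gpu_instance(tmp_adev);

	/* Resume RAS and re-arm interrupts now that the IP blocks are back. */
	amdgpu_ras_resume(tmp_adev);
	amdgpu_irq_gpu_reset_resume_helper(tmp_adev);

	amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT);

	r = amdgpu_ib_ring_tests(tmp_adev);
	if (r)
		dev_err(tmp_adev->dev, "ib ring test failed (%d)\n", r);

	return r;
}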
amdgpu_xgmi.c
944 struct amdgpu_device *tmp_adev; in amdgpu_xgmi_initialize_hive_get_data_partition() local
947 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) { in amdgpu_xgmi_initialize_hive_get_data_partition()
948 ret = psp_xgmi_initialize(&tmp_adev->psp, set_extended_data, false); in amdgpu_xgmi_initialize_hive_get_data_partition()
950 dev_err(tmp_adev->dev, in amdgpu_xgmi_initialize_hive_get_data_partition()
988 struct amdgpu_device *tmp_adev = NULL; in amdgpu_xgmi_add_device() local
1042 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) { in amdgpu_xgmi_add_device()
1044 if (tmp_adev != adev) { in amdgpu_xgmi_add_device()
1045 top_info = &tmp_adev->psp.xgmi_context.top_info; in amdgpu_xgmi_add_device()
1050 ret = amdgpu_xgmi_update_topology(hive, tmp_adev); in amdgpu_xgmi_add_device()
1070 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) { in amdgpu_xgmi_add_device()
[all …]
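
The amdgpu_xgmi.c hits walk the hive's device_list through each device's gmc.xgmi.head node: once to initialize PSP XGMI state on every member, and once, in amdgpu_xgmi_add_device(), to push updated topology to every peer other than the device being added. A sketch of both walks, with control flow around the elided lines simplified:

static int example_xgmi_init_hive(struct amdgpu_hive_info *hive,
				  bool set_extended_data)
{
	struct amdgpu_device *tmp_adev;
	int ret;

	list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) {
		ret = psp_xgmi_initialize(&tmp_adev->psp, set_extended_data, false);
		if (ret) {
			dev_err(tmp_adev->dev,
				"XGMI: failed to initialize xgmi session\n");
			return ret;
		}
	}
	return 0;
}

static int example_xgmi_update_peers(struct amdgpu_hive_info *hive,
				     struct amdgpu_device *adev)
{
	struct amdgpu_device *tmp_adev = NULL;
	int ret = 0;

	/* Refresh topology on every peer except the device being added. */
	list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) {
		if (tmp_adev != adev)
			ret = amdgpu_xgmi_update_topology(hive, tmp_adev);
		if (ret)
			break;
	}
	return ret;
}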
amdgpu_device.c
5726 struct amdgpu_device *tmp_adev = reset_context->reset_req_dev; in amdgpu_device_pre_asic_reset() local
5784 dev_info(tmp_adev->dev, "Dumping IP State\n"); in amdgpu_device_pre_asic_reset()
5786 for (i = 0; i < tmp_adev->num_ip_blocks; i++) in amdgpu_device_pre_asic_reset()
5787 if (tmp_adev->ip_blocks[i].version->funcs->dump_ip_state) in amdgpu_device_pre_asic_reset()
5788 tmp_adev->ip_blocks[i].version->funcs in amdgpu_device_pre_asic_reset()
5789 ->dump_ip_state((void *)&tmp_adev->ip_blocks[i]); in amdgpu_device_pre_asic_reset()
5790 dev_info(tmp_adev->dev, "Dumping IP State Completed\n"); in amdgpu_device_pre_asic_reset()
5809 struct amdgpu_device *tmp_adev; in amdgpu_device_reinit_after_reset() local
5829 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_device_reinit_after_reset()
5830 amdgpu_set_init_level(tmp_adev, init_level); in amdgpu_device_reinit_after_reset()
[all …]
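
In amdgpu_device_pre_asic_reset() the device that requested the reset (reset_context->reset_req_dev) dumps the state of every IP block that provides a dump_ip_state() callback before the ASIC is actually reset; amdgpu_device_reinit_after_reset() then walks the reset list and re-applies the init level. The dump loop is reproduced below essentially as it appears in the hits, with the surrounding reset bookkeeping omitted:

static void example_dump_ip_state(struct amdgpu_reset_context *reset_context)
{
	struct amdgpu_device *tmp_adev = reset_context->reset_req_dev;
	int i;

	dev_info(tmp_adev->dev, "Dumping IP State\n");
	/* Call each IP block's optional dump callback. */
	for (i = 0; i < tmp_adev->num_ip_blocks; i++)
		if (tmp_adev->ip_blocks[i].version->funcs->dump_ip_state)
			tmp_adev->ip_blocks[i].version->funcs
				->dump_ip_state((void *)&tmp_adev->ip_blocks[i]);
	dev_info(tmp_adev->dev, "Dumping IP State Completed\n");
}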
amdgpu_job.c
55 struct amdgpu_device *tmp_adev = NULL; in amdgpu_job_core_dump() local
68 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) in amdgpu_job_core_dump()
69 list_add_tail(&tmp_adev->reset_list, &device_list); in amdgpu_job_core_dump()
79 list_for_each_entry(tmp_adev, device_list_handle, reset_list) in amdgpu_job_core_dump()
80 amdgpu_job_do_core_dump(tmp_adev, job); in amdgpu_job_core_dump()
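
amdgpu_job_core_dump() builds a local device list before dumping: when the device belongs to an XGMI hive, every hive member is appended through its reset_list node, and the per-device dump helper then runs for each entry. The non-hive fallback (listing only adev itself) is assumed from context around the elided lines:

static void example_job_core_dump(struct amdgpu_device *adev,
				  struct amdgpu_hive_info *hive,
				  struct amdgpu_job *job)
{
	struct amdgpu_device *tmp_adev = NULL;
	struct list_head device_list, *device_list_handle;

	INIT_LIST_HEAD(&device_list);
	if (hive) {
		/* Dump every member of the XGMI hive. */
		list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)
			list_add_tail(&tmp_adev->reset_list, &device_list);
	} else {
		/* Assumed single-device fallback. */
		list_add_tail(&adev->reset_list, &device_list);
	}
	device_list_handle = &device_list;

	list_for_each_entry(tmp_adev, device_list_handle, reset_list)
		amdgpu_job_do_core_dump(tmp_adev, job);
}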
amdgpu_ras.c
2598 struct amdgpu_device *tmp_adev; in amdgpu_ras_set_fed_all() local
2601 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) in amdgpu_ras_set_fed_all()
2602 amdgpu_ras_set_fed(tmp_adev, status); in amdgpu_ras_set_fed_all()
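
amdgpu_ras_set_fed_all() is the simplest form of the same hive walk: the fatal-error-detected (FED) status is propagated to every device in the hive. Reproduced as a minimal sketch:

static void example_ras_set_fed_all(struct amdgpu_hive_info *hive, bool status)
{
	struct amdgpu_device *tmp_adev;

	/* Mirror the FED flag onto every hive member. */
	list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)
		amdgpu_ras_set_fed(tmp_adev, status);
}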