Lines Matching +full:write +full:-1 +full:-bps

43 vf2pf_info->ucode_info[ucode].id = ucode; \
44 vf2pf_info->ucode_info[ucode].version = ver; \
60 if (adev->asic_type != CHIP_ALDEBARAN && in amdgpu_virt_init_setting()
61 adev->asic_type != CHIP_ARCTURUS && in amdgpu_virt_init_setting()
62 ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) { in amdgpu_virt_init_setting()
63 if (adev->mode_info.num_crtc == 0) in amdgpu_virt_init_setting()
64 adev->mode_info.num_crtc = 1; in amdgpu_virt_init_setting()
65 adev->enable_virtual_display = true; in amdgpu_virt_init_setting()
67 ddev->driver_features &= ~DRIVER_ATOMIC; in amdgpu_virt_init_setting()
68 adev->cg_flags = 0; in amdgpu_virt_init_setting()
69 adev->pg_flags = 0; in amdgpu_virt_init_setting()
72 if (amdgpu_num_kcq == -1) in amdgpu_virt_init_setting()
77 * amdgpu_virt_request_full_gpu() - request full gpu access
85 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_request_full_gpu()
88 if (virt->ops && virt->ops->req_full_gpu) { in amdgpu_virt_request_full_gpu()
89 r = virt->ops->req_full_gpu(adev, init); in amdgpu_virt_request_full_gpu()
91 adev->no_hw_access = true; in amdgpu_virt_request_full_gpu()
95 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_request_full_gpu()
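Taken together, lines 88-95 form one half of an exclusive-access bracket: the VF asks the host for full GPU ownership and, on success, clears the RUNTIME capability bit; the release path below restores it. A hypothetical call site using only the two functions shown in this listing (error handling elided, not taken from the driver itself):

    int r;

    r = amdgpu_virt_request_full_gpu(adev, false);
    if (r)
            return r;

    /* ... program hardware directly while exclusive access is held ... */

    r = amdgpu_virt_release_full_gpu(adev, false);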
102 * amdgpu_virt_release_full_gpu() - release full gpu access
110 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_release_full_gpu()
113 if (virt->ops && virt->ops->rel_full_gpu) { in amdgpu_virt_release_full_gpu()
114 r = virt->ops->rel_full_gpu(adev, init); in amdgpu_virt_release_full_gpu()
118 adev->virt.caps |= AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_release_full_gpu()
124 * amdgpu_virt_reset_gpu() - reset gpu
131 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_reset_gpu()
134 if (virt->ops && virt->ops->reset_gpu) { in amdgpu_virt_reset_gpu()
135 r = virt->ops->reset_gpu(adev); in amdgpu_virt_reset_gpu()
139 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_reset_gpu()
147 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_request_init_data()
149 if (virt->ops && virt->ops->req_init_data) in amdgpu_virt_request_init_data()
150 virt->ops->req_init_data(adev); in amdgpu_virt_request_init_data()
152 if (adev->virt.req_init_data_ver > 0) in amdgpu_virt_request_init_data()
159 * amdgpu_virt_ready_to_reset() - send ready to reset to host
166 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ready_to_reset()
168 if (virt->ops && virt->ops->reset_gpu) in amdgpu_virt_ready_to_reset()
169 virt->ops->ready_to_reset(adev); in amdgpu_virt_ready_to_reset()
173 * amdgpu_virt_wait_reset() - wait for reset gpu completed
180 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_wait_reset()
182 if (!virt->ops || !virt->ops->wait_reset) in amdgpu_virt_wait_reset()
183 return -EINVAL; in amdgpu_virt_wait_reset()
185 return virt->ops->wait_reset(adev); in amdgpu_virt_wait_reset()
189 * amdgpu_virt_alloc_mm_table() - alloc memory for mm table
198 if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr) in amdgpu_virt_alloc_mm_table()
204 &adev->virt.mm_table.bo, in amdgpu_virt_alloc_mm_table()
205 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
206 (void *)&adev->virt.mm_table.cpu_addr); in amdgpu_virt_alloc_mm_table()
212 memset((void *)adev->virt.mm_table.cpu_addr, 0, PAGE_SIZE); in amdgpu_virt_alloc_mm_table()
214 adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
215 adev->virt.mm_table.cpu_addr); in amdgpu_virt_alloc_mm_table()
220 * amdgpu_virt_free_mm_table() - free mm table memory
226 if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr) in amdgpu_virt_free_mm_table()
229 amdgpu_bo_free_kernel(&adev->virt.mm_table.bo, in amdgpu_virt_free_mm_table()
230 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_free_mm_table()
231 (void *)&adev->virt.mm_table.cpu_addr); in amdgpu_virt_free_mm_table()
232 adev->virt.mm_table.gpu_addr = 0; in amdgpu_virt_free_mm_table()
236 * amdgpu_virt_rcvd_ras_interrupt() - receive ras interrupt
243 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_rcvd_ras_interrupt()
245 if (!virt->ops || !virt->ops->rcvd_ras_intr) in amdgpu_virt_rcvd_ras_interrupt()
248 return virt->ops->rcvd_ras_intr(adev); in amdgpu_virt_rcvd_ras_interrupt()
268 ret -= *(pos + i); in amd_sriov_msg_checksum()
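Line 268 is the tail of the PF/VF message checksum. A minimal standalone sketch of the scheme it implies (a byte-wise sum seeded with a shared key, with the stored checksum field's own bytes subtracted back out; names and structure here are illustrative, not the driver's exact code):

    #include <stddef.h>

    static unsigned int sriov_msg_checksum(const void *obj, size_t obj_size,
                                           unsigned int key, unsigned int checksum)
    {
            const unsigned char *pos = obj;
            unsigned int ret = key;
            size_t i;

            /* sum every byte of the message, seeded with the shared key */
            for (i = 0; i < obj_size; i++)
                    ret += pos[i];

            /* subtract the checksum field's own bytes, so the stored value
             * does not perturb the sum it is being compared against */
            pos = (const unsigned char *)&checksum;
            for (i = 0; i < sizeof(checksum); i++)
                    ret -= pos[i];

            return ret;
    }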
274 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_init_ras_err_handler_data()
275 struct amdgpu_virt_ras_err_handler_data **data = &virt->virt_eh_data; in amdgpu_virt_init_ras_err_handler_data()
280 void *bps = NULL; in amdgpu_virt_init_ras_err_handler_data() local
287 bps = kmalloc_array(align_space, sizeof(*(*data)->bps), GFP_KERNEL); in amdgpu_virt_init_ras_err_handler_data()
288 if (!bps) in amdgpu_virt_init_ras_err_handler_data()
291 bps_bo = kmalloc_array(align_space, sizeof(*(*data)->bps_bo), GFP_KERNEL); in amdgpu_virt_init_ras_err_handler_data()
295 (*data)->bps = bps; in amdgpu_virt_init_ras_err_handler_data()
296 (*data)->bps_bo = bps_bo; in amdgpu_virt_init_ras_err_handler_data()
297 (*data)->count = 0; in amdgpu_virt_init_ras_err_handler_data()
298 (*data)->last_reserved = 0; in amdgpu_virt_init_ras_err_handler_data()
300 virt->ras_init_done = true; in amdgpu_virt_init_ras_err_handler_data()
305 kfree(bps); in amdgpu_virt_init_ras_err_handler_data()
309 return -ENOMEM; in amdgpu_virt_init_ras_err_handler_data()
314 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_release_bp()
315 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_release_bp()
322 for (i = data->last_reserved - 1; i >= 0; i--) { in amdgpu_virt_ras_release_bp()
323 bo = data->bps_bo[i]; in amdgpu_virt_ras_release_bp()
326 data->bps_bo[i] = bo; in amdgpu_virt_ras_release_bp()
328 data->last_reserved = i; in amdgpu_virt_ras_release_bp()
334 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_release_ras_err_handler_data()
335 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_release_ras_err_handler_data()
337 virt->ras_init_done = false; in amdgpu_virt_release_ras_err_handler_data()
344 kfree(data->bps); in amdgpu_virt_release_ras_err_handler_data()
345 kfree(data->bps_bo); in amdgpu_virt_release_ras_err_handler_data()
347 virt->virt_eh_data = NULL; in amdgpu_virt_release_ras_err_handler_data()
351 struct eeprom_table_record *bps, int pages) in amdgpu_virt_ras_add_bps() argument
353 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_add_bps()
354 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_add_bps()
359 memcpy(&data->bps[data->count], bps, pages * sizeof(*data->bps)); in amdgpu_virt_ras_add_bps()
360 data->count += pages; in amdgpu_virt_ras_add_bps()
365 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_reserve_bps()
366 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_reserve_bps()
367 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_virt_ras_reserve_bps()
368 struct ttm_resource_manager *man = &mgr->manager; in amdgpu_virt_ras_reserve_bps()
376 for (i = data->last_reserved; i < data->count; i++) { in amdgpu_virt_ras_reserve_bps()
377 bp = data->bps[i].retired_page; in amdgpu_virt_ras_reserve_bps()
380 * 1) a ras bad page has been allocated (used by someone); in amdgpu_virt_ras_reserve_bps()
385 amdgpu_vram_mgr_reserve_range(&adev->mman.vram_mgr, in amdgpu_virt_ras_reserve_bps()
388 data->bps_bo[i] = NULL; in amdgpu_virt_ras_reserve_bps()
394 data->bps_bo[i] = bo; in amdgpu_virt_ras_reserve_bps()
396 data->last_reserved = i + 1; in amdgpu_virt_ras_reserve_bps()
404 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_check_bad_page()
405 struct amdgpu_virt_ras_err_handler_data *data = virt->virt_eh_data; in amdgpu_virt_ras_check_bad_page()
411 for (i = 0; i < data->count; i++) in amdgpu_virt_ras_check_bad_page()
412 if (retired_page == data->bps[i].retired_page) in amdgpu_virt_ras_check_bad_page()
426 if (adev->mman.fw_vram_usage_va) in amdgpu_virt_add_bad_page()
427 vram_usage_va = adev->mman.fw_vram_usage_va; in amdgpu_virt_add_bad_page()
429 vram_usage_va = adev->mman.drv_vram_usage_va; in amdgpu_virt_add_bad_page()
443 amdgpu_virt_ras_add_bps(adev, &bp, 1); in amdgpu_virt_add_bad_page()
452 struct amd_sriov_msg_pf2vf_info_header *pf2vf_info = adev->virt.fw_reserve.p_pf2vf; in amdgpu_virt_read_pf2vf_data()
459 if (adev->virt.fw_reserve.p_pf2vf == NULL) in amdgpu_virt_read_pf2vf_data()
460 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
462 if (pf2vf_info->size > 1024) { in amdgpu_virt_read_pf2vf_data()
463 dev_err(adev->dev, "invalid pf2vf message size: 0x%x\n", pf2vf_info->size); in amdgpu_virt_read_pf2vf_data()
464 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
467 switch (pf2vf_info->version) { in amdgpu_virt_read_pf2vf_data()
468 case 1: in amdgpu_virt_read_pf2vf_data()
469 checksum = ((struct amdgim_pf2vf_info_v1 *)pf2vf_info)->checksum; in amdgpu_virt_read_pf2vf_data()
471 adev->virt.fw_reserve.p_pf2vf, pf2vf_info->size, in amdgpu_virt_read_pf2vf_data()
472 adev->virt.fw_reserve.checksum_key, checksum); in amdgpu_virt_read_pf2vf_data()
474 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
477 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
480 adev->virt.gim_feature = in amdgpu_virt_read_pf2vf_data()
481 ((struct amdgim_pf2vf_info_v1 *)pf2vf_info)->feature_flags; in amdgpu_virt_read_pf2vf_data()
485 checksum = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->checksum; in amdgpu_virt_read_pf2vf_data()
487 adev->virt.fw_reserve.p_pf2vf, pf2vf_info->size, in amdgpu_virt_read_pf2vf_data()
490 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
493 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
496 adev->virt.vf2pf_update_interval_ms = in amdgpu_virt_read_pf2vf_data()
497 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->vf2pf_update_interval_ms; in amdgpu_virt_read_pf2vf_data()
498 adev->virt.gim_feature = in amdgpu_virt_read_pf2vf_data()
499 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->feature_flags.all; in amdgpu_virt_read_pf2vf_data()
500 adev->virt.reg_access = in amdgpu_virt_read_pf2vf_data()
501 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->reg_access_flags.all; in amdgpu_virt_read_pf2vf_data()
503 adev->virt.decode_max_dimension_pixels = 0; in amdgpu_virt_read_pf2vf_data()
504 adev->virt.decode_max_frame_pixels = 0; in amdgpu_virt_read_pf2vf_data()
505 adev->virt.encode_max_dimension_pixels = 0; in amdgpu_virt_read_pf2vf_data()
506 adev->virt.encode_max_frame_pixels = 0; in amdgpu_virt_read_pf2vf_data()
507 adev->virt.is_mm_bw_enabled = false; in amdgpu_virt_read_pf2vf_data()
509 tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].decode_max_dimension_pixels; in amdgpu_virt_read_pf2vf_data()
510 adev->virt.decode_max_dimension_pixels = max(tmp, adev->virt.decode_max_dimension_pixels); in amdgpu_virt_read_pf2vf_data()
512 tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].decode_max_frame_pixels; in amdgpu_virt_read_pf2vf_data()
513 adev->virt.decode_max_frame_pixels = max(tmp, adev->virt.decode_max_frame_pixels); in amdgpu_virt_read_pf2vf_data()
515 tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].encode_max_dimension_pixels; in amdgpu_virt_read_pf2vf_data()
516 adev->virt.encode_max_dimension_pixels = max(tmp, adev->virt.encode_max_dimension_pixels); in amdgpu_virt_read_pf2vf_data()
518 tmp = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->mm_bw_management[i].encode_max_frame_pixels; in amdgpu_virt_read_pf2vf_data()
519 adev->virt.encode_max_frame_pixels = max(tmp, adev->virt.encode_max_frame_pixels); in amdgpu_virt_read_pf2vf_data()
521 if ((adev->virt.decode_max_dimension_pixels > 0) || (adev->virt.encode_max_dimension_pixels > 0)) in amdgpu_virt_read_pf2vf_data()
522 adev->virt.is_mm_bw_enabled = true; in amdgpu_virt_read_pf2vf_data()
524 adev->unique_id = in amdgpu_virt_read_pf2vf_data()
525 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->uuid; in amdgpu_virt_read_pf2vf_data()
526 adev->virt.ras_en_caps.all = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->ras_en_caps.all; in amdgpu_virt_read_pf2vf_data()
527 adev->virt.ras_telemetry_en_caps.all = in amdgpu_virt_read_pf2vf_data()
528 ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->ras_telemetry_en_caps.all; in amdgpu_virt_read_pf2vf_data()
531 dev_err(adev->dev, "invalid pf2vf version: 0x%x\n", pf2vf_info->version); in amdgpu_virt_read_pf2vf_data()
532 return -EINVAL; in amdgpu_virt_read_pf2vf_data()
536 if (adev->virt.vf2pf_update_interval_ms < 200 || adev->virt.vf2pf_update_interval_ms > 10000) in amdgpu_virt_read_pf2vf_data()
537 adev->virt.vf2pf_update_interval_ms = 2000; in amdgpu_virt_read_pf2vf_data()
545 vf2pf_info = (struct amd_sriov_msg_vf2pf_info *) adev->virt.fw_reserve.p_vf2pf; in amdgpu_virt_populate_vf2pf_ucode_info()
547 if (adev->virt.fw_reserve.p_vf2pf == NULL) in amdgpu_virt_populate_vf2pf_ucode_info()
550 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_VCE, adev->vce.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
551 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_UVD, adev->uvd.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
552 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MC, adev->gmc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
553 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_ME, adev->gfx.me_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
554 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_PFP, adev->gfx.pfp_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
555 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_CE, adev->gfx.ce_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
556 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC, adev->gfx.rlc_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
557 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLC, adev->gfx.rlc_srlc_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
558 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLG, adev->gfx.rlc_srlg_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
559 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLS, adev->gfx.rlc_srls_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
560 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MEC, adev->gfx.mec_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
561 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MEC2, adev->gfx.mec2_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
562 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SOS, adev->psp.sos.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
564 adev->psp.asd_context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
566 adev->psp.ras_context.context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
568 adev->psp.xgmi_context.context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
569 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SMC, adev->pm.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
570 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA, adev->sdma.instance[0].fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
571 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA2, adev->sdma.instance[1].fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
572 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_VCN, adev->vcn.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
573 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_DMCU, adev->dm.dmcu_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
580 vf2pf_info = (struct amd_sriov_msg_vf2pf_info *) adev->virt.fw_reserve.p_vf2pf; in amdgpu_virt_write_vf2pf_data()
582 if (adev->virt.fw_reserve.p_vf2pf == NULL) in amdgpu_virt_write_vf2pf_data()
583 return -EINVAL; in amdgpu_virt_write_vf2pf_data()
587 vf2pf_info->header.size = sizeof(struct amd_sriov_msg_vf2pf_info); in amdgpu_virt_write_vf2pf_data()
588 vf2pf_info->header.version = AMD_SRIOV_MSG_FW_VRAM_VF2PF_VER; in amdgpu_virt_write_vf2pf_data()
591 if (THIS_MODULE->version != NULL) in amdgpu_virt_write_vf2pf_data()
592 strcpy(vf2pf_info->driver_version, THIS_MODULE->version); in amdgpu_virt_write_vf2pf_data()
595 strcpy(vf2pf_info->driver_version, "N/A"); in amdgpu_virt_write_vf2pf_data()
597 vf2pf_info->pf2vf_version_required = 0; // no requirement, guest understands all in amdgpu_virt_write_vf2pf_data()
598 vf2pf_info->driver_cert = 0; in amdgpu_virt_write_vf2pf_data()
599 vf2pf_info->os_info.all = 0; in amdgpu_virt_write_vf2pf_data()
601 vf2pf_info->fb_usage = in amdgpu_virt_write_vf2pf_data()
602 ttm_resource_manager_usage(&adev->mman.vram_mgr.manager) >> 20; in amdgpu_virt_write_vf2pf_data()
603 vf2pf_info->fb_vis_usage = in amdgpu_virt_write_vf2pf_data()
604 amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr) >> 20; in amdgpu_virt_write_vf2pf_data()
605 vf2pf_info->fb_size = adev->gmc.real_vram_size >> 20; in amdgpu_virt_write_vf2pf_data()
606 vf2pf_info->fb_vis_size = adev->gmc.visible_vram_size >> 20; in amdgpu_virt_write_vf2pf_data()
611 vf2pf_info->gfx_usage = 0; in amdgpu_virt_write_vf2pf_data()
612 vf2pf_info->compute_usage = 0; in amdgpu_virt_write_vf2pf_data()
613 vf2pf_info->encode_usage = 0; in amdgpu_virt_write_vf2pf_data()
614 vf2pf_info->decode_usage = 0; in amdgpu_virt_write_vf2pf_data()
616 vf2pf_info->dummy_page_addr = (uint64_t)adev->dummy_page_addr; in amdgpu_virt_write_vf2pf_data()
618 vf2pf_info->mes_info_addr = in amdgpu_virt_write_vf2pf_data()
619 (uint64_t)(adev->mes.resource_1_gpu_addr[0] + AMDGPU_GPU_PAGE_SIZE); in amdgpu_virt_write_vf2pf_data()
620 vf2pf_info->mes_info_size = in amdgpu_virt_write_vf2pf_data()
621 adev->mes.resource_1[0]->tbo.base.size - AMDGPU_GPU_PAGE_SIZE; in amdgpu_virt_write_vf2pf_data()
623 vf2pf_info->checksum = in amdgpu_virt_write_vf2pf_data()
637 adev->virt.vf2pf_update_retry_cnt++; in amdgpu_virt_update_vf2pf_work_item()
640 adev->virt.vf2pf_update_retry_cnt >= AMDGPU_VF2PF_UPDATE_MAX_RETRY_LIMIT) && in amdgpu_virt_update_vf2pf_work_item()
644 if (amdgpu_reset_domain_schedule(adev->reset_domain, in amdgpu_virt_update_vf2pf_work_item()
645 &adev->kfd.reset_work)) in amdgpu_virt_update_vf2pf_work_item()
648 dev_err(adev->dev, "Failed to queue work! at %s", __func__); in amdgpu_virt_update_vf2pf_work_item()
654 adev->virt.vf2pf_update_retry_cnt = 0; in amdgpu_virt_update_vf2pf_work_item()
658 schedule_delayed_work(&(adev->virt.vf2pf_work), adev->virt.vf2pf_update_interval_ms); in amdgpu_virt_update_vf2pf_work_item()
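Line 658 shows the work item re-arming itself, which is what turns the vf2pf write into a periodic heartbeat to the host. A minimal sketch of that self-rescheduling pattern, assuming the standard kernel workqueue API (the struct and field names are illustrative, not the driver's):

    #include <linux/workqueue.h>
    #include <linux/jiffies.h>

    struct vf_heartbeat {
            struct delayed_work work;
            unsigned int interval_ms;
    };

    static void vf_heartbeat_fn(struct work_struct *w)
    {
            struct vf_heartbeat *hb =
                    container_of(to_delayed_work(w), struct vf_heartbeat, work);

            /* ... refresh the vf2pf block, bump a retry counter on failure ... */

            /* re-arm: delayed work takes jiffies, so convert from ms */
            schedule_delayed_work(&hb->work, msecs_to_jiffies(hb->interval_ms));
    }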
663 if (adev->virt.vf2pf_update_interval_ms != 0) { in amdgpu_virt_fini_data_exchange()
665 cancel_delayed_work_sync(&adev->virt.vf2pf_work); in amdgpu_virt_fini_data_exchange()
666 adev->virt.vf2pf_update_interval_ms = 0; in amdgpu_virt_fini_data_exchange()
672 adev->virt.fw_reserve.p_pf2vf = NULL; in amdgpu_virt_init_data_exchange()
673 adev->virt.fw_reserve.p_vf2pf = NULL; in amdgpu_virt_init_data_exchange()
674 adev->virt.vf2pf_update_interval_ms = 0; in amdgpu_virt_init_data_exchange()
675 adev->virt.vf2pf_update_retry_cnt = 0; in amdgpu_virt_init_data_exchange()
677 if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
679 } else if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
683 INIT_DELAYED_WORK(&adev->virt.vf2pf_work, amdgpu_virt_update_vf2pf_work_item); in amdgpu_virt_init_data_exchange()
684 schedule_delayed_work(&(adev->virt.vf2pf_work), msecs_to_jiffies(adev->virt.vf2pf_update_interval_ms)); in amdgpu_virt_init_data_exchange()
685 } else if (adev->bios != NULL) { in amdgpu_virt_init_data_exchange()
687 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_init_data_exchange()
689 (adev->bios + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_init_data_exchange()
702 if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) { in amdgpu_virt_exchange_data()
703 if (adev->mman.fw_vram_usage_va) { in amdgpu_virt_exchange_data()
704 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_exchange_data()
706 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
707 adev->virt.fw_reserve.p_vf2pf = in amdgpu_virt_exchange_data()
709 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
710 adev->virt.fw_reserve.ras_telemetry = in amdgpu_virt_exchange_data()
711 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
712 } else if (adev->mman.drv_vram_usage_va) { in amdgpu_virt_exchange_data()
713 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_exchange_data()
715 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
716 adev->virt.fw_reserve.p_vf2pf = in amdgpu_virt_exchange_data()
718 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
719 adev->virt.fw_reserve.ras_telemetry = in amdgpu_virt_exchange_data()
720 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
727 if (adev->virt.fw_reserve.p_pf2vf->version == 2) { in amdgpu_virt_exchange_data()
728 pf2vf_v2 = (struct amd_sriov_msg_pf2vf_info *)adev->virt.fw_reserve.p_pf2vf; in amdgpu_virt_exchange_data()
730 bp_block_offset = ((uint64_t)pf2vf_v2->bp_block_offset_low & 0xFFFFFFFF) | in amdgpu_virt_exchange_data()
731 ((((uint64_t)pf2vf_v2->bp_block_offset_high) << 32) & 0xFFFFFFFF00000000); in amdgpu_virt_exchange_data()
732 bp_block_size = pf2vf_v2->bp_block_size; in amdgpu_virt_exchange_data()
734 if (bp_block_size && !adev->virt.ras_init_done) in amdgpu_virt_exchange_data()
737 if (adev->virt.ras_init_done) in amdgpu_virt_exchange_data()
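Lines 730-731 splice the bad-page block offset back together from the two 32-bit halves carried in the version-2 pf2vf message. The masks there are belt-and-braces; the operation reduces to this (a hypothetical helper, not present in the driver):

    #include <stdint.h>

    static uint64_t bp_block_offset(uint32_t lo, uint32_t hi)
    {
            /* low half occupies bits 0..31, high half bits 32..63 */
            return (uint64_t)lo | ((uint64_t)hi << 32);
    }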
747 switch (adev->asic_type) { in amdgpu_virt_init_detect_asic()
767 if (reg & 1) in amdgpu_virt_init_detect_asic()
768 adev->virt.caps |= AMDGPU_SRIOV_CAPS_IS_VF; in amdgpu_virt_init_detect_asic()
771 adev->virt.caps |= AMDGPU_SRIOV_CAPS_ENABLE_IOV; in amdgpu_virt_init_detect_asic()
776 adev->virt.caps |= AMDGPU_PASSTHROUGH_MODE; in amdgpu_virt_init_detect_asic()
790 switch (adev->asic_type) { in amdgpu_virt_init_req_data()
819 DRM_ERROR("Unknown asic type: %d!\n", adev->asic_type); in amdgpu_virt_init_req_data()
829 ratelimit_state_init(&adev->virt.ras.ras_error_cnt_rs, 5 * HZ, 1); in amdgpu_virt_init_ras()
830 ratelimit_state_init(&adev->virt.ras.ras_cper_dump_rs, 5 * HZ, 1); in amdgpu_virt_init_ras()
832 ratelimit_set_flags(&adev->virt.ras.ras_error_cnt_rs, in amdgpu_virt_init_ras()
834 ratelimit_set_flags(&adev->virt.ras.ras_cper_dump_rs, in amdgpu_virt_init_ras()
837 mutex_init(&adev->virt.ras.ras_telemetry_mutex); in amdgpu_virt_init_ras()
839 adev->virt.ras.cper_rptr = 0; in amdgpu_virt_init_ras()
870 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_enable_access_debugfs()
872 return -EPERM; in amdgpu_virt_enable_access_debugfs()
880 adev->virt.caps |= AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_disable_access_debugfs()
912 adev->gfx.is_poweron = false; in amdgpu_virt_post_reset()
915 adev->mes.ring[0].sched.ready = false; in amdgpu_virt_post_reset()
989 if (!adev->virt.is_mm_bw_enabled) in amdgpu_virt_update_sriov_video_codec()
994 encode[i].max_width = adev->virt.encode_max_dimension_pixels; in amdgpu_virt_update_sriov_video_codec()
995 encode[i].max_pixels_per_frame = adev->virt.encode_max_frame_pixels; in amdgpu_virt_update_sriov_video_codec()
1005 decode[i].max_width = adev->virt.decode_max_dimension_pixels; in amdgpu_virt_update_sriov_video_codec()
1006 decode[i].max_pixels_per_frame = adev->virt.decode_max_frame_pixels; in amdgpu_virt_update_sriov_video_codec()
1017 bool write, u32 *rlcg_flag) in amdgpu_virt_get_rlcg_reg_access_flag() argument
1025 write ? AMDGPU_RLCG_GC_WRITE : AMDGPU_RLCG_GC_READ; in amdgpu_virt_get_rlcg_reg_access_flag()
1030 !(acc_flags & AMDGPU_REGS_NO_KIQ) && write) { in amdgpu_virt_get_rlcg_reg_access_flag()
1037 (acc_flags & AMDGPU_REGS_RLC) && write) { in amdgpu_virt_get_rlcg_reg_access_flag()
1061 if (!adev->gfx.rlc.rlcg_reg_access_supported) { in amdgpu_virt_rlcg_reg_rw()
1062 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1067 if (adev->gfx.xcc_mask && (((1 << xcc_id) & adev->gfx.xcc_mask) == 0)) { in amdgpu_virt_rlcg_reg_rw()
1068 dev_err(adev->dev, "invalid xcc\n"); in amdgpu_virt_rlcg_reg_rw()
1075 reg_access_ctrl = &adev->gfx.rlc.reg_access_ctrl[xcc_id]; in amdgpu_virt_rlcg_reg_rw()
1076 scratch_reg0 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg0; in amdgpu_virt_rlcg_reg_rw()
1077 scratch_reg1 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg1; in amdgpu_virt_rlcg_reg_rw()
1078 scratch_reg2 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg2; in amdgpu_virt_rlcg_reg_rw()
1079 scratch_reg3 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg3; in amdgpu_virt_rlcg_reg_rw()
1081 spin_lock_irqsave(&adev->virt.rlcg_reg_lock, flags); in amdgpu_virt_rlcg_reg_rw()
1083 if (reg_access_ctrl->spare_int) in amdgpu_virt_rlcg_reg_rw()
1084 spare_int = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->spare_int; in amdgpu_virt_rlcg_reg_rw()
1086 if (offset == reg_access_ctrl->grbm_cntl) { in amdgpu_virt_rlcg_reg_rw()
1087 /* if the target reg offset is grbm_cntl, write to scratch_reg2 */ in amdgpu_virt_rlcg_reg_rw()
1090 writel(v, ((void __iomem *)adev->rmmio) + (offset * 4)); in amdgpu_virt_rlcg_reg_rw()
1091 } else if (offset == reg_access_ctrl->grbm_idx) { in amdgpu_virt_rlcg_reg_rw()
1092 /* if the target reg offset is grbm_idx, write to scratch_reg3 */ in amdgpu_virt_rlcg_reg_rw()
1095 writel(v, ((void __iomem *)adev->rmmio) + (offset * 4)); in amdgpu_virt_rlcg_reg_rw()
1098 * SCRATCH_REG0 = read/write value in amdgpu_virt_rlcg_reg_rw()
1105 if (reg_access_ctrl->spare_int) in amdgpu_virt_rlcg_reg_rw()
1106 writel(1, spare_int); in amdgpu_virt_rlcg_reg_rw()
1119 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1122 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1125 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1128 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1132 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1140 spin_unlock_irqrestore(&adev->virt.rlcg_reg_lock, flags); in amdgpu_virt_rlcg_reg_rw()
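Read together, the fragments from lines 1086-1140 describe a mailbox-style handshake with the RLC firmware: the value goes into SCRATCH_REG0, the target offset plus an access flag into SCRATCH_REG1, an optional spare interrupt kicks the firmware, and the caller polls until the flag clears. A hedged sketch of that sequence (the flag value, poll budget, and MMIO plumbing are assumptions, not the driver's exact code):

    #include <stdint.h>

    static uint32_t rlcg_reg_rw(volatile uint32_t *scratch_reg0,
                                volatile uint32_t *scratch_reg1,
                                volatile uint32_t *spare_int,
                                uint32_t offset, uint32_t v, uint32_t flag)
    {
            int timeout = 50000;            /* illustrative poll budget */

            *scratch_reg0 = v;              /* value to write (don't care for reads) */
            *scratch_reg1 = offset | flag;  /* which register, which direction */
            if (spare_int)
                    *spare_int = 1;         /* ring the RLC firmware's doorbell */

            while ((*scratch_reg1 & flag) && --timeout)
                    ;                       /* firmware clears the flag when done */

            return *scratch_reg0;           /* read result comes back here */
    }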
1202 if (adev->virt.ras_en_caps.bits.block_umc) in amdgpu_virt_get_ras_capability()
1203 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__UMC); in amdgpu_virt_get_ras_capability()
1204 if (adev->virt.ras_en_caps.bits.block_sdma) in amdgpu_virt_get_ras_capability()
1205 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SDMA); in amdgpu_virt_get_ras_capability()
1206 if (adev->virt.ras_en_caps.bits.block_gfx) in amdgpu_virt_get_ras_capability()
1207 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__GFX); in amdgpu_virt_get_ras_capability()
1208 if (adev->virt.ras_en_caps.bits.block_mmhub) in amdgpu_virt_get_ras_capability()
1209 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MMHUB); in amdgpu_virt_get_ras_capability()
1210 if (adev->virt.ras_en_caps.bits.block_athub) in amdgpu_virt_get_ras_capability()
1211 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__ATHUB); in amdgpu_virt_get_ras_capability()
1212 if (adev->virt.ras_en_caps.bits.block_pcie_bif) in amdgpu_virt_get_ras_capability()
1213 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__PCIE_BIF); in amdgpu_virt_get_ras_capability()
1214 if (adev->virt.ras_en_caps.bits.block_hdp) in amdgpu_virt_get_ras_capability()
1215 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__HDP); in amdgpu_virt_get_ras_capability()
1216 if (adev->virt.ras_en_caps.bits.block_xgmi_wafl) in amdgpu_virt_get_ras_capability()
1217 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__XGMI_WAFL); in amdgpu_virt_get_ras_capability()
1218 if (adev->virt.ras_en_caps.bits.block_df) in amdgpu_virt_get_ras_capability()
1219 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__DF); in amdgpu_virt_get_ras_capability()
1220 if (adev->virt.ras_en_caps.bits.block_smn) in amdgpu_virt_get_ras_capability()
1221 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SMN); in amdgpu_virt_get_ras_capability()
1222 if (adev->virt.ras_en_caps.bits.block_sem) in amdgpu_virt_get_ras_capability()
1223 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SEM); in amdgpu_virt_get_ras_capability()
1224 if (adev->virt.ras_en_caps.bits.block_mp0) in amdgpu_virt_get_ras_capability()
1225 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MP0); in amdgpu_virt_get_ras_capability()
1226 if (adev->virt.ras_en_caps.bits.block_mp1) in amdgpu_virt_get_ras_capability()
1227 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MP1); in amdgpu_virt_get_ras_capability()
1228 if (adev->virt.ras_en_caps.bits.block_fuse) in amdgpu_virt_get_ras_capability()
1229 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__FUSE); in amdgpu_virt_get_ras_capability()
1230 if (adev->virt.ras_en_caps.bits.block_mca) in amdgpu_virt_get_ras_capability()
1231 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MCA); in amdgpu_virt_get_ras_capability()
1232 if (adev->virt.ras_en_caps.bits.block_vcn) in amdgpu_virt_get_ras_capability()
1233 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__VCN); in amdgpu_virt_get_ras_capability()
1234 if (adev->virt.ras_en_caps.bits.block_jpeg) in amdgpu_virt_get_ras_capability()
1235 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__JPEG); in amdgpu_virt_get_ras_capability()
1236 if (adev->virt.ras_en_caps.bits.block_ih) in amdgpu_virt_get_ras_capability()
1237 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__IH); in amdgpu_virt_get_ras_capability()
1238 if (adev->virt.ras_en_caps.bits.block_mpio) in amdgpu_virt_get_ras_capability()
1239 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MPIO); in amdgpu_virt_get_ras_capability()
1241 if (adev->virt.ras_en_caps.bits.poison_propogation_mode) in amdgpu_virt_get_ras_capability()
1242 con->poison_supported = true; /* Poison is handled by host */ in amdgpu_virt_get_ras_capability()
1301 checksum = host_telemetry->header.checksum; in amdgpu_virt_cache_host_error_counts()
1302 used_size = host_telemetry->header.used_size; in amdgpu_virt_cache_host_error_counts()
1307 tmp = kmemdup(&host_telemetry->body.error_count, used_size, GFP_KERNEL); in amdgpu_virt_cache_host_error_counts()
1309 return -ENOMEM; in amdgpu_virt_cache_host_error_counts()
1314 memcpy(&adev->virt.count_cache, tmp, in amdgpu_virt_cache_host_error_counts()
1315 min(used_size, sizeof(adev->virt.count_cache))); in amdgpu_virt_cache_host_error_counts()
1324 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_req_ras_err_count_internal()
1330 if (__ratelimit(&virt->ras.ras_error_cnt_rs) || force_update) { in amdgpu_virt_req_ras_err_count_internal()
1331 mutex_lock(&virt->ras.ras_telemetry_mutex); in amdgpu_virt_req_ras_err_count_internal()
1332 if (!virt->ops->req_ras_err_count(adev)) in amdgpu_virt_req_ras_err_count_internal()
1334 virt->fw_reserve.ras_telemetry); in amdgpu_virt_req_ras_err_count_internal()
1335 mutex_unlock(&virt->ras.ras_telemetry_mutex); in amdgpu_virt_req_ras_err_count_internal()
1351 return -EOPNOTSUPP; in amdgpu_virt_req_ras_err_count()
1354 if (down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_virt_req_ras_err_count()
1356 up_read(&adev->reset_domain->sem); in amdgpu_virt_req_ras_err_count()
1359 err_data->ue_count = adev->virt.count_cache.block[sriov_block].ue_count; in amdgpu_virt_req_ras_err_count()
1360 err_data->ce_count = adev->virt.count_cache.block[sriov_block].ce_count; in amdgpu_virt_req_ras_err_count()
1361 err_data->de_count = adev->virt.count_cache.block[sriov_block].de_count; in amdgpu_virt_req_ras_err_count()
1373 struct amdgpu_ring *ring = &adev->cper.ring_buf; in amdgpu_virt_write_cpers_to_ring()
1377 checksum = host_telemetry->header.checksum; in amdgpu_virt_write_cpers_to_ring()
1378 used_size = host_telemetry->header.used_size; in amdgpu_virt_write_cpers_to_ring()
1383 cper_dump = kmemdup(&host_telemetry->body.cper_dump, used_size, GFP_KERNEL); in amdgpu_virt_write_cpers_to_ring()
1385 return -ENOMEM; in amdgpu_virt_write_cpers_to_ring()
1390 *more = cper_dump->more; in amdgpu_virt_write_cpers_to_ring()
1392 if (cper_dump->wptr < adev->virt.ras.cper_rptr) { in amdgpu_virt_write_cpers_to_ring()
1394 adev->dev, in amdgpu_virt_write_cpers_to_ring()
1396 adev->virt.ras.cper_rptr, cper_dump->wptr); in amdgpu_virt_write_cpers_to_ring()
1398 adev->virt.ras.cper_rptr = cper_dump->wptr; in amdgpu_virt_write_cpers_to_ring()
1402 entry = (struct cper_hdr *)&cper_dump->buf[0]; in amdgpu_virt_write_cpers_to_ring()
1404 for (i = 0; i < cper_dump->count; i++) { in amdgpu_virt_write_cpers_to_ring()
1405 amdgpu_cper_ring_write(ring, entry, entry->record_length); in amdgpu_virt_write_cpers_to_ring()
1407 entry->record_length); in amdgpu_virt_write_cpers_to_ring()
1410 if (cper_dump->overflow_count) in amdgpu_virt_write_cpers_to_ring()
1411 dev_warn(adev->dev, in amdgpu_virt_write_cpers_to_ring()
1413 cper_dump->overflow_count); in amdgpu_virt_write_cpers_to_ring()
1415 adev->virt.ras.cper_rptr = cper_dump->wptr; in amdgpu_virt_write_cpers_to_ring()
1424 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_req_ras_cper_dump_internal()
1429 return -EOPNOTSUPP; in amdgpu_virt_req_ras_cper_dump_internal()
1432 if (!virt->ops->req_ras_cper_dump(adev, virt->ras.cper_rptr)) in amdgpu_virt_req_ras_cper_dump_internal()
1434 adev, virt->fw_reserve.ras_telemetry, &more); in amdgpu_virt_req_ras_cper_dump_internal()
1444 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_req_ras_cper_dump()
1447 if ((__ratelimit(&virt->ras.ras_cper_dump_rs) || force_update) && in amdgpu_virt_req_ras_cper_dump()
1448 down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_virt_req_ras_cper_dump()
1449 mutex_lock(&virt->ras.ras_telemetry_mutex); in amdgpu_virt_req_ras_cper_dump()
1451 mutex_unlock(&virt->ras.ras_telemetry_mutex); in amdgpu_virt_req_ras_cper_dump()
1452 up_read(&adev->reset_domain->sem); in amdgpu_virt_req_ras_cper_dump()