Lines Matching full:dm
174 * **dm**) sits between DRM and DC. It acts as a liaison, converting DRM
233 static void amdgpu_dm_destroy_drm_device(struct amdgpu_display_manager *dm);
235 static int amdgpu_dm_connector_init(struct amdgpu_display_manager *dm,
253 static void amdgpu_dm_backlight_set_level(struct amdgpu_display_manager *dm,
296 struct dc *dc = adev->dm.dc; in dm_crtc_get_scanoutpos()
403 * dc_update_planes_and_stream function; however, DM might need some
588 adev->dm.freesync_module, in dm_vupdate_high_irq()
593 adev->dm.dc, in dm_vupdate_high_irq()
643 dc_stream_fc_disable_writeback(adev->dm.dc, in dm_crtc_high_irq()
683 mod_freesync_handle_v_update(adev->dm.freesync_module, in dm_crtc_high_irq()
689 dc_stream_adjust_vmin_vmax(adev->dm.dc, in dm_crtc_high_irq()
748 * Copies dmub notification to DM which is to be read by AUX command.
754 if (adev->dm.dmub_notify) in dmub_aux_setconfig_callback()
755 memcpy(adev->dm.dmub_notify, notify, sizeof(struct dmub_notification)); in dmub_aux_setconfig_callback()
757 complete(&adev->dm.dmub_aux_transfer_done); in dmub_aux_setconfig_callback()
787 if (notify->link_index > adev->dm.dc->link_count) { in dmub_hpd_callback()
799 link = adev->dm.dc->links[link_index]; in dmub_hpd_callback()
800 dev = adev->dm.ddev; in dmub_hpd_callback()
866 if (callback != NULL && type < ARRAY_SIZE(adev->dm.dmub_thread_offload)) { in register_dmub_notify_callback()
867 adev->dm.dmub_callback[type] = callback; in register_dmub_notify_callback()
868 adev->dm.dmub_thread_offload[type] = dmub_int_thread_offload; in register_dmub_notify_callback()
886 if (dmub_hpd_wrk->dmub_notify->type < ARRAY_SIZE(dmub_hpd_wrk->adev->dm.dmub_callback)) { in dm_handle_hpd_work()
887 dmub_hpd_wrk->adev->dm.dmub_callback[dmub_hpd_wrk->dmub_notify->type](dmub_hpd_wrk->adev, in dm_handle_hpd_work()
909 struct amdgpu_display_manager *dm = &adev->dm; in dm_dmub_outbox1_low_irq() local
924 if (dc_dmub_srv_get_dmub_outbox0_msg(dm->dc, &entry)) { in dm_dmub_outbox1_low_irq()
940 if (dc_enable_dmub_notifications(adev->dm.dc) && in dm_dmub_outbox1_low_irq()
944 dc_stat_get_dmub_notification(adev->dm.dc, &notify); in dm_dmub_outbox1_low_irq()
945 if (notify.type >= ARRAY_SIZE(dm->dmub_thread_offload)) { in dm_dmub_outbox1_low_irq()
946 DRM_ERROR("DM: notify type %d invalid!", notify.type); in dm_dmub_outbox1_low_irq()
949 if (!dm->dmub_callback[notify.type]) { in dm_dmub_outbox1_low_irq()
954 if (dm->dmub_thread_offload[notify.type] == true) { in dm_dmub_outbox1_low_irq()
969 queue_work(adev->dm.delayed_hpd_wq, &dmub_hpd_wrk->handle_hpd_work); in dm_dmub_outbox1_low_irq()
971 dm->dmub_callback[notify.type](adev, &notify); in dm_dmub_outbox1_low_irq()
996 struct dm_compressor_info *compressor = &adev->dm.compressor; in amdgpu_dm_fbc_init()
1001 if (adev->dm.dc->fbc_compressor == NULL) in amdgpu_dm_fbc_init()
1022 DRM_ERROR("DM: Failed to initialize FBC\n"); in amdgpu_dm_fbc_init()
1024 adev->dm.dc->ctx->fbc_gpu_addr = compressor->gpu_addr; in amdgpu_dm_fbc_init()
1025 DRM_INFO("DM: FBC alloc %lu\n", max_size*4); in amdgpu_dm_fbc_init()
1045 mutex_lock(&adev->dm.audio_lock); in amdgpu_dm_audio_component_get_eld()
1067 mutex_unlock(&adev->dm.audio_lock); in amdgpu_dm_audio_component_get_eld()
1087 adev->dm.audio_component = acomp; in amdgpu_dm_audio_component_bind()
1100 adev->dm.audio_component = NULL; in amdgpu_dm_audio_component_unbind()
1117 adev->mode_info.audio.num_pins = adev->dm.dc->res_pool->audio_count; in amdgpu_dm_audio_init()
1127 adev->dm.dc->res_pool->audios[i]->inst; in amdgpu_dm_audio_init()
1135 adev->dm.audio_registered = true; in amdgpu_dm_audio_init()
1148 if (adev->dm.audio_registered) { in amdgpu_dm_audio_fini()
1150 adev->dm.audio_registered = false; in amdgpu_dm_audio_fini()
1160 struct drm_audio_component *acomp = adev->dm.audio_component; in amdgpu_dm_audio_eld_notify()
1173 struct dmub_srv *dmub_srv = adev->dm.dmub_srv; in dm_dmub_hw_init()
1174 struct dmub_srv_fb_info *fb_info = adev->dm.dmub_fb_info; in dm_dmub_hw_init()
1175 const struct firmware *dmub_fw = adev->dm.dmub_fw; in dm_dmub_hw_init()
1176 struct dmcu *dmcu = adev->dm.dc->res_pool->dmcu; in dm_dmub_hw_init()
1177 struct abm *abm = adev->dm.dc->res_pool->abm; in dm_dmub_hw_init()
1178 struct dc_context *ctx = adev->dm.dc->ctx; in dm_dmub_hw_init()
1290 hw_params.disable_dpia = adev->dm.dc->debug.dpia_debug.bits.disable_dpia; in dm_dmub_hw_init()
1323 if (!adev->dm.dc->ctx->dmub_srv) in dm_dmub_hw_init()
1324 adev->dm.dc->ctx->dmub_srv = dc_dmub_srv_create(adev->dm.dc, dmub_srv); in dm_dmub_hw_init()
1325 if (!adev->dm.dc->ctx->dmub_srv) { in dm_dmub_hw_init()
1331 adev->dm.dmcub_fw_version); in dm_dmub_hw_init()
1341 if (adev->dm.dmcub_fw_version && in dm_dmub_hw_init()
1342 adev->dm.dmcub_fw_version >= DMUB_FW_VERSION(4, 0, 0) && in dm_dmub_hw_init()
1343 adev->dm.dmcub_fw_version < DMUB_FW_VERSION(4, 0, 59)) in dm_dmub_hw_init()
1344 adev->dm.dc->debug.sanity_checks = true; in dm_dmub_hw_init()
1347 if (adev->dm.dmcub_fw_version && in dm_dmub_hw_init()
1348 adev->dm.dmcub_fw_version >= DMUB_FW_VERSION(4, 0, 0) && in dm_dmub_hw_init()
1349 adev->dm.dmcub_fw_version < DMUB_FW_VERSION(8, 0, 16)) in dm_dmub_hw_init()
1350 adev->dm.dc->debug.sanity_checks = true; in dm_dmub_hw_init()
1361 struct dmub_srv *dmub_srv = adev->dm.dmub_srv; in dm_dmub_hw_resume()
1523 mutex_lock(&adev->dm.dc_lock); in dm_handle_hpd_rx_offload_work()
1567 mutex_unlock(&adev->dm.dc_lock); in dm_handle_hpd_rx_offload_work()
1765 static void retrieve_dmi_info(struct amdgpu_display_manager *dm, struct dc_init_data *init_data) in retrieve_dmi_info() argument
1768 struct drm_device *dev = dm->ddev; in retrieve_dmi_info()
1770 dm->aux_hpd_discon_quirk = false; in retrieve_dmi_info()
1779 dm->aux_hpd_discon_quirk = true; in retrieve_dmi_info()
1815 /* add da to list in dm */ in dm_allocate_gpu_mem()
1816 list_add(&da->list, &adev->dm.da_list); in dm_allocate_gpu_mem()
1829 /* walk the da list in DM */ in dm_free_gpu_mem()
1830 list_for_each_entry(da, &adev->dm.da_list, list) { in dm_free_gpu_mem()
1856 cgs_write_register(adev->dm.cgs_device, 0x34c0 + 0x01f8, reg.all); in dm_dmub_send_vbios_gpint_command()
1864 cgs_read_register(adev->dm.cgs_device, 0x34c0 + 0x01f8); in dm_dmub_send_vbios_gpint_command()
1950 adev->dm.ddev = adev_to_drm(adev); in amdgpu_dm_init()
1951 adev->dm.adev = adev; in amdgpu_dm_init()
1957 mutex_init(&adev->dm.dpia_aux_lock); in amdgpu_dm_init()
1958 mutex_init(&adev->dm.dc_lock); in amdgpu_dm_init()
1959 mutex_init(&adev->dm.audio_lock); in amdgpu_dm_init()
1962 DRM_ERROR("amdgpu: failed to initialize DM IRQ support.\n"); in amdgpu_dm_init()
1980 init_data.cgs_device = adev->dm.cgs_device; in amdgpu_dm_init()
1986 switch (adev->dm.dmcub_fw_version) { in amdgpu_dm_init()
2047 drm_dbg(adev->dm.ddev, "Seamless boot requested\n"); in amdgpu_dm_init()
2073 retrieve_dmi_info(&adev->dm, &init_data); in amdgpu_dm_init()
2075 if (adev->dm.bb_from_dmub) in amdgpu_dm_init()
2076 init_data.bb_from_dmub = adev->dm.bb_from_dmub; in amdgpu_dm_init()
2081 adev->dm.dc = dc_create(&init_data); in amdgpu_dm_init()
2083 if (adev->dm.dc) { in amdgpu_dm_init()
2085 dce_version_to_string(adev->dm.dc->ctx->dce_version)); in amdgpu_dm_init()
2092 adev->dm.dc->debug.force_single_disp_pipe_split = false; in amdgpu_dm_init()
2093 adev->dm.dc->debug.pipe_split_policy = MPC_SPLIT_AVOID; in amdgpu_dm_init()
2097 adev->dm.dc->debug.disable_stutter = amdgpu_pp_feature_mask & PP_STUTTER_MODE ? false : true; in amdgpu_dm_init()
2099 adev->dm.dc->debug.disable_stutter = true; in amdgpu_dm_init()
2102 adev->dm.dc->debug.disable_stutter = true; in amdgpu_dm_init()
2105 adev->dm.dc->debug.disable_dsc = true; in amdgpu_dm_init()
2108 adev->dm.dc->debug.disable_clock_gate = true; in amdgpu_dm_init()
2111 adev->dm.dc->debug.force_subvp_mclk_switch = true; in amdgpu_dm_init()
2114 adev->dm.dc->debug.force_disable_subvp = true; in amdgpu_dm_init()
2117 adev->dm.dc->debug.using_dml2 = true; in amdgpu_dm_init()
2118 adev->dm.dc->debug.using_dml21 = true; in amdgpu_dm_init()
2121 adev->dm.dc->debug.visual_confirm = amdgpu_dc_visual_confirm; in amdgpu_dm_init()
2124 adev->dm.dc->debug.ignore_cable_id = true; in amdgpu_dm_init()
2126 if (adev->dm.dc->caps.dp_hdmi21_pcon_support) in amdgpu_dm_init()
2135 dc_hardware_init(adev->dm.dc); in amdgpu_dm_init()
2137 adev->dm.hpd_rx_offload_wq = hpd_rx_irq_create_workqueue(adev->dm.dc); in amdgpu_dm_init()
2138 if (!adev->dm.hpd_rx_offload_wq) { in amdgpu_dm_init()
2149 dc_setup_system_context(adev->dm.dc, &pa_config); in amdgpu_dm_init()
2152 adev->dm.freesync_module = mod_freesync_create(adev->dm.dc); in amdgpu_dm_init()
2153 if (!adev->dm.freesync_module) { in amdgpu_dm_init()
2158 adev->dm.freesync_module); in amdgpu_dm_init()
2162 if (adev->dm.dc->caps.max_links > 0) { in amdgpu_dm_init()
2163 adev->dm.vblank_control_workqueue = in amdgpu_dm_init()
2165 if (!adev->dm.vblank_control_workqueue) in amdgpu_dm_init()
2169 if (adev->dm.dc->caps.ips_support && in amdgpu_dm_init()
2170 adev->dm.dc->config.disable_ips != DMUB_IPS_DISABLE_ALL) in amdgpu_dm_init()
2171 adev->dm.idle_workqueue = idle_create_workqueue(adev); in amdgpu_dm_init()
2173 if (adev->dm.dc->caps.max_links > 0 && adev->family >= AMDGPU_FAMILY_RV) { in amdgpu_dm_init()
2174 adev->dm.hdcp_workqueue = hdcp_create_workqueue(adev, &init_params.cp_psp, adev->dm.dc); in amdgpu_dm_init()
2176 if (!adev->dm.hdcp_workqueue) in amdgpu_dm_init()
2179 DRM_DEBUG_DRIVER("amdgpu: hdcp_workqueue init done %p.\n", adev->dm.hdcp_workqueue); in amdgpu_dm_init()
2181 dc_init_callbacks(adev->dm.dc, &init_params); in amdgpu_dm_init()
2183 if (dc_is_dmub_outbox_supported(adev->dm.dc)) { in amdgpu_dm_init()
2184 init_completion(&adev->dm.dmub_aux_transfer_done); in amdgpu_dm_init()
2185 adev->dm.dmub_notify = kzalloc(sizeof(struct dmub_notification), GFP_KERNEL); in amdgpu_dm_init()
2186 if (!adev->dm.dmub_notify) { in amdgpu_dm_init()
2187 DRM_INFO("amdgpu: failed to allocate adev->dm.dmub_notify"); in amdgpu_dm_init()
2191 adev->dm.delayed_hpd_wq = create_singlethread_workqueue("amdgpu_dm_hpd_wq"); in amdgpu_dm_init()
2192 if (!adev->dm.delayed_hpd_wq) { in amdgpu_dm_init()
2209 dc_enable_dmub_outbox(adev->dm.dc); in amdgpu_dm_init()
2213 dc_dmub_srv_enable_dpia_trace(adev->dm.dc); in amdgpu_dm_init()
2228 adev_to_drm(adev)->mode_config.cursor_width = adev->dm.dc->caps.max_cursor_size; in amdgpu_dm_init()
2229 adev_to_drm(adev)->mode_config.cursor_height = adev->dm.dc->caps.max_cursor_size; in amdgpu_dm_init()
2231 if (drm_vblank_init(adev_to_drm(adev), adev->dm.display_indexes_num)) { in amdgpu_dm_init()
2239 if (!adev->dm.secure_display_ctx.crtc_ctx) in amdgpu_dm_init()
2243 adev->dm.secure_display_ctx.support_mul_roi = true; in amdgpu_dm_init()
2269 if (adev->dm.vblank_control_workqueue) { in amdgpu_dm_fini()
2270 destroy_workqueue(adev->dm.vblank_control_workqueue); in amdgpu_dm_fini()
2271 adev->dm.vblank_control_workqueue = NULL; in amdgpu_dm_fini()
2274 if (adev->dm.idle_workqueue) { in amdgpu_dm_fini()
2275 if (adev->dm.idle_workqueue->running) { in amdgpu_dm_fini()
2276 adev->dm.idle_workqueue->enable = false; in amdgpu_dm_fini()
2277 flush_work(&adev->dm.idle_workqueue->work); in amdgpu_dm_fini()
2280 kfree(adev->dm.idle_workqueue); in amdgpu_dm_fini()
2281 adev->dm.idle_workqueue = NULL; in amdgpu_dm_fini()
2284 amdgpu_dm_destroy_drm_device(&adev->dm); in amdgpu_dm_fini()
2287 if (adev->dm.secure_display_ctx.crtc_ctx) { in amdgpu_dm_fini()
2289 if (adev->dm.secure_display_ctx.crtc_ctx[i].crtc) { in amdgpu_dm_fini()
2290 flush_work(&adev->dm.secure_display_ctx.crtc_ctx[i].notify_ta_work); in amdgpu_dm_fini()
2291 flush_work(&adev->dm.secure_display_ctx.crtc_ctx[i].forward_roi_work); in amdgpu_dm_fini()
2294 kfree(adev->dm.secure_display_ctx.crtc_ctx); in amdgpu_dm_fini()
2295 adev->dm.secure_display_ctx.crtc_ctx = NULL; in amdgpu_dm_fini()
2298 if (adev->dm.hdcp_workqueue) { in amdgpu_dm_fini()
2299 hdcp_destroy(&adev->dev->kobj, adev->dm.hdcp_workqueue); in amdgpu_dm_fini()
2300 adev->dm.hdcp_workqueue = NULL; in amdgpu_dm_fini()
2303 if (adev->dm.dc) { in amdgpu_dm_fini()
2304 dc_deinit_callbacks(adev->dm.dc); in amdgpu_dm_fini()
2305 dc_dmub_srv_destroy(&adev->dm.dc->ctx->dmub_srv); in amdgpu_dm_fini()
2306 if (dc_enable_dmub_notifications(adev->dm.dc)) { in amdgpu_dm_fini()
2307 kfree(adev->dm.dmub_notify); in amdgpu_dm_fini()
2308 adev->dm.dmub_notify = NULL; in amdgpu_dm_fini()
2309 destroy_workqueue(adev->dm.delayed_hpd_wq); in amdgpu_dm_fini()
2310 adev->dm.delayed_hpd_wq = NULL; in amdgpu_dm_fini()
2314 if (adev->dm.dmub_bo) in amdgpu_dm_fini()
2315 amdgpu_bo_free_kernel(&adev->dm.dmub_bo, in amdgpu_dm_fini()
2316 &adev->dm.dmub_bo_gpu_addr, in amdgpu_dm_fini()
2317 &adev->dm.dmub_bo_cpu_addr); in amdgpu_dm_fini()
2319 if (adev->dm.hpd_rx_offload_wq && adev->dm.dc) { in amdgpu_dm_fini()
2320 for (i = 0; i < adev->dm.dc->caps.max_links; i++) { in amdgpu_dm_fini()
2321 if (adev->dm.hpd_rx_offload_wq[i].wq) { in amdgpu_dm_fini()
2322 destroy_workqueue(adev->dm.hpd_rx_offload_wq[i].wq); in amdgpu_dm_fini()
2323 adev->dm.hpd_rx_offload_wq[i].wq = NULL; in amdgpu_dm_fini()
2327 kfree(adev->dm.hpd_rx_offload_wq); in amdgpu_dm_fini()
2328 adev->dm.hpd_rx_offload_wq = NULL; in amdgpu_dm_fini()
2332 if (adev->dm.dc) in amdgpu_dm_fini()
2333 dc_destroy(&adev->dm.dc); in amdgpu_dm_fini()
2340 if (adev->dm.cgs_device) { in amdgpu_dm_fini()
2341 amdgpu_cgs_destroy_device(adev->dm.cgs_device); in amdgpu_dm_fini()
2342 adev->dm.cgs_device = NULL; in amdgpu_dm_fini()
2344 if (adev->dm.freesync_module) { in amdgpu_dm_fini()
2345 mod_freesync_destroy(adev->dm.freesync_module); in amdgpu_dm_fini()
2346 adev->dm.freesync_module = NULL; in amdgpu_dm_fini()
2349 mutex_destroy(&adev->dm.audio_lock); in amdgpu_dm_fini()
2350 mutex_destroy(&adev->dm.dc_lock); in amdgpu_dm_fini()
2351 mutex_destroy(&adev->dm.dpia_aux_lock); in amdgpu_dm_fini()
2425 DRM_DEBUG_KMS("dm: DMCU firmware not supported on direct or SMU loading\n"); in load_dmcu_fw()
2429 r = amdgpu_ucode_request(adev, &adev->dm.fw_dmcu, AMDGPU_UCODE_REQUIRED, in load_dmcu_fw()
2433 DRM_DEBUG_KMS("dm: DMCU firmware not found\n"); in load_dmcu_fw()
2434 adev->dm.fw_dmcu = NULL; in load_dmcu_fw()
2440 amdgpu_ucode_release(&adev->dm.fw_dmcu); in load_dmcu_fw()
2444 hdr = (const struct dmcu_firmware_header_v1_0 *)adev->dm.fw_dmcu->data; in load_dmcu_fw()
2446 adev->firmware.ucode[AMDGPU_UCODE_ID_DMCU_ERAM].fw = adev->dm.fw_dmcu; in load_dmcu_fw()
2451 adev->firmware.ucode[AMDGPU_UCODE_ID_DMCU_INTV].fw = adev->dm.fw_dmcu; in load_dmcu_fw()
2455 adev->dm.dmcu_fw_version = le32_to_cpu(hdr->header.ucode_version); in load_dmcu_fw()
2466 return dm_read_reg(adev->dm.dc->ctx, address); in amdgpu_dm_dmub_reg_read()
2474 return dm_write_reg(adev->dm.dc->ctx, address, value); in amdgpu_dm_dmub_reg_write()
2552 hdr = (const struct dmcub_firmware_header_v1_0 *)adev->dm.dmub_fw->data; in dm_dmub_sw_init()
2553 adev->dm.dmcub_fw_version = le32_to_cpu(hdr->header.ucode_version); in dm_dmub_sw_init()
2559 adev->dm.dmub_fw; in dm_dmub_sw_init()
2564 adev->dm.dmcub_fw_version); in dm_dmub_sw_init()
2568 adev->dm.dmub_srv = kzalloc(sizeof(*adev->dm.dmub_srv), GFP_KERNEL); in dm_dmub_sw_init()
2569 dmub_srv = adev->dm.dmub_srv; in dm_dmub_sw_init()
2597 adev->dm.dmub_fw->data + in dm_dmub_sw_init()
2601 adev->dm.dmub_fw->data + in dm_dmub_sw_init()
2621 &adev->dm.dmub_bo, in dm_dmub_sw_init()
2622 &adev->dm.dmub_bo_gpu_addr, in dm_dmub_sw_init()
2623 &adev->dm.dmub_bo_cpu_addr); in dm_dmub_sw_init()
2629 memory_params.cpu_fb_addr = adev->dm.dmub_bo_cpu_addr; in dm_dmub_sw_init()
2630 memory_params.gpu_fb_addr = adev->dm.dmub_bo_gpu_addr; in dm_dmub_sw_init()
2634 adev->dm.dmub_fb_info = in dm_dmub_sw_init()
2635 kzalloc(sizeof(*adev->dm.dmub_fb_info), GFP_KERNEL); in dm_dmub_sw_init()
2636 fb_info = adev->dm.dmub_fb_info; in dm_dmub_sw_init()
2650 adev->dm.bb_from_dmub = dm_dmub_get_vbios_bounding_box(adev); in dm_dmub_sw_init()
2660 adev->dm.cgs_device = amdgpu_cgs_create_device(adev); in dm_sw_init()
2662 if (!adev->dm.cgs_device) { in dm_sw_init()
2667 /* Moved from dm init since we need to use allocations for storing bounding box data */ in dm_sw_init()
2668 INIT_LIST_HEAD(&adev->dm.da_list); in dm_sw_init()
2682 list_for_each_entry(da, &adev->dm.da_list, list) { in dm_sw_fini()
2683 if (adev->dm.bb_from_dmub == (void *) da->cpu_ptr) { in dm_sw_fini()
2687 adev->dm.bb_from_dmub = NULL; in dm_sw_fini()
2693 kfree(adev->dm.dmub_fb_info); in dm_sw_fini()
2694 adev->dm.dmub_fb_info = NULL; in dm_sw_fini()
2696 if (adev->dm.dmub_srv) { in dm_sw_fini()
2697 dmub_srv_destroy(adev->dm.dmub_srv); in dm_sw_fini()
2698 kfree(adev->dm.dmub_srv); in dm_sw_fini()
2699 adev->dm.dmub_srv = NULL; in dm_sw_fini()
2702 amdgpu_ucode_release(&adev->dm.dmub_fw); in dm_sw_fini()
2703 amdgpu_ucode_release(&adev->dm.fw_dmcu); in dm_sw_fini()
2753 dmcu = adev->dm.dc->res_pool->dmcu; in dm_late_init()
2776 } else if (adev->dm.dc->ctx->dmub_srv) { in dm_late_init()
2780 dc_get_edp_links(adev->dm.dc, edp_links, &edp_num); in dm_late_init()
2782 if (!dmub_init_abm_config(adev->dm.dc->res_pool, params, i)) in dm_late_init()
2978 struct amdgpu_display_manager *dm = &adev->dm; in dm_oem_i2c_hw_init() local
2983 oem_ddc_service = dc_get_oem_i2c_device(adev->dm.dc); in dm_oem_i2c_hw_init()
2997 dm->oem_i2c = oem_i2c; in dm_oem_i2c_hw_init()
3008 * the initializers of each DM component, then populating the struct with them.
3053 kfree(adev->dm.oem_i2c); in dm_hw_fini()
3077 rc = dc_interrupt_set(adev->dm.dc, irq_source, enable) ? 0 : -EBUSY; in dm_gpureset_toggle_interrupts()
3095 if (!dc_interrupt_set(adev->dm.dc, irq_source, enable)) in dm_gpureset_toggle_interrupts()
3143 static void hpd_rx_irq_work_suspend(struct amdgpu_display_manager *dm) in hpd_rx_irq_work_suspend() argument
3147 if (dm->hpd_rx_offload_wq) { in hpd_rx_irq_work_suspend()
3148 for (i = 0; i < dm->dc->caps.max_links; i++) in hpd_rx_irq_work_suspend()
3149 flush_workqueue(dm->hpd_rx_offload_wq[i].wq); in hpd_rx_irq_work_suspend()
3160 WARN_ON(adev->dm.cached_state); in dm_prepare_suspend()
3161 adev->dm.cached_state = drm_atomic_helper_suspend(adev_to_drm(adev)); in dm_prepare_suspend()
3162 if (IS_ERR(adev->dm.cached_state)) in dm_prepare_suspend()
3163 return PTR_ERR(adev->dm.cached_state); in dm_prepare_suspend()
3171 struct amdgpu_display_manager *dm = &adev->dm; in dm_suspend() local
3176 mutex_lock(&dm->dc_lock); in dm_suspend()
3178 dc_allow_idle_optimizations(adev->dm.dc, false); in dm_suspend()
3180 dm->cached_dc_state = dc_state_create_copy(dm->dc->current_state); in dm_suspend()
3182 if (dm->cached_dc_state) in dm_suspend()
3183 dm_gpureset_toggle_interrupts(adev, dm->cached_dc_state, false); in dm_suspend()
3185 res = amdgpu_dm_commit_zero_streams(dm->dc); in dm_suspend()
3193 hpd_rx_irq_work_suspend(dm); in dm_suspend()
3198 if (!adev->dm.cached_state) { in dm_suspend()
3199 adev->dm.cached_state = drm_atomic_helper_suspend(adev_to_drm(adev)); in dm_suspend()
3200 if (IS_ERR(adev->dm.cached_state)) in dm_suspend()
3201 return PTR_ERR(adev->dm.cached_state); in dm_suspend()
3210 hpd_rx_irq_work_suspend(dm); in dm_suspend()
3212 dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D3); in dm_suspend()
3214 if (dm->dc->caps.ips_support && adev->in_s0ix) in dm_suspend()
3215 dc_allow_idle_optimizations(dm->dc, true); in dm_suspend()
3217 dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv, DC_ACPI_CM_POWER_STATE_D3); in dm_suspend()
3325 struct amdgpu_display_manager *dm) in dm_gpureset_commit_state() argument
3339 drm_err(dm->ddev, "Failed to allocate update bundle\n"); in dm_gpureset_commit_state()
3353 update_planes_and_stream_adapter(dm->dc, in dm_gpureset_commit_state()
3384 struct amdgpu_display_manager *dm = &adev->dm; in dm_resume() local
3394 struct dm_atomic_state *dm_state = to_dm_atomic_state(dm->atomic_obj.state); in dm_resume()
3400 if (dm->dc->caps.ips_support) { in dm_resume()
3401 dc_dmub_srv_apply_idle_power_optimizations(dm->dc, false); in dm_resume()
3405 dc_state = dm->cached_dc_state; in dm_resume()
3408 * The dc->current_state is backed up into dm->cached_dc_state in dm_resume()
3422 link_enc_cfg_copy(adev->dm.dc->current_state, dc_state); in dm_resume()
3428 dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv, DC_ACPI_CM_POWER_STATE_D0); in dm_resume()
3429 dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D0); in dm_resume()
3431 dc_resume(dm->dc); in dm_resume()
3443 if (dc_is_dmub_outbox_supported(adev->dm.dc)) { in dm_resume()
3445 dc_enable_dmub_outbox(adev->dm.dc); in dm_resume()
3450 dc_exit_ips_for_hw_access(dm->dc); in dm_resume()
3451 WARN_ON(!dc_commit_streams(dm->dc, &commit_params)); in dm_resume()
3453 dm_gpureset_commit_state(dm->cached_dc_state, dm); in dm_resume()
3455 dm_gpureset_toggle_interrupts(adev, dm->cached_dc_state, true); in dm_resume()
3457 dc_state_release(dm->cached_dc_state); in dm_resume()
3458 dm->cached_dc_state = NULL; in dm_resume()
3462 mutex_unlock(&dm->dc_lock); in dm_resume()
3465 for (i = 0; i < dm->num_of_edps; i++) { in dm_resume()
3466 if (dm->backlight_dev[i]) in dm_resume()
3467 amdgpu_dm_backlight_set_level(dm, i, dm->brightness[i]); in dm_resume()
3474 dm_state->context = dc_state_create(dm->dc, NULL); in dm_resume()
3481 if (dc_is_dmub_outbox_supported(adev->dm.dc)) { in dm_resume()
3483 dc_enable_dmub_outbox(adev->dm.dc); in dm_resume()
3487 dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv, DC_ACPI_CM_POWER_STATE_D0); in dm_resume()
3488 dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D0); in dm_resume()
3491 dc_resume(dm->dc); in dm_resume()
3531 guard(mutex)(&dm->dc_lock); in dm_resume()
3532 dc_exit_ips_for_hw_access(dm->dc); in dm_resume()
3551 for_each_new_crtc_in_state(dm->cached_state, crtc, new_crtc_state, i) { in dm_resume()
3562 for_each_new_crtc_in_state(dm->cached_state, crtc, new_crtc_state, i) { in dm_resume()
3572 for_each_new_plane_in_state(dm->cached_state, plane, new_plane_state, i) { in dm_resume()
3581 drm_atomic_helper_resume(ddev, dm->cached_state); in dm_resume()
3583 dm->cached_state = NULL; in dm_resume()
3611 * DOC: DM Lifecycle
3613 * DM (and consequently DC) is registered in the amdgpu base driver as an IP
3614 * block. When CONFIG_DRM_AMD_DC is enabled, the DM device IP block is added to
3621 .name = "dm",
3682 caps = &adev->dm.backlight_caps[aconnector->bl_idx]; in update_connector_ext_caps()
3868 if (adev->dm.disable_hpd_irq) in handle_hpd_irq_helper()
3877 if (adev->dm.hdcp_workqueue) { in handle_hpd_irq_helper()
3878 hdcp_reset_display(adev->dm.hdcp_workqueue, aconnector->dc_link->link_index); in handle_hpd_irq_helper()
3899 scoped_guard(mutex, &adev->dm.dc_lock) { in handle_hpd_irq_helper()
3959 struct hpd_rx_irq_offload_work_queue *offload_wq = &adev->dm.hpd_rx_offload_wq[idx]; in handle_hpd_rx_irq()
3964 if (adev->dm.disable_hpd_irq) in handle_hpd_rx_irq()
4049 mutex_lock(&adev->dm.dc_lock); in handle_hpd_rx_irq()
4052 mutex_unlock(&adev->dm.dc_lock); in handle_hpd_rx_irq()
4069 if (adev->dm.hdcp_workqueue) in handle_hpd_rx_irq()
4070 hdcp_handle_cpirq(adev->dm.hdcp_workqueue, aconnector->base.index); in handle_hpd_rx_irq()
4090 if (dc_is_dmub_outbox_supported(adev->dm.dc)) { in register_hpd_handlers()
4160 struct dc *dc = adev->dm.dc; in dce60_register_irq_handlers()
4201 c_irq_params = &adev->dm.vblank_params[int_params.irq_source - DC_IRQ_SOURCE_VBLANK1]; in dce60_register_irq_handlers()
4231 c_irq_params = &adev->dm.pflip_params[int_params.irq_source - DC_IRQ_SOURCE_PFLIP_FIRST]; in dce60_register_irq_handlers()
4258 struct dc *dc = adev->dm.dc; in dce110_register_irq_handlers()
4302 c_irq_params = &adev->dm.vblank_params[int_params.irq_source - DC_IRQ_SOURCE_VBLANK1]; in dce110_register_irq_handlers()
4331 c_irq_params = &adev->dm.vupdate_params[int_params.irq_source - DC_IRQ_SOURCE_VUPDATE1]; in dce110_register_irq_handlers()
4361 c_irq_params = &adev->dm.pflip_params[int_params.irq_source - DC_IRQ_SOURCE_PFLIP_FIRST]; in dce110_register_irq_handlers()
4387 struct dc *dc = adev->dm.dc; in dcn10_register_irq_handlers()
4440 c_irq_params = &adev->dm.vblank_params[int_params.irq_source - DC_IRQ_SOURCE_VBLANK1]; in dcn10_register_irq_handlers()
4472 c_irq_params = &adev->dm.vline0_params[int_params.irq_source in dcn10_register_irq_handlers()
4511 c_irq_params = &adev->dm.vupdate_params[int_params.irq_source - DC_IRQ_SOURCE_VUPDATE1]; in dcn10_register_irq_handlers()
4542 c_irq_params = &adev->dm.pflip_params[int_params.irq_source - DC_IRQ_SOURCE_PFLIP_FIRST]; in dcn10_register_irq_handlers()
4567 struct dc *dc = adev->dm.dc; in register_outbox_irq_handlers()
4588 c_irq_params = &adev->dm.dmub_outbox_params[0]; in register_outbox_irq_handlers()
4612 struct amdgpu_display_manager *dm = &adev->dm; in dm_atomic_get_state() local
4618 priv_state = drm_atomic_get_private_obj_state(state, &dm->atomic_obj); in dm_atomic_get_state()
4632 struct amdgpu_display_manager *dm = &adev->dm; in dm_atomic_get_new_state() local
4638 if (obj->funcs == dm->atomic_obj.funcs) in dm_atomic_get_new_state()
4711 state->context = dc_state_create_current_copy(adev->dm.dc); in amdgpu_dm_mode_config_init()
4718 &adev->dm.atomic_obj, in amdgpu_dm_mode_config_init()
4752 static void amdgpu_dm_update_backlight_caps(struct amdgpu_display_manager *dm, in amdgpu_dm_update_backlight_caps() argument
4755 struct amdgpu_dm_backlight_caps *caps = &dm->backlight_caps[bl_idx]; in amdgpu_dm_update_backlight_caps()
4771 DRM_DEBUG_KMS("DM: Invalid backlight caps: min=%d, max=%d\n", in amdgpu_dm_update_backlight_caps()
4866 static void amdgpu_dm_backlight_set_level(struct amdgpu_display_manager *dm, in amdgpu_dm_backlight_set_level() argument
4875 amdgpu_dm_update_backlight_caps(dm, bl_idx); in amdgpu_dm_backlight_set_level()
4876 caps = &dm->backlight_caps[bl_idx]; in amdgpu_dm_backlight_set_level()
4878 dm->brightness[bl_idx] = user_brightness; in amdgpu_dm_backlight_set_level()
4881 amdgpu_atombios_scratch_regs_set_backlight_level(dm->adev, dm->brightness[bl_idx]); in amdgpu_dm_backlight_set_level()
4882 brightness = convert_brightness_from_user(caps, dm->brightness[bl_idx]); in amdgpu_dm_backlight_set_level()
4883 link = (struct dc_link *)dm->backlight_link[bl_idx]; in amdgpu_dm_backlight_set_level()
4886 mutex_lock(&dm->dc_lock); in amdgpu_dm_backlight_set_level()
4887 if (dm->dc->caps.ips_support && dm->dc->ctx->dmub_srv->idle_allowed) { in amdgpu_dm_backlight_set_level()
4888 dc_allow_idle_optimizations(dm->dc, false); in amdgpu_dm_backlight_set_level()
4896 DRM_DEBUG("DM: Failed to update backlight via AUX on eDP[%d]\n", bl_idx); in amdgpu_dm_backlight_set_level()
4905 DRM_DEBUG("DM: Failed to update backlight on eDP[%d]\n", bl_idx); in amdgpu_dm_backlight_set_level()
4908 if (dm->dc->caps.ips_support && reallow_idle) in amdgpu_dm_backlight_set_level()
4909 dc_allow_idle_optimizations(dm->dc, true); in amdgpu_dm_backlight_set_level()
4911 mutex_unlock(&dm->dc_lock); in amdgpu_dm_backlight_set_level()
4914 dm->actual_brightness[bl_idx] = user_brightness; in amdgpu_dm_backlight_set_level()
4919 struct amdgpu_display_manager *dm = bl_get_data(bd); in amdgpu_dm_backlight_update_status() local
4922 for (i = 0; i < dm->num_of_edps; i++) { in amdgpu_dm_backlight_update_status()
4923 if (bd == dm->backlight_dev[i]) in amdgpu_dm_backlight_update_status()
4928 amdgpu_dm_backlight_set_level(dm, i, bd->props.brightness); in amdgpu_dm_backlight_update_status()
4933 static u32 amdgpu_dm_backlight_get_level(struct amdgpu_display_manager *dm, in amdgpu_dm_backlight_get_level() argument
4938 struct dc_link *link = (struct dc_link *)dm->backlight_link[bl_idx]; in amdgpu_dm_backlight_get_level()
4940 amdgpu_dm_update_backlight_caps(dm, bl_idx); in amdgpu_dm_backlight_get_level()
4941 caps = dm->backlight_caps[bl_idx]; in amdgpu_dm_backlight_get_level()
4949 return dm->brightness[bl_idx]; in amdgpu_dm_backlight_get_level()
4956 return dm->brightness[bl_idx]; in amdgpu_dm_backlight_get_level()
4963 struct amdgpu_display_manager *dm = bl_get_data(bd); in amdgpu_dm_backlight_get_brightness() local
4966 for (i = 0; i < dm->num_of_edps; i++) { in amdgpu_dm_backlight_get_brightness()
4967 if (bd == dm->backlight_dev[i]) in amdgpu_dm_backlight_get_brightness()
4972 return amdgpu_dm_backlight_get_level(dm, i); in amdgpu_dm_backlight_get_brightness()
4985 struct amdgpu_display_manager *dm = &drm_to_adev(drm)->dm; in amdgpu_dm_register_backlight_device() local
4994 drm_info(drm, "Skipping amdgpu DM backlight registration\n"); in amdgpu_dm_register_backlight_device()
5017 dm->backlight_dev[aconnector->bl_idx] = in amdgpu_dm_register_backlight_device()
5018 backlight_device_register(bl_name, aconnector->base.kdev, dm, in amdgpu_dm_register_backlight_device()
5020 dm->brightness[aconnector->bl_idx] = props.brightness; in amdgpu_dm_register_backlight_device()
5022 if (IS_ERR(dm->backlight_dev[aconnector->bl_idx])) { in amdgpu_dm_register_backlight_device()
5023 DRM_ERROR("DM: Backlight registration failed!\n"); in amdgpu_dm_register_backlight_device()
5024 dm->backlight_dev[aconnector->bl_idx] = NULL; in amdgpu_dm_register_backlight_device()
5026 DRM_DEBUG_DRIVER("DM: Registered Backlight device: %s\n", bl_name); in amdgpu_dm_register_backlight_device()
5029 static int initialize_plane(struct amdgpu_display_manager *dm, in initialize_plane() argument
5052 if (plane_id >= dm->dc->caps.max_streams) in initialize_plane()
5055 ret = amdgpu_dm_plane_init(dm, plane, possible_crtcs, plane_cap); in initialize_plane()
5070 static void setup_backlight_device(struct amdgpu_display_manager *dm, in setup_backlight_device() argument
5074 int bl_idx = dm->num_of_edps; in setup_backlight_device()
5080 if (dm->num_of_edps >= AMDGPU_DM_MAX_NUM_EDP) { in setup_backlight_device()
5081 …drm_warn(adev_to_drm(dm->adev), "Too much eDP connections, skipping backlight setup for additional… in setup_backlight_device()
5087 amdgpu_dm_update_backlight_caps(dm, bl_idx); in setup_backlight_device()
5088 dm->backlight_link[bl_idx] = link; in setup_backlight_device()
5089 dm->num_of_edps++; in setup_backlight_device()
5106 struct amdgpu_display_manager *dm = &adev->dm; in amdgpu_dm_initialize_drm_device() local
5117 int max_overlay = dm->dc->caps.max_slave_planes; in amdgpu_dm_initialize_drm_device()
5119 dm->display_indexes_num = dm->dc->caps.max_streams; in amdgpu_dm_initialize_drm_device()
5121 adev->mode_info.num_crtc = adev->dm.display_indexes_num; in amdgpu_dm_initialize_drm_device()
5125 link_cnt = dm->dc->caps.max_links; in amdgpu_dm_initialize_drm_device()
5126 if (amdgpu_dm_mode_config_init(dm->adev)) { in amdgpu_dm_initialize_drm_device()
5127 DRM_ERROR("DM: Failed to initialize mode config\n"); in amdgpu_dm_initialize_drm_device()
5132 primary_planes = dm->dc->caps.max_streams; in amdgpu_dm_initialize_drm_device()
5134 DRM_ERROR("DM: Plane nums out of 6 planes\n"); in amdgpu_dm_initialize_drm_device()
5143 plane = &dm->dc->caps.planes[i]; in amdgpu_dm_initialize_drm_device()
5145 if (initialize_plane(dm, mode_info, i, in amdgpu_dm_initialize_drm_device()
5161 for (i = 0; i < dm->dc->caps.max_planes; ++i) { in amdgpu_dm_initialize_drm_device()
5162 struct dc_plane_cap *plane = &dm->dc->caps.planes[i]; in amdgpu_dm_initialize_drm_device()
5177 if (initialize_plane(dm, NULL, primary_planes + i, in amdgpu_dm_initialize_drm_device()
5184 for (i = 0; i < dm->dc->caps.max_streams; i++) in amdgpu_dm_initialize_drm_device()
5185 if (amdgpu_dm_crtc_init(dm, mode_info->planes[i], i)) { in amdgpu_dm_initialize_drm_device()
5205 if (register_outbox_irq_handlers(dm->adev)) { in amdgpu_dm_initialize_drm_device()
5206 DRM_ERROR("DM: Failed to initialize IRQ\n"); in amdgpu_dm_initialize_drm_device()
5266 link = dc_get_link_at_index(dm->dc, i); in amdgpu_dm_initialize_drm_device()
5276 if (amdgpu_dm_wb_connector_init(dm, wbcon, i)) { in amdgpu_dm_initialize_drm_device()
5296 if (amdgpu_dm_encoder_init(dm->ddev, aencoder, i)) { in amdgpu_dm_initialize_drm_device()
5301 if (amdgpu_dm_connector_init(dm, aconnector, i, aencoder)) { in amdgpu_dm_initialize_drm_device()
5306 if (dm->hpd_rx_offload_wq) in amdgpu_dm_initialize_drm_device()
5307 dm->hpd_rx_offload_wq[aconnector->base.index].aconnector = in amdgpu_dm_initialize_drm_device()
5319 mutex_lock(&dm->dc_lock); in amdgpu_dm_initialize_drm_device()
5320 dc_exit_ips_for_hw_access(dm->dc); in amdgpu_dm_initialize_drm_device()
5322 mutex_unlock(&dm->dc_lock); in amdgpu_dm_initialize_drm_device()
5326 setup_backlight_device(dm, aconnector); in amdgpu_dm_initialize_drm_device()
5347 if (dce60_register_irq_handlers(dm->adev)) { in amdgpu_dm_initialize_drm_device()
5348 DRM_ERROR("DM: Failed to initialize IRQ\n"); in amdgpu_dm_initialize_drm_device()
5369 if (dce110_register_irq_handlers(dm->adev)) { in amdgpu_dm_initialize_drm_device()
5370 DRM_ERROR("DM: Failed to initialize IRQ\n"); in amdgpu_dm_initialize_drm_device()
5397 if (dcn10_register_irq_handlers(dm->adev)) { in amdgpu_dm_initialize_drm_device()
5398 DRM_ERROR("DM: Failed to initialize IRQ\n"); in amdgpu_dm_initialize_drm_device()
5418 static void amdgpu_dm_destroy_drm_device(struct amdgpu_display_manager *dm) in amdgpu_dm_destroy_drm_device() argument
5420 drm_atomic_private_obj_fini(&dm->atomic_obj); in amdgpu_dm_destroy_drm_device()
5548 r = amdgpu_ucode_request(adev, &adev->dm.dmub_fw, AMDGPU_UCODE_REQUIRED, in dm_init_microcode()
5561 /* if there is no object header, skip DM */ in dm_early_init()
5564 dev_info(adev->dev, "No object header, skipping DM\n"); in dm_early_init()
7266 struct amdgpu_display_manager *dm = &drm_to_adev(drm)->dm; in amdgpu_dm_should_create_sysfs() local
7269 caps = &dm->backlight_caps[amdgpu_dm_connector->bl_idx]; in amdgpu_dm_should_create_sysfs()
7292 struct amdgpu_display_manager *dm = &adev->dm; in amdgpu_dm_connector_destroy() local
7302 backlight_device_unregister(dm->backlight_dev[aconnector->bl_idx]); in amdgpu_dm_connector_destroy()
7303 dm->backlight_dev[aconnector->bl_idx] = NULL; in amdgpu_dm_connector_destroy()
7626 dc_result = dc_validate_stream(adev->dm.dc, stream); in create_validate_stream_for_sink()
7635 dc_result = dm_validate_stream_and_context(adev->dm.dc, stream); in create_validate_stream_for_sink()
8377 void amdgpu_dm_connector_init_helper(struct amdgpu_display_manager *dm, in amdgpu_dm_connector_init_helper() argument
8383 struct amdgpu_device *adev = drm_to_adev(dm->ddev); in amdgpu_dm_connector_init_helper()
8433 dm->ddev->mode_config.scaling_mode_property, in amdgpu_dm_connector_init_helper()
8478 if (adev->dm.hdcp_workqueue) in amdgpu_dm_connector_init_helper()
8551 snprintf(i2c->base.name, sizeof(i2c->base.name), "AMDGPU DM i2c OEM bus"); in create_i2c()
8553 snprintf(i2c->base.name, sizeof(i2c->base.name), "AMDGPU DM i2c hw bus %d", in create_i2c()
8588 static int amdgpu_dm_connector_init(struct amdgpu_display_manager *dm, in amdgpu_dm_connector_init() argument
8595 struct dc *dc = dm->dc; in amdgpu_dm_connector_init()
8620 dm->ddev, in amdgpu_dm_connector_init()
8637 dm, in amdgpu_dm_connector_init()
8652 amdgpu_dm_initialize_dp_connector(dm, aconnector, link->link_index); in amdgpu_dm_connector_init()
8947 struct amdgpu_display_manager *dm, in update_freesync_state_on_stream() argument
8955 struct amdgpu_device *adev = dm->adev; in update_freesync_state_on_stream()
8978 dm->freesync_module, in update_freesync_state_on_stream()
8986 mod_freesync_handle_v_update(dm->freesync_module, in update_freesync_state_on_stream()
8990 dc_stream_adjust_vmin_vmax(dm->dc, in update_freesync_state_on_stream()
9013 dm->freesync_module, in update_freesync_state_on_stream()
9042 struct amdgpu_display_manager *dm, in update_stream_irq_parameters() argument
9048 struct amdgpu_device *adev = dm->adev; in update_stream_irq_parameters()
9088 mod_freesync_build_vrr_params(dm->freesync_module, in update_stream_irq_parameters()
9093 /* Copy state for access from DM IRQ handler */ in update_stream_irq_parameters()
9203 adev->dm.dc->caps.color.dpp.gamma_corr) in amdgpu_dm_update_cursor()
9278 struct amdgpu_display_manager *dm, in amdgpu_dm_commit_planes() argument
9355 if (amdgpu_ip_version(dm->adev, DCE_HWIP, 0) != 0) in amdgpu_dm_commit_planes()
9384 amdgpu_dm_plane_fill_dc_scaling_info(dm->adev, new_plane_state, in amdgpu_dm_commit_planes()
9400 dm->adev, new_plane_state, in amdgpu_dm_commit_planes()
9434 mutex_lock(&dm->dc_lock); in amdgpu_dm_commit_planes()
9439 mutex_unlock(&dm->dc_lock); in amdgpu_dm_commit_planes()
9476 dm, in amdgpu_dm_commit_planes()
9521 (amdgpu_display_get_crtc_scanoutpos(dm->ddev, acrtc_attach->crtc_id, in amdgpu_dm_commit_planes()
9573 if (dm->vblank_control_workqueue) in amdgpu_dm_commit_planes()
9574 flush_workqueue(dm->vblank_control_workqueue); in amdgpu_dm_commit_planes()
9603 mutex_lock(&dm->dc_lock); in amdgpu_dm_commit_planes()
9610 mutex_unlock(&dm->dc_lock); in amdgpu_dm_commit_planes()
9620 dm->dc, acrtc_state->stream, in amdgpu_dm_commit_planes()
9624 mutex_lock(&dm->dc_lock); in amdgpu_dm_commit_planes()
9625 update_planes_and_stream_adapter(dm->dc, in amdgpu_dm_commit_planes()
9651 mutex_unlock(&dm->dc_lock); in amdgpu_dm_commit_planes()
9660 (!updated_planes_and_streams || amdgpu_ip_version(dm->adev, DCE_HWIP, 0) == 0) && in amdgpu_dm_commit_planes()
9705 mutex_lock(&adev->dm.audio_lock); in amdgpu_dm_commit_audio()
9708 mutex_unlock(&adev->dm.audio_lock); in amdgpu_dm_commit_audio()
9740 mutex_lock(&adev->dm.audio_lock); in amdgpu_dm_commit_audio()
9743 mutex_unlock(&adev->dm.audio_lock); in amdgpu_dm_commit_audio()
9763 static void dm_clear_writeback(struct amdgpu_display_manager *dm, in dm_clear_writeback() argument
9766 dc_stream_remove_writeback(dm->dc, crtc_state->stream, 0); in dm_clear_writeback()
9774 struct amdgpu_display_manager *dm = &adev->dm; in amdgpu_dm_commit_streams() local
9808 dm_clear_writeback(dm, dm_old_crtc_state); in amdgpu_dm_commit_streams()
9850 mutex_lock(&dm->dc_lock); in amdgpu_dm_commit_streams()
9851 dc_exit_ips_for_hw_access(dm->dc); in amdgpu_dm_commit_streams()
9853 mutex_unlock(&dm->dc_lock); in amdgpu_dm_commit_streams()
9918 if (dm->vblank_control_workqueue) in amdgpu_dm_commit_streams()
9919 flush_workqueue(dm->vblank_control_workqueue); in amdgpu_dm_commit_streams()
9921 amdgpu_dm_replay_disable_all(dm); in amdgpu_dm_commit_streams()
9922 amdgpu_dm_psr_disable_all(dm); in amdgpu_dm_commit_streams()
9926 mutex_lock(&dm->dc_lock); in amdgpu_dm_commit_streams()
9927 dc_exit_ips_for_hw_access(dm->dc); in amdgpu_dm_commit_streams()
9928 WARN_ON(!dc_commit_streams(dm->dc, &params)); in amdgpu_dm_commit_streams()
9931 if ((dm->active_vblank_irq_count == 0) && amdgpu_dm_is_headless(dm->adev)) in amdgpu_dm_commit_streams()
9932 dc_allow_idle_optimizations(dm->dc, true); in amdgpu_dm_commit_streams()
9933 mutex_unlock(&dm->dc_lock); in amdgpu_dm_commit_streams()
9958 * It will cause the dm->actual_brightness is not the current panel brightness in amdgpu_dm_commit_streams()
9959 * level. (the dm->brightness is the correct panel level) in amdgpu_dm_commit_streams()
9960 * So we set the backlight level with dm->brightness value after set mode in amdgpu_dm_commit_streams()
9963 for (i = 0; i < dm->num_of_edps; i++) { in amdgpu_dm_commit_streams()
9964 if (dm->backlight_dev[i]) in amdgpu_dm_commit_streams()
9965 amdgpu_dm_backlight_set_level(dm, i, dm->brightness[i]); in amdgpu_dm_commit_streams()
9970 static void dm_set_writeback(struct amdgpu_display_manager *dm, in dm_set_writeback() argument
9976 struct amdgpu_device *adev = dm->adev; in dm_set_writeback()
10004 if (dm->dc->current_state->res_ctx.pipe_ctx[i].stream == crtc_state->stream) { in dm_set_writeback()
10005 pipe = &dm->dc->current_state->res_ctx.pipe_ctx[i]; in dm_set_writeback()
10062 dc_stream_add_writeback(dm->dc, crtc_state->stream, wb_info); in dm_set_writeback()
10070 * amdgpu_dm_atomic_commit_tail() - AMDgpu DM's commit tail implementation.
10081 struct amdgpu_display_manager *dm = &adev->dm; in amdgpu_dm_atomic_commit_tail() local
10115 if (!adev->dm.hdcp_workqueue) in amdgpu_dm_atomic_commit_tail()
10166 if (!adev->dm.hdcp_workqueue) in amdgpu_dm_atomic_commit_tail()
10181 hdcp_reset_display(adev->dm.hdcp_workqueue, aconnector->dc_link->link_index); in amdgpu_dm_atomic_commit_tail()
10188 old_con_state, connector, adev->dm.hdcp_workqueue)) { in amdgpu_dm_atomic_commit_tail()
10205 struct hdcp_workqueue *hdcp_work = adev->dm.hdcp_workqueue; in amdgpu_dm_atomic_commit_tail()
10223 adev->dm.hdcp_workqueue, aconnector->dc_link->link_index, aconnector, in amdgpu_dm_atomic_commit_tail()
10320 mutex_lock(&dm->dc_lock); in amdgpu_dm_atomic_commit_tail()
10321 dc_exit_ips_for_hw_access(dm->dc); in amdgpu_dm_atomic_commit_tail()
10322 dc_update_planes_and_stream(dm->dc, in amdgpu_dm_atomic_commit_tail()
10327 mutex_unlock(&dm->dc_lock); in amdgpu_dm_atomic_commit_tail()
10350 update_stream_irq_parameters(dm, dm_new_crtc_state); in amdgpu_dm_atomic_commit_tail()
10412 amdgpu_dm_commit_planes(state, dev, dm, crtc, wait_for_vblank); in amdgpu_dm_atomic_commit_tail()
10436 dm_set_writeback(dm, dm_new_crtc_state, connector, new_con_state); in amdgpu_dm_atomic_commit_tail()
10444 for (i = 0; i < dm->num_of_edps; i++) { in amdgpu_dm_atomic_commit_tail()
10445 if (dm->backlight_dev[i] && in amdgpu_dm_atomic_commit_tail()
10446 (dm->actual_brightness[i] != dm->brightness[i])) in amdgpu_dm_atomic_commit_tail()
10447 amdgpu_dm_backlight_set_level(dm, i, dm->brightness[i]); in amdgpu_dm_atomic_commit_tail()
10735 static int dm_update_crtc_state(struct amdgpu_display_manager *dm, in dm_update_crtc_state() argument
10810 dm->force_timing_sync; in dm_update_crtc_state()
10901 dm->dc, in dm_update_crtc_state()
10944 dm->dc, in dm_update_crtc_state()
11762 * amdgpu_dm_atomic_check() - Atomic check implementation for AMDgpu DM.
11779 * Note that DM adds the affected connectors for all CRTCs in state, when that
11791 struct dc *dc = adev->dm.dc; in amdgpu_dm_atomic_check()
11973 ret = dm_update_crtc_state(&adev->dm, state, crtc, in amdgpu_dm_atomic_check()
11986 ret = dm_update_crtc_state(&adev->dm, state, crtc, in amdgpu_dm_atomic_check()
12152 * TODO: Remove this stall and drop DM state private objects. in amdgpu_dm_atomic_check()
12208 * the DM atomic state from validation we need to free it and in amdgpu_dm_atomic_check()
12211 * Furthermore, since the DM atomic state only contains the DC in amdgpu_dm_atomic_check()
12220 if (obj->funcs == adev->dm.atomic_obj.funcs) { in amdgpu_dm_atomic_check()
12288 static bool dm_edid_parser_send_cea(struct amdgpu_display_manager *dm, in dm_edid_parser_send_cea() argument
12316 res = dc_wake_and_execute_dmub_cmd(dm->dc->ctx, &cmd, DM_DMUB_WAIT_TYPE_WAIT_WITH_REPLY); in dm_edid_parser_send_cea()
12345 static bool parse_edid_cea_dmcu(struct amdgpu_display_manager *dm, in parse_edid_cea_dmcu() argument
12357 if (!dc_edid_parser_send_cea(dm->dc, i, len, &edid_ext[i], 8)) in parse_edid_cea_dmcu()
12364 res = dc_edid_parser_recv_amd_vsdb(dm->dc, &version, &min_rate, &max_rate); in parse_edid_cea_dmcu()
12378 res = dc_edid_parser_recv_cea_ack(dm->dc, &offset); in parse_edid_cea_dmcu()
12386 static bool parse_edid_cea_dmub(struct amdgpu_display_manager *dm, in parse_edid_cea_dmub() argument
12395 if (!dm_edid_parser_send_cea(dm, i, len, &edid_ext[i], 8, vsdb_info)) in parse_edid_cea_dmub()
12409 mutex_lock(&adev->dm.dc_lock); in parse_edid_cea()
12410 if (adev->dm.dmub_srv) in parse_edid_cea()
12411 ret = parse_edid_cea_dmub(&adev->dm, edid_ext, len, vsdb_info); in parse_edid_cea()
12413 ret = parse_edid_cea_dmcu(&adev->dm, edid_ext, len, vsdb_info); in parse_edid_cea()
12414 mutex_unlock(&adev->dm.dc_lock); in parse_edid_cea()
12576 if (!adev->dm.freesync_module) in amdgpu_dm_update_freesync_caps()
12656 struct dc *dc = adev->dm.dc; in amdgpu_dm_trigger_timing_sync()
12659 mutex_lock(&adev->dm.dc_lock); in amdgpu_dm_trigger_timing_sync()
12664 adev->dm.force_timing_sync; in amdgpu_dm_trigger_timing_sync()
12669 mutex_unlock(&adev->dm.dc_lock); in amdgpu_dm_trigger_timing_sync()
12729 struct dmub_notification *p_notify = adev->dm.dmub_notify; in amdgpu_dm_process_dmub_aux_transfer_sync()
12732 mutex_lock(&adev->dm.dpia_aux_lock); in amdgpu_dm_process_dmub_aux_transfer_sync()
12738 if (!wait_for_completion_timeout(&adev->dm.dmub_aux_transfer_done, 10 * HZ)) { in amdgpu_dm_process_dmub_aux_transfer_sync()
12758 payload->reply[0] = adev->dm.dmub_notify->aux_reply.command & 0xF; in amdgpu_dm_process_dmub_aux_transfer_sync()
12759 if (adev->dm.dmub_notify->aux_reply.command & 0xF0) in amdgpu_dm_process_dmub_aux_transfer_sync()
12761 payload->reply[0] = (adev->dm.dmub_notify->aux_reply.command >> 4) & 0xF; in amdgpu_dm_process_dmub_aux_transfer_sync()
12772 reinit_completion(&adev->dm.dmub_aux_transfer_done); in amdgpu_dm_process_dmub_aux_transfer_sync()
12773 mutex_unlock(&adev->dm.dpia_aux_lock); in amdgpu_dm_process_dmub_aux_transfer_sync()
12787 mutex_lock(&adev->dm.dpia_aux_lock); in amdgpu_dm_process_dmub_set_config_sync()
12789 link_index, payload, adev->dm.dmub_notify); in amdgpu_dm_process_dmub_set_config_sync()
12791 if (is_cmd_complete || wait_for_completion_timeout(&adev->dm.dmub_aux_transfer_done, 10 * HZ)) { in amdgpu_dm_process_dmub_set_config_sync()
12793 *operation_result = adev->dm.dmub_notify->sc_status; in amdgpu_dm_process_dmub_set_config_sync()
12801 reinit_completion(&adev->dm.dmub_aux_transfer_done); in amdgpu_dm_process_dmub_set_config_sync()
12802 mutex_unlock(&adev->dm.dpia_aux_lock); in amdgpu_dm_process_dmub_set_config_sync()