Lines matching "needs-hpd" (full-text search) in drivers/gpu/drm/amd/amdgpu/dce_v8_0.c

82 	(0x13830 - 0x7030) >> 2,
89 uint32_t hpd; member
95 .hpd = DISP_INTERRUPT_STATUS__DC_HPD1_INTERRUPT_MASK
100 .hpd = DISP_INTERRUPT_STATUS_CONTINUE__DC_HPD2_INTERRUPT_MASK
105 .hpd = DISP_INTERRUPT_STATUS_CONTINUE2__DC_HPD3_INTERRUPT_MASK
110 .hpd = DISP_INTERRUPT_STATUS_CONTINUE3__DC_HPD4_INTERRUPT_MASK
115 .hpd = DISP_INTERRUPT_STATUS_CONTINUE4__DC_HPD5_INTERRUPT_MASK
120 .hpd = DISP_INTERRUPT_STATUS_CONTINUE5__DC_HPD6_INTERRUPT_MASK
129 spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_rreg()
132 spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_rreg()
142 spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_wreg()
145 spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_wreg()
150 if (crtc >= adev->mode_info.num_crtc) in dce_v8_0_vblank_get_counter()
161 for (i = 0; i < adev->mode_info.num_crtc; i++) in dce_v8_0_pageflip_interrupt_init()
162 amdgpu_irq_get(adev, &adev->pageflip_irq, i); in dce_v8_0_pageflip_interrupt_init()
170 for (i = 0; i < adev->mode_info.num_crtc; i++) in dce_v8_0_pageflip_interrupt_fini()
171 amdgpu_irq_put(adev, &adev->pageflip_irq, i); in dce_v8_0_pageflip_interrupt_fini()
175 * dce_v8_0_page_flip - pageflip callback.
188 struct amdgpu_crtc *amdgpu_crtc = adev->mode_info.crtcs[crtc_id]; in dce_v8_0_page_flip()
189 struct drm_framebuffer *fb = amdgpu_crtc->base.primary->fb; in dce_v8_0_page_flip()
192 WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, async ? in dce_v8_0_page_flip()
195 WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, in dce_v8_0_page_flip()
196 fb->pitches[0] / fb->format->cpp[0]); in dce_v8_0_page_flip()
198 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_page_flip()
201 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_page_flip()
204 RREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset); in dce_v8_0_page_flip()
210 if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc)) in dce_v8_0_crtc_get_scanoutpos()
211 return -EINVAL; in dce_v8_0_crtc_get_scanoutpos()
220 * dce_v8_0_hpd_sense - hpd sense callback.
223 * @hpd: hpd (hotplug detect) pin
229 enum amdgpu_hpd_id hpd) in dce_v8_0_hpd_sense() argument
233 if (hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_sense()
236 if (RREG32(mmDC_HPD1_INT_STATUS + hpd_offsets[hpd]) & in dce_v8_0_hpd_sense()
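Only the lines matching the query are shown above. As a reading aid, here is a minimal sketch of how the full dce_v8_0_hpd_sense() helper plausibly reads, reconstructed from those fragments; the DC_HPD1_INT_STATUS__DC_HPD1_SENSE_MASK name is assumed from the DCE8 register naming convention and is not confirmed by this listing:

static bool dce_v8_0_hpd_sense(struct amdgpu_device *adev,
			       enum amdgpu_hpd_id hpd)
{
	bool connected = false;

	if (hpd >= adev->mode_info.num_hpd)
		return connected;

	/* the per-pin registers sit at a fixed stride, so the HPD1 register
	 * plus hpd_offsets[hpd] addresses the requested pin */
	if (RREG32(mmDC_HPD1_INT_STATUS + hpd_offsets[hpd]) &
	    DC_HPD1_INT_STATUS__DC_HPD1_SENSE_MASK)	/* assumed mask name */
		connected = true;

	return connected;
}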
244 * dce_v8_0_hpd_set_polarity - hpd set polarity callback.
247 * @hpd: hpd (hotplug detect) pin
249 * Set the polarity of the hpd pin (evergreen+).
252 enum amdgpu_hpd_id hpd) in dce_v8_0_hpd_set_polarity() argument
255 bool connected = dce_v8_0_hpd_sense(adev, hpd); in dce_v8_0_hpd_set_polarity()
257 if (hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_set_polarity()
260 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]); in dce_v8_0_hpd_set_polarity()
265 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp); in dce_v8_0_hpd_set_polarity()
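The read-modify-write between the two matched DC_HPD1_INT_CONTROL lines is not shown by the search. A sketch of the missing middle, under the assumption that the polarity bit is DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK:

	/* invert the polarity relative to the current sense state so that
	 * the next transition (plug or unplug) raises the interrupt */
	if (connected)
		tmp &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK;	/* assumed name */
	else
		tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK;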
269 * dce_v8_0_hpd_init - hpd setup callback.
273 * Set up the hpd pins used by the card (evergreen+). in dce_v8_0_hpd_init()
274 * Enable the pin, set the polarity, and enable the hpd interrupts.
287 if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_init()
290 tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v8_0_hpd_init()
292 WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v8_0_hpd_init()
294 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP || in dce_v8_0_hpd_init()
295 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) { in dce_v8_0_hpd_init()
296 /* don't try to enable hpd on eDP or LVDS to avoid breaking the in dce_v8_0_hpd_init()
301 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v8_0_hpd_init()
303 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v8_0_hpd_init()
307 dce_v8_0_hpd_set_polarity(adev, amdgpu_connector->hpd.hpd); in dce_v8_0_hpd_init()
308 amdgpu_irq_get(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); in dce_v8_0_hpd_init()
314 * dce_v8_0_hpd_fini - hpd tear down callback.
318 * Tear down the hpd pins used by the card (evergreen+).
319 * Disable the hpd interrupts.
332 if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_fini()
335 tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v8_0_hpd_fini()
337 WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v8_0_hpd_fini()
339 amdgpu_irq_put(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); in dce_v8_0_hpd_fini()
355 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_is_display_hung()
363 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_is_display_hung()
404 switch (adev->asic_type) { in dce_v8_0_get_num_crtc()
448 struct drm_device *dev = encoder->dev; in dce_v8_0_program_fmt()
451 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); in dce_v8_0_program_fmt()
460 dither = amdgpu_connector->dither; in dce_v8_0_program_fmt()
464 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD_SUPPORT) in dce_v8_0_program_fmt()
468 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) || in dce_v8_0_program_fmt()
469 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2)) in dce_v8_0_program_fmt()
516 WREG32(mmFMT_BIT_DEPTH_CONTROL + amdgpu_crtc->crtc_offset, tmp); in dce_v8_0_program_fmt()
522 * dce_v8_0_line_buffer_adjust - Set up the line buffer
538 u32 pipe_offset = amdgpu_crtc->crtc_id * 0x8; in dce_v8_0_line_buffer_adjust()
547 if (amdgpu_crtc->base.enabled && mode) { in dce_v8_0_line_buffer_adjust()
548 if (mode->crtc_hdisplay < 1920) { in dce_v8_0_line_buffer_adjust()
551 } else if (mode->crtc_hdisplay < 2560) { in dce_v8_0_line_buffer_adjust()
554 } else if (mode->crtc_hdisplay < 4096) { in dce_v8_0_line_buffer_adjust()
556 buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4; in dce_v8_0_line_buffer_adjust()
560 buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4; in dce_v8_0_line_buffer_adjust()
567 WREG32(mmLB_MEMORY_CTRL + amdgpu_crtc->crtc_offset, in dce_v8_0_line_buffer_adjust()
573 for (i = 0; i < adev->usec_timeout; i++) { in dce_v8_0_line_buffer_adjust()
580 if (amdgpu_crtc->base.enabled && mode) { in dce_v8_0_line_buffer_adjust()
597 * cik_get_number_of_dram_channels - get the number of dram channels
649 * dce_v8_0_dram_bandwidth - get the dram bandwidth
665 yclk.full = dfixed_const(wm->yclk); in dce_v8_0_dram_bandwidth()
667 dram_channels.full = dfixed_const(wm->dram_channels * 4); in dce_v8_0_dram_bandwidth()
678 * dce_v8_0_dram_bandwidth_for_display - get the dram bandwidth for display
694 yclk.full = dfixed_const(wm->yclk); in dce_v8_0_dram_bandwidth_for_display()
696 dram_channels.full = dfixed_const(wm->dram_channels * 4); in dce_v8_0_dram_bandwidth_for_display()
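Both DRAM-bandwidth helpers multiply the same terms and differ only in the derating constant applied in lines not matched here. Up to the fixed-point scaling, the calculation reduces to the following; the 0.7 efficiency and 0.3 display-allocation factors are recalled from the DCE8 watermark code and should be treated as assumptions:

  \[ \mathrm{BW}_{dram} \approx \frac{y_{clk}}{1000} \cdot (4 \cdot N_{ch}) \cdot \eta, \qquad \eta_{raw} \approx 0.7, \quad \eta_{display} \approx 0.3 \]

where y_clk is wm->yclk and N_ch is wm->dram_channels.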
707 * dce_v8_0_data_return_bandwidth - get the data return bandwidth
723 sclk.full = dfixed_const(wm->sclk); in dce_v8_0_data_return_bandwidth()
736 * dce_v8_0_dmif_request_bandwidth - get the dmif bandwidth
752 disp_clk.full = dfixed_const(wm->disp_clk); in dce_v8_0_dmif_request_bandwidth()
767 * dce_v8_0_available_bandwidth - get the min available bandwidth
786 * dce_v8_0_average_bandwidth - get the average available bandwidth
807 line_time.full = dfixed_const(wm->active_time + wm->blank_time); in dce_v8_0_average_bandwidth()
809 bpp.full = dfixed_const(wm->bytes_per_pixel); in dce_v8_0_average_bandwidth()
810 src_width.full = dfixed_const(wm->src_width); in dce_v8_0_average_bandwidth()
812 bandwidth.full = dfixed_mul(bandwidth, wm->vsc); in dce_v8_0_average_bandwidth()
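From the matched fragments, the average bandwidth is the per-line payload divided by the line period and scaled by the vertical scaling factor (constant fixed-point scaling factors omitted):

  \[ \mathrm{BW}_{avg} = \frac{W_{src} \cdot B_{pp} \cdot v_{sc}}{t_{active} + t_{blank}} \]

with W_src = wm->src_width, B_pp = wm->bytes_per_pixel and v_sc = wm->vsc.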
819 * dce_v8_0_latency_watermark - get the latency watermark
834 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */ in dce_v8_0_latency_watermark()
835 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) + in dce_v8_0_latency_watermark()
836 (wm->num_heads * cursor_line_pair_return_time); in dce_v8_0_latency_watermark()
842 if (wm->num_heads == 0) in dce_v8_0_latency_watermark()
847 if ((wm->vsc.full > a.full) || in dce_v8_0_latency_watermark()
848 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) || in dce_v8_0_latency_watermark()
849 (wm->vtaps >= 5) || in dce_v8_0_latency_watermark()
850 ((wm->vsc.full >= a.full) && wm->interlaced)) in dce_v8_0_latency_watermark()
856 b.full = dfixed_const(wm->num_heads); in dce_v8_0_latency_watermark()
858 tmp = div_u64((u64) dmif_size * (u64) wm->disp_clk, mc_latency + 512); in dce_v8_0_latency_watermark()
861 lb_fill_bw = min(tmp, wm->disp_clk * wm->bytes_per_pixel / 1000); in dce_v8_0_latency_watermark()
863 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel); in dce_v8_0_latency_watermark()
870 if (line_fill_time < wm->active_time) in dce_v8_0_latency_watermark()
873 return latency + (line_fill_time - wm->active_time); in dce_v8_0_latency_watermark()
878 * dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display - check
891 (dce_v8_0_dram_bandwidth_for_display(wm) / wm->num_heads)) in dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display()
898 * dce_v8_0_average_bandwidth_vs_available_bandwidth - check
911 (dce_v8_0_available_bandwidth(wm) / wm->num_heads)) in dce_v8_0_average_bandwidth_vs_available_bandwidth()
918 * dce_v8_0_check_latency_hiding - check latency hiding
928 u32 lb_partitions = wm->lb_size / wm->src_width; in dce_v8_0_check_latency_hiding()
929 u32 line_time = wm->active_time + wm->blank_time; in dce_v8_0_check_latency_hiding()
935 if (wm->vsc.full > a.full) in dce_v8_0_check_latency_hiding()
938 if (lb_partitions <= (wm->vtaps + 1)) in dce_v8_0_check_latency_hiding()
944 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time); in dce_v8_0_check_latency_hiding()
953 * dce_v8_0_program_watermarks - program display watermarks
967 struct drm_display_mode *mode = &amdgpu_crtc->base.mode; in dce_v8_0_program_watermarks()
974 if (amdgpu_crtc->base.enabled && num_heads && mode) { in dce_v8_0_program_watermarks()
975 active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000, in dce_v8_0_program_watermarks()
976 (u32)mode->clock); in dce_v8_0_program_watermarks()
977 line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000, in dce_v8_0_program_watermarks()
978 (u32)mode->clock); in dce_v8_0_program_watermarks()
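Since mode->clock is expressed in kHz, both quantities computed above come out in nanoseconds:

  \[ t_{active} = \frac{\mathrm{crtc\_hdisplay} \cdot 10^{6}}{\mathrm{clock}}, \qquad t_{line} = \frac{\mathrm{crtc\_htotal} \cdot 10^{6}}{\mathrm{clock}} \]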
982 if (adev->pm.dpm_enabled) { in dce_v8_0_program_watermarks()
988 wm_high.yclk = adev->pm.current_mclk * 10; in dce_v8_0_program_watermarks()
989 wm_high.sclk = adev->pm.current_sclk * 10; in dce_v8_0_program_watermarks()
992 wm_high.disp_clk = mode->clock; in dce_v8_0_program_watermarks()
993 wm_high.src_width = mode->crtc_hdisplay; in dce_v8_0_program_watermarks()
995 wm_high.blank_time = line_time - wm_high.active_time; in dce_v8_0_program_watermarks()
997 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce_v8_0_program_watermarks()
999 wm_high.vsc = amdgpu_crtc->vsc; in dce_v8_0_program_watermarks()
1001 if (amdgpu_crtc->rmx_type != RMX_OFF) in dce_v8_0_program_watermarks()
1016 (adev->mode_info.disp_priority == 2)) { in dce_v8_0_program_watermarks()
1021 if (adev->pm.dpm_enabled) { in dce_v8_0_program_watermarks()
1027 wm_low.yclk = adev->pm.current_mclk * 10; in dce_v8_0_program_watermarks()
1028 wm_low.sclk = adev->pm.current_sclk * 10; in dce_v8_0_program_watermarks()
1031 wm_low.disp_clk = mode->clock; in dce_v8_0_program_watermarks()
1032 wm_low.src_width = mode->crtc_hdisplay; in dce_v8_0_program_watermarks()
1034 wm_low.blank_time = line_time - wm_low.active_time; in dce_v8_0_program_watermarks()
1036 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce_v8_0_program_watermarks()
1038 wm_low.vsc = amdgpu_crtc->vsc; in dce_v8_0_program_watermarks()
1040 if (amdgpu_crtc->rmx_type != RMX_OFF) in dce_v8_0_program_watermarks()
1055 (adev->mode_info.disp_priority == 2)) { in dce_v8_0_program_watermarks()
1058 lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay); in dce_v8_0_program_watermarks()
1062 wm_mask = RREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset); in dce_v8_0_program_watermarks()
1066 WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp); in dce_v8_0_program_watermarks()
1067 WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_program_watermarks()
1071 tmp = RREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset); in dce_v8_0_program_watermarks()
1074 WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp); in dce_v8_0_program_watermarks()
1075 WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_program_watermarks()
1079 WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, wm_mask); in dce_v8_0_program_watermarks()
1082 amdgpu_crtc->line_time = line_time; in dce_v8_0_program_watermarks()
1083 amdgpu_crtc->wm_high = latency_watermark_a; in dce_v8_0_program_watermarks()
1084 amdgpu_crtc->wm_low = latency_watermark_b; in dce_v8_0_program_watermarks()
1086 amdgpu_crtc->lb_vblank_lead_lines = lb_vblank_lead_lines; in dce_v8_0_program_watermarks()
1090 * dce_v8_0_bandwidth_update - program display watermarks
1105 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_bandwidth_update()
1106 if (adev->mode_info.crtcs[i]->base.enabled) in dce_v8_0_bandwidth_update()
1109 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_bandwidth_update()
1110 mode = &adev->mode_info.crtcs[i]->base.mode; in dce_v8_0_bandwidth_update()
1111 lb_size = dce_v8_0_line_buffer_adjust(adev, adev->mode_info.crtcs[i], mode); in dce_v8_0_bandwidth_update()
1112 dce_v8_0_program_watermarks(adev, adev->mode_info.crtcs[i], in dce_v8_0_bandwidth_update()
1122 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_audio_get_connected_pins()
1123 offset = adev->mode_info.audio.pin[i].offset; in dce_v8_0_audio_get_connected_pins()
1129 adev->mode_info.audio.pin[i].connected = false; in dce_v8_0_audio_get_connected_pins()
1131 adev->mode_info.audio.pin[i].connected = true; in dce_v8_0_audio_get_connected_pins()
1141 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_audio_get_pin()
1142 if (adev->mode_info.audio.pin[i].connected) in dce_v8_0_audio_get_pin()
1143 return &adev->mode_info.audio.pin[i]; in dce_v8_0_audio_get_pin()
1151 struct amdgpu_device *adev = drm_to_adev(encoder->dev); in dce_v8_0_afmt_audio_select_pin()
1153 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_audio_select_pin()
1156 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_afmt_audio_select_pin()
1159 offset = dig->afmt->offset; in dce_v8_0_afmt_audio_select_pin()
1162 (dig->afmt->pin->id << AFMT_AUDIO_SRC_CONTROL__AFMT_AUDIO_SRC_SELECT__SHIFT)); in dce_v8_0_afmt_audio_select_pin()
1168 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_write_latency_fields()
1171 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_write_latency_fields()
1177 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_audio_write_latency_fields()
1180 offset = dig->afmt->pin->offset; in dce_v8_0_audio_write_latency_fields()
1184 if (connector->encoder == encoder) { in dce_v8_0_audio_write_latency_fields()
1196 if (mode->flags & DRM_MODE_FLAG_INTERLACE) { in dce_v8_0_audio_write_latency_fields()
1197 if (connector->latency_present[1]) in dce_v8_0_audio_write_latency_fields()
1199 (connector->video_latency[1] << in dce_v8_0_audio_write_latency_fields()
1201 (connector->audio_latency[1] << in dce_v8_0_audio_write_latency_fields()
1210 if (connector->latency_present[0]) in dce_v8_0_audio_write_latency_fields()
1212 (connector->video_latency[0] << in dce_v8_0_audio_write_latency_fields()
1214 (connector->audio_latency[0] << in dce_v8_0_audio_write_latency_fields()
1229 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_write_speaker_allocation()
1232 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_write_speaker_allocation()
1240 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_audio_write_speaker_allocation()
1243 offset = dig->afmt->pin->offset; in dce_v8_0_audio_write_speaker_allocation()
1247 if (connector->encoder == encoder) { in dce_v8_0_audio_write_speaker_allocation()
1282 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_write_sad_regs()
1285 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_write_sad_regs()
1308 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_audio_write_sad_regs()
1311 offset = dig->afmt->pin->offset; in dce_v8_0_audio_write_sad_regs()
1315 if (connector->encoder == encoder) { in dce_v8_0_audio_write_sad_regs()
1337 int max_channels = -1; in dce_v8_0_audio_write_sad_regs()
1343 if (sad->format == eld_reg_to_type[i][1]) { in dce_v8_0_audio_write_sad_regs()
1344 if (sad->channels > max_channels) { in dce_v8_0_audio_write_sad_regs()
1345 value = (sad->channels << in dce_v8_0_audio_write_sad_regs()
1347 (sad->byte2 << in dce_v8_0_audio_write_sad_regs()
1349 (sad->freq << in dce_v8_0_audio_write_sad_regs()
1351 max_channels = sad->channels; in dce_v8_0_audio_write_sad_regs()
1354 if (sad->format == HDMI_AUDIO_CODING_TYPE_PCM) in dce_v8_0_audio_write_sad_regs()
1355 stereo_freqs |= sad->freq; in dce_v8_0_audio_write_sad_regs()
1377 WREG32_AUDIO_ENDPT(pin->offset, ixAZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL, in dce_v8_0_audio_enable()
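Only one line of dce_v8_0_audio_enable() matched the query; a sketch of the whole helper as it plausibly reads (the AUDIO_ENABLED mask name is an assumption):

static void dce_v8_0_audio_enable(struct amdgpu_device *adev,
				  struct amdgpu_audio_pin *pin,
				  bool enable)
{
	if (!pin)
		return;

	/* one indexed register per audio endpoint gates the pin */
	WREG32_AUDIO_ENDPT(pin->offset,
			   ixAZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL,
			   enable ?
			   AZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL__AUDIO_ENABLED_MASK : 0);
}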
1382 (0x1780 - 0x1780),
1383 (0x1786 - 0x1780),
1384 (0x178c - 0x1780),
1385 (0x1792 - 0x1780),
1386 (0x1798 - 0x1780),
1387 (0x179d - 0x1780),
1388 (0x17a4 - 0x1780),
1398 adev->mode_info.audio.enabled = true; in dce_v8_0_audio_init()
1400 if (adev->asic_type == CHIP_KAVERI) /* KV: 4 streams, 7 endpoints */ in dce_v8_0_audio_init()
1401 adev->mode_info.audio.num_pins = 7; in dce_v8_0_audio_init()
1402 else if ((adev->asic_type == CHIP_KABINI) || in dce_v8_0_audio_init()
1403 (adev->asic_type == CHIP_MULLINS)) /* KB/ML: 2 streams, 3 endpoints */ in dce_v8_0_audio_init()
1404 adev->mode_info.audio.num_pins = 3; in dce_v8_0_audio_init()
1405 else if ((adev->asic_type == CHIP_BONAIRE) || in dce_v8_0_audio_init()
1406 (adev->asic_type == CHIP_HAWAII))/* BN/HW: 6 streams, 7 endpoints */ in dce_v8_0_audio_init()
1407 adev->mode_info.audio.num_pins = 7; in dce_v8_0_audio_init()
1409 adev->mode_info.audio.num_pins = 3; in dce_v8_0_audio_init()
1411 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_audio_init()
1412 adev->mode_info.audio.pin[i].channels = -1; in dce_v8_0_audio_init()
1413 adev->mode_info.audio.pin[i].rate = -1; in dce_v8_0_audio_init()
1414 adev->mode_info.audio.pin[i].bits_per_sample = -1; in dce_v8_0_audio_init()
1415 adev->mode_info.audio.pin[i].status_bits = 0; in dce_v8_0_audio_init()
1416 adev->mode_info.audio.pin[i].category_code = 0; in dce_v8_0_audio_init()
1417 adev->mode_info.audio.pin[i].connected = false; in dce_v8_0_audio_init()
1418 adev->mode_info.audio.pin[i].offset = pin_offsets[i]; in dce_v8_0_audio_init()
1419 adev->mode_info.audio.pin[i].id = i; in dce_v8_0_audio_init()
1422 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_audio_init()
1435 if (!adev->mode_info.audio.enabled) in dce_v8_0_audio_fini()
1438 for (i = 0; i < adev->mode_info.audio.num_pins; i++) in dce_v8_0_audio_fini()
1439 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_audio_fini()
1441 adev->mode_info.audio.enabled = false; in dce_v8_0_audio_fini()
1449 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_update_ACR()
1453 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_update_ACR()
1454 uint32_t offset = dig->afmt->offset; in dce_v8_0_afmt_update_ACR()
1472 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_update_avi_infoframe()
1475 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_update_avi_infoframe()
1476 uint32_t offset = dig->afmt->offset; in dce_v8_0_afmt_update_avi_infoframe()
1492 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_set_dto()
1495 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_set_dto()
1496 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); in dce_v8_0_audio_set_dto()
1500 if (!dig || !dig->afmt) in dce_v8_0_audio_set_dto()
1508 …WREG32(mmDCCG_AUDIO_DTO_SOURCE, (amdgpu_crtc->crtc_id << DCCG_AUDIO_DTO_SOURCE__DCCG_AUDIO_DTO0_SO… in dce_v8_0_audio_set_dto()
1519 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_setmode()
1522 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_setmode()
1530 if (!dig || !dig->afmt) in dce_v8_0_afmt_setmode()
1534 if (!dig->afmt->enabled) in dce_v8_0_afmt_setmode()
1537 offset = dig->afmt->offset; in dce_v8_0_afmt_setmode()
1540 if (encoder->crtc) { in dce_v8_0_afmt_setmode()
1541 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); in dce_v8_0_afmt_setmode()
1542 bpc = amdgpu_crtc->bpc; in dce_v8_0_afmt_setmode()
1546 dig->afmt->pin = dce_v8_0_audio_get_pin(adev); in dce_v8_0_afmt_setmode()
1547 dce_v8_0_audio_enable(adev, dig->afmt->pin, false); in dce_v8_0_afmt_setmode()
1549 dce_v8_0_audio_set_dto(encoder, mode->clock); in dce_v8_0_afmt_setmode()
1567 connector->name, bpc); in dce_v8_0_afmt_setmode()
1573 connector->name); in dce_v8_0_afmt_setmode()
1579 connector->name); in dce_v8_0_afmt_setmode()
1619 dce_v8_0_afmt_update_ACR(encoder, mode->clock); in dce_v8_0_afmt_setmode()
1676 dce_v8_0_audio_enable(adev, dig->afmt->pin, true); in dce_v8_0_afmt_setmode()
1681 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_enable()
1684 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_enable()
1686 if (!dig || !dig->afmt) in dce_v8_0_afmt_enable()
1690 if (enable && dig->afmt->enabled) in dce_v8_0_afmt_enable()
1692 if (!enable && !dig->afmt->enabled) in dce_v8_0_afmt_enable()
1695 if (!enable && dig->afmt->pin) { in dce_v8_0_afmt_enable()
1696 dce_v8_0_audio_enable(adev, dig->afmt->pin, false); in dce_v8_0_afmt_enable()
1697 dig->afmt->pin = NULL; in dce_v8_0_afmt_enable()
1700 dig->afmt->enabled = enable; in dce_v8_0_afmt_enable()
1703 enable ? "En" : "Dis", dig->afmt->offset, amdgpu_encoder->encoder_id); in dce_v8_0_afmt_enable()
1710 for (i = 0; i < adev->mode_info.num_dig; i++) in dce_v8_0_afmt_init()
1711 adev->mode_info.afmt[i] = NULL; in dce_v8_0_afmt_init()
1714 for (i = 0; i < adev->mode_info.num_dig; i++) { in dce_v8_0_afmt_init()
1715 adev->mode_info.afmt[i] = kzalloc(sizeof(struct amdgpu_afmt), GFP_KERNEL); in dce_v8_0_afmt_init()
1716 if (adev->mode_info.afmt[i]) { in dce_v8_0_afmt_init()
1717 adev->mode_info.afmt[i]->offset = dig_offsets[i]; in dce_v8_0_afmt_init()
1718 adev->mode_info.afmt[i]->id = i; in dce_v8_0_afmt_init()
1722 kfree(adev->mode_info.afmt[j]); in dce_v8_0_afmt_init()
1723 adev->mode_info.afmt[j] = NULL; in dce_v8_0_afmt_init()
1725 return -ENOMEM; in dce_v8_0_afmt_init()
1735 for (i = 0; i < adev->mode_info.num_dig; i++) { in dce_v8_0_afmt_fini()
1736 kfree(adev->mode_info.afmt[i]); in dce_v8_0_afmt_fini()
1737 adev->mode_info.afmt[i] = NULL; in dce_v8_0_afmt_fini()
1753 struct drm_device *dev = crtc->dev; in dce_v8_0_vga_enable()
1757 vga_control = RREG32(vga_control_regs[amdgpu_crtc->crtc_id]) & ~1; in dce_v8_0_vga_enable()
1759 WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control | 1); in dce_v8_0_vga_enable()
1761 WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control); in dce_v8_0_vga_enable()
1767 struct drm_device *dev = crtc->dev; in dce_v8_0_grph_enable()
1771 WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 1); in dce_v8_0_grph_enable()
1773 WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_grph_enable()
1781 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_do_set_base()
1795 if (!atomic && !crtc->primary->fb) { in dce_v8_0_crtc_do_set_base()
1803 target_fb = crtc->primary->fb; in dce_v8_0_crtc_do_set_base()
1808 obj = target_fb->obj[0]; in dce_v8_0_crtc_do_set_base()
1818 return -EINVAL; in dce_v8_0_crtc_do_set_base()
1828 switch (target_fb->format->format) { in dce_v8_0_crtc_do_set_base()
1879 /* Greater than 8 bpc fb needs to bypass hw-lut to retain precision */ in dce_v8_0_crtc_do_set_base()
1889 /* Greater than 8 bpc fb needs to bypass hw-lut to retain precision */ in dce_v8_0_crtc_do_set_base()
1904 &target_fb->format->format); in dce_v8_0_crtc_do_set_base()
1905 return -EINVAL; in dce_v8_0_crtc_do_set_base()
1935 WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1937 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1939 WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1941 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1943 WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1945 WREG32(mmGRPH_CONTROL + amdgpu_crtc->crtc_offset, fb_format); in dce_v8_0_crtc_do_set_base()
1946 WREG32(mmGRPH_SWAP_CNTL + amdgpu_crtc->crtc_offset, fb_swap); in dce_v8_0_crtc_do_set_base()
1953 WREG32_P(mmGRPH_LUT_10BIT_BYPASS_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1960 WREG32(mmGRPH_SURFACE_OFFSET_X + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1961 WREG32(mmGRPH_SURFACE_OFFSET_Y + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1962 WREG32(mmGRPH_X_START + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1963 WREG32(mmGRPH_Y_START + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1964 WREG32(mmGRPH_X_END + amdgpu_crtc->crtc_offset, target_fb->width); in dce_v8_0_crtc_do_set_base()
1965 WREG32(mmGRPH_Y_END + amdgpu_crtc->crtc_offset, target_fb->height); in dce_v8_0_crtc_do_set_base()
1967 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0]; in dce_v8_0_crtc_do_set_base()
1968 WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, fb_pitch_pixels); in dce_v8_0_crtc_do_set_base()
1972 WREG32(mmLB_DESKTOP_HEIGHT + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1973 target_fb->height); in dce_v8_0_crtc_do_set_base()
1977 WREG32(mmVIEWPORT_START + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1979 viewport_w = crtc->mode.hdisplay; in dce_v8_0_crtc_do_set_base()
1980 viewport_h = (crtc->mode.vdisplay + 1) & ~1; in dce_v8_0_crtc_do_set_base()
1981 WREG32(mmVIEWPORT_SIZE + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1985 WREG32(mmMASTER_UPDATE_MODE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1987 if (!atomic && fb && fb != crtc->primary->fb) { in dce_v8_0_crtc_do_set_base()
1988 abo = gem_to_amdgpu_bo(fb->obj[0]); in dce_v8_0_crtc_do_set_base()
2005 struct drm_device *dev = crtc->dev; in dce_v8_0_set_interleave()
2009 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce_v8_0_set_interleave()
2010 WREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset, in dce_v8_0_set_interleave()
2013 WREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_set_interleave()
2019 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_load_lut()
2024 DRM_DEBUG_KMS("%d\n", amdgpu_crtc->crtc_id); in dce_v8_0_crtc_load_lut()
2026 WREG32(mmINPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2029 WREG32(mmPRESCALE_GRPH_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2031 WREG32(mmPRESCALE_OVL_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2033 WREG32(mmINPUT_GAMMA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2037 WREG32(mmDC_LUT_CONTROL + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2039 WREG32(mmDC_LUT_BLACK_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2040 WREG32(mmDC_LUT_BLACK_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2041 WREG32(mmDC_LUT_BLACK_OFFSET_RED + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2043 WREG32(mmDC_LUT_WHITE_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0xffff); in dce_v8_0_crtc_load_lut()
2044 WREG32(mmDC_LUT_WHITE_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0xffff); in dce_v8_0_crtc_load_lut()
2045 WREG32(mmDC_LUT_WHITE_OFFSET_RED + amdgpu_crtc->crtc_offset, 0xffff); in dce_v8_0_crtc_load_lut()
2047 WREG32(mmDC_LUT_RW_MODE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2048 WREG32(mmDC_LUT_WRITE_EN_MASK + amdgpu_crtc->crtc_offset, 0x00000007); in dce_v8_0_crtc_load_lut()
2050 WREG32(mmDC_LUT_RW_INDEX + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2051 r = crtc->gamma_store; in dce_v8_0_crtc_load_lut()
2052 g = r + crtc->gamma_size; in dce_v8_0_crtc_load_lut()
2053 b = g + crtc->gamma_size; in dce_v8_0_crtc_load_lut()
2055 WREG32(mmDC_LUT_30_COLOR + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2061 WREG32(mmDEGAMMA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2065 WREG32(mmGAMUT_REMAP_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2068 WREG32(mmREGAMMA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2071 WREG32(mmOUTPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2075 WREG32(0x1a50 + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2076 /* XXX this only needs to be programmed once per crtc at startup, in dce_v8_0_crtc_load_lut()
2079 WREG32(mmALPHA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2086 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_pick_dig_encoder()
2088 switch (amdgpu_encoder->encoder_id) { in dce_v8_0_pick_dig_encoder()
2090 if (dig->linkb) in dce_v8_0_pick_dig_encoder()
2095 if (dig->linkb) in dce_v8_0_pick_dig_encoder()
2100 if (dig->linkb) in dce_v8_0_pick_dig_encoder()
2107 DRM_ERROR("invalid encoder_id: 0x%x\n", amdgpu_encoder->encoder_id); in dce_v8_0_pick_dig_encoder()
2113 * dce_v8_0_pick_pll - Allocate a PPLL for use by the crtc.
2118 * a single PPLL can be used for all DP crtcs/encoders. For non-DP
2129 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP)
2131 * - PPLL0, PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC
2137 struct drm_device *dev = crtc->dev; in dce_v8_0_pick_pll()
2142 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) { in dce_v8_0_pick_pll()
2143 if (adev->clock.dp_extclk) in dce_v8_0_pick_pll()
2159 if ((adev->asic_type == CHIP_KABINI) || in dce_v8_0_pick_pll()
2160 (adev->asic_type == CHIP_MULLINS)) { in dce_v8_0_pick_pll()
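The comment block above spells out the PPLL allocation policy; a sketch of how the DP/non-DP split plausibly implements it. The amdgpu_pll_get_shared_dp_ppll()/amdgpu_pll_get_shared_nondp_ppll() helper names are assumptions based on the generic amdgpu PLL helpers and are not confirmed by this listing:

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) {
		if (adev->clock.dp_extclk)
			return ATOM_PPLL_INVALID;	/* external ref clock, no PPLL needed */
		/* otherwise share one PPLL across all DP CRTCs */
		pll = amdgpu_pll_get_shared_dp_ppll(crtc);	/* assumed helper */
		if (pll != ATOM_PPLL_INVALID)
			return pll;
	} else {
		/* non-DP: reuse a PPLL already running at the same clock */
		pll = amdgpu_pll_get_shared_nondp_ppll(crtc);	/* assumed helper */
		if (pll != ATOM_PPLL_INVALID)
			return pll;
	}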
2186 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_lock_cursor()
2190 cur_lock = RREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset); in dce_v8_0_lock_cursor()
2195 WREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset, cur_lock); in dce_v8_0_lock_cursor()
2201 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_hide_cursor()
2203 WREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_hide_cursor()
2211 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_show_cursor()
2213 WREG32(mmCUR_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_show_cursor()
2214 upper_32_bits(amdgpu_crtc->cursor_addr)); in dce_v8_0_show_cursor()
2215 WREG32(mmCUR_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_show_cursor()
2216 lower_32_bits(amdgpu_crtc->cursor_addr)); in dce_v8_0_show_cursor()
2218 WREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_show_cursor()
2228 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_cursor_move_locked()
2231 amdgpu_crtc->cursor_x = x; in dce_v8_0_cursor_move_locked()
2232 amdgpu_crtc->cursor_y = y; in dce_v8_0_cursor_move_locked()
2235 x += crtc->x; in dce_v8_0_cursor_move_locked()
2236 y += crtc->y; in dce_v8_0_cursor_move_locked()
2237 DRM_DEBUG("x %d y %d c->x %d c->y %d\n", x, y, crtc->x, crtc->y); in dce_v8_0_cursor_move_locked()
2240 xorigin = min(-x, amdgpu_crtc->max_cursor_width - 1); in dce_v8_0_cursor_move_locked()
2244 yorigin = min(-y, amdgpu_crtc->max_cursor_height - 1); in dce_v8_0_cursor_move_locked()
2248 WREG32(mmCUR_POSITION + amdgpu_crtc->crtc_offset, (x << 16) | y); in dce_v8_0_cursor_move_locked()
2249 WREG32(mmCUR_HOT_SPOT + amdgpu_crtc->crtc_offset, (xorigin << 16) | yorigin); in dce_v8_0_cursor_move_locked()
2250 WREG32(mmCUR_SIZE + amdgpu_crtc->crtc_offset, in dce_v8_0_cursor_move_locked()
2251 ((amdgpu_crtc->cursor_width - 1) << 16) | (amdgpu_crtc->cursor_height - 1)); in dce_v8_0_cursor_move_locked()
2288 if ((width > amdgpu_crtc->max_cursor_width) || in dce_v8_0_crtc_cursor_set2()
2289 (height > amdgpu_crtc->max_cursor_height)) { in dce_v8_0_crtc_cursor_set2()
2291 return -EINVAL; in dce_v8_0_crtc_cursor_set2()
2296 DRM_ERROR("Cannot find cursor object %x for crtc %d\n", handle, amdgpu_crtc->crtc_id); in dce_v8_0_crtc_cursor_set2()
2297 return -ENOENT; in dce_v8_0_crtc_cursor_set2()
2314 amdgpu_crtc->cursor_addr = amdgpu_bo_gpu_offset(aobj); in dce_v8_0_crtc_cursor_set2()
2318 if (width != amdgpu_crtc->cursor_width || in dce_v8_0_crtc_cursor_set2()
2319 height != amdgpu_crtc->cursor_height || in dce_v8_0_crtc_cursor_set2()
2320 hot_x != amdgpu_crtc->cursor_hot_x || in dce_v8_0_crtc_cursor_set2()
2321 hot_y != amdgpu_crtc->cursor_hot_y) { in dce_v8_0_crtc_cursor_set2()
2324 x = amdgpu_crtc->cursor_x + amdgpu_crtc->cursor_hot_x - hot_x; in dce_v8_0_crtc_cursor_set2()
2325 y = amdgpu_crtc->cursor_y + amdgpu_crtc->cursor_hot_y - hot_y; in dce_v8_0_crtc_cursor_set2()
2329 amdgpu_crtc->cursor_width = width; in dce_v8_0_crtc_cursor_set2()
2330 amdgpu_crtc->cursor_height = height; in dce_v8_0_crtc_cursor_set2()
2331 amdgpu_crtc->cursor_hot_x = hot_x; in dce_v8_0_crtc_cursor_set2()
2332 amdgpu_crtc->cursor_hot_y = hot_y; in dce_v8_0_crtc_cursor_set2()
2339 if (amdgpu_crtc->cursor_bo) { in dce_v8_0_crtc_cursor_set2()
2340 struct amdgpu_bo *aobj = gem_to_amdgpu_bo(amdgpu_crtc->cursor_bo); in dce_v8_0_crtc_cursor_set2()
2346 drm_gem_object_put(amdgpu_crtc->cursor_bo); in dce_v8_0_crtc_cursor_set2()
2349 amdgpu_crtc->cursor_bo = obj; in dce_v8_0_crtc_cursor_set2()
2357 if (amdgpu_crtc->cursor_bo) { in dce_v8_0_cursor_reset()
2360 dce_v8_0_cursor_move_locked(crtc, amdgpu_crtc->cursor_x, in dce_v8_0_cursor_reset()
2361 amdgpu_crtc->cursor_y); in dce_v8_0_cursor_reset()
2401 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_dpms()
2408 amdgpu_crtc->enabled = true; in dce_v8_0_crtc_dpms()
2415 amdgpu_crtc->crtc_id); in dce_v8_0_crtc_dpms()
2416 amdgpu_irq_update(adev, &adev->crtc_irq, type); in dce_v8_0_crtc_dpms()
2417 amdgpu_irq_update(adev, &adev->pageflip_irq, type); in dce_v8_0_crtc_dpms()
2425 if (amdgpu_crtc->enabled) { in dce_v8_0_crtc_dpms()
2431 amdgpu_crtc->enabled = false; in dce_v8_0_crtc_dpms()
2455 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_disable()
2461 if (crtc->primary->fb) { in dce_v8_0_crtc_disable()
2465 abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]); in dce_v8_0_crtc_disable()
2479 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_crtc_disable()
2480 if (adev->mode_info.crtcs[i] && in dce_v8_0_crtc_disable()
2481 adev->mode_info.crtcs[i]->enabled && in dce_v8_0_crtc_disable()
2482 i != amdgpu_crtc->crtc_id && in dce_v8_0_crtc_disable()
2483 amdgpu_crtc->pll_id == adev->mode_info.crtcs[i]->pll_id) { in dce_v8_0_crtc_disable()
2491 switch (amdgpu_crtc->pll_id) { in dce_v8_0_crtc_disable()
2495 amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, amdgpu_crtc->pll_id, in dce_v8_0_crtc_disable()
2500 if ((adev->asic_type == CHIP_KAVERI) || in dce_v8_0_crtc_disable()
2501 (adev->asic_type == CHIP_BONAIRE) || in dce_v8_0_crtc_disable()
2502 (adev->asic_type == CHIP_HAWAII)) in dce_v8_0_crtc_disable()
2503 amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, amdgpu_crtc->pll_id, in dce_v8_0_crtc_disable()
2510 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID; in dce_v8_0_crtc_disable()
2511 amdgpu_crtc->adjusted_clock = 0; in dce_v8_0_crtc_disable()
2512 amdgpu_crtc->encoder = NULL; in dce_v8_0_crtc_disable()
2513 amdgpu_crtc->connector = NULL; in dce_v8_0_crtc_disable()
2523 if (!amdgpu_crtc->adjusted_clock) in dce_v8_0_crtc_mode_set()
2524 return -EINVAL; in dce_v8_0_crtc_mode_set()
2533 amdgpu_crtc->hw_mode = *adjusted_mode; in dce_v8_0_crtc_mode_set()
2543 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_mode_fixup()
2547 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in dce_v8_0_crtc_mode_fixup()
2548 if (encoder->crtc == crtc) { in dce_v8_0_crtc_mode_fixup()
2549 amdgpu_crtc->encoder = encoder; in dce_v8_0_crtc_mode_fixup()
2550 amdgpu_crtc->connector = amdgpu_get_connector_for_encoder(encoder); in dce_v8_0_crtc_mode_fixup()
2554 if ((amdgpu_crtc->encoder == NULL) || (amdgpu_crtc->connector == NULL)) { in dce_v8_0_crtc_mode_fixup()
2555 amdgpu_crtc->encoder = NULL; in dce_v8_0_crtc_mode_fixup()
2556 amdgpu_crtc->connector = NULL; in dce_v8_0_crtc_mode_fixup()
2564 amdgpu_crtc->pll_id = dce_v8_0_pick_pll(crtc); in dce_v8_0_crtc_mode_fixup()
2565 /* if we can't get a PPLL for a non-DP encoder, fail */ in dce_v8_0_crtc_mode_fixup()
2566 if ((amdgpu_crtc->pll_id == ATOM_PPLL_INVALID) && in dce_v8_0_crtc_mode_fixup()
2567 !ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) in dce_v8_0_crtc_mode_fixup()
2605 return -ENOMEM; in dce_v8_0_crtc_init()
2607 drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_v8_0_crtc_funcs); in dce_v8_0_crtc_init()
2609 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256); in dce_v8_0_crtc_init()
2610 amdgpu_crtc->crtc_id = index; in dce_v8_0_crtc_init()
2611 adev->mode_info.crtcs[index] = amdgpu_crtc; in dce_v8_0_crtc_init()
2613 amdgpu_crtc->max_cursor_width = CIK_CURSOR_WIDTH; in dce_v8_0_crtc_init()
2614 amdgpu_crtc->max_cursor_height = CIK_CURSOR_HEIGHT; in dce_v8_0_crtc_init()
2615 adev_to_drm(adev)->mode_config.cursor_width = amdgpu_crtc->max_cursor_width; in dce_v8_0_crtc_init()
2616 adev_to_drm(adev)->mode_config.cursor_height = amdgpu_crtc->max_cursor_height; in dce_v8_0_crtc_init()
2618 amdgpu_crtc->crtc_offset = crtc_offsets[amdgpu_crtc->crtc_id]; in dce_v8_0_crtc_init()
2620 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID; in dce_v8_0_crtc_init()
2621 amdgpu_crtc->adjusted_clock = 0; in dce_v8_0_crtc_init()
2622 amdgpu_crtc->encoder = NULL; in dce_v8_0_crtc_init()
2623 amdgpu_crtc->connector = NULL; in dce_v8_0_crtc_init()
2624 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_v8_0_crtc_helper_funcs); in dce_v8_0_crtc_init()
2633 adev->audio_endpt_rreg = &dce_v8_0_audio_endpt_rreg; in dce_v8_0_early_init()
2634 adev->audio_endpt_wreg = &dce_v8_0_audio_endpt_wreg; in dce_v8_0_early_init()
2638 adev->mode_info.num_crtc = dce_v8_0_get_num_crtc(adev); in dce_v8_0_early_init()
2640 switch (adev->asic_type) { in dce_v8_0_early_init()
2643 adev->mode_info.num_hpd = 6; in dce_v8_0_early_init()
2644 adev->mode_info.num_dig = 6; in dce_v8_0_early_init()
2647 adev->mode_info.num_hpd = 6; in dce_v8_0_early_init()
2648 adev->mode_info.num_dig = 7; in dce_v8_0_early_init()
2652 adev->mode_info.num_hpd = 6; in dce_v8_0_early_init()
2653 adev->mode_info.num_dig = 6; /* ? */ in dce_v8_0_early_init()
2657 return -EINVAL; in dce_v8_0_early_init()
2670 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_sw_init()
2671 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, i + 1, &adev->crtc_irq); in dce_v8_0_sw_init()
2677 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, i, &adev->pageflip_irq); in dce_v8_0_sw_init()
2682 /* HPD hotplug */ in dce_v8_0_sw_init()
2683 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 42, &adev->hpd_irq); in dce_v8_0_sw_init()
2687 adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs; in dce_v8_0_sw_init()
2689 adev_to_drm(adev)->mode_config.async_page_flip = true; in dce_v8_0_sw_init()
2691 adev_to_drm(adev)->mode_config.max_width = 16384; in dce_v8_0_sw_init()
2692 adev_to_drm(adev)->mode_config.max_height = 16384; in dce_v8_0_sw_init()
2694 adev_to_drm(adev)->mode_config.preferred_depth = 24; in dce_v8_0_sw_init()
2695 if (adev->asic_type == CHIP_HAWAII) in dce_v8_0_sw_init()
2697 adev_to_drm(adev)->mode_config.prefer_shadow = 0; in dce_v8_0_sw_init()
2699 adev_to_drm(adev)->mode_config.prefer_shadow = 1; in dce_v8_0_sw_init()
2701 adev_to_drm(adev)->mode_config.fb_modifiers_not_supported = true; in dce_v8_0_sw_init()
2707 adev_to_drm(adev)->mode_config.max_width = 16384; in dce_v8_0_sw_init()
2708 adev_to_drm(adev)->mode_config.max_height = 16384; in dce_v8_0_sw_init()
2711 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_sw_init()
2720 return -EINVAL; in dce_v8_0_sw_init()
2731 /* Disable vblank IRQs aggressively for power-saving */ in dce_v8_0_sw_init()
2733 adev_to_drm(adev)->vblank_disable_immediate = true; in dce_v8_0_sw_init()
2735 r = drm_vblank_init(adev_to_drm(adev), adev->mode_info.num_crtc); in dce_v8_0_sw_init()
2739 /* Pre-DCE11 */ in dce_v8_0_sw_init()
2740 INIT_DELAYED_WORK(&adev->hotplug_work, in dce_v8_0_sw_init()
2745 adev->mode_info.mode_config_initialized = true; in dce_v8_0_sw_init()
2753 kfree(adev->mode_info.bios_hardcoded_edid); in dce_v8_0_sw_fini()
2762 adev->mode_info.mode_config_initialized = false; in dce_v8_0_sw_fini()
2776 amdgpu_atombios_crtc_set_disp_eng_pll(adev, adev->clock.default_dispclk); in dce_v8_0_hw_init()
2778 /* initialize hpd */ in dce_v8_0_hw_init()
2781 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_hw_init()
2782 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_hw_init()
2797 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_hw_fini()
2798 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_hw_fini()
2803 flush_delayed_work(&adev->hotplug_work); in dce_v8_0_hw_fini()
2817 adev->mode_info.bl_level = in dce_v8_0_suspend()
2829 adev->mode_info.bl_level); in dce_v8_0_resume()
2834 if (adev->mode_info.bl_encoder) { in dce_v8_0_resume()
2836 adev->mode_info.bl_encoder); in dce_v8_0_resume()
2837 amdgpu_display_backlight_set_level(adev, adev->mode_info.bl_encoder, in dce_v8_0_resume()
2867 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in dce_v8_0_soft_reset()
2889 if (crtc >= adev->mode_info.num_crtc) { in dce_v8_0_set_crtc_vblank_interrupt_state()
2940 if (crtc >= adev->mode_info.num_crtc) { in dce_v8_0_set_crtc_vline_interrupt_state()
2992 if (type >= adev->mode_info.num_hpd) { in dce_v8_0_set_hpd_interrupt_state()
3067 unsigned crtc = entry->src_id - 1; in dce_v8_0_crtc_irq()
3072 switch (entry->src_data[0]) { in dce_v8_0_crtc_irq()
3093 DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]); in dce_v8_0_crtc_irq()
3107 if (type >= adev->mode_info.num_crtc) { in dce_v8_0_set_pageflip_interrupt_state()
3109 return -EINVAL; in dce_v8_0_set_pageflip_interrupt_state()
3132 crtc_id = (entry->src_id - 8) >> 1; in dce_v8_0_pageflip_irq()
3133 amdgpu_crtc = adev->mode_info.crtcs[crtc_id]; in dce_v8_0_pageflip_irq()
3135 if (crtc_id >= adev->mode_info.num_crtc) { in dce_v8_0_pageflip_irq()
3137 return -EINVAL; in dce_v8_0_pageflip_irq()
3149 spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags); in dce_v8_0_pageflip_irq()
3150 works = amdgpu_crtc->pflip_works; in dce_v8_0_pageflip_irq()
3151 if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) { in dce_v8_0_pageflip_irq()
3152 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != " in dce_v8_0_pageflip_irq()
3154 amdgpu_crtc->pflip_status, in dce_v8_0_pageflip_irq()
3156 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags); in dce_v8_0_pageflip_irq()
3161 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE; in dce_v8_0_pageflip_irq()
3162 amdgpu_crtc->pflip_works = NULL; in dce_v8_0_pageflip_irq()
3165 if (works->event) in dce_v8_0_pageflip_irq()
3166 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event); in dce_v8_0_pageflip_irq()
3168 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags); in dce_v8_0_pageflip_irq()
3170 drm_crtc_vblank_put(&amdgpu_crtc->base); in dce_v8_0_pageflip_irq()
3171 schedule_work(&works->unpin_work); in dce_v8_0_pageflip_irq()
3181 unsigned hpd; in dce_v8_0_hpd_irq() local
3183 if (entry->src_data[0] >= adev->mode_info.num_hpd) { in dce_v8_0_hpd_irq()
3184 DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]); in dce_v8_0_hpd_irq()
3188 hpd = entry->src_data[0]; in dce_v8_0_hpd_irq()
3189 disp_int = RREG32(interrupt_status_offsets[hpd].reg); in dce_v8_0_hpd_irq()
3190 mask = interrupt_status_offsets[hpd].hpd; in dce_v8_0_hpd_irq()
3193 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]); in dce_v8_0_hpd_irq()
3195 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp); in dce_v8_0_hpd_irq()
3196 schedule_delayed_work(&adev->hotplug_work, 0); in dce_v8_0_hpd_irq()
3197 DRM_DEBUG("IH: HPD%d\n", hpd + 1); in dce_v8_0_hpd_irq()
3240 amdgpu_encoder->pixel_clock = adjusted_mode->clock; in dce_v8_0_encoder_mode_set()
3246 dce_v8_0_set_interleave(encoder->crtc, mode); in dce_v8_0_encoder_mode_set()
3256 struct amdgpu_device *adev = drm_to_adev(encoder->dev); in dce_v8_0_encoder_prepare()
3260 if ((amdgpu_encoder->active_device & in dce_v8_0_encoder_prepare()
3264 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_encoder_prepare()
3266 dig->dig_encoder = dce_v8_0_pick_dig_encoder(encoder); in dce_v8_0_encoder_prepare()
3267 if (amdgpu_encoder->active_device & ATOM_DEVICE_DFP_SUPPORT) in dce_v8_0_encoder_prepare()
3268 dig->afmt = adev->mode_info.afmt[dig->dig_encoder]; in dce_v8_0_encoder_prepare()
3278 if (amdgpu_connector->router.cd_valid) in dce_v8_0_encoder_prepare()
3282 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) in dce_v8_0_encoder_prepare()
3295 struct drm_device *dev = encoder->dev; in dce_v8_0_encoder_commit()
3313 dig = amdgpu_encoder->enc_priv; in dce_v8_0_encoder_disable()
3314 dig->dig_encoder = -1; in dce_v8_0_encoder_disable()
3316 amdgpu_encoder->active_device = 0; in dce_v8_0_encoder_disable()
3380 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) in dce_v8_0_encoder_destroy()
3382 kfree(amdgpu_encoder->enc_priv); in dce_v8_0_encoder_destroy()
3401 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in dce_v8_0_encoder_add()
3403 if (amdgpu_encoder->encoder_enum == encoder_enum) { in dce_v8_0_encoder_add()
3404 amdgpu_encoder->devices |= supported_device; in dce_v8_0_encoder_add()
3415 encoder = &amdgpu_encoder->base; in dce_v8_0_encoder_add()
3416 switch (adev->mode_info.num_crtc) { in dce_v8_0_encoder_add()
3418 encoder->possible_crtcs = 0x1; in dce_v8_0_encoder_add()
3422 encoder->possible_crtcs = 0x3; in dce_v8_0_encoder_add()
3425 encoder->possible_crtcs = 0xf; in dce_v8_0_encoder_add()
3428 encoder->possible_crtcs = 0x3f; in dce_v8_0_encoder_add()
3432 amdgpu_encoder->enc_priv = NULL; in dce_v8_0_encoder_add()
3434 amdgpu_encoder->encoder_enum = encoder_enum; in dce_v8_0_encoder_add()
3435 amdgpu_encoder->encoder_id = (encoder_enum & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; in dce_v8_0_encoder_add()
3436 amdgpu_encoder->devices = supported_device; in dce_v8_0_encoder_add()
3437 amdgpu_encoder->rmx_type = RMX_OFF; in dce_v8_0_encoder_add()
3438 amdgpu_encoder->underscan_type = UNDERSCAN_OFF; in dce_v8_0_encoder_add()
3439 amdgpu_encoder->is_ext_encoder = false; in dce_v8_0_encoder_add()
3440 amdgpu_encoder->caps = caps; in dce_v8_0_encoder_add()
3442 switch (amdgpu_encoder->encoder_id) { in dce_v8_0_encoder_add()
3454 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { in dce_v8_0_encoder_add()
3455 amdgpu_encoder->rmx_type = RMX_FULL; in dce_v8_0_encoder_add()
3458 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_lcd_info(amdgpu_encoder); in dce_v8_0_encoder_add()
3459 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT)) { in dce_v8_0_encoder_add()
3462 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder); in dce_v8_0_encoder_add()
3466 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder); in dce_v8_0_encoder_add()
3480 amdgpu_encoder->is_ext_encoder = true; in dce_v8_0_encoder_add()
3481 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) in dce_v8_0_encoder_add()
3484 else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT)) in dce_v8_0_encoder_add()
3511 adev->mode_info.funcs = &dce_v8_0_display_funcs; in dce_v8_0_set_display_funcs()
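For context, dce_v8_0_display_funcs is the table that exposes the HPD, page-flip and watermark routines above to the rest of the driver. A sketch of a plausible initializer; the member names are assumptions based on struct amdgpu_display_funcs from this driver's era, not confirmed by this listing:

static const struct amdgpu_display_funcs dce_v8_0_display_funcs = {
	.bandwidth_update = &dce_v8_0_bandwidth_update,
	.vblank_get_counter = &dce_v8_0_vblank_get_counter,
	.hpd_sense = &dce_v8_0_hpd_sense,
	.hpd_set_polarity = &dce_v8_0_hpd_set_polarity,
	.page_flip = &dce_v8_0_page_flip,
	.page_flip_get_scanoutpos = &dce_v8_0_crtc_get_scanoutpos,
	/* backlight, encoder and connector hooks omitted */
};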
3531 if (adev->mode_info.num_crtc > 0) in dce_v8_0_set_irq_funcs()
3532 adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VLINE1 + adev->mode_info.num_crtc; in dce_v8_0_set_irq_funcs()
3534 adev->crtc_irq.num_types = 0; in dce_v8_0_set_irq_funcs()
3535 adev->crtc_irq.funcs = &dce_v8_0_crtc_irq_funcs; in dce_v8_0_set_irq_funcs()
3537 adev->pageflip_irq.num_types = adev->mode_info.num_crtc; in dce_v8_0_set_irq_funcs()
3538 adev->pageflip_irq.funcs = &dce_v8_0_pageflip_irq_funcs; in dce_v8_0_set_irq_funcs()
3540 adev->hpd_irq.num_types = adev->mode_info.num_hpd; in dce_v8_0_set_irq_funcs()
3541 adev->hpd_irq.funcs = &dce_v8_0_hpd_irq_funcs; in dce_v8_0_set_irq_funcs()