1 /*
2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/radeon_drm.h>
29 #include <drm/drm_fixed.h>
30 #include "radeon.h"
31 #include "atom.h"
32 #include "atom-bits.h"
33
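/* Program the SetCRTC_OverScan table: derive the top/bottom/left/right
 * overscan borders from the requested vs. adjusted mode for centered and
 * aspect-preserving RMX modes, or use the CRTC border values for full scaling.
 */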
34 static void atombios_overscan_setup(struct drm_crtc *crtc,
35 struct drm_display_mode *mode,
36 struct drm_display_mode *adjusted_mode)
37 {
38 struct drm_device *dev = crtc->dev;
39 struct radeon_device *rdev = dev->dev_private;
40 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
41 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
42 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
43 int a1, a2;
44
45 memset(&args, 0, sizeof(args));
46
47 args.ucCRTC = radeon_crtc->crtc_id;
48
49 switch (radeon_crtc->rmx_type) {
50 case RMX_CENTER:
51 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
52 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
53 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
54 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
55 break;
56 case RMX_ASPECT:
57 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
58 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
59
60 if (a1 > a2) {
61 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
62 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
63 } else if (a2 > a1) {
64 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
65 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
66 }
67 break;
68 case RMX_FULL:
69 default:
70 args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
71 args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
72 args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
73 args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
74 break;
75 }
76 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
77 }
78
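/* Program the EnableScaler table for this CRTC.  TV outputs key the scaler to
 * the TV standard reported by the DAC encoder; otherwise the RMX type selects
 * expansion, centering, or (on AVIVO) no scaling.
 */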
79 static void atombios_scaler_setup(struct drm_crtc *crtc)
80 {
81 struct drm_device *dev = crtc->dev;
82 struct radeon_device *rdev = dev->dev_private;
83 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
84 ENABLE_SCALER_PS_ALLOCATION args;
85 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
86
87 /* fixme - fill in enc_priv for atom dac */
88 enum radeon_tv_std tv_std = TV_STD_NTSC;
89 bool is_tv = false, is_cv = false;
90 struct drm_encoder *encoder;
91
92 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
93 return;
94
95 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
96 /* find tv std */
97 if (encoder->crtc == crtc) {
98 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
99 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
100 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
101 tv_std = tv_dac->tv_std;
102 is_tv = true;
103 }
104 }
105 }
106
107 memset(&args, 0, sizeof(args));
108
109 args.ucScaler = radeon_crtc->crtc_id;
110
111 if (is_tv) {
112 switch (tv_std) {
113 case TV_STD_NTSC:
114 default:
115 args.ucTVStandard = ATOM_TV_NTSC;
116 break;
117 case TV_STD_PAL:
118 args.ucTVStandard = ATOM_TV_PAL;
119 break;
120 case TV_STD_PAL_M:
121 args.ucTVStandard = ATOM_TV_PALM;
122 break;
123 case TV_STD_PAL_60:
124 args.ucTVStandard = ATOM_TV_PAL60;
125 break;
126 case TV_STD_NTSC_J:
127 args.ucTVStandard = ATOM_TV_NTSCJ;
128 break;
129 case TV_STD_SCART_PAL:
130 args.ucTVStandard = ATOM_TV_PAL; /* ??? */
131 break;
132 case TV_STD_SECAM:
133 args.ucTVStandard = ATOM_TV_SECAM;
134 break;
135 case TV_STD_PAL_CN:
136 args.ucTVStandard = ATOM_TV_PALCN;
137 break;
138 }
139 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
140 } else if (is_cv) {
141 args.ucTVStandard = ATOM_TV_CV;
142 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
143 } else {
144 switch (radeon_crtc->rmx_type) {
145 case RMX_FULL:
146 args.ucEnable = ATOM_SCALER_EXPANSION;
147 break;
148 case RMX_CENTER:
149 args.ucEnable = ATOM_SCALER_CENTER;
150 break;
151 case RMX_ASPECT:
152 args.ucEnable = ATOM_SCALER_EXPANSION;
153 break;
154 default:
155 if (ASIC_IS_AVIVO(rdev))
156 args.ucEnable = ATOM_SCALER_DISABLE;
157 else
158 args.ucEnable = ATOM_SCALER_CENTER;
159 break;
160 }
161 }
162 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
163 if ((is_tv || is_cv)
164 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
165 atom_rv515_force_tv_scaler(rdev, radeon_crtc);
166 }
167 }
168
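/* Lock or unlock the CRTC's double-buffered registers via the
 * UpdateCRTC_DoubleBufferRegisters table around mode programming.
 */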
169 static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
170 {
171 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
172 struct drm_device *dev = crtc->dev;
173 struct radeon_device *rdev = dev->dev_private;
174 int index =
175 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
176 ENABLE_CRTC_PS_ALLOCATION args;
177
178 memset(&args, 0, sizeof(args));
179
180 args.ucCRTC = radeon_crtc->crtc_id;
181 args.ucEnable = lock;
182
183 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
184 }
185
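/* Turn the CRTC controller on or off through the EnableCRTC table. */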
186 static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
187 {
188 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
189 struct drm_device *dev = crtc->dev;
190 struct radeon_device *rdev = dev->dev_private;
191 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
192 ENABLE_CRTC_PS_ALLOCATION args;
193
194 memset(&args, 0, sizeof(args));
195
196 args.ucCRTC = radeon_crtc->crtc_id;
197 args.ucEnable = state;
198
199 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
200 }
201
202 static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
203 {
204 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
205 struct drm_device *dev = crtc->dev;
206 struct radeon_device *rdev = dev->dev_private;
207 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
208 ENABLE_CRTC_PS_ALLOCATION args;
209
210 memset(&args, 0, sizeof(args));
211
212 args.ucCRTC = radeon_crtc->crtc_id;
213 args.ucEnable = state;
214
215 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
216 }
217
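/* Blank or unblank the CRTC output through the BlankCRTC table. */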
218 static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
219 {
220 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
221 struct drm_device *dev = crtc->dev;
222 struct radeon_device *rdev = dev->dev_private;
223 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
224 BLANK_CRTC_PS_ALLOCATION args;
225
226 memset(&args, 0, sizeof(args));
227
228 args.ucCRTC = radeon_crtc->crtc_id;
229 args.ucBlanking = state;
230
231 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
232 }
233
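/* DPMS entry point: on power-up enable the CRTC, enable memory requests on
 * DCE3+ and unblank; on power-down blank, gate memory requests and disable in
 * the reverse order.  Power management clocks are recomputed around the change.
 */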
234 void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
235 {
236 struct drm_device *dev = crtc->dev;
237 struct radeon_device *rdev = dev->dev_private;
238 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
239
240 switch (mode) {
241 case DRM_MODE_DPMS_ON:
242 radeon_crtc->enabled = true;
243 /* adjust pm to dpms changes BEFORE enabling crtcs */
244 radeon_pm_compute_clocks(rdev);
245 atombios_enable_crtc(crtc, ATOM_ENABLE);
246 if (ASIC_IS_DCE3(rdev))
247 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
248 atombios_blank_crtc(crtc, ATOM_DISABLE);
249 drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
250 radeon_crtc_load_lut(crtc);
251 break;
252 case DRM_MODE_DPMS_STANDBY:
253 case DRM_MODE_DPMS_SUSPEND:
254 case DRM_MODE_DPMS_OFF:
255 drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
256 if (radeon_crtc->enabled)
257 atombios_blank_crtc(crtc, ATOM_ENABLE);
258 if (ASIC_IS_DCE3(rdev))
259 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
260 atombios_enable_crtc(crtc, ATOM_DISABLE);
261 radeon_crtc->enabled = false;
262 /* adjust pm to dpms changes AFTER disabling crtcs */
263 radeon_pm_compute_clocks(rdev);
264 break;
265 }
266 }
267
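/* Program the CRTC timing in DTD form (SetCRTC_UsingDTDTiming): active size,
 * blanking time, sync offset/width, borders, and the sync-polarity/interlace/
 * doublescan misc flags.
 */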
268 static void
269 atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
270 struct drm_display_mode *mode)
271 {
272 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
273 struct drm_device *dev = crtc->dev;
274 struct radeon_device *rdev = dev->dev_private;
275 SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
276 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
277 u16 misc = 0;
278
279 memset(&args, 0, sizeof(args));
280 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
281 args.usH_Blanking_Time =
282 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
283 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
284 args.usV_Blanking_Time =
285 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
286 args.usH_SyncOffset =
287 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
288 args.usH_SyncWidth =
289 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
290 args.usV_SyncOffset =
291 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
292 args.usV_SyncWidth =
293 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
294 args.ucH_Border = radeon_crtc->h_border;
295 args.ucV_Border = radeon_crtc->v_border;
296
297 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
298 misc |= ATOM_VSYNC_POLARITY;
299 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
300 misc |= ATOM_HSYNC_POLARITY;
301 if (mode->flags & DRM_MODE_FLAG_CSYNC)
302 misc |= ATOM_COMPOSITESYNC;
303 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
304 misc |= ATOM_INTERLACE;
305 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
306 misc |= ATOM_DOUBLE_CLOCK_MODE;
307
308 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
309 args.ucCRTC = radeon_crtc->crtc_id;
310
311 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
312 }
313
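/* Program the CRTC timing from explicit totals (SetCRTC_Timing): h/v total,
 * display size, sync start/width, overscan borders and the misc mode flags.
 */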
314 static void atombios_crtc_set_timing(struct drm_crtc *crtc,
315 struct drm_display_mode *mode)
316 {
317 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
318 struct drm_device *dev = crtc->dev;
319 struct radeon_device *rdev = dev->dev_private;
320 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
321 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
322 u16 misc = 0;
323
324 memset(&args, 0, sizeof(args));
325 args.usH_Total = cpu_to_le16(mode->crtc_htotal);
326 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
327 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
328 args.usH_SyncWidth =
329 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
330 args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
331 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
332 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
333 args.usV_SyncWidth =
334 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
335
336 args.ucOverscanRight = radeon_crtc->h_border;
337 args.ucOverscanLeft = radeon_crtc->h_border;
338 args.ucOverscanBottom = radeon_crtc->v_border;
339 args.ucOverscanTop = radeon_crtc->v_border;
340
341 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
342 misc |= ATOM_VSYNC_POLARITY;
343 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
344 misc |= ATOM_HSYNC_POLARITY;
345 if (mode->flags & DRM_MODE_FLAG_CSYNC)
346 misc |= ATOM_COMPOSITESYNC;
347 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
348 misc |= ATOM_INTERLACE;
349 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
350 misc |= ATOM_DOUBLE_CLOCK_MODE;
351
352 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
353 args.ucCRTC = radeon_crtc->crtc_id;
354
355 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
356 }
357
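/* Clear the spread spectrum enable bit for the given PLL directly in the
 * per-PLL SS control register (Evergreen or AVIVO register layout).
 */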
358 static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
359 {
360 u32 ss_cntl;
361
362 if (ASIC_IS_DCE4(rdev)) {
363 switch (pll_id) {
364 case ATOM_PPLL1:
365 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
366 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
367 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
368 break;
369 case ATOM_PPLL2:
370 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
371 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
372 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
373 break;
374 case ATOM_DCPLL:
375 case ATOM_PPLL_INVALID:
376 return;
377 }
378 } else if (ASIC_IS_AVIVO(rdev)) {
379 switch (pll_id) {
380 case ATOM_PPLL1:
381 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
382 ss_cntl &= ~1;
383 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
384 break;
385 case ATOM_PPLL2:
386 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
387 ss_cntl &= ~1;
388 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
389 break;
390 case ATOM_DCPLL:
391 case ATOM_PPLL_INVALID:
392 return;
393 }
394 }
395 }
396
397
398 union atom_enable_ss {
399 ENABLE_LVDS_SS_PARAMETERS lvds_ss;
400 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
401 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
402 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
403 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
404 };
405
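/* Enable or disable spread spectrum on a PLL through the
 * EnableSpreadSpectrumOnPPLL table, using the parameter layout (v1/v2/v3 or
 * the legacy LVDS variants) that matches the ASIC generation.
 */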
406 static void atombios_crtc_program_ss(struct radeon_device *rdev,
407 int enable,
408 int pll_id,
409 struct radeon_atom_ss *ss)
410 {
411 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
412 union atom_enable_ss args;
413
414 memset(&args, 0, sizeof(args));
415
416 if (ASIC_IS_DCE5(rdev)) {
417 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
418 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
419 switch (pll_id) {
420 case ATOM_PPLL1:
421 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
422 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
423 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
424 break;
425 case ATOM_PPLL2:
426 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
427 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
428 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
429 break;
430 case ATOM_DCPLL:
431 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
432 args.v3.usSpreadSpectrumAmount = cpu_to_le16(0);
433 args.v3.usSpreadSpectrumStep = cpu_to_le16(0);
434 break;
435 case ATOM_PPLL_INVALID:
436 return;
437 }
438 args.v3.ucEnable = enable;
439 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK))
440 args.v3.ucEnable = ATOM_DISABLE;
441 } else if (ASIC_IS_DCE4(rdev)) {
442 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
443 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
444 switch (pll_id) {
445 case ATOM_PPLL1:
446 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
447 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
448 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
449 break;
450 case ATOM_PPLL2:
451 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
452 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
453 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
454 break;
455 case ATOM_DCPLL:
456 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
457 args.v2.usSpreadSpectrumAmount = cpu_to_le16(0);
458 args.v2.usSpreadSpectrumStep = cpu_to_le16(0);
459 break;
460 case ATOM_PPLL_INVALID:
461 return;
462 }
463 args.v2.ucEnable = enable;
464 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE41(rdev))
465 args.v2.ucEnable = ATOM_DISABLE;
466 } else if (ASIC_IS_DCE3(rdev)) {
467 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
468 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
469 args.v1.ucSpreadSpectrumStep = ss->step;
470 args.v1.ucSpreadSpectrumDelay = ss->delay;
471 args.v1.ucSpreadSpectrumRange = ss->range;
472 args.v1.ucPpll = pll_id;
473 args.v1.ucEnable = enable;
474 } else if (ASIC_IS_AVIVO(rdev)) {
475 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
476 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
477 atombios_disable_ss(rdev, pll_id);
478 return;
479 }
480 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
481 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
482 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
483 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
484 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
485 args.lvds_ss_2.ucEnable = enable;
486 } else {
487 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
488 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
489 atombios_disable_ss(rdev, pll_id);
490 return;
491 }
492 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
493 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
494 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
495 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
496 args.lvds_ss.ucEnable = enable;
497 }
498 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
499 }
500
501 union adjust_pixel_clock {
502 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
503 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
504 };
505
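/* Set up the PLL flags for this CRTC/encoder pair and, on DCE3+, run the
 * AdjustDisplayPll table so the BIOS can adjust the pixel clock (and possibly
 * dictate the reference/post dividers) for DP, deep color, dual link and
 * spread spectrum cases.  Returns the (possibly adjusted) pixel clock.
 */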
506 static u32 atombios_adjust_pll(struct drm_crtc *crtc,
507 struct drm_display_mode *mode,
508 struct radeon_pll *pll,
509 bool ss_enabled,
510 struct radeon_atom_ss *ss)
511 {
512 struct drm_device *dev = crtc->dev;
513 struct radeon_device *rdev = dev->dev_private;
514 struct drm_encoder *encoder = NULL;
515 struct radeon_encoder *radeon_encoder = NULL;
516 struct drm_connector *connector = NULL;
517 u32 adjusted_clock = mode->clock;
518 int encoder_mode = 0;
519 u32 dp_clock = mode->clock;
520 int bpc = 8;
521 bool is_duallink = false;
522
523 /* reset the pll flags */
524 pll->flags = 0;
525
526 if (ASIC_IS_AVIVO(rdev)) {
527 if ((rdev->family == CHIP_RS600) ||
528 (rdev->family == CHIP_RS690) ||
529 (rdev->family == CHIP_RS740))
530 pll->flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
531 RADEON_PLL_PREFER_CLOSEST_LOWER);
532
533 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */
534 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
535 else
536 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
537
538 if (rdev->family < CHIP_RV770)
539 pll->flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
540 } else {
541 pll->flags |= RADEON_PLL_LEGACY;
542
543 if (mode->clock > 200000) /* range limits??? */
544 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
545 else
546 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
547 }
548
549 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
550 if (encoder->crtc == crtc) {
551 radeon_encoder = to_radeon_encoder(encoder);
552 connector = radeon_get_connector_for_encoder(encoder);
553 if (connector && connector->display_info.bpc)
554 bpc = connector->display_info.bpc;
555 encoder_mode = atombios_get_encoder_mode(encoder);
556 is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
557 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
558 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
559 if (connector) {
560 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
561 struct radeon_connector_atom_dig *dig_connector =
562 radeon_connector->con_priv;
563
564 dp_clock = dig_connector->dp_clock;
565 }
566 }
567
568 /* use recommended ref_div for ss */
569 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
570 if (ss_enabled) {
571 if (ss->refdiv) {
572 pll->flags |= RADEON_PLL_USE_REF_DIV;
573 pll->reference_div = ss->refdiv;
574 if (ASIC_IS_AVIVO(rdev))
575 pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
576 }
577 }
578 }
579
580 if (ASIC_IS_AVIVO(rdev)) {
581 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
582 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
583 adjusted_clock = mode->clock * 2;
584 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
585 pll->flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
586 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
587 pll->flags |= RADEON_PLL_IS_LCD;
588 } else {
589 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
590 pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
591 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
592 pll->flags |= RADEON_PLL_USE_REF_DIV;
593 }
594 break;
595 }
596 }
597
598 /* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
599 * accordingly based on the encoder/transmitter to work around
600 * special hw requirements.
601 */
602 if (ASIC_IS_DCE3(rdev)) {
603 union adjust_pixel_clock args;
604 u8 frev, crev;
605 int index;
606
607 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
608 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
609 &crev))
610 return adjusted_clock;
611
612 memset(&args, 0, sizeof(args));
613
614 switch (frev) {
615 case 1:
616 switch (crev) {
617 case 1:
618 case 2:
619 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
620 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
621 args.v1.ucEncodeMode = encoder_mode;
622 if (ss_enabled && ss->percentage)
623 args.v1.ucConfig |=
624 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
625
626 atom_execute_table(rdev->mode_info.atom_context,
627 index, (uint32_t *)&args);
628 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
629 break;
630 case 3:
631 args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
632 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
633 args.v3.sInput.ucEncodeMode = encoder_mode;
634 args.v3.sInput.ucDispPllConfig = 0;
635 if (ss_enabled && ss->percentage)
636 args.v3.sInput.ucDispPllConfig |=
637 DISPPLL_CONFIG_SS_ENABLE;
638 if (ENCODER_MODE_IS_DP(encoder_mode)) {
639 args.v3.sInput.ucDispPllConfig |=
640 DISPPLL_CONFIG_COHERENT_MODE;
641 /* 16200 or 27000 */
642 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
643 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
644 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
645 if (encoder_mode == ATOM_ENCODER_MODE_HDMI)
646 /* deep color support */
647 args.v3.sInput.usPixelClock =
648 cpu_to_le16((mode->clock * bpc / 8) / 10);
649 if (dig->coherent_mode)
650 args.v3.sInput.ucDispPllConfig |=
651 DISPPLL_CONFIG_COHERENT_MODE;
652 if (is_duallink)
653 args.v3.sInput.ucDispPllConfig |=
654 DISPPLL_CONFIG_DUAL_LINK;
655 }
656 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
657 ENCODER_OBJECT_ID_NONE)
658 args.v3.sInput.ucExtTransmitterID =
659 radeon_encoder_get_dp_bridge_encoder_id(encoder);
660 else
661 args.v3.sInput.ucExtTransmitterID = 0;
662
663 atom_execute_table(rdev->mode_info.atom_context,
664 index, (uint32_t *)&args);
665 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
666 if (args.v3.sOutput.ucRefDiv) {
667 pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
668 pll->flags |= RADEON_PLL_USE_REF_DIV;
669 pll->reference_div = args.v3.sOutput.ucRefDiv;
670 }
671 if (args.v3.sOutput.ucPostDiv) {
672 pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
673 pll->flags |= RADEON_PLL_USE_POST_DIV;
674 pll->post_div = args.v3.sOutput.ucPostDiv;
675 }
676 break;
677 default:
678 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
679 return adjusted_clock;
680 }
681 break;
682 default:
683 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
684 return adjusted_clock;
685 }
686 }
687 return adjusted_clock;
688 }
689
690 union set_pixel_clock {
691 SET_PIXEL_CLOCK_PS_ALLOCATION base;
692 PIXEL_CLOCK_PARAMETERS v1;
693 PIXEL_CLOCK_PARAMETERS_V2 v2;
694 PIXEL_CLOCK_PARAMETERS_V3 v3;
695 PIXEL_CLOCK_PARAMETERS_V5 v5;
696 PIXEL_CLOCK_PARAMETERS_V6 v6;
697 };
698
699 /* on DCE5, make sure the voltage is high enough to support the
700 * required disp clk.
701 */
702 static void atombios_crtc_set_dcpll(struct radeon_device *rdev,
703 u32 dispclk)
704 {
705 u8 frev, crev;
706 int index;
707 union set_pixel_clock args;
708
709 memset(&args, 0, sizeof(args));
710
711 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
712 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
713 &crev))
714 return;
715
716 switch (frev) {
717 case 1:
718 switch (crev) {
719 case 5:
720 /* if the default dcpll clock is specified,
721 * SetPixelClock provides the dividers
722 */
723 args.v5.ucCRTC = ATOM_CRTC_INVALID;
724 args.v5.usPixelClock = cpu_to_le16(dispclk);
725 args.v5.ucPpll = ATOM_DCPLL;
726 break;
727 case 6:
728 /* if the default dcpll clock is specified,
729 * SetPixelClock provides the dividers
730 */
731 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
732 args.v6.ucPpll = ATOM_DCPLL;
733 break;
734 default:
735 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
736 return;
737 }
738 break;
739 default:
740 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
741 return;
742 }
743 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
744 }
745
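/* Program (or disable) a PLL through the SetPixelClock table, filling in the
 * divider, encoder and HDMI-depth fields required by whichever table revision
 * the BIOS reports.
 */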
746 static void atombios_crtc_program_pll(struct drm_crtc *crtc,
747 u32 crtc_id,
748 int pll_id,
749 u32 encoder_mode,
750 u32 encoder_id,
751 u32 clock,
752 u32 ref_div,
753 u32 fb_div,
754 u32 frac_fb_div,
755 u32 post_div,
756 int bpc,
757 bool ss_enabled,
758 struct radeon_atom_ss *ss)
759 {
760 struct drm_device *dev = crtc->dev;
761 struct radeon_device *rdev = dev->dev_private;
762 u8 frev, crev;
763 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
764 union set_pixel_clock args;
765
766 memset(&args, 0, sizeof(args));
767
768 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
769 &crev))
770 return;
771
772 switch (frev) {
773 case 1:
774 switch (crev) {
775 case 1:
776 if (clock == ATOM_DISABLE)
777 return;
778 args.v1.usPixelClock = cpu_to_le16(clock / 10);
779 args.v1.usRefDiv = cpu_to_le16(ref_div);
780 args.v1.usFbDiv = cpu_to_le16(fb_div);
781 args.v1.ucFracFbDiv = frac_fb_div;
782 args.v1.ucPostDiv = post_div;
783 args.v1.ucPpll = pll_id;
784 args.v1.ucCRTC = crtc_id;
785 args.v1.ucRefDivSrc = 1;
786 break;
787 case 2:
788 args.v2.usPixelClock = cpu_to_le16(clock / 10);
789 args.v2.usRefDiv = cpu_to_le16(ref_div);
790 args.v2.usFbDiv = cpu_to_le16(fb_div);
791 args.v2.ucFracFbDiv = frac_fb_div;
792 args.v2.ucPostDiv = post_div;
793 args.v2.ucPpll = pll_id;
794 args.v2.ucCRTC = crtc_id;
795 args.v2.ucRefDivSrc = 1;
796 break;
797 case 3:
798 args.v3.usPixelClock = cpu_to_le16(clock / 10);
799 args.v3.usRefDiv = cpu_to_le16(ref_div);
800 args.v3.usFbDiv = cpu_to_le16(fb_div);
801 args.v3.ucFracFbDiv = frac_fb_div;
802 args.v3.ucPostDiv = post_div;
803 args.v3.ucPpll = pll_id;
804 args.v3.ucMiscInfo = (pll_id << 2);
805 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
806 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
807 args.v3.ucTransmitterId = encoder_id;
808 args.v3.ucEncoderMode = encoder_mode;
809 break;
810 case 5:
811 args.v5.ucCRTC = crtc_id;
812 args.v5.usPixelClock = cpu_to_le16(clock / 10);
813 args.v5.ucRefDiv = ref_div;
814 args.v5.usFbDiv = cpu_to_le16(fb_div);
815 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
816 args.v5.ucPostDiv = post_div;
817 args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
818 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
819 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
820 switch (bpc) {
821 case 8:
822 default:
823 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
824 break;
825 case 10:
826 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
827 break;
828 }
829 args.v5.ucTransmitterID = encoder_id;
830 args.v5.ucEncoderMode = encoder_mode;
831 args.v5.ucPpll = pll_id;
832 break;
833 case 6:
834 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
835 args.v6.ucRefDiv = ref_div;
836 args.v6.usFbDiv = cpu_to_le16(fb_div);
837 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
838 args.v6.ucPostDiv = post_div;
839 args.v6.ucMiscInfo = 0; /* HDMI depth, etc. */
840 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
841 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
842 switch (bpc) {
843 case 8:
844 default:
845 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
846 break;
847 case 10:
848 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP;
849 break;
850 case 12:
851 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP;
852 break;
853 case 16:
854 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
855 break;
856 }
857 args.v6.ucTransmitterID = encoder_id;
858 args.v6.ucEncoderMode = encoder_mode;
859 args.v6.ucPpll = pll_id;
860 break;
861 default:
862 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
863 return;
864 }
865 break;
866 default:
867 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
868 return;
869 }
870
871 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
872 }
873
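/* Top-level PLL setup for a mode set: find the encoder driving this CRTC,
 * look up spread spectrum parameters, compute the dividers with the AVIVO or
 * legacy algorithm, then program spread spectrum and the pixel PLL.
 */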
874 static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
875 {
876 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
877 struct drm_device *dev = crtc->dev;
878 struct radeon_device *rdev = dev->dev_private;
879 struct drm_encoder *encoder = NULL;
880 struct radeon_encoder *radeon_encoder = NULL;
881 u32 pll_clock = mode->clock;
882 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
883 struct radeon_pll *pll;
884 u32 adjusted_clock;
885 int encoder_mode = 0;
886 struct radeon_atom_ss ss;
887 bool ss_enabled = false;
888 int bpc = 8;
889
890 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
891 if (encoder->crtc == crtc) {
892 radeon_encoder = to_radeon_encoder(encoder);
893 encoder_mode = atombios_get_encoder_mode(encoder);
894 break;
895 }
896 }
897
898 if (!radeon_encoder)
899 return;
900
901 switch (radeon_crtc->pll_id) {
902 case ATOM_PPLL1:
903 pll = &rdev->clock.p1pll;
904 break;
905 case ATOM_PPLL2:
906 pll = &rdev->clock.p2pll;
907 break;
908 case ATOM_DCPLL:
909 case ATOM_PPLL_INVALID:
910 default:
911 pll = &rdev->clock.dcpll;
912 break;
913 }
914
915 if (radeon_encoder->active_device &
916 (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) {
917 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
918 struct drm_connector *connector =
919 radeon_get_connector_for_encoder(encoder);
920 struct radeon_connector *radeon_connector =
921 to_radeon_connector(connector);
922 struct radeon_connector_atom_dig *dig_connector =
923 radeon_connector->con_priv;
924 int dp_clock;
925 bpc = connector->display_info.bpc;
926
927 switch (encoder_mode) {
928 case ATOM_ENCODER_MODE_DP_MST:
929 case ATOM_ENCODER_MODE_DP:
930 /* DP/eDP */
931 dp_clock = dig_connector->dp_clock / 10;
932 if (ASIC_IS_DCE4(rdev))
933 ss_enabled =
934 radeon_atombios_get_asic_ss_info(rdev, &ss,
935 ASIC_INTERNAL_SS_ON_DP,
936 dp_clock);
937 else {
938 if (dp_clock == 16200) {
939 ss_enabled =
940 radeon_atombios_get_ppll_ss_info(rdev, &ss,
941 ATOM_DP_SS_ID2);
942 if (!ss_enabled)
943 ss_enabled =
944 radeon_atombios_get_ppll_ss_info(rdev, &ss,
945 ATOM_DP_SS_ID1);
946 } else
947 ss_enabled =
948 radeon_atombios_get_ppll_ss_info(rdev, &ss,
949 ATOM_DP_SS_ID1);
950 }
951 break;
952 case ATOM_ENCODER_MODE_LVDS:
953 if (ASIC_IS_DCE4(rdev))
954 ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
955 dig->lcd_ss_id,
956 mode->clock / 10);
957 else
958 ss_enabled = radeon_atombios_get_ppll_ss_info(rdev, &ss,
959 dig->lcd_ss_id);
960 break;
961 case ATOM_ENCODER_MODE_DVI:
962 if (ASIC_IS_DCE4(rdev))
963 ss_enabled =
964 radeon_atombios_get_asic_ss_info(rdev, &ss,
965 ASIC_INTERNAL_SS_ON_TMDS,
966 mode->clock / 10);
967 break;
968 case ATOM_ENCODER_MODE_HDMI:
969 if (ASIC_IS_DCE4(rdev))
970 ss_enabled =
971 radeon_atombios_get_asic_ss_info(rdev, &ss,
972 ASIC_INTERNAL_SS_ON_HDMI,
973 mode->clock / 10);
974 break;
975 default:
976 break;
977 }
978 }
979
980 /* adjust pixel clock as needed */
981 adjusted_clock = atombios_adjust_pll(crtc, mode, pll, ss_enabled, &ss);
982
983 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
984 /* TV seems to prefer the legacy algo on some boards */
985 radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
986 &ref_div, &post_div);
987 else if (ASIC_IS_AVIVO(rdev))
988 radeon_compute_pll_avivo(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
989 &ref_div, &post_div);
990 else
991 radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
992 &ref_div, &post_div);
993
994 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id, &ss);
995
996 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
997 encoder_mode, radeon_encoder->encoder_id, mode->clock,
998 ref_div, fb_div, frac_fb_div, post_div, bpc, ss_enabled, &ss);
999
1000 if (ss_enabled) {
1001 /* calculate ss amount and step size */
1002 if (ASIC_IS_DCE4(rdev)) {
1003 u32 step_size;
1004 u32 amount = (((fb_div * 10) + frac_fb_div) * ss.percentage) / 10000;
1005 ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1006 ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1007 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1008 if (ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1009 step_size = (4 * amount * ref_div * (ss.rate * 2048)) /
1010 (125 * 25 * pll->reference_freq / 100);
1011 else
1012 step_size = (2 * amount * ref_div * (ss.rate * 2048)) /
1013 (125 * 25 * pll->reference_freq / 100);
1014 ss.step = step_size;
1015 }
1016
1017 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id, &ss);
1018 }
1019 }
1020
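/* DCE4+ scanout setup: pin the framebuffer object (unless this is an atomic
 * update), translate depth and tiling flags into GRPH_CONTROL bits, and
 * program the surface address, pitch, viewport and page-flip behaviour.
 */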
1021 static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1022 struct drm_framebuffer *fb,
1023 int x, int y, int atomic)
1024 {
1025 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1026 struct drm_device *dev = crtc->dev;
1027 struct radeon_device *rdev = dev->dev_private;
1028 struct radeon_framebuffer *radeon_fb;
1029 struct drm_framebuffer *target_fb;
1030 struct drm_gem_object *obj;
1031 struct radeon_bo *rbo;
1032 uint64_t fb_location;
1033 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1034 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1035 u32 tmp, viewport_w, viewport_h;
1036 int r;
1037
1038 /* no fb bound */
1039 if (!atomic && !crtc->fb) {
1040 DRM_DEBUG_KMS("No FB bound\n");
1041 return 0;
1042 }
1043
1044 if (atomic) {
1045 radeon_fb = to_radeon_framebuffer(fb);
1046 target_fb = fb;
1047 }
1048 else {
1049 radeon_fb = to_radeon_framebuffer(crtc->fb);
1050 target_fb = crtc->fb;
1051 }
1052
1053 /* If atomic, assume fb object is pinned & idle & fenced and
1054 * just update base pointers
1055 */
1056 obj = radeon_fb->obj;
1057 rbo = gem_to_radeon_bo(obj);
1058 r = radeon_bo_reserve(rbo, false);
1059 if (unlikely(r != 0))
1060 return r;
1061
1062 if (atomic)
1063 fb_location = radeon_bo_gpu_offset(rbo);
1064 else {
1065 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1066 if (unlikely(r != 0)) {
1067 radeon_bo_unreserve(rbo);
1068 return -EINVAL;
1069 }
1070 }
1071
1072 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1073 radeon_bo_unreserve(rbo);
1074
1075 switch (target_fb->bits_per_pixel) {
1076 case 8:
1077 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1078 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1079 break;
1080 case 15:
1081 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1082 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1083 break;
1084 case 16:
1085 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1086 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1087 #ifdef __BIG_ENDIAN
1088 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1089 #endif
1090 break;
1091 case 24:
1092 case 32:
1093 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1094 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1095 #ifdef __BIG_ENDIAN
1096 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1097 #endif
1098 break;
1099 default:
1100 DRM_ERROR("Unsupported screen depth %d\n",
1101 target_fb->bits_per_pixel);
1102 return -EINVAL;
1103 }
1104
1105 if (tiling_flags & RADEON_TILING_MACRO) {
1106 if (rdev->family >= CHIP_CAYMAN)
1107 tmp = rdev->config.cayman.tile_config;
1108 else
1109 tmp = rdev->config.evergreen.tile_config;
1110
1111 switch ((tmp & 0xf0) >> 4) {
1112 case 0: /* 4 banks */
1113 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1114 break;
1115 case 1: /* 8 banks */
1116 default:
1117 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1118 break;
1119 case 2: /* 16 banks */
1120 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1121 break;
1122 }
1123
1124 switch ((tmp & 0xf000) >> 12) {
1125 case 0: /* 1KB rows */
1126 default:
1127 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(EVERGREEN_ADDR_SURF_TILE_SPLIT_1KB);
1128 break;
1129 case 1: /* 2KB rows */
1130 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(EVERGREEN_ADDR_SURF_TILE_SPLIT_2KB);
1131 break;
1132 case 2: /* 4KB rows */
1133 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(EVERGREEN_ADDR_SURF_TILE_SPLIT_4KB);
1134 break;
1135 }
1136
1137 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1138 } else if (tiling_flags & RADEON_TILING_MICRO)
1139 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1140
1141 switch (radeon_crtc->crtc_id) {
1142 case 0:
1143 WREG32(AVIVO_D1VGA_CONTROL, 0);
1144 break;
1145 case 1:
1146 WREG32(AVIVO_D2VGA_CONTROL, 0);
1147 break;
1148 case 2:
1149 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1150 break;
1151 case 3:
1152 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1153 break;
1154 case 4:
1155 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1156 break;
1157 case 5:
1158 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1159 break;
1160 default:
1161 break;
1162 }
1163
1164 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1165 upper_32_bits(fb_location));
1166 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1167 upper_32_bits(fb_location));
1168 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1169 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1170 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1171 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1172 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1173 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1174
1175 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1176 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1177 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1178 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1179 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1180 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1181
1182 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1183 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1184 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1185
1186 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1187 target_fb->height);
1188 x &= ~3;
1189 y &= ~1;
1190 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1191 (x << 16) | y);
1192 viewport_w = crtc->mode.hdisplay;
1193 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1194 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1195 (viewport_w << 16) | viewport_h);
1196
1197 /* pageflip setup */
1198 /* make sure flip is at vb rather than hb */
1199 tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1200 tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1201 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1202
1203 /* set pageflip to happen anywhere in vblank interval */
1204 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1205
1206 if (!atomic && fb && fb != crtc->fb) {
1207 radeon_fb = to_radeon_framebuffer(fb);
1208 rbo = gem_to_radeon_bo(radeon_fb->obj);
1209 r = radeon_bo_reserve(rbo, false);
1210 if (unlikely(r != 0))
1211 return r;
1212 radeon_bo_unpin(rbo);
1213 radeon_bo_unreserve(rbo);
1214 }
1215
1216 /* Bytes per pixel may have changed */
1217 radeon_bandwidth_update(rdev);
1218
1219 return 0;
1220 }
1221
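/* AVIVO (pre-DCE4) counterpart of the scanout setup above, using the
 * D1GRPH/D2GRPH register layout.
 */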
1222 static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1223 struct drm_framebuffer *fb,
1224 int x, int y, int atomic)
1225 {
1226 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1227 struct drm_device *dev = crtc->dev;
1228 struct radeon_device *rdev = dev->dev_private;
1229 struct radeon_framebuffer *radeon_fb;
1230 struct drm_gem_object *obj;
1231 struct radeon_bo *rbo;
1232 struct drm_framebuffer *target_fb;
1233 uint64_t fb_location;
1234 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1235 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1236 u32 tmp, viewport_w, viewport_h;
1237 int r;
1238
1239 /* no fb bound */
1240 if (!atomic && !crtc->fb) {
1241 DRM_DEBUG_KMS("No FB bound\n");
1242 return 0;
1243 }
1244
1245 if (atomic) {
1246 radeon_fb = to_radeon_framebuffer(fb);
1247 target_fb = fb;
1248 }
1249 else {
1250 radeon_fb = to_radeon_framebuffer(crtc->fb);
1251 target_fb = crtc->fb;
1252 }
1253
1254 obj = radeon_fb->obj;
1255 rbo = gem_to_radeon_bo(obj);
1256 r = radeon_bo_reserve(rbo, false);
1257 if (unlikely(r != 0))
1258 return r;
1259
1260 /* If atomic, assume fb object is pinned & idle & fenced and
1261 * just update base pointers
1262 */
1263 if (atomic)
1264 fb_location = radeon_bo_gpu_offset(rbo);
1265 else {
1266 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1267 if (unlikely(r != 0)) {
1268 radeon_bo_unreserve(rbo);
1269 return -EINVAL;
1270 }
1271 }
1272 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1273 radeon_bo_unreserve(rbo);
1274
1275 switch (target_fb->bits_per_pixel) {
1276 case 8:
1277 fb_format =
1278 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1279 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1280 break;
1281 case 15:
1282 fb_format =
1283 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1284 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1285 break;
1286 case 16:
1287 fb_format =
1288 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1289 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1290 #ifdef __BIG_ENDIAN
1291 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1292 #endif
1293 break;
1294 case 24:
1295 case 32:
1296 fb_format =
1297 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1298 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1299 #ifdef __BIG_ENDIAN
1300 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1301 #endif
1302 break;
1303 default:
1304 DRM_ERROR("Unsupported screen depth %d\n",
1305 target_fb->bits_per_pixel);
1306 return -EINVAL;
1307 }
1308
1309 if (rdev->family >= CHIP_R600) {
1310 if (tiling_flags & RADEON_TILING_MACRO)
1311 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1312 else if (tiling_flags & RADEON_TILING_MICRO)
1313 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1314 } else {
1315 if (tiling_flags & RADEON_TILING_MACRO)
1316 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1317
1318 if (tiling_flags & RADEON_TILING_MICRO)
1319 fb_format |= AVIVO_D1GRPH_TILED;
1320 }
1321
1322 if (radeon_crtc->crtc_id == 0)
1323 WREG32(AVIVO_D1VGA_CONTROL, 0);
1324 else
1325 WREG32(AVIVO_D2VGA_CONTROL, 0);
1326
1327 if (rdev->family >= CHIP_RV770) {
1328 if (radeon_crtc->crtc_id) {
1329 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1330 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1331 } else {
1332 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1333 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1334 }
1335 }
1336 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1337 (u32) fb_location);
1338 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1339 radeon_crtc->crtc_offset, (u32) fb_location);
1340 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1341 if (rdev->family >= CHIP_R600)
1342 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1343
1344 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1345 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1346 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1347 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1348 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1349 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1350
1351 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1352 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1353 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1354
1355 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1356 target_fb->height);
1357 x &= ~3;
1358 y &= ~1;
1359 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1360 (x << 16) | y);
1361 viewport_w = crtc->mode.hdisplay;
1362 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1363 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1364 (viewport_w << 16) | viewport_h);
1365
1366 /* pageflip setup */
1367 /* make sure flip is at vb rather than hb */
1368 tmp = RREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1369 tmp &= ~AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1370 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1371
1372 /* set pageflip to happen anywhere in vblank interval */
1373 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1374
1375 if (!atomic && fb && fb != crtc->fb) {
1376 radeon_fb = to_radeon_framebuffer(fb);
1377 rbo = gem_to_radeon_bo(radeon_fb->obj);
1378 r = radeon_bo_reserve(rbo, false);
1379 if (unlikely(r != 0))
1380 return r;
1381 radeon_bo_unpin(rbo);
1382 radeon_bo_unreserve(rbo);
1383 }
1384
1385 /* Bytes per pixel may have changed */
1386 radeon_bandwidth_update(rdev);
1387
1388 return 0;
1389 }
1390
1391 int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1392 struct drm_framebuffer *old_fb)
1393 {
1394 struct drm_device *dev = crtc->dev;
1395 struct radeon_device *rdev = dev->dev_private;
1396
1397 if (ASIC_IS_DCE4(rdev))
1398 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1399 else if (ASIC_IS_AVIVO(rdev))
1400 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1401 else
1402 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1403 }
1404
1405 int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1406 struct drm_framebuffer *fb,
1407 int x, int y, enum mode_set_atomic state)
1408 {
1409 struct drm_device *dev = crtc->dev;
1410 struct radeon_device *rdev = dev->dev_private;
1411
1412 if (ASIC_IS_DCE4(rdev))
1413 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1414 else if (ASIC_IS_AVIVO(rdev))
1415 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1416 else
1417 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1418 }
1419
1420 /* properly set additional regs when using atombios */
1421 static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1422 {
1423 struct drm_device *dev = crtc->dev;
1424 struct radeon_device *rdev = dev->dev_private;
1425 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1426 u32 disp_merge_cntl;
1427
1428 switch (radeon_crtc->crtc_id) {
1429 case 0:
1430 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1431 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1432 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1433 break;
1434 case 1:
1435 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1436 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1437 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1438 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1439 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1440 break;
1441 }
1442 }
1443
1444 static int radeon_atom_pick_pll(struct drm_crtc *crtc)
1445 {
1446 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1447 struct drm_device *dev = crtc->dev;
1448 struct radeon_device *rdev = dev->dev_private;
1449 struct drm_encoder *test_encoder;
1450 struct drm_crtc *test_crtc;
1451 uint32_t pll_in_use = 0;
1452
1453 if (ASIC_IS_DCE4(rdev)) {
1454 list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
1455 if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
1456 /* in DP mode, the DP ref clock can come from PPLL, DCPLL, or ext clock,
1457 * depending on the asic:
1458 * DCE4: PPLL or ext clock
1459 * DCE5: DCPLL or ext clock
1460 *
1461 * Setting ATOM_PPLL_INVALID will cause SetPixelClock to skip
1462 * PPLL/DCPLL programming and only program the DP DTO for the
1463 * crtc virtual pixel clock.
1464 */
1465 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_encoder))) {
1466 if (ASIC_IS_DCE5(rdev) || rdev->clock.dp_extclk)
1467 return ATOM_PPLL_INVALID;
1468 }
1469 }
1470 }
1471
1472 /* otherwise, pick one of the plls */
1473 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1474 struct radeon_crtc *radeon_test_crtc;
1475
1476 if (crtc == test_crtc)
1477 continue;
1478
1479 radeon_test_crtc = to_radeon_crtc(test_crtc);
1480 if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
1481 (radeon_test_crtc->pll_id <= ATOM_PPLL2))
1482 pll_in_use |= (1 << radeon_test_crtc->pll_id);
1483 }
1484 if (!(pll_in_use & 1))
1485 return ATOM_PPLL1;
1486 return ATOM_PPLL2;
1487 } else
1488 return radeon_crtc->crtc_id;
1489
1490 }
1491
1492 void radeon_atom_dcpll_init(struct radeon_device *rdev)
1493 {
1494 /* always set DCPLL */
1495 if (ASIC_IS_DCE4(rdev)) {
1496 struct radeon_atom_ss ss;
1497 bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
1498 ASIC_INTERNAL_SS_ON_DCPLL,
1499 rdev->clock.default_dispclk);
1500 if (ss_enabled)
1501 atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, &ss);
1502 /* XXX: DCE5, make sure voltage, dispclk is high enough */
1503 atombios_crtc_set_dcpll(rdev, rdev->clock.default_dispclk);
1504 if (ss_enabled)
1505 atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, &ss);
1506 }
1507
1508 }
1509
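/* The drm_crtc_helper mode_set hook: program the PLL, the CRTC timing (DTD or
 * explicit form depending on the ASIC and TV/CV use), the scanout base, the
 * overscan borders and the scaler for the adjusted mode.
 */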
1510 int atombios_crtc_mode_set(struct drm_crtc *crtc,
1511 struct drm_display_mode *mode,
1512 struct drm_display_mode *adjusted_mode,
1513 int x, int y, struct drm_framebuffer *old_fb)
1514 {
1515 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1516 struct drm_device *dev = crtc->dev;
1517 struct radeon_device *rdev = dev->dev_private;
1518 struct drm_encoder *encoder;
1519 bool is_tvcv = false;
1520
1521 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1522 /* find tv std */
1523 if (encoder->crtc == crtc) {
1524 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1525 if (radeon_encoder->active_device &
1526 (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
1527 is_tvcv = true;
1528 }
1529 }
1530
1531 atombios_crtc_set_pll(crtc, adjusted_mode);
1532
1533 if (ASIC_IS_DCE4(rdev))
1534 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1535 else if (ASIC_IS_AVIVO(rdev)) {
1536 if (is_tvcv)
1537 atombios_crtc_set_timing(crtc, adjusted_mode);
1538 else
1539 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1540 } else {
1541 atombios_crtc_set_timing(crtc, adjusted_mode);
1542 if (radeon_crtc->crtc_id == 0)
1543 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1544 radeon_legacy_atom_fixup(crtc);
1545 }
1546 atombios_crtc_set_base(crtc, x, y, old_fb);
1547 atombios_overscan_setup(crtc, mode, adjusted_mode);
1548 atombios_scaler_setup(crtc);
1549 return 0;
1550 }
1551
1552 static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
1553 struct drm_display_mode *mode,
1554 struct drm_display_mode *adjusted_mode)
1555 {
1556 if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
1557 return false;
1558 return true;
1559 }
1560
1561 static void atombios_crtc_prepare(struct drm_crtc *crtc)
1562 {
1563 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1564
1565 /* pick pll */
1566 radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
1567
1568 atombios_lock_crtc(crtc, ATOM_ENABLE);
1569 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1570 }
1571
1572 static void atombios_crtc_commit(struct drm_crtc *crtc)
1573 {
1574 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
1575 atombios_lock_crtc(crtc, ATOM_DISABLE);
1576 }
1577
1578 static void atombios_crtc_disable(struct drm_crtc *crtc)
1579 {
1580 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1581 struct radeon_atom_ss ss;
1582
1583 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1584
1585 switch (radeon_crtc->pll_id) {
1586 case ATOM_PPLL1:
1587 case ATOM_PPLL2:
1588 /* disable the ppll */
1589 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1590 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
1591 break;
1592 default:
1593 break;
1594 }
1595 radeon_crtc->pll_id = -1;
1596 }
1597
1598 static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
1599 .dpms = atombios_crtc_dpms,
1600 .mode_fixup = atombios_crtc_mode_fixup,
1601 .mode_set = atombios_crtc_mode_set,
1602 .mode_set_base = atombios_crtc_set_base,
1603 .mode_set_base_atomic = atombios_crtc_set_base_atomic,
1604 .prepare = atombios_crtc_prepare,
1605 .commit = atombios_crtc_commit,
1606 .load_lut = radeon_crtc_load_lut,
1607 .disable = atombios_crtc_disable,
1608 };
1609
1610 void radeon_atombios_init_crtc(struct drm_device *dev,
1611 struct radeon_crtc *radeon_crtc)
1612 {
1613 struct radeon_device *rdev = dev->dev_private;
1614
1615 if (ASIC_IS_DCE4(rdev)) {
1616 switch (radeon_crtc->crtc_id) {
1617 case 0:
1618 default:
1619 radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
1620 break;
1621 case 1:
1622 radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
1623 break;
1624 case 2:
1625 radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
1626 break;
1627 case 3:
1628 radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
1629 break;
1630 case 4:
1631 radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
1632 break;
1633 case 5:
1634 radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
1635 break;
1636 }
1637 } else {
1638 if (radeon_crtc->crtc_id == 1)
1639 radeon_crtc->crtc_offset =
1640 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
1641 else
1642 radeon_crtc->crtc_offset = 0;
1643 }
1644 radeon_crtc->pll_id = -1;
1645 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
1646 }
1647