1 /*
2 * Copyright © 2012 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
25 *
26 */
27
28 #include <linux/i2c.h>
29 #include <linux/module.h>
30 #include <linux/slab.h>
31
32 #include <drm/display/drm_dp_helper.h>
33 #include <drm/drm_crtc.h>
34 #include <drm/drm_crtc_helper.h>
35 #include <drm/drm_edid.h>
36 #include <drm/drm_modeset_helper_vtables.h>
37 #include <drm/drm_simple_kms_helper.h>
38
39 #include "gma_display.h"
40 #include "psb_drv.h"
41 #include "psb_intel_drv.h"
42 #include "psb_intel_reg.h"
43
44 /**
45 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
46 * aux algorithm
47 * @running: set by the algo indicating whether an i2c is ongoing or whether
48 * the i2c bus is quiescent
49 * @address: i2c target address for the currently ongoing transfer
50 * @aux_ch: driver callback to transfer a single byte of the i2c payload
51 */
52 struct i2c_algo_dp_aux_data {
53 bool running;
54 u16 address;
55 int (*aux_ch) (struct i2c_adapter *adapter,
56 int mode, uint8_t write_byte,
57 uint8_t *read_byte);
58 };
59
60 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
61 static int
i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
			    uint8_t write_byte, uint8_t *read_byte)
64 {
65 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
66 int ret;
67
68 ret = (*algo_data->aux_ch)(adapter, mode,
69 write_byte, read_byte);
70 return ret;
71 }
72
73 /*
74 * I2C over AUX CH
75 */
76
/*
 * Send the address. If the I2C link is already running, this 'restarts'
 * the connection with the new address; this is how a write followed by
 * a read (as needed for DDC) is performed.
 */
82 static int
i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
84 {
85 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
86 int mode = MODE_I2C_START;
87
88 if (reading)
89 mode |= MODE_I2C_READ;
90 else
91 mode |= MODE_I2C_WRITE;
92 algo_data->address = address;
93 algo_data->running = true;
94 return i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
95 }
96
97 /*
98 * Stop the I2C transaction. This closes out the link, sending
99 * a bare address packet with the MOT bit turned off
100 */
101 static void
i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
103 {
104 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
105 int mode = MODE_I2C_STOP;
106
107 if (reading)
108 mode |= MODE_I2C_READ;
109 else
110 mode |= MODE_I2C_WRITE;
111 if (algo_data->running) {
112 (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
113 algo_data->running = false;
114 }
115 }
116
/*
 * Write a single byte to the current I2C address. The
 * I2C link must be running or this returns -EIO.
 */
121 static int
i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
123 {
124 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
125
126 if (!algo_data->running)
127 return -EIO;
128
129 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
130 }
131
/*
 * Read a single byte from the current I2C address. The
 * I2C link must be running or this returns -EIO.
 */
136 static int
i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
138 {
139 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
140
141 if (!algo_data->running)
142 return -EIO;
143
144 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
145 }
146
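/*
 * i2c_algorithm master_xfer hook: each message is started with an
 * address/(re)start transaction and its payload is then moved one byte
 * at a time over the AUX channel. Returns the number of messages
 * transferred or a negative error code; the bus is stopped either way.
 */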
147 static int
i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
		     struct i2c_msg *msgs,
		     int num)
151 {
152 int ret = 0;
153 bool reading = false;
154 int m;
155 int b;
156
157 for (m = 0; m < num; m++) {
158 u16 len = msgs[m].len;
159 u8 *buf = msgs[m].buf;
160 reading = (msgs[m].flags & I2C_M_RD) != 0;
161 ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
162 if (ret < 0)
163 break;
164 if (reading) {
165 for (b = 0; b < len; b++) {
166 ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
167 if (ret < 0)
168 break;
169 }
170 } else {
171 for (b = 0; b < len; b++) {
172 ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
173 if (ret < 0)
174 break;
175 }
176 }
177 if (ret < 0)
178 break;
179 }
180 if (ret >= 0)
181 ret = num;
182 i2c_algo_dp_aux_stop(adapter, reading);
183 DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
184 return ret;
185 }
186
187 static u32
i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
189 {
190 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
191 I2C_FUNC_SMBUS_READ_BLOCK_DATA |
192 I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
193 I2C_FUNC_10BIT_ADDR;
194 }
195
196 static const struct i2c_algorithm i2c_dp_aux_algo = {
197 .master_xfer = i2c_algo_dp_aux_xfer,
198 .functionality = i2c_algo_dp_aux_functionality,
199 };
200
201 static void
i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
203 {
204 (void) i2c_algo_dp_aux_address(adapter, 0, false);
205 (void) i2c_algo_dp_aux_stop(adapter, false);
206 }
207
208 static int
i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
210 {
211 adapter->algo = &i2c_dp_aux_algo;
212 adapter->retries = 3;
213 i2c_dp_aux_reset_bus(adapter);
214 return 0;
215 }
216
217 /*
218 * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
219 * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
220 */
221 static int
i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
223 {
224 int error;
225
226 error = i2c_dp_aux_prepare_bus(adapter);
227 if (error)
228 return error;
229 error = i2c_add_adapter(adapter);
230 return error;
231 }
232
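/*
 * Poll COND for up to MS milliseconds, sleeping W milliseconds between
 * polls unless W is 0 or the kernel debugger is active. Evaluates to 0
 * once COND becomes true, or -ETIMEDOUT if the deadline passes first.
 */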
233 #define _wait_for(COND, MS, W) ({ \
234 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
235 int ret__ = 0; \
236 while (! (COND)) { \
237 if (time_after(jiffies, timeout__)) { \
238 ret__ = -ETIMEDOUT; \
239 break; \
240 } \
241 if (W && !in_dbg_master()) msleep(W); \
242 } \
243 ret__; \
244 })
245
246 #define wait_for(COND, MS) _wait_for(COND, MS, 1)
247
248 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
249
250 #define DP_LINK_CONFIGURATION_SIZE 9
251
252 #define CDV_FAST_LINK_TRAIN 1
253
254 struct cdv_intel_dp {
255 uint32_t output_reg;
256 uint32_t DP;
257 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
258 bool has_audio;
259 int force_audio;
260 uint32_t color_range;
261 uint8_t link_bw;
262 uint8_t lane_count;
263 uint8_t dpcd[4];
264 struct gma_encoder *encoder;
265 struct i2c_adapter adapter;
266 struct i2c_algo_dp_aux_data algo;
267 uint8_t train_set[4];
268 uint8_t link_status[DP_LINK_STATUS_SIZE];
269 int panel_power_up_delay;
270 int panel_power_down_delay;
271 int panel_power_cycle_delay;
272 int backlight_on_delay;
273 int backlight_off_delay;
274 struct drm_display_mode *panel_fixed_mode; /* for eDP */
275 bool panel_on;
276 };
277
278 struct ddi_regoff {
279 uint32_t PreEmph1;
280 uint32_t PreEmph2;
281 uint32_t VSwing1;
282 uint32_t VSwing2;
283 uint32_t VSwing3;
284 uint32_t VSwing4;
285 uint32_t VSwing5;
286 };
287
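/*
 * Sideband (DPIO) register offsets used to program voltage swing and
 * pre-emphasis during link training; entry 0 appears to serve port DP_B
 * and entry 1 the other DP port (see cdv_intel_dp_set_vswing_premph()).
 */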
288 static struct ddi_regoff ddi_DP_train_table[] = {
289 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
290 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
291 .VSwing5 = 0x8158,},
292 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
293 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
294 .VSwing5 = 0x8258,},
295 };
296
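/*
 * Value pairs consumed by cdv_intel_dp_set_vswing_premph(): even entries
 * are written to the VSwing2 sideband register (indexed by
 * vswing + premph), odd entries to PreEmph2 (indexed by premph).
 */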
297 static uint32_t dp_vswing_premph_table[] = {
298 0x55338954, 0x4000,
299 0x554d8954, 0x2000,
300 0x55668954, 0,
301 0x559ac0d4, 0x6000,
302 };
303 /**
304 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
305 * @encoder: GMA encoder struct
306 *
307 * If a CPU or PCH DP output is attached to an eDP panel, this function
308 * will return true, and false otherwise.
309 */
static bool is_edp(struct gma_encoder *encoder)
311 {
312 return encoder->type == INTEL_OUTPUT_EDP;
313 }
314
315
316 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
317 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
318 static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
319
320 static int
cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
322 {
323 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
324 int max_lane_count = 4;
325
326 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
327 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
328 switch (max_lane_count) {
329 case 1: case 2: case 4:
330 break;
331 default:
332 max_lane_count = 4;
333 }
334 }
335 return max_lane_count;
336 }
337
338 static int
cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
340 {
341 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
342 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
343
344 switch (max_link_bw) {
345 case DP_LINK_BW_1_62:
346 case DP_LINK_BW_2_7:
347 break;
348 default:
349 max_link_bw = DP_LINK_BW_1_62;
350 break;
351 }
352 return max_link_bw;
353 }
354
355 static int
cdv_intel_dp_link_clock(uint8_t link_bw)
357 {
358 if (link_bw == DP_LINK_BW_2_7)
359 return 270000;
360 else
361 return 162000;
362 }
363
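/*
 * Bandwidth required by a mode: pixel clock (kHz) times bits per pixel,
 * rounded up to whole bytes.
 */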
364 static int
cdv_intel_dp_link_required(int pixel_clock, int bpp)
366 {
367 return (pixel_clock * bpp + 7) / 8;
368 }
369
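/*
 * Available payload bandwidth in the same units: the link clock values
 * from cdv_intel_dp_link_clock() correspond to one payload byte per
 * symbol per lane (8b/10b coding already factored in), and the 19/20
 * factor presumably leaves ~5% headroom for link overhead.
 */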
370 static int
cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
372 {
373 return (max_link_clock * max_lanes * 19) / 20;
374 }
375
static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
377 {
378 struct drm_device *dev = intel_encoder->base.dev;
379 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
380 u32 pp;
381
382 if (intel_dp->panel_on) {
383 DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
384 return;
385 }
386 DRM_DEBUG_KMS("\n");
387
388 pp = REG_READ(PP_CONTROL);
389
390 pp |= EDP_FORCE_VDD;
391 REG_WRITE(PP_CONTROL, pp);
392 REG_READ(PP_CONTROL);
393 msleep(intel_dp->panel_power_up_delay);
394 }
395
static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
397 {
398 struct drm_device *dev = intel_encoder->base.dev;
399 u32 pp;
400
401 DRM_DEBUG_KMS("\n");
402 pp = REG_READ(PP_CONTROL);
403
404 pp &= ~EDP_FORCE_VDD;
405 REG_WRITE(PP_CONTROL, pp);
406 REG_READ(PP_CONTROL);
407
408 }
409
410 /* Returns true if the panel was already on when called */
static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
412 {
413 struct drm_device *dev = intel_encoder->base.dev;
414 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
415 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
416
417 if (intel_dp->panel_on)
418 return true;
419
420 DRM_DEBUG_KMS("\n");
421 pp = REG_READ(PP_CONTROL);
422 pp &= ~PANEL_UNLOCK_MASK;
423
424 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
425 REG_WRITE(PP_CONTROL, pp);
426 REG_READ(PP_CONTROL);
427
428 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
429 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
430 intel_dp->panel_on = false;
431 } else
432 intel_dp->panel_on = true;
433 msleep(intel_dp->panel_power_up_delay);
434
435 return false;
436 }
437
static void cdv_intel_edp_panel_off(struct gma_encoder *intel_encoder)
439 {
440 struct drm_device *dev = intel_encoder->base.dev;
	u32 pp, idle_off_mask = PP_ON;
442 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
443
444 DRM_DEBUG_KMS("\n");
445
446 pp = REG_READ(PP_CONTROL);
447
448 if ((pp & POWER_TARGET_ON) == 0)
449 return;
450
451 intel_dp->panel_on = false;
452 pp &= ~PANEL_UNLOCK_MASK;
453 /* ILK workaround: disable reset around power sequence */
454
455 pp &= ~POWER_TARGET_ON;
456 pp &= ~EDP_FORCE_VDD;
457 pp &= ~EDP_BLC_ENABLE;
458 REG_WRITE(PP_CONTROL, pp);
459 REG_READ(PP_CONTROL);
460 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
461
462 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
463 DRM_DEBUG_KMS("Error in turning off Panel\n");
464 }
465
466 msleep(intel_dp->panel_power_cycle_delay);
467 DRM_DEBUG_KMS("Over\n");
468 }
469
static void cdv_intel_edp_backlight_on(struct gma_encoder *intel_encoder)
471 {
472 struct drm_device *dev = intel_encoder->base.dev;
473 u32 pp;
474
475 DRM_DEBUG_KMS("\n");
476 /*
477 * If we enable the backlight right away following a panel power
478 * on, we may see slight flicker as the panel syncs with the eDP
479 * link. So delay a bit to make sure the image is solid before
480 * allowing it to appear.
481 */
482 msleep(300);
483 pp = REG_READ(PP_CONTROL);
484
485 pp |= EDP_BLC_ENABLE;
486 REG_WRITE(PP_CONTROL, pp);
487 gma_backlight_enable(dev);
488 }
489
static void cdv_intel_edp_backlight_off(struct gma_encoder *intel_encoder)
491 {
492 struct drm_device *dev = intel_encoder->base.dev;
493 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
494 u32 pp;
495
496 DRM_DEBUG_KMS("\n");
497 gma_backlight_disable(dev);
498 msleep(10);
499 pp = REG_READ(PP_CONTROL);
500
501 pp &= ~EDP_BLC_ENABLE;
502 REG_WRITE(PP_CONTROL, pp);
503 msleep(intel_dp->backlight_off_delay);
504 }
505
506 static enum drm_mode_status
cdv_intel_dp_mode_valid(struct drm_connector *connector,
			const struct drm_display_mode *mode)
509 {
510 struct gma_encoder *encoder = gma_attached_encoder(connector);
511 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
512 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
513 int max_lanes = cdv_intel_dp_max_lane_count(encoder);
514 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
515
516 if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
517 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
518 return MODE_PANEL;
519 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
520 return MODE_PANEL;
521 }
522
523 /* only refuse the mode on non eDP since we have seen some weird eDP panels
524 which are outside spec tolerances but somehow work by magic */
525 if (!is_edp(encoder) &&
526 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
527 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
528 return MODE_CLOCK_HIGH;
529
530 if (is_edp(encoder)) {
531 if (cdv_intel_dp_link_required(mode->clock, 24)
532 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
533 return MODE_CLOCK_HIGH;
534
535 }
536 if (mode->clock < 10000)
537 return MODE_CLOCK_LOW;
538
539 return MODE_OK;
540 }
541
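/*
 * The AUX channel data registers hold up to four payload bytes each,
 * most significant byte first: pack_aux() places src[0] in bits 31:24
 * and unpack_aux() reverses the mapping.
 */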
542 static uint32_t
pack_aux(uint8_t *src, int src_bytes)
544 {
545 int i;
546 uint32_t v = 0;
547
548 if (src_bytes > 4)
549 src_bytes = 4;
550 for (i = 0; i < src_bytes; i++)
551 v |= ((uint32_t) src[i]) << ((3-i) * 8);
552 return v;
553 }
554
555 static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
557 {
558 int i;
559 if (dst_bytes > 4)
560 dst_bytes = 4;
561 for (i = 0; i < dst_bytes; i++)
562 dst[i] = src >> ((3-i) * 8);
563 }
564
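/*
 * Run one raw AUX transaction: load the request into the data registers,
 * kick off the transfer, busy-wait for completion and unpack any reply.
 * Returns the number of reply bytes or -EBUSY/-EIO/-ETIMEDOUT.
 */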
565 static int
cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
		    uint8_t *send, int send_bytes,
		    uint8_t *recv, int recv_size)
569 {
570 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
571 uint32_t output_reg = intel_dp->output_reg;
572 struct drm_device *dev = encoder->base.dev;
573 uint32_t ch_ctl = output_reg + 0x10;
574 uint32_t ch_data = ch_ctl + 4;
575 int i;
576 int recv_bytes;
577 uint32_t status;
578 uint32_t aux_clock_divider;
579 int try, precharge;
580
	/*
	 * The AUX clock divider is derived from hrawclk and the AUX
	 * channel wants to run at 2MHz, so divide hrawclk by 2 and use
	 * that. The CDV platform uses a 200MHz hrawclk, giving a
	 * divider of 100.
	 */
587 aux_clock_divider = 200 / 2;
588
589 precharge = 4;
590 if (is_edp(encoder))
591 precharge = 10;
592
593 if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
594 DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
595 REG_READ(ch_ctl));
596 return -EBUSY;
597 }
598
599 /* Must try at least 3 times according to DP spec */
600 for (try = 0; try < 5; try++) {
601 /* Load the send data into the aux channel data registers */
602 for (i = 0; i < send_bytes; i += 4)
603 REG_WRITE(ch_data + i,
604 pack_aux(send + i, send_bytes - i));
605
606 /* Send the command and wait for it to complete */
607 REG_WRITE(ch_ctl,
608 DP_AUX_CH_CTL_SEND_BUSY |
609 DP_AUX_CH_CTL_TIME_OUT_400us |
610 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
611 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
612 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
613 DP_AUX_CH_CTL_DONE |
614 DP_AUX_CH_CTL_TIME_OUT_ERROR |
615 DP_AUX_CH_CTL_RECEIVE_ERROR);
616 for (;;) {
617 status = REG_READ(ch_ctl);
618 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
619 break;
620 udelay(100);
621 }
622
623 /* Clear done status and any errors */
624 REG_WRITE(ch_ctl,
625 status |
626 DP_AUX_CH_CTL_DONE |
627 DP_AUX_CH_CTL_TIME_OUT_ERROR |
628 DP_AUX_CH_CTL_RECEIVE_ERROR);
629 if (status & DP_AUX_CH_CTL_DONE)
630 break;
631 }
632
633 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
634 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
635 return -EBUSY;
636 }
637
638 /* Check for timeout or receive error.
639 * Timeouts occur when the sink is not connected
640 */
641 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
642 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
643 return -EIO;
644 }
645
646 /* Timeouts occur when the device isn't connected, so they're
647 * "normal" -- don't fill the kernel log with these */
648 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
649 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
650 return -ETIMEDOUT;
651 }
652
653 /* Unload any bytes sent back from the other side */
654 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
655 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
656 if (recv_bytes > recv_size)
657 recv_bytes = recv_size;
658
659 for (i = 0; i < recv_bytes; i += 4)
660 unpack_aux(REG_READ(ch_data + i),
661 recv + i, recv_bytes - i);
662
663 return recv_bytes;
664 }
665
666 /* Write data to the aux channel in native mode */
667 static int
cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
			      uint16_t address, uint8_t *send, int send_bytes)
670 {
671 int ret;
672 uint8_t msg[20];
673 int msg_bytes;
674 uint8_t ack;
675
676 if (send_bytes > 16)
677 return -1;
678 msg[0] = DP_AUX_NATIVE_WRITE << 4;
679 msg[1] = address >> 8;
680 msg[2] = address & 0xff;
681 msg[3] = send_bytes - 1;
682 memcpy(&msg[4], send, send_bytes);
683 msg_bytes = send_bytes + 4;
684 for (;;) {
685 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
686 if (ret < 0)
687 return ret;
688 ack >>= 4;
689 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
690 break;
691 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
692 udelay(100);
693 else
694 return -EIO;
695 }
696 return send_bytes;
697 }
698
699 /* Write a single byte to the aux channel in native mode */
700 static int
cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
				uint16_t address, uint8_t byte)
703 {
704 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
705 }
706
707 /* read bytes from a native aux channel */
708 static int
cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
			     uint16_t address, uint8_t *recv, int recv_bytes)
711 {
712 uint8_t msg[4];
713 int msg_bytes;
714 uint8_t reply[20];
715 int reply_bytes;
716 uint8_t ack;
717 int ret;
718
719 msg[0] = DP_AUX_NATIVE_READ << 4;
720 msg[1] = address >> 8;
721 msg[2] = address & 0xff;
722 msg[3] = recv_bytes - 1;
723
724 msg_bytes = 4;
725 reply_bytes = recv_bytes + 1;
726
727 for (;;) {
728 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
729 reply, reply_bytes);
730 if (ret == 0)
731 return -EPROTO;
732 if (ret < 0)
733 return ret;
734 ack = reply[0] >> 4;
735 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
736 memcpy(recv, reply + 1, ret - 1);
737 return ret - 1;
738 }
739 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
740 udelay(100);
741 else
742 return -EIO;
743 }
744 }
745
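/*
 * Transfer a single byte of an I2C-over-AUX request. The message follows
 * the DP AUX syntax: a command nibble (read/write, with MOT set while the
 * I2C transaction stays open), the 16-bit I2C address, a zero length
 * field (one data byte) for reads and writes, plus the data byte itself
 * for writes; address-only start/stop packets send just the three header
 * bytes.
 */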
746 static int
cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
			uint8_t write_byte, uint8_t *read_byte)
749 {
750 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
751 struct cdv_intel_dp *intel_dp = container_of(adapter,
752 struct cdv_intel_dp,
753 adapter);
754 struct gma_encoder *encoder = intel_dp->encoder;
755 uint16_t address = algo_data->address;
756 uint8_t msg[5];
757 uint8_t reply[2];
758 unsigned retry;
759 int msg_bytes;
760 int reply_bytes;
761 int ret;
762
763 /* Set up the command byte */
764 if (mode & MODE_I2C_READ)
765 msg[0] = DP_AUX_I2C_READ << 4;
766 else
767 msg[0] = DP_AUX_I2C_WRITE << 4;
768
769 if (!(mode & MODE_I2C_STOP))
770 msg[0] |= DP_AUX_I2C_MOT << 4;
771
772 msg[1] = address >> 8;
773 msg[2] = address;
774
775 switch (mode) {
776 case MODE_I2C_WRITE:
777 msg[3] = 0;
778 msg[4] = write_byte;
779 msg_bytes = 5;
780 reply_bytes = 1;
781 break;
782 case MODE_I2C_READ:
783 msg[3] = 0;
784 msg_bytes = 4;
785 reply_bytes = 2;
786 break;
787 default:
788 msg_bytes = 3;
789 reply_bytes = 1;
790 break;
791 }
792
793 for (retry = 0; retry < 5; retry++) {
794 ret = cdv_intel_dp_aux_ch(encoder,
795 msg, msg_bytes,
796 reply, reply_bytes);
797 if (ret < 0) {
798 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
799 return ret;
800 }
801
802 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
803 case DP_AUX_NATIVE_REPLY_ACK:
804 /* I2C-over-AUX Reply field is only valid
805 * when paired with AUX ACK.
806 */
807 break;
808 case DP_AUX_NATIVE_REPLY_NACK:
809 DRM_DEBUG_KMS("aux_ch native nack\n");
810 return -EREMOTEIO;
811 case DP_AUX_NATIVE_REPLY_DEFER:
812 udelay(100);
813 continue;
814 default:
815 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
816 reply[0]);
817 return -EREMOTEIO;
818 }
819
820 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
821 case DP_AUX_I2C_REPLY_ACK:
822 if (mode == MODE_I2C_READ) {
823 *read_byte = reply[1];
824 }
825 return reply_bytes - 1;
826 case DP_AUX_I2C_REPLY_NACK:
827 DRM_DEBUG_KMS("aux_i2c nack\n");
828 return -EREMOTEIO;
829 case DP_AUX_I2C_REPLY_DEFER:
830 DRM_DEBUG_KMS("aux_i2c defer\n");
831 udelay(100);
832 break;
833 default:
834 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
835 return -EREMOTEIO;
836 }
837 }
838
839 DRM_ERROR("too many retries, giving up\n");
840 return -EREMOTEIO;
841 }
842
843 static int
cdv_intel_dp_i2c_init(struct gma_connector *connector,
		      struct gma_encoder *encoder, const char *name)
846 {
847 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
848 int ret;
849
850 DRM_DEBUG_KMS("i2c_init %s\n", name);
851
852 intel_dp->algo.running = false;
853 intel_dp->algo.address = 0;
854 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
855
856 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
857 intel_dp->adapter.owner = THIS_MODULE;
858 strscpy(intel_dp->adapter.name, name);
859 intel_dp->adapter.algo_data = &intel_dp->algo;
860 intel_dp->adapter.dev.parent = connector->base.kdev;
861
862 if (is_edp(encoder))
863 cdv_intel_edp_panel_vdd_on(encoder);
864 ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
865 if (is_edp(encoder))
866 cdv_intel_edp_panel_vdd_off(encoder);
867
868 return ret;
869 }
870
static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
				       struct drm_display_mode *adjusted_mode)
873 {
874 adjusted_mode->hdisplay = fixed_mode->hdisplay;
875 adjusted_mode->hsync_start = fixed_mode->hsync_start;
876 adjusted_mode->hsync_end = fixed_mode->hsync_end;
877 adjusted_mode->htotal = fixed_mode->htotal;
878
879 adjusted_mode->vdisplay = fixed_mode->vdisplay;
880 adjusted_mode->vsync_start = fixed_mode->vsync_start;
881 adjusted_mode->vsync_end = fixed_mode->vsync_end;
882 adjusted_mode->vtotal = fixed_mode->vtotal;
883
884 adjusted_mode->clock = fixed_mode->clock;
885
886 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
887 }
888
889 static bool
cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
			struct drm_display_mode *adjusted_mode)
892 {
893 struct drm_psb_private *dev_priv = to_drm_psb_private(encoder->dev);
894 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
895 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
896 int lane_count, clock;
897 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
898 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
899 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
900 int refclock = mode->clock;
901 int bpp = 24;
902
903 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
904 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
905 refclock = intel_dp->panel_fixed_mode->clock;
906 bpp = dev_priv->edp.bpp;
907 }
908
909 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
910 for (clock = max_clock; clock >= 0; clock--) {
911 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
912
913 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
914 intel_dp->link_bw = bws[clock];
915 intel_dp->lane_count = lane_count;
916 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
917 DRM_DEBUG_KMS("Display port link bw %02x lane "
918 "count %d clock %d\n",
919 intel_dp->link_bw, intel_dp->lane_count,
920 adjusted_mode->clock);
921 return true;
922 }
923 }
924 }
925 if (is_edp(intel_encoder)) {
926 /* okay we failed just pick the highest */
927 intel_dp->lane_count = max_lane_count;
928 intel_dp->link_bw = bws[max_clock];
929 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
930 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
931 "count %d clock %d\n",
932 intel_dp->link_bw, intel_dp->lane_count,
933 adjusted_mode->clock);
934
935 return true;
936 }
937 return false;
938 }
939
940 struct cdv_intel_dp_m_n {
941 uint32_t tu;
942 uint32_t gmch_m;
943 uint32_t gmch_n;
944 uint32_t link_m;
945 uint32_t link_n;
946 };
947
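/*
 * Rescale the m/n ratio so the denominator is exactly 2^23 (0x800000),
 * keeping both values within the 24-bit M/N register fields while
 * preserving the ratio.
 */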
948 static void
cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
950 {
951 /*
952 while (*num > 0xffffff || *den > 0xffffff) {
953 *num >>= 1;
954 *den >>= 1;
955 }*/
956 uint64_t value, m;
957 m = *num;
958 value = m * (0x800000);
959 m = do_div(value, *den);
960 *num = value;
961 *den = 0x800000;
962 }
963
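/*
 * Derive the GMCH data and link M/N values. As a rough illustrative
 * example (not measured on hardware): a 148500 kHz pixel clock at 24 bpp
 * over 4 lanes at 270000 kHz gives gmch_m/gmch_n = 445500/1080000 before
 * reduction, i.e. a ratio of 0.4125.
 */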
964 static void
cdv_intel_dp_compute_m_n(int bpp,
			 int nlanes,
			 int pixel_clock,
			 int link_clock,
			 struct cdv_intel_dp_m_n *m_n)
970 {
971 m_n->tu = 64;
972 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
973 m_n->gmch_n = link_clock * nlanes;
974 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
975 m_n->link_m = pixel_clock;
976 m_n->link_n = link_clock;
977 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
978 }
979
980 void
cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
983 {
984 struct drm_device *dev = crtc->dev;
985 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
986 struct drm_mode_config *mode_config = &dev->mode_config;
987 struct drm_encoder *encoder;
988 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
989 int lane_count = 4, bpp = 24;
990 struct cdv_intel_dp_m_n m_n;
991 int pipe = gma_crtc->pipe;
992
993 /*
994 * Find the lane count in the intel_encoder private
995 */
996 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
997 struct gma_encoder *intel_encoder;
998 struct cdv_intel_dp *intel_dp;
999
1000 if (encoder->crtc != crtc)
1001 continue;
1002
1003 intel_encoder = to_gma_encoder(encoder);
1004 intel_dp = intel_encoder->dev_priv;
1005 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
1006 lane_count = intel_dp->lane_count;
1007 break;
1008 } else if (is_edp(intel_encoder)) {
1009 lane_count = intel_dp->lane_count;
1010 bpp = dev_priv->edp.bpp;
1011 break;
1012 }
1013 }
1014
	/*
	 * Compute the GMCH and Link ratios. bpp here is the post-LUT
	 * bits per pixel, normally 24 (8 bits each of R/G/B) unless the
	 * eDP configuration specifies otherwise.
	 */
1020 cdv_intel_dp_compute_m_n(bpp, lane_count,
1021 mode->clock, adjusted_mode->clock, &m_n);
1022
1023 {
1024 REG_WRITE(PIPE_GMCH_DATA_M(pipe),
1025 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
1026 m_n.gmch_m);
1027 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
1028 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
1029 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
1030 }
1031 }
1032
1033 static void
cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		      struct drm_display_mode *adjusted_mode)
1036 {
1037 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1038 struct drm_crtc *crtc = encoder->crtc;
1039 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
1040 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1041 struct drm_device *dev = encoder->dev;
1042
1043 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
1044 intel_dp->DP |= intel_dp->color_range;
1045
1046 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
1047 intel_dp->DP |= DP_SYNC_HS_HIGH;
1048 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
1049 intel_dp->DP |= DP_SYNC_VS_HIGH;
1050
1051 intel_dp->DP |= DP_LINK_TRAIN_OFF;
1052
1053 switch (intel_dp->lane_count) {
1054 case 1:
1055 intel_dp->DP |= DP_PORT_WIDTH_1;
1056 break;
1057 case 2:
1058 intel_dp->DP |= DP_PORT_WIDTH_2;
1059 break;
1060 case 4:
1061 intel_dp->DP |= DP_PORT_WIDTH_4;
1062 break;
1063 }
1064 if (intel_dp->has_audio)
1065 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
1066
1067 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
1068 intel_dp->link_configuration[0] = intel_dp->link_bw;
1069 intel_dp->link_configuration[1] = intel_dp->lane_count;
1070
1071 /*
1072 * Check for DPCD version > 1.1 and enhanced framing support
1073 */
1074 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
1075 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
1076 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
1077 intel_dp->DP |= DP_ENHANCED_FRAMING;
1078 }
1079
1080 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
1081 if (gma_crtc->pipe == 1)
1082 intel_dp->DP |= DP_PIPEB_SELECT;
1083
1084 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
1085 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
1086 if (is_edp(intel_encoder)) {
1087 uint32_t pfit_control;
1088 cdv_intel_edp_panel_on(intel_encoder);
1089
1090 if (mode->hdisplay != adjusted_mode->hdisplay ||
1091 mode->vdisplay != adjusted_mode->vdisplay)
1092 pfit_control = PFIT_ENABLE;
1093 else
1094 pfit_control = 0;
1095
1096 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
1097
1098 REG_WRITE(PFIT_CONTROL, pfit_control);
1099 }
1100 }
1101
1102
1103 /* If the sink supports it, try to set the power state appropriately */
static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
1105 {
1106 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1107 int ret, i;
1108
1109 /* Should have a valid DPCD by this point */
1110 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1111 return;
1112
1113 if (mode != DRM_MODE_DPMS_ON) {
1114 ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
1115 DP_SET_POWER_D3);
1116 if (ret != 1)
1117 DRM_DEBUG_DRIVER("failed to write sink power state\n");
1118 } else {
1119 /*
1120 * When turning on, we need to retry for 1ms to give the sink
1121 * time to wake up.
1122 */
1123 for (i = 0; i < 3; i++) {
1124 ret = cdv_intel_dp_aux_native_write_1(encoder,
1125 DP_SET_POWER,
1126 DP_SET_POWER_D0);
1127 if (ret == 1)
1128 break;
1129 udelay(1000);
1130 }
1131 }
1132 }
1133
static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
1135 {
1136 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1137 int edp = is_edp(intel_encoder);
1138
1139 if (edp) {
1140 cdv_intel_edp_backlight_off(intel_encoder);
1141 cdv_intel_edp_panel_off(intel_encoder);
1142 cdv_intel_edp_panel_vdd_on(intel_encoder);
1143 }
1144 /* Wake up the sink first */
1145 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
1146 cdv_intel_dp_link_down(intel_encoder);
1147 if (edp)
1148 cdv_intel_edp_panel_vdd_off(intel_encoder);
1149 }
1150
static void cdv_intel_dp_commit(struct drm_encoder *encoder)
1152 {
1153 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1154 int edp = is_edp(intel_encoder);
1155
1156 if (edp)
1157 cdv_intel_edp_panel_on(intel_encoder);
1158 cdv_intel_dp_start_link_train(intel_encoder);
1159 cdv_intel_dp_complete_link_train(intel_encoder);
1160 if (edp)
1161 cdv_intel_edp_backlight_on(intel_encoder);
1162 }
1163
1164 static void
cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
1166 {
1167 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1168 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1169 struct drm_device *dev = encoder->dev;
1170 uint32_t dp_reg = REG_READ(intel_dp->output_reg);
1171 int edp = is_edp(intel_encoder);
1172
1173 if (mode != DRM_MODE_DPMS_ON) {
1174 if (edp) {
1175 cdv_intel_edp_backlight_off(intel_encoder);
1176 cdv_intel_edp_panel_vdd_on(intel_encoder);
1177 }
1178 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1179 cdv_intel_dp_link_down(intel_encoder);
1180 if (edp) {
1181 cdv_intel_edp_panel_vdd_off(intel_encoder);
1182 cdv_intel_edp_panel_off(intel_encoder);
1183 }
1184 } else {
1185 if (edp)
1186 cdv_intel_edp_panel_on(intel_encoder);
1187 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1188 if (!(dp_reg & DP_PORT_EN)) {
1189 cdv_intel_dp_start_link_train(intel_encoder);
1190 cdv_intel_dp_complete_link_train(intel_encoder);
1191 }
1192 if (edp)
1193 cdv_intel_edp_backlight_on(intel_encoder);
1194 }
1195 }
1196
1197 /*
1198 * Native read with retry for link status and receiver capability reads for
1199 * cases where the sink may still be asleep.
1200 */
1201 static bool
cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
				   uint8_t *recv, int recv_bytes)
1204 {
1205 int ret, i;
1206
1207 /*
1208 * Sinks are *supposed* to come up within 1ms from an off state,
1209 * but we're also supposed to retry 3 times per the spec.
1210 */
1211 for (i = 0; i < 3; i++) {
1212 ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
1213 recv_bytes);
1214 if (ret == recv_bytes)
1215 return true;
1216 udelay(1000);
1217 }
1218
1219 return false;
1220 }
1221
1222 /*
1223 * Fetch AUX CH registers 0x202 - 0x207 which contain
1224 * link status information
1225 */
1226 static bool
cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
1228 {
1229 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1230 return cdv_intel_dp_aux_native_read_retry(encoder,
1231 DP_LANE0_1_STATUS,
1232 intel_dp->link_status,
1233 DP_LINK_STATUS_SIZE);
1234 }
1235
1236 static uint8_t
cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
			 int r)
1239 {
1240 return link_status[r - DP_LANE0_1_STATUS];
1241 }
1242
1243 static uint8_t
cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
				     int lane)
1246 {
1247 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1248 int s = ((lane & 1) ?
1249 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1250 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1251 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1252
1253 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1254 }
1255
1256 static uint8_t
cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
					  int lane)
1259 {
1260 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1261 int s = ((lane & 1) ?
1262 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1263 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1264 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1265
1266 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1267 }
1268
1269 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
1270
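/*
 * Build the next TRAINING_LANEx_SET values from the sink's adjust
 * requests: take the highest voltage swing and pre-emphasis requested by
 * any active lane, flagging "max reached" once the limits are hit.
 */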
1271 static void
cdv_intel_get_adjust_train(struct gma_encoder *encoder)
1273 {
1274 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1275 uint8_t v = 0;
1276 uint8_t p = 0;
1277 int lane;
1278
1279 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1280 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
1281 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
1282
1283 if (this_v > v)
1284 v = this_v;
1285 if (this_p > p)
1286 p = this_p;
1287 }
1288
1289 if (v >= CDV_DP_VOLTAGE_MAX)
1290 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1291
1292 if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
1293 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1294
1295 for (lane = 0; lane < 4; lane++)
1296 intel_dp->train_set[lane] = v | p;
1297 }
1298
1299
1300 static uint8_t
cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
			  int lane)
1303 {
1304 int i = DP_LANE0_1_STATUS + (lane >> 1);
1305 int s = (lane & 1) * 4;
1306 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1307
1308 return (l >> s) & 0xf;
1309 }
1310
1311 /* Check for clock recovery is done on all channels */
1312 static bool
cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1314 {
1315 int lane;
1316 uint8_t lane_status;
1317
1318 for (lane = 0; lane < lane_count; lane++) {
1319 lane_status = cdv_intel_get_lane_status(link_status, lane);
1320 if ((lane_status & DP_LANE_CR_DONE) == 0)
1321 return false;
1322 }
1323 return true;
1324 }
1325
1326 /* Check to see if channel eq is done on all channels */
1327 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1328 DP_LANE_CHANNEL_EQ_DONE|\
1329 DP_LANE_SYMBOL_LOCKED)
1330 static bool
cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
1332 {
1333 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1334 uint8_t lane_align;
1335 uint8_t lane_status;
1336 int lane;
1337
1338 lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
1339 DP_LANE_ALIGN_STATUS_UPDATED);
1340 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1341 return false;
1342 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1343 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
1344 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1345 return false;
1346 }
1347 return true;
1348 }
1349
1350 static bool
cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
			    uint32_t dp_reg_value,
			    uint8_t dp_train_pat)
1354 {
1355 struct drm_device *dev = encoder->base.dev;
1356 int ret;
1357 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1358
1359 REG_WRITE(intel_dp->output_reg, dp_reg_value);
1360 REG_READ(intel_dp->output_reg);
1361
1362 ret = cdv_intel_dp_aux_native_write_1(encoder,
1363 DP_TRAINING_PATTERN_SET,
1364 dp_train_pat);
1365
1366 if (ret != 1) {
1367 DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
1368 dp_train_pat);
1369 return false;
1370 }
1371
1372 return true;
1373 }
1374
1375
1376 static bool
cdv_intel_dplink_set_level(struct gma_encoder *encoder,
			   uint8_t dp_train_pat)
1379 {
1380 int ret;
1381 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1382
1383 ret = cdv_intel_dp_aux_native_write(encoder,
1384 DP_TRAINING_LANE0_SET,
1385 intel_dp->train_set,
1386 intel_dp->lane_count);
1387
1388 if (ret != intel_dp->lane_count) {
1389 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
1390 intel_dp->train_set[0], intel_dp->lane_count);
1391 return false;
1392 }
1393 return true;
1394 }
1395
1396 static void
cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
1398 {
1399 struct drm_device *dev = encoder->base.dev;
1400 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1401 struct ddi_regoff *ddi_reg;
1402 int vswing, premph, index;
1403
1404 if (intel_dp->output_reg == DP_B)
1405 ddi_reg = &ddi_DP_train_table[0];
1406 else
1407 ddi_reg = &ddi_DP_train_table[1];
1408
1409 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
1410 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
1411 DP_TRAIN_PRE_EMPHASIS_SHIFT;
1412
1413 if (vswing + premph > 3)
1414 return;
1415 #ifdef CDV_FAST_LINK_TRAIN
1416 return;
1417 #endif
1418 DRM_DEBUG_KMS("Test2\n");
1419 //return ;
1420 cdv_sb_reset(dev);
1421 /* ;Swing voltage programming
1422 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
1423 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
1424
1425 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
1426 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
1427
1428 /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
1429 * The VSwing_PreEmph table is also considered based on the vswing/premp
1430 */
1431 index = (vswing + premph) * 2;
1432 if (premph == 1 && vswing == 1) {
1433 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
1434 } else
1435 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
1436
1437 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
1438 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
1439 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
1440 else
1441 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
1442
1443 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
1444 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
1445
1446 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
1447 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
1448
1449 /* ;Pre emphasis programming
1450 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
1451 */
1452 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
1453
1454 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
1455 index = 2 * premph + 1;
1456 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
1457 return;
1458 }
1459
1460
1461 /* Enable corresponding port and start training pattern 1 */
1462 static void
cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
1464 {
1465 struct drm_device *dev = encoder->base.dev;
1466 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1467 int i;
1468 uint8_t voltage;
1469 bool clock_recovery = false;
1470 int tries;
1471 u32 reg;
1472 uint32_t DP = intel_dp->DP;
1473
1474 DP |= DP_PORT_EN;
1475 DP &= ~DP_LINK_TRAIN_MASK;
1476
1477 reg = DP;
1478 reg |= DP_LINK_TRAIN_PAT_1;
1479 /* Enable output, wait for it to become active */
1480 REG_WRITE(intel_dp->output_reg, reg);
1481 REG_READ(intel_dp->output_reg);
1482 gma_wait_for_vblank(dev);
1483
1484 DRM_DEBUG_KMS("Link config\n");
1485 /* Write the link configuration data */
1486 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
1487 intel_dp->link_configuration,
1488 2);
1489
1490 memset(intel_dp->train_set, 0, 4);
1491 voltage = 0;
1492 tries = 0;
1493 clock_recovery = false;
1494
1495 DRM_DEBUG_KMS("Start train\n");
1496 reg = DP | DP_LINK_TRAIN_PAT_1;
1497
1498 for (;;) {
1499 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1500 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1501 intel_dp->train_set[0],
1502 intel_dp->link_configuration[0],
1503 intel_dp->link_configuration[1]);
1504
1505 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
1506 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
1507 }
1508 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1509 /* Set training pattern 1 */
1510
1511 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
1512
1513 udelay(200);
1514 if (!cdv_intel_dp_get_link_status(encoder))
1515 break;
1516
1517 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1518 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1519 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1520
1521 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1522 DRM_DEBUG_KMS("PT1 train is done\n");
1523 clock_recovery = true;
1524 break;
1525 }
1526
1527 /* Check to see if we've tried the max voltage */
1528 for (i = 0; i < intel_dp->lane_count; i++)
1529 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1530 break;
1531 if (i == intel_dp->lane_count)
1532 break;
1533
1534 /* Check to see if we've tried the same voltage 5 times */
1535 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1536 ++tries;
1537 if (tries == 5)
1538 break;
1539 } else
1540 tries = 0;
1541 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1542
1543 /* Compute new intel_dp->train_set as requested by target */
1544 cdv_intel_get_adjust_train(encoder);
1545
1546 }
1547
1548 if (!clock_recovery) {
1549 DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]);
1550 }
1551
1552 intel_dp->DP = DP;
1553 }
1554
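/*
 * Channel-equalization phase: drive training pattern 2 and adjust the
 * drive levels until every lane reports EQ and symbol lock. If clock
 * recovery is lost the whole sequence restarts, and after about five
 * such restarts the link is taken down.
 */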
1555 static void
cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
1557 {
1558 struct drm_device *dev = encoder->base.dev;
1559 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1560 int tries, cr_tries;
1561 u32 reg;
1562 uint32_t DP = intel_dp->DP;
1563
1564 /* channel equalization */
1565 tries = 0;
1566 cr_tries = 0;
1567
1568 DRM_DEBUG_KMS("\n");
1569 reg = DP | DP_LINK_TRAIN_PAT_2;
1570
1571 for (;;) {
1572
1573 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1574 intel_dp->train_set[0],
1575 intel_dp->link_configuration[0],
1576 intel_dp->link_configuration[1]);
1577 /* channel eq pattern */
1578
1579 if (!cdv_intel_dp_set_link_train(encoder, reg,
1580 DP_TRAINING_PATTERN_2)) {
1581 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
1582 }
1583 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1584
1585 if (cr_tries > 5) {
1586 DRM_ERROR("failed to train DP, aborting\n");
1587 cdv_intel_dp_link_down(encoder);
1588 break;
1589 }
1590
1591 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1592
1593 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);
1594
1595 udelay(1000);
1596 if (!cdv_intel_dp_get_link_status(encoder))
1597 break;
1598
1599 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1600 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1601 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1602
1603 /* Make sure clock is still ok */
1604 if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1605 cdv_intel_dp_start_link_train(encoder);
1606 cr_tries++;
1607 continue;
1608 }
1609
1610 if (cdv_intel_channel_eq_ok(encoder)) {
1611 DRM_DEBUG_KMS("PT2 train is done\n");
1612 break;
1613 }
1614
1615 /* Try 5 times, then try clock recovery if that fails */
1616 if (tries > 5) {
1617 cdv_intel_dp_link_down(encoder);
1618 cdv_intel_dp_start_link_train(encoder);
1619 tries = 0;
1620 cr_tries++;
1621 continue;
1622 }
1623
1624 /* Compute new intel_dp->train_set as requested by target */
1625 cdv_intel_get_adjust_train(encoder);
1626 ++tries;
1627
1628 }
1629
1630 reg = DP | DP_LINK_TRAIN_OFF;
1631
1632 REG_WRITE(intel_dp->output_reg, reg);
1633 REG_READ(intel_dp->output_reg);
1634 cdv_intel_dp_aux_native_write_1(encoder,
1635 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1636 }
1637
1638 static void
cdv_intel_dp_link_down(struct gma_encoder *encoder)
1640 {
1641 struct drm_device *dev = encoder->base.dev;
1642 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1643 uint32_t DP = intel_dp->DP;
1644
1645 if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1646 return;
1647
1648 DRM_DEBUG_KMS("\n");
1649
1650
1651 {
1652 DP &= ~DP_LINK_TRAIN_MASK;
1653 REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1654 }
1655 REG_READ(intel_dp->output_reg);
1656
1657 msleep(17);
1658
1659 REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1660 REG_READ(intel_dp->output_reg);
1661 }
1662
static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
1664 {
1665 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1666 enum drm_connector_status status;
1667
1668 status = connector_status_disconnected;
1669 if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
1670 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1671 {
1672 if (intel_dp->dpcd[DP_DPCD_REV] != 0)
1673 status = connector_status_connected;
1674 }
1675 if (status == connector_status_connected)
1676 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
1677 intel_dp->dpcd[0], intel_dp->dpcd[1],
1678 intel_dp->dpcd[2], intel_dp->dpcd[3]);
1679 return status;
1680 }
1681
1682 /*
1683 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
1684 *
1685 * \return true if DP port is connected.
1686 * \return false if DP port is disconnected.
1687 */
1688 static enum drm_connector_status
cdv_intel_dp_detect(struct drm_connector *connector, bool force)
1690 {
1691 struct gma_encoder *encoder = gma_attached_encoder(connector);
1692 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1693 enum drm_connector_status status;
1694 struct edid *edid = NULL;
1695 int edp = is_edp(encoder);
1696
1697 intel_dp->has_audio = false;
1698
1699 if (edp)
1700 cdv_intel_edp_panel_vdd_on(encoder);
1701 status = cdv_dp_detect(encoder);
1702 if (status != connector_status_connected) {
1703 if (edp)
1704 cdv_intel_edp_panel_vdd_off(encoder);
1705 return status;
1706 }
1707
1708 if (intel_dp->force_audio) {
1709 intel_dp->has_audio = intel_dp->force_audio > 0;
1710 } else {
1711 edid = drm_get_edid(connector, &intel_dp->adapter);
1712 if (edid) {
1713 intel_dp->has_audio = drm_detect_monitor_audio(edid);
1714 kfree(edid);
1715 }
1716 }
1717 if (edp)
1718 cdv_intel_edp_panel_vdd_off(encoder);
1719
1720 return connector_status_connected;
1721 }
1722
static int cdv_intel_dp_get_modes(struct drm_connector *connector)
1724 {
1725 struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
1726 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1727 struct edid *edid = NULL;
1728 int ret = 0;
1729 int edp = is_edp(intel_encoder);
1730
1731
1732 edid = drm_get_edid(connector, &intel_dp->adapter);
1733 if (edid) {
1734 drm_connector_update_edid_property(connector, edid);
1735 ret = drm_add_edid_modes(connector, edid);
1736 kfree(edid);
1737 }
1738
1739 if (is_edp(intel_encoder)) {
1740 struct drm_device *dev = connector->dev;
1741 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
1742
1743 cdv_intel_edp_panel_vdd_off(intel_encoder);
1744 if (ret) {
1745 if (edp && !intel_dp->panel_fixed_mode) {
1746 struct drm_display_mode *newmode;
1747 list_for_each_entry(newmode, &connector->probed_modes,
1748 head) {
1749 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1750 intel_dp->panel_fixed_mode =
1751 drm_mode_duplicate(dev, newmode);
1752 break;
1753 }
1754 }
1755 }
1756
1757 return ret;
1758 }
1759 if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
1760 intel_dp->panel_fixed_mode =
1761 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1762 if (intel_dp->panel_fixed_mode) {
1763 intel_dp->panel_fixed_mode->type |=
1764 DRM_MODE_TYPE_PREFERRED;
1765 }
1766 }
1767 if (intel_dp->panel_fixed_mode != NULL) {
1768 struct drm_display_mode *mode;
1769 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
1770 drm_mode_probed_add(connector, mode);
1771 return 1;
1772 }
1773 }
1774
1775 return ret;
1776 }
1777
1778 static bool
cdv_intel_dp_detect_audio(struct drm_connector *connector)
1780 {
1781 struct gma_encoder *encoder = gma_attached_encoder(connector);
1782 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1783 struct edid *edid;
1784 bool has_audio = false;
1785 int edp = is_edp(encoder);
1786
1787 if (edp)
1788 cdv_intel_edp_panel_vdd_on(encoder);
1789
1790 edid = drm_get_edid(connector, &intel_dp->adapter);
1791 if (edid) {
1792 has_audio = drm_detect_monitor_audio(edid);
1793 kfree(edid);
1794 }
1795 if (edp)
1796 cdv_intel_edp_panel_vdd_off(encoder);
1797
1798 return has_audio;
1799 }
1800
1801 static int
cdv_intel_dp_set_property(struct drm_connector * connector,struct drm_property * property,uint64_t val)1802 cdv_intel_dp_set_property(struct drm_connector *connector,
1803 struct drm_property *property,
1804 uint64_t val)
1805 {
1806 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
1807 struct gma_encoder *encoder = gma_attached_encoder(connector);
1808 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1809 int ret;
1810
1811 ret = drm_object_property_set_value(&connector->base, property, val);
1812 if (ret)
1813 return ret;
1814
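	/*
	 * force_audio: a negative value forces audio off, a positive value
	 * forces it on, and zero falls back to EDID-based detection.
	 */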
	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == 0)
			has_audio = cdv_intel_dp_detect_audio(connector);
		else
			has_audio = i > 0;

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

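	/* Broadcast RGB: zero selects full range, non-zero limited (16-235) range */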
	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
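	/* Push the new property value to the hardware by redoing the mode set */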
	if (encoder->base.crtc) {
		struct drm_crtc *crtc = encoder->base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->primary->fb);
	}

	return 0;
}

static void
cdv_intel_dp_destroy(struct drm_connector *connector)
{
	struct gma_connector *gma_connector = to_gma_connector(connector);
	struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;

	if (is_edp(gma_encoder)) {
		/* cdv_intel_panel_destroy_backlight(connector->dev); */
		kfree(intel_dp->panel_fixed_mode);
		intel_dp->panel_fixed_mode = NULL;
	}
	i2c_del_adapter(&intel_dp->adapter);
	drm_connector_cleanup(connector);
	kfree(gma_connector);
}

static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
	.dpms = cdv_intel_dp_dpms,
	.mode_fixup = cdv_intel_dp_mode_fixup,
	.prepare = cdv_intel_dp_prepare,
	.mode_set = cdv_intel_dp_mode_set,
	.commit = cdv_intel_dp_commit,
};

static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = cdv_intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = cdv_intel_dp_set_property,
	.destroy = cdv_intel_dp_destroy,
};

static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
	.get_modes = cdv_intel_dp_get_modes,
	.mode_valid = cdv_intel_dp_mode_valid,
	.best_encoder = gma_best_encoder,
};

static void cdv_intel_dp_add_properties(struct drm_connector *connector)
{
	cdv_intel_attach_force_audio_property(connector);
	cdv_intel_attach_broadcast_rgb_property(connector);
}

/* Check the VBT to see whether the eDP is on the DP-C port */
static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
{
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPC &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/*
 * Cedarview display clock gating
 *
 * We need to disable this to get correct behaviour while enabling
 * DP/eDP. TODO - investigate whether we can turn it back to normal
 * after enabling.
 */
static void cdv_disable_intel_clock_gating(struct drm_device *dev)
{
	u32 reg_value;

	reg_value = REG_READ(DSPCLK_GATE_D);

	reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
		      DPUNIT_PIPEA_GATE_DISABLE |
		      DPCUNIT_CLOCK_GATE_DISABLE |
		      DPLSUNIT_CLOCK_GATE_DISABLE |
		      DPOUNIT_CLOCK_GATE_DISABLE |
		      DPIOUNIT_CLOCK_GATE_DISABLE);

	REG_WRITE(DSPCLK_GATE_D, reg_value);

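	/* Brief delay to let the clock gating change settle */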
	udelay(500);
}

void
cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
{
	struct gma_encoder *gma_encoder;
	struct gma_connector *gma_connector;
	struct drm_connector *connector;
	struct drm_encoder *encoder;
	struct cdv_intel_dp *intel_dp;
	const char *name = NULL;
	int type = DRM_MODE_CONNECTOR_DisplayPort;

	gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
	if (!gma_encoder)
		return;
	gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
	if (!gma_connector)
		goto err_connector;
	intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
	if (!intel_dp)
		goto err_priv;

	if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
		type = DRM_MODE_CONNECTOR_eDP;

	connector = &gma_connector->base;
	encoder = &gma_encoder->base;

	drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
	drm_simple_encoder_init(dev, encoder, DRM_MODE_ENCODER_TMDS);

	gma_connector_attach_encoder(gma_connector, gma_encoder);

	if (type == DRM_MODE_CONNECTOR_DisplayPort)
		gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	else
		gma_encoder->type = INTEL_OUTPUT_EDP;

	gma_encoder->dev_priv = intel_dp;
	intel_dp->encoder = gma_encoder;
	intel_dp->output_reg = output_reg;

	drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
	drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_B:
		name = "DPDDC-B";
		gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
		break;
	case DP_C:
		name = "DPDDC-C";
		gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
		break;
	}

	cdv_disable_intel_clock_gating(dev);

	cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
	/* FIXME: check for and handle i2c init failure */
	cdv_intel_dp_add_properties(connector);

	if (is_edp(gma_encoder)) {
		int ret;
		struct edp_power_seq cur;
		u32 pp_on, pp_off, pp_div;
		u32 pwm_ctrl;

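		/* Unlock the panel power sequencing registers before updating them */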
		pp_on = REG_READ(PP_CONTROL);
		pp_on &= ~PANEL_UNLOCK_MASK;
		pp_on |= PANEL_UNLOCK_REGS;

		REG_WRITE(PP_CONTROL, pp_on);

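		/* Tie the backlight PWM to pipe B (the pipe used for eDP here) */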
		pwm_ctrl = REG_READ(BLC_PWM_CTL2);
		pwm_ctrl |= PWM_PIPE_B;
		REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);

		pp_on = REG_READ(PP_ON_DELAYS);
		pp_off = REG_READ(PP_OFF_DELAYS);
		pp_div = REG_READ(PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT);

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

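		/*
		 * The hardware stores these delays in 100us units (the power
		 * cycle delay in 100ms units, offset by one); convert them
		 * to milliseconds.
		 */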
		intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
		intel_dp->backlight_on_delay = cur.t8 / 10;
		intel_dp->backlight_off_delay = cur.t9 / 10;
		intel_dp->panel_power_down_delay = cur.t10 / 10;
		intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

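		/*
		 * Probe the sink: read the start of the DPCD (with VDD forced
		 * on) to verify that a panel is really present.
		 */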
		cdv_intel_edp_panel_vdd_on(gma_encoder);
		ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
						   intel_dp->dpcd,
						   sizeof(intel_dp->dpcd));
		cdv_intel_edp_panel_vdd_off(gma_encoder);
		if (ret <= 0) {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			drm_encoder_cleanup(encoder);
			cdv_intel_dp_destroy(connector);
			goto err_connector;
		} else {
			DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
				      intel_dp->dpcd[0], intel_dp->dpcd[1],
				      intel_dp->dpcd[2], intel_dp->dpcd[3]);
		}
		/*
		 * The CDV reference driver moves panel backlight setup into the
		 * displays that have a backlight: this is a good idea and one we
		 * should probably adopt, but we need to migrate all the drivers
		 * before we can do that.
		 */
		/* cdv_intel_panel_setup_backlight(dev); */
	}
	return;

err_priv:
	kfree(gma_connector);
err_connector:
	kfree(gma_encoder);
}