1 /*
2 * Copyright © 2012 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
25 *
26 */
27
28 #include <linux/i2c.h>
29 #include <linux/module.h>
30 #include <linux/slab.h>
31
32 #include <drm/display/drm_dp_helper.h>
33 #include <drm/drm_crtc.h>
34 #include <drm/drm_crtc_helper.h>
35 #include <drm/drm_edid.h>
36 #include <drm/drm_modeset_helper_vtables.h>
37 #include <drm/drm_simple_kms_helper.h>
38
39 #include "gma_display.h"
40 #include "psb_drv.h"
41 #include "psb_intel_drv.h"
42 #include "psb_intel_reg.h"
43
44 /**
45 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
46 * aux algorithm
47 * @running: set by the algo to indicate whether an i2c transfer is ongoing
48 * or whether the i2c bus is quiescent
49 * @address: i2c target address for the currently ongoing transfer
50 * @aux_ch: driver callback to transfer a single byte of the i2c payload
51 */
52 struct i2c_algo_dp_aux_data {
53 bool running;
54 u16 address;
55 int (*aux_ch) (struct i2c_adapter *adapter,
56 int mode, uint8_t write_byte,
57 uint8_t *read_byte);
58 };
59
60 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
61 static int
62 i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
63 uint8_t write_byte, uint8_t *read_byte)
64 {
65 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
66 int ret;
67
68 ret = (*algo_data->aux_ch)(adapter, mode,
69 write_byte, read_byte);
70 return ret;
71 }
72
73 /*
74 * I2C over AUX CH
75 */
76
77 /*
78 * Send the address. If the I2C link is running, this 'restarts'
79 * the connection with the new address; this is used for doing
80 * a write followed by a read (as needed for DDC).
81 */
82 static int
83 i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
84 {
85 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
86 int mode = MODE_I2C_START;
87
88 if (reading)
89 mode |= MODE_I2C_READ;
90 else
91 mode |= MODE_I2C_WRITE;
92 algo_data->address = address;
93 algo_data->running = true;
94 return i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
95 }
96
97 /*
98 * Stop the I2C transaction. This closes out the link, sending
99 * a bare address packet with the MOT bit turned off
100 */
101 static void
102 i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
103 {
104 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
105 int mode = MODE_I2C_STOP;
106
107 if (reading)
108 mode |= MODE_I2C_READ;
109 else
110 mode |= MODE_I2C_WRITE;
111 if (algo_data->running) {
112 (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
113 algo_data->running = false;
114 }
115 }
116
117 /*
118 * Write a single byte to the current I2C address. The
119 * I2C link must be running or this returns -EIO.
120 */
121 static int
122 i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
123 {
124 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
125
126 if (!algo_data->running)
127 return -EIO;
128
129 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
130 }
131
132 /*
133 * Read a single byte from the current I2C address. The
134 * I2C link must be running or this returns -EIO.
135 */
136 static int
137 i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
138 {
139 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
140
141 if (!algo_data->running)
142 return -EIO;
143
144 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
145 }
146
147 static int
148 i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
149 struct i2c_msg *msgs,
150 int num)
151 {
152 int ret = 0;
153 bool reading = false;
154 int m;
155 int b;
156
157 for (m = 0; m < num; m++) {
158 u16 len = msgs[m].len;
159 u8 *buf = msgs[m].buf;
160 reading = (msgs[m].flags & I2C_M_RD) != 0;
161 ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
162 if (ret < 0)
163 break;
164 if (reading) {
165 for (b = 0; b < len; b++) {
166 ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
167 if (ret < 0)
168 break;
169 }
170 } else {
171 for (b = 0; b < len; b++) {
172 ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
173 if (ret < 0)
174 break;
175 }
176 }
177 if (ret < 0)
178 break;
179 }
180 if (ret >= 0)
181 ret = num;
182 i2c_algo_dp_aux_stop(adapter, reading);
183 DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
184 return ret;
185 }
186
187 static u32
188 i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
189 {
190 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
191 I2C_FUNC_SMBUS_READ_BLOCK_DATA |
192 I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
193 I2C_FUNC_10BIT_ADDR;
194 }
195
196 static const struct i2c_algorithm i2c_dp_aux_algo = {
197 .master_xfer = i2c_algo_dp_aux_xfer,
198 .functionality = i2c_algo_dp_aux_functionality,
199 };
200
201 static void
202 i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
203 {
204 (void) i2c_algo_dp_aux_address(adapter, 0, false);
205 (void) i2c_algo_dp_aux_stop(adapter, false);
206 }
207
208 static int
209 i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
210 {
211 adapter->algo = &i2c_dp_aux_algo;
212 adapter->retries = 3;
213 i2c_dp_aux_reset_bus(adapter);
214 return 0;
215 }
216
217 /*
218 * FIXME: This is the old dp aux helper; gma500 is the last driver that needs to
219 * be ported over to the new helper code in drm_dp_helper.c, like i915 and radeon.
220 */
221 static int
222 i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
223 {
224 int error;
225
226 error = i2c_dp_aux_prepare_bus(adapter);
227 if (error)
228 return error;
229 error = i2c_add_adapter(adapter);
230 return error;
231 }
232
233 #define _wait_for(COND, MS, W) ({ \
234 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
235 int ret__ = 0; \
236 while (! (COND)) { \
237 if (time_after(jiffies, timeout__)) { \
238 ret__ = -ETIMEDOUT; \
239 break; \
240 } \
241 if (W && !in_dbg_master()) msleep(W); \
242 } \
243 ret__; \
244 })
245
246 #define wait_for(COND, MS) _wait_for(COND, MS, 1)
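/*
 * Usage sketch: wait_for(COND, MS) polls COND roughly once per millisecond
 * (via msleep(1), so the timing is approximate) and evaluates to 0 once COND
 * becomes true, or to -ETIMEDOUT after MS milliseconds. For example,
 * wait_for((REG_READ(PP_STATUS) & PP_ON) != 0, 1000) waits up to one second
 * for the panel power sequencer to report the panel as on.
 */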
247
248 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
249
250 #define DP_LINK_CONFIGURATION_SIZE 9
251
252 #define CDV_FAST_LINK_TRAIN 1
253
254 struct cdv_intel_dp {
255 uint32_t output_reg;
256 uint32_t DP;
257 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
258 bool has_audio;
259 int force_audio;
260 uint32_t color_range;
261 uint8_t link_bw;
262 uint8_t lane_count;
263 uint8_t dpcd[4];
264 struct gma_encoder *encoder;
265 struct i2c_adapter adapter;
266 struct i2c_algo_dp_aux_data algo;
267 uint8_t train_set[4];
268 uint8_t link_status[DP_LINK_STATUS_SIZE];
269 int panel_power_up_delay;
270 int panel_power_down_delay;
271 int panel_power_cycle_delay;
272 int backlight_on_delay;
273 int backlight_off_delay;
274 struct drm_display_mode *panel_fixed_mode; /* for eDP */
275 bool panel_on;
276 };
277
278 struct ddi_regoff {
279 uint32_t PreEmph1;
280 uint32_t PreEmph2;
281 uint32_t VSwing1;
282 uint32_t VSwing2;
283 uint32_t VSwing3;
284 uint32_t VSwing4;
285 uint32_t VSwing5;
286 };
287
288 static struct ddi_regoff ddi_DP_train_table[] = {
289 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
290 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
291 .VSwing5 = 0x8158,},
292 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
293 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
294 .VSwing5 = 0x8258,},
295 };
296
297 static uint32_t dp_vswing_premph_table[] = {
298 0x55338954, 0x4000,
299 0x554d8954, 0x2000,
300 0x55668954, 0,
301 0x559ac0d4, 0x6000,
302 };
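/*
 * The table is indexed from cdv_intel_dp_set_vswing_premph() below: even
 * entries, selected by (vswing + premph) * 2, are written to the VSwing2
 * DPIO register, and odd entries, selected by 2 * premph + 1, are written
 * to the PreEmph2 register.
 */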
303 /**
304 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
305 * @encoder: GMA encoder struct
306 *
307 * Return true if a CPU or PCH DP output is attached to an eDP panel,
308 * false otherwise.
309 */
310 static bool is_edp(struct gma_encoder *encoder)
311 {
312 return encoder->type == INTEL_OUTPUT_EDP;
313 }
314
315
316 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
317 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
318 static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
319
320 static int
321 cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
322 {
323 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
324 int max_lane_count = 4;
325
326 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
327 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
328 switch (max_lane_count) {
329 case 1: case 2: case 4:
330 break;
331 default:
332 max_lane_count = 4;
333 }
334 }
335 return max_lane_count;
336 }
337
338 static int
339 cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
340 {
341 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
342 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
343
344 switch (max_link_bw) {
345 case DP_LINK_BW_1_62:
346 case DP_LINK_BW_2_7:
347 break;
348 default:
349 max_link_bw = DP_LINK_BW_1_62;
350 break;
351 }
352 return max_link_bw;
353 }
354
355 static int
356 cdv_intel_dp_link_clock(uint8_t link_bw)
357 {
358 if (link_bw == DP_LINK_BW_2_7)
359 return 270000;
360 else
361 return 162000;
362 }
363
364 static int
365 cdv_intel_dp_link_required(int pixel_clock, int bpp)
366 {
367 return (pixel_clock * bpp + 7) / 8;
368 }
369
370 static int
371 cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
372 {
373 return (max_link_clock * max_lanes * 19) / 20;
374 }
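/*
 * Worked example: a 2.7 GHz link (270000 in the kHz-based units used here)
 * with 4 lanes yields 270000 * 4 * 19 / 20 = 1026000, which is compared
 * against cdv_intel_dp_link_required(); a 148500 kHz pixel clock at 24 bpp
 * needs (148500 * 24 + 7) / 8 = 445500 and therefore fits easily.
 */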
375
376 static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
377 {
378 struct drm_device *dev = intel_encoder->base.dev;
379 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
380 u32 pp;
381
382 if (intel_dp->panel_on) {
383 DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
384 return;
385 }
386 DRM_DEBUG_KMS("\n");
387
388 pp = REG_READ(PP_CONTROL);
389
390 pp |= EDP_FORCE_VDD;
391 REG_WRITE(PP_CONTROL, pp);
392 REG_READ(PP_CONTROL);
393 msleep(intel_dp->panel_power_up_delay);
394 }
395
396 static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
397 {
398 struct drm_device *dev = intel_encoder->base.dev;
399 u32 pp;
400
401 DRM_DEBUG_KMS("\n");
402 pp = REG_READ(PP_CONTROL);
403
404 pp &= ~EDP_FORCE_VDD;
405 REG_WRITE(PP_CONTROL, pp);
406 REG_READ(PP_CONTROL);
407
408 }
409
410 /* Returns true if the panel was already on when called */
411 static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
412 {
413 struct drm_device *dev = intel_encoder->base.dev;
414 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
415 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
416
417 if (intel_dp->panel_on)
418 return true;
419
420 DRM_DEBUG_KMS("\n");
421 pp = REG_READ(PP_CONTROL);
422 pp &= ~PANEL_UNLOCK_MASK;
423
424 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
425 REG_WRITE(PP_CONTROL, pp);
426 REG_READ(PP_CONTROL);
427
428 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
429 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
430 intel_dp->panel_on = false;
431 } else
432 intel_dp->panel_on = true;
433 msleep(intel_dp->panel_power_up_delay);
434
435 return false;
436 }
437
438 static void cdv_intel_edp_panel_off(struct gma_encoder *intel_encoder)
439 {
440 struct drm_device *dev = intel_encoder->base.dev;
441 u32 pp, idle_off_mask = PP_ON;
442 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
443
444 DRM_DEBUG_KMS("\n");
445
446 pp = REG_READ(PP_CONTROL);
447
448 if ((pp & POWER_TARGET_ON) == 0)
449 return;
450
451 intel_dp->panel_on = false;
452 pp &= ~PANEL_UNLOCK_MASK;
453 /* ILK workaround: disable reset around power sequence */
454
455 pp &= ~POWER_TARGET_ON;
456 pp &= ~EDP_FORCE_VDD;
457 pp &= ~EDP_BLC_ENABLE;
458 REG_WRITE(PP_CONTROL, pp);
459 REG_READ(PP_CONTROL);
460 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
461
462 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
463 DRM_DEBUG_KMS("Error in turning off Panel\n");
464 }
465
466 msleep(intel_dp->panel_power_cycle_delay);
467 DRM_DEBUG_KMS("Over\n");
468 }
469
470 static void cdv_intel_edp_backlight_on(struct gma_encoder *intel_encoder)
471 {
472 struct drm_device *dev = intel_encoder->base.dev;
473 u32 pp;
474
475 DRM_DEBUG_KMS("\n");
476 /*
477 * If we enable the backlight right away following a panel power
478 * on, we may see slight flicker as the panel syncs with the eDP
479 * link. So delay a bit to make sure the image is solid before
480 * allowing it to appear.
481 */
482 msleep(300);
483 pp = REG_READ(PP_CONTROL);
484
485 pp |= EDP_BLC_ENABLE;
486 REG_WRITE(PP_CONTROL, pp);
487 gma_backlight_enable(dev);
488 }
489
490 static void cdv_intel_edp_backlight_off(struct gma_encoder *intel_encoder)
491 {
492 struct drm_device *dev = intel_encoder->base.dev;
493 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
494 u32 pp;
495
496 DRM_DEBUG_KMS("\n");
497 gma_backlight_disable(dev);
498 msleep(10);
499 pp = REG_READ(PP_CONTROL);
500
501 pp &= ~EDP_BLC_ENABLE;
502 REG_WRITE(PP_CONTROL, pp);
503 msleep(intel_dp->backlight_off_delay);
504 }
505
506 static enum drm_mode_status
507 cdv_intel_dp_mode_valid(struct drm_connector *connector,
508 struct drm_display_mode *mode)
509 {
510 struct gma_encoder *encoder = gma_attached_encoder(connector);
511 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
512 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
513 int max_lanes = cdv_intel_dp_max_lane_count(encoder);
514 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
515
516 if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
517 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
518 return MODE_PANEL;
519 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
520 return MODE_PANEL;
521 }
522
523 /* only refuse the mode on non-eDP since we have seen some weird eDP panels
524 which are outside spec tolerances but somehow work by magic */
525 if (!is_edp(encoder) &&
526 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
527 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
528 return MODE_CLOCK_HIGH;
529
530 if (is_edp(encoder)) {
531 if (cdv_intel_dp_link_required(mode->clock, 24)
532 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
533 return MODE_CLOCK_HIGH;
534
535 }
536 if (mode->clock < 10000)
537 return MODE_CLOCK_LOW;
538
539 return MODE_OK;
540 }
541
542 static uint32_t
543 pack_aux(uint8_t *src, int src_bytes)
544 {
545 int i;
546 uint32_t v = 0;
547
548 if (src_bytes > 4)
549 src_bytes = 4;
550 for (i = 0; i < src_bytes; i++)
551 v |= ((uint32_t) src[i]) << ((3-i) * 8);
552 return v;
553 }
554
555 static void
556 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
557 {
558 int i;
559 if (dst_bytes > 4)
560 dst_bytes = 4;
561 for (i = 0; i < dst_bytes; i++)
562 dst[i] = src >> ((3-i) * 8);
563 }
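/*
 * Worked example: pack_aux() puts byte 0 in the most significant position,
 * so packing the two bytes 0x12, 0x34 yields 0x12340000, and
 * unpack_aux(0x12340000, dst, 2) recovers 0x12, 0x34 again.
 */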
564
565 static int
566 cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
567 uint8_t *send, int send_bytes,
568 uint8_t *recv, int recv_size)
569 {
570 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
571 uint32_t output_reg = intel_dp->output_reg;
572 struct drm_device *dev = encoder->base.dev;
573 uint32_t ch_ctl = output_reg + 0x10;
574 uint32_t ch_data = ch_ctl + 4;
575 int i;
576 int recv_bytes;
577 uint32_t status;
578 uint32_t aux_clock_divider;
579 int try, precharge;
580
581 /* The clock divider is based off the hrawclk,
582 * and would like to run at 2MHz. So, take the
583 * hrawclk value and divide by 2 and use that.
584 * On the CDV platform, hrawclk is 200MHz,
585 * which gives a divider of 100.
586 */
587 aux_clock_divider = 200 / 2;
588
589 precharge = 4;
590 if (is_edp(encoder))
591 precharge = 10;
592
593 if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
594 DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
595 REG_READ(ch_ctl));
596 return -EBUSY;
597 }
598
599 /* Must try at least 3 times according to DP spec */
600 for (try = 0; try < 5; try++) {
601 /* Load the send data into the aux channel data registers */
602 for (i = 0; i < send_bytes; i += 4)
603 REG_WRITE(ch_data + i,
604 pack_aux(send + i, send_bytes - i));
605
606 /* Send the command and wait for it to complete */
607 REG_WRITE(ch_ctl,
608 DP_AUX_CH_CTL_SEND_BUSY |
609 DP_AUX_CH_CTL_TIME_OUT_400us |
610 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
611 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
612 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
613 DP_AUX_CH_CTL_DONE |
614 DP_AUX_CH_CTL_TIME_OUT_ERROR |
615 DP_AUX_CH_CTL_RECEIVE_ERROR);
616 for (;;) {
617 status = REG_READ(ch_ctl);
618 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
619 break;
620 udelay(100);
621 }
622
623 /* Clear done status and any errors */
624 REG_WRITE(ch_ctl,
625 status |
626 DP_AUX_CH_CTL_DONE |
627 DP_AUX_CH_CTL_TIME_OUT_ERROR |
628 DP_AUX_CH_CTL_RECEIVE_ERROR);
629 if (status & DP_AUX_CH_CTL_DONE)
630 break;
631 }
632
633 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
634 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
635 return -EBUSY;
636 }
637
638 /* Check for timeout or receive error.
639 * Timeouts occur when the sink is not connected
640 */
641 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
642 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
643 return -EIO;
644 }
645
646 /* Timeouts occur when the device isn't connected, so they're
647 * "normal" -- don't fill the kernel log with these */
648 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
649 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
650 return -ETIMEDOUT;
651 }
652
653 /* Unload any bytes sent back from the other side */
654 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
655 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
656 if (recv_bytes > recv_size)
657 recv_bytes = recv_size;
658
659 for (i = 0; i < recv_bytes; i += 4)
660 unpack_aux(REG_READ(ch_data + i),
661 recv + i, recv_bytes - i);
662
663 return recv_bytes;
664 }
665
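/*
 * The native read/write helpers below build the AUX request header used on
 * this channel: msg[0] is the request opcode shifted into the high nibble,
 * msg[1]/msg[2] carry the DPCD address (high byte, then low byte), and
 * msg[3] is the transfer length minus one, followed by the payload for
 * writes.
 */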
666 /* Write data to the aux channel in native mode */
667 static int
668 cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
669 uint16_t address, uint8_t *send, int send_bytes)
670 {
671 int ret;
672 uint8_t msg[20];
673 int msg_bytes;
674 uint8_t ack;
675
676 if (send_bytes > 16)
677 return -1;
678 msg[0] = DP_AUX_NATIVE_WRITE << 4;
679 msg[1] = address >> 8;
680 msg[2] = address & 0xff;
681 msg[3] = send_bytes - 1;
682 memcpy(&msg[4], send, send_bytes);
683 msg_bytes = send_bytes + 4;
684 for (;;) {
685 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
686 if (ret < 0)
687 return ret;
688 ack >>= 4;
689 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
690 break;
691 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
692 udelay(100);
693 else
694 return -EIO;
695 }
696 return send_bytes;
697 }
698
699 /* Write a single byte to the aux channel in native mode */
700 static int
701 cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
702 uint16_t address, uint8_t byte)
703 {
704 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
705 }
706
707 /* read bytes from a native aux channel */
708 static int
709 cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
710 uint16_t address, uint8_t *recv, int recv_bytes)
711 {
712 uint8_t msg[4];
713 int msg_bytes;
714 uint8_t reply[20];
715 int reply_bytes;
716 uint8_t ack;
717 int ret;
718
719 msg[0] = DP_AUX_NATIVE_READ << 4;
720 msg[1] = address >> 8;
721 msg[2] = address & 0xff;
722 msg[3] = recv_bytes - 1;
723
724 msg_bytes = 4;
725 reply_bytes = recv_bytes + 1;
726
727 for (;;) {
728 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
729 reply, reply_bytes);
730 if (ret == 0)
731 return -EPROTO;
732 if (ret < 0)
733 return ret;
734 ack = reply[0] >> 4;
735 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
736 memcpy(recv, reply + 1, ret - 1);
737 return ret - 1;
738 }
739 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
740 udelay(100);
741 else
742 return -EIO;
743 }
744 }
745
746 static int
747 cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
748 uint8_t write_byte, uint8_t *read_byte)
749 {
750 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
751 struct cdv_intel_dp *intel_dp = container_of(adapter,
752 struct cdv_intel_dp,
753 adapter);
754 struct gma_encoder *encoder = intel_dp->encoder;
755 uint16_t address = algo_data->address;
756 uint8_t msg[5];
757 uint8_t reply[2];
758 unsigned retry;
759 int msg_bytes;
760 int reply_bytes;
761 int ret;
762
763 /* Set up the command byte */
764 if (mode & MODE_I2C_READ)
765 msg[0] = DP_AUX_I2C_READ << 4;
766 else
767 msg[0] = DP_AUX_I2C_WRITE << 4;
768
769 if (!(mode & MODE_I2C_STOP))
770 msg[0] |= DP_AUX_I2C_MOT << 4;
771
772 msg[1] = address >> 8;
773 msg[2] = address;
774
775 switch (mode) {
776 case MODE_I2C_WRITE:
777 msg[3] = 0;
778 msg[4] = write_byte;
779 msg_bytes = 5;
780 reply_bytes = 1;
781 break;
782 case MODE_I2C_READ:
783 msg[3] = 0;
784 msg_bytes = 4;
785 reply_bytes = 2;
786 break;
787 default:
788 msg_bytes = 3;
789 reply_bytes = 1;
790 break;
791 }
792
793 for (retry = 0; retry < 5; retry++) {
794 ret = cdv_intel_dp_aux_ch(encoder,
795 msg, msg_bytes,
796 reply, reply_bytes);
797 if (ret < 0) {
798 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
799 return ret;
800 }
801
802 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
803 case DP_AUX_NATIVE_REPLY_ACK:
804 /* I2C-over-AUX Reply field is only valid
805 * when paired with AUX ACK.
806 */
807 break;
808 case DP_AUX_NATIVE_REPLY_NACK:
809 DRM_DEBUG_KMS("aux_ch native nack\n");
810 return -EREMOTEIO;
811 case DP_AUX_NATIVE_REPLY_DEFER:
812 udelay(100);
813 continue;
814 default:
815 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
816 reply[0]);
817 return -EREMOTEIO;
818 }
819
820 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
821 case DP_AUX_I2C_REPLY_ACK:
822 if (mode == MODE_I2C_READ) {
823 *read_byte = reply[1];
824 }
825 return reply_bytes - 1;
826 case DP_AUX_I2C_REPLY_NACK:
827 DRM_DEBUG_KMS("aux_i2c nack\n");
828 return -EREMOTEIO;
829 case DP_AUX_I2C_REPLY_DEFER:
830 DRM_DEBUG_KMS("aux_i2c defer\n");
831 udelay(100);
832 break;
833 default:
834 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
835 return -EREMOTEIO;
836 }
837 }
838
839 DRM_ERROR("too many retries, giving up\n");
840 return -EREMOTEIO;
841 }
842
843 static int
844 cdv_intel_dp_i2c_init(struct gma_connector *connector,
845 struct gma_encoder *encoder, const char *name)
846 {
847 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
848 int ret;
849
850 DRM_DEBUG_KMS("i2c_init %s\n", name);
851
852 intel_dp->algo.running = false;
853 intel_dp->algo.address = 0;
854 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
855
856 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
857 intel_dp->adapter.owner = THIS_MODULE;
858 intel_dp->adapter.class = I2C_CLASS_DDC;
859 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
860 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
861 intel_dp->adapter.algo_data = &intel_dp->algo;
862 intel_dp->adapter.dev.parent = connector->base.kdev;
863
864 if (is_edp(encoder))
865 cdv_intel_edp_panel_vdd_on(encoder);
866 ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
867 if (is_edp(encoder))
868 cdv_intel_edp_panel_vdd_off(encoder);
869
870 return ret;
871 }
872
873 static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
874 struct drm_display_mode *adjusted_mode)
875 {
876 adjusted_mode->hdisplay = fixed_mode->hdisplay;
877 adjusted_mode->hsync_start = fixed_mode->hsync_start;
878 adjusted_mode->hsync_end = fixed_mode->hsync_end;
879 adjusted_mode->htotal = fixed_mode->htotal;
880
881 adjusted_mode->vdisplay = fixed_mode->vdisplay;
882 adjusted_mode->vsync_start = fixed_mode->vsync_start;
883 adjusted_mode->vsync_end = fixed_mode->vsync_end;
884 adjusted_mode->vtotal = fixed_mode->vtotal;
885
886 adjusted_mode->clock = fixed_mode->clock;
887
888 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
889 }
890
891 static bool
892 cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
893 struct drm_display_mode *adjusted_mode)
894 {
895 struct drm_psb_private *dev_priv = to_drm_psb_private(encoder->dev);
896 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
897 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
898 int lane_count, clock;
899 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
900 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
901 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
902 int refclock = mode->clock;
903 int bpp = 24;
904
905 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
906 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
907 refclock = intel_dp->panel_fixed_mode->clock;
908 bpp = dev_priv->edp.bpp;
909 }
910
911 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
912 for (clock = max_clock; clock >= 0; clock--) {
913 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
914
915 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
916 intel_dp->link_bw = bws[clock];
917 intel_dp->lane_count = lane_count;
918 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
919 DRM_DEBUG_KMS("Display port link bw %02x lane "
920 "count %d clock %d\n",
921 intel_dp->link_bw, intel_dp->lane_count,
922 adjusted_mode->clock);
923 return true;
924 }
925 }
926 }
927 if (is_edp(intel_encoder)) {
928 /* okay we failed just pick the highest */
929 intel_dp->lane_count = max_lane_count;
930 intel_dp->link_bw = bws[max_clock];
931 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
932 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
933 "count %d clock %d\n",
934 intel_dp->link_bw, intel_dp->lane_count,
935 adjusted_mode->clock);
936
937 return true;
938 }
939 return false;
940 }
941
942 struct cdv_intel_dp_m_n {
943 uint32_t tu;
944 uint32_t gmch_m;
945 uint32_t gmch_n;
946 uint32_t link_m;
947 uint32_t link_n;
948 };
949
950 static void
951 cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
952 {
953 /*
954 while (*num > 0xffffff || *den > 0xffffff) {
955 *num >>= 1;
956 *den >>= 1;
957 }*/
958 uint64_t value, m;
959 m = *num;
960 value = m * (0x800000);
961 m = do_div(value, *den);
962 *num = value;
963 *den = 0x800000;
964 }
965
966 static void
967 cdv_intel_dp_compute_m_n(int bpp,
968 int nlanes,
969 int pixel_clock,
970 int link_clock,
971 struct cdv_intel_dp_m_n *m_n)
972 {
973 m_n->tu = 64;
974 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
975 m_n->gmch_n = link_clock * nlanes;
976 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
977 m_n->link_m = pixel_clock;
978 m_n->link_n = link_clock;
979 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
980 }
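/*
 * Worked example: for 24 bpp, 4 lanes, a 148500 kHz pixel clock and a
 * 270000 kHz link clock, gmch_m/gmch_n starts out as 445500/1080000 and
 * link_m/link_n as 148500/270000; cdv_intel_reduce_ratio() then rescales
 * each pair so the denominator is exactly 0x800000 before the values are
 * written to the data/link M/N registers in cdv_intel_dp_set_m_n().
 */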
981
982 void
983 cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
984 struct drm_display_mode *adjusted_mode)
985 {
986 struct drm_device *dev = crtc->dev;
987 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
988 struct drm_mode_config *mode_config = &dev->mode_config;
989 struct drm_encoder *encoder;
990 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
991 int lane_count = 4, bpp = 24;
992 struct cdv_intel_dp_m_n m_n;
993 int pipe = gma_crtc->pipe;
994
995 /*
996 * Find the lane count in the intel_encoder private
997 */
998 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
999 struct gma_encoder *intel_encoder;
1000 struct cdv_intel_dp *intel_dp;
1001
1002 if (encoder->crtc != crtc)
1003 continue;
1004
1005 intel_encoder = to_gma_encoder(encoder);
1006 intel_dp = intel_encoder->dev_priv;
1007 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
1008 lane_count = intel_dp->lane_count;
1009 break;
1010 } else if (is_edp(intel_encoder)) {
1011 lane_count = intel_dp->lane_count;
1012 bpp = dev_priv->edp.bpp;
1013 break;
1014 }
1015 }
1016
1017 /*
1018 * Compute the GMCH and Link ratios. The default bpp of 24 is
1019 * the number of bits per pixel post-LUT, which we always
1020 * set up for 8-bits of R/G/B, or 3 bytes total.
1021 */
1022 cdv_intel_dp_compute_m_n(bpp, lane_count,
1023 mode->clock, adjusted_mode->clock, &m_n);
1024
1025 {
1026 REG_WRITE(PIPE_GMCH_DATA_M(pipe),
1027 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
1028 m_n.gmch_m);
1029 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
1030 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
1031 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
1032 }
1033 }
1034
1035 static void
1036 cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1037 struct drm_display_mode *adjusted_mode)
1038 {
1039 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1040 struct drm_crtc *crtc = encoder->crtc;
1041 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
1042 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1043 struct drm_device *dev = encoder->dev;
1044
1045 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
1046 intel_dp->DP |= intel_dp->color_range;
1047
1048 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
1049 intel_dp->DP |= DP_SYNC_HS_HIGH;
1050 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
1051 intel_dp->DP |= DP_SYNC_VS_HIGH;
1052
1053 intel_dp->DP |= DP_LINK_TRAIN_OFF;
1054
1055 switch (intel_dp->lane_count) {
1056 case 1:
1057 intel_dp->DP |= DP_PORT_WIDTH_1;
1058 break;
1059 case 2:
1060 intel_dp->DP |= DP_PORT_WIDTH_2;
1061 break;
1062 case 4:
1063 intel_dp->DP |= DP_PORT_WIDTH_4;
1064 break;
1065 }
1066 if (intel_dp->has_audio)
1067 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
1068
1069 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
1070 intel_dp->link_configuration[0] = intel_dp->link_bw;
1071 intel_dp->link_configuration[1] = intel_dp->lane_count;
1072
1073 /*
1074 * Check for DPCD version >= 1.1 and enhanced framing support
1075 */
1076 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
1077 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
1078 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
1079 intel_dp->DP |= DP_ENHANCED_FRAMING;
1080 }
1081
1082 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
1083 if (gma_crtc->pipe == 1)
1084 intel_dp->DP |= DP_PIPEB_SELECT;
1085
1086 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
1087 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
1088 if (is_edp(intel_encoder)) {
1089 uint32_t pfit_control;
1090 cdv_intel_edp_panel_on(intel_encoder);
1091
1092 if (mode->hdisplay != adjusted_mode->hdisplay ||
1093 mode->vdisplay != adjusted_mode->vdisplay)
1094 pfit_control = PFIT_ENABLE;
1095 else
1096 pfit_control = 0;
1097
1098 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
1099
1100 REG_WRITE(PFIT_CONTROL, pfit_control);
1101 }
1102 }
1103
1104
1105 /* If the sink supports it, try to set the power state appropriately */
1106 static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
1107 {
1108 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1109 int ret, i;
1110
1111 /* Should have a valid DPCD by this point */
1112 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1113 return;
1114
1115 if (mode != DRM_MODE_DPMS_ON) {
1116 ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
1117 DP_SET_POWER_D3);
1118 if (ret != 1)
1119 DRM_DEBUG_DRIVER("failed to write sink power state\n");
1120 } else {
1121 /*
1122 * When turning on, we need to retry a few times, waiting about 1ms
1123 * between attempts, to give the sink time to wake up.
1124 */
1125 for (i = 0; i < 3; i++) {
1126 ret = cdv_intel_dp_aux_native_write_1(encoder,
1127 DP_SET_POWER,
1128 DP_SET_POWER_D0);
1129 if (ret == 1)
1130 break;
1131 udelay(1000);
1132 }
1133 }
1134 }
1135
1136 static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
1137 {
1138 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1139 int edp = is_edp(intel_encoder);
1140
1141 if (edp) {
1142 cdv_intel_edp_backlight_off(intel_encoder);
1143 cdv_intel_edp_panel_off(intel_encoder);
1144 cdv_intel_edp_panel_vdd_on(intel_encoder);
1145 }
1146 /* Wake up the sink first */
1147 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
1148 cdv_intel_dp_link_down(intel_encoder);
1149 if (edp)
1150 cdv_intel_edp_panel_vdd_off(intel_encoder);
1151 }
1152
1153 static void cdv_intel_dp_commit(struct drm_encoder *encoder)
1154 {
1155 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1156 int edp = is_edp(intel_encoder);
1157
1158 if (edp)
1159 cdv_intel_edp_panel_on(intel_encoder);
1160 cdv_intel_dp_start_link_train(intel_encoder);
1161 cdv_intel_dp_complete_link_train(intel_encoder);
1162 if (edp)
1163 cdv_intel_edp_backlight_on(intel_encoder);
1164 }
1165
1166 static void
1167 cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
1168 {
1169 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1170 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1171 struct drm_device *dev = encoder->dev;
1172 uint32_t dp_reg = REG_READ(intel_dp->output_reg);
1173 int edp = is_edp(intel_encoder);
1174
1175 if (mode != DRM_MODE_DPMS_ON) {
1176 if (edp) {
1177 cdv_intel_edp_backlight_off(intel_encoder);
1178 cdv_intel_edp_panel_vdd_on(intel_encoder);
1179 }
1180 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1181 cdv_intel_dp_link_down(intel_encoder);
1182 if (edp) {
1183 cdv_intel_edp_panel_vdd_off(intel_encoder);
1184 cdv_intel_edp_panel_off(intel_encoder);
1185 }
1186 } else {
1187 if (edp)
1188 cdv_intel_edp_panel_on(intel_encoder);
1189 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1190 if (!(dp_reg & DP_PORT_EN)) {
1191 cdv_intel_dp_start_link_train(intel_encoder);
1192 cdv_intel_dp_complete_link_train(intel_encoder);
1193 }
1194 if (edp)
1195 cdv_intel_edp_backlight_on(intel_encoder);
1196 }
1197 }
1198
1199 /*
1200 * Native read with retry for link status and receiver capability reads for
1201 * cases where the sink may still be asleep.
1202 */
1203 static bool
1204 cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
1205 uint8_t *recv, int recv_bytes)
1206 {
1207 int ret, i;
1208
1209 /*
1210 * Sinks are *supposed* to come up within 1ms from an off state,
1211 * but we're also supposed to retry 3 times per the spec.
1212 */
1213 for (i = 0; i < 3; i++) {
1214 ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
1215 recv_bytes);
1216 if (ret == recv_bytes)
1217 return true;
1218 udelay(1000);
1219 }
1220
1221 return false;
1222 }
1223
1224 /*
1225 * Fetch AUX CH registers 0x202 - 0x207 which contain
1226 * link status information
1227 */
1228 static bool
1229 cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
1230 {
1231 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1232 return cdv_intel_dp_aux_native_read_retry(encoder,
1233 DP_LANE0_1_STATUS,
1234 intel_dp->link_status,
1235 DP_LINK_STATUS_SIZE);
1236 }
1237
1238 static uint8_t
1239 cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1240 int r)
1241 {
1242 return link_status[r - DP_LANE0_1_STATUS];
1243 }
1244
1245 static uint8_t
1246 cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
1247 int lane)
1248 {
1249 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1250 int s = ((lane & 1) ?
1251 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1252 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1253 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1254
1255 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1256 }
1257
1258 static uint8_t
1259 cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
1260 int lane)
1261 {
1262 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1263 int s = ((lane & 1) ?
1264 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1265 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1266 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1267
1268 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1269 }
1270
1271 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
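/*
 * Each train_set byte combines the requested voltage-swing level with the
 * requested pre-emphasis level, plus the MAX_SWING_REACHED /
 * MAX_PRE_EMPHASIS_REACHED flags once the caps are hit;
 * cdv_intel_get_adjust_train() below derives it from the sink's
 * ADJUST_REQUEST fields and applies the same value to every lane.
 */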
1272
1273 static void
1274 cdv_intel_get_adjust_train(struct gma_encoder *encoder)
1275 {
1276 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1277 uint8_t v = 0;
1278 uint8_t p = 0;
1279 int lane;
1280
1281 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1282 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
1283 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
1284
1285 if (this_v > v)
1286 v = this_v;
1287 if (this_p > p)
1288 p = this_p;
1289 }
1290
1291 if (v >= CDV_DP_VOLTAGE_MAX)
1292 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1293
1294 if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
1295 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1296
1297 for (lane = 0; lane < 4; lane++)
1298 intel_dp->train_set[lane] = v | p;
1299 }
1300
1301
1302 static uint8_t
1303 cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1304 int lane)
1305 {
1306 int i = DP_LANE0_1_STATUS + (lane >> 1);
1307 int s = (lane & 1) * 4;
1308 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1309
1310 return (l >> s) & 0xf;
1311 }
1312
1313 /* Check for clock recovery is done on all channels */
1314 static bool
1315 cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1316 {
1317 int lane;
1318 uint8_t lane_status;
1319
1320 for (lane = 0; lane < lane_count; lane++) {
1321 lane_status = cdv_intel_get_lane_status(link_status, lane);
1322 if ((lane_status & DP_LANE_CR_DONE) == 0)
1323 return false;
1324 }
1325 return true;
1326 }
1327
1328 /* Check to see if channel eq is done on all channels */
1329 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1330 DP_LANE_CHANNEL_EQ_DONE|\
1331 DP_LANE_SYMBOL_LOCKED)
1332 static bool
1333 cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
1334 {
1335 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1336 uint8_t lane_align;
1337 uint8_t lane_status;
1338 int lane;
1339
1340 lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
1341 DP_LANE_ALIGN_STATUS_UPDATED);
1342 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1343 return false;
1344 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1345 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
1346 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1347 return false;
1348 }
1349 return true;
1350 }
1351
1352 static bool
1353 cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
1354 uint32_t dp_reg_value,
1355 uint8_t dp_train_pat)
1356 {
1357 struct drm_device *dev = encoder->base.dev;
1358 int ret;
1359 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1360
1361 REG_WRITE(intel_dp->output_reg, dp_reg_value);
1362 REG_READ(intel_dp->output_reg);
1363
1364 ret = cdv_intel_dp_aux_native_write_1(encoder,
1365 DP_TRAINING_PATTERN_SET,
1366 dp_train_pat);
1367
1368 if (ret != 1) {
1369 DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
1370 dp_train_pat);
1371 return false;
1372 }
1373
1374 return true;
1375 }
1376
1377
1378 static bool
1379 cdv_intel_dplink_set_level(struct gma_encoder *encoder,
1380 uint8_t dp_train_pat)
1381 {
1382 int ret;
1383 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1384
1385 ret = cdv_intel_dp_aux_native_write(encoder,
1386 DP_TRAINING_LANE0_SET,
1387 intel_dp->train_set,
1388 intel_dp->lane_count);
1389
1390 if (ret != intel_dp->lane_count) {
1391 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
1392 intel_dp->train_set[0], intel_dp->lane_count);
1393 return false;
1394 }
1395 return true;
1396 }
1397
1398 static void
1399 cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
1400 {
1401 struct drm_device *dev = encoder->base.dev;
1402 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1403 struct ddi_regoff *ddi_reg;
1404 int vswing, premph, index;
1405
1406 if (intel_dp->output_reg == DP_B)
1407 ddi_reg = &ddi_DP_train_table[0];
1408 else
1409 ddi_reg = &ddi_DP_train_table[1];
1410
1411 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
1412 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
1413 DP_TRAIN_PRE_EMPHASIS_SHIFT;
1414
1415 if (vswing + premph > 3)
1416 return;
1417 #ifdef CDV_FAST_LINK_TRAIN
1418 return;
1419 #endif
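/*
 * Note: CDV_FAST_LINK_TRAIN is defined above, so this early return is
 * always taken and the DPIO vswing/pre-emphasis programming below is
 * never reached.
 */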
1420 DRM_DEBUG_KMS("Test2\n");
1421 //return ;
1422 cdv_sb_reset(dev);
1423 /* ;Swing voltage programming
1424 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
1425 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
1426
1427 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
1428 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
1429
1430 /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
1431 * The VSwing_PreEmph table is also considered based on the vswing/premph
1432 */
1433 index = (vswing + premph) * 2;
1434 if (premph == 1 && vswing == 1) {
1435 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
1436 } else
1437 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
1438
1439 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
1440 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
1441 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
1442 else
1443 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
1444
1445 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
1446 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
1447
1448 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
1449 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
1450
1451 /* ;Pre emphasis programming
1452 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
1453 */
1454 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
1455
1456 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
1457 index = 2 * premph + 1;
1458 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
1459 return;
1460 }
1461
1462
1463 /* Enable corresponding port and start training pattern 1 */
1464 static void
1465 cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
1466 {
1467 struct drm_device *dev = encoder->base.dev;
1468 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1469 int i;
1470 uint8_t voltage;
1471 bool clock_recovery = false;
1472 int tries;
1473 u32 reg;
1474 uint32_t DP = intel_dp->DP;
1475
1476 DP |= DP_PORT_EN;
1477 DP &= ~DP_LINK_TRAIN_MASK;
1478
1479 reg = DP;
1480 reg |= DP_LINK_TRAIN_PAT_1;
1481 /* Enable output, wait for it to become active */
1482 REG_WRITE(intel_dp->output_reg, reg);
1483 REG_READ(intel_dp->output_reg);
1484 gma_wait_for_vblank(dev);
1485
1486 DRM_DEBUG_KMS("Link config\n");
1487 /* Write the link configuration data */
1488 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
1489 intel_dp->link_configuration,
1490 2);
1491
1492 memset(intel_dp->train_set, 0, 4);
1493 voltage = 0;
1494 tries = 0;
1495 clock_recovery = false;
1496
1497 DRM_DEBUG_KMS("Start train\n");
1498 reg = DP | DP_LINK_TRAIN_PAT_1;
1499
1500 for (;;) {
1501 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1502 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1503 intel_dp->train_set[0],
1504 intel_dp->link_configuration[0],
1505 intel_dp->link_configuration[1]);
1506
1507 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
1508 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
1509 }
1510 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1511 /* Set training pattern 1 */
1512
1513 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
1514
1515 udelay(200);
1516 if (!cdv_intel_dp_get_link_status(encoder))
1517 break;
1518
1519 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1520 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1521 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1522
1523 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1524 DRM_DEBUG_KMS("PT1 train is done\n");
1525 clock_recovery = true;
1526 break;
1527 }
1528
1529 /* Check to see if we've tried the max voltage */
1530 for (i = 0; i < intel_dp->lane_count; i++)
1531 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1532 break;
1533 if (i == intel_dp->lane_count)
1534 break;
1535
1536 /* Check to see if we've tried the same voltage 5 times */
1537 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1538 ++tries;
1539 if (tries == 5)
1540 break;
1541 } else
1542 tries = 0;
1543 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1544
1545 /* Compute new intel_dp->train_set as requested by target */
1546 cdv_intel_get_adjust_train(encoder);
1547
1548 }
1549
1550 if (!clock_recovery) {
1551 DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]);
1552 }
1553
1554 intel_dp->DP = DP;
1555 }
1556
1557 static void
1558 cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
1559 {
1560 struct drm_device *dev = encoder->base.dev;
1561 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1562 int tries, cr_tries;
1563 u32 reg;
1564 uint32_t DP = intel_dp->DP;
1565
1566 /* channel equalization */
1567 tries = 0;
1568 cr_tries = 0;
1569
1570 DRM_DEBUG_KMS("\n");
1571 reg = DP | DP_LINK_TRAIN_PAT_2;
1572
1573 for (;;) {
1574
1575 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1576 intel_dp->train_set[0],
1577 intel_dp->link_configuration[0],
1578 intel_dp->link_configuration[1]);
1579 /* channel eq pattern */
1580
1581 if (!cdv_intel_dp_set_link_train(encoder, reg,
1582 DP_TRAINING_PATTERN_2)) {
1583 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
1584 }
1585 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1586
1587 if (cr_tries > 5) {
1588 DRM_ERROR("failed to train DP, aborting\n");
1589 cdv_intel_dp_link_down(encoder);
1590 break;
1591 }
1592
1593 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1594
1595 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);
1596
1597 udelay(1000);
1598 if (!cdv_intel_dp_get_link_status(encoder))
1599 break;
1600
1601 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1602 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1603 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1604
1605 /* Make sure clock is still ok */
1606 if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1607 cdv_intel_dp_start_link_train(encoder);
1608 cr_tries++;
1609 continue;
1610 }
1611
1612 if (cdv_intel_channel_eq_ok(encoder)) {
1613 DRM_DEBUG_KMS("PT2 train is done\n");
1614 break;
1615 }
1616
1617 /* Try 5 times, then try clock recovery if that fails */
1618 if (tries > 5) {
1619 cdv_intel_dp_link_down(encoder);
1620 cdv_intel_dp_start_link_train(encoder);
1621 tries = 0;
1622 cr_tries++;
1623 continue;
1624 }
1625
1626 /* Compute new intel_dp->train_set as requested by target */
1627 cdv_intel_get_adjust_train(encoder);
1628 ++tries;
1629
1630 }
1631
1632 reg = DP | DP_LINK_TRAIN_OFF;
1633
1634 REG_WRITE(intel_dp->output_reg, reg);
1635 REG_READ(intel_dp->output_reg);
1636 cdv_intel_dp_aux_native_write_1(encoder,
1637 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1638 }
1639
1640 static void
1641 cdv_intel_dp_link_down(struct gma_encoder *encoder)
1642 {
1643 struct drm_device *dev = encoder->base.dev;
1644 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1645 uint32_t DP = intel_dp->DP;
1646
1647 if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1648 return;
1649
1650 DRM_DEBUG_KMS("\n");
1651
1652
1653 {
1654 DP &= ~DP_LINK_TRAIN_MASK;
1655 REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1656 }
1657 REG_READ(intel_dp->output_reg);
1658
1659 msleep(17);
1660
1661 REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1662 REG_READ(intel_dp->output_reg);
1663 }
1664
1665 static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
1666 {
1667 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1668 enum drm_connector_status status;
1669
1670 status = connector_status_disconnected;
1671 if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
1672 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1673 {
1674 if (intel_dp->dpcd[DP_DPCD_REV] != 0)
1675 status = connector_status_connected;
1676 }
1677 if (status == connector_status_connected)
1678 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
1679 intel_dp->dpcd[0], intel_dp->dpcd[1],
1680 intel_dp->dpcd[2], intel_dp->dpcd[3]);
1681 return status;
1682 }
1683
1684 /*
1685 * Detect whether a DisplayPort/eDP sink is present by reading its DPCD.
1686 *
1687 * \return connector_status_connected if the sink reports a valid DPCD.
1688 * \return connector_status_disconnected otherwise.
1689 */
1690 static enum drm_connector_status
1691 cdv_intel_dp_detect(struct drm_connector *connector, bool force)
1692 {
1693 struct gma_encoder *encoder = gma_attached_encoder(connector);
1694 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1695 enum drm_connector_status status;
1696 struct edid *edid = NULL;
1697 int edp = is_edp(encoder);
1698
1699 intel_dp->has_audio = false;
1700
1701 if (edp)
1702 cdv_intel_edp_panel_vdd_on(encoder);
1703 status = cdv_dp_detect(encoder);
1704 if (status != connector_status_connected) {
1705 if (edp)
1706 cdv_intel_edp_panel_vdd_off(encoder);
1707 return status;
1708 }
1709
1710 if (intel_dp->force_audio) {
1711 intel_dp->has_audio = intel_dp->force_audio > 0;
1712 } else {
1713 edid = drm_get_edid(connector, &intel_dp->adapter);
1714 if (edid) {
1715 intel_dp->has_audio = drm_detect_monitor_audio(edid);
1716 kfree(edid);
1717 }
1718 }
1719 if (edp)
1720 cdv_intel_edp_panel_vdd_off(encoder);
1721
1722 return connector_status_connected;
1723 }
1724
1725 static int cdv_intel_dp_get_modes(struct drm_connector *connector)
1726 {
1727 struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
1728 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1729 struct edid *edid = NULL;
1730 int ret = 0;
1731 int edp = is_edp(intel_encoder);
1732
1733
1734 edid = drm_get_edid(connector, &intel_dp->adapter);
1735 if (edid) {
1736 drm_connector_update_edid_property(connector, edid);
1737 ret = drm_add_edid_modes(connector, edid);
1738 kfree(edid);
1739 }
1740
1741 if (is_edp(intel_encoder)) {
1742 struct drm_device *dev = connector->dev;
1743 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
1744
1745 cdv_intel_edp_panel_vdd_off(intel_encoder);
1746 if (ret) {
1747 if (edp && !intel_dp->panel_fixed_mode) {
1748 struct drm_display_mode *newmode;
1749 list_for_each_entry(newmode, &connector->probed_modes,
1750 head) {
1751 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1752 intel_dp->panel_fixed_mode =
1753 drm_mode_duplicate(dev, newmode);
1754 break;
1755 }
1756 }
1757 }
1758
1759 return ret;
1760 }
1761 if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
1762 intel_dp->panel_fixed_mode =
1763 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1764 if (intel_dp->panel_fixed_mode) {
1765 intel_dp->panel_fixed_mode->type |=
1766 DRM_MODE_TYPE_PREFERRED;
1767 }
1768 }
1769 if (intel_dp->panel_fixed_mode != NULL) {
1770 struct drm_display_mode *mode;
1771 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
1772 drm_mode_probed_add(connector, mode);
1773 return 1;
1774 }
1775 }
1776
1777 return ret;
1778 }
1779
1780 static bool
1781 cdv_intel_dp_detect_audio(struct drm_connector *connector)
1782 {
1783 struct gma_encoder *encoder = gma_attached_encoder(connector);
1784 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1785 struct edid *edid;
1786 bool has_audio = false;
1787 int edp = is_edp(encoder);
1788
1789 if (edp)
1790 cdv_intel_edp_panel_vdd_on(encoder);
1791
1792 edid = drm_get_edid(connector, &intel_dp->adapter);
1793 if (edid) {
1794 has_audio = drm_detect_monitor_audio(edid);
1795 kfree(edid);
1796 }
1797 if (edp)
1798 cdv_intel_edp_panel_vdd_off(encoder);
1799
1800 return has_audio;
1801 }
1802
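/*
 * Handle changes to the force_audio and broadcast_rgb connector
 * properties. A change to either one triggers a modeset on the attached
 * CRTC so that the new setting takes effect.
 */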
static int
cdv_intel_dp_set_property(struct drm_connector *connector,
			  struct drm_property *property,
			  uint64_t val)
{
	struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int ret;

	ret = drm_object_property_set_value(&connector->base, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == 0)
			has_audio = cdv_intel_dp_detect_audio(connector);
		else
			has_audio = i > 0;

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (encoder->base.crtc) {
		struct drm_crtc *crtc = encoder->base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->primary->fb);
	}

	return 0;
}

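/*
 * Connector destroy callback: free the cached eDP fixed panel mode,
 * remove the DP AUX i2c adapter and tear down the connector itself.
 */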
static void
cdv_intel_dp_destroy(struct drm_connector *connector)
{
	struct gma_connector *gma_connector = to_gma_connector(connector);
	struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;

	if (is_edp(gma_encoder)) {
		/* cdv_intel_panel_destroy_backlight(connector->dev); */
		kfree(intel_dp->panel_fixed_mode);
		intel_dp->panel_fixed_mode = NULL;
	}
	i2c_del_adapter(&intel_dp->adapter);
	drm_connector_cleanup(connector);
	kfree(gma_connector);
}

static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
	.dpms = cdv_intel_dp_dpms,
	.mode_fixup = cdv_intel_dp_mode_fixup,
	.prepare = cdv_intel_dp_prepare,
	.mode_set = cdv_intel_dp_mode_set,
	.commit = cdv_intel_dp_commit,
};

static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = cdv_intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = cdv_intel_dp_set_property,
	.destroy = cdv_intel_dp_destroy,
};

static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
	.get_modes = cdv_intel_dp_get_modes,
	.mode_valid = cdv_intel_dp_mode_valid,
	.best_encoder = gma_best_encoder,
};

static void cdv_intel_dp_add_properties(struct drm_connector *connector)
{
	cdv_intel_attach_force_audio_property(connector);
	cdv_intel_attach_broadcast_rgb_property(connector);
}

/* Check the VBT to see whether the eDP panel is on the DP-C port */
static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
{
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPC &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/*
 * Cedarview display clock gating
 *
 * We need to disable this to get correct behaviour while enabling
 * DP/eDP. TODO - investigate if we can restore normal clock gating
 * after the output has been enabled.
 */
static void cdv_disable_intel_clock_gating(struct drm_device *dev)
{
	u32 reg_value;

	reg_value = REG_READ(DSPCLK_GATE_D);

	reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
		      DPUNIT_PIPEA_GATE_DISABLE |
		      DPCUNIT_CLOCK_GATE_DISABLE |
		      DPLSUNIT_CLOCK_GATE_DISABLE |
		      DPOUNIT_CLOCK_GATE_DISABLE |
		      DPIOUNIT_CLOCK_GATE_DISABLE);

	REG_WRITE(DSPCLK_GATE_D, reg_value);

	udelay(500);
}

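/*
 * Create and register the DP/eDP encoder and connector for the given
 * output register (DP_B or DP_C). DP_C is reported as eDP when the VBT
 * marks the corresponding child device as an eDP panel. This also sets
 * up the AUX-based DDC bus, disables display clock gating (needed for
 * DP/eDP to behave) and, for eDP, reads the panel power sequencing
 * delays from hardware.
 */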
void
cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
{
	struct gma_encoder *gma_encoder;
	struct gma_connector *gma_connector;
	struct drm_connector *connector;
	struct drm_encoder *encoder;
	struct cdv_intel_dp *intel_dp;
	const char *name = NULL;
	int type = DRM_MODE_CONNECTOR_DisplayPort;

	gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
	if (!gma_encoder)
		return;
	gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
	if (!gma_connector)
		goto err_connector;
	intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
	if (!intel_dp)
		goto err_priv;

	if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
		type = DRM_MODE_CONNECTOR_eDP;

	connector = &gma_connector->base;
	encoder = &gma_encoder->base;

	drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
	drm_simple_encoder_init(dev, encoder, DRM_MODE_ENCODER_TMDS);

	gma_connector_attach_encoder(gma_connector, gma_encoder);

	if (type == DRM_MODE_CONNECTOR_DisplayPort)
		gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	else
		gma_encoder->type = INTEL_OUTPUT_EDP;

	gma_encoder->dev_priv = intel_dp;
	intel_dp->encoder = gma_encoder;
	intel_dp->output_reg = output_reg;

	drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
	drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_B:
		name = "DPDDC-B";
		gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
		break;
	case DP_C:
		name = "DPDDC-C";
		gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
		break;
	}

	cdv_disable_intel_clock_gating(dev);

	cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
	/* FIXME: handle cdv_intel_dp_i2c_init() failure */
	cdv_intel_dp_add_properties(connector);

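	/*
	 * eDP-only setup: unlock the panel power control registers, route
	 * the backlight PWM to pipe B, and read back the panel power
	 * sequencing delays (T1-T3, T8, T9, T10, T11-T12). The registers
	 * hold these delays in units of 100us, hence the divide by ten to
	 * convert to milliseconds. Finally, probe the DPCD once to make
	 * sure a real panel is attached.
	 */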
	if (is_edp(gma_encoder)) {
		int ret;
		struct edp_power_seq cur;
		u32 pp_on, pp_off, pp_div;
		u32 pwm_ctrl;

		pp_on = REG_READ(PP_CONTROL);
		pp_on &= ~PANEL_UNLOCK_MASK;
		pp_on |= PANEL_UNLOCK_REGS;

		REG_WRITE(PP_CONTROL, pp_on);

		pwm_ctrl = REG_READ(BLC_PWM_CTL2);
		pwm_ctrl |= PWM_PIPE_B;
		REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);

		pp_on = REG_READ(PP_ON_DELAYS);
		pp_off = REG_READ(PP_OFF_DELAYS);
		pp_div = REG_READ(PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT);

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
		intel_dp->backlight_on_delay = cur.t8 / 10;
		intel_dp->backlight_off_delay = cur.t9 / 10;
		intel_dp->panel_power_down_delay = cur.t10 / 10;
		intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		cdv_intel_edp_panel_vdd_on(gma_encoder);
		ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
						   intel_dp->dpcd,
						   sizeof(intel_dp->dpcd));
		cdv_intel_edp_panel_vdd_off(gma_encoder);
		if (ret <= 0) {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			drm_encoder_cleanup(encoder);
			cdv_intel_dp_destroy(connector);
			goto err_connector;
		} else {
			DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
				      intel_dp->dpcd[0], intel_dp->dpcd[1],
				      intel_dp->dpcd[2], intel_dp->dpcd[3]);
		}
		/*
		 * The CDV reference driver moves panel backlight setup into
		 * the displays that have a backlight: this is a good idea and
		 * one we should probably adopt, however we need to migrate
		 * all the drivers before we can do that.
		 */
		/* cdv_intel_panel_setup_backlight(dev); */
	}
	return;

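/*
 * Error unwind: err_priv is taken when the private data allocation fails
 * and frees the connector; err_connector is taken when the connector
 * allocation fails (or eDP probing fails) and frees the encoder.
 */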
err_priv:
	kfree(gma_connector);
err_connector:
	kfree(gma_encoder);
}