1 /*
2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 * Jerome Glisse
26 */
27 #include "drmP.h"
28 #include "radeon_drm.h"
29 #include "radeon.h"
30
31 #include "atom.h"
32 #include "atom-bits.h"
33 #include "drm_dp_helper.h"
34
35 /* move these to drm_dp_helper.c/h */
36 #define DP_LINK_CONFIGURATION_SIZE 9
37 #define DP_LINK_STATUS_SIZE 6
38 #define DP_DPCD_SIZE 8
39
/* Human-readable names for DP training voltage-swing levels,
 * indexed by (train_set >> DP_TRAIN_VOLTAGE_SWING_SHIFT).
 * const-qualified: only ever read by debug output.
 */
static const char * const voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
/* Human-readable names for DP pre-emphasis levels,
 * indexed by (train_set >> DP_TRAIN_PRE_EMPHASIS_SHIFT).
 */
static const char * const pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
46
47 /***** radeon AUX functions *****/
48
49 /* Atom needs data in little endian format
50 * so swap as appropriate when copying data to
51 * or from atom. Note that atom operates on
52 * dw units.
53 */
/* Copy @num_bytes between a caller buffer and atom scratch memory.
 * Atom tables operate on little-endian dwords, so on big-endian hosts
 * each dword is byteswapped: to_le=true when copying toward atom,
 * to_le=false when copying from it.  On little-endian hosts this is a
 * plain memcpy.  num_bytes must be <= 20 (the bounce-buffer size).
 */
void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
{
#ifdef __BIG_ENDIAN
	/* u32 bounce buffers so the dword loads/stores below are aligned
	 * and do not violate strict aliasing; the previous u8[] + u32*
	 * cast could fault on architectures without unaligned access. */
	u32 src_tmp[5], dst_tmp[5];
	int i;
	u8 align_num_bytes = (num_bytes + 3) & ~3;

	if (to_le) {
		memcpy(src_tmp, src, num_bytes);
		for (i = 0; i < align_num_bytes / 4; i++)
			dst_tmp[i] = cpu_to_le32(src_tmp[i]);
		memcpy(dst, dst_tmp, num_bytes);
	} else {
		memcpy(src_tmp, src, num_bytes);
		for (i = 0; i < align_num_bytes / 4; i++)
			dst_tmp[i] = le32_to_cpu(src_tmp[i]);
		memcpy(dst, dst_tmp, num_bytes);
	}
#else
	memcpy(dst, src, num_bytes);
#endif
}
82
/* Parameter block for the ProcessAuxChannelTransaction atom command
 * table; v2 additionally carries the HPD id used on DCE4+ parts. */
union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};
87
/* Execute one AUX channel transaction via the atom
 * ProcessAuxChannelTransaction command table.
 *
 * @chan:       AUX channel (i2c bus record) to use
 * @send:       request bytes (AUX header + payload)
 * @send_bytes: number of request bytes
 * @recv:       buffer for the reply payload, may be NULL
 * @recv_size:  size of @recv in bytes
 * @delay:      delay in usec (handed to atom in 10-usec units)
 * @ack:        raw AUX reply status byte is always stored here
 *
 * Returns the number of reply bytes copied into @recv (clamped to
 * @recv_size), or a negative errno mapped from the atom reply status.
 */
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;

	memset(&args, 0, sizeof(args));

	/* request/reply data live in the atom scratch area; the +1 skips
	 * the first scratch dword — presumably reserved, confirm against
	 * the atom context layout */
	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);

	/* atom wants the request as little-endian dwords */
	radeon_atom_copy_swap(base, send, send_bytes, true);

	/* scratch offsets: request at byte 4, reply data at byte 20 */
	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	/* hand the raw status back even when we also return an error */
	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		return -ETIMEDOUT;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		return -EBUSY;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		return -EIO;
	}

	/* clamp the reply length to the caller's buffer */
	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);

	return recv_bytes;
}
145
/* Native AUX write of up to 16 bytes to DPCD address @address.
 * Returns the number of bytes written, or a negative errno; retries up
 * to 4 times on busy/defer replies.
 */
static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
				      u16 address, u8 *send, u8 send_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 buf[20];
	u8 reply;
	unsigned attempt;
	int err;
	int total = send_bytes + 4;

	if (send_bytes > 16)
		return -1;

	/* 4-byte native AUX write header followed by the payload */
	buf[0] = address;
	buf[1] = address >> 8;
	buf[2] = AUX_NATIVE_WRITE << 4;
	buf[3] = (total << 4) | (send_bytes - 1);
	memcpy(&buf[4], send, send_bytes);

	for (attempt = 0; attempt < 4; attempt++) {
		err = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    buf, total, NULL, 0, delay, &reply);
		if (err == -EBUSY)
			continue;
		if (err < 0)
			return err;
		switch (reply & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			return send_bytes;
		case AUX_NATIVE_REPLY_DEFER:
			udelay(400);
			break;	/* retry */
		default:
			return -EIO;
		}
	}

	return -EIO;
}
182
/* Native AUX read of @recv_bytes bytes from DPCD address @address.
 * Returns the number of bytes read, or a negative errno; retries up to
 * 4 times on busy/defer replies.
 */
static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
				     u16 address, u8 *recv, int recv_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 buf[4];
	u8 reply;
	int err;
	unsigned attempt;

	/* 4-byte native AUX read request header */
	buf[0] = address;
	buf[1] = address >> 8;
	buf[2] = AUX_NATIVE_READ << 4;
	buf[3] = (4 << 4) | (recv_bytes - 1);

	for (attempt = 0; attempt < 4; attempt++) {
		err = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    buf, 4, recv, recv_bytes, delay, &reply);
		if (err == -EBUSY)
			continue;
		if (err < 0)
			return err;
		if ((reply & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return err;
		if ((reply & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);	/* retry */
		else if (err == 0)
			return -EPROTO;	/* no data and no ack/defer */
		else
			return -EIO;
	}

	return -EIO;
}
217
/* Write one DPCD register over native AUX (best effort; the result of
 * the underlying AUX write is intentionally ignored). */
static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
				  u16 reg, u8 val)
{
	u8 byte = val;

	radeon_dp_aux_native_write(radeon_connector, reg, &byte, 1, 0);
}
223
radeon_read_dpcd_reg(struct radeon_connector * radeon_connector,u16 reg)224 static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
225 u16 reg)
226 {
227 u8 val = 0;
228
229 radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);
230
231 return val;
232 }
233
/* I2C-over-AUX transfer of a single byte; low-level hook for the
 * i2c_algo_dp_aux adapter.
 *
 * @adapter:    i2c adapter (embedded in a radeon_i2c_chan)
 * @mode:       MODE_I2C_* flags from the algo layer
 * @write_byte: byte to send on a write
 * @read_byte:  where to store the byte received on a read
 *
 * Returns the underlying AUX transfer length on success or a negative
 * errno; retries up to 4 times on busy/defer replies.
 */
int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
			 u8 write_byte, u8 *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
	u16 address = algo_data->address;
	u8 msg[5];
	u8 reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes = 1;
	int ret;
	u8 ack;

	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[2] = AUX_I2C_READ << 4;
	else
		msg[2] = AUX_I2C_WRITE << 4;

	/* keep MOT (middle-of-transaction) set until a STOP is requested */
	if (!(mode & MODE_I2C_STOP))
		msg[2] |= AUX_I2C_MOT << 4;

	msg[0] = address;
	msg[1] = address >> 8;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg_bytes = 5;
		msg[3] = msg_bytes << 4;
		msg[4] = write_byte;
		break;
	case MODE_I2C_READ:
		msg_bytes = 4;
		msg[3] = msg_bytes << 4;
		break;
	default:
		/* address-only transaction (start/stop) */
		msg_bytes = 4;
		msg[3] = 3 << 4;
		break;
	}

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(auxch,
					    msg, msg_bytes, reply, reply_bytes, 0, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		/* first check the native AUX reply ... */
		switch (ack & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_ch native defer\n");
			udelay(400);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}

		/* ... then the I2C-over-AUX reply */
		switch (ack & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ)
				*read_byte = reply[0];
			return ret;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(400);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}
	}

	DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
	return -EREMOTEIO;
}
325
326 /***** general DP utility functions *****/
327
/* Fetch DPCD status register @r from a cached link-status block
 * (which starts at DP_LANE0_1_STATUS). */
static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
{
	int idx = r - DP_LANE0_1_STATUS;

	return link_status[idx];
}
332
/* Extract the 4-bit status nibble for @lane; two lanes share each
 * status byte, odd lanes in the high nibble. */
static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
			     int lane)
{
	int reg = DP_LANE0_1_STATUS + (lane >> 1);
	int shift = (lane & 1) ? 4 : 0;

	return (dp_link_status(link_status, reg) >> shift) & 0xf;
}
341
dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],int lane_count)342 static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
343 int lane_count)
344 {
345 int lane;
346 u8 lane_status;
347
348 for (lane = 0; lane < lane_count; lane++) {
349 lane_status = dp_get_lane_status(link_status, lane);
350 if ((lane_status & DP_LANE_CR_DONE) == 0)
351 return false;
352 }
353 return true;
354 }
355
dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],int lane_count)356 static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
357 int lane_count)
358 {
359 u8 lane_align;
360 u8 lane_status;
361 int lane;
362
363 lane_align = dp_link_status(link_status,
364 DP_LANE_ALIGN_STATUS_UPDATED);
365 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
366 return false;
367 for (lane = 0; lane < lane_count; lane++) {
368 lane_status = dp_get_lane_status(link_status, lane);
369 if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
370 return false;
371 }
372 return true;
373 }
374
/* Voltage-swing level requested by the sink for @lane, repositioned
 * into DP_TRAINING_LANEx_SET register format. */
static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
					int lane)

{
	int reg = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int shift = (lane & 1) ? DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT
			       : DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT;
	u8 level = (dp_link_status(link_status, reg) >> shift) & 0x3;

	return level << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}
387
/* Pre-emphasis level requested by the sink for @lane, repositioned
 * into DP_TRAINING_LANEx_SET register format. */
static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
					     int lane)
{
	int reg = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int shift = (lane & 1) ? DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT
			       : DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT;
	u8 level = (dp_link_status(link_status, reg) >> shift) & 0x3;

	return level << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}
399
400 #define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
401 #define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5
402
dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],int lane_count,u8 train_set[4])403 static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
404 int lane_count,
405 u8 train_set[4])
406 {
407 u8 v = 0;
408 u8 p = 0;
409 int lane;
410
411 for (lane = 0; lane < lane_count; lane++) {
412 u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
413 u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);
414
415 DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
416 lane,
417 voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
418 pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
419
420 if (this_v > v)
421 v = this_v;
422 if (this_p > p)
423 p = this_p;
424 }
425
426 if (v >= DP_VOLTAGE_MAX)
427 v |= DP_TRAIN_MAX_SWING_REACHED;
428
429 if (p >= DP_PRE_EMPHASIS_MAX)
430 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
431
432 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
433 voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
434 pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
435
436 for (lane = 0; lane < 4; lane++)
437 train_set[lane] = v | p;
438 }
439
/* Convert bits per color (from the EDID) to bits per pixel.
 *
 * 24 bpp (8 bpc * 3 components) is currently forced regardless of the
 * reported bpc; the bpc-based computation (bpc * 3, with 0 meaning
 * "unknown, assume 24") was deliberately disabled here, so keep
 * returning 24 until higher depths are actually supported.
 */
static int convert_bpc_to_bpp(int bpc)
{
	return 24;
}
452
/* Max pixel clock the link can carry at the given rate, lane count,
 * and bits per pixel: link_rate * lanes * 8 bits of payload per
 * symbol budget, divided by the per-pixel bit cost. */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	int link_bandwidth = link_rate * lane_num * 8;

	return link_bandwidth / bpp;
}
460
/* Decode the sink's DP_MAX_LINK_RATE DPCD byte into a link rate value;
 * unrecognized codes fall back to the 1.62 Gbps rate. */
static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
{
	u8 bw = dpcd[DP_MAX_LINK_RATE];

	if (bw == DP_LINK_BW_2_7)
		return 270000;
	if (bw == DP_LINK_BW_5_4)
		return 540000;
	/* DP_LINK_BW_1_62 and anything unknown */
	return 162000;
}
473
/* Maximum lane count advertised by the sink in its DPCD. */
static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
{
	u8 max_lanes = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;

	return max_lanes;
}
478
dp_get_dp_link_rate_coded(int link_rate)479 static u8 dp_get_dp_link_rate_coded(int link_rate)
480 {
481 switch (link_rate) {
482 case 162000:
483 default:
484 return DP_LINK_BW_1_62;
485 case 270000:
486 return DP_LINK_BW_2_7;
487 case 540000:
488 return DP_LINK_BW_5_4;
489 }
490 }
491
492 /***** radeon specific DP functions *****/
493
494 /* First get the min lane# when low rate is used according to pixel clock
495 * (prefer low rate), second check max lane# supported by DP panel,
496 * if the max lane# < low rate lane# then use max lane# instead.
497 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
					u8 dpcd[DP_DPCD_SIZE],
					int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int max_link_rate = dp_get_max_link_rate(dpcd);
	int max_lane_num = dp_get_max_lane_number(dpcd);
	int lane_num;
	int max_dp_pix_clock;

	/* walk 1 -> 2 -> 4 lanes at the sink's max link rate and stop at
	 * the first width that carries pix_clock.  NOTE(review): the '<'
	 * condition means max_lane_num itself is never tested; when no
	 * narrower width fits we fall out with lane_num == max_lane_num
	 * even if that may still be insufficient — confirm intended. */
	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
		if (pix_clock <= max_dp_pix_clock)
			break;
	}

	return lane_num;
}
516
/* Pick the lowest standard link rate (162000/270000/540000) that can
 * carry @pix_clock at the lane count chosen for this mode; falls back
 * to the sink's maximum rate when nothing fits. */
static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE],
				       int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int lane_num, max_pix_clock;

	/* the Nutmeg DP bridge only runs at the 2.7 Gbps rate */
	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		return 270000;

	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 162000;
	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 270000;
	/* the 5.4 Gbps rate requires DP 1.2 capable hardware */
	if (radeon_connector_is_dp12_capable(connector)) {
		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
		if (pix_clock <= max_pix_clock)
			return 540000;
	}

	/* nothing fits: report the sink's maximum and hope for the best */
	return dp_get_max_link_rate(dpcd);
}
543
/* Issue one command through the legacy DPEncoderService atom table and
 * return its status byte. */
static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucAction = action;
	args.ucLinkClock = dp_clock / 10;	/* table wants 10 kHz units */
	args.ucConfig = ucconfig;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	return args.ucStatus;
}
561
radeon_dp_getsinktype(struct radeon_connector * radeon_connector)562 u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
563 {
564 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
565 struct drm_device *dev = radeon_connector->base.dev;
566 struct radeon_device *rdev = dev->dev_private;
567
568 return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
569 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
570 }
571
radeon_dp_getdpcd(struct radeon_connector * radeon_connector)572 bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
573 {
574 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
575 u8 msg[25];
576 int ret, i;
577
578 ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
579 if (ret > 0) {
580 memcpy(dig_connector->dpcd, msg, 8);
581 DRM_DEBUG_KMS("DPCD: ");
582 for (i = 0; i < 8; i++)
583 DRM_DEBUG_KMS("%02x ", msg[i]);
584 DRM_DEBUG_KMS("\n");
585 return true;
586 }
587 dig_connector->dpcd[0] = 0;
588 return false;
589 }
590
/* Determine which DP panel mode (external DP vs internal DP1/DP2) the
 * encoder should use for @connector.  Pre-DCE4 parts always use
 * external DP mode. */
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
	else if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
		 ENCODER_OBJECT_ID_TRAVIS) {
		/* Travis bridge: probe 6 id bytes at DPCD 0x503..0x508;
		 * the compared values are ASCII "sivarT" ("Travis"
		 * reversed) — presumably a firmware signature selecting
		 * DP1 vs DP2 mode, confirm against bridge docs */
		u8 id[6];
		int i;
		for (i = 0; i < 6; i++)
			id[i] = radeon_read_dpcd_reg(radeon_connector, 0x503 + i);
		if (id[0] == 0x73 &&
		    id[1] == 0x69 &&
		    id[2] == 0x76 &&
		    id[3] == 0x61 &&
		    id[4] == 0x72 &&
		    id[5] == 0x54)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
		else
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		/* eDP: bit 0 of DP_EDP_CONFIGURATION_CAP selects DP2 mode */
		u8 tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
		if (tmp & 1)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	}

	return panel_mode;
}
628
radeon_dp_set_link_config(struct drm_connector * connector,struct drm_display_mode * mode)629 void radeon_dp_set_link_config(struct drm_connector *connector,
630 struct drm_display_mode *mode)
631 {
632 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
633 struct radeon_connector_atom_dig *dig_connector;
634
635 if (!radeon_connector->con_priv)
636 return;
637 dig_connector = radeon_connector->con_priv;
638
639 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
640 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
641 dig_connector->dp_clock =
642 radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
643 dig_connector->dp_lane_count =
644 radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
645 }
646 }
647
radeon_dp_mode_valid_helper(struct drm_connector * connector,struct drm_display_mode * mode)648 int radeon_dp_mode_valid_helper(struct drm_connector *connector,
649 struct drm_display_mode *mode)
650 {
651 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
652 struct radeon_connector_atom_dig *dig_connector;
653 int dp_clock;
654
655 if (!radeon_connector->con_priv)
656 return MODE_CLOCK_HIGH;
657 dig_connector = radeon_connector->con_priv;
658
659 dp_clock =
660 radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
661
662 if ((dp_clock == 540000) &&
663 (!radeon_connector_is_dp12_capable(connector)))
664 return MODE_CLOCK_HIGH;
665
666 return MODE_OK;
667 }
668
radeon_dp_get_link_status(struct radeon_connector * radeon_connector,u8 link_status[DP_LINK_STATUS_SIZE])669 static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
670 u8 link_status[DP_LINK_STATUS_SIZE])
671 {
672 int ret;
673 ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
674 link_status, DP_LINK_STATUS_SIZE, 100);
675 if (ret <= 0) {
676 return false;
677 }
678
679 DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
680 link_status[0], link_status[1], link_status[2],
681 link_status[3], link_status[4], link_status[5]);
682 return true;
683 }
684
radeon_dp_needs_link_train(struct radeon_connector * radeon_connector)685 bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
686 {
687 u8 link_status[DP_LINK_STATUS_SIZE];
688 struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
689
690 if (!radeon_dp_get_link_status(radeon_connector, link_status))
691 return false;
692 if (dp_channel_eq_ok(link_status, dig->dp_lane_count))
693 return false;
694 return true;
695 }
696
/* State shared across the stages of one DP link-training sequence. */
struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	int enc_id;		/* ATOM_DP_CONFIG_* encoder/link selection */
	int dp_clock;		/* negotiated link clock */
	int dp_lane_count;	/* negotiated lane count */
	int rd_interval;	/* DP_TRAINING_AUX_RD_INTERVAL from the sink */
	bool tp3_supported;	/* sink + asic support training pattern 3 */
	u8 dpcd[8];		/* cached DPCD capability bytes */
	u8 train_set[4];	/* per-lane voltage swing / pre-emphasis */
	u8 link_status[DP_LINK_STATUS_SIZE];	/* last status readback */
	u8 tries;		/* retry counter for the training loops */
	bool use_dpencoder;	/* use legacy DPEncoderService atom table */
};
713
/* Push the current train_set (voltage swing / pre-emphasis) to both
 * ends of the link: the transmitter first, then the sink's
 * DP_TRAINING_LANEx_SET registers. */
static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
				   dp_info->train_set, dp_info->dp_lane_count, 0);
}
725
radeon_dp_set_tp(struct radeon_dp_link_train_info * dp_info,int tp)726 static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
727 {
728 int rtp = 0;
729
730 /* set training pattern on the source */
731 if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
732 switch (tp) {
733 case DP_TRAINING_PATTERN_1:
734 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
735 break;
736 case DP_TRAINING_PATTERN_2:
737 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
738 break;
739 case DP_TRAINING_PATTERN_3:
740 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
741 break;
742 }
743 atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
744 } else {
745 switch (tp) {
746 case DP_TRAINING_PATTERN_1:
747 rtp = 0;
748 break;
749 case DP_TRAINING_PATTERN_2:
750 rtp = 1;
751 break;
752 }
753 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
754 dp_info->dp_clock, dp_info->enc_id, rtp);
755 }
756
757 /* enable training pattern on the sink */
758 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
759 }
760
/* Prepare both ends of the link for training: power up and configure
 * the sink (downspread, eDP config, lane count, link rate), issue
 * LINK_TRAINING_START on the source, and make sure the sink's training
 * pattern is disabled.  Always returns 0. */
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
	u8 tmp;

	/* power up the sink (DPCD rev 1.1+ has the SET_POWER register) */
	if (dp_info->dpcd[0] >= 0x11)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_SET_POWER, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink */
	if (dp_info->dpcd[3] & 0x1)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, 0);

	/* internal DP2-mode eDP panels get DP_EDP_CONFIGURATION_SET = 1 */
	if ((dp_info->connector->connector_type == DRM_MODE_CONNECTOR_eDP) &&
	    (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)) {
		radeon_write_dpcd_reg(dp_info->radeon_connector, DP_EDP_CONFIGURATION_SET, 1);
	}

	/* set the lane count on the sink, with enhanced framing if both
	 * sides support it */
	tmp = dp_info->dp_lane_count;
	if (dp_info->dpcd[DP_DPCD_REV] >= 0x11 &&
	    dp_info->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	return 0;
}
811
/* End the training sequence: disable the training pattern on the sink,
 * then tell the source training is complete.  Always returns 0. */
static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	/* brief delay before tearing training down — presumably to let
	 * the last training writes settle; confirm */
	udelay(400);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}
831
/* Clock-recovery phase of link training (training pattern 1).
 * Repeatedly applies the drive settings the sink requests until every
 * lane reports CR done, max voltage swing is reached on all lanes, or
 * the same voltage has failed 5 consecutive times.
 * Returns 0 on success, -1 on failure. */
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	/* start from the lowest drive settings */
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;		/* sentinel: no previous voltage yet */
	while (1) {
		/* honor the sink's requested status-read interval */
		if (dp_info->rd_interval == 0)
			udelay(100);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		/* give up once every lane is already at max voltage swing */
		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		/* bail when the same voltage level has failed 5 times */
		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			      DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
900
/* Channel-equalization phase of link training (pattern 3 when the
 * sink/asic support it, otherwise pattern 2).
 * Returns 0 on success, -1 on failure. */
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		/* honor the sink's requested status-read interval */
		if (dp_info->rd_interval == 0)
			udelay(400);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		/* NOTE(review): '>' permits a 6th adjust pass despite the
		 * message; long-standing behavior, left as-is */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			      >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
953
/* Run the full DP link-training sequence for @encoder/@connector:
 * init -> clock recovery -> channel equalization -> finish.  The
 * finish step runs even when an earlier stage fails, so the training
 * pattern is always shut down. */
void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	/* only DP and eDP sinks get trained */
	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService newer than 1.1 can't program properly the
	 * training pattern. When facing such version use the
	 * DIGXEncoderControl (X== 1 | 2)
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		if (crev > 1) {
			dp_info.use_dpencoder = false;
		}
	}

	/* encoder/link selection bits for the legacy atom table */
	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	/* sink capabilities that shape the training loops */
	dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
		dp_info.tp3_supported = true;
	else
		dp_info.tp3_supported = false;

	memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.radeon_connector = radeon_connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}
1027