1 // SPDX-License-Identifier: MIT
2 /*
3 * Copyright © 2020 Intel Corporation
4 *
5 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
6 */
7
8 #include <linux/string_helpers.h>
9
10 #include "g4x_dp.h"
11 #include "i915_reg.h"
12 #include "intel_audio.h"
13 #include "intel_backlight.h"
14 #include "intel_connector.h"
15 #include "intel_crtc.h"
16 #include "intel_de.h"
17 #include "intel_display_power.h"
18 #include "intel_display_types.h"
19 #include "intel_dp.h"
20 #include "intel_dp_aux.h"
21 #include "intel_dp_link_training.h"
22 #include "intel_dpio_phy.h"
23 #include "intel_fifo_underrun.h"
24 #include "intel_hdmi.h"
25 #include "intel_hotplug.h"
26 #include "intel_pch_display.h"
27 #include "intel_pps.h"
28 #include "vlv_sideband.h"
29
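/*
 * Editor's note (assumed summary, not from the original file): the tables
 * below are pre-computed DPLL divider sets for the two DP link rates handled
 * here. .dot is the port clock in kHz, i.e. 162000 for the 1.62 GHz (RBR)
 * and 270000 for the 2.7 GHz (HBR) link rate; g4x_dp_set_clock() picks the
 * entry matching pipe_config->port_clock.
 */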
30 static const struct dpll g4x_dpll[] = {
31 { .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, },
32 { .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, },
33 };
34
35 static const struct dpll pch_dpll[] = {
36 { .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, },
37 { .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, },
38 };
39
40 static const struct dpll vlv_dpll[] = {
41 { .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, },
42 { .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, },
43 };
44
45 static const struct dpll chv_dpll[] = {
46 /* m2 is .22 binary fixed point */
47 { .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
48 { .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
49 };
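/*
 * Editor's note, as a quick check of the .22 fixed point encoding above:
 * 0x6c00000 / 2^22 = 27.0 and 0x819999a / 2^22 ≈ 32.4.
 */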
50
51 const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
52 {
53 return IS_CHERRYVIEW(i915) ? &chv_dpll[0] : &vlv_dpll[0];
54 }
55
56 void g4x_dp_set_clock(struct intel_encoder *encoder,
57 struct intel_crtc_state *pipe_config)
58 {
59 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
60 const struct dpll *divisor = NULL;
61 int i, count = 0;
62
63 if (IS_G4X(dev_priv)) {
64 divisor = g4x_dpll;
65 count = ARRAY_SIZE(g4x_dpll);
66 } else if (HAS_PCH_SPLIT(dev_priv)) {
67 divisor = pch_dpll;
68 count = ARRAY_SIZE(pch_dpll);
69 } else if (IS_CHERRYVIEW(dev_priv)) {
70 divisor = chv_dpll;
71 count = ARRAY_SIZE(chv_dpll);
72 } else if (IS_VALLEYVIEW(dev_priv)) {
73 divisor = vlv_dpll;
74 count = ARRAY_SIZE(vlv_dpll);
75 }
76
77 if (divisor && count) {
78 for (i = 0; i < count; i++) {
79 if (pipe_config->port_clock == divisor[i].dot) {
80 pipe_config->dpll = divisor[i];
81 pipe_config->clock_set = true;
82 break;
83 }
84 }
85 }
86 }
87
88 static void intel_dp_prepare(struct intel_encoder *encoder,
89 const struct intel_crtc_state *pipe_config)
90 {
91 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
92 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
93 enum port port = encoder->port;
94 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
95 const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
96
97 intel_dp_set_link_params(intel_dp,
98 pipe_config->port_clock,
99 pipe_config->lane_count);
100
101 /*
102 * There are four kinds of DP registers:
103 * IBX PCH
104 * SNB CPU
105 * IVB CPU
106 * CPT PCH
107 *
108 * IBX PCH and CPU are the same for almost everything,
109 * except that the CPU DP PLL is configured in this
110 * register
111 *
112 * CPT PCH is quite different, having many bits moved
113 * to the TRANS_DP_CTL register instead. That
114 * configuration happens (oddly) in ilk_pch_enable
115 */
116
117 /* Preserve the BIOS-computed detected bit. This is
118 * supposed to be read-only.
119 */
120 intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;
121
122 /* Handle DP bits in common between all three register formats */
123 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
124 intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);
125
126 /* Split out the IBX/CPU vs CPT settings */
127
128 if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
129 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
130 intel_dp->DP |= DP_SYNC_HS_HIGH;
131 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
132 intel_dp->DP |= DP_SYNC_VS_HIGH;
133 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
134
135 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
136 intel_dp->DP |= DP_ENHANCED_FRAMING;
137
138 intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
139 } else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
140 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
141
142 intel_de_rmw(dev_priv, TRANS_DP_CTL(crtc->pipe),
143 TRANS_DP_ENH_FRAMING,
144 pipe_config->enhanced_framing ?
145 TRANS_DP_ENH_FRAMING : 0);
146 } else {
147 if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
148 intel_dp->DP |= DP_COLOR_RANGE_16_235;
149
150 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
151 intel_dp->DP |= DP_SYNC_HS_HIGH;
152 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
153 intel_dp->DP |= DP_SYNC_VS_HIGH;
154 intel_dp->DP |= DP_LINK_TRAIN_OFF;
155
156 if (pipe_config->enhanced_framing)
157 intel_dp->DP |= DP_ENHANCED_FRAMING;
158
159 if (IS_CHERRYVIEW(dev_priv))
160 intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
161 else
162 intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
163 }
164 }
165
166 static void assert_dp_port(struct intel_dp *intel_dp, bool state)
167 {
168 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
169 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
170 bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;
171
172 I915_STATE_WARN(dev_priv, cur_state != state,
173 "[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
174 dig_port->base.base.base.id, dig_port->base.base.name,
175 str_on_off(state), str_on_off(cur_state));
176 }
177 #define assert_dp_port_disabled(d) assert_dp_port((d), false)
178
179 static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
180 {
181 bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;
182
183 I915_STATE_WARN(dev_priv, cur_state != state,
184 "eDP PLL state assertion failure (expected %s, current %s)\n",
185 str_on_off(state), str_on_off(cur_state));
186 }
187 #define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
188 #define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
189
190 static void ilk_edp_pll_on(struct intel_dp *intel_dp,
191 const struct intel_crtc_state *pipe_config)
192 {
193 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
194 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
195
196 assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
197 assert_dp_port_disabled(intel_dp);
198 assert_edp_pll_disabled(dev_priv);
199
200 drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
201 pipe_config->port_clock);
202
203 intel_dp->DP &= ~DP_PLL_FREQ_MASK;
204
205 if (pipe_config->port_clock == 162000)
206 intel_dp->DP |= DP_PLL_FREQ_162MHZ;
207 else
208 intel_dp->DP |= DP_PLL_FREQ_270MHZ;
209
210 intel_de_write(dev_priv, DP_A, intel_dp->DP);
211 intel_de_posting_read(dev_priv, DP_A);
212 udelay(500);
213
214 /*
215 * [DevILK] Work around required when enabling DP PLL
216 * while a pipe is enabled going to FDI:
217 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
218 * 2. Program DP PLL enable
219 */
220 if (IS_IRONLAKE(dev_priv))
221 intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);
222
223 intel_dp->DP |= DP_PLL_ENABLE;
224
225 intel_de_write(dev_priv, DP_A, intel_dp->DP);
226 intel_de_posting_read(dev_priv, DP_A);
227 udelay(200);
228 }
229
230 static void ilk_edp_pll_off(struct intel_dp *intel_dp,
231 const struct intel_crtc_state *old_crtc_state)
232 {
233 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
234 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
235
236 assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
237 assert_dp_port_disabled(intel_dp);
238 assert_edp_pll_enabled(dev_priv);
239
240 drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");
241
242 intel_dp->DP &= ~DP_PLL_ENABLE;
243
244 intel_de_write(dev_priv, DP_A, intel_dp->DP);
245 intel_de_posting_read(dev_priv, DP_A);
246 udelay(200);
247 }
248
249 static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
250 enum port port, enum pipe *pipe)
251 {
252 enum pipe p;
253
254 for_each_pipe(dev_priv, p) {
255 u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));
256
257 if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
258 *pipe = p;
259 return true;
260 }
261 }
262
263 drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
264 port_name(port));
265
266 /* must initialize pipe to something for the asserts */
267 *pipe = PIPE_A;
268
269 return false;
270 }
271
272 bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
273 i915_reg_t dp_reg, enum port port,
274 enum pipe *pipe)
275 {
276 bool ret;
277 u32 val;
278
279 val = intel_de_read(dev_priv, dp_reg);
280
281 ret = val & DP_PORT_EN;
282
283 /* asserts want to know the pipe even if the port is disabled */
284 if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
285 *pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
286 else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
287 ret &= cpt_dp_port_selected(dev_priv, port, pipe);
288 else if (IS_CHERRYVIEW(dev_priv))
289 *pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
290 else
291 *pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;
292
293 return ret;
294 }
295
296 static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
297 enum pipe *pipe)
298 {
299 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
300 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
301 intel_wakeref_t wakeref;
302 bool ret;
303
304 wakeref = intel_display_power_get_if_enabled(dev_priv,
305 encoder->power_domain);
306 if (!wakeref)
307 return false;
308
309 ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
310 encoder->port, pipe);
311
312 intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
313
314 return ret;
315 }
316
317 static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
318 {
319 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
320
321 if (crtc_state->has_pch_encoder) {
322 intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
323 intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
324 } else {
325 intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
326 &crtc_state->dp_m_n);
327 intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
328 &crtc_state->dp_m2_n2);
329 }
330 }
331
332 static void intel_dp_get_config(struct intel_encoder *encoder,
333 struct intel_crtc_state *pipe_config)
334 {
335 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
336 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
337 u32 tmp, flags = 0;
338 enum port port = encoder->port;
339 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
340
341 if (encoder->type == INTEL_OUTPUT_EDP)
342 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
343 else
344 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);
345
346 tmp = intel_de_read(dev_priv, intel_dp->output_reg);
347
348 pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;
349
350 if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
351 u32 trans_dp = intel_de_read(dev_priv,
352 TRANS_DP_CTL(crtc->pipe));
353
354 if (trans_dp & TRANS_DP_ENH_FRAMING)
355 pipe_config->enhanced_framing = true;
356
357 if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
358 flags |= DRM_MODE_FLAG_PHSYNC;
359 else
360 flags |= DRM_MODE_FLAG_NHSYNC;
361
362 if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
363 flags |= DRM_MODE_FLAG_PVSYNC;
364 else
365 flags |= DRM_MODE_FLAG_NVSYNC;
366 } else {
367 if (tmp & DP_ENHANCED_FRAMING)
368 pipe_config->enhanced_framing = true;
369
370 if (tmp & DP_SYNC_HS_HIGH)
371 flags |= DRM_MODE_FLAG_PHSYNC;
372 else
373 flags |= DRM_MODE_FLAG_NHSYNC;
374
375 if (tmp & DP_SYNC_VS_HIGH)
376 flags |= DRM_MODE_FLAG_PVSYNC;
377 else
378 flags |= DRM_MODE_FLAG_NVSYNC;
379 }
380
381 pipe_config->hw.adjusted_mode.flags |= flags;
382
383 if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
384 pipe_config->limited_color_range = true;
385
386 pipe_config->lane_count =
387 ((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;
388
389 g4x_dp_get_m_n(pipe_config);
390
391 if (port == PORT_A) {
392 if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
393 pipe_config->port_clock = 162000;
394 else
395 pipe_config->port_clock = 270000;
396 }
397
398 pipe_config->hw.adjusted_mode.crtc_clock =
399 intel_dotclock_calculate(pipe_config->port_clock,
400 &pipe_config->dp_m_n);
401
402 if (intel_dp_is_edp(intel_dp))
403 intel_edp_fixup_vbt_bpp(encoder, pipe_config->pipe_bpp);
404
405 intel_audio_codec_get_config(encoder, pipe_config);
406 }
407
408 static void
409 intel_dp_link_down(struct intel_encoder *encoder,
410 const struct intel_crtc_state *old_crtc_state)
411 {
412 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
413 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
414 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
415 enum port port = encoder->port;
416
417 if (drm_WARN_ON(&dev_priv->drm,
418 (intel_de_read(dev_priv, intel_dp->output_reg) &
419 DP_PORT_EN) == 0))
420 return;
421
422 drm_dbg_kms(&dev_priv->drm, "\n");
423
424 if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
425 (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
426 intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
427 intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
428 } else {
429 intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
430 intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
431 }
432 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
433 intel_de_posting_read(dev_priv, intel_dp->output_reg);
434
435 intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
436 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
437 intel_de_posting_read(dev_priv, intel_dp->output_reg);
438
439 /*
440 * HW workaround for IBX, we need to move the port
441 * to transcoder A after disabling it to allow the
442 * matching HDMI port to be enabled on transcoder A.
443 */
444 if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
445 /*
446 * We get CPU/PCH FIFO underruns on the other pipe when
447 * doing the workaround. Sweep them under the rug.
448 */
449 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
450 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);
451
452 /* always enable with pattern 1 (as per spec) */
453 intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
454 intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
455 DP_LINK_TRAIN_PAT_1;
456 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
457 intel_de_posting_read(dev_priv, intel_dp->output_reg);
458
459 intel_dp->DP &= ~DP_PORT_EN;
460 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
461 intel_de_posting_read(dev_priv, intel_dp->output_reg);
462
463 intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
464 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
465 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
466 }
467
468 msleep(intel_dp->pps.panel_power_down_delay);
469
470 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
471 intel_wakeref_t wakeref;
472
473 with_intel_pps_lock(intel_dp, wakeref)
474 intel_dp->pps.active_pipe = INVALID_PIPE;
475 }
476 }
477
478 static void intel_disable_dp(struct intel_atomic_state *state,
479 struct intel_encoder *encoder,
480 const struct intel_crtc_state *old_crtc_state,
481 const struct drm_connector_state *old_conn_state)
482 {
483 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
484
485 intel_dp->link_trained = false;
486
487 intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);
488
489 /*
490 * Make sure the panel is off before trying to change the mode.
491 * But also ensure that we have vdd while we switch off the panel.
492 */
493 intel_pps_vdd_on(intel_dp);
494 intel_edp_backlight_off(old_conn_state);
495 intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
496 intel_pps_off(intel_dp);
497 }
498
499 static void g4x_disable_dp(struct intel_atomic_state *state,
500 struct intel_encoder *encoder,
501 const struct intel_crtc_state *old_crtc_state,
502 const struct drm_connector_state *old_conn_state)
503 {
504 intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
505 }
506
507 static void vlv_disable_dp(struct intel_atomic_state *state,
508 struct intel_encoder *encoder,
509 const struct intel_crtc_state *old_crtc_state,
510 const struct drm_connector_state *old_conn_state)
511 {
512 intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
513 }
514
515 static void g4x_post_disable_dp(struct intel_atomic_state *state,
516 struct intel_encoder *encoder,
517 const struct intel_crtc_state *old_crtc_state,
518 const struct drm_connector_state *old_conn_state)
519 {
520 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
521 enum port port = encoder->port;
522
523 /*
524 * Bspec does not list a specific disable sequence for g4x DP.
525 * Follow the ilk+ sequence (disable pipe before the port) for
526 * g4x DP as it does not suffer from underruns like the normal
527 * g4x modeset sequence (disable pipe after the port).
528 */
529 intel_dp_link_down(encoder, old_crtc_state);
530
531 /* Only ilk+ has port A */
532 if (port == PORT_A)
533 ilk_edp_pll_off(intel_dp, old_crtc_state);
534 }
535
536 static void vlv_post_disable_dp(struct intel_atomic_state *state,
537 struct intel_encoder *encoder,
538 const struct intel_crtc_state *old_crtc_state,
539 const struct drm_connector_state *old_conn_state)
540 {
541 intel_dp_link_down(encoder, old_crtc_state);
542 }
543
544 static void chv_post_disable_dp(struct intel_atomic_state *state,
545 struct intel_encoder *encoder,
546 const struct intel_crtc_state *old_crtc_state,
547 const struct drm_connector_state *old_conn_state)
548 {
549 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
550
551 intel_dp_link_down(encoder, old_crtc_state);
552
553 vlv_dpio_get(dev_priv);
554
555 /* Assert data lane reset */
556 chv_data_lane_soft_reset(encoder, old_crtc_state, true);
557
558 vlv_dpio_put(dev_priv);
559 }
560
561 static void
562 cpt_set_link_train(struct intel_dp *intel_dp,
563 const struct intel_crtc_state *crtc_state,
564 u8 dp_train_pat)
565 {
566 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
567
568 intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
569
570 switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
571 case DP_TRAINING_PATTERN_DISABLE:
572 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
573 break;
574 case DP_TRAINING_PATTERN_1:
575 intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
576 break;
577 case DP_TRAINING_PATTERN_2:
578 intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
579 break;
580 default:
581 MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
582 return;
583 }
584
585 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
586 intel_de_posting_read(dev_priv, intel_dp->output_reg);
587 }
588
589 static void
590 g4x_set_link_train(struct intel_dp *intel_dp,
591 const struct intel_crtc_state *crtc_state,
592 u8 dp_train_pat)
593 {
594 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
595
596 intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
597
598 switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
599 case DP_TRAINING_PATTERN_DISABLE:
600 intel_dp->DP |= DP_LINK_TRAIN_OFF;
601 break;
602 case DP_TRAINING_PATTERN_1:
603 intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
604 break;
605 case DP_TRAINING_PATTERN_2:
606 intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
607 break;
608 default:
609 MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
610 return;
611 }
612
613 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
614 intel_de_posting_read(dev_priv, intel_dp->output_reg);
615 }
616
617 static void intel_dp_enable_port(struct intel_dp *intel_dp,
618 const struct intel_crtc_state *crtc_state)
619 {
620 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
621
622 /* enable with pattern 1 (as per spec) */
623
624 intel_dp_program_link_training_pattern(intel_dp, crtc_state,
625 DP_PHY_DPRX, DP_TRAINING_PATTERN_1);
626
627 /*
628 * Magic for VLV/CHV. We _must_ first set up the register
629 * without actually enabling the port, and then do another
630 * write to enable the port. Otherwise link training will
631 * fail when the power sequencer is freshly used for this port.
632 */
633 intel_dp->DP |= DP_PORT_EN;
634 if (crtc_state->has_audio)
635 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
636
637 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
638 intel_de_posting_read(dev_priv, intel_dp->output_reg);
639 }
640
641 static void intel_enable_dp(struct intel_atomic_state *state,
642 struct intel_encoder *encoder,
643 const struct intel_crtc_state *pipe_config,
644 const struct drm_connector_state *conn_state)
645 {
646 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
647 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
648 u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
649 intel_wakeref_t wakeref;
650
651 if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
652 return;
653
654 with_intel_pps_lock(intel_dp, wakeref) {
655 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
656 vlv_pps_init(encoder, pipe_config);
657
658 intel_dp_enable_port(intel_dp, pipe_config);
659
660 intel_pps_vdd_on_unlocked(intel_dp);
661 intel_pps_on_unlocked(intel_dp);
662 intel_pps_vdd_off_unlocked(intel_dp, true);
663 }
664
665 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
666 unsigned int lane_mask = 0x0;
667
668 if (IS_CHERRYVIEW(dev_priv))
669 lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);
670
671 vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
672 lane_mask);
673 }
674
675 intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
676 intel_dp_configure_protocol_converter(intel_dp, pipe_config);
677 intel_dp_check_frl_training(intel_dp);
678 intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
679 intel_dp_start_link_train(intel_dp, pipe_config);
680 intel_dp_stop_link_train(intel_dp, pipe_config);
681 }
682
683 static void g4x_enable_dp(struct intel_atomic_state *state,
684 struct intel_encoder *encoder,
685 const struct intel_crtc_state *pipe_config,
686 const struct drm_connector_state *conn_state)
687 {
688 intel_enable_dp(state, encoder, pipe_config, conn_state);
689 intel_audio_codec_enable(encoder, pipe_config, conn_state);
690 intel_edp_backlight_on(pipe_config, conn_state);
691 }
692
693 static void vlv_enable_dp(struct intel_atomic_state *state,
694 struct intel_encoder *encoder,
695 const struct intel_crtc_state *pipe_config,
696 const struct drm_connector_state *conn_state)
697 {
698 intel_audio_codec_enable(encoder, pipe_config, conn_state);
699 intel_edp_backlight_on(pipe_config, conn_state);
700 }
701
702 static void g4x_pre_enable_dp(struct intel_atomic_state *state,
703 struct intel_encoder *encoder,
704 const struct intel_crtc_state *pipe_config,
705 const struct drm_connector_state *conn_state)
706 {
707 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
708 enum port port = encoder->port;
709
710 intel_dp_prepare(encoder, pipe_config);
711
712 /* Only ilk+ has port A */
713 if (port == PORT_A)
714 ilk_edp_pll_on(intel_dp, pipe_config);
715 }
716
717 static void vlv_pre_enable_dp(struct intel_atomic_state *state,
718 struct intel_encoder *encoder,
719 const struct intel_crtc_state *pipe_config,
720 const struct drm_connector_state *conn_state)
721 {
722 vlv_phy_pre_encoder_enable(encoder, pipe_config);
723
724 intel_enable_dp(state, encoder, pipe_config, conn_state);
725 }
726
727 static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
728 struct intel_encoder *encoder,
729 const struct intel_crtc_state *pipe_config,
730 const struct drm_connector_state *conn_state)
731 {
732 intel_dp_prepare(encoder, pipe_config);
733
734 vlv_phy_pre_pll_enable(encoder, pipe_config);
735 }
736
737 static void chv_pre_enable_dp(struct intel_atomic_state *state,
738 struct intel_encoder *encoder,
739 const struct intel_crtc_state *pipe_config,
740 const struct drm_connector_state *conn_state)
741 {
742 chv_phy_pre_encoder_enable(encoder, pipe_config);
743
744 intel_enable_dp(state, encoder, pipe_config, conn_state);
745
746 /* Second common lane will stay alive on its own now */
747 chv_phy_release_cl2_override(encoder);
748 }
749
750 static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
751 struct intel_encoder *encoder,
752 const struct intel_crtc_state *pipe_config,
753 const struct drm_connector_state *conn_state)
754 {
755 intel_dp_prepare(encoder, pipe_config);
756
757 chv_phy_pre_pll_enable(encoder, pipe_config);
758 }
759
760 static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
761 struct intel_encoder *encoder,
762 const struct intel_crtc_state *old_crtc_state,
763 const struct drm_connector_state *old_conn_state)
764 {
765 chv_phy_post_pll_disable(encoder, old_crtc_state);
766 }
767
768 static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
769 const struct intel_crtc_state *crtc_state)
770 {
771 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
772 }
773
774 static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
775 const struct intel_crtc_state *crtc_state)
776 {
777 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
778 }
779
780 static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
781 {
782 return DP_TRAIN_PRE_EMPH_LEVEL_2;
783 }
784
785 static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
786 {
787 return DP_TRAIN_PRE_EMPH_LEVEL_3;
788 }
789
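/*
 * Editor's note (assumed description): the demph/preemph/uniqtranscale
 * values below are opaque DPIO PHY tuning constants. Each supported
 * (voltage swing, pre-emphasis) pair from the DP training set maps onto a
 * fixed set of register values, which are handed to
 * vlv_set_phy_signal_level(); unsupported combinations simply return.
 */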
790 static void vlv_set_signal_levels(struct intel_encoder *encoder,
791 const struct intel_crtc_state *crtc_state)
792 {
793 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
794 unsigned long demph_reg_value, preemph_reg_value,
795 uniqtranscale_reg_value;
796 u8 train_set = intel_dp->train_set[0];
797
798 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
799 case DP_TRAIN_PRE_EMPH_LEVEL_0:
800 preemph_reg_value = 0x0004000;
801 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
802 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
803 demph_reg_value = 0x2B405555;
804 uniqtranscale_reg_value = 0x552AB83A;
805 break;
806 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
807 demph_reg_value = 0x2B404040;
808 uniqtranscale_reg_value = 0x5548B83A;
809 break;
810 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
811 demph_reg_value = 0x2B245555;
812 uniqtranscale_reg_value = 0x5560B83A;
813 break;
814 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
815 demph_reg_value = 0x2B405555;
816 uniqtranscale_reg_value = 0x5598DA3A;
817 break;
818 default:
819 return;
820 }
821 break;
822 case DP_TRAIN_PRE_EMPH_LEVEL_1:
823 preemph_reg_value = 0x0002000;
824 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
825 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
826 demph_reg_value = 0x2B404040;
827 uniqtranscale_reg_value = 0x5552B83A;
828 break;
829 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
830 demph_reg_value = 0x2B404848;
831 uniqtranscale_reg_value = 0x5580B83A;
832 break;
833 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
834 demph_reg_value = 0x2B404040;
835 uniqtranscale_reg_value = 0x55ADDA3A;
836 break;
837 default:
838 return;
839 }
840 break;
841 case DP_TRAIN_PRE_EMPH_LEVEL_2:
842 preemph_reg_value = 0x0000000;
843 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
844 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
845 demph_reg_value = 0x2B305555;
846 uniqtranscale_reg_value = 0x5570B83A;
847 break;
848 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
849 demph_reg_value = 0x2B2B4040;
850 uniqtranscale_reg_value = 0x55ADDA3A;
851 break;
852 default:
853 return;
854 }
855 break;
856 case DP_TRAIN_PRE_EMPH_LEVEL_3:
857 preemph_reg_value = 0x0006000;
858 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
859 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
860 demph_reg_value = 0x1B405555;
861 uniqtranscale_reg_value = 0x55ADDA3A;
862 break;
863 default:
864 return;
865 }
866 break;
867 default:
868 return;
869 }
870
871 vlv_set_phy_signal_level(encoder, crtc_state,
872 demph_reg_value, preemph_reg_value,
873 uniqtranscale_reg_value, 0);
874 }
875
876 static void chv_set_signal_levels(struct intel_encoder *encoder,
877 const struct intel_crtc_state *crtc_state)
878 {
879 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
880 u32 deemph_reg_value, margin_reg_value;
881 bool uniq_trans_scale = false;
882 u8 train_set = intel_dp->train_set[0];
883
884 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
885 case DP_TRAIN_PRE_EMPH_LEVEL_0:
886 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
887 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
888 deemph_reg_value = 128;
889 margin_reg_value = 52;
890 break;
891 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
892 deemph_reg_value = 128;
893 margin_reg_value = 77;
894 break;
895 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
896 deemph_reg_value = 128;
897 margin_reg_value = 102;
898 break;
899 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
900 deemph_reg_value = 128;
901 margin_reg_value = 154;
902 uniq_trans_scale = true;
903 break;
904 default:
905 return;
906 }
907 break;
908 case DP_TRAIN_PRE_EMPH_LEVEL_1:
909 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
910 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
911 deemph_reg_value = 85;
912 margin_reg_value = 78;
913 break;
914 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
915 deemph_reg_value = 85;
916 margin_reg_value = 116;
917 break;
918 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
919 deemph_reg_value = 85;
920 margin_reg_value = 154;
921 break;
922 default:
923 return;
924 }
925 break;
926 case DP_TRAIN_PRE_EMPH_LEVEL_2:
927 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
928 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
929 deemph_reg_value = 64;
930 margin_reg_value = 104;
931 break;
932 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
933 deemph_reg_value = 64;
934 margin_reg_value = 154;
935 break;
936 default:
937 return;
938 }
939 break;
940 case DP_TRAIN_PRE_EMPH_LEVEL_3:
941 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
942 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
943 deemph_reg_value = 43;
944 margin_reg_value = 154;
945 break;
946 default:
947 return;
948 }
949 break;
950 default:
951 return;
952 }
953
954 chv_set_phy_signal_level(encoder, crtc_state,
955 deemph_reg_value, margin_reg_value,
956 uniq_trans_scale);
957 }
958
959 static u32 g4x_signal_levels(u8 train_set)
960 {
961 u32 signal_levels = 0;
962
963 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
964 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
965 default:
966 signal_levels |= DP_VOLTAGE_0_4;
967 break;
968 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
969 signal_levels |= DP_VOLTAGE_0_6;
970 break;
971 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
972 signal_levels |= DP_VOLTAGE_0_8;
973 break;
974 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
975 signal_levels |= DP_VOLTAGE_1_2;
976 break;
977 }
978 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
979 case DP_TRAIN_PRE_EMPH_LEVEL_0:
980 default:
981 signal_levels |= DP_PRE_EMPHASIS_0;
982 break;
983 case DP_TRAIN_PRE_EMPH_LEVEL_1:
984 signal_levels |= DP_PRE_EMPHASIS_3_5;
985 break;
986 case DP_TRAIN_PRE_EMPH_LEVEL_2:
987 signal_levels |= DP_PRE_EMPHASIS_6;
988 break;
989 case DP_TRAIN_PRE_EMPH_LEVEL_3:
990 signal_levels |= DP_PRE_EMPHASIS_9_5;
991 break;
992 }
993 return signal_levels;
994 }
995
996 static void
997 g4x_set_signal_levels(struct intel_encoder *encoder,
998 const struct intel_crtc_state *crtc_state)
999 {
1000 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1001 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1002 u8 train_set = intel_dp->train_set[0];
1003 u32 signal_levels;
1004
1005 signal_levels = g4x_signal_levels(train_set);
1006
1007 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1008 signal_levels);
1009
1010 intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
1011 intel_dp->DP |= signal_levels;
1012
1013 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1014 intel_de_posting_read(dev_priv, intel_dp->output_reg);
1015 }
1016
1017 /* SNB CPU eDP voltage swing and pre-emphasis control */
1018 static u32 snb_cpu_edp_signal_levels(u8 train_set)
1019 {
1020 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1021 DP_TRAIN_PRE_EMPHASIS_MASK);
1022
1023 switch (signal_levels) {
1024 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1025 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1026 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1027 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1028 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
1029 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1030 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1031 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
1032 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1033 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1034 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
1035 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1036 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1037 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
1038 default:
1039 MISSING_CASE(signal_levels);
1040 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1041 }
1042 }
1043
1044 static void
1045 snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1046 const struct intel_crtc_state *crtc_state)
1047 {
1048 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1049 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1050 u8 train_set = intel_dp->train_set[0];
1051 u32 signal_levels;
1052
1053 signal_levels = snb_cpu_edp_signal_levels(train_set);
1054
1055 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1056 signal_levels);
1057
1058 intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
1059 intel_dp->DP |= signal_levels;
1060
1061 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1062 intel_de_posting_read(dev_priv, intel_dp->output_reg);
1063 }
1064
1065 /* IVB CPU eDP voltage swing and pre-emphasis control */
1066 static u32 ivb_cpu_edp_signal_levels(u8 train_set)
1067 {
1068 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1069 DP_TRAIN_PRE_EMPHASIS_MASK);
1070
1071 switch (signal_levels) {
1072 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1073 return EDP_LINK_TRAIN_400MV_0DB_IVB;
1074 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1075 return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
1076 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1077 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1078 return EDP_LINK_TRAIN_400MV_6DB_IVB;
1079
1080 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1081 return EDP_LINK_TRAIN_600MV_0DB_IVB;
1082 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1083 return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
1084
1085 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1086 return EDP_LINK_TRAIN_800MV_0DB_IVB;
1087 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1088 return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
1089
1090 default:
1091 MISSING_CASE(signal_levels);
1092 return EDP_LINK_TRAIN_500MV_0DB_IVB;
1093 }
1094 }
1095
1096 static void
1097 ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1098 const struct intel_crtc_state *crtc_state)
1099 {
1100 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1101 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1102 u8 train_set = intel_dp->train_set[0];
1103 u32 signal_levels;
1104
1105 signal_levels = ivb_cpu_edp_signal_levels(train_set);
1106
1107 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1108 signal_levels);
1109
1110 intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
1111 intel_dp->DP |= signal_levels;
1112
1113 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1114 intel_de_posting_read(dev_priv, intel_dp->output_reg);
1115 }
1116
1117 /*
1118 * If the display is now connected, check the link status;
1119 * there have been known issues of link loss triggering a
1120 * long pulse.
1121 *
1122 * Some sinks (eg. ASUS PB287Q) seem to perform some
1123 * weird HPD ping pong during modesets. So we can apparently
1124 * end up with HPD going low during a modeset, and then
1125 * going back up soon after. And once that happens we must
1126 * retrain the link to get a picture. That's in case no
1127 * userspace component reacted to intermittent HPD dip.
1128 */
1129 static enum intel_hotplug_state
1130 intel_dp_hotplug(struct intel_encoder *encoder,
1131 struct intel_connector *connector)
1132 {
1133 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1134 struct drm_modeset_acquire_ctx ctx;
1135 enum intel_hotplug_state state;
1136 int ret;
1137
1138 if (intel_dp->compliance.test_active &&
1139 intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
1140 intel_dp_phy_test(encoder);
1141 /* just do the PHY test and nothing else */
1142 return INTEL_HOTPLUG_UNCHANGED;
1143 }
1144
1145 state = intel_encoder_hotplug(encoder, connector);
1146
1147 drm_modeset_acquire_init(&ctx, 0);
1148
1149 for (;;) {
1150 ret = intel_dp_retrain_link(encoder, &ctx);
1151
1152 if (ret == -EDEADLK) {
1153 drm_modeset_backoff(&ctx);
1154 continue;
1155 }
1156
1157 break;
1158 }
1159
1160 drm_modeset_drop_locks(&ctx);
1161 drm_modeset_acquire_fini(&ctx);
1162 drm_WARN(encoder->base.dev, ret,
1163 "Acquiring modeset locks failed with %i\n", ret);
1164
1165 /*
1166 * Keeping it consistent with intel_ddi_hotplug() and
1167 * intel_hdmi_hotplug().
1168 */
1169 if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
1170 state = INTEL_HOTPLUG_RETRY;
1171
1172 return state;
1173 }
1174
1175 static bool ibx_digital_port_connected(struct intel_encoder *encoder)
1176 {
1177 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1178 u32 bit = dev_priv->display.hotplug.pch_hpd[encoder->hpd_pin];
1179
1180 return intel_de_read(dev_priv, SDEISR) & bit;
1181 }
1182
1183 static bool g4x_digital_port_connected(struct intel_encoder *encoder)
1184 {
1185 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1186 u32 bit;
1187
1188 switch (encoder->hpd_pin) {
1189 case HPD_PORT_B:
1190 bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
1191 break;
1192 case HPD_PORT_C:
1193 bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
1194 break;
1195 case HPD_PORT_D:
1196 bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
1197 break;
1198 default:
1199 MISSING_CASE(encoder->hpd_pin);
1200 return false;
1201 }
1202
1203 return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
1204 }
1205
1206 static bool ilk_digital_port_connected(struct intel_encoder *encoder)
1207 {
1208 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1209 u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin];
1210
1211 return intel_de_read(dev_priv, DEISR) & bit;
1212 }
1213
1214 static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
1215 {
1216 intel_dp_encoder_flush_work(encoder);
1217
1218 drm_encoder_cleanup(encoder);
1219 kfree(enc_to_dig_port(to_intel_encoder(encoder)));
1220 }
1221
1222 enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
1223 {
1224 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1225 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1226 enum pipe pipe;
1227
1228 if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
1229 encoder->port, &pipe))
1230 return pipe;
1231
1232 return INVALID_PIPE;
1233 }
1234
1235 static void intel_dp_encoder_reset(struct drm_encoder *encoder)
1236 {
1237 struct drm_i915_private *dev_priv = to_i915(encoder->dev);
1238 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));
1239
1240 intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
1241
1242 intel_dp->reset_link_params = true;
1243
1244 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
1245 intel_wakeref_t wakeref;
1246
1247 with_intel_pps_lock(intel_dp, wakeref)
1248 intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
1249 }
1250
1251 intel_pps_encoder_reset(intel_dp);
1252 }
1253
1254 static const struct drm_encoder_funcs intel_dp_enc_funcs = {
1255 .reset = intel_dp_encoder_reset,
1256 .destroy = intel_dp_encoder_destroy,
1257 };
1258
1259 bool g4x_dp_init(struct drm_i915_private *dev_priv,
1260 i915_reg_t output_reg, enum port port)
1261 {
1262 const struct intel_bios_encoder_data *devdata;
1263 struct intel_digital_port *dig_port;
1264 struct intel_encoder *intel_encoder;
1265 struct drm_encoder *encoder;
1266 struct intel_connector *intel_connector;
1267
1268 if (!assert_port_valid(dev_priv, port))
1269 return false;
1270
1271 devdata = intel_bios_encoder_data_lookup(dev_priv, port);
1272
1273 /* FIXME bail? */
1274 if (!devdata)
1275 drm_dbg_kms(&dev_priv->drm, "No VBT child device for DP-%c\n",
1276 port_name(port));
1277
1278 dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
1279 if (!dig_port)
1280 return false;
1281
1282 dig_port->aux_ch = AUX_CH_NONE;
1283
1284 intel_connector = intel_connector_alloc();
1285 if (!intel_connector)
1286 goto err_connector_alloc;
1287
1288 intel_encoder = &dig_port->base;
1289 encoder = &intel_encoder->base;
1290
1291 intel_encoder->devdata = devdata;
1292
1293 mutex_init(&dig_port->hdcp_mutex);
1294
1295 if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
1296 &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
1297 "DP %c", port_name(port)))
1298 goto err_encoder_init;
1299
1300 intel_encoder->hotplug = intel_dp_hotplug;
1301 intel_encoder->compute_config = intel_dp_compute_config;
1302 intel_encoder->get_hw_state = intel_dp_get_hw_state;
1303 intel_encoder->get_config = intel_dp_get_config;
1304 intel_encoder->sync_state = intel_dp_sync_state;
1305 intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
1306 intel_encoder->update_pipe = intel_backlight_update;
1307 intel_encoder->suspend = intel_dp_encoder_suspend;
1308 intel_encoder->shutdown = intel_dp_encoder_shutdown;
1309 if (IS_CHERRYVIEW(dev_priv)) {
1310 intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
1311 intel_encoder->pre_enable = chv_pre_enable_dp;
1312 intel_encoder->enable = vlv_enable_dp;
1313 intel_encoder->disable = vlv_disable_dp;
1314 intel_encoder->post_disable = chv_post_disable_dp;
1315 intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
1316 } else if (IS_VALLEYVIEW(dev_priv)) {
1317 intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
1318 intel_encoder->pre_enable = vlv_pre_enable_dp;
1319 intel_encoder->enable = vlv_enable_dp;
1320 intel_encoder->disable = vlv_disable_dp;
1321 intel_encoder->post_disable = vlv_post_disable_dp;
1322 } else {
1323 intel_encoder->pre_enable = g4x_pre_enable_dp;
1324 intel_encoder->enable = g4x_enable_dp;
1325 intel_encoder->disable = g4x_disable_dp;
1326 intel_encoder->post_disable = g4x_post_disable_dp;
1327 }
1328
1329 if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
1330 (HAS_PCH_CPT(dev_priv) && port != PORT_A))
1331 dig_port->dp.set_link_train = cpt_set_link_train;
1332 else
1333 dig_port->dp.set_link_train = g4x_set_link_train;
1334
1335 if (IS_CHERRYVIEW(dev_priv))
1336 intel_encoder->set_signal_levels = chv_set_signal_levels;
1337 else if (IS_VALLEYVIEW(dev_priv))
1338 intel_encoder->set_signal_levels = vlv_set_signal_levels;
1339 else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
1340 intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
1341 else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
1342 intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
1343 else
1344 intel_encoder->set_signal_levels = g4x_set_signal_levels;
1345
1346 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
1347 (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
1348 dig_port->dp.preemph_max = intel_dp_preemph_max_3;
1349 dig_port->dp.voltage_max = intel_dp_voltage_max_3;
1350 } else {
1351 dig_port->dp.preemph_max = intel_dp_preemph_max_2;
1352 dig_port->dp.voltage_max = intel_dp_voltage_max_2;
1353 }
1354
1355 dig_port->dp.output_reg = output_reg;
1356 dig_port->max_lanes = 4;
1357
1358 intel_encoder->type = INTEL_OUTPUT_DP;
1359 intel_encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port);
1360 if (IS_CHERRYVIEW(dev_priv)) {
1361 if (port == PORT_D)
1362 intel_encoder->pipe_mask = BIT(PIPE_C);
1363 else
1364 intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
1365 } else {
1366 intel_encoder->pipe_mask = ~0;
1367 }
1368 intel_encoder->cloneable = 0;
1369 intel_encoder->port = port;
1370 intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);
1371
1372 dig_port->hpd_pulse = intel_dp_hpd_pulse;
1373
1374 if (HAS_GMCH(dev_priv)) {
1375 dig_port->connected = g4x_digital_port_connected;
1376 } else {
1377 if (port == PORT_A)
1378 dig_port->connected = ilk_digital_port_connected;
1379 else
1380 dig_port->connected = ibx_digital_port_connected;
1381 }
1382
1383 if (port != PORT_A)
1384 intel_infoframe_init(dig_port);
1385
1386 dig_port->aux_ch = intel_dp_aux_ch(intel_encoder);
1387 if (dig_port->aux_ch == AUX_CH_NONE)
1388 goto err_init_connector;
1389
1390 if (!intel_dp_init_connector(dig_port, intel_connector))
1391 goto err_init_connector;
1392
1393 return true;
1394
1395 err_init_connector:
1396 drm_encoder_cleanup(encoder);
1397 err_encoder_init:
1398 kfree(intel_connector);
1399 err_connector_alloc:
1400 kfree(dig_port);
1401 return false;
1402 }
1403