1 // SPDX-License-Identifier: MIT
2 /*
3 * Copyright © 2020 Intel Corporation
4 *
5 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
6 */
7
8 #include <linux/string_helpers.h>
9
10 #include "g4x_dp.h"
11 #include "intel_audio.h"
12 #include "intel_backlight.h"
13 #include "intel_connector.h"
14 #include "intel_crtc.h"
15 #include "intel_de.h"
16 #include "intel_display_power.h"
17 #include "intel_display_types.h"
18 #include "intel_dp.h"
19 #include "intel_dp_link_training.h"
20 #include "intel_dpio_phy.h"
21 #include "intel_fifo_underrun.h"
22 #include "intel_hdmi.h"
23 #include "intel_hotplug.h"
24 #include "intel_pch_display.h"
25 #include "intel_pps.h"
26 #include "vlv_sideband.h"
27
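/*
 * Fixed DPLL dividers for the two DP link rates supported on these
 * platforms (port_clock in kHz): 162000 (RBR, 1.62 GHz) and 270000
 * (HBR, 2.7 GHz).
 */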
28 static const struct dpll g4x_dpll[] = {
29 { .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, },
30 { .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, },
31 };
32
33 static const struct dpll pch_dpll[] = {
34 { .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, },
35 { .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, },
36 };
37
38 static const struct dpll vlv_dpll[] = {
39 { .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, },
40 { .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, },
41 };
42
43 static const struct dpll chv_dpll[] = {
44 /* m2 is .22 binary fixed point */
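/* e.g. 0x819999a == round(32.4 * (1 << 22)) */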
45 { .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
46 { .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
47 };
48
49 const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
50 {
51 return IS_CHERRYVIEW(i915) ? &chv_dpll[0] : &vlv_dpll[0];
52 }
53
54 void g4x_dp_set_clock(struct intel_encoder *encoder,
55 struct intel_crtc_state *pipe_config)
56 {
57 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
58 const struct dpll *divisor = NULL;
59 int i, count = 0;
60
61 if (IS_G4X(dev_priv)) {
62 divisor = g4x_dpll;
63 count = ARRAY_SIZE(g4x_dpll);
64 } else if (HAS_PCH_SPLIT(dev_priv)) {
65 divisor = pch_dpll;
66 count = ARRAY_SIZE(pch_dpll);
67 } else if (IS_CHERRYVIEW(dev_priv)) {
68 divisor = chv_dpll;
69 count = ARRAY_SIZE(chv_dpll);
70 } else if (IS_VALLEYVIEW(dev_priv)) {
71 divisor = vlv_dpll;
72 count = ARRAY_SIZE(vlv_dpll);
73 }
74
75 if (divisor && count) {
76 for (i = 0; i < count; i++) {
77 if (pipe_config->port_clock == divisor[i].dot) {
78 pipe_config->dpll = divisor[i];
79 pipe_config->clock_set = true;
80 break;
81 }
82 }
83 }
84 }
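/*
 * For example, a 270000 kHz port clock on a PCH split platform matches
 * pch_dpll[1] above, so pipe_config->dpll is seeded with those fixed
 * dividers and clock_set marks the clock as already chosen.
 */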
85
86 static void intel_dp_prepare(struct intel_encoder *encoder,
87 const struct intel_crtc_state *pipe_config)
88 {
89 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
90 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
91 enum port port = encoder->port;
92 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
93 const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
94
95 intel_dp_set_link_params(intel_dp,
96 pipe_config->port_clock,
97 pipe_config->lane_count);
98
99 /*
100 * There are four kinds of DP registers:
101 * IBX PCH
102 * SNB CPU
103 * IVB CPU
104 * CPT PCH
105 *
106 * IBX PCH and CPU are the same for almost everything,
107 * except that the CPU DP PLL is configured in this
108 * register
109 *
110 * CPT PCH is quite different, having many bits moved
111 * to the TRANS_DP_CTL register instead. That
112 * configuration happens (oddly) in ilk_pch_enable
113 */
114
115 /* Preserve the BIOS-computed detected bit. This is
116 * supposed to be read-only.
117 */
118 intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;
119
120 /* Handle DP bits in common between all three register formats */
121 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
122 intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);
123
124 /* Split out the IBX/CPU vs CPT settings */
125
126 if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
127 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
128 intel_dp->DP |= DP_SYNC_HS_HIGH;
129 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
130 intel_dp->DP |= DP_SYNC_VS_HIGH;
131 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
132
133 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
134 intel_dp->DP |= DP_ENHANCED_FRAMING;
135
136 intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
137 } else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
138 u32 trans_dp;
139
140 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
141
142 trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
143 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
144 trans_dp |= TRANS_DP_ENH_FRAMING;
145 else
146 trans_dp &= ~TRANS_DP_ENH_FRAMING;
147 intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
148 } else {
149 if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
150 intel_dp->DP |= DP_COLOR_RANGE_16_235;
151
152 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
153 intel_dp->DP |= DP_SYNC_HS_HIGH;
154 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
155 intel_dp->DP |= DP_SYNC_VS_HIGH;
156 intel_dp->DP |= DP_LINK_TRAIN_OFF;
157
158 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
159 intel_dp->DP |= DP_ENHANCED_FRAMING;
160
161 if (IS_CHERRYVIEW(dev_priv))
162 intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
163 else
164 intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
165 }
166 }
167
168 static void assert_dp_port(struct intel_dp *intel_dp, bool state)
169 {
170 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
171 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
172 bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;
173
174 I915_STATE_WARN(cur_state != state,
175 "[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
176 dig_port->base.base.base.id, dig_port->base.base.name,
177 str_on_off(state), str_on_off(cur_state));
178 }
179 #define assert_dp_port_disabled(d) assert_dp_port((d), false)
180
181 static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
182 {
183 bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;
184
185 I915_STATE_WARN(cur_state != state,
186 "eDP PLL state assertion failure (expected %s, current %s)\n",
187 str_on_off(state), str_on_off(cur_state));
188 }
189 #define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
190 #define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
191
192 static void ilk_edp_pll_on(struct intel_dp *intel_dp,
193 const struct intel_crtc_state *pipe_config)
194 {
195 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
196 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
197
198 assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
199 assert_dp_port_disabled(intel_dp);
200 assert_edp_pll_disabled(dev_priv);
201
202 drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
203 pipe_config->port_clock);
204
205 intel_dp->DP &= ~DP_PLL_FREQ_MASK;
206
207 if (pipe_config->port_clock == 162000)
208 intel_dp->DP |= DP_PLL_FREQ_162MHZ;
209 else
210 intel_dp->DP |= DP_PLL_FREQ_270MHZ;
211
212 intel_de_write(dev_priv, DP_A, intel_dp->DP);
213 intel_de_posting_read(dev_priv, DP_A);
214 udelay(500);
215
216 /*
217 * [DevILK] Work around required when enabling DP PLL
218 * while a pipe is enabled going to FDI:
219 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
220 * 2. Program DP PLL enable
221 */
222 if (IS_IRONLAKE(dev_priv))
223 intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);
224
225 intel_dp->DP |= DP_PLL_ENABLE;
226
227 intel_de_write(dev_priv, DP_A, intel_dp->DP);
228 intel_de_posting_read(dev_priv, DP_A);
229 udelay(200);
230 }
231
232 static void ilk_edp_pll_off(struct intel_dp *intel_dp,
233 const struct intel_crtc_state *old_crtc_state)
234 {
235 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
236 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
237
238 assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
239 assert_dp_port_disabled(intel_dp);
240 assert_edp_pll_enabled(dev_priv);
241
242 drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");
243
244 intel_dp->DP &= ~DP_PLL_ENABLE;
245
246 intel_de_write(dev_priv, DP_A, intel_dp->DP);
247 intel_de_posting_read(dev_priv, DP_A);
248 udelay(200);
249 }
250
251 static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
252 enum port port, enum pipe *pipe)
253 {
254 enum pipe p;
255
256 for_each_pipe(dev_priv, p) {
257 u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));
258
259 if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
260 *pipe = p;
261 return true;
262 }
263 }
264
265 drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
266 port_name(port));
267
268 /* must initialize pipe to something for the asserts */
269 *pipe = PIPE_A;
270
271 return false;
272 }
273
274 bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
275 i915_reg_t dp_reg, enum port port,
276 enum pipe *pipe)
277 {
278 bool ret;
279 u32 val;
280
281 val = intel_de_read(dev_priv, dp_reg);
282
283 ret = val & DP_PORT_EN;
284
285 /* asserts want to know the pipe even if the port is disabled */
286 if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
287 *pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
288 else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
289 ret &= cpt_dp_port_selected(dev_priv, port, pipe);
290 else if (IS_CHERRYVIEW(dev_priv))
291 *pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
292 else
293 *pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;
294
295 return ret;
296 }
297
298 static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
299 enum pipe *pipe)
300 {
301 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
302 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
303 intel_wakeref_t wakeref;
304 bool ret;
305
306 wakeref = intel_display_power_get_if_enabled(dev_priv,
307 encoder->power_domain);
308 if (!wakeref)
309 return false;
310
311 ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
312 encoder->port, pipe);
313
314 intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
315
316 return ret;
317 }
318
319 static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
320 {
321 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
322
323 if (crtc_state->has_pch_encoder) {
324 intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
325 intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
326 } else {
327 intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
328 &crtc_state->dp_m_n);
329 intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
330 &crtc_state->dp_m2_n2);
331 }
332 }
333
334 static void intel_dp_get_config(struct intel_encoder *encoder,
335 struct intel_crtc_state *pipe_config)
336 {
337 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
338 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
339 u32 tmp, flags = 0;
340 enum port port = encoder->port;
341 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
342
343 if (encoder->type == INTEL_OUTPUT_EDP)
344 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
345 else
346 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);
347
348 tmp = intel_de_read(dev_priv, intel_dp->output_reg);
349
350 pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;
351
352 if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
353 u32 trans_dp = intel_de_read(dev_priv,
354 TRANS_DP_CTL(crtc->pipe));
355
356 if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
357 flags |= DRM_MODE_FLAG_PHSYNC;
358 else
359 flags |= DRM_MODE_FLAG_NHSYNC;
360
361 if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
362 flags |= DRM_MODE_FLAG_PVSYNC;
363 else
364 flags |= DRM_MODE_FLAG_NVSYNC;
365 } else {
366 if (tmp & DP_SYNC_HS_HIGH)
367 flags |= DRM_MODE_FLAG_PHSYNC;
368 else
369 flags |= DRM_MODE_FLAG_NHSYNC;
370
371 if (tmp & DP_SYNC_VS_HIGH)
372 flags |= DRM_MODE_FLAG_PVSYNC;
373 else
374 flags |= DRM_MODE_FLAG_NVSYNC;
375 }
376
377 pipe_config->hw.adjusted_mode.flags |= flags;
378
379 if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
380 pipe_config->limited_color_range = true;
381
382 pipe_config->lane_count =
383 ((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;
384
385 g4x_dp_get_m_n(pipe_config);
386
387 if (port == PORT_A) {
388 if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
389 pipe_config->port_clock = 162000;
390 else
391 pipe_config->port_clock = 270000;
392 }
393
394 pipe_config->hw.adjusted_mode.crtc_clock =
395 intel_dotclock_calculate(pipe_config->port_clock,
396 &pipe_config->dp_m_n);
397
398 if (intel_dp_is_edp(intel_dp))
399 intel_edp_fixup_vbt_bpp(encoder, pipe_config->pipe_bpp);
400 }
401
402 static void
403 intel_dp_link_down(struct intel_encoder *encoder,
404 const struct intel_crtc_state *old_crtc_state)
405 {
406 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
407 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
408 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
409 enum port port = encoder->port;
410
411 if (drm_WARN_ON(&dev_priv->drm,
412 (intel_de_read(dev_priv, intel_dp->output_reg) &
413 DP_PORT_EN) == 0))
414 return;
415
416 drm_dbg_kms(&dev_priv->drm, "\n");
417
418 if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
419 (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
420 intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
421 intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
422 } else {
423 intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
424 intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
425 }
426 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
427 intel_de_posting_read(dev_priv, intel_dp->output_reg);
428
429 intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
430 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
431 intel_de_posting_read(dev_priv, intel_dp->output_reg);
432
433 /*
434 * HW workaround for IBX: we need to move the port
435 * to transcoder A after disabling it to allow the
436 * matching HDMI port to be enabled on transcoder A.
437 */
438 if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
439 /*
440 * We get CPU/PCH FIFO underruns on the other pipe when
441 * doing the workaround. Sweep them under the rug.
442 */
443 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
444 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);
445
446 /* always enable with pattern 1 (as per spec) */
447 intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
448 intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
449 DP_LINK_TRAIN_PAT_1;
450 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
451 intel_de_posting_read(dev_priv, intel_dp->output_reg);
452
453 intel_dp->DP &= ~DP_PORT_EN;
454 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
455 intel_de_posting_read(dev_priv, intel_dp->output_reg);
456
457 intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
458 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
459 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
460 }
461
462 msleep(intel_dp->pps.panel_power_down_delay);
463
464 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
465 intel_wakeref_t wakeref;
466
467 with_intel_pps_lock(intel_dp, wakeref)
468 intel_dp->pps.active_pipe = INVALID_PIPE;
469 }
470 }
471
472 static void intel_disable_dp(struct intel_atomic_state *state,
473 struct intel_encoder *encoder,
474 const struct intel_crtc_state *old_crtc_state,
475 const struct drm_connector_state *old_conn_state)
476 {
477 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
478
479 intel_dp->link_trained = false;
480
481 intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);
482
483 /*
484 * Make sure the panel is off before trying to change the mode.
485 * But also ensure that we have vdd while we switch off the panel.
486 */
487 intel_pps_vdd_on(intel_dp);
488 intel_edp_backlight_off(old_conn_state);
489 intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
490 intel_pps_off(intel_dp);
491 }
492
493 static void g4x_disable_dp(struct intel_atomic_state *state,
494 struct intel_encoder *encoder,
495 const struct intel_crtc_state *old_crtc_state,
496 const struct drm_connector_state *old_conn_state)
497 {
498 intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
499 }
500
501 static void vlv_disable_dp(struct intel_atomic_state *state,
502 struct intel_encoder *encoder,
503 const struct intel_crtc_state *old_crtc_state,
504 const struct drm_connector_state *old_conn_state)
505 {
506 intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
507 }
508
509 static void g4x_post_disable_dp(struct intel_atomic_state *state,
510 struct intel_encoder *encoder,
511 const struct intel_crtc_state *old_crtc_state,
512 const struct drm_connector_state *old_conn_state)
513 {
514 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
515 enum port port = encoder->port;
516
517 /*
518 * Bspec does not list a specific disable sequence for g4x DP.
519 * Follow the ilk+ sequence (disable pipe before the port) for
520 * g4x DP as it does not suffer from underruns like the normal
521 * g4x modeset sequence (disable pipe after the port).
522 */
523 intel_dp_link_down(encoder, old_crtc_state);
524
525 /* Only ilk+ has port A */
526 if (port == PORT_A)
527 ilk_edp_pll_off(intel_dp, old_crtc_state);
528 }
529
530 static void vlv_post_disable_dp(struct intel_atomic_state *state,
531 struct intel_encoder *encoder,
532 const struct intel_crtc_state *old_crtc_state,
533 const struct drm_connector_state *old_conn_state)
534 {
535 intel_dp_link_down(encoder, old_crtc_state);
536 }
537
538 static void chv_post_disable_dp(struct intel_atomic_state *state,
539 struct intel_encoder *encoder,
540 const struct intel_crtc_state *old_crtc_state,
541 const struct drm_connector_state *old_conn_state)
542 {
543 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
544
545 intel_dp_link_down(encoder, old_crtc_state);
546
547 vlv_dpio_get(dev_priv);
548
549 /* Assert data lane reset */
550 chv_data_lane_soft_reset(encoder, old_crtc_state, true);
551
552 vlv_dpio_put(dev_priv);
553 }
554
555 static void
556 cpt_set_link_train(struct intel_dp *intel_dp,
557 const struct intel_crtc_state *crtc_state,
558 u8 dp_train_pat)
559 {
560 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
561
562 intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
563
564 switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
565 case DP_TRAINING_PATTERN_DISABLE:
566 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
567 break;
568 case DP_TRAINING_PATTERN_1:
569 intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
570 break;
571 case DP_TRAINING_PATTERN_2:
572 intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
573 break;
574 default:
575 MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
576 return;
577 }
578
579 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
580 intel_de_posting_read(dev_priv, intel_dp->output_reg);
581 }
582
583 static void
584 g4x_set_link_train(struct intel_dp *intel_dp,
585 const struct intel_crtc_state *crtc_state,
586 u8 dp_train_pat)
587 {
588 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
589
590 intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
591
592 switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
593 case DP_TRAINING_PATTERN_DISABLE:
594 intel_dp->DP |= DP_LINK_TRAIN_OFF;
595 break;
596 case DP_TRAINING_PATTERN_1:
597 intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
598 break;
599 case DP_TRAINING_PATTERN_2:
600 intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
601 break;
602 default:
603 MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
604 return;
605 }
606
607 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
608 intel_de_posting_read(dev_priv, intel_dp->output_reg);
609 }
610
611 static void intel_dp_enable_port(struct intel_dp *intel_dp,
612 const struct intel_crtc_state *crtc_state)
613 {
614 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
615
616 /* enable with pattern 1 (as per spec) */
617
618 intel_dp_program_link_training_pattern(intel_dp, crtc_state,
619 DP_PHY_DPRX, DP_TRAINING_PATTERN_1);
620
621 /*
622 * Magic for VLV/CHV. We _must_ first set up the register
623 * without actually enabling the port, and then do another
624 * write to enable the port. Otherwise link training will
625 * fail when the power sequencer is freshly used for this port.
626 */
627 intel_dp->DP |= DP_PORT_EN;
628 if (crtc_state->has_audio)
629 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
630
631 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
632 intel_de_posting_read(dev_priv, intel_dp->output_reg);
633 }
634
635 static void intel_enable_dp(struct intel_atomic_state *state,
636 struct intel_encoder *encoder,
637 const struct intel_crtc_state *pipe_config,
638 const struct drm_connector_state *conn_state)
639 {
640 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
641 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
642 u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
643 intel_wakeref_t wakeref;
644
645 if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
646 return;
647
648 with_intel_pps_lock(intel_dp, wakeref) {
649 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
650 vlv_pps_init(encoder, pipe_config);
651
652 intel_dp_enable_port(intel_dp, pipe_config);
653
654 intel_pps_vdd_on_unlocked(intel_dp);
655 intel_pps_on_unlocked(intel_dp);
656 intel_pps_vdd_off_unlocked(intel_dp, true);
657 }
658
659 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
660 unsigned int lane_mask = 0x0;
661
662 if (IS_CHERRYVIEW(dev_priv))
663 lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);
664
665 vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
666 lane_mask);
667 }
668
669 intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
670 intel_dp_configure_protocol_converter(intel_dp, pipe_config);
671 intel_dp_check_frl_training(intel_dp);
672 intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
673 intel_dp_start_link_train(intel_dp, pipe_config);
674 intel_dp_stop_link_train(intel_dp, pipe_config);
675
676 intel_audio_codec_enable(encoder, pipe_config, conn_state);
677 }
678
679 static void g4x_enable_dp(struct intel_atomic_state *state,
680 struct intel_encoder *encoder,
681 const struct intel_crtc_state *pipe_config,
682 const struct drm_connector_state *conn_state)
683 {
684 intel_enable_dp(state, encoder, pipe_config, conn_state);
685 intel_edp_backlight_on(pipe_config, conn_state);
686 }
687
688 static void vlv_enable_dp(struct intel_atomic_state *state,
689 struct intel_encoder *encoder,
690 const struct intel_crtc_state *pipe_config,
691 const struct drm_connector_state *conn_state)
692 {
693 intel_edp_backlight_on(pipe_config, conn_state);
694 }
695
696 static void g4x_pre_enable_dp(struct intel_atomic_state *state,
697 struct intel_encoder *encoder,
698 const struct intel_crtc_state *pipe_config,
699 const struct drm_connector_state *conn_state)
700 {
701 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
702 enum port port = encoder->port;
703
704 intel_dp_prepare(encoder, pipe_config);
705
706 /* Only ilk+ has port A */
707 if (port == PORT_A)
708 ilk_edp_pll_on(intel_dp, pipe_config);
709 }
710
711 static void vlv_pre_enable_dp(struct intel_atomic_state *state,
712 struct intel_encoder *encoder,
713 const struct intel_crtc_state *pipe_config,
714 const struct drm_connector_state *conn_state)
715 {
716 vlv_phy_pre_encoder_enable(encoder, pipe_config);
717
718 intel_enable_dp(state, encoder, pipe_config, conn_state);
719 }
720
721 static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
722 struct intel_encoder *encoder,
723 const struct intel_crtc_state *pipe_config,
724 const struct drm_connector_state *conn_state)
725 {
726 intel_dp_prepare(encoder, pipe_config);
727
728 vlv_phy_pre_pll_enable(encoder, pipe_config);
729 }
730
731 static void chv_pre_enable_dp(struct intel_atomic_state *state,
732 struct intel_encoder *encoder,
733 const struct intel_crtc_state *pipe_config,
734 const struct drm_connector_state *conn_state)
735 {
736 chv_phy_pre_encoder_enable(encoder, pipe_config);
737
738 intel_enable_dp(state, encoder, pipe_config, conn_state);
739
740 /* Second common lane will stay alive on its own now */
741 chv_phy_release_cl2_override(encoder);
742 }
743
744 static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
745 struct intel_encoder *encoder,
746 const struct intel_crtc_state *pipe_config,
747 const struct drm_connector_state *conn_state)
748 {
749 intel_dp_prepare(encoder, pipe_config);
750
751 chv_phy_pre_pll_enable(encoder, pipe_config);
752 }
753
754 static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
755 struct intel_encoder *encoder,
756 const struct intel_crtc_state *old_crtc_state,
757 const struct drm_connector_state *old_conn_state)
758 {
759 chv_phy_post_pll_disable(encoder, old_crtc_state);
760 }
761
762 static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
763 const struct intel_crtc_state *crtc_state)
764 {
765 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
766 }
767
768 static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
769 const struct intel_crtc_state *crtc_state)
770 {
771 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
772 }
773
774 static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
775 {
776 return DP_TRAIN_PRE_EMPH_LEVEL_2;
777 }
778
779 static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
780 {
781 return DP_TRAIN_PRE_EMPH_LEVEL_3;
782 }
783
784 static void vlv_set_signal_levels(struct intel_encoder *encoder,
785 const struct intel_crtc_state *crtc_state)
786 {
787 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
788 unsigned long demph_reg_value, preemph_reg_value,
789 uniqtranscale_reg_value;
790 u8 train_set = intel_dp->train_set[0];
791
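/*
 * The hex values below are opaque VLV DPIO PHY tuning constants; they
 * are passed straight to vlv_set_phy_signal_level() rather than being
 * decoded here.
 */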
792 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
793 case DP_TRAIN_PRE_EMPH_LEVEL_0:
794 preemph_reg_value = 0x0004000;
795 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
796 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
797 demph_reg_value = 0x2B405555;
798 uniqtranscale_reg_value = 0x552AB83A;
799 break;
800 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
801 demph_reg_value = 0x2B404040;
802 uniqtranscale_reg_value = 0x5548B83A;
803 break;
804 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
805 demph_reg_value = 0x2B245555;
806 uniqtranscale_reg_value = 0x5560B83A;
807 break;
808 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
809 demph_reg_value = 0x2B405555;
810 uniqtranscale_reg_value = 0x5598DA3A;
811 break;
812 default:
813 return;
814 }
815 break;
816 case DP_TRAIN_PRE_EMPH_LEVEL_1:
817 preemph_reg_value = 0x0002000;
818 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
819 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
820 demph_reg_value = 0x2B404040;
821 uniqtranscale_reg_value = 0x5552B83A;
822 break;
823 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
824 demph_reg_value = 0x2B404848;
825 uniqtranscale_reg_value = 0x5580B83A;
826 break;
827 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
828 demph_reg_value = 0x2B404040;
829 uniqtranscale_reg_value = 0x55ADDA3A;
830 break;
831 default:
832 return;
833 }
834 break;
835 case DP_TRAIN_PRE_EMPH_LEVEL_2:
836 preemph_reg_value = 0x0000000;
837 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
838 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
839 demph_reg_value = 0x2B305555;
840 uniqtranscale_reg_value = 0x5570B83A;
841 break;
842 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
843 demph_reg_value = 0x2B2B4040;
844 uniqtranscale_reg_value = 0x55ADDA3A;
845 break;
846 default:
847 return;
848 }
849 break;
850 case DP_TRAIN_PRE_EMPH_LEVEL_3:
851 preemph_reg_value = 0x0006000;
852 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
853 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
854 demph_reg_value = 0x1B405555;
855 uniqtranscale_reg_value = 0x55ADDA3A;
856 break;
857 default:
858 return;
859 }
860 break;
861 default:
862 return;
863 }
864
865 vlv_set_phy_signal_level(encoder, crtc_state,
866 demph_reg_value, preemph_reg_value,
867 uniqtranscale_reg_value, 0);
868 }
869
870 static void chv_set_signal_levels(struct intel_encoder *encoder,
871 const struct intel_crtc_state *crtc_state)
872 {
873 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
874 u32 deemph_reg_value, margin_reg_value;
875 bool uniq_trans_scale = false;
876 u8 train_set = intel_dp->train_set[0];
877
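/*
 * The deemph/margin pairs below are CHV PHY tuning constants consumed
 * by chv_set_phy_signal_level(); only maximum voltage swing with no
 * pre-emphasis additionally enables the unique transition scale.
 */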
878 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
879 case DP_TRAIN_PRE_EMPH_LEVEL_0:
880 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
881 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
882 deemph_reg_value = 128;
883 margin_reg_value = 52;
884 break;
885 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
886 deemph_reg_value = 128;
887 margin_reg_value = 77;
888 break;
889 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
890 deemph_reg_value = 128;
891 margin_reg_value = 102;
892 break;
893 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
894 deemph_reg_value = 128;
895 margin_reg_value = 154;
896 uniq_trans_scale = true;
897 break;
898 default:
899 return;
900 }
901 break;
902 case DP_TRAIN_PRE_EMPH_LEVEL_1:
903 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
904 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
905 deemph_reg_value = 85;
906 margin_reg_value = 78;
907 break;
908 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
909 deemph_reg_value = 85;
910 margin_reg_value = 116;
911 break;
912 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
913 deemph_reg_value = 85;
914 margin_reg_value = 154;
915 break;
916 default:
917 return;
918 }
919 break;
920 case DP_TRAIN_PRE_EMPH_LEVEL_2:
921 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
922 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
923 deemph_reg_value = 64;
924 margin_reg_value = 104;
925 break;
926 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
927 deemph_reg_value = 64;
928 margin_reg_value = 154;
929 break;
930 default:
931 return;
932 }
933 break;
934 case DP_TRAIN_PRE_EMPH_LEVEL_3:
935 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
936 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
937 deemph_reg_value = 43;
938 margin_reg_value = 154;
939 break;
940 default:
941 return;
942 }
943 break;
944 default:
945 return;
946 }
947
948 chv_set_phy_signal_level(encoder, crtc_state,
949 deemph_reg_value, margin_reg_value,
950 uniq_trans_scale);
951 }
952
953 static u32 g4x_signal_levels(u8 train_set)
954 {
955 u32 signal_levels = 0;
956
957 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
958 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
959 default:
960 signal_levels |= DP_VOLTAGE_0_4;
961 break;
962 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
963 signal_levels |= DP_VOLTAGE_0_6;
964 break;
965 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
966 signal_levels |= DP_VOLTAGE_0_8;
967 break;
968 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
969 signal_levels |= DP_VOLTAGE_1_2;
970 break;
971 }
972 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
973 case DP_TRAIN_PRE_EMPH_LEVEL_0:
974 default:
975 signal_levels |= DP_PRE_EMPHASIS_0;
976 break;
977 case DP_TRAIN_PRE_EMPH_LEVEL_1:
978 signal_levels |= DP_PRE_EMPHASIS_3_5;
979 break;
980 case DP_TRAIN_PRE_EMPH_LEVEL_2:
981 signal_levels |= DP_PRE_EMPHASIS_6;
982 break;
983 case DP_TRAIN_PRE_EMPH_LEVEL_3:
984 signal_levels |= DP_PRE_EMPHASIS_9_5;
985 break;
986 }
987 return signal_levels;
988 }
989
990 static void
991 g4x_set_signal_levels(struct intel_encoder *encoder,
992 const struct intel_crtc_state *crtc_state)
993 {
994 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
995 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
996 u8 train_set = intel_dp->train_set[0];
997 u32 signal_levels;
998
999 signal_levels = g4x_signal_levels(train_set);
1000
1001 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1002 signal_levels);
1003
1004 intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
1005 intel_dp->DP |= signal_levels;
1006
1007 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1008 intel_de_posting_read(dev_priv, intel_dp->output_reg);
1009 }
1010
1011 /* SNB CPU eDP voltage swing and pre-emphasis control */
1012 static u32 snb_cpu_edp_signal_levels(u8 train_set)
1013 {
1014 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1015 DP_TRAIN_PRE_EMPHASIS_MASK);
1016
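/*
 * Several requested vswing/pre-emphasis combinations map onto the same
 * register value here since the hardware only exposes a limited set of
 * settings (note the ranges encoded in the EDP_LINK_TRAIN_*_SNB_B names).
 */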
1017 switch (signal_levels) {
1018 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1019 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1020 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1021 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1022 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
1023 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1024 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1025 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
1026 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1027 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1028 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
1029 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1030 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1031 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
1032 default:
1033 MISSING_CASE(signal_levels);
1034 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1035 }
1036 }
1037
1038 static void
1039 snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1040 const struct intel_crtc_state *crtc_state)
1041 {
1042 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1043 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1044 u8 train_set = intel_dp->train_set[0];
1045 u32 signal_levels;
1046
1047 signal_levels = snb_cpu_edp_signal_levels(train_set);
1048
1049 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1050 signal_levels);
1051
1052 intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
1053 intel_dp->DP |= signal_levels;
1054
1055 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1056 intel_de_posting_read(dev_priv, intel_dp->output_reg);
1057 }
1058
1059 /* IVB CPU eDP voltage swing and pre-emphasis control */
1060 static u32 ivb_cpu_edp_signal_levels(u8 train_set)
1061 {
1062 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1063 DP_TRAIN_PRE_EMPHASIS_MASK);
1064
1065 switch (signal_levels) {
1066 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1067 return EDP_LINK_TRAIN_400MV_0DB_IVB;
1068 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1069 return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
1070 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1071 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1072 return EDP_LINK_TRAIN_400MV_6DB_IVB;
1073
1074 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1075 return EDP_LINK_TRAIN_600MV_0DB_IVB;
1076 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1077 return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
1078
1079 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1080 return EDP_LINK_TRAIN_800MV_0DB_IVB;
1081 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1082 return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
1083
1084 default:
1085 MISSING_CASE(signal_levels);
1086 return EDP_LINK_TRAIN_500MV_0DB_IVB;
1087 }
1088 }
1089
1090 static void
1091 ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1092 const struct intel_crtc_state *crtc_state)
1093 {
1094 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1095 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1096 u8 train_set = intel_dp->train_set[0];
1097 u32 signal_levels;
1098
1099 signal_levels = ivb_cpu_edp_signal_levels(train_set);
1100
1101 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1102 signal_levels);
1103
1104 intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
1105 intel_dp->DP |= signal_levels;
1106
1107 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1108 intel_de_posting_read(dev_priv, intel_dp->output_reg);
1109 }
1110
1111 /*
1112 * If the display is now connected, check the link status;
1113 * there have been known issues of link loss triggering a
1114 * long pulse.
1115 *
1116 * Some sinks (e.g. ASUS PB287Q) seem to perform some
1117 * weird HPD ping pong during modesets. So we can apparently
1118 * end up with HPD going low during a modeset, and then
1119 * going back up soon after. And once that happens we must
1120 * retrain the link to get a picture. That's in case no
1121 * userspace component reacted to intermittent HPD dip.
1122 */
1123 static enum intel_hotplug_state
1124 intel_dp_hotplug(struct intel_encoder *encoder,
1125 struct intel_connector *connector)
1126 {
1127 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1128 struct drm_modeset_acquire_ctx ctx;
1129 enum intel_hotplug_state state;
1130 int ret;
1131
1132 if (intel_dp->compliance.test_active &&
1133 intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
1134 intel_dp_phy_test(encoder);
1135 /* just do the PHY test and nothing else */
1136 return INTEL_HOTPLUG_UNCHANGED;
1137 }
1138
1139 state = intel_encoder_hotplug(encoder, connector);
1140
1141 drm_modeset_acquire_init(&ctx, 0);
1142
1143 for (;;) {
1144 ret = intel_dp_retrain_link(encoder, &ctx);
1145
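/* -EDEADLK means a modeset lock is contended: back off (dropping our locks) and retry. */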
1146 if (ret == -EDEADLK) {
1147 drm_modeset_backoff(&ctx);
1148 continue;
1149 }
1150
1151 break;
1152 }
1153
1154 drm_modeset_drop_locks(&ctx);
1155 drm_modeset_acquire_fini(&ctx);
1156 drm_WARN(encoder->base.dev, ret,
1157 "Acquiring modeset locks failed with %i\n", ret);
1158
1159 /*
1160 * Keeping it consistent with intel_ddi_hotplug() and
1161 * intel_hdmi_hotplug().
1162 */
1163 if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
1164 state = INTEL_HOTPLUG_RETRY;
1165
1166 return state;
1167 }
1168
1169 static bool ibx_digital_port_connected(struct intel_encoder *encoder)
1170 {
1171 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1172 u32 bit = dev_priv->display.hotplug.pch_hpd[encoder->hpd_pin];
1173
1174 return intel_de_read(dev_priv, SDEISR) & bit;
1175 }
1176
1177 static bool g4x_digital_port_connected(struct intel_encoder *encoder)
1178 {
1179 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1180 u32 bit;
1181
1182 switch (encoder->hpd_pin) {
1183 case HPD_PORT_B:
1184 bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
1185 break;
1186 case HPD_PORT_C:
1187 bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
1188 break;
1189 case HPD_PORT_D:
1190 bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
1191 break;
1192 default:
1193 MISSING_CASE(encoder->hpd_pin);
1194 return false;
1195 }
1196
1197 return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
1198 }
1199
1200 static bool gm45_digital_port_connected(struct intel_encoder *encoder)
1201 {
1202 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1203 u32 bit;
1204
1205 switch (encoder->hpd_pin) {
1206 case HPD_PORT_B:
1207 bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
1208 break;
1209 case HPD_PORT_C:
1210 bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
1211 break;
1212 case HPD_PORT_D:
1213 bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
1214 break;
1215 default:
1216 MISSING_CASE(encoder->hpd_pin);
1217 return false;
1218 }
1219
1220 return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
1221 }
1222
1223 static bool ilk_digital_port_connected(struct intel_encoder *encoder)
1224 {
1225 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1226 u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin];
1227
1228 return intel_de_read(dev_priv, DEISR) & bit;
1229 }
1230
1231 static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
1232 {
1233 intel_dp_encoder_flush_work(encoder);
1234
1235 drm_encoder_cleanup(encoder);
1236 kfree(enc_to_dig_port(to_intel_encoder(encoder)));
1237 }
1238
1239 enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
1240 {
1241 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1242 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1243 enum pipe pipe;
1244
1245 if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
1246 encoder->port, &pipe))
1247 return pipe;
1248
1249 return INVALID_PIPE;
1250 }
1251
1252 static void intel_dp_encoder_reset(struct drm_encoder *encoder)
1253 {
1254 struct drm_i915_private *dev_priv = to_i915(encoder->dev);
1255 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));
1256
1257 intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
1258
1259 intel_dp->reset_link_params = true;
1260
1261 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
1262 intel_wakeref_t wakeref;
1263
1264 with_intel_pps_lock(intel_dp, wakeref)
1265 intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
1266 }
1267
1268 intel_pps_encoder_reset(intel_dp);
1269 }
1270
1271 static const struct drm_encoder_funcs intel_dp_enc_funcs = {
1272 .reset = intel_dp_encoder_reset,
1273 .destroy = intel_dp_encoder_destroy,
1274 };
1275
1276 bool g4x_dp_init(struct drm_i915_private *dev_priv,
1277 i915_reg_t output_reg, enum port port)
1278 {
1279 struct intel_digital_port *dig_port;
1280 struct intel_encoder *intel_encoder;
1281 struct drm_encoder *encoder;
1282 struct intel_connector *intel_connector;
1283
1284 dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
1285 if (!dig_port)
1286 return false;
1287
1288 intel_connector = intel_connector_alloc();
1289 if (!intel_connector)
1290 goto err_connector_alloc;
1291
1292 intel_encoder = &dig_port->base;
1293 encoder = &intel_encoder->base;
1294
1295 mutex_init(&dig_port->hdcp_mutex);
1296
1297 if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
1298 &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
1299 "DP %c", port_name(port)))
1300 goto err_encoder_init;
1301
1302 intel_encoder->hotplug = intel_dp_hotplug;
1303 intel_encoder->compute_config = intel_dp_compute_config;
1304 intel_encoder->get_hw_state = intel_dp_get_hw_state;
1305 intel_encoder->get_config = intel_dp_get_config;
1306 intel_encoder->sync_state = intel_dp_sync_state;
1307 intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
1308 intel_encoder->update_pipe = intel_backlight_update;
1309 intel_encoder->suspend = intel_dp_encoder_suspend;
1310 intel_encoder->shutdown = intel_dp_encoder_shutdown;
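/*
 * Pick the modeset hooks per platform: CHV and VLV go through the DPIO
 * PHY helpers, everything else uses the plain g4x/ilk sequence.
 */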
1311 if (IS_CHERRYVIEW(dev_priv)) {
1312 intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
1313 intel_encoder->pre_enable = chv_pre_enable_dp;
1314 intel_encoder->enable = vlv_enable_dp;
1315 intel_encoder->disable = vlv_disable_dp;
1316 intel_encoder->post_disable = chv_post_disable_dp;
1317 intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
1318 } else if (IS_VALLEYVIEW(dev_priv)) {
1319 intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
1320 intel_encoder->pre_enable = vlv_pre_enable_dp;
1321 intel_encoder->enable = vlv_enable_dp;
1322 intel_encoder->disable = vlv_disable_dp;
1323 intel_encoder->post_disable = vlv_post_disable_dp;
1324 } else {
1325 intel_encoder->pre_enable = g4x_pre_enable_dp;
1326 intel_encoder->enable = g4x_enable_dp;
1327 intel_encoder->disable = g4x_disable_dp;
1328 intel_encoder->post_disable = g4x_post_disable_dp;
1329 }
1330
1331 if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
1332 (HAS_PCH_CPT(dev_priv) && port != PORT_A))
1333 dig_port->dp.set_link_train = cpt_set_link_train;
1334 else
1335 dig_port->dp.set_link_train = g4x_set_link_train;
1336
1337 if (IS_CHERRYVIEW(dev_priv))
1338 intel_encoder->set_signal_levels = chv_set_signal_levels;
1339 else if (IS_VALLEYVIEW(dev_priv))
1340 intel_encoder->set_signal_levels = vlv_set_signal_levels;
1341 else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
1342 intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
1343 else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
1344 intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
1345 else
1346 intel_encoder->set_signal_levels = g4x_set_signal_levels;
1347
1348 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
1349 (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
1350 dig_port->dp.preemph_max = intel_dp_preemph_max_3;
1351 dig_port->dp.voltage_max = intel_dp_voltage_max_3;
1352 } else {
1353 dig_port->dp.preemph_max = intel_dp_preemph_max_2;
1354 dig_port->dp.voltage_max = intel_dp_voltage_max_2;
1355 }
1356
1357 dig_port->dp.output_reg = output_reg;
1358 dig_port->max_lanes = 4;
1359
1360 intel_encoder->type = INTEL_OUTPUT_DP;
1361 intel_encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port);
1362 if (IS_CHERRYVIEW(dev_priv)) {
1363 if (port == PORT_D)
1364 intel_encoder->pipe_mask = BIT(PIPE_C);
1365 else
1366 intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
1367 } else {
1368 intel_encoder->pipe_mask = ~0;
1369 }
1370 intel_encoder->cloneable = 0;
1371 intel_encoder->port = port;
1372 intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);
1373
1374 dig_port->hpd_pulse = intel_dp_hpd_pulse;
1375
1376 if (HAS_GMCH(dev_priv)) {
1377 if (IS_GM45(dev_priv))
1378 dig_port->connected = gm45_digital_port_connected;
1379 else
1380 dig_port->connected = g4x_digital_port_connected;
1381 } else {
1382 if (port == PORT_A)
1383 dig_port->connected = ilk_digital_port_connected;
1384 else
1385 dig_port->connected = ibx_digital_port_connected;
1386 }
1387
1388 if (port != PORT_A)
1389 intel_infoframe_init(dig_port);
1390
1391 dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
1392 if (!intel_dp_init_connector(dig_port, intel_connector))
1393 goto err_init_connector;
1394
1395 return true;
1396
1397 err_init_connector:
1398 drm_encoder_cleanup(encoder);
1399 err_encoder_init:
1400 kfree(intel_connector);
1401 err_connector_alloc:
1402 kfree(dig_port);
1403 return false;
1404 }
1405