1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28#include <linux/i2c.h>
29#include <linux/slab.h>
30#include <linux/export.h>
31#include <linux/notifier.h>
32#include <linux/reboot.h>
33#include <drm/drmP.h>
34#include <drm/drm_atomic_helper.h>
35#include <drm/drm_crtc.h>
36#include <drm/drm_crtc_helper.h>
37#include <drm/drm_edid.h>
38#include "intel_drv.h"
39#include <drm/i915_drm.h>
40#include "i915_drv.h"
41
/* Link status check timeout — presumably in us (10 ms); TODO confirm at
 * the use site, which is outside this chunk. */
#define DP_LINK_CHECK_TIMEOUT	(10 * 1000)


/* Resolution selection encoding (presumably VBT/eDP compliance-related —
 * TODO confirm); shift is currently 0 so values occupy the low bits. */
#define INTEL_DP_RESOLUTION_SHIFT_MASK	0
#define INTEL_DP_RESOLUTION_PREFERRED	(1 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_STANDARD	(2 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_FAILSAFE	(3 << INTEL_DP_RESOLUTION_SHIFT_MASK)
49
/* Pairs a DP link clock with the DPLL divider settings that generate it. */
struct dp_link_dpll {
	int clock;		/* link clock, same units as port_clock */
	struct dpll dpll;	/* divider values to program */
};
54
/* DPLL dividers for the 1.62/2.7 GHz link rates on gen4. */
static const struct dp_link_dpll gen4_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};
61
/* DPLL dividers for the 1.62/2.7 GHz link rates on PCH platforms. */
static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};
68
/* DPLL dividers for the 1.62/2.7 GHz link rates on Valleyview. */
static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};
75
76
77
78
79
/*
 * DPLL dividers for the CHV link rates, including 5.4 GHz.
 * The large m2 values (e.g. 0x819999a) suggest m2 is stored in a
 * fixed-point fractional format here — TODO confirm against the CHV
 * DPLL programming documentation.
 */
static const struct dp_link_dpll chv_dpll[] = {
	{ 162000,
		{ .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,
		{ .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
	{ 540000,
		{ .p1 = 2, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } }
};
93
/*
 * Per-platform source link rates. Every table ends in 540000 (HBR2),
 * so intel_dp_source_rates() can drop the last entry when the source
 * can't do HBR2.
 */
static const int bxt_rates[] = { 162000, 216000, 243000, 270000,
				  324000, 432000, 540000 };
static const int skl_rates[] = { 162000, 216000, 270000,
				  324000, 432000, 540000 };
static const int default_rates[] = { 162000, 270000, 540000 };
99
100
101
102
103
104
105
106
107static bool is_edp(struct intel_dp *intel_dp)
108{
109 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
110
111 return intel_dig_port->base.type == INTEL_OUTPUT_EDP;
112}
113
114static struct drm_device *intel_dp_to_dev(struct intel_dp *intel_dp)
115{
116 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
117
118 return intel_dig_port->base.base.dev;
119}
120
121static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
122{
123 return enc_to_intel_dp(&intel_attached_encoder(connector)->base);
124}
125
126static void intel_dp_link_down(struct intel_dp *intel_dp);
127static bool edp_panel_vdd_on(struct intel_dp *intel_dp);
128static void edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
129static void vlv_init_panel_power_sequencer(struct intel_dp *intel_dp);
130static void vlv_steal_power_sequencer(struct drm_device *dev,
131 enum pipe pipe);
132
/* Mask (within the low 4 bits) of the lanes not used by the link. */
static unsigned int intel_dp_unused_lane_mask(int lane_count)
{
	return (0xf << lane_count) & 0xf;
}
137
138static int
139intel_dp_max_link_bw(struct intel_dp *intel_dp)
140{
141 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
142
143 switch (max_link_bw) {
144 case DP_LINK_BW_1_62:
145 case DP_LINK_BW_2_7:
146 case DP_LINK_BW_5_4:
147 break;
148 default:
149 WARN(1, "invalid max DP link bw val %x, using 1.62Gbps\n",
150 max_link_bw);
151 max_link_bw = DP_LINK_BW_1_62;
152 break;
153 }
154 return max_link_bw;
155}
156
157static u8 intel_dp_max_lane_count(struct intel_dp *intel_dp)
158{
159 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
160 u8 source_max, sink_max;
161
162 source_max = intel_dig_port->max_lanes;
163 sink_max = drm_dp_max_lane_count(intel_dp->dpcd);
164
165 return min(source_max, sink_max);
166}
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
/*
 * Bandwidth needed for a mode: pixel_clock * bpp divided by 10,
 * rounded up (the wire carries 10 bits per 8 data bits).
 */
static int
intel_dp_link_required(int pixel_clock, int bpp)
{
	int total_bits = pixel_clock * bpp;

	return (total_bits + 9) / 10;
}
190
/*
 * Max payload rate of a link: 8 data bits out of every 10 link clock
 * symbols, times the lane count.
 */
static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	int symbol_rate = max_link_clock * max_lanes;

	return symbol_rate * 8 / 10;
}
196
/*
 * Validate a display mode against panel limits, link bandwidth and
 * dotclock constraints for this DP connector.
 */
static enum drm_mode_status
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct drm_display_mode *fixed_mode = intel_connector->panel.fixed_mode;
	int target_clock = mode->clock;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int max_dotclk = to_i915(connector->dev)->max_dotclk_freq;

	/* eDP panels can't exceed the fixed panel mode's resolution, and
	 * the bandwidth check is done against the panel's native clock. */
	if (is_edp(intel_dp) && fixed_mode) {
		if (mode->hdisplay > fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > fixed_mode->vdisplay)
			return MODE_PANEL;

		target_clock = fixed_mode->clock;
	}

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
	/* Bandwidth check uses 18 bpp — presumably the minimum (6 bpc)
	 * the mode could be driven at; TODO confirm. */
	mode_rate = intel_dp_link_required(target_clock, 18);

	if (mode_rate > max_rate || target_clock > max_dotclk)
		return MODE_CLOCK_HIGH;

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	/* Double-clocked modes are not supported on DP. */
	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		return MODE_H_ILLEGAL;

	return MODE_OK;
}
235
/*
 * Pack up to 4 bytes of AUX payload into a 32-bit register value,
 * MSB first (byte 0 ends up in bits 31:24).
 */
uint32_t intel_dp_pack_aux(const uint8_t *src, int src_bytes)
{
	uint32_t value = 0;
	int byte;

	if (src_bytes > 4)
		src_bytes = 4;

	for (byte = 0; byte < src_bytes; byte++)
		value |= (uint32_t)src[byte] << (8 * (3 - byte));

	return value;
}
247
/*
 * Unpack a 32-bit AUX data register value into up to 4 bytes,
 * MSB first (bits 31:24 become byte 0).
 */
static void intel_dp_unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int byte;

	if (dst_bytes > 4)
		dst_bytes = 4;

	for (byte = 0; byte < dst_bytes; byte++)
		dst[byte] = (uint8_t)(src >> (8 * (3 - byte)));
}
256
257static void
258intel_dp_init_panel_power_sequencer(struct drm_device *dev,
259 struct intel_dp *intel_dp);
260static void
261intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
262 struct intel_dp *intel_dp);
263
/*
 * Acquire the panel power sequencer mutex, holding a reference on the
 * port's AUX power domain for the duration so the PPS registers remain
 * accessible. Paired with pps_unlock().
 */
static void pps_lock(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum intel_display_power_domain power_domain;

	/* Take the power reference before the mutex so the hardware is
	 * guaranteed awake for the whole critical section. */
	power_domain = intel_display_port_aux_power_domain(encoder);
	intel_display_power_get(dev_priv, power_domain);

	mutex_lock(&dev_priv->pps_mutex);
}
281
/*
 * Release the PPS mutex and drop the AUX power domain reference taken
 * in pps_lock(), in reverse order of acquisition.
 */
static void pps_unlock(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum intel_display_power_domain power_domain;

	mutex_unlock(&dev_priv->pps_mutex);

	power_domain = intel_display_port_aux_power_domain(encoder);
	intel_display_power_put(dev_priv, power_domain);
}
295
/*
 * "Kick" the power sequencer on VLV/CHV by briefly pulsing the DP port
 * enable bit with a minimal 1-lane configuration on intel_dp->pps_pipe.
 * The port must currently be disabled; the pipe's PLL is forced on for
 * the pulse if it isn't running already, then restored.
 */
static void
vlv_power_sequencer_kick(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum pipe pipe = intel_dp->pps_pipe;
	bool pll_enabled, release_cl_override = false;
	enum dpio_phy phy = DPIO_PHY(pipe);
	enum dpio_channel ch = vlv_pipe_to_channel(pipe);
	uint32_t DP;

	/* Refuse to touch a port that is already enabled. */
	if (WARN(I915_READ(intel_dp->output_reg) & DP_PORT_EN,
		 "skipping pipe %c power seqeuncer kick due to port %c being active\n",
		 pipe_name(pipe), port_name(intel_dig_port->port)))
		return;

	DRM_DEBUG_KMS("kicking pipe %c power sequencer for port %c\n",
		      pipe_name(pipe), port_name(intel_dig_port->port));

	/* Build a minimal port config: 1 lane, lowest vswing/pre-emphasis,
	 * training pattern 1, routed to the PPS pipe. */
	DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
	DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	DP |= DP_PORT_WIDTH(1);
	DP |= DP_LINK_TRAIN_PAT_1;

	if (IS_CHERRYVIEW(dev))
		DP |= DP_PIPE_SELECT_CHV(pipe);
	else if (pipe == PIPE_B)
		DP |= DP_PIPEB_SELECT;

	pll_enabled = I915_READ(DPLL(pipe)) & DPLL_VCO_ENABLE;

	/* The port enable pulse needs a running PLL; force it on
	 * temporarily (on CHV this may also require ungating the
	 * PHY common lane). */
	if (!pll_enabled) {
		release_cl_override = IS_CHERRYVIEW(dev) &&
			!chv_phy_powergate_ch(dev_priv, phy, ch, true);

		if (vlv_force_pll_on(dev, pipe, IS_CHERRYVIEW(dev) ?
				     &chv_dpll[0].dpll : &vlv_dpll[0].dpll)) {
			DRM_ERROR("Failed to force on pll for pipe %c!\n",
				  pipe_name(pipe));
			return;
		}
	}

	/* Write the config, pulse the port enable bit once, and leave the
	 * port disabled again. POSTING_READs order the writes. */
	I915_WRITE(intel_dp->output_reg, DP);
	POSTING_READ(intel_dp->output_reg);

	I915_WRITE(intel_dp->output_reg, DP | DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);

	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);

	/* Undo the temporary PLL/PHY state. */
	if (!pll_enabled) {
		vlv_force_pll_off(dev, pipe);

		if (release_cl_override)
			chv_phy_powergate_ch(dev_priv, phy, ch, false);
	}
}
369
/*
 * Return the pipe whose power sequencer drives this eDP port, assigning
 * one (pipe A or B) if none is assigned yet. A fresh assignment steals
 * the PPS from any other port, reprograms the sequencer registers and
 * kicks the sequencer. Caller must hold pps_mutex.
 */
static enum pipe
vlv_power_sequencer_pipe(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_encoder *encoder;
	unsigned int pipes = (1 << PIPE_A) | (1 << PIPE_B);
	enum pipe pipe;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* Power sequencers are only used for eDP here. */
	WARN_ON(!is_edp(intel_dp));

	if (intel_dp->pps_pipe != INVALID_PIPE)
		return intel_dp->pps_pipe;

	/* Mask out pipes whose sequencers are already claimed by other
	 * eDP ports. */
	for_each_intel_encoder(dev, encoder) {
		struct intel_dp *tmp;

		if (encoder->type != INTEL_OUTPUT_EDP)
			continue;

		tmp = enc_to_intel_dp(&encoder->base);

		if (tmp->pps_pipe != INVALID_PIPE)
			pipes &= ~(1 << tmp->pps_pipe);
	}

	/* Pick the lowest free pipe; fall back to pipe A (with a WARN)
	 * if both are somehow taken. */
	if (WARN_ON(pipes == 0))
		pipe = PIPE_A;
	else
		pipe = ffs(pipes) - 1;

	vlv_steal_power_sequencer(dev, pipe);
	intel_dp->pps_pipe = pipe;

	DRM_DEBUG_KMS("picked pipe %c power sequencer for port %c\n",
		      pipe_name(intel_dp->pps_pipe),
		      port_name(intel_dig_port->port));

	/* Reprogram the freshly-assigned sequencer for this panel. */
	intel_dp_init_panel_power_sequencer(dev, intel_dp);
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);

	/* Pulse the port once so the new sequencer assignment takes
	 * effect (see vlv_power_sequencer_kick()). */
	vlv_power_sequencer_kick(intel_dp);

	return intel_dp->pps_pipe;
}
432
433typedef bool (*vlv_pipe_check)(struct drm_i915_private *dev_priv,
434 enum pipe pipe);
435
436static bool vlv_pipe_has_pp_on(struct drm_i915_private *dev_priv,
437 enum pipe pipe)
438{
439 return I915_READ(VLV_PIPE_PP_STATUS(pipe)) & PP_ON;
440}
441
442static bool vlv_pipe_has_vdd_on(struct drm_i915_private *dev_priv,
443 enum pipe pipe)
444{
445 return I915_READ(VLV_PIPE_PP_CONTROL(pipe)) & EDP_FORCE_VDD;
446}
447
/* Catch-all vlv_pipe_check: accepts any pipe. */
static bool vlv_pipe_any(struct drm_i915_private *dev_priv,
			 enum pipe pipe)
{
	return true;
}
453
454static enum pipe
455vlv_initial_pps_pipe(struct drm_i915_private *dev_priv,
456 enum port port,
457 vlv_pipe_check pipe_check)
458{
459 enum pipe pipe;
460
461 for (pipe = PIPE_A; pipe <= PIPE_B; pipe++) {
462 u32 port_sel = I915_READ(VLV_PIPE_PP_ON_DELAYS(pipe)) &
463 PANEL_PORT_SELECT_MASK;
464
465 if (port_sel != PANEL_PORT_SELECT_VLV(port))
466 continue;
467
468 if (!pipe_check(dev_priv, pipe))
469 continue;
470
471 return pipe;
472 }
473
474 return INVALID_PIPE;
475}
476
/*
 * At init time, try to adopt the power sequencer the BIOS left assigned
 * to this port. Preference order: a sequencer with panel power on, then
 * one with VDD forced on, then any sequencer selecting this port.
 * Caller must hold pps_mutex.
 */
static void
vlv_initial_power_sequencer_setup(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum port port = intel_dig_port->port;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* First pick the sequencer that is actively powering the panel. */
	intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
						  vlv_pipe_has_pp_on);

	if (intel_dp->pps_pipe == INVALID_PIPE)
		intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
							  vlv_pipe_has_vdd_on);

	if (intel_dp->pps_pipe == INVALID_PIPE)
		intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
							  vlv_pipe_any);

	/* No BIOS assignment found; one will be picked lazily by
	 * vlv_power_sequencer_pipe() on first use. */
	if (intel_dp->pps_pipe == INVALID_PIPE) {
		DRM_DEBUG_KMS("no initial power sequencer for port %c\n",
			      port_name(port));
		return;
	}

	DRM_DEBUG_KMS("initial power sequencer for port %c: pipe %c\n",
		      port_name(port), pipe_name(intel_dp->pps_pipe));

	intel_dp_init_panel_power_sequencer(dev, intel_dp);
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);
}
513
/*
 * Invalidate the cached PPS pipe of every eDP encoder so the next user
 * re-discovers/re-assigns a power sequencer. VLV/CHV only.
 */
void vlv_power_sequencer_reset(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = dev_priv->dev;
	struct intel_encoder *encoder;

	if (WARN_ON(!IS_VALLEYVIEW(dev) && !IS_CHERRYVIEW(dev)))
		return;

	/* Only eDP encoders track a pps_pipe assignment. */
	for_each_intel_encoder(dev, encoder) {
		struct intel_dp *intel_dp;

		if (encoder->type != INTEL_OUTPUT_EDP)
			continue;

		intel_dp = enc_to_intel_dp(&encoder->base);
		intel_dp->pps_pipe = INVALID_PIPE;
	}
}
542
543static i915_reg_t
544_pp_ctrl_reg(struct intel_dp *intel_dp)
545{
546 struct drm_device *dev = intel_dp_to_dev(intel_dp);
547
548 if (IS_BROXTON(dev))
549 return BXT_PP_CONTROL(0);
550 else if (HAS_PCH_SPLIT(dev))
551 return PCH_PP_CONTROL;
552 else
553 return VLV_PIPE_PP_CONTROL(vlv_power_sequencer_pipe(intel_dp));
554}
555
556static i915_reg_t
557_pp_stat_reg(struct intel_dp *intel_dp)
558{
559 struct drm_device *dev = intel_dp_to_dev(intel_dp);
560
561 if (IS_BROXTON(dev))
562 return BXT_PP_STATUS(0);
563 else if (HAS_PCH_SPLIT(dev))
564 return PCH_PP_STATUS;
565 else
566 return VLV_PIPE_PP_STATUS(vlv_power_sequencer_pipe(intel_dp));
567}
568
569
570
/*
 * Reboot notifier: on VLV/CHV, force the eDP panel power off before a
 * restart and wait out the power cycle delay, so the panel isn't left
 * in a half-sequenced state across the reboot.
 */
static int edp_notify_handler(struct notifier_block *this, unsigned long code,
			      void *unused)
{
	struct intel_dp *intel_dp = container_of(this, typeof(* intel_dp),
						 edp_notifier);
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (!is_edp(intel_dp) || code != SYS_RESTART)
		return 0;

	pps_lock(intel_dp);

	if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
		enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);
		i915_reg_t pp_ctrl_reg, pp_div_reg;
		u32 pp_div;

		pp_ctrl_reg = VLV_PIPE_PP_CONTROL(pipe);
		pp_div_reg  = VLV_PIPE_PP_DIVISOR(pipe);
		pp_div = I915_READ(pp_div_reg);
		pp_div &= PP_REFERENCE_DIVIDER_MASK;

		/* 0x1F in the divisor's low field — presumably the maximum
		 * power-off delay setting; TODO confirm against the PPS
		 * register description. */
		I915_WRITE(pp_div_reg, pp_div | 0x1F);
		I915_WRITE(pp_ctrl_reg, PANEL_UNLOCK_REGS | PANEL_POWER_OFF);
		msleep(intel_dp->panel_power_cycle_delay);
	}

	pps_unlock(intel_dp);

	return 0;
}
604
/*
 * Whether the panel power sequencer reports panel power on.
 * Caller must hold pps_mutex.
 */
static bool edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* On VLV/CHV there is no sequencer register to read until a
	 * PPS pipe has been assigned. */
	if ((IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) &&
	    intel_dp->pps_pipe == INVALID_PIPE)
		return false;

	return (I915_READ(_pp_stat_reg(intel_dp)) & PP_ON) != 0;
}
618
/*
 * Whether VDD is currently forced on for the panel.
 * Caller must hold pps_mutex.
 */
static bool edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* No sequencer assigned yet on VLV/CHV => nothing to read. */
	if ((IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) &&
	    intel_dp->pps_pipe == INVALID_PIPE)
		return false;

	return I915_READ(_pp_ctrl_reg(intel_dp)) & EDP_FORCE_VDD;
}
632
/*
 * Sanity check before AUX traffic on an eDP panel: warn if the panel
 * has neither power nor VDD, since AUX can't work in that state.
 */
static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (!is_edp(intel_dp))
		return;

	if (!edp_have_panel_power(intel_dp) && !edp_have_panel_vdd(intel_dp)) {
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
			      I915_READ(_pp_stat_reg(intel_dp)),
			      I915_READ(_pp_ctrl_reg(intel_dp)));
	}
}
649
/*
 * Wait (up to 10 ms) for the AUX channel to clear its SEND_BUSY bit,
 * either interrupt-driven via the gmbus wait queue or by polling.
 * Returns the last status register value read.
 */
static uint32_t
intel_dp_aux_wait_done(struct intel_dp *intel_dp, bool has_aux_irq)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	i915_reg_t ch_ctl = intel_dp->aux_ch_ctl_reg;
	uint32_t status;
	bool done;

/* C re-reads the status register and is true once SEND_BUSY clears. */
#define C (((status = I915_READ_NOTRACE(ch_ctl)) & DP_AUX_CH_CTL_SEND_BUSY) == 0)
	if (has_aux_irq)
		done = wait_event_timeout(dev_priv->gmbus_wait_queue, C,
					  msecs_to_jiffies_timeout(10));
	else
		done = wait_for_atomic(C, 10) == 0;
	if (!done)
		DRM_ERROR("dp aux hw did not signal timeout (has irq: %i)!\n",
			  has_aux_irq);
#undef C

	return status;
}
673
/*
 * AUX bit-clock divider for gen4-class hardware: derived from the raw
 * core clock. Only one divider value is tried (index 0); returning 0
 * ends the retry loop in intel_dp_aux_ch().
 */
static uint32_t i9xx_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;

	return index ? 0 : DIV_ROUND_CLOSEST(intel_hrawclk(dev), 2);
}
685
/*
 * AUX bit-clock divider for ILK-class hardware. Port A (eDP, on the
 * CPU) derives it from cdclk; the PCH ports derive it from the PCH raw
 * clock. Only one divider value is tried.
 */
static uint32_t ilk_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (index)
		return 0;

	if (intel_dig_port->port == PORT_A) {
		return DIV_ROUND_CLOSEST(dev_priv->cdclk_freq, 2000);

	} else {
		return DIV_ROUND_CLOSEST(intel_pch_rawclk(dev), 2);
	}
}
702
/*
 * AUX bit-clock divider for HSW/BDW. Port A uses cdclk; LPT:H uses two
 * fixed candidate dividers (63, then 72 as a retry fallback — rationale
 * not visible here, presumably a hardware workaround; TODO confirm);
 * everything else uses the PCH raw clock.
 */
static uint32_t hsw_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (intel_dig_port->port == PORT_A) {
		if (index)
			return 0;
		return DIV_ROUND_CLOSEST(dev_priv->cdclk_freq, 2000);
	} else if (HAS_PCH_LPT_H(dev_priv)) {
		switch (index) {
		case 0: return 63;
		case 1: return 72;
		default: return 0;
		}
	} else {
		return index ? 0 : DIV_ROUND_CLOSEST(intel_pch_rawclk(dev), 2);
	}
}
724
725static uint32_t vlv_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
726{
727 return index ? 0 : 100;
728}
729
730static uint32_t skl_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
731{
732
733
734
735
736
737 return index ? 0 : 1;
738}
739
/*
 * Assemble the AUX_CH_CTL value to start a transfer on pre-SKL
 * hardware: busy/done/error bits, timeout, message size, precharge
 * time and the bit-clock divider.
 */
static uint32_t i9xx_get_aux_send_ctl(struct intel_dp *intel_dp,
				      bool has_aux_irq,
				      int send_bytes,
				      uint32_t aux_clock_divider)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	uint32_t precharge, timeout;

	/* Gen6 uses a shorter precharge count than later platforms. */
	if (IS_GEN6(dev))
		precharge = 3;
	else
		precharge = 5;

	/* BDW port A gets a longer (600us) timeout — presumably a hw
	 * requirement; TODO confirm. */
	if (IS_BROADWELL(dev) && intel_dig_port->port == PORT_A)
		timeout = DP_AUX_CH_CTL_TIME_OUT_600us;
	else
		timeout = DP_AUX_CH_CTL_TIME_OUT_400us;

	return DP_AUX_CH_CTL_SEND_BUSY |
	       DP_AUX_CH_CTL_DONE |
	       (has_aux_irq ? DP_AUX_CH_CTL_INTERRUPT : 0) |
	       DP_AUX_CH_CTL_TIME_OUT_ERROR |
	       timeout |
	       DP_AUX_CH_CTL_RECEIVE_ERROR |
	       (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
	       (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
	       (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT);
}
769
/*
 * Assemble the AUX_CH_CTL value for SKL+: no clock divider field
 * (the divider argument is unused), a fixed 1600us timeout and a
 * 32-pulse sync sequence.
 */
static uint32_t skl_get_aux_send_ctl(struct intel_dp *intel_dp,
				     bool has_aux_irq,
				     int send_bytes,
				     uint32_t unused)
{
	return DP_AUX_CH_CTL_SEND_BUSY |
	       DP_AUX_CH_CTL_DONE |
	       (has_aux_irq ? DP_AUX_CH_CTL_INTERRUPT : 0) |
	       DP_AUX_CH_CTL_TIME_OUT_ERROR |
	       DP_AUX_CH_CTL_TIME_OUT_1600us |
	       DP_AUX_CH_CTL_RECEIVE_ERROR |
	       (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
	       DP_AUX_CH_CTL_SYNC_PULSE_SKL(32);
}
784
/*
 * Perform one raw AUX channel transaction: write @send_bytes from
 * @send, then read back up to @recv_size bytes into @recv.
 * Returns the number of bytes received, or a negative errno.
 * Serialized under the PPS lock; on eDP, VDD is forced on for the
 * duration so the AUX channel is powered.
 */
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		const uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	i915_reg_t ch_ctl = intel_dp->aux_ch_ctl_reg;
	uint32_t aux_clock_divider;
	int i, ret, recv_bytes;
	uint32_t status;
	int try, clock = 0;
	bool has_aux_irq = HAS_AUX_IRQ(dev);
	bool vdd;

	pps_lock(intel_dp);

	/* Remember whether we turned VDD on so we can drop exactly our
	 * own reference afterwards. */
	vdd = edp_panel_vdd_on(intel_dp);

	/* Keep CPU wakeup latency minimal for the duration of the
	 * transfer; restored to the default in the out: path. */
	pm_qos_update_request(&dev_priv->pm_qos, 0);

	intel_dp_check_edp(intel_dp);

	/* Wait briefly for any in-flight AUX transaction to finish. */
	for (try = 0; try < 3; try++) {
		status = I915_READ_NOTRACE(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}

	if (try == 3) {
		/* Only WARN once per distinct stuck status value to avoid
		 * spamming the log. */
		static u32 last_status = -1;
		const u32 status = I915_READ(ch_ctl);

		if (status != last_status) {
			WARN(1, "dp_aux_ch not started status 0x%08x\n",
			     status);
			last_status = status;
		}

		ret = -EBUSY;
		goto out;
	}

	/* The hardware only has 5 32-bit data registers => 20 bytes max
	 * in either direction. */
	if (WARN_ON(send_bytes > 20 || recv_size > 20)) {
		ret = -E2BIG;
		goto out;
	}

	/* Try each candidate clock divider in turn (a zero divider ends
	 * the list); retry the transfer up to 5 times per divider. */
	while ((aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, clock++))) {
		u32 send_ctl = intel_dp->get_aux_send_ctl(intel_dp,
							  has_aux_irq,
							  send_bytes,
							  aux_clock_divider);

		for (try = 0; try < 5; try++) {
			/* Load the send payload into the data registers,
			 * 4 bytes per register, MSB first. */
			for (i = 0; i < send_bytes; i += 4)
				I915_WRITE(intel_dp->aux_ch_data_reg[i >> 2],
					   intel_dp_pack_aux(send + i,
							     send_bytes - i));

			/* Kick off the transaction and wait for it. */
			I915_WRITE(ch_ctl, send_ctl);

			status = intel_dp_aux_wait_done(intel_dp, has_aux_irq);

			/* Clear the done bit and any latched errors
			 * before evaluating the result. */
			I915_WRITE(ch_ctl,
				   status |
				   DP_AUX_CH_CTL_DONE |
				   DP_AUX_CH_CTL_TIME_OUT_ERROR |
				   DP_AUX_CH_CTL_RECEIVE_ERROR);

			if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR)
				continue;

			/* A receive error is retried after a short pause —
			 * presumably a hardware quirk where the error bit
			 * latches spuriously; TODO confirm. */
			if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
				usleep_range(400, 500);
				continue;
			}
			if (status & DP_AUX_CH_CTL_DONE)
				goto done;
		}
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		ret = -EBUSY;
		goto out;
	}

done:
	/* A receive error on the final attempt is a real failure. */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		ret = -EIO;
		goto out;
	}

	/* Timeouts are expected when no sink is connected, so only
	 * debug-log them rather than filling the kernel log. */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		ret = -ETIMEDOUT;
		goto out;
	}

	/* Number of bytes the hardware received back from the sink. */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);

	/* Message sizes of 0 or >20 are invalid; return -EBUSY so the
	 * drm core retries the transaction, after giving the sink a
	 * short breather. */
	if (recv_bytes == 0 || recv_bytes > 20) {
		DRM_DEBUG_KMS("Forbidden recv_bytes = %d on aux transaction\n",
			      recv_bytes);
		usleep_range(1000, 1500);
		ret = -EBUSY;
		goto out;
	}

	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	/* Unload the reply from the data registers into @recv. */
	for (i = 0; i < recv_bytes; i += 4)
		intel_dp_unpack_aux(I915_READ(intel_dp->aux_ch_data_reg[i >> 2]),
				    recv + i, recv_bytes - i);

	ret = recv_bytes;
out:
	pm_qos_update_request(&dev_priv->pm_qos, PM_QOS_DEFAULT_VALUE);

	if (vdd)
		edp_panel_vdd_off(intel_dp, false);

	pps_unlock(intel_dp);

	return ret;
}
956
/* AUX request header: 3 address bytes, plus 1 length byte when there
 * is a payload. */
#define BARE_ADDRESS_SIZE	3
#define HEADER_SIZE		(BARE_ADDRESS_SIZE + 1)
/*
 * drm_dp_aux transfer hook: builds the AUX request header, runs the raw
 * transfer via intel_dp_aux_ch() and decodes the reply into @msg.
 * Returns the number of payload bytes transferred or a negative errno.
 */
static ssize_t
intel_dp_aux_transfer(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
{
	struct intel_dp *intel_dp = container_of(aux, struct intel_dp, aux);
	uint8_t txbuf[20], rxbuf[20];
	size_t txsize, rxsize;
	int ret;

	/* Header layout: 4 bits of request type, 20 bits of address,
	 * then (size - 1). */
	txbuf[0] = (msg->request << 4) |
		((msg->address >> 16) & 0xf);
	txbuf[1] = (msg->address >> 8) & 0xff;
	txbuf[2] = msg->address & 0xff;
	txbuf[3] = msg->size - 1;

	switch (msg->request & ~DP_AUX_I2C_MOT) {
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
		/* Zero-length messages use the bare 3-byte address form. */
		txsize = msg->size ? HEADER_SIZE + msg->size : BARE_ADDRESS_SIZE;
		rxsize = 2; /* reply byte plus optional partial-write count */

		if (WARN_ON(txsize > 20))
			return -E2BIG;

		if (msg->buffer)
			memcpy(txbuf + HEADER_SIZE, msg->buffer, msg->size);
		else
			WARN_ON(msg->size);

		ret = intel_dp_aux_ch(intel_dp, txbuf, txsize, rxbuf, rxsize);
		if (ret > 0) {
			/* Reply type is in the high nibble of byte 0. */
			msg->reply = rxbuf[0] >> 4;

			if (ret > 1) {
				/* Sink reported how many bytes it actually
				 * accepted; clamp to the request size. */
				ret = clamp_t(int, rxbuf[1], 0, msg->size);
			} else {
				/* Full write accepted. */
				ret = msg->size;
			}
		}
		break;

	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ:
		txsize = msg->size ? HEADER_SIZE : BARE_ADDRESS_SIZE;
		rxsize = msg->size + 1; /* +1 for the reply byte */

		if (WARN_ON(rxsize > 20))
			return -E2BIG;

		ret = intel_dp_aux_ch(intel_dp, txbuf, txsize, rxbuf, rxsize);
		if (ret > 0) {
			msg->reply = rxbuf[0] >> 4;

			/* Strip the reply byte from the count; the sink may
			 * return fewer payload bytes than requested. */
			ret--;
			memcpy(msg->buffer, rxbuf + 1, ret);
		}
		break;

	default:
		ret = -EINVAL;
		break;
	}

	return ret;
}
1031
1032static i915_reg_t g4x_aux_ctl_reg(struct drm_i915_private *dev_priv,
1033 enum port port)
1034{
1035 switch (port) {
1036 case PORT_B:
1037 case PORT_C:
1038 case PORT_D:
1039 return DP_AUX_CH_CTL(port);
1040 default:
1041 MISSING_CASE(port);
1042 return DP_AUX_CH_CTL(PORT_B);
1043 }
1044}
1045
1046static i915_reg_t g4x_aux_data_reg(struct drm_i915_private *dev_priv,
1047 enum port port, int index)
1048{
1049 switch (port) {
1050 case PORT_B:
1051 case PORT_C:
1052 case PORT_D:
1053 return DP_AUX_CH_DATA(port, index);
1054 default:
1055 MISSING_CASE(port);
1056 return DP_AUX_CH_DATA(PORT_B, index);
1057 }
1058}
1059
1060static i915_reg_t ilk_aux_ctl_reg(struct drm_i915_private *dev_priv,
1061 enum port port)
1062{
1063 switch (port) {
1064 case PORT_A:
1065 return DP_AUX_CH_CTL(port);
1066 case PORT_B:
1067 case PORT_C:
1068 case PORT_D:
1069 return PCH_DP_AUX_CH_CTL(port);
1070 default:
1071 MISSING_CASE(port);
1072 return DP_AUX_CH_CTL(PORT_A);
1073 }
1074}
1075
1076static i915_reg_t ilk_aux_data_reg(struct drm_i915_private *dev_priv,
1077 enum port port, int index)
1078{
1079 switch (port) {
1080 case PORT_A:
1081 return DP_AUX_CH_DATA(port, index);
1082 case PORT_B:
1083 case PORT_C:
1084 case PORT_D:
1085 return PCH_DP_AUX_CH_DATA(port, index);
1086 default:
1087 MISSING_CASE(port);
1088 return DP_AUX_CH_DATA(PORT_A, index);
1089 }
1090}
1091
1092
1093
1094
1095
/*
 * Port E has no AUX channel of its own; the VBT tells us which other
 * port's AUX channel it borrows. Unexpected VBT values fall back to
 * port A with a MISSING_CASE.
 */
static enum port skl_porte_aux_port(struct drm_i915_private *dev_priv)
{
	const struct ddi_vbt_port_info *info =
		&dev_priv->vbt.ddi_port_info[PORT_E];

	switch (info->alternate_aux_channel) {
	case DP_AUX_A:
		return PORT_A;
	case DP_AUX_B:
		return PORT_B;
	case DP_AUX_C:
		return PORT_C;
	case DP_AUX_D:
		return PORT_D;
	default:
		MISSING_CASE(info->alternate_aux_channel);
		return PORT_A;
	}
}
1115
1116static i915_reg_t skl_aux_ctl_reg(struct drm_i915_private *dev_priv,
1117 enum port port)
1118{
1119 if (port == PORT_E)
1120 port = skl_porte_aux_port(dev_priv);
1121
1122 switch (port) {
1123 case PORT_A:
1124 case PORT_B:
1125 case PORT_C:
1126 case PORT_D:
1127 return DP_AUX_CH_CTL(port);
1128 default:
1129 MISSING_CASE(port);
1130 return DP_AUX_CH_CTL(PORT_A);
1131 }
1132}
1133
1134static i915_reg_t skl_aux_data_reg(struct drm_i915_private *dev_priv,
1135 enum port port, int index)
1136{
1137 if (port == PORT_E)
1138 port = skl_porte_aux_port(dev_priv);
1139
1140 switch (port) {
1141 case PORT_A:
1142 case PORT_B:
1143 case PORT_C:
1144 case PORT_D:
1145 return DP_AUX_CH_DATA(port, index);
1146 default:
1147 MISSING_CASE(port);
1148 return DP_AUX_CH_DATA(PORT_A, index);
1149 }
1150}
1151
1152static i915_reg_t intel_aux_ctl_reg(struct drm_i915_private *dev_priv,
1153 enum port port)
1154{
1155 if (INTEL_INFO(dev_priv)->gen >= 9)
1156 return skl_aux_ctl_reg(dev_priv, port);
1157 else if (HAS_PCH_SPLIT(dev_priv))
1158 return ilk_aux_ctl_reg(dev_priv, port);
1159 else
1160 return g4x_aux_ctl_reg(dev_priv, port);
1161}
1162
1163static i915_reg_t intel_aux_data_reg(struct drm_i915_private *dev_priv,
1164 enum port port, int index)
1165{
1166 if (INTEL_INFO(dev_priv)->gen >= 9)
1167 return skl_aux_data_reg(dev_priv, port, index);
1168 else if (HAS_PCH_SPLIT(dev_priv))
1169 return ilk_aux_data_reg(dev_priv, port, index);
1170 else
1171 return g4x_aux_data_reg(dev_priv, port, index);
1172}
1173
1174static void intel_aux_reg_init(struct intel_dp *intel_dp)
1175{
1176 struct drm_i915_private *dev_priv = to_i915(intel_dp_to_dev(intel_dp));
1177 enum port port = dp_to_dig_port(intel_dp)->port;
1178 int i;
1179
1180 intel_dp->aux_ch_ctl_reg = intel_aux_ctl_reg(dev_priv, port);
1181 for (i = 0; i < ARRAY_SIZE(intel_dp->aux_ch_data_reg); i++)
1182 intel_dp->aux_ch_data_reg[i] = intel_aux_data_reg(dev_priv, port, i);
1183}
1184
/*
 * Tear down the AUX channel: unregister it from the drm core, then free
 * the name string allocated in intel_dp_aux_init().
 */
static void
intel_dp_aux_fini(struct intel_dp *intel_dp)
{
	drm_dp_aux_unregister(&intel_dp->aux);
	kfree(intel_dp->aux.name);
}
1191
1192static int
1193intel_dp_aux_init(struct intel_dp *intel_dp, struct intel_connector *connector)
1194{
1195 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1196 enum port port = intel_dig_port->port;
1197 int ret;
1198
1199 intel_aux_reg_init(intel_dp);
1200
1201 intel_dp->aux.name = kasprintf(GFP_KERNEL, "DPDDC-%c", port_name(port));
1202 if (!intel_dp->aux.name)
1203 return -ENOMEM;
1204
1205 intel_dp->aux.dev = connector->base.kdev;
1206 intel_dp->aux.transfer = intel_dp_aux_transfer;
1207
1208 DRM_DEBUG_KMS("registering %s bus for %s\n",
1209 intel_dp->aux.name,
1210 connector->base.kdev->kobj.name);
1211
1212 ret = drm_dp_aux_register(&intel_dp->aux);
1213 if (ret < 0) {
1214 DRM_ERROR("drm_dp_aux_register() for %s failed (%d)\n",
1215 intel_dp->aux.name, ret);
1216 kfree(intel_dp->aux.name);
1217 return ret;
1218 }
1219
1220 return 0;
1221}
1222
/*
 * Connector unregister hook: tear down the AUX channel before the
 * generic connector unregistration.
 */
static void
intel_dp_connector_unregister(struct intel_connector *intel_connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(&intel_connector->base);

	intel_dp_aux_fini(intel_dp);
	intel_connector_unregister(intel_connector);
}
1231
/*
 * Program the pipe config to drive eDP off DPLL0 on SKL, selecting the
 * DPLL0 link rate matching pipe_config->port_clock. The cases map
 * half the port clock (kHz) to the corresponding LINK_RATE encoding,
 * e.g. 81000 -> 1.62 GHz.
 */
static void
skl_edp_set_pll_config(struct intel_crtc_state *pipe_config)
{
	u32 ctrl1;

	memset(&pipe_config->dpll_hw_state, 0,
	       sizeof(pipe_config->dpll_hw_state));

	pipe_config->ddi_pll_sel = SKL_DPLL0;
	pipe_config->dpll_hw_state.cfgcr1 = 0;
	pipe_config->dpll_hw_state.cfgcr2 = 0;

	ctrl1 = DPLL_CTRL1_OVERRIDE(SKL_DPLL0);
	switch (pipe_config->port_clock / 2) {
	case 81000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810,
					      SKL_DPLL0);
		break;
	case 135000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350,
					      SKL_DPLL0);
		break;
	case 270000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700,
					      SKL_DPLL0);
		break;
	case 162000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620,
					      SKL_DPLL0);
		break;
	/* The 1080/2160 encodings cover the intermediate rates in
	 * skl_rates (216000/432000) — presumably eDP 1.4 intermediate
	 * link rates; TODO confirm. */
	case 108000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080,
					      SKL_DPLL0);
		break;
	case 216000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160,
					      SKL_DPLL0);
		break;
	/* NOTE(review): no default case — an unlisted port_clock leaves
	 * ctrl1 with just the OVERRIDE bit set; presumably callers only
	 * pass rates from the tables above. */
	}
	pipe_config->dpll_hw_state.ctrl1 = ctrl1;
}
1277
/*
 * Select the LCPLL output matching pipe_config->port_clock on HSW/BDW.
 * NOTE(review): there is no default case — an unsupported port clock
 * leaves ddi_pll_sel untouched; presumably callers only pass the three
 * LCPLL-supported rates. Confirm before relying on this elsewhere.
 */
void
hsw_dp_set_ddi_pll_sel(struct intel_crtc_state *pipe_config)
{
	memset(&pipe_config->dpll_hw_state, 0,
	       sizeof(pipe_config->dpll_hw_state));

	switch (pipe_config->port_clock / 2) {
	case 81000:
		pipe_config->ddi_pll_sel = PORT_CLK_SEL_LCPLL_810;
		break;
	case 135000:
		pipe_config->ddi_pll_sel = PORT_CLK_SEL_LCPLL_1350;
		break;
	case 270000:
		pipe_config->ddi_pll_sel = PORT_CLK_SEL_LCPLL_2700;
		break;
	}
}
1296
1297static int
1298intel_dp_sink_rates(struct intel_dp *intel_dp, const int **sink_rates)
1299{
1300 if (intel_dp->num_sink_rates) {
1301 *sink_rates = intel_dp->sink_rates;
1302 return intel_dp->num_sink_rates;
1303 }
1304
1305 *sink_rates = default_rates;
1306
1307 return (intel_dp_max_link_bw(intel_dp) >> 3) + 1;
1308}
1309
1310bool intel_dp_source_supports_hbr2(struct intel_dp *intel_dp)
1311{
1312 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1313 struct drm_device *dev = dig_port->base.base.dev;
1314
1315
1316 if (IS_SKL_REVID(dev, 0, SKL_REVID_B0))
1317 return false;
1318
1319 if ((IS_HASWELL(dev) && !IS_HSW_ULX(dev)) || IS_BROADWELL(dev) ||
1320 (INTEL_INFO(dev)->gen >= 9))
1321 return true;
1322 else
1323 return false;
1324}
1325
1326static int
1327intel_dp_source_rates(struct intel_dp *intel_dp, const int **source_rates)
1328{
1329 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1330 struct drm_device *dev = dig_port->base.base.dev;
1331 int size;
1332
1333 if (IS_BROXTON(dev)) {
1334 *source_rates = bxt_rates;
1335 size = ARRAY_SIZE(bxt_rates);
1336 } else if (IS_SKYLAKE(dev) || IS_KABYLAKE(dev)) {
1337 *source_rates = skl_rates;
1338 size = ARRAY_SIZE(skl_rates);
1339 } else {
1340 *source_rates = default_rates;
1341 size = ARRAY_SIZE(default_rates);
1342 }
1343
1344
1345 if (!intel_dp_source_supports_hbr2(intel_dp))
1346 size--;
1347
1348 return size;
1349}
1350
1351static void
1352intel_dp_set_clock(struct intel_encoder *encoder,
1353 struct intel_crtc_state *pipe_config)
1354{
1355 struct drm_device *dev = encoder->base.dev;
1356 const struct dp_link_dpll *divisor = NULL;
1357 int i, count = 0;
1358
1359 if (IS_G4X(dev)) {
1360 divisor = gen4_dpll;
1361 count = ARRAY_SIZE(gen4_dpll);
1362 } else if (HAS_PCH_SPLIT(dev)) {
1363 divisor = pch_dpll;
1364 count = ARRAY_SIZE(pch_dpll);
1365 } else if (IS_CHERRYVIEW(dev)) {
1366 divisor = chv_dpll;
1367 count = ARRAY_SIZE(chv_dpll);
1368 } else if (IS_VALLEYVIEW(dev)) {
1369 divisor = vlv_dpll;
1370 count = ARRAY_SIZE(vlv_dpll);
1371 }
1372
1373 if (divisor && count) {
1374 for (i = 0; i < count; i++) {
1375 if (pipe_config->port_clock == divisor[i].clock) {
1376 pipe_config->dpll = divisor[i].dpll;
1377 pipe_config->clock_set = true;
1378 break;
1379 }
1380 }
1381 }
1382}
1383
1384static int intersect_rates(const int *source_rates, int source_len,
1385 const int *sink_rates, int sink_len,
1386 int *common_rates)
1387{
1388 int i = 0, j = 0, k = 0;
1389
1390 while (i < source_len && j < sink_len) {
1391 if (source_rates[i] == sink_rates[j]) {
1392 if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
1393 return k;
1394 common_rates[k] = source_rates[i];
1395 ++k;
1396 ++i;
1397 ++j;
1398 } else if (source_rates[i] < sink_rates[j]) {
1399 ++i;
1400 } else {
1401 ++j;
1402 }
1403 }
1404 return k;
1405}
1406
/*
 * Fill @common_rates with the link rates supported by both source and
 * sink; returns the number of entries written.
 */
static int intel_dp_common_rates(struct intel_dp *intel_dp,
				 int *common_rates)
{
	const int *src_rates, *snk_rates;
	int src_len, snk_len;

	snk_len = intel_dp_sink_rates(intel_dp, &snk_rates);
	src_len = intel_dp_source_rates(intel_dp, &src_rates);

	return intersect_rates(src_rates, src_len,
			       snk_rates, snk_len,
			       common_rates);
}
1420
/*
 * Format @nelem integers from @array into @str as a comma-separated
 * list, e.g. "162000, 270000, 540000".  Output is truncated silently
 * (but always NUL-terminated) when @len is too small.
 */
static void snprintf_int_array(char *str, size_t len,
			       const int *array, int nelem)
{
	int i;

	str[0] = '\0';

	for (i = 0; i < nelem; i++) {
		int r = snprintf(str, len, "%s%d", i ? ", " : "", array[i]);
		/*
		 * Stop on an output error (r < 0) or truncation.  The
		 * explicit cast avoids the signed/unsigned comparison
		 * between int and size_t that the original relied on
		 * to (accidentally) catch the error case.
		 */
		if (r < 0 || (size_t)r >= len)
			return;
		str += r;
		len -= r;
	}
}
1436
/*
 * Debug helper: log the source, sink and common link-rate tables.
 * Does nothing unless KMS debugging output is enabled.
 */
static void intel_dp_print_rates(struct intel_dp *intel_dp)
{
	const int *source_rates, *sink_rates;
	int source_len, sink_len, common_len;
	int common_rates[DP_MAX_SUPPORTED_RATES];
	char str[128];

	/* Skip the formatting work entirely when nobody will see it. */
	if ((drm_debug & DRM_UT_KMS) == 0)
		return;

	source_len = intel_dp_source_rates(intel_dp, &source_rates);
	snprintf_int_array(str, sizeof(str), source_rates, source_len);
	DRM_DEBUG_KMS("source rates: %s\n", str);

	sink_len = intel_dp_sink_rates(intel_dp, &sink_rates);
	snprintf_int_array(str, sizeof(str), sink_rates, sink_len);
	DRM_DEBUG_KMS("sink rates: %s\n", str);

	common_len = intel_dp_common_rates(intel_dp, common_rates);
	snprintf_int_array(str, sizeof(str), common_rates, common_len);
	DRM_DEBUG_KMS("common rates: %s\n", str);
}
1459
1460static int rate_to_index(int find, const int *rates)
1461{
1462 int i = 0;
1463
1464 for (i = 0; i < DP_MAX_SUPPORTED_RATES; ++i)
1465 if (find == rates[i])
1466 break;
1467
1468 return i;
1469}
1470
/*
 * Return the highest link rate supported by both source and sink.
 * Falls back to RBR (162000 kHz) if the common-rate computation
 * unexpectedly yields nothing.
 */
int
intel_dp_max_link_rate(struct intel_dp *intel_dp)
{
	/* Zero-initialized so unused tail entries are 0. */
	int rates[DP_MAX_SUPPORTED_RATES] = {};
	int len;

	len = intel_dp_common_rates(intel_dp, rates);
	if (WARN_ON(len <= 0))
		return 162000;

	/*
	 * rate_to_index(0, rates) finds the first zero slot, i.e. the
	 * number of valid entries; the entry just before it is the
	 * largest rate since the table is sorted ascending.
	 */
	return rates[rate_to_index(0, rates) - 1];
}
1483
/*
 * Translate a link rate into the sink's DP_LINK_RATE_SET index.  Only
 * meaningful for sinks that advertise an explicit rate table
 * (intel_dp->num_sink_rates != 0).
 */
int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
{
	return rate_to_index(rate, intel_dp->sink_rates);
}
1488
1489void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
1490 uint8_t *link_bw, uint8_t *rate_select)
1491{
1492 if (intel_dp->num_sink_rates) {
1493 *link_bw = 0;
1494 *rate_select =
1495 intel_dp_rate_select(intel_dp, port_clock);
1496 } else {
1497 *link_bw = drm_dp_link_rate_to_bw_code(port_clock);
1498 *rate_select = 0;
1499 }
1500}
1501
1502bool
1503intel_dp_compute_config(struct intel_encoder *encoder,
1504 struct intel_crtc_state *pipe_config)
1505{
1506 struct drm_device *dev = encoder->base.dev;
1507 struct drm_i915_private *dev_priv = dev->dev_private;
1508 struct drm_display_mode *adjusted_mode = &pipe_config->base.adjusted_mode;
1509 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1510 enum port port = dp_to_dig_port(intel_dp)->port;
1511 struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->base.crtc);
1512 struct intel_connector *intel_connector = intel_dp->attached_connector;
1513 int lane_count, clock;
1514 int min_lane_count = 1;
1515 int max_lane_count = intel_dp_max_lane_count(intel_dp);
1516
1517 int min_clock = 0;
1518 int max_clock;
1519 int bpp, mode_rate;
1520 int link_avail, link_clock;
1521 int common_rates[DP_MAX_SUPPORTED_RATES] = {};
1522 int common_len;
1523 uint8_t link_bw, rate_select;
1524
1525 common_len = intel_dp_common_rates(intel_dp, common_rates);
1526
1527
1528 WARN_ON(common_len <= 0);
1529
1530 max_clock = common_len - 1;
1531
1532 if (HAS_PCH_SPLIT(dev) && !HAS_DDI(dev) && port != PORT_A)
1533 pipe_config->has_pch_encoder = true;
1534
1535 pipe_config->has_dp_encoder = true;
1536 pipe_config->has_drrs = false;
1537 pipe_config->has_audio = intel_dp->has_audio && port != PORT_A;
1538
1539 if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
1540 intel_fixed_panel_mode(intel_connector->panel.fixed_mode,
1541 adjusted_mode);
1542
1543 if (INTEL_INFO(dev)->gen >= 9) {
1544 int ret;
1545 ret = skl_update_scaler_crtc(pipe_config);
1546 if (ret)
1547 return ret;
1548 }
1549
1550 if (HAS_GMCH_DISPLAY(dev))
1551 intel_gmch_panel_fitting(intel_crtc, pipe_config,
1552 intel_connector->panel.fitting_mode);
1553 else
1554 intel_pch_panel_fitting(intel_crtc, pipe_config,
1555 intel_connector->panel.fitting_mode);
1556 }
1557
1558 if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
1559 return false;
1560
1561 DRM_DEBUG_KMS("DP link computation with max lane count %i "
1562 "max bw %d pixel clock %iKHz\n",
1563 max_lane_count, common_rates[max_clock],
1564 adjusted_mode->crtc_clock);
1565
1566
1567
1568 bpp = pipe_config->pipe_bpp;
1569 if (is_edp(intel_dp)) {
1570
1571
1572 if (intel_connector->base.display_info.bpc == 0 &&
1573 (dev_priv->vbt.edp_bpp && dev_priv->vbt.edp_bpp < bpp)) {
1574 DRM_DEBUG_KMS("clamping bpp for eDP panel to BIOS-provided %i\n",
1575 dev_priv->vbt.edp_bpp);
1576 bpp = dev_priv->vbt.edp_bpp;
1577 }
1578
1579
1580
1581
1582
1583
1584
1585
1586 min_lane_count = max_lane_count;
1587 min_clock = max_clock;
1588 }
1589
1590 for (; bpp >= 6*3; bpp -= 2*3) {
1591 mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
1592 bpp);
1593
1594 for (clock = min_clock; clock <= max_clock; clock++) {
1595 for (lane_count = min_lane_count;
1596 lane_count <= max_lane_count;
1597 lane_count <<= 1) {
1598
1599 link_clock = common_rates[clock];
1600 link_avail = intel_dp_max_data_rate(link_clock,
1601 lane_count);
1602
1603 if (mode_rate <= link_avail) {
1604 goto found;
1605 }
1606 }
1607 }
1608 }
1609
1610 return false;
1611
1612found:
1613 if (intel_dp->color_range_auto) {
1614
1615
1616
1617
1618
1619 pipe_config->limited_color_range =
1620 bpp != 18 && drm_match_cea_mode(adjusted_mode) > 1;
1621 } else {
1622 pipe_config->limited_color_range =
1623 intel_dp->limited_color_range;
1624 }
1625
1626 pipe_config->lane_count = lane_count;
1627
1628 pipe_config->pipe_bpp = bpp;
1629 pipe_config->port_clock = common_rates[clock];
1630
1631 intel_dp_compute_rate(intel_dp, pipe_config->port_clock,
1632 &link_bw, &rate_select);
1633
1634 DRM_DEBUG_KMS("DP link bw %02x rate select %02x lane count %d clock %d bpp %d\n",
1635 link_bw, rate_select, pipe_config->lane_count,
1636 pipe_config->port_clock, bpp);
1637 DRM_DEBUG_KMS("DP link bw required %i available %i\n",
1638 mode_rate, link_avail);
1639
1640 intel_link_compute_m_n(bpp, lane_count,
1641 adjusted_mode->crtc_clock,
1642 pipe_config->port_clock,
1643 &pipe_config->dp_m_n);
1644
1645 if (intel_connector->panel.downclock_mode != NULL &&
1646 dev_priv->drrs.type == SEAMLESS_DRRS_SUPPORT) {
1647 pipe_config->has_drrs = true;
1648 intel_link_compute_m_n(bpp, lane_count,
1649 intel_connector->panel.downclock_mode->clock,
1650 pipe_config->port_clock,
1651 &pipe_config->dp_m2_n2);
1652 }
1653
1654 if ((IS_SKYLAKE(dev) || IS_KABYLAKE(dev)) && is_edp(intel_dp))
1655 skl_edp_set_pll_config(pipe_config);
1656 else if (IS_BROXTON(dev))
1657 ;
1658 else if (IS_HASWELL(dev) || IS_BROADWELL(dev))
1659 hsw_dp_set_ddi_pll_sel(pipe_config);
1660 else
1661 intel_dp_set_clock(encoder, pipe_config);
1662
1663 return true;
1664}
1665
/*
 * Cache the negotiated link rate and lane count from the crtc state so
 * link training and retraining use the committed values.
 */
void intel_dp_set_link_params(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *pipe_config)
{
	intel_dp->link_rate = pipe_config->port_clock;
	intel_dp->lane_count = pipe_config->lane_count;
}
1672
/*
 * Build the value of the DP port register in intel_dp->DP (without
 * enabling the port).  The layout of the sync-polarity, link-training
 * and pipe-select fields differs between gen7 port A, CPT PCH ports
 * (where some of it lives in TRANS_DP_CTL instead) and the
 * GMCH/VLV/CHV path, hence the three cases below.
 */
static void intel_dp_prepare(struct intel_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	enum port port = dp_to_dig_port(intel_dp)->port;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
	const struct drm_display_mode *adjusted_mode = &crtc->config->base.adjusted_mode;

	intel_dp_set_link_params(intel_dp, crtc->config);

	/* Keep only the DETECTED bit from the current register value. */
	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;

	/* Start link training from minimum voltage swing / pre-emphasis. */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(crtc->config->lane_count);

	if (IS_GEN7(dev) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= crtc->pipe << 29;
	} else if (HAS_PCH_CPT(dev) && port != PORT_A) {
		/*
		 * On CPT, sync polarity and enhanced framing live in the
		 * transcoder's TRANS_DP_CTL register instead.
		 */
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = I915_READ(TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		I915_WRITE(TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (!HAS_PCH_SPLIT(dev) && !IS_VALLEYVIEW(dev) &&
		    !IS_CHERRYVIEW(dev) && crtc->config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev))
			intel_dp->DP |= DP_PIPE_SELECT_CHV(crtc->pipe);
		else if (crtc->pipe == PIPE_B)
			intel_dp->DP |= DP_PIPEB_SELECT;
	}
}
1754
/*
 * PP_STATUS mask/value pairs describing the panel power sequencer
 * states polled for by wait_panel_status() below.
 */
#define IDLE_ON_MASK (PP_ON | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE (PP_ON | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)

#define IDLE_OFF_MASK (PP_ON | PP_SEQUENCE_MASK | 0 | 0)
#define IDLE_OFF_VALUE (0 | PP_SEQUENCE_NONE | 0 | 0)

#define IDLE_CYCLE_MASK (PP_ON | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE (0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
1763
/*
 * Poll the panel power status register until (status & mask) == value,
 * giving up with an error after 5 seconds.  Caller must hold pps_mutex.
 */
static void wait_panel_status(struct intel_dp *intel_dp,
				       u32 mask,
				       u32 value)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	i915_reg_t pp_stat_reg, pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	pp_stat_reg = _pp_stat_reg(intel_dp);
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
			mask, value,
			I915_READ(pp_stat_reg),
			I915_READ(pp_ctrl_reg));

	/* Poll every 10ms, up to a 5000ms timeout. */
	if (_wait_for((I915_READ(pp_stat_reg) & mask) == value, 5000, 10)) {
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
				I915_READ(pp_stat_reg),
				I915_READ(pp_ctrl_reg));
	}

	DRM_DEBUG_KMS("Wait complete\n");
}
1790
/* Wait for the PPS to report the panel fully powered on and idle. */
static void wait_panel_on(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power on\n");
	wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}

/* Wait for the PPS to report the panel fully powered off. */
static void wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}
1802
/*
 * Enforce the panel's minimum power-cycle delay: if the panel was
 * turned off less than panel_power_cycle_delay ms ago, sleep for the
 * remainder before waiting for the PPS off/idle state.
 */
static void wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	ktime_t panel_power_on_time;
	s64 panel_power_off_duration;

	DRM_DEBUG_KMS("Wait for panel power cycle\n");

	/*
	 * Measured against boottime so time spent in suspend counts
	 * toward the power-cycle delay.
	 */
	panel_power_on_time = ktime_get_boottime();
	panel_power_off_duration = ktime_ms_delta(panel_power_on_time, intel_dp->panel_power_off_time);

	/* Sleep only for whatever portion of the delay is still owed. */
	if (panel_power_off_duration < (s64)intel_dp->panel_power_cycle_delay)
		wait_remaining_ms_from_jiffies(jiffies,
				       intel_dp->panel_power_cycle_delay - panel_power_off_duration);

	wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}
1823
/* Honor the panel's power-on -> backlight-on delay. */
static void wait_backlight_on(struct intel_dp *intel_dp)
{
	wait_remaining_ms_from_jiffies(intel_dp->last_power_on,
				       intel_dp->backlight_on_delay);
}

/* Honor the panel's backlight-off -> power-down delay. */
static void edp_wait_backlight_off(struct intel_dp *intel_dp)
{
	wait_remaining_ms_from_jiffies(intel_dp->last_backlight_off,
				       intel_dp->backlight_off_delay);
}
1835
1836
1837
1838
1839
/*
 * Read the panel power control register, forcing the register-unlock
 * key into the returned value so subsequent writes stick.  BXT has no
 * unlock key field.  Caller must hold pps_mutex.
 */
static  u32 ironlake_get_pp_control(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 control;

	lockdep_assert_held(&dev_priv->pps_mutex);

	control = I915_READ(_pp_ctrl_reg(intel_dp));
	if (!IS_BROXTON(dev)) {
		control &= ~PANEL_UNLOCK_MASK;
		control |= PANEL_UNLOCK_REGS;
	}
	return control;
}
1855
1856
1857
1858
1859
1860
/*
 * Force the panel's VDD (AUX power) on so DPCD/EDID reads work without
 * a full panel power-up.  Must be paired with a matching VDD off call.
 * Returns true when the caller is responsible for turning VDD back off
 * (i.e. VDD was not already requested).  Caller must hold pps_mutex.
 */
static bool edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum intel_display_power_domain power_domain;
	u32 pp;
	i915_reg_t pp_stat_reg, pp_ctrl_reg;
	/* Sampled before we set want_panel_vdd below. */
	bool need_to_disable = !intel_dp->want_panel_vdd;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return false;

	/* Cancel any pending deferred VDD-off while VDD is wanted. */
	cancel_delayed_work(&intel_dp->panel_vdd_work);
	intel_dp->want_panel_vdd = true;

	if (edp_have_panel_vdd(intel_dp))
		return need_to_disable;

	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_get(dev_priv, power_domain);

	DRM_DEBUG_KMS("Turning eDP port %c VDD on\n",
		      port_name(intel_dig_port->port));

	if (!edp_have_panel_power(intel_dp))
		wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(intel_dp);
	pp |= EDP_FORCE_VDD;

	pp_stat_reg = _pp_stat_reg(intel_dp);
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);
	DRM_DEBUG_KMS("PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
			I915_READ(pp_stat_reg), I915_READ(pp_ctrl_reg));

	/*
	 * If the panel wasn't on, give VDD the full power-up delay to
	 * settle before the first AUX transaction.
	 */
	if (!edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP port %c panel power wasn't enabled\n",
			      port_name(intel_dig_port->port));
		msleep(intel_dp->panel_power_up_delay);
	}

	return need_to_disable;
}
1913
1914
1915
1916
1917
1918
1919
1920
/*
 * Locked wrapper around edp_panel_vdd_on().  Warns if VDD was already
 * requested, since callers of this variant are expected to be the sole
 * owner of the VDD reference they take.  No-op on non-eDP.
 */
void intel_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	bool vdd;

	if (!is_edp(intel_dp))
		return;

	pps_lock(intel_dp);
	vdd = edp_panel_vdd_on(intel_dp);
	pps_unlock(intel_dp);

	I915_STATE_WARN(!vdd, "eDP port %c VDD already requested on\n",
	     port_name(dp_to_dig_port(intel_dp)->port));
}
1935
/*
 * Actually turn panel VDD off and drop the AUX power domain reference.
 * Must only run once nobody wants VDD anymore (WARNs otherwise).
 * Caller must hold pps_mutex.
 */
static void edp_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_digital_port *intel_dig_port =
		dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	enum intel_display_power_domain power_domain;
	u32 pp;
	i915_reg_t pp_stat_reg, pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	WARN_ON(intel_dp->want_panel_vdd);

	if (!edp_have_panel_vdd(intel_dp))
		return;

	DRM_DEBUG_KMS("Turning eDP port %c VDD off\n",
		      port_name(intel_dig_port->port));

	pp = ironlake_get_pp_control(intel_dp);
	pp &= ~EDP_FORCE_VDD;

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
	pp_stat_reg = _pp_stat_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	/* Make sure sequencer is idle before allowing subsequent activity */
	DRM_DEBUG_KMS("PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
	I915_READ(pp_stat_reg), I915_READ(pp_ctrl_reg));

	/* Dropping VDD with the panel off starts the power-cycle clock. */
	if ((pp & POWER_TARGET_ON) == 0)
		intel_dp->panel_power_off_time = ktime_get_boottime();

	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_put(dev_priv, power_domain);
}
1976
/*
 * Deferred-VDD-off worker: turn VDD off unless someone requested it
 * again while the work was pending.
 */
static void edp_panel_vdd_work(struct work_struct *__work)
{
	struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
						 struct intel_dp, panel_vdd_work);

	pps_lock(intel_dp);
	if (!intel_dp->want_panel_vdd)
		edp_panel_vdd_off_sync(intel_dp);
	pps_unlock(intel_dp);
}
1987
/*
 * Schedule the deferred VDD off.  The generous 5x power-cycle delay
 * keeps VDD on across bursts of AUX traffic, avoiding needless panel
 * power cycling (and the cycle delay it imposes).
 */
static void edp_panel_vdd_schedule_off(struct intel_dp *intel_dp)
{
	unsigned long delay;

	delay = msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5);
	schedule_delayed_work(&intel_dp->panel_vdd_work, delay);
}
2000
2001
2002
2003
2004
2005
/*
 * Release a VDD reference taken by edp_panel_vdd_on().  @sync chooses
 * between turning VDD off immediately or via the delayed worker.
 * Warns if VDD was not actually forced on.  Caller must hold
 * pps_mutex; no-op on non-eDP.
 */
static void edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	struct drm_i915_private *dev_priv =
		intel_dp_to_dev(intel_dp)->dev_private;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return;

	I915_STATE_WARN(!intel_dp->want_panel_vdd, "eDP port %c VDD not forced on",
	     port_name(dp_to_dig_port(intel_dp)->port));

	intel_dp->want_panel_vdd = false;

	if (sync)
		edp_panel_vdd_off_sync(intel_dp);
	else
		edp_panel_vdd_schedule_off(intel_dp);
}
2026
/*
 * Turn the eDP panel power on via the PPS, respecting the power-cycle
 * delay and waiting until the sequencer reports the panel on.  ILK
 * (gen5) needs PANEL_POWER_RESET toggled around the power-up (see the
 * two IS_GEN5 blocks).  Caller must hold pps_mutex; no-op on non-eDP.
 */
static void edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP port %c panel power on\n",
		      port_name(dp_to_dig_port(intel_dp)->port));

	if (WARN(edp_have_panel_power(intel_dp),
		 "eDP port %c panel power already on\n",
		 port_name(dp_to_dig_port(intel_dp)->port)))
		return;

	wait_panel_power_cycle(intel_dp);

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
	pp = ironlake_get_pp_control(intel_dp);
	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(pp_ctrl_reg, pp);
		POSTING_READ(pp_ctrl_reg);
	}

	pp |= POWER_TARGET_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	wait_panel_on(intel_dp);
	intel_dp->last_power_on = jiffies;

	if (IS_GEN5(dev)) {
		/* Re-enable reset now that the panel is up. */
		pp |= PANEL_POWER_RESET;
		I915_WRITE(pp_ctrl_reg, pp);
		POSTING_READ(pp_ctrl_reg);
	}
}
2074
/* Locked wrapper around edp_panel_on(); no-op on non-eDP. */
void intel_edp_panel_on(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	pps_lock(intel_dp);
	edp_panel_on(intel_dp);
	pps_unlock(intel_dp);
}
2084
2085
/*
 * Turn the eDP panel power off.  VDD is expected to be forced on by the
 * caller (WARNs otherwise) and is dropped here as part of the single
 * register write that also clears POWER_TARGET_ON, backlight and
 * PANEL_POWER_RESET.  Records the power-off timestamp for the
 * power-cycle delay and releases the AUX power domain.  Caller must
 * hold pps_mutex; no-op on non-eDP.
 */
static void edp_panel_off(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum intel_display_power_domain power_domain;
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP port %c panel power off\n",
		      port_name(dp_to_dig_port(intel_dp)->port));

	WARN(!intel_dp->want_panel_vdd, "Need eDP port %c VDD to turn off panel\n",
	     port_name(dp_to_dig_port(intel_dp)->port));

	pp = ironlake_get_pp_control(intel_dp);
	/* We need to switch off panel power _and_ force vdd, for otherwise some
	 * panels get very unhappy and cease to work. */
	pp &= ~(POWER_TARGET_ON | PANEL_POWER_RESET | EDP_FORCE_VDD |
		EDP_BLC_ENABLE);

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	intel_dp->want_panel_vdd = false;

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	intel_dp->panel_power_off_time = ktime_get_boottime();
	wait_panel_off(intel_dp);

	/* We got a reference when we enabled the VDD. */
	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_put(dev_priv, power_domain);
}
2127
/* Locked wrapper around edp_panel_off(); no-op on non-eDP. */
void intel_edp_panel_off(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	pps_lock(intel_dp);
	edp_panel_off(intel_dp);
	pps_unlock(intel_dp);
}
2137
2138
/*
 * Enable the eDP backlight via the PPS control register.  Waits out the
 * panel's power-on -> backlight-on delay first; the wait is done
 * outside pps_lock to avoid holding the lock while sleeping.
 */
static void _intel_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	wait_backlight_on(intel_dp);

	pps_lock(intel_dp);

	pp = ironlake_get_pp_control(intel_dp);
	pp |= EDP_BLC_ENABLE;

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	pps_unlock(intel_dp);
}
2167
2168
/*
 * Public backlight-on: enable the platform backlight controller first,
 * then the PPS backlight enable bit.  No-op on non-eDP.
 */
void intel_edp_backlight_on(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");

	intel_panel_enable_backlight(intel_dp->attached_connector);
	_intel_edp_backlight_on(intel_dp);
}
2179
2180
/*
 * Disable the eDP backlight via the PPS control register, then record
 * the timestamp and wait out the backlight-off delay (again outside
 * pps_lock so we don't sleep under it).  No-op on non-eDP.
 */
static void _intel_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	if (!is_edp(intel_dp))
		return;

	pps_lock(intel_dp);

	pp = ironlake_get_pp_control(intel_dp);
	pp &= ~EDP_BLC_ENABLE;

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	pps_unlock(intel_dp);

	intel_dp->last_backlight_off = jiffies;
	edp_wait_backlight_off(intel_dp);
}
2206
2207
/*
 * Public backlight-off: disable the PPS backlight enable bit first,
 * then the platform backlight controller.  No-op on non-eDP.
 */
void intel_edp_backlight_off(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");

	_intel_edp_backlight_off(intel_dp);
	intel_panel_disable_backlight(intel_dp->attached_connector);
}
2218
2219
2220
2221
2222
/*
 * Set the PPS backlight enable bit to @enable, skipping the register
 * write (and its delays) when the hardware already matches.
 */
static void intel_edp_backlight_power(struct intel_connector *connector,
				      bool enable)
{
	struct intel_dp *intel_dp = intel_attached_dp(&connector->base);
	bool is_enabled;

	pps_lock(intel_dp);
	is_enabled = ironlake_get_pp_control(intel_dp) & EDP_BLC_ENABLE;
	pps_unlock(intel_dp);

	if (is_enabled == enable)
		return;

	DRM_DEBUG_KMS("panel power control backlight %s\n",
		      enable ? "enable" : "disable");

	if (enable)
		_intel_edp_backlight_on(intel_dp);
	else
		_intel_edp_backlight_off(intel_dp);
}
2244
/* Assert (via state WARN) that the DP port enable bit matches @state. */
static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = I915_READ(intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"DP port %c state assertion failure (expected %s, current %s)\n",
			port_name(dig_port->port),
			onoff(state), onoff(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

/* Assert (via state WARN) that the eDP PLL enable bit matches @state. */
static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = I915_READ(DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
2268
/*
 * Enable the ILK eDP PLL (port A).  The pipe, port and PLL must all be
 * disabled beforehand.  The frequency is programmed first and allowed
 * to settle before the enable bit is set; the udelay values pace the
 * hardware between the two writes.
 */
static void ironlake_edp_pll_on(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_crtc *crtc = to_intel_crtc(intel_dig_port->base.base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, crtc->pipe);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	DRM_DEBUG_KMS("enabling eDP PLL for clock %d\n",
		      crtc->config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (crtc->config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	I915_WRITE(DP_A, intel_dp->DP);
	POSTING_READ(DP_A);
	udelay(500);

	intel_dp->DP |= DP_PLL_ENABLE;

	I915_WRITE(DP_A, intel_dp->DP);
	POSTING_READ(DP_A);
	udelay(200);
}
2299
/*
 * Disable the ILK eDP PLL.  The pipe and port must already be off and
 * the PLL on; a short udelay lets the hardware settle after the write.
 */
static void ironlake_edp_pll_off(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_crtc *crtc = to_intel_crtc(intel_dig_port->base.base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, crtc->pipe);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	DRM_DEBUG_KMS("disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	I915_WRITE(DP_A, intel_dp->DP);
	POSTING_READ(DP_A);
	udelay(200);
}
2318
2319
/*
 * Put the sink into the DPMS power state matching @mode by writing
 * DP_SET_POWER.  Skipped for DPCD < 1.1 sinks, where the register is
 * not defined.  Wake-up (D0) is retried since sinks may take up to
 * 1 ms to respond on AUX after D3 (DP 1.1 spec).
 */
void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER,
					 DP_SET_POWER_D3);
	} else {
		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER,
						 DP_SET_POWER_D0);
			if (ret == 1)
				break;
			msleep(1);
		}
	}

	if (ret != 1)
		DRM_DEBUG_KMS("failed to %s sink power state\n",
			      mode == DRM_MODE_DPMS_ON ? "enable" : "disable");
}
2349
/*
 * Encoder ->get_hw_state hook: report whether the DP port is enabled
 * and, if so, which pipe drives it.  The pipe-select encoding varies:
 * gen7 port A and CPT use transcoder-side fields (CPT requires a scan
 * of TRANS_DP_CTL), CHV has its own field, everything else uses the
 * classic pipe-select bit.  Takes a power-domain reference only if the
 * domain is already enabled.
 */
static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	enum port port = dp_to_dig_port(intel_dp)->port;
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum intel_display_power_domain power_domain;
	u32 tmp;
	bool ret;

	power_domain = intel_display_port_power_domain(encoder);
	if (!intel_display_power_get_if_enabled(dev_priv, power_domain))
		return false;

	ret = false;

	tmp = I915_READ(intel_dp->output_reg);

	if (!(tmp & DP_PORT_EN))
		goto out;

	if (IS_GEN7(dev) && port == PORT_A) {
		*pipe = PORT_TO_PIPE_CPT(tmp);
	} else if (HAS_PCH_CPT(dev) && port != PORT_A) {
		enum pipe p;

		/* CPT: find the transcoder currently routed to this port. */
		for_each_pipe(dev_priv, p) {
			u32 trans_dp = I915_READ(TRANS_DP_CTL(p));
			if (TRANS_DP_PIPE_TO_PORT(trans_dp) == port) {
				*pipe = p;
				ret = true;

				goto out;
			}
		}

		DRM_DEBUG_KMS("No pipe for dp port 0x%x found\n",
			      i915_mmio_reg_offset(intel_dp->output_reg));
	} else if (IS_CHERRYVIEW(dev)) {
		*pipe = DP_PORT_TO_PIPE_CHV(tmp);
	} else {
		*pipe = PORT_TO_PIPE(tmp);
	}

	ret = true;

out:
	intel_display_power_put(dev_priv, power_domain);

	return ret;
}
2402
/*
 * Encoder ->get_config hook: read the current DP configuration back
 * from the hardware into @pipe_config (sync flags, audio, color range,
 * lane count, m/n values, port clock, dotclock).  On CPT ports the sync
 * polarity lives in TRANS_DP_CTL rather than the port register.
 */
static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	u32 tmp, flags = 0;
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum port port = dp_to_dig_port(intel_dp)->port;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
	int dotclock;

	tmp = I915_READ(intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev) && port != PORT_A) {
		u32 trans_dp = I915_READ(TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->base.adjusted_mode.flags |= flags;

	if (!HAS_PCH_SPLIT(dev) && !IS_VALLEYVIEW(dev) &&
	    !IS_CHERRYVIEW(dev) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->has_dp_encoder = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	intel_dp_get_m_n(crtc, pipe_config);

	/* Port A's fixed-frequency PLL determines the link rate. */
	if (port == PORT_A) {
		if ((I915_READ(DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	dotclock = intel_dotclock_calculate(pipe_config->port_clock,
					    &pipe_config->dp_m_n);

	if (HAS_PCH_SPLIT(dev_priv->dev) && port != PORT_A)
		ironlake_check_encoder_dotclock(pipe_config, dotclock);

	pipe_config->base.adjusted_mode.crtc_clock = dotclock;

	if (is_edp(intel_dp) && dev_priv->vbt.edp_bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp_bpp) {
		/*
		 * The BIOS may set up the pipe with a higher bpp than
		 * the VBT claims the panel supports; trust the hardware
		 * state and raise the cached VBT limit so our own mode
		 * sets don't needlessly reduce the bpp (which would
		 * require a full modeset to change).
		 */
		DRM_DEBUG_KMS("pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			      pipe_config->pipe_bpp, dev_priv->vbt.edp_bpp);
		dev_priv->vbt.edp_bpp = pipe_config->pipe_bpp;
	}
}
2490
/*
 * Disable the DP output: audio first, then PSR (on non-DDI platforms),
 * then the eDP panel power sequence, and finally (on gen < 5) the link.
 */
static void intel_disable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	struct drm_device *dev = encoder->base.dev;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);

	if (crtc->config->has_audio)
		intel_audio_codec_disable(encoder);

	if (HAS_PSR(dev) && !HAS_DDI(dev))
		intel_psr_disable(intel_dp);

	/*
	 * Make sure the panel is off before trying to change the mode,
	 * but keep VDD asserted while we switch off backlight and panel.
	 */
	intel_edp_panel_vdd_on(intel_dp);
	intel_edp_backlight_off(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_OFF);
	intel_edp_panel_off(intel_dp);

	/* On g4x the port must be disabled before the pipe */
	if (INTEL_INFO(dev)->gen < 5)
		intel_dp_link_down(intel_dp);
}
2514
/*
 * ILK+ post-disable: take the link down after the pipe is off, and
 * turn off the dedicated eDP PLL for the CPU eDP port (port A).
 */
static void ilk_post_disable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	enum port port = dp_to_dig_port(intel_dp)->port;

	intel_dp_link_down(intel_dp);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ironlake_edp_pll_off(intel_dp);
}
2526
/* VLV post-disable: the link is taken down after the pipe is disabled. */
static void vlv_post_disable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);

	intel_dp_link_down(intel_dp);
}
2533
/*
 * Assert (@reset == true) or deassert (@reset == false) the CHV data
 * lane soft reset via the PCS DPIO registers. The second PCS group
 * (lanes 2/3) is only touched when more than two lanes are in use.
 * Caller must hold sb_lock.
 */
static void chv_data_lane_soft_reset(struct intel_encoder *encoder,
				     bool reset)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum dpio_channel ch = vlv_dport_to_channel(enc_to_dig_port(&encoder->base));
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
	enum pipe pipe = crtc->pipe;
	uint32_t val;

	/* TX lane reset for lanes 0/1 */
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW0(ch));
	if (reset)
		val &= ~(DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET);
	else
		val |= DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW0(ch), val);

	/* TX lane reset for lanes 2/3 */
	if (crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW0(ch));
		if (reset)
			val &= ~(DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET);
		else
			val |= DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET;
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW0(ch), val);
	}

	/* PCS clock soft reset, lanes 0/1 */
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW1(ch));
	val |= CHV_PCS_REQ_SOFTRESET_EN;
	if (reset)
		val &= ~DPIO_PCS_CLK_SOFT_RESET;
	else
		val |= DPIO_PCS_CLK_SOFT_RESET;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW1(ch), val);

	/* PCS clock soft reset, lanes 2/3 */
	if (crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW1(ch));
		val |= CHV_PCS_REQ_SOFTRESET_EN;
		if (reset)
			val &= ~DPIO_PCS_CLK_SOFT_RESET;
		else
			val |= DPIO_PCS_CLK_SOFT_RESET;
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW1(ch), val);
	}
}
2577
/*
 * CHV post-disable: take the link down, then assert data lane reset
 * under the sideband lock.
 */
static void chv_post_disable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	intel_dp_link_down(intel_dp);

	mutex_lock(&dev_priv->sb_lock);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, true);

	mutex_unlock(&dev_priv->sb_lock);
}
2593
/*
 * Encode the requested link training pattern into the hardware-specific
 * register value. On DDI platforms the pattern is written directly to
 * DP_TP_CTL; on other platforms only *DP is updated and the caller is
 * responsible for writing it to the port register.
 */
static void
_intel_dp_set_link_train(struct intel_dp *intel_dp,
			 uint32_t *DP,
			 uint8_t dp_train_pat)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum port port = intel_dig_port->port;

	if (HAS_DDI(dev)) {
		uint32_t temp = I915_READ(DP_TP_CTL(port));

		if (dp_train_pat & DP_LINK_SCRAMBLING_DISABLE)
			temp |= DP_TP_CTL_SCRAMBLE_DISABLE;
		else
			temp &= ~DP_TP_CTL_SCRAMBLE_DISABLE;

		temp &= ~DP_TP_CTL_LINK_TRAIN_MASK;
		switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
		case DP_TRAINING_PATTERN_DISABLE:
			temp |= DP_TP_CTL_LINK_TRAIN_NORMAL;

			break;
		case DP_TRAINING_PATTERN_1:
			temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
			break;
		case DP_TRAINING_PATTERN_2:
			temp |= DP_TP_CTL_LINK_TRAIN_PAT2;
			break;
		case DP_TRAINING_PATTERN_3:
			temp |= DP_TP_CTL_LINK_TRAIN_PAT3;
			break;
		}
		I915_WRITE(DP_TP_CTL(port), temp);

	} else if ((IS_GEN7(dev) && port == PORT_A) ||
		   (HAS_PCH_CPT(dev) && port != PORT_A)) {
		/* CPT-style training pattern bits (no pattern 3 support) */
		*DP &= ~DP_LINK_TRAIN_MASK_CPT;

		switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
		case DP_TRAINING_PATTERN_DISABLE:
			*DP |= DP_LINK_TRAIN_OFF_CPT;
			break;
		case DP_TRAINING_PATTERN_1:
			*DP |= DP_LINK_TRAIN_PAT_1_CPT;
			break;
		case DP_TRAINING_PATTERN_2:
			*DP |= DP_LINK_TRAIN_PAT_2_CPT;
			break;
		case DP_TRAINING_PATTERN_3:
			/* Fall back to pattern 2 since pattern 3 is unsupported */
			DRM_ERROR("DP training pattern 3 not supported\n");
			*DP |= DP_LINK_TRAIN_PAT_2_CPT;
			break;
		}

	} else {
		/* g4x/VLV/CHV style; CHV has a wider mask and pattern 3 */
		if (IS_CHERRYVIEW(dev))
			*DP &= ~DP_LINK_TRAIN_MASK_CHV;
		else
			*DP &= ~DP_LINK_TRAIN_MASK;

		switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
		case DP_TRAINING_PATTERN_DISABLE:
			*DP |= DP_LINK_TRAIN_OFF;
			break;
		case DP_TRAINING_PATTERN_1:
			*DP |= DP_LINK_TRAIN_PAT_1;
			break;
		case DP_TRAINING_PATTERN_2:
			*DP |= DP_LINK_TRAIN_PAT_2;
			break;
		case DP_TRAINING_PATTERN_3:
			if (IS_CHERRYVIEW(dev)) {
				*DP |= DP_LINK_TRAIN_PAT_3_CHV;
			} else {
				DRM_ERROR("DP training pattern 3 not supported\n");
				*DP |= DP_LINK_TRAIN_PAT_2;
			}
			break;
		}
	}
}
2677
/*
 * Enable the DP port. The training pattern is programmed first, then the
 * port enable bit is set in a second register write.
 */
static void intel_dp_enable_port(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *crtc =
		to_intel_crtc(dp_to_dig_port(intel_dp)->base.base.crtc);

	/* enable with training pattern 1 (as per DP spec) */
	_intel_dp_set_link_train(intel_dp, &intel_dp->DP,
				 DP_TRAINING_PATTERN_1);

	I915_WRITE(intel_dp->output_reg, intel_dp->DP);
	POSTING_READ(intel_dp->output_reg);

	/*
	 * Two-step enable: the register is first set up without
	 * DP_PORT_EN, then the port (and audio) is enabled in a
	 * separate write. NOTE(review): upstream requires this split
	 * for VLV/CHV — keep the two writes distinct.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc->config->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	I915_WRITE(intel_dp->output_reg, intel_dp->DP);
	POSTING_READ(intel_dp->output_reg);
}
2705
/*
 * Main DP enable sequence: panel power sequencer setup (VLV/CHV),
 * port enable, eDP panel power-on, link training, and audio enable.
 */
static void intel_enable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);
	enum port port = dp_to_dig_port(intel_dp)->port;
	enum pipe pipe = crtc->pipe;

	/* The port must not already be enabled */
	if (WARN_ON(dp_reg & DP_PORT_EN))
		return;

	pps_lock(intel_dp);

	if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
		vlv_init_panel_power_sequencer(intel_dp);

	/*
	 * Suppress underrun reporting around the port A enable +
	 * panel power-on window; it is re-enabled below once the
	 * sequence is complete.
	 */
	if (port == PORT_A)
		intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);

	intel_dp_enable_port(intel_dp);

	if (port == PORT_A && IS_GEN5(dev_priv)) {
		/*
		 * Underrun reporting for the other pipe was disabled in
		 * g4x_pre_enable_dp(). The eDP PLL and port have now been
		 * enabled, so it's safe to re-enable underrun reporting
		 * after a vblank on that pipe.
		 */
		intel_wait_for_vblank_if_active(dev_priv->dev, !pipe);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, !pipe, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, !pipe, true);
	}

	edp_panel_vdd_on(intel_dp);
	edp_panel_on(intel_dp);
	edp_panel_vdd_off(intel_dp, true);

	if (port == PORT_A)
		intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	pps_unlock(intel_dp);

	if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
		unsigned int lane_mask = 0x0;

		/* On CHV only wait for the lanes that are actually in use */
		if (IS_CHERRYVIEW(dev))
			lane_mask = intel_dp_unused_lane_mask(crtc->config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	intel_dp_stop_link_train(intel_dp);

	if (crtc->config->has_audio) {
		DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
				 pipe_name(pipe));
		intel_audio_codec_enable(encoder);
	}
}
2775
/* g4x enable hook: full DP enable, then eDP backlight on. */
static void g4x_enable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);

	intel_enable_dp(encoder);
	intel_edp_backlight_on(intel_dp);
}
2783
/*
 * VLV enable hook: only backlight and PSR here — the port itself was
 * enabled from the pre_enable hook (see vlv_pre_enable_dp).
 */
static void vlv_enable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);

	intel_edp_backlight_on(intel_dp);
	intel_psr_enable(intel_dp);
}
2791
/*
 * g4x/ilk pre-enable: program the port registers and, for the CPU eDP
 * port, turn on the dedicated eDP PLL.
 */
static void g4x_pre_enable_dp(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	enum port port = dp_to_dig_port(intel_dp)->port;
	enum pipe pipe = to_intel_crtc(encoder->base.crtc)->pipe;

	intel_dp_prepare(encoder);

	if (port == PORT_A && IS_GEN5(dev_priv)) {
		/*
		 * Enabling the eDP PLL (and the port) can cause FIFO
		 * underruns on the other pipe, so suppress its underrun
		 * reporting here; intel_enable_dp() re-enables it once
		 * the sequence has completed.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, !pipe, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, !pipe, false);
	}

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ironlake_edp_pll_on(intel_dp);
}
2818
/*
 * Disconnect this port from its currently assigned panel power
 * sequencer pipe. VDD is synced off first, then the PPS on-delay
 * register is cleared (which also clears the port select), and the
 * cached pps_pipe is invalidated.
 */
static void vlv_detach_power_sequencer(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = intel_dig_port->base.base.dev->dev_private;
	enum pipe pipe = intel_dp->pps_pipe;
	i915_reg_t pp_on_reg = VLV_PIPE_PP_ON_DELAYS(pipe);

	edp_panel_vdd_off_sync(intel_dp);

	/*
	 * NOTE(review): clearing the whole PP_ON_DELAYS register (rather
	 * than just the port-select field) follows the upstream sequence;
	 * the hardware reportedly misbehaves when two power sequencers
	 * have the same port selected — confirm against the VLV PPS docs
	 * before changing this.
	 */
	DRM_DEBUG_KMS("detaching pipe %c power sequencer from port %c\n",
		      pipe_name(pipe), port_name(intel_dig_port->port));
	I915_WRITE(pp_on_reg, 0);
	POSTING_READ(pp_on_reg);

	intel_dp->pps_pipe = INVALID_PIPE;
}
2844
/*
 * Detach the power sequencer of @pipe from whichever eDP port currently
 * owns it, so the caller can claim it. Must be called with pps_mutex
 * held. Warns if the sequencer is stolen from a port that is still
 * driving an active CRTC.
 */
static void vlv_steal_power_sequencer(struct drm_device *dev,
				      enum pipe pipe)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_encoder *encoder;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* Only pipes A and B have power sequencers on VLV/CHV */
	if (WARN_ON(pipe != PIPE_A && pipe != PIPE_B))
		return;

	for_each_intel_encoder(dev, encoder) {
		struct intel_dp *intel_dp;
		enum port port;

		if (encoder->type != INTEL_OUTPUT_EDP)
			continue;

		intel_dp = enc_to_intel_dp(&encoder->base);
		port = dp_to_dig_port(intel_dp)->port;

		if (intel_dp->pps_pipe != pipe)
			continue;

		DRM_DEBUG_KMS("stealing pipe %c power sequencer from port %c\n",
			      pipe_name(pipe), port_name(port));

		/* An active crtc means the owner is still using the PPS */
		WARN(encoder->base.crtc,
		     "stealing pipe %c power sequencer from active eDP port %c\n",
		     pipe_name(pipe), port_name(port));

		vlv_detach_power_sequencer(intel_dp);
	}
}
2880
/*
 * Bind the panel power sequencer of the port's current pipe to this
 * eDP port, detaching/stealing any previous binding first, and program
 * the PPS registers. Must be called with pps_mutex held. No-op for
 * non-eDP ports or if the binding is already correct.
 */
static void vlv_init_panel_power_sequencer(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return;

	if (intel_dp->pps_pipe == crtc->pipe)
		return;

	/*
	 * If this port was previously bound to a different pipe's
	 * power sequencer, unbind it first.
	 */
	if (intel_dp->pps_pipe != INVALID_PIPE)
		vlv_detach_power_sequencer(intel_dp);

	/*
	 * The target pipe's power sequencer may belong to another
	 * port — steal it if so.
	 */
	vlv_steal_power_sequencer(dev, crtc->pipe);

	/* now it's all ours */
	intel_dp->pps_pipe = crtc->pipe;

	DRM_DEBUG_KMS("initializing pipe %c power sequencer for port %c\n",
		      pipe_name(intel_dp->pps_pipe), port_name(intel_dig_port->port));

	/* init power sequencer on this pipe and port */
	intel_dp_init_panel_power_sequencer(dev, intel_dp);
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);
}
2921
/*
 * VLV pre-enable: program the PCS clock channels via sideband, then run
 * the full DP enable sequence (on VLV the port is enabled from
 * pre_enable, before the pipe).
 */
static void vlv_pre_enable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(encoder->base.crtc);
	enum dpio_channel port = vlv_dport_to_channel(dport);
	int pipe = intel_crtc->pipe;
	u32 val;

	mutex_lock(&dev_priv->sb_lock);

	/*
	 * NOTE(review): the value read here is immediately discarded by
	 * the "val = 0" below — a dead read inherited from upstream. It
	 * is kept as-is since sideband accesses may matter on their own;
	 * confirm before removing.
	 */
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW8(port));
	val = 0;
	/* bit 21 selects the pipe driving this clock channel */
	if (pipe)
		val |= (1<<21);
	else
		val &= ~(1<<21);
	val |= 0x001000c4;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW8(port), val);
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW14(port), 0x00760018);
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW23(port), 0x00400888);

	mutex_unlock(&dev_priv->sb_lock);

	intel_enable_dp(encoder);
}
2950
/*
 * VLV pre-PLL-enable: prepare the port registers and bring the PHY
 * lanes into a known state via sideband before the PLL is enabled.
 */
static void vlv_dp_pre_pll_enable(struct intel_encoder *encoder)
{
	struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc =
		to_intel_crtc(encoder->base.crtc);
	enum dpio_channel port = vlv_dport_to_channel(dport);
	int pipe = intel_crtc->pipe;

	intel_dp_prepare(encoder);

	/* Program Tx lane resets to default */
	mutex_lock(&dev_priv->sb_lock);
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW0(port),
			 DPIO_PCS_TX_LANE2_RESET |
			 DPIO_PCS_TX_LANE1_RESET);
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW1(port),
			 DPIO_PCS_CLK_CRI_RXEB_EIOS_EN |
			 DPIO_PCS_CLK_CRI_RXDIGFILTSG_EN |
			 (1<<DPIO_PCS_CLK_DATAWIDTH_SHIFT) |
				 DPIO_PCS_CLK_SOFT_RESET);

	/* Fix up inter-pair skew failure */
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW12(port), 0x00750f00);
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW11(port), 0x00001500);
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW14(port), 0x40400000);
	mutex_unlock(&dev_priv->sb_lock);
}
2980
/*
 * CHV pre-enable: program lane latency, stagger and FIFO reset source
 * via sideband, deassert the data lane reset, and run the full DP
 * enable sequence.
 */
static void chv_pre_enable_dp(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc =
		to_intel_crtc(encoder->base.crtc);
	enum dpio_channel ch = vlv_dport_to_channel(dport);
	int pipe = intel_crtc->pipe;
	int data, i, stagger;
	u32 val;

	mutex_lock(&dev_priv->sb_lock);

	/* allow hardware to manage TX FIFO reset source */
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW11(ch));
	val &= ~DPIO_LANEDESKEW_STRAP_OVRD;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW11(ch), val);

	if (intel_crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW11(ch));
		val &= ~DPIO_LANEDESKEW_STRAP_OVRD;
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW11(ch), val);
	}

	/* Program Tx lane latency optimal setting */
	for (i = 0; i < intel_crtc->config->lane_count; i++) {
		/* Set the upar bit (single-lane mode uses 0 for all lanes) */
		if (intel_crtc->config->lane_count == 1)
			data = 0x0;
		else
			data = (i == 1) ? 0x0 : 0x1;
		vlv_dpio_write(dev_priv, pipe, CHV_TX_DW14(ch, i),
				data << DPIO_UPAR_SHIFT);
	}

	/* Data lane stagger programming: value scales with port clock */
	if (intel_crtc->config->port_clock > 270000)
		stagger = 0x18;
	else if (intel_crtc->config->port_clock > 135000)
		stagger = 0xd;
	else if (intel_crtc->config->port_clock > 67500)
		stagger = 0x7;
	else if (intel_crtc->config->port_clock > 33750)
		stagger = 0x4;
	else
		stagger = 0x2;

	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW11(ch));
	val |= DPIO_TX2_STAGGER_MASK(0x1f);
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW11(ch), val);

	if (intel_crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW11(ch));
		val |= DPIO_TX2_STAGGER_MASK(0x1f);
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW11(ch), val);
	}

	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW12(ch),
		       DPIO_LANESTAGGER_STRAP(stagger) |
		       DPIO_LANESTAGGER_STRAP_OVRD |
		       DPIO_TX1_STAGGER_MASK(0x1f) |
		       DPIO_TX1_STAGGER_MULT(6) |
		       DPIO_TX2_STAGGER_MULT(0));

	if (intel_crtc->config->lane_count > 2) {
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW12(ch),
			       DPIO_LANESTAGGER_STRAP(stagger) |
			       DPIO_LANESTAGGER_STRAP_OVRD |
			       DPIO_TX1_STAGGER_MASK(0x1f) |
			       DPIO_TX1_STAGGER_MULT(7) |
			       DPIO_TX2_STAGGER_MULT(5));
	}

	/* Deassert data lane reset */
	chv_data_lane_soft_reset(encoder, false);

	mutex_unlock(&dev_priv->sb_lock);

	intel_enable_dp(encoder);

	/* Second common lane will stay alive on its own now */
	if (dport->release_cl2_override) {
		chv_phy_powergate_ch(dev_priv, DPIO_PHY0, DPIO_CH1, false);
		dport->release_cl2_override = false;
	}
}
3069
/*
 * CHV pre-PLL-enable: power up the required lanes, assert the data
 * lane reset, and program clock channel distribution/usage before the
 * PLL is enabled.
 */
static void chv_dp_pre_pll_enable(struct intel_encoder *encoder)
{
	struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc =
		to_intel_crtc(encoder->base.crtc);
	enum dpio_channel ch = vlv_dport_to_channel(dport);
	enum pipe pipe = intel_crtc->pipe;
	unsigned int lane_mask =
		intel_dp_unused_lane_mask(intel_crtc->config->lane_count);
	u32 val;

	intel_dp_prepare(encoder);

	/*
	 * Bring the second common lane to life when driving pipe B from
	 * CH0; remember whether we have to release the override again
	 * afterwards (done in chv_pre_enable_dp()).
	 */
	if (ch == DPIO_CH0 && pipe == PIPE_B)
		dport->release_cl2_override =
			!chv_phy_powergate_ch(dev_priv, DPIO_PHY0, DPIO_CH1, true);

	chv_phy_powergate_lanes(encoder, true, lane_mask);

	mutex_lock(&dev_priv->sb_lock);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, true);

	/* program left/right clock distribution */
	if (pipe != PIPE_B) {
		val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW5_CH0);
		val &= ~(CHV_BUFLEFTENA1_MASK | CHV_BUFRIGHTENA1_MASK);
		if (ch == DPIO_CH0)
			val |= CHV_BUFLEFTENA1_FORCE;
		if (ch == DPIO_CH1)
			val |= CHV_BUFRIGHTENA1_FORCE;
		vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW5_CH0, val);
	} else {
		val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW1_CH1);
		val &= ~(CHV_BUFLEFTENA2_MASK | CHV_BUFRIGHTENA2_MASK);
		if (ch == DPIO_CH0)
			val |= CHV_BUFLEFTENA2_FORCE;
		if (ch == DPIO_CH1)
			val |= CHV_BUFRIGHTENA2_FORCE;
		vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW1_CH1, val);
	}

	/* program clock channel usage */
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW8(ch));
	val |= CHV_PCS_USEDCLKCHANNEL_OVRRIDE;
	if (pipe != PIPE_B)
		val &= ~CHV_PCS_USEDCLKCHANNEL;
	else
		val |= CHV_PCS_USEDCLKCHANNEL;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW8(ch), val);

	if (intel_crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW8(ch));
		val |= CHV_PCS_USEDCLKCHANNEL_OVRRIDE;
		if (pipe != PIPE_B)
			val &= ~CHV_PCS_USEDCLKCHANNEL;
		else
			val |= CHV_PCS_USEDCLKCHANNEL;
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW8(ch), val);
	}

	/*
	 * Select the clock channel in the common lane as well; the
	 * bit is chosen by pipe, mirroring the PCS programming above.
	 */
	val = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW19(ch));
	if (pipe != PIPE_B)
		val &= ~CHV_CMN_USEDCLKCHANNEL;
	else
		val |= CHV_CMN_USEDCLKCHANNEL;
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW19(ch), val);

	mutex_unlock(&dev_priv->sb_lock);
}
3152
/*
 * CHV post-PLL-disable: tear down the left/right clock distribution
 * set up in chv_dp_pre_pll_enable(), then power down the PHY lanes.
 */
static void chv_dp_post_pll_disable(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum pipe pipe = to_intel_crtc(encoder->base.crtc)->pipe;
	u32 val;

	mutex_lock(&dev_priv->sb_lock);

	/* disable left/right clock distribution */
	if (pipe != PIPE_B) {
		val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW5_CH0);
		val &= ~(CHV_BUFLEFTENA1_MASK | CHV_BUFRIGHTENA1_MASK);
		vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW5_CH0, val);
	} else {
		val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW1_CH1);
		val &= ~(CHV_BUFLEFTENA2_MASK | CHV_BUFRIGHTENA2_MASK);
		vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW1_CH1, val);
	}

	mutex_unlock(&dev_priv->sb_lock);

	/*
	 * Power down all lanes (mask 0x0 = no lanes kept alive).
	 * NOTE(review): upstream keeps this as the final step so the
	 * common lane state stays consistent for later power-ups.
	 */
	chv_phy_powergate_lanes(encoder, false, 0x0);
}
3185
3186
3187
3188
3189
3190
3191
3192
3193static ssize_t
3194intel_dp_dpcd_read_wake(struct drm_dp_aux *aux, unsigned int offset,
3195 void *buffer, size_t size)
3196{
3197 ssize_t ret;
3198 int i;
3199
3200
3201
3202
3203
3204
3205 drm_dp_dpcd_read(aux, DP_DPCD_REV, buffer, 1);
3206
3207 for (i = 0; i < 3; i++) {
3208 ret = drm_dp_dpcd_read(aux, offset, buffer, size);
3209 if (ret == size)
3210 return ret;
3211 msleep(1);
3212 }
3213
3214 return ret;
3215}
3216
3217
3218
3219
3220
3221bool
3222intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
3223{
3224 return intel_dp_dpcd_read_wake(&intel_dp->aux,
3225 DP_LANE0_1_STATUS,
3226 link_status,
3227 DP_LINK_STATUS_SIZE) == DP_LINK_STATUS_SIZE;
3228}
3229
3230
3231uint8_t
3232intel_dp_voltage_max(struct intel_dp *intel_dp)
3233{
3234 struct drm_device *dev = intel_dp_to_dev(intel_dp);
3235 struct drm_i915_private *dev_priv = dev->dev_private;
3236 enum port port = dp_to_dig_port(intel_dp)->port;
3237
3238 if (IS_BROXTON(dev))
3239 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
3240 else if (INTEL_INFO(dev)->gen >= 9) {
3241 if (dev_priv->edp_low_vswing && port == PORT_A)
3242 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
3243 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
3244 } else if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
3245 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
3246 else if (IS_GEN7(dev) && port == PORT_A)
3247 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
3248 else if (HAS_PCH_CPT(dev) && port != PORT_A)
3249 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
3250 else
3251 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
3252}
3253
3254uint8_t
3255intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
3256{
3257 struct drm_device *dev = intel_dp_to_dev(intel_dp);
3258 enum port port = dp_to_dig_port(intel_dp)->port;
3259
3260 if (INTEL_INFO(dev)->gen >= 9) {
3261 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
3262 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3263 return DP_TRAIN_PRE_EMPH_LEVEL_3;
3264 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3265 return DP_TRAIN_PRE_EMPH_LEVEL_2;
3266 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3267 return DP_TRAIN_PRE_EMPH_LEVEL_1;
3268 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
3269 return DP_TRAIN_PRE_EMPH_LEVEL_0;
3270 default:
3271 return DP_TRAIN_PRE_EMPH_LEVEL_0;
3272 }
3273 } else if (IS_HASWELL(dev) || IS_BROADWELL(dev)) {
3274 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
3275 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3276 return DP_TRAIN_PRE_EMPH_LEVEL_3;
3277 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3278 return DP_TRAIN_PRE_EMPH_LEVEL_2;
3279 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3280 return DP_TRAIN_PRE_EMPH_LEVEL_1;
3281 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
3282 default:
3283 return DP_TRAIN_PRE_EMPH_LEVEL_0;
3284 }
3285 } else if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
3286 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
3287 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3288 return DP_TRAIN_PRE_EMPH_LEVEL_3;
3289 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3290 return DP_TRAIN_PRE_EMPH_LEVEL_2;
3291 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3292 return DP_TRAIN_PRE_EMPH_LEVEL_1;
3293 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
3294 default:
3295 return DP_TRAIN_PRE_EMPH_LEVEL_0;
3296 }
3297 } else if (IS_GEN7(dev) && port == PORT_A) {
3298 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
3299 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3300 return DP_TRAIN_PRE_EMPH_LEVEL_2;
3301 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3302 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3303 return DP_TRAIN_PRE_EMPH_LEVEL_1;
3304 default:
3305 return DP_TRAIN_PRE_EMPH_LEVEL_0;
3306 }
3307 } else {
3308 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
3309 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3310 return DP_TRAIN_PRE_EMPH_LEVEL_2;
3311 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3312 return DP_TRAIN_PRE_EMPH_LEVEL_2;
3313 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3314 return DP_TRAIN_PRE_EMPH_LEVEL_1;
3315 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
3316 default:
3317 return DP_TRAIN_PRE_EMPH_LEVEL_0;
3318 }
3319 }
3320}
3321
/*
 * Translate the DP training voltage-swing/pre-emphasis request in
 * intel_dp->train_set[0] into VLV DPIO de-emphasis, pre-emphasis and
 * unique-transition-scale register values, and program them through
 * the sideband interface. Returns 0 (the signal level bits live in
 * the DPIO registers, not the port register, on VLV).
 */
static uint32_t vlv_signal_levels(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
	struct intel_crtc *intel_crtc =
		to_intel_crtc(dport->base.base.crtc);
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	uint8_t train_set = intel_dp->train_set[0];
	enum dpio_channel port = vlv_dport_to_channel(dport);
	int pipe = intel_crtc->pipe;

	/* Pick the register values for the requested pre-emphasis/swing
	 * combination; unsupported combinations return 0 untouched. */
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return 0;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return 0;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return 0;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return 0;
		}
		break;
	default:
		return 0;
	}

	/* TX_DW5 is toggled off before and back on after reprogramming */
	mutex_lock(&dev_priv->sb_lock);
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW5(port), 0x00000000);
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW4(port), demph_reg_value);
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW2(port),
			 uniqtranscale_reg_value);
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW3(port), 0x0C782040);
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW11(port), 0x00030000);
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW9(port), preemph_reg_value);
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW5(port), 0x80000000);
	mutex_unlock(&dev_priv->sb_lock);

	return 0;
}
3421
3422static bool chv_need_uniq_trans_scale(uint8_t train_set)
3423{
3424 return (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) == DP_TRAIN_PRE_EMPH_LEVEL_0 &&
3425 (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
3426}
3427
/*
 * Translate the DP training voltage-swing/pre-emphasis request in
 * intel_dp->train_set[0] into CHV DPIO de-emphasis and swing-margin
 * values, and program them per-lane through the sideband interface.
 * Returns 0 (signal levels live in the DPIO registers on CHV).
 */
static uint32_t chv_signal_levels(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
	struct intel_crtc *intel_crtc = to_intel_crtc(dport->base.base.crtc);
	u32 deemph_reg_value, margin_reg_value, val;
	uint8_t train_set = intel_dp->train_set[0];
	enum dpio_channel ch = vlv_dport_to_channel(dport);
	enum pipe pipe = intel_crtc->pipe;
	int i;

	/* Look up deemph/margin for the requested pre-emphasis/swing
	 * combination; unsupported combinations return 0 untouched. */
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			/* max swing also enables the unique transition
			 * scale, see chv_need_uniq_trans_scale() below */
			break;
		default:
			return 0;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return 0;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return 0;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return 0;
		}
		break;
	default:
		return 0;
	}

	mutex_lock(&dev_priv->sb_lock);

	/* Clear calc init */
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW10(ch));
	val &= ~(DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3);
	val &= ~(DPIO_PCS_TX1DEEMP_MASK | DPIO_PCS_TX2DEEMP_MASK);
	val |= DPIO_PCS_TX1DEEMP_9P5 | DPIO_PCS_TX2DEEMP_9P5;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW10(ch), val);

	if (intel_crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW10(ch));
		val &= ~(DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3);
		val &= ~(DPIO_PCS_TX1DEEMP_MASK | DPIO_PCS_TX2DEEMP_MASK);
		val |= DPIO_PCS_TX1DEEMP_9P5 | DPIO_PCS_TX2DEEMP_9P5;
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW10(ch), val);
	}

	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW9(ch));
	val &= ~(DPIO_PCS_TX1MARGIN_MASK | DPIO_PCS_TX2MARGIN_MASK);
	val |= DPIO_PCS_TX1MARGIN_000 | DPIO_PCS_TX2MARGIN_000;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW9(ch), val);

	if (intel_crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW9(ch));
		val &= ~(DPIO_PCS_TX1MARGIN_MASK | DPIO_PCS_TX2MARGIN_MASK);
		val |= DPIO_PCS_TX1MARGIN_000 | DPIO_PCS_TX2MARGIN_000;
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW9(ch), val);
	}

	/* Program swing deemph per lane */
	for (i = 0; i < intel_crtc->config->lane_count; i++) {
		val = vlv_dpio_read(dev_priv, pipe, CHV_TX_DW4(ch, i));
		val &= ~DPIO_SWING_DEEMPH9P5_MASK;
		val |= deemph_reg_value << DPIO_SWING_DEEMPH9P5_SHIFT;
		vlv_dpio_write(dev_priv, pipe, CHV_TX_DW4(ch, i), val);
	}

	/* Program swing margin per lane */
	for (i = 0; i < intel_crtc->config->lane_count; i++) {
		val = vlv_dpio_read(dev_priv, pipe, CHV_TX_DW2(ch, i));

		val &= ~DPIO_SWING_MARGIN000_MASK;
		val |= margin_reg_value << DPIO_SWING_MARGIN000_SHIFT;

		/*
		 * NOTE(review): the transition scale field is always
		 * programmed to 0x9a here even though it is only enabled
		 * via DW3 below for the max-swing case — upstream keeps
		 * it unconditional; confirm before changing.
		 */
		val &= ~(0xff << DPIO_UNIQ_TRANS_SCALE_SHIFT);
		val |= 0x9a << DPIO_UNIQ_TRANS_SCALE_SHIFT;

		vlv_dpio_write(dev_priv, pipe, CHV_TX_DW2(ch, i), val);
	}

	/*
	 * Enable the unique transition scale only for the max-swing /
	 * no-pre-emphasis combination; disable it otherwise.
	 */
	for (i = 0; i < intel_crtc->config->lane_count; i++) {
		val = vlv_dpio_read(dev_priv, pipe, CHV_TX_DW3(ch, i));
		if (chv_need_uniq_trans_scale(train_set))
			val |= DPIO_TX_UNIQ_TRANS_SCALE_EN;
		else
			val &= ~DPIO_TX_UNIQ_TRANS_SCALE_EN;
		vlv_dpio_write(dev_priv, pipe, CHV_TX_DW3(ch, i), val);
	}

	/* Start swing calculation */
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW10(ch));
	val |= DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3;
	vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW10(ch), val);

	if (intel_crtc->config->lane_count > 2) {
		val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW10(ch));
		val |= DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3;
		vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW10(ch), val);
	}

	mutex_unlock(&dev_priv->sb_lock);

	return 0;
}
3595
3596static uint32_t
3597gen4_signal_levels(uint8_t train_set)
3598{
3599 uint32_t signal_levels = 0;
3600
3601 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
3602 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3603 default:
3604 signal_levels |= DP_VOLTAGE_0_4;
3605 break;
3606 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3607 signal_levels |= DP_VOLTAGE_0_6;
3608 break;
3609 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3610 signal_levels |= DP_VOLTAGE_0_8;
3611 break;
3612 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
3613 signal_levels |= DP_VOLTAGE_1_2;
3614 break;
3615 }
3616 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
3617 case DP_TRAIN_PRE_EMPH_LEVEL_0:
3618 default:
3619 signal_levels |= DP_PRE_EMPHASIS_0;
3620 break;
3621 case DP_TRAIN_PRE_EMPH_LEVEL_1:
3622 signal_levels |= DP_PRE_EMPHASIS_3_5;
3623 break;
3624 case DP_TRAIN_PRE_EMPH_LEVEL_2:
3625 signal_levels |= DP_PRE_EMPHASIS_6;
3626 break;
3627 case DP_TRAIN_PRE_EMPH_LEVEL_3:
3628 signal_levels |= DP_PRE_EMPHASIS_9_5;
3629 break;
3630 }
3631 return signal_levels;
3632}
3633
3634
/*
 * Map the requested DPCD vswing/pre-emphasis combination to the SNB CPU
 * eDP EDP_LINK_TRAIN_* register encoding. Several distinct requests share
 * one hardware setting; unsupported combinations fall back to the lowest
 * (400-600mV, 0dB) setting.
 */
static uint32_t
gen6_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}
3661
3662
/*
 * Map the requested DPCD vswing/pre-emphasis combination to the IVB CPU
 * eDP EDP_LINK_TRAIN_*_IVB register encoding. Unsupported combinations
 * fall back to the 500mV/0dB setting.
 */
static uint32_t
gen7_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}
3692
/*
 * Program the voltage swing / pre-emphasis levels for the current link
 * training step (train_set[0]) into the port register, using the
 * platform-specific encoding.
 */
void
intel_dp_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	enum port port = intel_dig_port->port;
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	uint32_t signal_levels, mask = 0;
	uint8_t train_set = intel_dp->train_set[0];

	if (HAS_DDI(dev)) {
		signal_levels = ddi_signal_levels(intel_dp);

		/*
		 * On Broxton nothing is merged into the DP register
		 * (signal_levels = 0, mask stays 0); presumably the levels
		 * are programmed elsewhere — confirm against ddi code.
		 */
		if (IS_BROXTON(dev))
			signal_levels = 0;
		else
			mask = DDI_BUF_EMP_MASK;
	} else if (IS_CHERRYVIEW(dev)) {
		/* CHV/VLV: levels go out via DPIO sideband; mask stays 0. */
		signal_levels = chv_signal_levels(intel_dp);
	} else if (IS_VALLEYVIEW(dev)) {
		signal_levels = vlv_signal_levels(intel_dp);
	} else if (IS_GEN7(dev) && port == PORT_A) {
		signal_levels = gen7_edp_signal_levels(train_set);
		mask = EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	} else if (IS_GEN6(dev) && port == PORT_A) {
		signal_levels = gen6_edp_signal_levels(train_set);
		mask = EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	} else {
		signal_levels = gen4_signal_levels(train_set);
		mask = DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK;
	}

	if (mask)
		DRM_DEBUG_KMS("Using signal levels %08x\n", signal_levels);

	DRM_DEBUG_KMS("Using vswing level %d\n",
		train_set & DP_TRAIN_VOLTAGE_SWING_MASK);
	DRM_DEBUG_KMS("Using pre-emphasis level %d\n",
		(train_set & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			DP_TRAIN_PRE_EMPHASIS_SHIFT);

	/* Merge the new levels into the cached register value and flush. */
	intel_dp->DP = (intel_dp->DP & ~mask) | signal_levels;

	I915_WRITE(intel_dp->output_reg, intel_dp->DP);
	POSTING_READ(intel_dp->output_reg);
}
3739
/*
 * Update the cached DP port register with the requested link training
 * pattern and write it out, with a posting read to flush.
 */
void
intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
				       uint8_t dp_train_pat)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv =
		to_i915(intel_dig_port->base.base.dev);

	_intel_dp_set_link_train(intel_dp, &intel_dp->DP, dp_train_pat);

	I915_WRITE(intel_dp->output_reg, intel_dp->DP);
	POSTING_READ(intel_dp->output_reg);
}
3753
/*
 * Switch a DDI port's DP transport to idle-pattern transmission and,
 * except on port A, wait for the hardware to report idle-done.
 */
void intel_dp_set_idle_link_train(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum port port = intel_dig_port->port;
	uint32_t val;

	if (!HAS_DDI(dev))
		return;

	val = I915_READ(DP_TP_CTL(port));
	val &= ~DP_TP_CTL_LINK_TRAIN_MASK;
	val |= DP_TP_CTL_LINK_TRAIN_IDLE;
	I915_WRITE(DP_TP_CTL(port), val);

	/*
	 * Port A is skipped before the idle-done wait below.
	 * NOTE(review): the original rationale (likely an eDP/port-A
	 * hardware quirk) is not visible here — confirm before relying
	 * on the status bit for port A.
	 */
	if (port == PORT_A)
		return;

	if (wait_for((I915_READ(DP_TP_STATUS(port)) & DP_TP_STATUS_IDLE_DONE),
		     1))
		DRM_ERROR("Timed out waiting for DP idle patterns\n");
}
3784
/*
 * Take the DP link down on pre-DDI platforms: switch the port to the
 * idle training pattern, disable the port and audio, apply the IBX
 * pipe-B workaround, then honor the panel power-down delay.
 */
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_crtc *crtc = to_intel_crtc(intel_dig_port->base.base.crtc);
	enum port port = intel_dig_port->port;
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	/* DDI platforms tear the link down through a different path. */
	if (WARN_ON(HAS_DDI(dev)))
		return;

	if (WARN_ON((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0))
		return;

	DRM_DEBUG_KMS("\n");

	/* First switch to the idle pattern, using the CPT or non-CPT field. */
	if ((IS_GEN7(dev) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev) && port != PORT_A)) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		if (IS_CHERRYVIEW(dev))
			DP &= ~DP_LINK_TRAIN_MASK_CHV;
		else
			DP &= ~DP_LINK_TRAIN_MASK;
		DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	I915_WRITE(intel_dp->output_reg, DP);
	POSTING_READ(intel_dp->output_reg);

	/* Then actually disable the port and audio. */
	DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	I915_WRITE(intel_dp->output_reg, DP);
	POSTING_READ(intel_dp->output_reg);

	/*
	 * IBX workaround when the port was driven from pipe B: briefly
	 * re-enable the port on pipe A with training pattern 1, then
	 * disable it again. Underrun reporting on pipe A is suppressed
	 * around the transient enable. NOTE(review): the precise hardware
	 * erratum this works around is not documented here.
	 */
	if (HAS_PCH_IBX(dev) && crtc->pipe == PIPE_B && port != PORT_A) {
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* Select pipe A and re-enable with TP1. */
		DP &= ~(DP_PIPEB_SELECT | DP_LINK_TRAIN_MASK);
		DP |= DP_PORT_EN | DP_LINK_TRAIN_PAT_1;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);

		DP &= ~DP_PORT_EN;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv->dev, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->panel_power_down_delay);

	intel_dp->DP = DP;
}
3853
/*
 * Read and cache the sink's DPCD receiver capabilities, plus PSR caps
 * (eDP), optional eDP 1.4 link-rate table, and downstream port info.
 * Returns false if the DPCD could not be read or looks invalid.
 */
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint8_t rev;

	if (intel_dp_dpcd_read_wake(&intel_dp->aux, 0x000, intel_dp->dpcd,
				    sizeof(intel_dp->dpcd)) < 0)
		return false;

	DRM_DEBUG_KMS("DPCD: %*ph\n", (int) sizeof(intel_dp->dpcd), intel_dp->dpcd);

	/* A DPCD revision of 0 means no usable sink is attached. */
	if (intel_dp->dpcd[DP_DPCD_REV] == 0)
		return false;

	/* Check PSR support only for eDP; clear stale state first. */
	memset(intel_dp->psr_dpcd, 0, sizeof(intel_dp->psr_dpcd));
	if (is_edp(intel_dp)) {
		intel_dp_dpcd_read_wake(&intel_dp->aux, DP_PSR_SUPPORT,
					intel_dp->psr_dpcd,
					sizeof(intel_dp->psr_dpcd));
		if (intel_dp->psr_dpcd[0] & DP_PSR_IS_SUPPORTED) {
			dev_priv->psr.sink_support = true;
			DRM_DEBUG_KMS("Detected EDP PSR Panel.\n");
		}

		/* PSR2 additionally requires gen9+ and AUX frame sync. */
		if (INTEL_INFO(dev)->gen >= 9 &&
			(intel_dp->psr_dpcd[0] & DP_PSR2_IS_SUPPORTED)) {
			uint8_t frame_sync_cap;

			dev_priv->psr.sink_support = true;
			intel_dp_dpcd_read_wake(&intel_dp->aux,
					DP_SINK_DEVICE_AUX_FRAME_SYNC_CAP,
					&frame_sync_cap, 1);
			dev_priv->psr.aux_frame_sync = frame_sync_cap ? true : false;
			/* PSR2 is only enabled when AUX frame sync works. */
			dev_priv->psr.psr2_support = dev_priv->psr.aux_frame_sync;
			DRM_DEBUG_KMS("PSR2 %s on sink",
				dev_priv->psr.psr2_support ? "supported" : "not supported");
		}
	}

	DRM_DEBUG_KMS("Display Port TPS3 support: source %s, sink %s\n",
		      yesno(intel_dp_source_supports_hbr2(intel_dp)),
		      yesno(drm_dp_tps3_supported(intel_dp->dpcd)));

	/* eDP 1.4+ sinks may publish an explicit link-rate table. */
	if (is_edp(intel_dp) &&
	    (intel_dp->dpcd[DP_EDP_CONFIGURATION_CAP] &	DP_DPCD_DISPLAY_CONTROL_CAPABLE) &&
	    (intel_dp_dpcd_read_wake(&intel_dp->aux, DP_EDP_DPCD_REV, &rev, 1) == 1) &&
	    (rev >= 0x03)) {
		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
		int i;

		intel_dp_dpcd_read_wake(&intel_dp->aux,
				DP_SUPPORTED_LINK_RATES,
				sink_rates,
				sizeof(sink_rates));

		for (i = 0; i < ARRAY_SIZE(sink_rates); i++) {
			int val = le16_to_cpu(sink_rates[i]);

			/* A zero entry terminates the table. */
			if (val == 0)
				break;

			/*
			 * Scale the raw entry into the driver's kHz-based
			 * rate units. NOTE(review): entries are presumably
			 * in 200kHz units per the eDP spec — confirm.
			 */
			intel_dp->sink_rates[i] = (val * 200) / 10;
		}
		intel_dp->num_sink_rates = i;
	}

	intel_dp_print_rates(intel_dp);

	/* No downstream port: nothing more to fetch. */
	if (!(intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
	      DP_DWN_STRM_PORT_PRESENT))
		return true;

	/* DPCD 1.0 has no downstream port descriptors to read. */
	if (intel_dp->dpcd[DP_DPCD_REV] == 0x10)
		return true;

	if (intel_dp_dpcd_read_wake(&intel_dp->aux, DP_DOWNSTREAM_PORT_0,
				    intel_dp->downstream_ports,
				    DP_MAX_DOWNSTREAM_PORTS) < 0)
		return false;

	return true;
}
3943
3944static void
3945intel_dp_probe_oui(struct intel_dp *intel_dp)
3946{
3947 u8 buf[3];
3948
3949 if (!(intel_dp->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
3950 return;
3951
3952 if (intel_dp_dpcd_read_wake(&intel_dp->aux, DP_SINK_OUI, buf, 3) == 3)
3953 DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
3954 buf[0], buf[1], buf[2]);
3955
3956 if (intel_dp_dpcd_read_wake(&intel_dp->aux, DP_BRANCH_OUI, buf, 3) == 3)
3957 DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
3958 buf[0], buf[1], buf[2]);
3959}
3960
/*
 * Probe the sink for MST capability (requires can_mst and DPCD >= 1.2),
 * update intel_dp->is_mst, and enable/disable the MST topology manager
 * to match. Returns the resulting MST state.
 */
static bool
intel_dp_probe_mst(struct intel_dp *intel_dp)
{
	u8 buf[1];

	if (!intel_dp->can_mst)
		return false;

	/* MST was introduced with DPCD revision 1.2. */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x12)
		return false;

	if (intel_dp_dpcd_read_wake(&intel_dp->aux, DP_MSTM_CAP, buf, 1)) {
		if (buf[0] & DP_MST_CAP) {
			DRM_DEBUG_KMS("Sink is MST capable\n");
			intel_dp->is_mst = true;
		} else {
			DRM_DEBUG_KMS("Sink is not MST capable\n");
			intel_dp->is_mst = false;
		}
	}

	/* Keep the topology manager in sync with the detected state. */
	drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
	return intel_dp->is_mst;
}
3985
/*
 * Stop sink CRC calculation: clear DP_TEST_SINK_START, then wait up to
 * 10 vblanks for the sink's CRC test counter to drain to zero.
 * Always re-enables IPS (disabled by intel_dp_sink_crc_start()) on exit.
 * Returns 0 on success, -EIO on AUX failure, -ETIMEDOUT if the counter
 * never reached zero.
 */
static int intel_dp_sink_crc_stop(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(dig_port->base.base.crtc);
	u8 buf;
	int ret = 0;
	int count = 0;
	int attempts = 10;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_SINK, &buf) < 0) {
		DRM_DEBUG_KMS("Sink CRC couldn't be stopped properly\n");
		ret = -EIO;
		goto out;
	}

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_SINK,
			       buf & ~DP_TEST_SINK_START) < 0) {
		DRM_DEBUG_KMS("Sink CRC couldn't be stopped properly\n");
		ret = -EIO;
		goto out;
	}

	/* Poll once per vblank until the sink's test counter is zero. */
	do {
		intel_wait_for_vblank(dev, intel_crtc->pipe);

		if (drm_dp_dpcd_readb(&intel_dp->aux,
				      DP_TEST_SINK_MISC, &buf) < 0) {
			ret = -EIO;
			goto out;
		}
		count = buf & DP_TEST_COUNT_MASK;
	} while (--attempts && count);

	if (attempts == 0) {
		DRM_DEBUG_KMS("TIMEOUT: Sink CRC counter is not zeroed after calculation is stopped\n");
		ret = -ETIMEDOUT;
	}

 out:
	hsw_enable_ips(intel_crtc);
	return ret;
}
4029
/*
 * Start sink CRC calculation. Verifies the sink supports CRC, stops any
 * calculation already in flight, disables IPS (re-enabled by the stop
 * path), sets DP_TEST_SINK_START and waits one vblank for it to latch.
 * Returns 0 on success, -EIO on AUX failure, -ENOTTY if unsupported.
 */
static int intel_dp_sink_crc_start(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(dig_port->base.base.crtc);
	u8 buf;
	int ret;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_SINK_MISC, &buf) < 0)
		return -EIO;

	if (!(buf & DP_TEST_CRC_SUPPORTED))
		return -ENOTTY;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_SINK, &buf) < 0)
		return -EIO;

	/* A calculation left running must be stopped before restarting. */
	if (buf & DP_TEST_SINK_START) {
		ret = intel_dp_sink_crc_stop(intel_dp);
		if (ret)
			return ret;
	}

	/* Undone in intel_dp_sink_crc_stop(). */
	hsw_disable_ips(intel_crtc);

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_SINK,
			       buf | DP_TEST_SINK_START) < 0) {
		hsw_enable_ips(intel_crtc);
		return -EIO;
	}

	intel_wait_for_vblank(dev, intel_crtc->pipe);
	return 0;
}
4064
/*
 * Read a frame CRC from the sink into @crc (6 bytes). Starts sink CRC
 * calculation, waits up to 6 vblanks for a CRC to become available,
 * reads it, and always stops the calculation on exit.
 * Returns 0 on success or a negative errno.
 */
int intel_dp_sink_crc(struct intel_dp *intel_dp, u8 *crc)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(dig_port->base.base.crtc);
	u8 buf;
	int count, ret;
	int attempts = 6;

	ret = intel_dp_sink_crc_start(intel_dp);
	if (ret)
		return ret;

	/* Poll once per vblank until the sink reports a nonzero count. */
	do {
		intel_wait_for_vblank(dev, intel_crtc->pipe);

		if (drm_dp_dpcd_readb(&intel_dp->aux,
				      DP_TEST_SINK_MISC, &buf) < 0) {
			ret = -EIO;
			goto stop;
		}
		count = buf & DP_TEST_COUNT_MASK;

	} while (--attempts && count == 0);

	if (attempts == 0) {
		DRM_ERROR("Panel is unable to calculate any CRC after 6 vblanks\n");
		ret = -ETIMEDOUT;
		goto stop;
	}

	if (drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_CRC_R_CR, crc, 6) < 0) {
		ret = -EIO;
		goto stop;
	}

stop:
	intel_dp_sink_crc_stop(intel_dp);
	return ret;
}
4105
4106static bool
4107intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
4108{
4109 return intel_dp_dpcd_read_wake(&intel_dp->aux,
4110 DP_DEVICE_SERVICE_IRQ_VECTOR,
4111 sink_irq_vector, 1) == 1;
4112}
4113
4114static bool
4115intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *sink_irq_vector)
4116{
4117 int ret;
4118
4119 ret = intel_dp_dpcd_read_wake(&intel_dp->aux,
4120 DP_SINK_COUNT_ESI,
4121 sink_irq_vector, 14);
4122 if (ret != 14)
4123 return false;
4124
4125 return true;
4126}
4127
4128static uint8_t intel_dp_autotest_link_training(struct intel_dp *intel_dp)
4129{
4130 uint8_t test_result = DP_TEST_ACK;
4131 return test_result;
4132}
4133
4134static uint8_t intel_dp_autotest_video_pattern(struct intel_dp *intel_dp)
4135{
4136 uint8_t test_result = DP_TEST_NAK;
4137 return test_result;
4138}
4139
/*
 * Handle the EDID-read compliance autotest: if the cached EDID read was
 * clean, write the last block's checksum back to the sink and ACK;
 * otherwise NAK and request the failsafe resolution. Always marks a
 * compliance test as active.
 */
static uint8_t intel_dp_autotest_edid(struct intel_dp *intel_dp)
{
	uint8_t test_result = DP_TEST_NAK;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_connector *connector = &intel_connector->base;

	/*
	 * No EDID, a corrupt EDID, or too many i2c DEFERs counts as a
	 * failed read for compliance purposes.
	 */
	if (intel_connector->detect_edid == NULL ||
	    connector->edid_corrupt ||
	    intel_dp->aux.i2c_defer_count > 6) {
		if (intel_dp->aux.i2c_nack_count > 0 ||
			intel_dp->aux.i2c_defer_count > 0)
			DRM_DEBUG_KMS("EDID read had %d NACKs, %d DEFERs\n",
				      intel_dp->aux.i2c_nack_count,
				      intel_dp->aux.i2c_defer_count);
		intel_dp->compliance_test_data = INTEL_DP_RESOLUTION_FAILSAFE;
	} else {
		struct edid *block = intel_connector->detect_edid;

		/* Advance to the last EDID extension block. */
		block += intel_connector->detect_edid->extensions;

		if (!drm_dp_dpcd_write(&intel_dp->aux,
					DP_TEST_EDID_CHECKSUM,
					&block->checksum,
					1))
			DRM_DEBUG_KMS("Failed to write EDID checksum\n");

		test_result = DP_TEST_ACK | DP_TEST_EDID_CHECKSUM_WRITE;
		intel_dp->compliance_test_data = INTEL_DP_RESOLUTION_STANDARD;
	}

	/* Mark the EDID compliance test as running. */
	intel_dp->compliance_test_active = 1;

	return test_result;
}
4185
4186static uint8_t intel_dp_autotest_phy_pattern(struct intel_dp *intel_dp)
4187{
4188 uint8_t test_result = DP_TEST_NAK;
4189 return test_result;
4190}
4191
/*
 * Read the sink's DP_TEST_REQUEST, dispatch to the matching autotest
 * handler, and write the handler's ACK/NAK back to DP_TEST_RESPONSE.
 */
static void intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	uint8_t response = DP_TEST_NAK;
	uint8_t rxdata = 0;
	int status = 0;

	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_REQUEST, &rxdata, 1);
	if (status <= 0) {
		DRM_DEBUG_KMS("Could not read test request from sink\n");
		/* Still report a NAK so the sink isn't left waiting. */
		goto update_status;
	}

	switch (rxdata) {
	case DP_TEST_LINK_TRAINING:
		DRM_DEBUG_KMS("LINK_TRAINING test requested\n");
		intel_dp->compliance_test_type = DP_TEST_LINK_TRAINING;
		response = intel_dp_autotest_link_training(intel_dp);
		break;
	case DP_TEST_LINK_VIDEO_PATTERN:
		DRM_DEBUG_KMS("TEST_PATTERN test requested\n");
		intel_dp->compliance_test_type = DP_TEST_LINK_VIDEO_PATTERN;
		response = intel_dp_autotest_video_pattern(intel_dp);
		break;
	case DP_TEST_LINK_EDID_READ:
		DRM_DEBUG_KMS("EDID test requested\n");
		intel_dp->compliance_test_type = DP_TEST_LINK_EDID_READ;
		response = intel_dp_autotest_edid(intel_dp);
		break;
	case DP_TEST_LINK_PHY_TEST_PATTERN:
		DRM_DEBUG_KMS("PHY_PATTERN test requested\n");
		intel_dp->compliance_test_type = DP_TEST_LINK_PHY_TEST_PATTERN;
		response = intel_dp_autotest_phy_pattern(intel_dp);
		break;
	default:
		DRM_DEBUG_KMS("Invalid test request '%02x'\n", rxdata);
		break;
	}

update_status:
	status = drm_dp_dpcd_write(&intel_dp->aux,
				   DP_TEST_RESPONSE,
				   &response, 1);
	if (status <= 0)
		DRM_DEBUG_KMS("Could not write test response to sink\n");
}
4237
/*
 * Service an MST sink interrupt: read the ESI block, retrain if channel
 * EQ failed, hand the ESI to the topology manager, ack the handled
 * event bits (retrying the ack up to 3 times), and loop while new ESI
 * data keeps arriving. If the ESI read fails, MST is torn down and a
 * hotplug event is sent. Returns the topology manager's result, 0 for
 * an unhandled IRQ, or -EINVAL when not in MST mode / on ESI failure.
 */
static int
intel_dp_check_mst_status(struct intel_dp *intel_dp)
{
	bool bret;

	if (intel_dp->is_mst) {
		u8 esi[16] = { 0 };
		int ret = 0;
		int retry;
		bool handled;
		bret = intel_dp_get_sink_irq_esi(intel_dp, esi);
go_again:
		if (bret == true) {

			/* Retrain if the link's channel EQ has degraded. */
			if (intel_dp->active_mst_links &&
			    !drm_dp_channel_eq_ok(&esi[10], intel_dp->lane_count)) {
				DRM_DEBUG_KMS("channel EQ not ok, retraining\n");
				intel_dp_start_link_train(intel_dp);
				intel_dp_stop_link_train(intel_dp);
			}

			DRM_DEBUG_KMS("got esi %3ph\n", esi);
			ret = drm_dp_mst_hpd_irq(&intel_dp->mst_mgr, esi, &handled);

			if (handled) {
				/* Ack the serviced event bits; retry on short writes. */
				for (retry = 0; retry < 3; retry++) {
					int wret;
					wret = drm_dp_dpcd_write(&intel_dp->aux,
								 DP_SINK_COUNT_ESI+1,
								 &esi[1], 3);
					if (wret == 3) {
						break;
					}
				}

				/* More events may have queued up meanwhile. */
				bret = intel_dp_get_sink_irq_esi(intel_dp, esi);
				if (bret == true) {
					DRM_DEBUG_KMS("got esi2 %3ph\n", esi);
					goto go_again;
				}
			} else
				ret = 0;

			return ret;
		} else {
			struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
			DRM_DEBUG_KMS("failed to get ESI - device may have failed\n");
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
			/* Let userspace re-detect the (now SST) connector. */
			drm_kms_helper_hotplug_event(intel_dig_port->base.base.dev);
		}
	}
	return -EINVAL;
}
4294
4295
4296
4297
4298
4299
4300
4301
4302
/*
 * Short-pulse link check: clear compliance state, re-read link status
 * and DPCD, ack any sink IRQs, and retrain if channel EQ is no longer
 * OK (or a link-training compliance test is pending). Caller must hold
 * the connection_mutex.
 */
static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
	u8 sink_irq_vector;
	u8 link_status[DP_LINK_STATUS_SIZE];

	WARN_ON(!drm_modeset_is_locked(&dev->mode_config.connection_mutex));

	/* Reset compliance-test state on every short pulse. */
	intel_dp->compliance_test_active = 0;
	intel_dp->compliance_test_type = 0;
	intel_dp->compliance_test_data = 0;

	/* Nothing to check if the encoder isn't driving an active crtc. */
	if (!intel_encoder->base.crtc)
		return;

	if (!to_intel_crtc(intel_encoder->base.crtc)->active)
		return;

	/* Try to read receiver status if the link appears to be up */
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		return;
	}

	/* Now read the DPCD to see if it's actually running */
	if (!intel_dp_get_dpcd(intel_dp)) {
		return;
	}

	/* Read, ack and log any pending sink service IRQs. */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Writing the vector back clears the serviced bits. */
		drm_dp_dpcd_writeb(&intel_dp->aux,
				   DP_DEVICE_SERVICE_IRQ_VECTOR,
				   sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			DRM_DEBUG_DRIVER("Test request in short pulse not handled\n");
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

	/* Retrain on EQ failure or a pending link-training test. */
	if ((intel_dp->compliance_test_type == DP_TEST_LINK_TRAINING) ||
	    (!drm_dp_channel_eq_ok(link_status, intel_dp->lane_count))) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      intel_encoder->base.name);
		intel_dp_start_link_train(intel_dp);
		intel_dp_stop_link_train(intel_dp);
	}
}
4360
4361
4362static enum drm_connector_status
4363intel_dp_detect_dpcd(struct intel_dp *intel_dp)
4364{
4365 uint8_t *dpcd = intel_dp->dpcd;
4366 uint8_t type;
4367
4368 if (!intel_dp_get_dpcd(intel_dp))
4369 return connector_status_disconnected;
4370
4371
4372 if (!(dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_PRESENT))
4373 return connector_status_connected;
4374
4375
4376 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
4377 intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
4378 uint8_t reg;
4379
4380 if (intel_dp_dpcd_read_wake(&intel_dp->aux, DP_SINK_COUNT,
4381 ®, 1) < 0)
4382 return connector_status_unknown;
4383
4384 return DP_GET_SINK_COUNT(reg) ? connector_status_connected
4385 : connector_status_disconnected;
4386 }
4387
4388
4389 if (drm_probe_ddc(&intel_dp->aux.ddc))
4390 return connector_status_connected;
4391
4392
4393 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
4394 type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
4395 if (type == DP_DS_PORT_TYPE_VGA ||
4396 type == DP_DS_PORT_TYPE_NON_EDID)
4397 return connector_status_unknown;
4398 } else {
4399 type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
4400 DP_DWN_STRM_PORT_TYPE_MASK;
4401 if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
4402 type == DP_DWN_STRM_PORT_TYPE_OTHER)
4403 return connector_status_unknown;
4404 }
4405
4406
4407 DRM_DEBUG_KMS("Broken DP branch device, ignoring\n");
4408 return connector_status_disconnected;
4409}
4410
4411static enum drm_connector_status
4412edp_detect(struct intel_dp *intel_dp)
4413{
4414 struct drm_device *dev = intel_dp_to_dev(intel_dp);
4415 enum drm_connector_status status;
4416
4417 status = intel_panel_detect(dev);
4418 if (status == connector_status_unknown)
4419 status = connector_status_connected;
4420
4421 return status;
4422}
4423
4424static bool ibx_digital_port_connected(struct drm_i915_private *dev_priv,
4425 struct intel_digital_port *port)
4426{
4427 u32 bit;
4428
4429 switch (port->port) {
4430 case PORT_A:
4431 return true;
4432 case PORT_B:
4433 bit = SDE_PORTB_HOTPLUG;
4434 break;
4435 case PORT_C:
4436 bit = SDE_PORTC_HOTPLUG;
4437 break;
4438 case PORT_D:
4439 bit = SDE_PORTD_HOTPLUG;
4440 break;
4441 default:
4442 MISSING_CASE(port->port);
4443 return false;
4444 }
4445
4446 return I915_READ(SDEISR) & bit;
4447}
4448
4449static bool cpt_digital_port_connected(struct drm_i915_private *dev_priv,
4450 struct intel_digital_port *port)
4451{
4452 u32 bit;
4453
4454 switch (port->port) {
4455 case PORT_A:
4456 return true;
4457 case PORT_B:
4458 bit = SDE_PORTB_HOTPLUG_CPT;
4459 break;
4460 case PORT_C:
4461 bit = SDE_PORTC_HOTPLUG_CPT;
4462 break;
4463 case PORT_D:
4464 bit = SDE_PORTD_HOTPLUG_CPT;
4465 break;
4466 case PORT_E:
4467 bit = SDE_PORTE_HOTPLUG_SPT;
4468 break;
4469 default:
4470 MISSING_CASE(port->port);
4471 return false;
4472 }
4473
4474 return I915_READ(SDEISR) & bit;
4475}
4476
4477static bool g4x_digital_port_connected(struct drm_i915_private *dev_priv,
4478 struct intel_digital_port *port)
4479{
4480 u32 bit;
4481
4482 switch (port->port) {
4483 case PORT_B:
4484 bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
4485 break;
4486 case PORT_C:
4487 bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
4488 break;
4489 case PORT_D:
4490 bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
4491 break;
4492 default:
4493 MISSING_CASE(port->port);
4494 return false;
4495 }
4496
4497 return I915_READ(PORT_HOTPLUG_STAT) & bit;
4498}
4499
4500static bool gm45_digital_port_connected(struct drm_i915_private *dev_priv,
4501 struct intel_digital_port *port)
4502{
4503 u32 bit;
4504
4505 switch (port->port) {
4506 case PORT_B:
4507 bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
4508 break;
4509 case PORT_C:
4510 bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
4511 break;
4512 case PORT_D:
4513 bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
4514 break;
4515 default:
4516 MISSING_CASE(port->port);
4517 return false;
4518 }
4519
4520 return I915_READ(PORT_HOTPLUG_STAT) & bit;
4521}
4522
/*
 * Broxton live-status check: resolve the port from the encoder's HPD
 * pin, then consult the matching bit in GEN8_DE_PORT_ISR.
 */
static bool bxt_digital_port_connected(struct drm_i915_private *dev_priv,
				       struct intel_digital_port *intel_dig_port)
{
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	enum port port;
	u32 bit;

	/* The port is derived from the HPD pin rather than read directly. */
	intel_hpd_pin_to_port(intel_encoder->hpd_pin, &port);
	switch (port) {
	case PORT_A:
		bit = BXT_DE_PORT_HP_DDIA;
		break;
	case PORT_B:
		bit = BXT_DE_PORT_HP_DDIB;
		break;
	case PORT_C:
		bit = BXT_DE_PORT_HP_DDIC;
		break;
	default:
		MISSING_CASE(port);
		return false;
	}

	return I915_READ(GEN8_DE_PORT_ISR) & bit;
}
4548
4549
4550
4551
4552
4553
4554
4555
/*
 * intel_digital_port_connected - is the specified port connected?
 * @dev_priv: i915 private structure
 * @port: the port to test
 *
 * Dispatch to the platform-specific live-status check. Note the order:
 * IBX is tested before the generic PCH-split case.
 */
bool intel_digital_port_connected(struct drm_i915_private *dev_priv,
				  struct intel_digital_port *port)
{
	if (HAS_PCH_IBX(dev_priv))
		return ibx_digital_port_connected(dev_priv, port);
	else if (HAS_PCH_SPLIT(dev_priv))
		return cpt_digital_port_connected(dev_priv, port);
	else if (IS_BROXTON(dev_priv))
		return bxt_digital_port_connected(dev_priv, port);
	else if (IS_GM45(dev_priv))
		return gm45_digital_port_connected(dev_priv, port);
	else
		return g4x_digital_port_connected(dev_priv, port);
}
4570
4571static struct edid *
4572intel_dp_get_edid(struct intel_dp *intel_dp)
4573{
4574 struct intel_connector *intel_connector = intel_dp->attached_connector;
4575
4576
4577 if (intel_connector->edid) {
4578
4579 if (IS_ERR(intel_connector->edid))
4580 return NULL;
4581
4582 return drm_edid_duplicate(intel_connector->edid);
4583 } else
4584 return drm_get_edid(&intel_connector->base,
4585 &intel_dp->aux.ddc);
4586}
4587
/*
 * Fetch the EDID, cache it on the connector, and derive has_audio from
 * it unless the force_audio property overrides the EDID's answer.
 */
static void
intel_dp_set_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct edid *edid;

	edid = intel_dp_get_edid(intel_dp);
	intel_connector->detect_edid = edid;

	if (intel_dp->force_audio != HDMI_AUDIO_AUTO)
		intel_dp->has_audio = intel_dp->force_audio == HDMI_AUDIO_ON;
	else
		intel_dp->has_audio = drm_detect_monitor_audio(edid);
}
4602
4603static void
4604intel_dp_unset_edid(struct intel_dp *intel_dp)
4605{
4606 struct intel_connector *intel_connector = intel_dp->attached_connector;
4607
4608 kfree(intel_connector->detect_edid);
4609 intel_connector->detect_edid = NULL;
4610
4611 intel_dp->has_audio = false;
4612}
4613
/*
 * Connector .detect() callback: determine whether a sink is attached.
 * MST-active connectors report disconnected here (the MST connectors
 * take over). Otherwise, with the AUX power domain held: run the
 * platform detect, probe OUI and MST, fetch the EDID, and service any
 * pending sink IRQs (including automated test requests).
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = connector->dev;
	enum drm_connector_status status;
	enum intel_display_power_domain power_domain;
	bool ret;
	u8 sink_irq_vector;

	DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n",
		      connector->base.id, connector->name);
	intel_dp_unset_edid(intel_dp);

	/* While MST is active, the SST connector is not used directly. */
	if (intel_dp->is_mst) {
		if (intel_encoder->type != INTEL_OUTPUT_EDP)
			intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
		return connector_status_disconnected;
	}

	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_get(to_i915(dev), power_domain);

	/* eDP panels are "detected" by the panel probe; others via HPD. */
	if (is_edp(intel_dp))
		status = edp_detect(intel_dp);
	else if (intel_digital_port_connected(to_i915(dev),
					      dp_to_dig_port(intel_dp)))
		status = intel_dp_detect_dpcd(intel_dp);
	else
		status = connector_status_disconnected;

	/* Nothing attached: drop any stale compliance-test state. */
	if (status != connector_status_connected) {
		intel_dp->compliance_test_active = 0;
		intel_dp->compliance_test_type = 0;
		intel_dp->compliance_test_data = 0;

		goto out;
	}

	intel_dp_probe_oui(intel_dp);

	/* If the sink goes MST, hand off to the MST connectors. */
	ret = intel_dp_probe_mst(intel_dp);
	if (ret) {
		if (intel_encoder->type != INTEL_OUTPUT_EDP)
			intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
		status = connector_status_disconnected;
		goto out;
	}

	/*
	 * Clear the i2c NACK/DEFER counters before the EDID read so the
	 * EDID autotest can judge this read in isolation.
	 */
	intel_dp->aux.i2c_nack_count = 0;
	intel_dp->aux.i2c_defer_count = 0;

	intel_dp_set_edid(intel_dp);

	if (intel_encoder->type != INTEL_OUTPUT_EDP)
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	status = connector_status_connected;

	/* Service (and ack) any pending sink service IRQs. */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Writing the vector back clears the serviced bits. */
		drm_dp_dpcd_writeb(&intel_dp->aux,
				   DP_DEVICE_SERVICE_IRQ_VECTOR,
				   sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

out:
	intel_display_power_put(to_i915(dev), power_domain);
	return status;
}
4701
4702static void
4703intel_dp_force(struct drm_connector *connector)
4704{
4705 struct intel_dp *intel_dp = intel_attached_dp(connector);
4706 struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
4707 struct drm_i915_private *dev_priv = to_i915(intel_encoder->base.dev);
4708 enum intel_display_power_domain power_domain;
4709
4710 DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n",
4711 connector->base.id, connector->name);
4712 intel_dp_unset_edid(intel_dp);
4713
4714 if (connector->status != connector_status_connected)
4715 return;
4716
4717 power_domain = intel_display_port_aux_power_domain(intel_encoder);
4718 intel_display_power_get(dev_priv, power_domain);
4719
4720 intel_dp_set_edid(intel_dp);
4721
4722 intel_display_power_put(dev_priv, power_domain);
4723
4724 if (intel_encoder->type != INTEL_OUTPUT_EDP)
4725 intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
4726}
4727
4728static int intel_dp_get_modes(struct drm_connector *connector)
4729{
4730 struct intel_connector *intel_connector = to_intel_connector(connector);
4731 struct edid *edid;
4732
4733 edid = intel_connector->detect_edid;
4734 if (edid) {
4735 int ret = intel_connector_update_modes(connector, edid);
4736 if (ret)
4737 return ret;
4738 }
4739
4740
4741 if (is_edp(intel_attached_dp(connector)) &&
4742 intel_connector->panel.fixed_mode) {
4743 struct drm_display_mode *mode;
4744
4745 mode = drm_mode_duplicate(connector->dev,
4746 intel_connector->panel.fixed_mode);
4747 if (mode) {
4748 drm_mode_probed_add(connector, mode);
4749 return 1;
4750 }
4751 }
4752
4753 return 0;
4754}
4755
4756static bool
4757intel_dp_detect_audio(struct drm_connector *connector)
4758{
4759 bool has_audio = false;
4760 struct edid *edid;
4761
4762 edid = to_intel_connector(connector)->detect_edid;
4763 if (edid)
4764 has_audio = drm_detect_monitor_audio(edid);
4765
4766 return has_audio;
4767}
4768
/*
 * Connector property setter: handles the force-audio and
 * broadcast-RGB properties, plus the scaling-mode property on eDP
 * panels.  Returns 0 on success or -EINVAL for unknown
 * properties/values.  If the change affects the output, the attached
 * CRTC's mode is restored to apply it.
 */
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct intel_encoder *intel_encoder = intel_attached_encoder(connector);
	struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
	int ret;

	ret = drm_object_property_set_value(&connector->base, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		/* AUTO re-detects audio support from the cached EDID. */
		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		bool old_auto = intel_dp->color_range_auto;
		bool old_range = intel_dp->limited_color_range;

		switch (val) {
		case INTEL_BROADCAST_RGB_AUTO:
			intel_dp->color_range_auto = true;
			break;
		case INTEL_BROADCAST_RGB_FULL:
			intel_dp->color_range_auto = false;
			intel_dp->limited_color_range = false;
			break;
		case INTEL_BROADCAST_RGB_LIMITED:
			intel_dp->color_range_auto = false;
			intel_dp->limited_color_range = true;
			break;
		default:
			return -EINVAL;
		}

		/* Nothing actually changed, so skip the modeset. */
		if (old_auto == intel_dp->color_range_auto &&
		    old_range == intel_dp->limited_color_range)
			return 0;

		goto done;
	}

	if (is_edp(intel_dp) &&
	    property == connector->dev->mode_config.scaling_mode_property) {
		if (val == DRM_MODE_SCALE_NONE) {
			DRM_DEBUG_KMS("no scaling not supported\n");
			return -EINVAL;
		}

		if (intel_connector->panel.fitting_mode == val) {
			/* the eDP scaling property is not changed */
			return 0;
		}
		intel_connector->panel.fitting_mode = val;

		goto done;
	}

	return -EINVAL;

done:
	if (intel_encoder->base.crtc)
		intel_crtc_restore_mode(intel_encoder->base.crtc);

	return 0;
}
4856
/*
 * Free everything hanging off a DP connector: the detect-time EDID
 * cache, the EDID probed at init (which may be an ERR_PTR sentinel),
 * the panel state for eDP, and finally the connector itself.
 */
static void
intel_dp_connector_destroy(struct drm_connector *connector)
{
	struct intel_connector *intel_connector = to_intel_connector(connector);

	kfree(intel_connector->detect_edid);

	/* intel_connector->edid may hold ERR_PTR(-ENOENT/-EINVAL). */
	if (!IS_ERR_OR_NULL(intel_connector->edid))
		kfree(intel_connector->edid);

	/* Can't use is_edp() here: the encoder may already have been
	 * destroyed, so test the connector type instead. */
	if (connector->connector_type == DRM_MODE_CONNECTOR_eDP)
		intel_panel_fini(&intel_connector->panel);

	drm_connector_cleanup(connector);
	kfree(connector);
}
4875
/*
 * Tear down a DP encoder: stop MST, force panel VDD off on eDP (the
 * delayed-off work may still have it enabled), drop the reboot
 * notifier, and free the digital port.
 */
void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_digital_port *intel_dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &intel_dig_port->dp;

	intel_dp_mst_encoder_cleanup(intel_dig_port);
	if (is_edp(intel_dp)) {
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		/*
		 * VDD might still be enabled due to the delayed VDD off;
		 * make sure it is actually turned off here, under pps_lock.
		 */
		pps_lock(intel_dp);
		edp_panel_vdd_off_sync(intel_dp);
		pps_unlock(intel_dp);

		if (intel_dp->edp_notifier.notifier_call) {
			unregister_reboot_notifier(&intel_dp->edp_notifier);
			intel_dp->edp_notifier.notifier_call = NULL;
		}
	}
	drm_encoder_cleanup(encoder);
	kfree(intel_dig_port);
}
4900
/*
 * Suspend hook: make sure eDP panel VDD is really off before the
 * system suspends, since the delayed VDD-off work won't run anymore.
 */
void intel_dp_encoder_suspend(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);

	if (!is_edp(intel_dp))
		return;

	/*
	 * VDD might still be enabled due to the delayed VDD off;
	 * cancel the work and turn VDD off synchronously.
	 */
	cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
	pps_lock(intel_dp);
	edp_panel_vdd_off_sync(intel_dp);
	pps_unlock(intel_dp);
}
4917
/*
 * Bring our VDD state tracking in line with hardware reality after
 * boot/resume.  Caller must hold pps_mutex.
 */
static void intel_edp_panel_vdd_sanitize(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum intel_display_power_domain power_domain;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!edp_have_panel_vdd(intel_dp))
		return;

	/*
	 * The VDD bit needs a power domain reference, so if the bit is
	 * already enabled when we boot or resume, grab this reference and
	 * schedule a vdd off, so we don't hold on to the reference
	 * indefinitely.
	 */
	DRM_DEBUG_KMS("VDD left on by BIOS, adjusting state tracking\n");
	power_domain = intel_display_port_aux_power_domain(&intel_dig_port->base);
	intel_display_power_get(dev_priv, power_domain);

	edp_panel_vdd_schedule_off(intel_dp);
}
4942
/*
 * drm_encoder .reset hook: re-synchronize eDP power sequencer state
 * with the hardware (e.g. after the BIOS has been at it).  Only eDP
 * encoders need this.
 */
void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp;

	if (to_intel_encoder(encoder)->type != INTEL_OUTPUT_EDP)
		return;

	intel_dp = enc_to_intel_dp(encoder);

	pps_lock(intel_dp);

	/*
	 * Read out the current power sequencer assignment,
	 * in case the BIOS did something with it.
	 */
	if (IS_VALLEYVIEW(encoder->dev) || IS_CHERRYVIEW(encoder->dev))
		vlv_initial_power_sequencer_setup(intel_dp);

	intel_edp_panel_vdd_sanitize(intel_dp);

	pps_unlock(intel_dp);
}
4965
/* drm_connector hooks shared by DP and eDP connectors. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_atomic_helper_connector_dpms,
	.detect = intel_dp_detect,
	.force = intel_dp_force,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.atomic_get_property = intel_connector_atomic_get_property,
	.destroy = intel_dp_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
};
4977
/* Probe helpers: mode enumeration/validation and encoder lookup. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};
4983
/* Encoder lifetime hooks (reset on resume/GPU reset, destroy on unload). */
static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};
4988
/*
 * Handle a hot-plug pulse on a DP digital port.  Long pulses signal a
 * plug/unplug (re-read DPCD, re-probe MST), short pulses signal a sink
 * event (link status check or MST sideband message).  Returns
 * IRQ_HANDLED when the pulse was consumed, IRQ_NONE otherwise.
 */
enum irqreturn
intel_dp_hpd_pulse(struct intel_digital_port *intel_dig_port, bool long_hpd)
{
	struct intel_dp *intel_dp = &intel_dig_port->dp;
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum intel_display_power_domain power_domain;
	enum irqreturn ret = IRQ_NONE;

	if (intel_dig_port->base.type != INTEL_OUTPUT_EDP &&
	    intel_dig_port->base.type != INTEL_OUTPUT_HDMI)
		intel_dig_port->base.type = INTEL_OUTPUT_DISPLAYPORT;

	if (long_hpd && intel_dig_port->base.type == INTEL_OUTPUT_EDP) {
		/*
		 * vdd off can generate a long pulse on eDP which
		 * would require vdd on to handle it, and thus we
		 * would end up in an endless cycle of
		 * "vdd off -> long hpd -> vdd on -> detect -> vdd off -> ..."
		 */
		DRM_DEBUG_KMS("ignoring long hpd on eDP port %c\n",
			      port_name(intel_dig_port->port));
		return IRQ_HANDLED;
	}

	DRM_DEBUG_KMS("got hpd irq on port %c - %s\n",
		      port_name(intel_dig_port->port),
		      long_hpd ? "long" : "short");

	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_get(dev_priv, power_domain);

	if (long_hpd) {
		/* indicate that we need to restart link training */
		intel_dp->train_set_valid = false;

		if (!intel_digital_port_connected(dev_priv, intel_dig_port))
			goto mst_fail;

		if (!intel_dp_get_dpcd(intel_dp)) {
			goto mst_fail;
		}

		intel_dp_probe_oui(intel_dp);

		if (!intel_dp_probe_mst(intel_dp)) {
			/* SST sink: check the link under the connection mutex. */
			drm_modeset_lock(&dev->mode_config.connection_mutex, NULL);
			intel_dp_check_link_status(intel_dp);
			drm_modeset_unlock(&dev->mode_config.connection_mutex);
			goto mst_fail;
		}
	} else {
		if (intel_dp->is_mst) {
			if (intel_dp_check_mst_status(intel_dp) == -EINVAL)
				goto mst_fail;
		}

		if (!intel_dp->is_mst) {
			drm_modeset_lock(&dev->mode_config.connection_mutex, NULL);
			intel_dp_check_link_status(intel_dp);
			drm_modeset_unlock(&dev->mode_config.connection_mutex);
		}
	}

	ret = IRQ_HANDLED;

	goto put_power;
mst_fail:
	/* if we were in MST mode, and device is not there, get out of MST mode */
	if (intel_dp->is_mst) {
		DRM_DEBUG_KMS("MST device may have disappeared %d vs %d\n", intel_dp->is_mst, intel_dp->mst_mgr.mst_state);
		intel_dp->is_mst = false;
		drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
	}
put_power:
	intel_display_power_put(dev_priv, power_domain);

	return ret;
}
5069
5070
5071bool intel_dp_is_edp(struct drm_device *dev, enum port port)
5072{
5073 struct drm_i915_private *dev_priv = dev->dev_private;
5074 union child_device_config *p_child;
5075 int i;
5076 static const short port_mapping[] = {
5077 [PORT_B] = DVO_PORT_DPB,
5078 [PORT_C] = DVO_PORT_DPC,
5079 [PORT_D] = DVO_PORT_DPD,
5080 [PORT_E] = DVO_PORT_DPE,
5081 };
5082
5083
5084
5085
5086
5087 if (INTEL_INFO(dev)->gen < 5)
5088 return false;
5089
5090 if (port == PORT_A)
5091 return true;
5092
5093 if (!dev_priv->vbt.child_dev_num)
5094 return false;
5095
5096 for (i = 0; i < dev_priv->vbt.child_dev_num; i++) {
5097 p_child = dev_priv->vbt.child_dev + i;
5098
5099 if (p_child->common.dvo_port == port_mapping[port] &&
5100 (p_child->common.device_type & DEVICE_TYPE_eDP_BITS) ==
5101 (DEVICE_TYPE_eDP & DEVICE_TYPE_eDP_BITS))
5102 return true;
5103 }
5104 return false;
5105}
5106
5107void
5108intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
5109{
5110 struct intel_connector *intel_connector = to_intel_connector(connector);
5111
5112 intel_attach_force_audio_property(connector);
5113 intel_attach_broadcast_rgb_property(connector);
5114 intel_dp->color_range_auto = true;
5115
5116 if (is_edp(intel_dp)) {
5117 drm_mode_create_scaling_mode_property(connector->dev);
5118 drm_object_attach_property(
5119 &connector->base,
5120 connector->dev->mode_config.scaling_mode_property,
5121 DRM_MODE_SCALE_ASPECT);
5122 intel_connector->panel.fitting_mode = DRM_MODE_SCALE_ASPECT;
5123 }
5124}
5125
/*
 * Initialize the bookkeeping timestamps used to enforce panel power
 * sequencing delays, as if the panel had just been powered off.
 */
static void intel_dp_init_panel_power_timestamps(struct intel_dp *intel_dp)
{
	intel_dp->panel_power_off_time = ktime_get_boottime();
	intel_dp->last_power_on = jiffies;
	intel_dp->last_backlight_off = jiffies;
}
5132
/*
 * Compute the panel power sequencing delays for an eDP panel: take
 * the max of what the registers (programmed by the BIOS) and the VBT
 * report, falling back to the eDP spec limits when both are zero, and
 * store the result (converted to ms) in intel_dp.  Caller must hold
 * pps_mutex.
 */
static void
intel_dp_init_panel_power_sequencer(struct drm_device *dev,
				    struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct edp_power_seq cur, vbt, spec,
		*final = &intel_dp->pps_delays;
	u32 pp_on, pp_off, pp_div = 0, pp_ctl = 0;
	i915_reg_t pp_ctrl_reg, pp_on_reg, pp_off_reg, pp_div_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* already initialized? */
	if (final->t11_t12 != 0)
		return;

	if (IS_BROXTON(dev)) {
		/*
		 * TODO: BXT has 2 sets of PPS registers.
		 * Correct Register for Broxton need to be identified
		 * using VBT. hardcoding for now
		 */
		pp_ctrl_reg = BXT_PP_CONTROL(0);
		pp_on_reg = BXT_PP_ON_DELAYS(0);
		pp_off_reg = BXT_PP_OFF_DELAYS(0);
	} else if (HAS_PCH_SPLIT(dev)) {
		pp_ctrl_reg = PCH_PP_CONTROL;
		pp_on_reg = PCH_PP_ON_DELAYS;
		pp_off_reg = PCH_PP_OFF_DELAYS;
		pp_div_reg = PCH_PP_DIVISOR;
	} else {
		enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);

		pp_ctrl_reg = VLV_PIPE_PP_CONTROL(pipe);
		pp_on_reg = VLV_PIPE_PP_ON_DELAYS(pipe);
		pp_off_reg = VLV_PIPE_PP_OFF_DELAYS(pipe);
		pp_div_reg = VLV_PIPE_PP_DIVISOR(pipe);
	}

	/* Workaround: Need to write PP_CONTROL with the unlock key as
	 * the very first thing. */
	pp_ctl = ironlake_get_pp_control(intel_dp);

	pp_on = I915_READ(pp_on_reg);
	pp_off = I915_READ(pp_off_reg);
	if (!IS_BROXTON(dev)) {
		I915_WRITE(pp_ctrl_reg, pp_ctl);
		pp_div = I915_READ(pp_div_reg);
	}

	/* Pull timing values out of registers */
	cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
		PANEL_POWER_UP_DELAY_SHIFT;

	cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
		PANEL_LIGHT_ON_DELAY_SHIFT;

	cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
		PANEL_LIGHT_OFF_DELAY_SHIFT;

	cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
		PANEL_POWER_DOWN_DELAY_SHIFT;

	if (IS_BROXTON(dev)) {
		/* BXT keeps the power cycle delay in the control register,
		 * in 100ms units, 1-based. */
		u16 tmp = (pp_ctl & BXT_POWER_CYCLE_DELAY_MASK) >>
			BXT_POWER_CYCLE_DELAY_SHIFT;
		if (tmp > 0)
			cur.t11_t12 = (tmp - 1) * 1000;
		else
			cur.t11_t12 = 0;
	} else {
		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
		       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;
	}

	DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
		      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

	vbt = dev_priv->vbt.edp_pps;

	/* Upper limits from eDP 1.3 spec.  Note the clunky units of 100us
	 * used here to match the hardware register fields. */
	spec.t1_t3 = 210 * 10;
	spec.t8 = 50 * 10;
	spec.t9 = 50 * 10;
	spec.t10 = 500 * 10;
	/* This one is special: the hw value is in units of 100ms and zero
	 * based (so 100ms must be added), then scaled by 10 to match the
	 * 100us units used for the other fields. */
	spec.t11_t12 = (510 + 100) * 10;

	DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
		      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

	/* Use the max of the register settings and vbt. If both are
	 * unset, fall back to the spec limits. */
#define assign_final(field)	final->field = (max(cur.field, vbt.field) == 0 ? \
				       spec.field : \
				       max(cur.field, vbt.field))
	assign_final(t1_t3);
	assign_final(t8);
	assign_final(t9);
	assign_final(t10);
	assign_final(t11_t12);
#undef assign_final

	/* Convert from 100us register units to ms. */
#define get_delay(field)	(DIV_ROUND_UP(final->field, 10))
	intel_dp->panel_power_up_delay = get_delay(t1_t3);
	intel_dp->backlight_on_delay = get_delay(t8);
	intel_dp->backlight_off_delay = get_delay(t9);
	intel_dp->panel_power_down_delay = get_delay(t10);
	intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
#undef get_delay

	DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
		      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
		      intel_dp->panel_power_cycle_delay);

	DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
		      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
}
5255
/*
 * Program the computed pps_delays into the panel power sequencer
 * registers, including the power cycle divisor and the port select
 * bits where the hardware has them.  Caller must hold pps_mutex.
 */
static void
intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
					      struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp_on, pp_off, pp_div, port_sel = 0;
	int div = HAS_PCH_SPLIT(dev) ? intel_pch_rawclk(dev) : intel_hrawclk(dev);
	i915_reg_t pp_on_reg, pp_off_reg, pp_div_reg, pp_ctrl_reg;
	enum port port = dp_to_dig_port(intel_dp)->port;
	const struct edp_power_seq *seq = &intel_dp->pps_delays;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (IS_BROXTON(dev)) {
		/*
		 * TODO: BXT has 2 sets of PPS registers.
		 * Correct Register for Broxton need to be identified
		 * using VBT. hardcoding for now
		 */
		pp_ctrl_reg = BXT_PP_CONTROL(0);
		pp_on_reg = BXT_PP_ON_DELAYS(0);
		pp_off_reg = BXT_PP_OFF_DELAYS(0);

	} else if (HAS_PCH_SPLIT(dev)) {
		pp_on_reg = PCH_PP_ON_DELAYS;
		pp_off_reg = PCH_PP_OFF_DELAYS;
		pp_div_reg = PCH_PP_DIVISOR;
	} else {
		enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);

		pp_on_reg = VLV_PIPE_PP_ON_DELAYS(pipe);
		pp_off_reg = VLV_PIPE_PP_OFF_DELAYS(pipe);
		pp_div_reg = VLV_PIPE_PP_DIVISOR(pipe);
	}

	/*
	 * Store the new delays in the power sequencer.  The backlight
	 * on/off delay fields are programmed as 1 (the hw minimum);
	 * NOTE(review): presumably t8/t9 are enforced with software
	 * waits elsewhere (see backlight_on_delay/backlight_off_delay)
	 * — confirm against the backlight code.
	 */
	pp_on = (seq->t1_t3 << PANEL_POWER_UP_DELAY_SHIFT) |
		(1 << PANEL_LIGHT_ON_DELAY_SHIFT);
	pp_off = (1 << PANEL_LIGHT_OFF_DELAY_SHIFT) |
		 (seq->t10 << PANEL_POWER_DOWN_DELAY_SHIFT);

	/* Compute the power cycle delay field; BXT keeps it in the
	 * control register (100ms units, 1-based), others use the
	 * divisor register (1s units) plus a reference divider. */
	if (IS_BROXTON(dev)) {
		pp_div = I915_READ(pp_ctrl_reg);
		pp_div &= ~BXT_POWER_CYCLE_DELAY_MASK;
		pp_div |= (DIV_ROUND_UP((seq->t11_t12 + 1), 1000)
				<< BXT_POWER_CYCLE_DELAY_SHIFT);
	} else {
		pp_div = ((100 * div)/2 - 1) << PP_REFERENCE_DIVIDER_SHIFT;
		pp_div |= (DIV_ROUND_UP(seq->t11_t12, 1000)
				<< PANEL_POWER_CYCLE_DELAY_SHIFT);
	}

	/* Haswell doesn't have any port selection bits for the panel
	 * power sequencer any more. */
	if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
		port_sel = PANEL_PORT_SELECT_VLV(port);
	} else if (HAS_PCH_IBX(dev) || HAS_PCH_CPT(dev)) {
		if (port == PORT_A)
			port_sel = PANEL_PORT_SELECT_DPA;
		else
			port_sel = PANEL_PORT_SELECT_DPD;
	}

	pp_on |= port_sel;

	I915_WRITE(pp_on_reg, pp_on);
	I915_WRITE(pp_off_reg, pp_off);
	if (IS_BROXTON(dev))
		I915_WRITE(pp_ctrl_reg, pp_div);
	else
		I915_WRITE(pp_div_reg, pp_div);

	DRM_DEBUG_KMS("panel power sequencer register settings: PP_ON %#x, PP_OFF %#x, PP_DIV %#x\n",
		      I915_READ(pp_on_reg),
		      I915_READ(pp_off_reg),
		      IS_BROXTON(dev) ?
		      (I915_READ(pp_ctrl_reg) & BXT_POWER_CYCLE_DELAY_MASK) :
		      I915_READ(pp_div_reg));
}
5343
5344
5345
5346
5347
5348
5349
5350
5351
5352
5353
5354
5355
/*
 * Switch the eDP panel between its fixed (high) and downclocked (low)
 * refresh rate.  Gen8+ (except CHV) switch between the precomputed
 * M1/N1 and M2/N2 link values; gen7 toggles the PIPECONF EDP RR mode
 * switch bit instead.  Expects drrs.mutex to be held by the caller.
 */
static void intel_dp_set_drrs_state(struct drm_device *dev, int refresh_rate)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_encoder *encoder;
	struct intel_digital_port *dig_port = NULL;
	struct intel_dp *intel_dp = dev_priv->drrs.dp;
	struct intel_crtc_state *config = NULL;
	struct intel_crtc *intel_crtc = NULL;
	enum drrs_refresh_rate_type index = DRRS_HIGH_RR;

	if (refresh_rate <= 0) {
		DRM_DEBUG_KMS("Refresh rate should be positive non-zero.\n");
		return;
	}

	if (intel_dp == NULL) {
		DRM_DEBUG_KMS("DRRS not supported.\n");
		return;
	}

	dig_port = dp_to_dig_port(intel_dp);
	encoder = &dig_port->base;
	intel_crtc = to_intel_crtc(encoder->base.crtc);

	if (!intel_crtc) {
		DRM_DEBUG_KMS("DRRS: intel_crtc not initialized\n");
		return;
	}

	config = intel_crtc->config;

	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
		DRM_DEBUG_KMS("Only Seamless DRRS supported.\n");
		return;
	}

	/* A rate matching the downclock mode selects the low RR. */
	if (intel_dp->attached_connector->panel.downclock_mode->vrefresh ==
			refresh_rate)
		index = DRRS_LOW_RR;

	if (index == dev_priv->drrs.refresh_rate_type) {
		DRM_DEBUG_KMS(
			"DRRS requested for previously set RR...ignoring\n");
		return;
	}

	if (!intel_crtc->active) {
		DRM_DEBUG_KMS("eDP encoder disabled. CRTC not Active\n");
		return;
	}

	if (INTEL_INFO(dev)->gen >= 8 && !IS_CHERRYVIEW(dev)) {
		/* Gen8+: switch between the two precomputed M/N sets. */
		switch (index) {
		case DRRS_HIGH_RR:
			intel_dp_set_m_n(intel_crtc, M1_N1);
			break;
		case DRRS_LOW_RR:
			intel_dp_set_m_n(intel_crtc, M2_N2);
			break;
		case DRRS_MAX_RR:
		default:
			DRM_ERROR("Unsupported refreshrate type\n");
		}
	} else if (INTEL_INFO(dev)->gen > 6) {
		/* Gen7: toggle the RR mode switch bit in PIPECONF. */
		i915_reg_t reg = PIPECONF(intel_crtc->config->cpu_transcoder);
		u32 val;

		val = I915_READ(reg);
		if (index > DRRS_HIGH_RR) {
			if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
				val |= PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val |= PIPECONF_EDP_RR_MODE_SWITCH;
		} else {
			if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH;
		}
		I915_WRITE(reg, val);
	}

	dev_priv->drrs.refresh_rate_type = index;

	DRM_DEBUG_KMS("eDP Refresh Rate set to : %dHz\n", refresh_rate);
}
5447
5448
5449
5450
5451
5452
5453
/**
 * intel_edp_drrs_enable - init drrs struct if supported
 * @intel_dp: DP struct
 *
 * Attaches this DP as the DRRS-managed panel and clears the busy
 * frontbuffer tracking.  Does nothing if the current config has no
 * DRRS support; warns if DRRS was already enabled.
 */
void intel_edp_drrs_enable(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_crtc *crtc = dig_port->base.base.crtc;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);

	if (!intel_crtc->config->has_drrs) {
		DRM_DEBUG_KMS("Panel doesn't support DRRS\n");
		return;
	}

	mutex_lock(&dev_priv->drrs.mutex);
	if (WARN_ON(dev_priv->drrs.dp)) {
		DRM_ERROR("DRRS already enabled\n");
		goto unlock;
	}

	dev_priv->drrs.busy_frontbuffer_bits = 0;

	dev_priv->drrs.dp = intel_dp;

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}
5480
5481
5482
5483
5484
5485
/**
 * intel_edp_drrs_disable - Disable DRRS
 * @intel_dp: DP struct
 *
 * Restores the fixed (high) refresh rate if the panel is currently
 * downclocked, detaches drrs.dp and cancels pending downclock work.
 */
void intel_edp_drrs_disable(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_crtc *crtc = dig_port->base.base.crtc;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);

	if (!intel_crtc->config->has_drrs)
		return;

	mutex_lock(&dev_priv->drrs.mutex);
	if (!dev_priv->drrs.dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	/* Go back to the fixed refresh rate before detaching. */
	if (dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv->dev,
			intel_dp->attached_connector->panel.
			fixed_mode->vrefresh);

	dev_priv->drrs.dp = NULL;
	mutex_unlock(&dev_priv->drrs.mutex);

	cancel_delayed_work_sync(&dev_priv->drrs.work);
}
5513
/*
 * Delayed work that downclocks the panel to the low refresh rate once
 * the DRRS-relevant frontbuffers have stayed idle for the work delay.
 */
static void intel_edp_drrs_downclock_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), drrs.work.work);
	struct intel_dp *intel_dp;

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;

	if (!intel_dp)
		goto unlock;

	/*
	 * If a frontbuffer became busy again before this work ran,
	 * stay at the high refresh rate.
	 */
	if (dev_priv->drrs.busy_frontbuffer_bits)
		goto unlock;

	if (dev_priv->drrs.refresh_rate_type != DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv->dev,
			intel_dp->attached_connector->panel.
			downclock_mode->vrefresh);

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}
5543
5544
5545
5546
5547
5548
5549
5550
5551
5552
5553
/**
 * intel_edp_drrs_invalidate - Disable Idleness DRRS
 * @dev: DRM device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 *
 * Called when rendering starts on the given planes, so the panel must
 * be upclocked (LOW_RR -> HIGH_RR).  Dirty frontbuffers relevant to
 * DRRS are tracked in busy_frontbuffer_bits.
 */
void intel_edp_drrs_invalidate(struct drm_device *dev,
		unsigned frontbuffer_bits)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);
	if (!dev_priv->drrs.dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(dev_priv->drrs.dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only track bits belonging to the DRRS pipe. */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;

	/* invalidate means busy screen, hence upclock */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv->dev,
				dev_priv->drrs.dp->attached_connector->panel.
				fixed_mode->vrefresh);

	mutex_unlock(&dev_priv->drrs.mutex);
}
5586
5587
5588
5589
5590
5591
5592
5593
5594
5595
5596
5597
5598
/**
 * intel_edp_drrs_flush - Restart Idleness DRRS
 * @dev: DRM device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 *
 * Called when rendering on the given planes has completed, so the
 * panel is upclocked (LOW_RR -> HIGH_RR) and the idleness downclock
 * work is rescheduled once all tracked frontbuffers are quiescent.
 */
void intel_edp_drrs_flush(struct drm_device *dev,
		unsigned frontbuffer_bits)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);
	if (!dev_priv->drrs.dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(dev_priv->drrs.dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only track bits belonging to the DRRS pipe. */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* flush means busy screen, hence upclock */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv->dev,
				dev_priv->drrs.dp->attached_connector->panel.
				fixed_mode->vrefresh);

	/*
	 * flush also means no more activity, hence schedule downclock
	 * if all other frontbuffers are quiescent too
	 */
	if (!dev_priv->drrs.busy_frontbuffer_bits)
		schedule_delayed_work(&dev_priv->drrs.work,
				msecs_to_jiffies(1000));
	mutex_unlock(&dev_priv->drrs.mutex);
}
5638
5639
5640
5641
5642
5643
5644
5645
5646
5647
5648
5649
5650
5651
5652
5653
5654
5655
5656
5657
5658
5659
5660
5661
5662
5663
5664
5665
5666
5667
5668
5669
5670
5671
5672
5673
5674
5675
5676
5677
5678
5679
5680
5681
5682
5683
5684
5685
5686
5687
5688
/**
 * intel_dp_drrs_init - Init DRRS work, mutex, and find a downclock mode.
 * @intel_connector: eDP connector
 * @fixed_mode: preferred (fixed) mode of the panel
 *
 * Returns the downclock mode if the platform (gen7+), VBT and panel
 * EDID all support seamless DRRS, otherwise NULL (DRRS stays off).
 */
static struct drm_display_mode *
intel_dp_drrs_init(struct intel_connector *intel_connector,
		struct drm_display_mode *fixed_mode)
{
	struct drm_connector *connector = &intel_connector->base;
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_display_mode *downclock_mode = NULL;

	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_edp_drrs_downclock_work);
	mutex_init(&dev_priv->drrs.mutex);

	if (INTEL_INFO(dev)->gen <= 6) {
		DRM_DEBUG_KMS("DRRS supported for Gen7 and above\n");
		return NULL;
	}

	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
		DRM_DEBUG_KMS("VBT doesn't support DRRS\n");
		return NULL;
	}

	downclock_mode = intel_find_panel_downclock
					(dev, fixed_mode, connector);

	if (!downclock_mode) {
		DRM_DEBUG_KMS("Downclock mode is not found. DRRS not supported\n");
		return NULL;
	}

	dev_priv->drrs.type = dev_priv->vbt.drrs_type;

	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
	DRM_DEBUG_KMS("seamless DRRS supported for eDP panel.\n");
	return downclock_mode;
}
5725
/*
 * Finish connector setup for eDP panels: sanitize BIOS VDD state,
 * verify the panel answers over DPCD, program the power sequencer
 * registers, determine the fixed (and optional downclock) mode from
 * EDID or VBT, and initialize the backlight.  Returns true for
 * non-eDP ports or on success; false when no panel responds (the
 * device is presumed a ghost).
 */
static bool intel_edp_init_connector(struct intel_dp *intel_dp,
				     struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_display_mode *fixed_mode = NULL;
	struct drm_display_mode *downclock_mode = NULL;
	bool has_dpcd;
	struct drm_display_mode *scan;
	struct edid *edid;
	enum pipe pipe = INVALID_PIPE;

	if (!is_edp(intel_dp))
		return true;

	pps_lock(intel_dp);
	intel_edp_panel_vdd_sanitize(intel_dp);
	pps_unlock(intel_dp);

	/* Cache DPCD and EDID for edp. */
	has_dpcd = intel_dp_get_dpcd(intel_dp);

	if (has_dpcd) {
		if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
			dev_priv->no_aux_handshake =
				intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
				DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
	} else {
		/* if this fails, presume the device is a ghost */
		DRM_INFO("failed to retrieve link info, disabling eDP\n");
		return false;
	}

	/* We now know it's not a ghost, init power sequence regs. */
	pps_lock(intel_dp);
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);
	pps_unlock(intel_dp);

	mutex_lock(&dev->mode_config.mutex);
	edid = drm_get_edid(connector, &intel_dp->aux.ddc);
	if (edid) {
		if (drm_add_edid_modes(connector, edid)) {
			drm_mode_connector_update_edid_property(connector,
								edid);
			drm_edid_to_eld(connector, edid);
		} else {
			kfree(edid);
			edid = ERR_PTR(-EINVAL);
		}
	} else {
		edid = ERR_PTR(-ENOENT);
	}
	intel_connector->edid = edid;

	/* prefer fixed mode from EDID if available */
	list_for_each_entry(scan, &connector->probed_modes, head) {
		if ((scan->type & DRM_MODE_TYPE_PREFERRED)) {
			fixed_mode = drm_mode_duplicate(dev, scan);
			downclock_mode = intel_dp_drrs_init(
						intel_connector, fixed_mode);
			break;
		}
	}

	/* fallback to VBT if available for eDP */
	if (!fixed_mode && dev_priv->vbt.lfp_lvds_vbt_mode) {
		fixed_mode = drm_mode_duplicate(dev,
					dev_priv->vbt.lfp_lvds_vbt_mode);
		if (fixed_mode)
			fixed_mode->type |= DRM_MODE_TYPE_PREFERRED;
	}
	mutex_unlock(&dev->mode_config.mutex);

	if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
		intel_dp->edp_notifier.notifier_call = edp_notify_handler;
		register_reboot_notifier(&intel_dp->edp_notifier);

		/*
		 * Figure out the current pipe for the initial backlight setup.
		 * If the current pipe isn't valid, try the PPS pipe, and if that
		 * fails just assume pipe A.
		 */
		if (IS_CHERRYVIEW(dev))
			pipe = DP_PORT_TO_PIPE_CHV(intel_dp->DP);
		else
			pipe = PORT_TO_PIPE(intel_dp->DP);

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = intel_dp->pps_pipe;

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = PIPE_A;

		DRM_DEBUG_KMS("using pipe %c for initial backlight setup\n",
			      pipe_name(pipe));
	}

	intel_panel_init(&intel_connector->panel, fixed_mode, downclock_mode);
	intel_connector->panel.backlight.power = intel_edp_backlight_power;
	intel_panel_setup_backlight(connector, pipe);

	return true;
}
5832
/*
 * Initialize the connector for a DP/eDP digital port: set up the aux channel,
 * panel power sequencer (eDP), hotplug pin, MST support and connector
 * properties.  Returns false if the port cannot host a (e)DP connector, in
 * which case all partially-initialized state is torn down again.
 */
bool
intel_dp_init_connector(struct intel_digital_port *intel_dig_port,
			struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_dp *intel_dp = &intel_dig_port->dp;
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum port port = intel_dig_port->port;
	int type, ret;

	if (WARN(intel_dig_port->max_lanes < 1,
		 "Not enough lanes (%d) for DP on port %c\n",
		 intel_dig_port->max_lanes, port_name(port)))
		return false;

	intel_dp->pps_pipe = INVALID_PIPE;

	/* intel_dp vfuncs: pick the aux clock divider per platform generation */
	if (INTEL_INFO(dev)->gen >= 9)
		intel_dp->get_aux_clock_divider = skl_get_aux_clock_divider;
	else if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
		intel_dp->get_aux_clock_divider = vlv_get_aux_clock_divider;
	else if (IS_HASWELL(dev) || IS_BROADWELL(dev))
		intel_dp->get_aux_clock_divider = hsw_get_aux_clock_divider;
	else if (HAS_PCH_SPLIT(dev))
		intel_dp->get_aux_clock_divider = ilk_get_aux_clock_divider;
	else
		intel_dp->get_aux_clock_divider = i9xx_get_aux_clock_divider;

	if (INTEL_INFO(dev)->gen >= 9)
		intel_dp->get_aux_send_ctl = skl_get_aux_send_ctl;
	else
		intel_dp->get_aux_send_ctl = i9xx_get_aux_send_ctl;

	if (HAS_DDI(dev))
		intel_dp->prepare_link_retrain = intel_ddi_prepare_link_retrain;

	/* Preserve the current hw state. */
	intel_dp->DP = I915_READ(intel_dp->output_reg);
	intel_dp->attached_connector = intel_connector;

	if (intel_dp_is_edp(dev, port))
		type = DRM_MODE_CONNECTOR_eDP;
	else
		type = DRM_MODE_CONNECTOR_DisplayPort;

	/*
	 * For eDP, fix the encoder type to INTEL_OUTPUT_EDP up front so the
	 * rest of the driver can distinguish eDP from external DP on this
	 * encoder from here on.
	 */
	if (type == DRM_MODE_CONNECTOR_eDP)
		intel_encoder->type = INTEL_OUTPUT_EDP;

	/* eDP only on port B and/or C on vlv/chv */
	if (WARN_ON((IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) &&
		    is_edp(intel_dp) && port != PORT_B && port != PORT_C))
		return false;

	DRM_DEBUG_KMS("Adding %s connector on port %c\n",
		      type == DRM_MODE_CONNECTOR_eDP ? "eDP" : "DP",
		      port_name(port));

	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
			  edp_panel_vdd_work);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
	drm_connector_register(connector);

	if (HAS_DDI(dev))
		intel_connector->get_hw_state = intel_ddi_connector_get_hw_state;
	else
		intel_connector->get_hw_state = intel_connector_get_hw_state;
	intel_connector->unregister = intel_dp_connector_unregister;

	/* Set up the hotplug pin. */
	switch (port) {
	case PORT_A:
		intel_encoder->hpd_pin = HPD_PORT_A;
		break;
	case PORT_B:
		intel_encoder->hpd_pin = HPD_PORT_B;
		/* BXT A1 steppings route port B HPD to the port A pin */
		if (IS_BXT_REVID(dev, 0, BXT_REVID_A1))
			intel_encoder->hpd_pin = HPD_PORT_A;
		break;
	case PORT_C:
		intel_encoder->hpd_pin = HPD_PORT_C;
		break;
	case PORT_D:
		intel_encoder->hpd_pin = HPD_PORT_D;
		break;
	case PORT_E:
		intel_encoder->hpd_pin = HPD_PORT_E;
		break;
	default:
		BUG();
	}

	/* Initialize the panel power sequencer under the pps lock. */
	if (is_edp(intel_dp)) {
		pps_lock(intel_dp);
		intel_dp_init_panel_power_timestamps(intel_dp);
		if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
			vlv_initial_power_sequencer_setup(intel_dp);
		else
			intel_dp_init_panel_power_sequencer(dev, intel_dp);
		pps_unlock(intel_dp);
	}

	ret = intel_dp_aux_init(intel_dp, intel_connector);
	if (ret)
		goto fail;

	/* init MST on ports that can support it */
	if (HAS_DP_MST(dev) &&
	    (port == PORT_B || port == PORT_C || port == PORT_D))
		intel_dp_mst_encoder_init(intel_dig_port,
					  intel_connector->base.base.id);

	if (!intel_edp_init_connector(intel_dp, intel_connector)) {
		intel_dp_aux_fini(intel_dp);
		intel_dp_mst_encoder_cleanup(intel_dig_port);
		goto fail;
	}

	intel_dp_add_properties(intel_dp, connector);

	/*
	 * Band-gap tweak on G4X (but not GM45); NOTE(review): presumably a
	 * workaround required for eDP on these machines — confirm against
	 * the hardware documentation before changing the magic 0xd value.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}

	i915_debugfs_connector_add(connector);

	return true;

fail:
	if (is_edp(intel_dp)) {
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		/*
		 * vdd might still be enabled due to the delayed vdd off.
		 * Make sure vdd is actually turned off here.
		 */
		pps_lock(intel_dp);
		edp_panel_vdd_off_sync(intel_dp);
		pps_unlock(intel_dp);
	}
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);

	return false;
}
5996
5997void
5998intel_dp_init(struct drm_device *dev,
5999 i915_reg_t output_reg, enum port port)
6000{
6001 struct drm_i915_private *dev_priv = dev->dev_private;
6002 struct intel_digital_port *intel_dig_port;
6003 struct intel_encoder *intel_encoder;
6004 struct drm_encoder *encoder;
6005 struct intel_connector *intel_connector;
6006
6007 intel_dig_port = kzalloc(sizeof(*intel_dig_port), GFP_KERNEL);
6008 if (!intel_dig_port)
6009 return;
6010
6011 intel_connector = intel_connector_alloc();
6012 if (!intel_connector)
6013 goto err_connector_alloc;
6014
6015 intel_encoder = &intel_dig_port->base;
6016 encoder = &intel_encoder->base;
6017
6018 if (drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
6019 DRM_MODE_ENCODER_TMDS, NULL))
6020 goto err_encoder_init;
6021
6022 intel_encoder->compute_config = intel_dp_compute_config;
6023 intel_encoder->disable = intel_disable_dp;
6024 intel_encoder->get_hw_state = intel_dp_get_hw_state;
6025 intel_encoder->get_config = intel_dp_get_config;
6026 intel_encoder->suspend = intel_dp_encoder_suspend;
6027 if (IS_CHERRYVIEW(dev)) {
6028 intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
6029 intel_encoder->pre_enable = chv_pre_enable_dp;
6030 intel_encoder->enable = vlv_enable_dp;
6031 intel_encoder->post_disable = chv_post_disable_dp;
6032 intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
6033 } else if (IS_VALLEYVIEW(dev)) {
6034 intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
6035 intel_encoder->pre_enable = vlv_pre_enable_dp;
6036 intel_encoder->enable = vlv_enable_dp;
6037 intel_encoder->post_disable = vlv_post_disable_dp;
6038 } else {
6039 intel_encoder->pre_enable = g4x_pre_enable_dp;
6040 intel_encoder->enable = g4x_enable_dp;
6041 if (INTEL_INFO(dev)->gen >= 5)
6042 intel_encoder->post_disable = ilk_post_disable_dp;
6043 }
6044
6045 intel_dig_port->port = port;
6046 intel_dig_port->dp.output_reg = output_reg;
6047 intel_dig_port->max_lanes = 4;
6048
6049 intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
6050 if (IS_CHERRYVIEW(dev)) {
6051 if (port == PORT_D)
6052 intel_encoder->crtc_mask = 1 << 2;
6053 else
6054 intel_encoder->crtc_mask = (1 << 0) | (1 << 1);
6055 } else {
6056 intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
6057 }
6058 intel_encoder->cloneable = 0;
6059
6060 intel_dig_port->hpd_pulse = intel_dp_hpd_pulse;
6061 dev_priv->hotplug.irq_port[port] = intel_dig_port;
6062
6063 if (!intel_dp_init_connector(intel_dig_port, intel_connector))
6064 goto err_init_connector;
6065
6066 return;
6067
6068err_init_connector:
6069 drm_encoder_cleanup(encoder);
6070err_encoder_init:
6071 kfree(intel_connector);
6072err_connector_alloc:
6073 kfree(intel_dig_port);
6074
6075 return;
6076}
6077
6078void intel_dp_mst_suspend(struct drm_device *dev)
6079{
6080 struct drm_i915_private *dev_priv = dev->dev_private;
6081 int i;
6082
6083
6084 for (i = 0; i < I915_MAX_PORTS; i++) {
6085 struct intel_digital_port *intel_dig_port = dev_priv->hotplug.irq_port[i];
6086 if (!intel_dig_port)
6087 continue;
6088
6089 if (intel_dig_port->base.type == INTEL_OUTPUT_DISPLAYPORT) {
6090 if (!intel_dig_port->dp.can_mst)
6091 continue;
6092 if (intel_dig_port->dp.is_mst)
6093 drm_dp_mst_topology_mgr_suspend(&intel_dig_port->dp.mst_mgr);
6094 }
6095 }
6096}
6097
6098void intel_dp_mst_resume(struct drm_device *dev)
6099{
6100 struct drm_i915_private *dev_priv = dev->dev_private;
6101 int i;
6102
6103 for (i = 0; i < I915_MAX_PORTS; i++) {
6104 struct intel_digital_port *intel_dig_port = dev_priv->hotplug.irq_port[i];
6105 if (!intel_dig_port)
6106 continue;
6107 if (intel_dig_port->base.type == INTEL_OUTPUT_DISPLAYPORT) {
6108 int ret;
6109
6110 if (!intel_dig_port->dp.can_mst)
6111 continue;
6112
6113 ret = drm_dp_mst_topology_mgr_resume(&intel_dig_port->dp.mst_mgr);
6114 if (ret != 0) {
6115 intel_dp_check_mst_status(&intel_dig_port->dp);
6116 }
6117 }
6118 }
6119}
6120