1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28#include <linux/i2c.h>
29#include <linux/slab.h>
30#include <linux/export.h>
31#include <linux/notifier.h>
32#include <linux/reboot.h>
33#include <drm/drmP.h>
34#include <drm/drm_atomic_helper.h>
35#include <drm/drm_crtc.h>
36#include <drm/drm_crtc_helper.h>
37#include <drm/drm_edid.h>
38#include "intel_drv.h"
39#include <drm/i915_drm.h>
40#include "i915_drv.h"
41
42#define DP_LINK_CHECK_TIMEOUT (10 * 1000)
43
44
45#define INTEL_DP_RESOLUTION_SHIFT_MASK 0
46#define INTEL_DP_RESOLUTION_PREFERRED (1 << INTEL_DP_RESOLUTION_SHIFT_MASK)
47#define INTEL_DP_RESOLUTION_STANDARD (2 << INTEL_DP_RESOLUTION_SHIFT_MASK)
48#define INTEL_DP_RESOLUTION_FAILSAFE (3 << INTEL_DP_RESOLUTION_SHIFT_MASK)
49
/*
 * Maps a DP link clock (in kHz) to the fixed DPLL divider values that
 * produce it on platforms with per-rate PLL settings.
 */
struct dp_link_dpll {
 int clock;
 struct dpll dpll;
};
54
/* G4x DPLL settings for the 162 MHz and 270 MHz DP link clocks. */
static const struct dp_link_dpll gen4_dpll[] = {
 { 162000,
 { .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
 { 270000,
 { .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};
61
/* PCH (Ibex Peak+) DPLL settings for the two base DP link clocks. */
static const struct dp_link_dpll pch_dpll[] = {
 { 162000,
 { .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
 { 270000,
 { .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};
68
/* Valleyview DPLL settings for the two base DP link clocks. */
static const struct dp_link_dpll vlv_dpll[] = {
 { 162000,
 { .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
 { 270000,
 { .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};
75
76
77
78
79
/*
 * Cherryview DPLL settings, including the HBR2 (540 MHz) rate.
 * NOTE(review): the large .m2 values appear to be stored in a fixed-point
 * integer+fraction encoding rather than as plain integers — confirm against
 * the CHV PLL programming docs before editing.
 */
static const struct dp_link_dpll chv_dpll[] = {
 { 162000,
 { .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
 { 270000,
 { .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
 { 540000,
 { .p1 = 2, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } }
};
93
/* Source-supported link rates in kHz, per platform family. */
static const int bxt_rates[] = { 162000, 216000, 243000, 270000,
 324000, 432000, 540000 };
static const int skl_rates[] = { 162000, 216000, 270000,
 324000, 432000, 540000 };
/* The DP-spec RBR/HBR/HBR2 rates, used when no platform table applies. */
static const int default_rates[] = { 162000, 270000, 540000 };
99
100
101
102
103
104
105
106
107static bool is_edp(struct intel_dp *intel_dp)
108{
109 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
110
111 return intel_dig_port->base.type == INTEL_OUTPUT_EDP;
112}
113
114static struct drm_device *intel_dp_to_dev(struct intel_dp *intel_dp)
115{
116 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
117
118 return intel_dig_port->base.base.dev;
119}
120
121static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
122{
123 return enc_to_intel_dp(&intel_attached_encoder(connector)->base);
124}
125
126static void intel_dp_link_down(struct intel_dp *intel_dp);
127static bool edp_panel_vdd_on(struct intel_dp *intel_dp);
128static void edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
129static void vlv_init_panel_power_sequencer(struct intel_dp *intel_dp);
130static void vlv_steal_power_sequencer(struct drm_device *dev,
131 enum pipe pipe);
132static void intel_dp_unset_edid(struct intel_dp *intel_dp);
133
134static int
135intel_dp_max_link_bw(struct intel_dp *intel_dp)
136{
137 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
138
139 switch (max_link_bw) {
140 case DP_LINK_BW_1_62:
141 case DP_LINK_BW_2_7:
142 case DP_LINK_BW_5_4:
143 break;
144 default:
145 WARN(1, "invalid max DP link bw val %x, using 1.62Gbps\n",
146 max_link_bw);
147 max_link_bw = DP_LINK_BW_1_62;
148 break;
149 }
150 return max_link_bw;
151}
152
153static u8 intel_dp_max_lane_count(struct intel_dp *intel_dp)
154{
155 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
156 u8 source_max, sink_max;
157
158 source_max = intel_dig_port->max_lanes;
159 sink_max = drm_dp_max_lane_count(intel_dp->dpcd);
160
161 return min(source_max, sink_max);
162}
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
/*
 * Link bandwidth (in units the caller compares against
 * intel_dp_max_data_rate()) required to carry a stream of @pixel_clock
 * at @bpp bits per pixel; the division by ten rounds up.
 */
static int
intel_dp_link_required(int pixel_clock, int bpp)
{
 int bits = pixel_clock * bpp;

 /* ceil(bits / 10) for the non-negative values seen in practice */
 return bits / 10 + (bits % 10 != 0);
}
186
/*
 * Maximum payload data rate of the link: 8b/10b channel coding means
 * only 80% of the raw symbol rate carries pixel data.
 */
static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
 int raw = max_link_clock * max_lanes;

 return raw * 8 / 10;
}
192
193static int
194intel_dp_downstream_max_dotclock(struct intel_dp *intel_dp)
195{
196 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
197 struct intel_encoder *encoder = &intel_dig_port->base;
198 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
199 int max_dotclk = dev_priv->max_dotclk_freq;
200 int ds_max_dotclk;
201
202 int type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
203
204 if (type != DP_DS_PORT_TYPE_VGA)
205 return max_dotclk;
206
207 ds_max_dotclk = drm_dp_downstream_max_clock(intel_dp->dpcd,
208 intel_dp->downstream_ports);
209
210 if (ds_max_dotclk != 0)
211 max_dotclk = min(max_dotclk, ds_max_dotclk);
212
213 return max_dotclk;
214}
215
/*
 * drm_connector .mode_valid hook: reject modes the DP link, a downstream
 * dongle, or a fixed eDP panel cannot carry.
 */
static enum drm_mode_status
intel_dp_mode_valid(struct drm_connector *connector,
 struct drm_display_mode *mode)
{
 struct intel_dp *intel_dp = intel_attached_dp(connector);
 struct intel_connector *intel_connector = to_intel_connector(connector);
 struct drm_display_mode *fixed_mode = intel_connector->panel.fixed_mode;
 int target_clock = mode->clock;
 int max_rate, mode_rate, max_lanes, max_link_clock;
 int max_dotclk;

 max_dotclk = intel_dp_downstream_max_dotclock(intel_dp);

 /* eDP panels cannot exceed their native timings. */
 if (is_edp(intel_dp) && fixed_mode) {
 if (mode->hdisplay > fixed_mode->hdisplay)
 return MODE_PANEL;

 if (mode->vdisplay > fixed_mode->vdisplay)
 return MODE_PANEL;

 target_clock = fixed_mode->clock;
 }

 max_link_clock = intel_dp_max_link_rate(intel_dp);
 max_lanes = intel_dp_max_lane_count(intel_dp);

 max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
 /* Validate against the minimum supported depth, 18bpp (6bpc). */
 mode_rate = intel_dp_link_required(target_clock, 18);

 if (mode_rate > max_rate || target_clock > max_dotclk)
 return MODE_CLOCK_HIGH;

 if (mode->clock < 10000)
 return MODE_CLOCK_LOW;

 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
 return MODE_H_ILLEGAL;

 return MODE_OK;
}
256
/*
 * Pack up to four bytes into one big-endian 32-bit word, the layout the
 * AUX channel data registers expect; extra input bytes are ignored.
 */
uint32_t intel_dp_pack_aux(const uint8_t *src, int src_bytes)
{
 int count = src_bytes > 4 ? 4 : src_bytes;
 uint32_t word = 0;
 int i;

 for (i = 0; i < count; i++)
  word |= (uint32_t)src[i] << (24 - i * 8);

 return word;
}
268
/*
 * Unpack a big-endian 32-bit AUX data word into up to four output bytes;
 * the count is clamped to the register width of four bytes.
 */
static void intel_dp_unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
 int count = dst_bytes > 4 ? 4 : dst_bytes;
 int i;

 for (i = 0; i < count; i++)
  dst[i] = (uint8_t)(src >> (24 - i * 8));
}
277
278static void
279intel_dp_init_panel_power_sequencer(struct drm_device *dev,
280 struct intel_dp *intel_dp);
281static void
282intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
283 struct intel_dp *intel_dp);
284static void
285intel_dp_pps_init(struct drm_device *dev, struct intel_dp *intel_dp);
286
/*
 * Acquire the panel power sequencer lock. The AUX power domain reference
 * is taken *before* pps_mutex so PPS registers stay accessible while the
 * lock is held; pps_unlock() releases in the reverse order.
 */
static void pps_lock(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct intel_encoder *encoder = &intel_dig_port->base;
 struct drm_device *dev = encoder->base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 enum intel_display_power_domain power_domain;

 /* Keep the relevant power well on for the duration of the lock. */
 power_domain = intel_display_port_aux_power_domain(encoder);
 intel_display_power_get(dev_priv, power_domain);

 mutex_lock(&dev_priv->pps_mutex);
}
304
/*
 * Release the panel power sequencer lock, then drop the AUX power domain
 * reference taken by pps_lock() (reverse of the acquire order).
 */
static void pps_unlock(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct intel_encoder *encoder = &intel_dig_port->base;
 struct drm_device *dev = encoder->base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 enum intel_display_power_domain power_domain;

 mutex_unlock(&dev_priv->pps_mutex);

 power_domain = intel_display_port_aux_power_domain(encoder);
 intel_display_power_put(dev_priv, power_domain);
}
318
/*
 * "Kick" the power sequencer on VLV/CHV by briefly enabling and disabling
 * the DP port on the PPS's pipe. The write sequence (idle pattern, port
 * enable, port disable, with posting reads between each) must be preserved
 * exactly. Requires the pipe's DPLL to be running, so one is forced on
 * temporarily if needed.
 */
static void
vlv_power_sequencer_kick(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 enum pipe pipe = intel_dp->pps_pipe;
 bool pll_enabled, release_cl_override = false;
 enum dpio_phy phy = DPIO_PHY(pipe);
 enum dpio_channel ch = vlv_pipe_to_channel(pipe);
 uint32_t DP;

 /* Don't touch the port if it's already live on some pipe. */
 if (WARN(I915_READ(intel_dp->output_reg) & DP_PORT_EN,
 "skipping pipe %c power seqeuncer kick due to port %c being active\n",
 pipe_name(pipe), port_name(intel_dig_port->port)))
 return;

 DRM_DEBUG_KMS("kicking pipe %c power sequencer for port %c\n",
 pipe_name(pipe), port_name(intel_dig_port->port));

 /* Build a minimal single-lane, training-pattern-1 port configuration. */
 DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
 DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
 DP |= DP_PORT_WIDTH(1);
 DP |= DP_LINK_TRAIN_PAT_1;

 if (IS_CHERRYVIEW(dev))
 DP |= DP_PIPE_SELECT_CHV(pipe);
 else if (pipe == PIPE_B)
 DP |= DP_PIPEB_SELECT;

 pll_enabled = I915_READ(DPLL(pipe)) & DPLL_VCO_ENABLE;

 /*
  * The DPLL for the pipe must be running for the kick to take effect;
  * force it on (and on CHV power up the common lane) if necessary.
  */
 if (!pll_enabled) {
 release_cl_override = IS_CHERRYVIEW(dev) &&
 !chv_phy_powergate_ch(dev_priv, phy, ch, true);

 if (vlv_force_pll_on(dev, pipe, IS_CHERRYVIEW(dev) ?
 &chv_dpll[0].dpll : &vlv_dpll[0].dpll)) {
 DRM_ERROR("Failed to force on pll for pipe %c!\n",
 pipe_name(pipe));
 return;
 }
 }

 /*
  * Program the port, pulse DP_PORT_EN on and back off; the posting
  * reads guarantee each write lands before the next.
  */
 I915_WRITE(intel_dp->output_reg, DP);
 POSTING_READ(intel_dp->output_reg);

 I915_WRITE(intel_dp->output_reg, DP | DP_PORT_EN);
 POSTING_READ(intel_dp->output_reg);

 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
 POSTING_READ(intel_dp->output_reg);

 /* Undo the temporary PLL (and CHV lane override) changes. */
 if (!pll_enabled) {
 vlv_force_pll_off(dev, pipe);

 if (release_cl_override)
 chv_phy_powergate_ch(dev_priv, phy, ch, false);
 }
}
392
/*
 * Return the pipe whose power sequencer drives this eDP port, assigning
 * one (stealing it from another port if necessary) when none is bound yet.
 * Must be called with pps_mutex held.
 */
static enum pipe
vlv_power_sequencer_pipe(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 struct intel_encoder *encoder;
 unsigned int pipes = (1 << PIPE_A) | (1 << PIPE_B);
 enum pipe pipe;

 lockdep_assert_held(&dev_priv->pps_mutex);

 /* Only eDP ports own a power sequencer. */
 WARN_ON(!is_edp(intel_dp));

 if (intel_dp->pps_pipe != INVALID_PIPE)
 return intel_dp->pps_pipe;

 /* Mask out pipes whose PPS is already claimed by another eDP port. */
 for_each_intel_encoder(dev, encoder) {
 struct intel_dp *tmp;

 if (encoder->type != INTEL_OUTPUT_EDP)
 continue;

 tmp = enc_to_intel_dp(&encoder->base);

 if (tmp->pps_pipe != INVALID_PIPE)
 pipes &= ~(1 << tmp->pps_pipe);
 }

 /* No free PPS left: fall back to pipe A and steal it below. */
 if (WARN_ON(pipes == 0))
 pipe = PIPE_A;
 else
 pipe = ffs(pipes) - 1;

 vlv_steal_power_sequencer(dev, pipe);
 intel_dp->pps_pipe = pipe;

 DRM_DEBUG_KMS("picked pipe %c power sequencer for port %c\n",
 pipe_name(intel_dp->pps_pipe),
 port_name(intel_dig_port->port));

 /* Initialize the freshly assigned sequencer's state and registers. */
 intel_dp_init_panel_power_sequencer(dev, intel_dp);
 intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);

 /* Kick the PPS so it latches onto the new port binding. */
 vlv_power_sequencer_kick(intel_dp);

 return intel_dp->pps_pipe;
}
455
/*
 * Broxton has a single power sequencer (index 0); this only reprograms
 * its registers after a reset was flagged. Must be called with pps_mutex
 * held.
 */
static int
bxt_power_sequencer_idx(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);

 lockdep_assert_held(&dev_priv->pps_mutex);

 /* Only eDP ports use the power sequencer. */
 WARN_ON(!is_edp(intel_dp));

 /* Nothing lost since the last programming: index is always 0. */
 if (!intel_dp->pps_reset)
 return 0;

 intel_dp->pps_reset = false;

 /* Re-program the sequencer registers that the reset may have clobbered. */
 intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);

 return 0;
}
486
487typedef bool (*vlv_pipe_check)(struct drm_i915_private *dev_priv,
488 enum pipe pipe);
489
490static bool vlv_pipe_has_pp_on(struct drm_i915_private *dev_priv,
491 enum pipe pipe)
492{
493 return I915_READ(PP_STATUS(pipe)) & PP_ON;
494}
495
496static bool vlv_pipe_has_vdd_on(struct drm_i915_private *dev_priv,
497 enum pipe pipe)
498{
499 return I915_READ(PP_CONTROL(pipe)) & EDP_FORCE_VDD;
500}
501
/* Pipe-check callback that accepts any pipe; used as the last resort. */
static bool vlv_pipe_any(struct drm_i915_private *dev_priv,
 enum pipe pipe)
{
 return true;
}
507
508static enum pipe
509vlv_initial_pps_pipe(struct drm_i915_private *dev_priv,
510 enum port port,
511 vlv_pipe_check pipe_check)
512{
513 enum pipe pipe;
514
515 for (pipe = PIPE_A; pipe <= PIPE_B; pipe++) {
516 u32 port_sel = I915_READ(PP_ON_DELAYS(pipe)) &
517 PANEL_PORT_SELECT_MASK;
518
519 if (port_sel != PANEL_PORT_SELECT_VLV(port))
520 continue;
521
522 if (!pipe_check(dev_priv, pipe))
523 continue;
524
525 return pipe;
526 }
527
528 return INVALID_PIPE;
529}
530
/*
 * At driver load, find which PPS (if any) the BIOS left bound to this
 * port, preferring one that is actively powering the panel, then one
 * with VDD forced on, then any match. Must hold pps_mutex.
 */
static void
vlv_initial_power_sequencer_setup(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 enum port port = intel_dig_port->port;

 lockdep_assert_held(&dev_priv->pps_mutex);

 /* Try each selection criterion from strongest to weakest. */
 intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
 vlv_pipe_has_pp_on);

 if (intel_dp->pps_pipe == INVALID_PIPE)
 intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
 vlv_pipe_has_vdd_on);

 if (intel_dp->pps_pipe == INVALID_PIPE)
 intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
 vlv_pipe_any);

 /* No PPS selects this port; one will be assigned on first use. */
 if (intel_dp->pps_pipe == INVALID_PIPE) {
 DRM_DEBUG_KMS("no initial power sequencer for port %c\n",
 port_name(port));
 return;
 }

 DRM_DEBUG_KMS("initial power sequencer for port %c: pipe %c\n",
 port_name(port), pipe_name(intel_dp->pps_pipe));

 intel_dp_init_panel_power_sequencer(dev, intel_dp);
 intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);
}
567
/*
 * Invalidate cached power sequencer state after a PPS-clobbering event
 * (e.g. a power well cycle): on BXT flag the single PPS for
 * re-programming, on VLV/CHV drop the pipe binding so it is re-picked.
 * Only meaningful on VLV/CHV/BXT.
 */
void intel_power_sequencer_reset(struct drm_i915_private *dev_priv)
{
 struct drm_device *dev = &dev_priv->drm;
 struct intel_encoder *encoder;

 if (WARN_ON(!IS_VALLEYVIEW(dev) && !IS_CHERRYVIEW(dev) &&
 !IS_BROXTON(dev)))
 return;

 /* Walk every eDP encoder and reset its PPS bookkeeping. */
 for_each_intel_encoder(dev, encoder) {
 struct intel_dp *intel_dp;

 if (encoder->type != INTEL_OUTPUT_EDP)
 continue;

 intel_dp = enc_to_intel_dp(&encoder->base);
 if (IS_BROXTON(dev))
 intel_dp->pps_reset = true;
 else
 intel_dp->pps_pipe = INVALID_PIPE;
 }
}
600
/*
 * The per-sequencer panel power register offsets; pp_div is left zeroed
 * on platforms (BXT) that have no divisor register.
 */
struct pps_registers {
 i915_reg_t pp_ctrl;
 i915_reg_t pp_stat;
 i915_reg_t pp_on;
 i915_reg_t pp_off;
 i915_reg_t pp_div;
};
608
/*
 * Fill @regs with the PPS register offsets for whichever sequencer this
 * port uses: the BXT index, the VLV/CHV pipe, or fixed index 0 elsewhere.
 */
static void intel_pps_get_registers(struct drm_i915_private *dev_priv,
 struct intel_dp *intel_dp,
 struct pps_registers *regs)
{
 int pps_idx = 0;

 memset(regs, 0, sizeof(*regs));

 if (IS_BROXTON(dev_priv))
 pps_idx = bxt_power_sequencer_idx(intel_dp);
 else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
 pps_idx = vlv_power_sequencer_pipe(intel_dp);

 regs->pp_ctrl = PP_CONTROL(pps_idx);
 regs->pp_stat = PP_STATUS(pps_idx);
 regs->pp_on = PP_ON_DELAYS(pps_idx);
 regs->pp_off = PP_OFF_DELAYS(pps_idx);
 /* BXT has no divisor register. */
 if (!IS_BROXTON(dev_priv))
 regs->pp_div = PP_DIVISOR(pps_idx);
}
629
630static i915_reg_t
631_pp_ctrl_reg(struct intel_dp *intel_dp)
632{
633 struct pps_registers regs;
634
635 intel_pps_get_registers(to_i915(intel_dp_to_dev(intel_dp)), intel_dp,
636 ®s);
637
638 return regs.pp_ctrl;
639}
640
641static i915_reg_t
642_pp_stat_reg(struct intel_dp *intel_dp)
643{
644 struct pps_registers regs;
645
646 intel_pps_get_registers(to_i915(intel_dp_to_dev(intel_dp)), intel_dp,
647 ®s);
648
649 return regs.pp_stat;
650}
651
652
653
/*
 * Reboot notifier: on VLV/CHV, force the panel off with the shortest
 * power-cycle delay before restart so the PPS state is sane for the
 * next boot.
 */
static int edp_notify_handler(struct notifier_block *this, unsigned long code,
 void *unused)
{
 struct intel_dp *intel_dp = container_of(this, typeof(* intel_dp),
 edp_notifier);
 struct drm_device *dev = intel_dp_to_dev(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(dev);

 if (!is_edp(intel_dp) || code != SYS_RESTART)
 return 0;

 pps_lock(intel_dp);

 if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
 enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);
 i915_reg_t pp_ctrl_reg, pp_div_reg;
 u32 pp_div;

 pp_ctrl_reg = PP_CONTROL(pipe);
 pp_div_reg = PP_DIVISOR(pipe);
 pp_div = I915_READ(pp_div_reg);
 pp_div &= PP_REFERENCE_DIVIDER_MASK;

 /* Minimize the power-cycle delay field, then power the panel off. */
 I915_WRITE(pp_div_reg, pp_div | 0x1F);
 I915_WRITE(pp_ctrl_reg, PANEL_UNLOCK_REGS | PANEL_POWER_OFF);
 msleep(intel_dp->panel_power_cycle_delay);
 }

 pps_unlock(intel_dp);

 return 0;
}
687
688static bool edp_have_panel_power(struct intel_dp *intel_dp)
689{
690 struct drm_device *dev = intel_dp_to_dev(intel_dp);
691 struct drm_i915_private *dev_priv = to_i915(dev);
692
693 lockdep_assert_held(&dev_priv->pps_mutex);
694
695 if ((IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) &&
696 intel_dp->pps_pipe == INVALID_PIPE)
697 return false;
698
699 return (I915_READ(_pp_stat_reg(intel_dp)) & PP_ON) != 0;
700}
701
702static bool edp_have_panel_vdd(struct intel_dp *intel_dp)
703{
704 struct drm_device *dev = intel_dp_to_dev(intel_dp);
705 struct drm_i915_private *dev_priv = to_i915(dev);
706
707 lockdep_assert_held(&dev_priv->pps_mutex);
708
709 if ((IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) &&
710 intel_dp->pps_pipe == INVALID_PIPE)
711 return false;
712
713 return I915_READ(_pp_ctrl_reg(intel_dp)) & EDP_FORCE_VDD;
714}
715
/*
 * Sanity check before AUX traffic on eDP: warn loudly if neither panel
 * power nor forced VDD is up, since the transaction would then fail.
 */
static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
 struct drm_device *dev = intel_dp_to_dev(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(dev);

 if (!is_edp(intel_dp))
 return;

 if (!edp_have_panel_power(intel_dp) && !edp_have_panel_vdd(intel_dp)) {
 WARN(1, "eDP powered off while attempting aux channel communication.\n");
 DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
 I915_READ(_pp_stat_reg(intel_dp)),
 I915_READ(_pp_ctrl_reg(intel_dp)));
 }
}
732
/*
 * Wait for the AUX channel SEND_BUSY bit to clear, either by sleeping on
 * the gmbus wait queue (interrupt-driven) or by polling, with a 10 ms
 * timeout in both cases. Returns the final control register value.
 */
static uint32_t
intel_dp_aux_wait_done(struct intel_dp *intel_dp, bool has_aux_irq)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 i915_reg_t ch_ctl = intel_dp->aux_ch_ctl_reg;
 uint32_t status;
 bool done;

/* C re-reads the register each evaluation, so status tracks the latest value */
#define C (((status = I915_READ_NOTRACE(ch_ctl)) & DP_AUX_CH_CTL_SEND_BUSY) == 0)
 if (has_aux_irq)
 done = wait_event_timeout(dev_priv->gmbus_wait_queue, C,
 msecs_to_jiffies_timeout(10));
 else
 done = wait_for(C, 10) == 0;
 if (!done)
 DRM_ERROR("dp aux hw did not signal timeout (has irq: %i)!\n",
 has_aux_irq);
#undef C

 return status;
}
756
/*
 * AUX clock divider for g4x: a single divider (index 0 only) derived
 * from rawclk so the AUX bit clock runs near 2 MHz.
 */
static uint32_t g4x_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(intel_dig_port->base.base.dev);

 if (index)
 return 0;

 /* rawclk is in kHz; divide to reach ~2 MHz. */
 return DIV_ROUND_CLOSEST(dev_priv->rawclk_freq, 2000);
}
771
/*
 * AUX clock divider for ILK+: port A (CPU eDP) runs off cdclk, the PCH
 * ports run off rawclk; single divider (index 0) either way.
 */
static uint32_t ilk_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(intel_dig_port->base.base.dev);

 if (index)
 return 0;

 /* Both clocks are in kHz; target ~2 MHz AUX clock. */
 if (intel_dig_port->port == PORT_A)
 return DIV_ROUND_CLOSEST(dev_priv->cdclk_freq, 2000);
 else
 return DIV_ROUND_CLOSEST(dev_priv->rawclk_freq, 2000);
}
790
/*
 * AUX clock divider for HSW/BDW: LPT-H PCH ports get two fixed divider
 * values (63, then 72 as a retry); otherwise fall back to the ILK logic.
 * NOTE(review): 63/72 look like hardware workaround constants — confirm
 * against the LPT:H docs before changing.
 */
static uint32_t hsw_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(intel_dig_port->base.base.dev);

 if (intel_dig_port->port != PORT_A && HAS_PCH_LPT_H(dev_priv)) {
 /* Two divider candidates; index beyond them terminates the list. */
 switch (index) {
 case 0: return 63;
 case 1: return 72;
 default: return 0;
 }
 }

 return ilk_get_aux_clock_divider(intel_dp, index);
}
807
808static uint32_t skl_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
809{
810
811
812
813
814
815 return index ? 0 : 1;
816}
817
/*
 * Build the AUX_CH_CTL value for a send on pre-SKL hardware: busy/done
 * flags, timeout selection, message size, precharge time and bit-clock
 * divider.
 */
static uint32_t g4x_get_aux_send_ctl(struct intel_dp *intel_dp,
 bool has_aux_irq,
 int send_bytes,
 uint32_t aux_clock_divider)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 uint32_t precharge, timeout;

 /* Gen6 uses a shorter precharge period than later gens. */
 if (IS_GEN6(dev))
 precharge = 3;
 else
 precharge = 5;

 /* BDW port A needs the longer 600us timeout. */
 if (IS_BROADWELL(dev) && intel_dig_port->port == PORT_A)
 timeout = DP_AUX_CH_CTL_TIME_OUT_600us;
 else
 timeout = DP_AUX_CH_CTL_TIME_OUT_400us;

 return DP_AUX_CH_CTL_SEND_BUSY |
 DP_AUX_CH_CTL_DONE |
 (has_aux_irq ? DP_AUX_CH_CTL_INTERRUPT : 0) |
 DP_AUX_CH_CTL_TIME_OUT_ERROR |
 timeout |
 DP_AUX_CH_CTL_RECEIVE_ERROR |
 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT);
}
847
/*
 * Build the AUX_CH_CTL value for SKL+: no explicit divider (the clock is
 * internal); fixed 1600us timeout and 32-cycle sync pulse counts.
 */
static uint32_t skl_get_aux_send_ctl(struct intel_dp *intel_dp,
 bool has_aux_irq,
 int send_bytes,
 uint32_t unused)
{
 return DP_AUX_CH_CTL_SEND_BUSY |
 DP_AUX_CH_CTL_DONE |
 (has_aux_irq ? DP_AUX_CH_CTL_INTERRUPT : 0) |
 DP_AUX_CH_CTL_TIME_OUT_ERROR |
 DP_AUX_CH_CTL_TIME_OUT_1600us |
 DP_AUX_CH_CTL_RECEIVE_ERROR |
 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
 DP_AUX_CH_CTL_FW_SYNC_PULSE_SKL(32) |
 DP_AUX_CH_CTL_SYNC_PULSE_SKL(32);
}
863
/*
 * Perform one raw AUX channel transaction: write @send_bytes from @send,
 * read back up to @recv_size bytes into @recv. Returns the number of
 * bytes received or a negative errno. Handles VDD power-up for eDP,
 * per-divider and per-attempt retries, and error/timeout reporting.
 * Must not be called with pps_mutex held (it takes it itself).
 */
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
 const uint8_t *send, int send_bytes,
 uint8_t *recv, int recv_size)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 i915_reg_t ch_ctl = intel_dp->aux_ch_ctl_reg;
 uint32_t aux_clock_divider;
 int i, ret, recv_bytes;
 uint32_t status;
 int try, clock = 0;
 bool has_aux_irq = HAS_AUX_IRQ(dev);
 bool vdd;

 pps_lock(intel_dp);

 /*
  * Make sure VDD is up for the duration of the transfer on eDP;
  * remember whether we turned it on so we can drop it again below.
  */
 vdd = edp_panel_vdd_on(intel_dp);

 /* Keep CPU wakeup latency low so the AUX FIFO isn't underrun. */
 pm_qos_update_request(&dev_priv->pm_qos, 0);

 intel_dp_check_edp(intel_dp);

 /* Wait (briefly) for any in-flight AUX transaction to finish. */
 for (try = 0; try < 3; try++) {
 status = I915_READ_NOTRACE(ch_ctl);
 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
 break;
 msleep(1);
 }

 if (try == 3) {
 static u32 last_status = -1;
 const u32 status = I915_READ(ch_ctl);

 /* Only warn once per distinct stuck-status value. */
 if (status != last_status) {
 WARN(1, "dp_aux_ch not started status 0x%08x\n",
 status);
 last_status = status;
 }

 ret = -EBUSY;
 goto out;
 }

 /* The hardware FIFO is 20 bytes in each direction. */
 if (WARN_ON(send_bytes > 20 || recv_size > 20)) {
 ret = -E2BIG;
 goto out;
 }

 /* Try each available clock divider until one succeeds. */
 while ((aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, clock++))) {
 u32 send_ctl = intel_dp->get_aux_send_ctl(intel_dp,
 has_aux_irq,
 send_bytes,
 aux_clock_divider);

 /* Up to five attempts per divider. */
 for (try = 0; try < 5; try++) {
 /* Load the outgoing payload into the data registers. */
 for (i = 0; i < send_bytes; i += 4)
 I915_WRITE(intel_dp->aux_ch_data_reg[i >> 2],
 intel_dp_pack_aux(send + i,
 send_bytes - i));

 /* Fire off the transaction. */
 I915_WRITE(ch_ctl, send_ctl);

 status = intel_dp_aux_wait_done(intel_dp, has_aux_irq);

 /* Clear done and sticky error bits (write-1-to-clear). */
 I915_WRITE(ch_ctl,
 status |
 DP_AUX_CH_CTL_DONE |
 DP_AUX_CH_CTL_TIME_OUT_ERROR |
 DP_AUX_CH_CTL_RECEIVE_ERROR);

 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR)
 continue;

 /*
  * Receive errors may be transient; back off briefly
  * before retrying.
  */
 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
 usleep_range(400, 500);
 continue;
 }
 if (status & DP_AUX_CH_CTL_DONE)
 goto done;
 }
 }

 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
 ret = -EBUSY;
 goto out;
 }

done:
 /* A receive error after DONE still invalidates the transfer. */
 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
 ret = -EIO;
 goto out;
 }

 /* Sink-side timeouts are expected occasionally; just report them. */
 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
 ret = -ETIMEDOUT;
 goto out;
 }

 /* How many bytes the hardware says it received. */
 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);

 /*
  * Zero or >20 bytes is outside what the FIFO can hold and indicates
  * a confused sink; wait a bit and have the caller retry.
  */
 if (recv_bytes == 0 || recv_bytes > 20) {
 DRM_DEBUG_KMS("Forbidden recv_bytes = %d on aux transaction\n",
 recv_bytes);

 usleep_range(1000, 1500);
 ret = -EBUSY;
 goto out;
 }

 if (recv_bytes > recv_size)
 recv_bytes = recv_size;

 /* Unpack the received words into the caller's buffer. */
 for (i = 0; i < recv_bytes; i += 4)
 intel_dp_unpack_aux(I915_READ(intel_dp->aux_ch_data_reg[i >> 2]),
 recv + i, recv_bytes - i);

 ret = recv_bytes;
out:
 pm_qos_update_request(&dev_priv->pm_qos, PM_QOS_DEFAULT_VALUE);

 if (vdd)
 edp_panel_vdd_off(intel_dp, false);

 pps_unlock(intel_dp);

 return ret;
}
1035
/* AUX message header: 3 address/command bytes (+1 length byte if data). */
#define BARE_ADDRESS_SIZE 3
#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
/*
 * drm_dp_aux .transfer hook: encode @msg into the raw AUX wire format,
 * run it through intel_dp_aux_ch(), and decode the reply. Returns bytes
 * transferred or a negative errno.
 */
static ssize_t
intel_dp_aux_transfer(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
{
 struct intel_dp *intel_dp = container_of(aux, struct intel_dp, aux);
 uint8_t txbuf[20], rxbuf[20];
 size_t txsize, rxsize;
 int ret;

 /* 4-bit request + 20-bit address, big-endian, then length-1. */
 txbuf[0] = (msg->request << 4) |
 ((msg->address >> 16) & 0xf);
 txbuf[1] = (msg->address >> 8) & 0xff;
 txbuf[2] = msg->address & 0xff;
 txbuf[3] = msg->size - 1;

 switch (msg->request & ~DP_AUX_I2C_MOT) {
 case DP_AUX_NATIVE_WRITE:
 case DP_AUX_I2C_WRITE:
 case DP_AUX_I2C_WRITE_STATUS_UPDATE:
 /* Zero-size writes are address-only probes. */
 txsize = msg->size ? HEADER_SIZE + msg->size : BARE_ADDRESS_SIZE;
 rxsize = 2;

 if (WARN_ON(txsize > 20))
 return -E2BIG;

 WARN_ON(!msg->buffer != !msg->size);

 if (msg->buffer)
 memcpy(txbuf + HEADER_SIZE, msg->buffer, msg->size);

 ret = intel_dp_aux_ch(intel_dp, txbuf, txsize, rxbuf, rxsize);
 if (ret > 0) {
 msg->reply = rxbuf[0] >> 4;

 if (ret > 1) {
 /* Partial write: sink reported how much it took. */
 ret = clamp_t(int, rxbuf[1], 0, msg->size);
 } else {
 /* No count byte: the full payload was written. */
 ret = msg->size;
 }
 }
 break;

 case DP_AUX_NATIVE_READ:
 case DP_AUX_I2C_READ:
 txsize = msg->size ? HEADER_SIZE : BARE_ADDRESS_SIZE;
 rxsize = msg->size + 1;

 if (WARN_ON(rxsize > 20))
 return -E2BIG;

 ret = intel_dp_aux_ch(intel_dp, txbuf, txsize, rxbuf, rxsize);
 if (ret > 0) {
 msg->reply = rxbuf[0] >> 4;

 /* Strip the reply byte before handing data to the caller. */
 ret--;
 memcpy(msg->buffer, rxbuf + 1, ret);
 }
 break;

 default:
 ret = -EINVAL;
 break;
 }

 return ret;
}
1110
/*
 * Map a DDI port to the AUX channel it should use, honoring a VBT
 * alternate-AUX override; falls back to the port's own channel (and to
 * AUX A on an invalid VBT value).
 */
static enum port intel_aux_port(struct drm_i915_private *dev_priv,
 enum port port)
{
 const struct ddi_vbt_port_info *info =
 &dev_priv->vbt.ddi_port_info[port];
 enum port aux_port;

 if (!info->alternate_aux_channel) {
 DRM_DEBUG_KMS("using AUX %c for port %c (platform default)\n",
 port_name(port), port_name(port));
 return port;
 }

 switch (info->alternate_aux_channel) {
 case DP_AUX_A:
 aux_port = PORT_A;
 break;
 case DP_AUX_B:
 aux_port = PORT_B;
 break;
 case DP_AUX_C:
 aux_port = PORT_C;
 break;
 case DP_AUX_D:
 aux_port = PORT_D;
 break;
 default:
 MISSING_CASE(info->alternate_aux_channel);
 aux_port = PORT_A;
 break;
 }

 DRM_DEBUG_KMS("using AUX %c for port %c (VBT)\n",
 port_name(aux_port), port_name(port));

 return aux_port;
}
1148
1149static i915_reg_t g4x_aux_ctl_reg(struct drm_i915_private *dev_priv,
1150 enum port port)
1151{
1152 switch (port) {
1153 case PORT_B:
1154 case PORT_C:
1155 case PORT_D:
1156 return DP_AUX_CH_CTL(port);
1157 default:
1158 MISSING_CASE(port);
1159 return DP_AUX_CH_CTL(PORT_B);
1160 }
1161}
1162
1163static i915_reg_t g4x_aux_data_reg(struct drm_i915_private *dev_priv,
1164 enum port port, int index)
1165{
1166 switch (port) {
1167 case PORT_B:
1168 case PORT_C:
1169 case PORT_D:
1170 return DP_AUX_CH_DATA(port, index);
1171 default:
1172 MISSING_CASE(port);
1173 return DP_AUX_CH_DATA(PORT_B, index);
1174 }
1175}
1176
1177static i915_reg_t ilk_aux_ctl_reg(struct drm_i915_private *dev_priv,
1178 enum port port)
1179{
1180 switch (port) {
1181 case PORT_A:
1182 return DP_AUX_CH_CTL(port);
1183 case PORT_B:
1184 case PORT_C:
1185 case PORT_D:
1186 return PCH_DP_AUX_CH_CTL(port);
1187 default:
1188 MISSING_CASE(port);
1189 return DP_AUX_CH_CTL(PORT_A);
1190 }
1191}
1192
1193static i915_reg_t ilk_aux_data_reg(struct drm_i915_private *dev_priv,
1194 enum port port, int index)
1195{
1196 switch (port) {
1197 case PORT_A:
1198 return DP_AUX_CH_DATA(port, index);
1199 case PORT_B:
1200 case PORT_C:
1201 case PORT_D:
1202 return PCH_DP_AUX_CH_DATA(port, index);
1203 default:
1204 MISSING_CASE(port);
1205 return DP_AUX_CH_DATA(PORT_A, index);
1206 }
1207}
1208
1209static i915_reg_t skl_aux_ctl_reg(struct drm_i915_private *dev_priv,
1210 enum port port)
1211{
1212 switch (port) {
1213 case PORT_A:
1214 case PORT_B:
1215 case PORT_C:
1216 case PORT_D:
1217 return DP_AUX_CH_CTL(port);
1218 default:
1219 MISSING_CASE(port);
1220 return DP_AUX_CH_CTL(PORT_A);
1221 }
1222}
1223
1224static i915_reg_t skl_aux_data_reg(struct drm_i915_private *dev_priv,
1225 enum port port, int index)
1226{
1227 switch (port) {
1228 case PORT_A:
1229 case PORT_B:
1230 case PORT_C:
1231 case PORT_D:
1232 return DP_AUX_CH_DATA(port, index);
1233 default:
1234 MISSING_CASE(port);
1235 return DP_AUX_CH_DATA(PORT_A, index);
1236 }
1237}
1238
1239static i915_reg_t intel_aux_ctl_reg(struct drm_i915_private *dev_priv,
1240 enum port port)
1241{
1242 if (INTEL_INFO(dev_priv)->gen >= 9)
1243 return skl_aux_ctl_reg(dev_priv, port);
1244 else if (HAS_PCH_SPLIT(dev_priv))
1245 return ilk_aux_ctl_reg(dev_priv, port);
1246 else
1247 return g4x_aux_ctl_reg(dev_priv, port);
1248}
1249
1250static i915_reg_t intel_aux_data_reg(struct drm_i915_private *dev_priv,
1251 enum port port, int index)
1252{
1253 if (INTEL_INFO(dev_priv)->gen >= 9)
1254 return skl_aux_data_reg(dev_priv, port, index);
1255 else if (HAS_PCH_SPLIT(dev_priv))
1256 return ilk_aux_data_reg(dev_priv, port, index);
1257 else
1258 return g4x_aux_data_reg(dev_priv, port, index);
1259}
1260
/*
 * Cache the AUX control and data register offsets for this port,
 * honoring any VBT alternate-AUX-channel override.
 */
static void intel_aux_reg_init(struct intel_dp *intel_dp)
{
 struct drm_i915_private *dev_priv = to_i915(intel_dp_to_dev(intel_dp));
 enum port port = intel_aux_port(dev_priv,
 dp_to_dig_port(intel_dp)->port);
 int i;

 intel_dp->aux_ch_ctl_reg = intel_aux_ctl_reg(dev_priv, port);
 for (i = 0; i < ARRAY_SIZE(intel_dp->aux_ch_data_reg); i++)
 intel_dp->aux_ch_data_reg[i] = intel_aux_data_reg(dev_priv, port, i);
}
1272
/* Free what intel_dp_aux_init() allocated (the kasprintf'd AUX name). */
static void
intel_dp_aux_fini(struct intel_dp *intel_dp)
{
 kfree(intel_dp->aux.name);
}
1278
/*
 * Set up the drm_dp_aux adapter for this port: register offsets, a
 * human-readable name ("DPDDC-B" etc.), and the transfer callback.
 * NOTE(review): the kasprintf() result is not checked here; a NULL name
 * is tolerated downstream — confirm before relying on it.
 */
static void
intel_dp_aux_init(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 enum port port = intel_dig_port->port;

 intel_aux_reg_init(intel_dp);
 drm_dp_aux_init(&intel_dp->aux);

 intel_dp->aux.name = kasprintf(GFP_KERNEL, "DPDDC-%c", port_name(port));
 intel_dp->aux.transfer = intel_dp_aux_transfer;
}
1292
/*
 * Report the sink's supported link rates: the explicit rate table when
 * the sink provides one (eDP 1.4), otherwise the DP-spec default rates
 * with a count derived from DP_MAX_LINK_RATE (0x06/0x0a/0x14 map to
 * 1/2/3 entries via the >>3 trick).
 */
static int
intel_dp_sink_rates(struct intel_dp *intel_dp, const int **sink_rates)
{
 if (intel_dp->num_sink_rates) {
 *sink_rates = intel_dp->sink_rates;
 return intel_dp->num_sink_rates;
 }

 *sink_rates = default_rates;

 return (intel_dp_max_link_bw(intel_dp) >> 3) + 1;
}
1305
1306bool intel_dp_source_supports_hbr2(struct intel_dp *intel_dp)
1307{
1308 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1309 struct drm_device *dev = dig_port->base.base.dev;
1310
1311
1312 if (IS_SKL_REVID(dev, 0, SKL_REVID_B0))
1313 return false;
1314
1315 if ((IS_HASWELL(dev) && !IS_HSW_ULX(dev)) || IS_BROADWELL(dev) ||
1316 (INTEL_INFO(dev)->gen >= 9))
1317 return true;
1318 else
1319 return false;
1320}
1321
1322static int
1323intel_dp_source_rates(struct intel_dp *intel_dp, const int **source_rates)
1324{
1325 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1326 struct drm_device *dev = dig_port->base.base.dev;
1327 int size;
1328
1329 if (IS_BROXTON(dev)) {
1330 *source_rates = bxt_rates;
1331 size = ARRAY_SIZE(bxt_rates);
1332 } else if (IS_SKYLAKE(dev) || IS_KABYLAKE(dev)) {
1333 *source_rates = skl_rates;
1334 size = ARRAY_SIZE(skl_rates);
1335 } else {
1336 *source_rates = default_rates;
1337 size = ARRAY_SIZE(default_rates);
1338 }
1339
1340
1341 if (!intel_dp_source_supports_hbr2(intel_dp))
1342 size--;
1343
1344 return size;
1345}
1346
1347static void
1348intel_dp_set_clock(struct intel_encoder *encoder,
1349 struct intel_crtc_state *pipe_config)
1350{
1351 struct drm_device *dev = encoder->base.dev;
1352 const struct dp_link_dpll *divisor = NULL;
1353 int i, count = 0;
1354
1355 if (IS_G4X(dev)) {
1356 divisor = gen4_dpll;
1357 count = ARRAY_SIZE(gen4_dpll);
1358 } else if (HAS_PCH_SPLIT(dev)) {
1359 divisor = pch_dpll;
1360 count = ARRAY_SIZE(pch_dpll);
1361 } else if (IS_CHERRYVIEW(dev)) {
1362 divisor = chv_dpll;
1363 count = ARRAY_SIZE(chv_dpll);
1364 } else if (IS_VALLEYVIEW(dev)) {
1365 divisor = vlv_dpll;
1366 count = ARRAY_SIZE(vlv_dpll);
1367 }
1368
1369 if (divisor && count) {
1370 for (i = 0; i < count; i++) {
1371 if (pipe_config->port_clock == divisor[i].clock) {
1372 pipe_config->dpll = divisor[i].dpll;
1373 pipe_config->clock_set = true;
1374 break;
1375 }
1376 }
1377 }
1378}
1379
1380static int intersect_rates(const int *source_rates, int source_len,
1381 const int *sink_rates, int sink_len,
1382 int *common_rates)
1383{
1384 int i = 0, j = 0, k = 0;
1385
1386 while (i < source_len && j < sink_len) {
1387 if (source_rates[i] == sink_rates[j]) {
1388 if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
1389 return k;
1390 common_rates[k] = source_rates[i];
1391 ++k;
1392 ++i;
1393 ++j;
1394 } else if (source_rates[i] < sink_rates[j]) {
1395 ++i;
1396 } else {
1397 ++j;
1398 }
1399 }
1400 return k;
1401}
1402
/* Compute the link rates both source and sink support, into @common_rates. */
static int intel_dp_common_rates(struct intel_dp *intel_dp,
				 int *common_rates)
{
	const int *src, *snk;
	int nsrc, nsnk;

	nsnk = intel_dp_sink_rates(intel_dp, &snk);
	nsrc = intel_dp_source_rates(intel_dp, &src);

	return intersect_rates(src, nsrc, snk, nsnk, common_rates);
}
1416
/*
 * Format @nelem integers from @array into @str as a ", "-separated list,
 * truncating silently when the buffer runs out.
 */
static void snprintf_int_array(char *str, size_t len,
			       const int *array, int nelem)
{
	int idx = 0;

	*str = '\0';

	while (idx < nelem) {
		const char *sep = idx ? ", " : "";
		int written = snprintf(str, len, "%s%d", sep, array[idx]);

		/* Stop as soon as the remaining space is exhausted. */
		if (written >= len)
			return;
		str += written;
		len -= written;
		idx++;
	}
}
1432
1433static void intel_dp_print_rates(struct intel_dp *intel_dp)
1434{
1435 const int *source_rates, *sink_rates;
1436 int source_len, sink_len, common_len;
1437 int common_rates[DP_MAX_SUPPORTED_RATES];
1438 char str[128];
1439
1440 if ((drm_debug & DRM_UT_KMS) == 0)
1441 return;
1442
1443 source_len = intel_dp_source_rates(intel_dp, &source_rates);
1444 snprintf_int_array(str, sizeof(str), source_rates, source_len);
1445 DRM_DEBUG_KMS("source rates: %s\n", str);
1446
1447 sink_len = intel_dp_sink_rates(intel_dp, &sink_rates);
1448 snprintf_int_array(str, sizeof(str), sink_rates, sink_len);
1449 DRM_DEBUG_KMS("sink rates: %s\n", str);
1450
1451 common_len = intel_dp_common_rates(intel_dp, common_rates);
1452 snprintf_int_array(str, sizeof(str), common_rates, common_len);
1453 DRM_DEBUG_KMS("common rates: %s\n", str);
1454}
1455
1456static void intel_dp_print_hw_revision(struct intel_dp *intel_dp)
1457{
1458 uint8_t rev;
1459 int len;
1460
1461 if ((drm_debug & DRM_UT_KMS) == 0)
1462 return;
1463
1464 if (!(intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
1465 DP_DWN_STRM_PORT_PRESENT))
1466 return;
1467
1468 len = drm_dp_dpcd_read(&intel_dp->aux, DP_BRANCH_HW_REV, &rev, 1);
1469 if (len < 0)
1470 return;
1471
1472 DRM_DEBUG_KMS("sink hw revision: %d.%d\n", (rev & 0xf0) >> 4, rev & 0xf);
1473}
1474
1475static void intel_dp_print_sw_revision(struct intel_dp *intel_dp)
1476{
1477 uint8_t rev[2];
1478 int len;
1479
1480 if ((drm_debug & DRM_UT_KMS) == 0)
1481 return;
1482
1483 if (!(intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
1484 DP_DWN_STRM_PORT_PRESENT))
1485 return;
1486
1487 len = drm_dp_dpcd_read(&intel_dp->aux, DP_BRANCH_SW_REV, &rev, 2);
1488 if (len < 0)
1489 return;
1490
1491 DRM_DEBUG_KMS("sink sw revision: %d.%d\n", rev[0], rev[1]);
1492}
1493
1494static int rate_to_index(int find, const int *rates)
1495{
1496 int i = 0;
1497
1498 for (i = 0; i < DP_MAX_SUPPORTED_RATES; ++i)
1499 if (find == rates[i])
1500 break;
1501
1502 return i;
1503}
1504
1505int
1506intel_dp_max_link_rate(struct intel_dp *intel_dp)
1507{
1508 int rates[DP_MAX_SUPPORTED_RATES] = {};
1509 int len;
1510
1511 len = intel_dp_common_rates(intel_dp, rates);
1512 if (WARN_ON(len <= 0))
1513 return 162000;
1514
1515 return rates[len - 1];
1516}
1517
/* Translate a link rate into an index into the sink's advertised rate table. */
int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
{
	return rate_to_index(rate, intel_dp->sink_rates);
}
1522
1523void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
1524 uint8_t *link_bw, uint8_t *rate_select)
1525{
1526 if (intel_dp->num_sink_rates) {
1527 *link_bw = 0;
1528 *rate_select =
1529 intel_dp_rate_select(intel_dp, port_clock);
1530 } else {
1531 *link_bw = drm_dp_link_rate_to_bw_code(port_clock);
1532 *rate_select = 0;
1533 }
1534}
1535
1536static int intel_dp_compute_bpp(struct intel_dp *intel_dp,
1537 struct intel_crtc_state *pipe_config)
1538{
1539 int bpp, bpc;
1540
1541 bpp = pipe_config->pipe_bpp;
1542 bpc = drm_dp_downstream_max_bpc(intel_dp->dpcd, intel_dp->downstream_ports);
1543
1544 if (bpc > 0)
1545 bpp = min(bpp, 3*bpc);
1546
1547 return bpp;
1548}
1549
1550bool
1551intel_dp_compute_config(struct intel_encoder *encoder,
1552 struct intel_crtc_state *pipe_config,
1553 struct drm_connector_state *conn_state)
1554{
1555 struct drm_device *dev = encoder->base.dev;
1556 struct drm_i915_private *dev_priv = to_i915(dev);
1557 struct drm_display_mode *adjusted_mode = &pipe_config->base.adjusted_mode;
1558 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1559 enum port port = dp_to_dig_port(intel_dp)->port;
1560 struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->base.crtc);
1561 struct intel_connector *intel_connector = intel_dp->attached_connector;
1562 int lane_count, clock;
1563 int min_lane_count = 1;
1564 int max_lane_count = intel_dp_max_lane_count(intel_dp);
1565
1566 int min_clock = 0;
1567 int max_clock;
1568 int bpp, mode_rate;
1569 int link_avail, link_clock;
1570 int common_rates[DP_MAX_SUPPORTED_RATES] = {};
1571 int common_len;
1572 uint8_t link_bw, rate_select;
1573
1574 common_len = intel_dp_common_rates(intel_dp, common_rates);
1575
1576
1577 WARN_ON(common_len <= 0);
1578
1579 max_clock = common_len - 1;
1580
1581 if (HAS_PCH_SPLIT(dev) && !HAS_DDI(dev) && port != PORT_A)
1582 pipe_config->has_pch_encoder = true;
1583
1584 pipe_config->has_drrs = false;
1585 pipe_config->has_audio = intel_dp->has_audio && port != PORT_A;
1586
1587 if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
1588 intel_fixed_panel_mode(intel_connector->panel.fixed_mode,
1589 adjusted_mode);
1590
1591 if (INTEL_INFO(dev)->gen >= 9) {
1592 int ret;
1593 ret = skl_update_scaler_crtc(pipe_config);
1594 if (ret)
1595 return ret;
1596 }
1597
1598 if (HAS_GMCH_DISPLAY(dev))
1599 intel_gmch_panel_fitting(intel_crtc, pipe_config,
1600 intel_connector->panel.fitting_mode);
1601 else
1602 intel_pch_panel_fitting(intel_crtc, pipe_config,
1603 intel_connector->panel.fitting_mode);
1604 }
1605
1606 if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
1607 return false;
1608
1609 DRM_DEBUG_KMS("DP link computation with max lane count %i "
1610 "max bw %d pixel clock %iKHz\n",
1611 max_lane_count, common_rates[max_clock],
1612 adjusted_mode->crtc_clock);
1613
1614
1615
1616 bpp = intel_dp_compute_bpp(intel_dp, pipe_config);
1617 if (is_edp(intel_dp)) {
1618
1619
1620 if (intel_connector->base.display_info.bpc == 0 &&
1621 (dev_priv->vbt.edp.bpp && dev_priv->vbt.edp.bpp < bpp)) {
1622 DRM_DEBUG_KMS("clamping bpp for eDP panel to BIOS-provided %i\n",
1623 dev_priv->vbt.edp.bpp);
1624 bpp = dev_priv->vbt.edp.bpp;
1625 }
1626
1627
1628
1629
1630
1631
1632
1633
1634 min_lane_count = max_lane_count;
1635 min_clock = max_clock;
1636 }
1637
1638 for (; bpp >= 6*3; bpp -= 2*3) {
1639 mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
1640 bpp);
1641
1642 for (clock = min_clock; clock <= max_clock; clock++) {
1643 for (lane_count = min_lane_count;
1644 lane_count <= max_lane_count;
1645 lane_count <<= 1) {
1646
1647 link_clock = common_rates[clock];
1648 link_avail = intel_dp_max_data_rate(link_clock,
1649 lane_count);
1650
1651 if (mode_rate <= link_avail) {
1652 goto found;
1653 }
1654 }
1655 }
1656 }
1657
1658 return false;
1659
1660found:
1661 if (intel_dp->color_range_auto) {
1662
1663
1664
1665
1666
1667 pipe_config->limited_color_range =
1668 bpp != 18 && drm_match_cea_mode(adjusted_mode) > 1;
1669 } else {
1670 pipe_config->limited_color_range =
1671 intel_dp->limited_color_range;
1672 }
1673
1674 pipe_config->lane_count = lane_count;
1675
1676 pipe_config->pipe_bpp = bpp;
1677 pipe_config->port_clock = common_rates[clock];
1678
1679 intel_dp_compute_rate(intel_dp, pipe_config->port_clock,
1680 &link_bw, &rate_select);
1681
1682 DRM_DEBUG_KMS("DP link bw %02x rate select %02x lane count %d clock %d bpp %d\n",
1683 link_bw, rate_select, pipe_config->lane_count,
1684 pipe_config->port_clock, bpp);
1685 DRM_DEBUG_KMS("DP link bw required %i available %i\n",
1686 mode_rate, link_avail);
1687
1688 intel_link_compute_m_n(bpp, lane_count,
1689 adjusted_mode->crtc_clock,
1690 pipe_config->port_clock,
1691 &pipe_config->dp_m_n);
1692
1693 if (intel_connector->panel.downclock_mode != NULL &&
1694 dev_priv->drrs.type == SEAMLESS_DRRS_SUPPORT) {
1695 pipe_config->has_drrs = true;
1696 intel_link_compute_m_n(bpp, lane_count,
1697 intel_connector->panel.downclock_mode->clock,
1698 pipe_config->port_clock,
1699 &pipe_config->dp_m2_n2);
1700 }
1701
1702
1703
1704
1705
1706 if (is_edp(intel_dp) &&
1707 (IS_SKYLAKE(dev_priv) || IS_KABYLAKE(dev_priv))) {
1708 int vco;
1709
1710 switch (pipe_config->port_clock / 2) {
1711 case 108000:
1712 case 216000:
1713 vco = 8640000;
1714 break;
1715 default:
1716 vco = 8100000;
1717 break;
1718 }
1719
1720 to_intel_atomic_state(pipe_config->base.state)->cdclk_pll_vco = vco;
1721 }
1722
1723 if (!HAS_DDI(dev))
1724 intel_dp_set_clock(encoder, pipe_config);
1725
1726 return true;
1727}
1728
/* Record the negotiated link parameters on the intel_dp for later use. */
void intel_dp_set_link_params(struct intel_dp *intel_dp,
			      int link_rate, uint8_t lane_count,
			      bool link_mst)
{
	intel_dp->link_rate = link_rate;
	intel_dp->lane_count = lane_count;
	intel_dp->link_mst = link_mst;
}
1737
/*
 * Build the DP port register value (intel_dp->DP) for the configuration
 * being set. For CPT ports this also updates the transcoder DP control
 * register; the port register itself is not written here.
 */
static void intel_dp_prepare(struct intel_encoder *encoder,
			     struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	enum port port = dp_to_dig_port(intel_dp)->port;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->base.adjusted_mode;

	intel_dp_set_link_params(intel_dp, pipe_config->port_clock,
				 pipe_config->lane_count,
				 intel_crtc_has_type(pipe_config,
						     INTEL_OUTPUT_DP_MST));

	/* Keep only the detect bit; everything else is rebuilt from scratch. */
	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;

	/* Lowest voltage swing and pre-emphasis settings. */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Gen7 port A uses CPT-style training bits and pipe select. */
	if (IS_GEN7(dev) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		/* Pipe select lives in bit 29 here. */
		intel_dp->DP |= crtc->pipe << 29;
	} else if (HAS_PCH_CPT(dev) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		/* On CPT enhanced framing is a transcoder-level setting. */
		trans_dp = I915_READ(TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		I915_WRITE(TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		/* Non-CPT path: sync polarity, framing and pipe select in DP reg. */
		if (!HAS_PCH_SPLIT(dev) && !IS_VALLEYVIEW(dev) &&
		    !IS_CHERRYVIEW(dev) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev))
			intel_dp->DP |= DP_PIPE_SELECT_CHV(crtc->pipe);
		else if (crtc->pipe == PIPE_B)
			intel_dp->DP |= DP_PIPEB_SELECT;
	}
}
1823
/*
 * Panel power sequencer status mask/value pairs for wait_panel_status():
 *  IDLE_ON    - panel fully on and the sequencer idle
 *  IDLE_OFF   - panel fully off and the sequencer idle
 *  IDLE_CYCLE - panel off with the power-cycle delay done
 */
#define IDLE_ON_MASK		(PP_ON | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE   	(PP_ON | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_ON_IDLE)

#define IDLE_OFF_MASK		(PP_ON | PP_SEQUENCE_MASK | 0                     | 0)
#define IDLE_OFF_VALUE		(0     | PP_SEQUENCE_NONE | 0                     | 0)

#define IDLE_CYCLE_MASK		(PP_ON | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE	(0     | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)

/* Forward declaration; defined later in this file. */
static void intel_pps_verify_state(struct drm_i915_private *dev_priv,
				   struct intel_dp *intel_dp);
1835
/*
 * Wait (up to 5 seconds) for the panel power sequencer status register to
 * satisfy (status & mask) == value, logging an error on timeout.
 * Caller must hold pps_mutex.
 */
static void wait_panel_status(struct intel_dp *intel_dp,
			      u32 mask,
			      u32 value)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dev);
	i915_reg_t pp_stat_reg, pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	intel_pps_verify_state(dev_priv, intel_dp);

	pp_stat_reg = _pp_stat_reg(intel_dp);
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
			mask, value,
			I915_READ(pp_stat_reg),
			I915_READ(pp_ctrl_reg));

	if (intel_wait_for_register(dev_priv,
				    pp_stat_reg, mask, value,
				    5000))
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
				I915_READ(pp_stat_reg),
				I915_READ(pp_ctrl_reg));

	DRM_DEBUG_KMS("Wait complete\n");
}
1865
/* Wait for the panel to be fully powered on (sequencer idle). */
static void wait_panel_on(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power on\n");
	wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}
1871
/* Wait for the panel to be fully powered off (sequencer idle). */
static void wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}
1877
1878static void wait_panel_power_cycle(struct intel_dp *intel_dp)
1879{
1880 ktime_t panel_power_on_time;
1881 s64 panel_power_off_duration;
1882
1883 DRM_DEBUG_KMS("Wait for panel power cycle\n");
1884
1885
1886
1887 panel_power_on_time = ktime_get_boottime();
1888 panel_power_off_duration = ktime_ms_delta(panel_power_on_time, intel_dp->panel_power_off_time);
1889
1890
1891
1892 if (panel_power_off_duration < (s64)intel_dp->panel_power_cycle_delay)
1893 wait_remaining_ms_from_jiffies(jiffies,
1894 intel_dp->panel_power_cycle_delay - panel_power_off_duration);
1895
1896 wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
1897}
1898
/* Honor the panel's power-on to backlight-on delay. */
static void wait_backlight_on(struct intel_dp *intel_dp)
{
	wait_remaining_ms_from_jiffies(intel_dp->last_power_on,
				       intel_dp->backlight_on_delay);
}
1904
/* Honor the panel's backlight-off delay after disabling the backlight. */
static void edp_wait_backlight_off(struct intel_dp *intel_dp)
{
	wait_remaining_ms_from_jiffies(intel_dp->last_backlight_off,
				       intel_dp->backlight_off_delay);
}
1910
1911
1912
1913
1914
/*
 * Read the panel power control register, forcing the register-unlock key
 * into the returned value on pre-DDI platforms so subsequent writes take
 * effect. Warns if the hardware had the registers locked.
 * Caller must hold pps_mutex.
 */
static u32 ironlake_get_pp_control(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dev);
	u32 control;

	lockdep_assert_held(&dev_priv->pps_mutex);

	control = I915_READ(_pp_ctrl_reg(intel_dp));
	if (WARN_ON(!HAS_DDI(dev_priv) &&
		    (control & PANEL_UNLOCK_MASK) != PANEL_UNLOCK_REGS)) {
		control &= ~PANEL_UNLOCK_MASK;
		control |= PANEL_UNLOCK_REGS;
	}
	return control;
}
1931
1932
1933
1934
1935
1936
/*
 * Force panel VDD on so AUX/DDC transactions work without a full panel
 * power-up. Returns true if VDD was not already requested, i.e. the caller
 * must balance with a VDD-off. Caller must hold pps_mutex.
 */
static bool edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	u32 pp;
	i915_reg_t pp_stat_reg, pp_ctrl_reg;
	bool need_to_disable = !intel_dp->want_panel_vdd;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return false;

	/* Cancel any pending VDD-off work; we want VDD held from now on. */
	cancel_delayed_work(&intel_dp->panel_vdd_work);
	intel_dp->want_panel_vdd = true;

	/* Already on in hardware: nothing more to do. */
	if (edp_have_panel_vdd(intel_dp))
		return need_to_disable;

	/* The AUX power domain is held as long as VDD is forced on. */
	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_get(dev_priv, power_domain);

	DRM_DEBUG_KMS("Turning eDP port %c VDD on\n",
		      port_name(intel_dig_port->port));

	if (!edp_have_panel_power(intel_dp))
		wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(intel_dp);
	pp |= EDP_FORCE_VDD;

	pp_stat_reg = _pp_stat_reg(intel_dp);
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);
	DRM_DEBUG_KMS("PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
			I915_READ(pp_stat_reg), I915_READ(pp_ctrl_reg));

	/*
	 * If the panel isn't powered, give it the full power-up delay
	 * before AUX transactions are attempted.
	 */
	if (!edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP port %c panel power wasn't enabled\n",
			      port_name(intel_dig_port->port));
		msleep(intel_dp->panel_power_up_delay);
	}

	return need_to_disable;
}
1989
1990
1991
1992
1993
1994
1995
1996
1997void intel_edp_panel_vdd_on(struct intel_dp *intel_dp)
1998{
1999 bool vdd;
2000
2001 if (!is_edp(intel_dp))
2002 return;
2003
2004 pps_lock(intel_dp);
2005 vdd = edp_panel_vdd_on(intel_dp);
2006 pps_unlock(intel_dp);
2007
2008 I915_STATE_WARN(!vdd, "eDP port %c VDD already requested on\n",
2009 port_name(dp_to_dig_port(intel_dp)->port));
2010}
2011
/*
 * Actually drop the forced panel VDD in hardware and release the AUX power
 * domain grabbed by edp_panel_vdd_on(). Caller must hold pps_mutex, and
 * want_panel_vdd must already be false.
 */
static void edp_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_port *intel_dig_port =
		dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	enum intel_display_power_domain power_domain;
	u32 pp;
	i915_reg_t pp_stat_reg, pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	WARN_ON(intel_dp->want_panel_vdd);

	if (!edp_have_panel_vdd(intel_dp))
		return;

	DRM_DEBUG_KMS("Turning eDP port %c VDD off\n",
		      port_name(intel_dig_port->port));

	pp = ironlake_get_pp_control(intel_dp);
	pp &= ~EDP_FORCE_VDD;

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
	pp_stat_reg = _pp_stat_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	DRM_DEBUG_KMS("PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
	I915_READ(pp_stat_reg), I915_READ(pp_ctrl_reg));

	/* Dropping VDD with panel power off counts as a power-off event. */
	if ((pp & PANEL_POWER_ON) == 0)
		intel_dp->panel_power_off_time = ktime_get_boottime();

	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_put(dev_priv, power_domain);
}
2052
2053static void edp_panel_vdd_work(struct work_struct *__work)
2054{
2055 struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
2056 struct intel_dp, panel_vdd_work);
2057
2058 pps_lock(intel_dp);
2059 if (!intel_dp->want_panel_vdd)
2060 edp_panel_vdd_off_sync(intel_dp);
2061 pps_unlock(intel_dp);
2062}
2063
/*
 * Queue the deferred VDD-off work. The delay is generous (5x the power
 * cycle delay) so a burst of AUX traffic keeps VDD up rather than
 * repeatedly power-cycling it.
 */
static void edp_panel_vdd_schedule_off(struct intel_dp *intel_dp)
{
	unsigned long delay;

	delay = msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5);
	schedule_delayed_work(&intel_dp->panel_vdd_work, delay);
}
2076
2077
2078
2079
2080
2081
/*
 * Release a VDD reference; either drop VDD immediately (@sync) or via the
 * delayed work. Caller must hold pps_mutex. Warns if VDD was not on.
 */
static void edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	struct drm_i915_private *dev_priv = to_i915(intel_dp_to_dev(intel_dp));

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return;

	I915_STATE_WARN(!intel_dp->want_panel_vdd, "eDP port %c VDD not forced on",
			port_name(dp_to_dig_port(intel_dp)->port));

	intel_dp->want_panel_vdd = false;

	if (sync)
		edp_panel_vdd_off_sync(intel_dp);
	else
		edp_panel_vdd_schedule_off(intel_dp);
}
2101
/*
 * Power the eDP panel on through the panel power sequencer.
 * Caller must hold pps_mutex. No-op for non-eDP or if already on.
 */
static void edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dev);
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP port %c panel power on\n",
		      port_name(dp_to_dig_port(intel_dp)->port));

	if (WARN(edp_have_panel_power(intel_dp),
		 "eDP port %c panel power already on\n",
		 port_name(dp_to_dig_port(intel_dp)->port)))
		return;

	wait_panel_power_cycle(intel_dp);

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
	pp = ironlake_get_pp_control(intel_dp);
	if (IS_GEN5(dev)) {
		/* Gen5: clear the reset bit before powering on — presumably
		 * a hw quirk; keep this write ordering. */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(pp_ctrl_reg, pp);
		POSTING_READ(pp_ctrl_reg);
	}

	pp |= PANEL_POWER_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	wait_panel_on(intel_dp);
	intel_dp->last_power_on = jiffies;

	if (IS_GEN5(dev)) {
		/* Gen5: restore the reset bit after the panel is up. */
		pp |= PANEL_POWER_RESET;
		I915_WRITE(pp_ctrl_reg, pp);
		POSTING_READ(pp_ctrl_reg);
	}
}
2149
/* Locked wrapper around edp_panel_on(); no-op for non-eDP. */
void intel_edp_panel_on(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	pps_lock(intel_dp);
	edp_panel_on(intel_dp);
	pps_unlock(intel_dp);
}
2159
2160
/*
 * Power the eDP panel off. VDD must currently be forced on (warned
 * otherwise); this also drops VDD and releases the associated AUX power
 * domain. Caller must hold pps_mutex.
 */
static void edp_panel_off(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP port %c panel power off\n",
		      port_name(dp_to_dig_port(intel_dp)->port));

	WARN(!intel_dp->want_panel_vdd, "Need eDP port %c VDD to turn off panel\n",
	     port_name(dp_to_dig_port(intel_dp)->port));

	pp = ironlake_get_pp_control(intel_dp);
	/* Clear power, reset, forced VDD and backlight in one write. */
	pp &= ~(PANEL_POWER_ON | PANEL_POWER_RESET | EDP_FORCE_VDD |
		EDP_BLC_ENABLE);

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	intel_dp->want_panel_vdd = false;

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	/* Record when the panel went off for the power-cycle delay. */
	intel_dp->panel_power_off_time = ktime_get_boottime();
	wait_panel_off(intel_dp);

	/* Release the AUX power reference taken when VDD was forced on. */
	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_put(dev_priv, power_domain);
}
2202
/* Locked wrapper around edp_panel_off(); no-op for non-eDP. */
void intel_edp_panel_off(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	pps_lock(intel_dp);
	edp_panel_off(intel_dp);
	pps_unlock(intel_dp);
}
2212
2213
/* Enable the eDP backlight via the panel power sequencer control bit. */
static void _intel_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	/* Honor the required panel-power-on to backlight-on delay first. */
	wait_backlight_on(intel_dp);

	pps_lock(intel_dp);

	pp = ironlake_get_pp_control(intel_dp);
	pp |= EDP_BLC_ENABLE;

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	pps_unlock(intel_dp);
}
2242
2243
/* Enable the panel backlight driver, then the PPS backlight bit. */
void intel_edp_backlight_on(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");

	intel_panel_enable_backlight(intel_dp->attached_connector);
	_intel_edp_backlight_on(intel_dp);
}
2254
2255
/* Disable the eDP backlight via the panel power sequencer control bit. */
static void _intel_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dev);
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	if (!is_edp(intel_dp))
		return;

	pps_lock(intel_dp);

	pp = ironlake_get_pp_control(intel_dp);
	pp &= ~EDP_BLC_ENABLE;

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	I915_WRITE(pp_ctrl_reg, pp);
	POSTING_READ(pp_ctrl_reg);

	pps_unlock(intel_dp);

	/* Record the off time and wait out the backlight-off delay. */
	intel_dp->last_backlight_off = jiffies;
	edp_wait_backlight_off(intel_dp);
}
2281
2282
/* Disable the PPS backlight bit, then the panel backlight driver. */
void intel_edp_backlight_off(struct intel_dp *intel_dp)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");

	_intel_edp_backlight_off(intel_dp);
	intel_panel_disable_backlight(intel_dp->attached_connector);
}
2293
2294
2295
2296
2297
2298static void intel_edp_backlight_power(struct intel_connector *connector,
2299 bool enable)
2300{
2301 struct intel_dp *intel_dp = intel_attached_dp(&connector->base);
2302 bool is_enabled;
2303
2304 pps_lock(intel_dp);
2305 is_enabled = ironlake_get_pp_control(intel_dp) & EDP_BLC_ENABLE;
2306 pps_unlock(intel_dp);
2307
2308 if (is_enabled == enable)
2309 return;
2310
2311 DRM_DEBUG_KMS("panel power control backlight %s\n",
2312 enable ? "enable" : "disable");
2313
2314 if (enable)
2315 _intel_edp_backlight_on(intel_dp);
2316 else
2317 _intel_edp_backlight_off(intel_dp);
2318}
2319
/* Assert that the DP port enable bit matches the expected @state. */
static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = I915_READ(intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"DP port %c state assertion failure (expected %s, current %s)\n",
			port_name(dig_port->port),
			onoff(state), onoff(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)
2332
/* Assert that the eDP PLL enable bit in DP_A matches the expected @state. */
static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = I915_READ(DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
2343
/*
 * Enable the CPU eDP PLL (DP_A). The pipe and port must both be disabled
 * and the PLL off; the frequency select is written (and settles) before
 * the enable bit is set.
 */
static void ironlake_edp_pll_on(struct intel_dp *intel_dp,
				struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, crtc->pipe);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	DRM_DEBUG_KMS("enabling eDP PLL for clock %d\n",
		      pipe_config->port_clock);

	/* Program the PLL frequency select first, before enabling it. */
	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	I915_WRITE(DP_A, intel_dp->DP);
	POSTING_READ(DP_A);
	udelay(500);

	/*
	 * Gen5: wait for a vblank on the other pipe (if active) before
	 * enabling the PLL — presumably a hw workaround; keep the ordering.
	 */
	if (IS_GEN5(dev_priv))
		intel_wait_for_vblank_if_active(&dev_priv->drm, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	I915_WRITE(DP_A, intel_dp->DP);
	POSTING_READ(DP_A);
	udelay(200);
}
2383
/*
 * Disable the CPU eDP PLL (DP_A). The pipe and port must already be
 * disabled and the PLL currently enabled.
 */
static void ironlake_edp_pll_off(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_crtc *crtc = to_intel_crtc(intel_dig_port->base.base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, crtc->pipe);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	DRM_DEBUG_KMS("disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	I915_WRITE(DP_A, intel_dp->DP);
	POSTING_READ(DP_A);
	udelay(200);
}
2402
2403
2404void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
2405{
2406 int ret, i;
2407
2408
2409 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
2410 return;
2411
2412 if (mode != DRM_MODE_DPMS_ON) {
2413 ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER,
2414 DP_SET_POWER_D3);
2415 } else {
2416
2417
2418
2419
2420 for (i = 0; i < 3; i++) {
2421 ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER,
2422 DP_SET_POWER_D0);
2423 if (ret == 1)
2424 break;
2425 msleep(1);
2426 }
2427 }
2428
2429 if (ret != 1)
2430 DRM_DEBUG_KMS("failed to %s sink power state\n",
2431 mode == DRM_MODE_DPMS_ON ? "enable" : "disable");
2432}
2433
/*
 * Read back whether this DP encoder is enabled in hardware and, if so,
 * which pipe it is driving (*pipe). Takes a power-domain reference for
 * the duration of the register reads; returns false if the domain is off.
 */
static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
	enum port port = dp_to_dig_port(intel_dp)->port;
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	u32 tmp;
	bool ret;

	power_domain = intel_display_port_power_domain(encoder);
	if (!intel_display_power_get_if_enabled(dev_priv, power_domain))
		return false;

	ret = false;

	tmp = I915_READ(intel_dp->output_reg);

	if (!(tmp & DP_PORT_EN))
		goto out;

	if (IS_GEN7(dev) && port == PORT_A) {
		*pipe = PORT_TO_PIPE_CPT(tmp);
	} else if (HAS_PCH_CPT(dev) && port != PORT_A) {
		enum pipe p;

		/* On CPT the pipe mapping lives in the transcoder registers. */
		for_each_pipe(dev_priv, p) {
			u32 trans_dp = I915_READ(TRANS_DP_CTL(p));
			if (TRANS_DP_PIPE_TO_PORT(trans_dp) == port) {
				*pipe = p;
				ret = true;

				goto out;
			}
		}

		DRM_DEBUG_KMS("No pipe for dp port 0x%x found\n",
			      i915_mmio_reg_offset(intel_dp->output_reg));
	} else if (IS_CHERRYVIEW(dev)) {
		*pipe = DP_PORT_TO_PIPE_CHV(tmp);
	} else {
		*pipe = PORT_TO_PIPE(tmp);
	}

	ret = true;

out:
	intel_display_power_put(dev_priv, power_domain);

	return ret;
}
2486
/*
 * Read the DP port/transcoder registers back into @pipe_config: sync
 * polarity flags, audio enable, limited color range, lane count, M/N
 * values and the resulting port/dot clocks.
 */
static void intel_dp_get_config(struct intel_encoder *encoder,
 struct intel_crtc_state *pipe_config)
{
 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
 u32 tmp, flags = 0;
 struct drm_device *dev = encoder->base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 enum port port = dp_to_dig_port(intel_dp)->port;
 struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);

 tmp = I915_READ(intel_dp->output_reg);

 /* Port A (eDP) never carries audio. */
 pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

 if (HAS_PCH_CPT(dev) && port != PORT_A) {
 /* On CPT the sync polarities live in the transcoder register. */
 u32 trans_dp = I915_READ(TRANS_DP_CTL(crtc->pipe));

 if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
 flags |= DRM_MODE_FLAG_PHSYNC;
 else
 flags |= DRM_MODE_FLAG_NHSYNC;

 if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
 flags |= DRM_MODE_FLAG_PVSYNC;
 else
 flags |= DRM_MODE_FLAG_NVSYNC;
 } else {
 if (tmp & DP_SYNC_HS_HIGH)
 flags |= DRM_MODE_FLAG_PHSYNC;
 else
 flags |= DRM_MODE_FLAG_NHSYNC;

 if (tmp & DP_SYNC_VS_HIGH)
 flags |= DRM_MODE_FLAG_PVSYNC;
 else
 flags |= DRM_MODE_FLAG_NVSYNC;
 }

 pipe_config->base.adjusted_mode.flags |= flags;

 if (!HAS_PCH_SPLIT(dev) && !IS_VALLEYVIEW(dev) &&
 !IS_CHERRYVIEW(dev) && tmp & DP_COLOR_RANGE_16_235)
 pipe_config->limited_color_range = true;

 pipe_config->lane_count =
 ((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

 intel_dp_get_m_n(crtc, pipe_config);

 if (port == PORT_A) {
 /* Port A only supports the two pre-DDI link rates. */
 if ((I915_READ(DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
 pipe_config->port_clock = 162000;
 else
 pipe_config->port_clock = 270000;
 }

 pipe_config->base.adjusted_mode.crtc_clock =
 intel_dotclock_calculate(pipe_config->port_clock,
 &pipe_config->dp_m_n);

 if (is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
 pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
 /*
 * The BIOS programmed the pipe with more bpp than the VBT
 * claims the eDP panel supports.  Trust the hardware state
 * and raise the cached VBT limit so subsequent modesets
 * stay consistent with what the BIOS set up.
 * NOTE(review): presumably a workaround for buggy VBTs —
 * confirm against the original upstream comment.
 */
 DRM_DEBUG_KMS("pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
 pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
 dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
 }
}
2567
/*
 * Disable the DP output: audio first, then PSR (where handled here),
 * then the eDP backlight/panel power, finally the link on gen < 5.
 * The ordering of the eDP panel-power steps matters.
 */
static void intel_disable_dp(struct intel_encoder *encoder,
 struct intel_crtc_state *old_crtc_state,
 struct drm_connector_state *old_conn_state)
{
 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

 if (old_crtc_state->has_audio)
 intel_audio_codec_disable(encoder);

 if (HAS_PSR(dev_priv) && !HAS_DDI(dev_priv))
 intel_psr_disable(intel_dp);

 /*
 * Hold VDD so the AUX channel stays usable while we put the sink
 * into D3 and turn the panel off.
 */
 intel_edp_panel_vdd_on(intel_dp);
 intel_edp_backlight_off(intel_dp);
 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_OFF);
 intel_edp_panel_off(intel_dp);

 /* On gen4 and earlier the link is dropped here rather than in post-disable. */
 if (INTEL_GEN(dev_priv) < 5)
 intel_dp_link_down(intel_dp);
}
2592
2593static void ilk_post_disable_dp(struct intel_encoder *encoder,
2594 struct intel_crtc_state *old_crtc_state,
2595 struct drm_connector_state *old_conn_state)
2596{
2597 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
2598 enum port port = dp_to_dig_port(intel_dp)->port;
2599
2600 intel_dp_link_down(intel_dp);
2601
2602
2603 if (port == PORT_A)
2604 ironlake_edp_pll_off(intel_dp);
2605}
2606
2607static void vlv_post_disable_dp(struct intel_encoder *encoder,
2608 struct intel_crtc_state *old_crtc_state,
2609 struct drm_connector_state *old_conn_state)
2610{
2611 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
2612
2613 intel_dp_link_down(intel_dp);
2614}
2615
2616static void chv_post_disable_dp(struct intel_encoder *encoder,
2617 struct intel_crtc_state *old_crtc_state,
2618 struct drm_connector_state *old_conn_state)
2619{
2620 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
2621 struct drm_device *dev = encoder->base.dev;
2622 struct drm_i915_private *dev_priv = to_i915(dev);
2623
2624 intel_dp_link_down(intel_dp);
2625
2626 mutex_lock(&dev_priv->sb_lock);
2627
2628
2629 chv_data_lane_soft_reset(encoder, true);
2630
2631 mutex_unlock(&dev_priv->sb_lock);
2632}
2633
/*
 * Program the requested link training pattern.
 *
 * On DDI platforms this writes DP_TP_CTL directly; on older platforms it
 * only updates the caller's cached port register value (*DP) — the caller
 * is responsible for writing it out.  Platforms without TPS3 support fall
 * back to TPS2 with a debug message.
 */
static void
_intel_dp_set_link_train(struct intel_dp *intel_dp,
 uint32_t *DP,
 uint8_t dp_train_pat)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 enum port port = intel_dig_port->port;

 if (dp_train_pat & DP_TRAINING_PATTERN_MASK)
 DRM_DEBUG_KMS("Using DP training pattern TPS%d\n",
 dp_train_pat & DP_TRAINING_PATTERN_MASK);

 if (HAS_DDI(dev)) {
 /* DDI: training pattern lives in DP_TP_CTL, written directly. */
 uint32_t temp = I915_READ(DP_TP_CTL(port));

 if (dp_train_pat & DP_LINK_SCRAMBLING_DISABLE)
 temp |= DP_TP_CTL_SCRAMBLE_DISABLE;
 else
 temp &= ~DP_TP_CTL_SCRAMBLE_DISABLE;

 temp &= ~DP_TP_CTL_LINK_TRAIN_MASK;
 switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
 case DP_TRAINING_PATTERN_DISABLE:
 temp |= DP_TP_CTL_LINK_TRAIN_NORMAL;

 break;
 case DP_TRAINING_PATTERN_1:
 temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
 break;
 case DP_TRAINING_PATTERN_2:
 temp |= DP_TP_CTL_LINK_TRAIN_PAT2;
 break;
 case DP_TRAINING_PATTERN_3:
 temp |= DP_TP_CTL_LINK_TRAIN_PAT3;
 break;
 }
 I915_WRITE(DP_TP_CTL(port), temp);

 } else if ((IS_GEN7(dev) && port == PORT_A) ||
 (HAS_PCH_CPT(dev) && port != PORT_A)) {
 /* CPT-style encoding; only updates the cached value. */
 *DP &= ~DP_LINK_TRAIN_MASK_CPT;

 switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
 case DP_TRAINING_PATTERN_DISABLE:
 *DP |= DP_LINK_TRAIN_OFF_CPT;
 break;
 case DP_TRAINING_PATTERN_1:
 *DP |= DP_LINK_TRAIN_PAT_1_CPT;
 break;
 case DP_TRAINING_PATTERN_2:
 *DP |= DP_LINK_TRAIN_PAT_2_CPT;
 break;
 case DP_TRAINING_PATTERN_3:
 DRM_DEBUG_KMS("TPS3 not supported, using TPS2 instead\n");
 *DP |= DP_LINK_TRAIN_PAT_2_CPT;
 break;
 }

 } else {
 /* Legacy/VLV/CHV encoding; CHV has its own mask and TPS3 bit. */
 if (IS_CHERRYVIEW(dev))
 *DP &= ~DP_LINK_TRAIN_MASK_CHV;
 else
 *DP &= ~DP_LINK_TRAIN_MASK;

 switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
 case DP_TRAINING_PATTERN_DISABLE:
 *DP |= DP_LINK_TRAIN_OFF;
 break;
 case DP_TRAINING_PATTERN_1:
 *DP |= DP_LINK_TRAIN_PAT_1;
 break;
 case DP_TRAINING_PATTERN_2:
 *DP |= DP_LINK_TRAIN_PAT_2;
 break;
 case DP_TRAINING_PATTERN_3:
 if (IS_CHERRYVIEW(dev)) {
 *DP |= DP_LINK_TRAIN_PAT_3_CHV;
 } else {
 DRM_DEBUG_KMS("TPS3 not supported, using TPS2 instead\n");
 *DP |= DP_LINK_TRAIN_PAT_2;
 }
 break;
 }
 }
}
2721
/*
 * Enable the DP port: first program training pattern 1 into the cached
 * register value, then set DP_PORT_EN (plus audio enable if needed) and
 * write the register out in one go.
 */
static void intel_dp_enable_port(struct intel_dp *intel_dp,
 struct intel_crtc_state *old_crtc_state)
{
 struct drm_device *dev = intel_dp_to_dev(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(dev);

 /* Start with training pattern 1 selected in the cached value. */
 intel_dp_program_link_training_pattern(intel_dp, DP_TRAINING_PATTERN_1);

 /*
 * Set the enable (and audio) bits and write the fully-configured
 * value in a single register write.
 * NOTE(review): the original comment here was stripped; there may
 * be additional hardware-ordering rationale — check upstream.
 */
 intel_dp->DP |= DP_PORT_EN;
 if (old_crtc_state->has_audio)
 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

 I915_WRITE(intel_dp->output_reg, intel_dp->DP);
 POSTING_READ(intel_dp->output_reg);
}
2745
/*
 * Common DP enable path: bring up the port and (for eDP) the panel under
 * the PPS lock, wait for the VLV/CHV PHY, wake the sink, train the link
 * and finally enable audio.
 */
static void intel_enable_dp(struct intel_encoder *encoder,
 struct intel_crtc_state *pipe_config)
{
 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
 struct drm_device *dev = encoder->base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
 enum pipe pipe = crtc->pipe;

 /* The port must still be disabled at this point. */
 if (WARN_ON(dp_reg & DP_PORT_EN))
 return;

 pps_lock(intel_dp);

 /* VLV/CHV bind a panel power sequencer to the pipe at enable time. */
 if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
 vlv_init_panel_power_sequencer(intel_dp);

 intel_dp_enable_port(intel_dp, pipe_config);

 /* eDP panel power-up sequence (no-ops for non-eDP). */
 edp_panel_vdd_on(intel_dp);
 edp_panel_on(intel_dp);
 edp_panel_vdd_off(intel_dp, true);

 pps_unlock(intel_dp);

 if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
 unsigned int lane_mask = 0x0;

 if (IS_CHERRYVIEW(dev))
 lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

 vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
 lane_mask);
 }

 /* Wake the sink before training, then drop the training pattern. */
 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
 intel_dp_start_link_train(intel_dp);
 intel_dp_stop_link_train(intel_dp);

 if (pipe_config->has_audio) {
 DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
 pipe_name(pipe));
 intel_audio_codec_enable(encoder);
 }
}
2792
2793static void g4x_enable_dp(struct intel_encoder *encoder,
2794 struct intel_crtc_state *pipe_config,
2795 struct drm_connector_state *conn_state)
2796{
2797 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
2798
2799 intel_enable_dp(encoder, pipe_config);
2800 intel_edp_backlight_on(intel_dp);
2801}
2802
2803static void vlv_enable_dp(struct intel_encoder *encoder,
2804 struct intel_crtc_state *pipe_config,
2805 struct drm_connector_state *conn_state)
2806{
2807 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
2808
2809 intel_edp_backlight_on(intel_dp);
2810 intel_psr_enable(intel_dp);
2811}
2812
2813static void g4x_pre_enable_dp(struct intel_encoder *encoder,
2814 struct intel_crtc_state *pipe_config,
2815 struct drm_connector_state *conn_state)
2816{
2817 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
2818 enum port port = dp_to_dig_port(intel_dp)->port;
2819
2820 intel_dp_prepare(encoder, pipe_config);
2821
2822
2823 if (port == PORT_A)
2824 ironlake_edp_pll_on(intel_dp, pipe_config);
2825}
2826
/*
 * Detach the VLV/CHV panel power sequencer currently bound to this port:
 * sync off any pending VDD work, clear the PP_ON delays register for the
 * sequencer's pipe, and mark the port as having no sequencer.
 */
static void vlv_detach_power_sequencer(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(intel_dig_port->base.base.dev);
 enum pipe pipe = intel_dp->pps_pipe;
 i915_reg_t pp_on_reg = PP_ON_DELAYS(pipe);

 /* Make sure no VDD-off work is still pending before we detach. */
 edp_panel_vdd_off_sync(intel_dp);

 /*
 * Clearing PP_ON_DELAYS disassociates the sequencer from this
 * port.
 * NOTE(review): the original multi-line comment here was stripped;
 * it presumably explained why only PP_ON is cleared — confirm
 * against upstream before relying on that detail.
 */
 DRM_DEBUG_KMS("detaching pipe %c power sequencer from port %c\n",
 pipe_name(pipe), port_name(intel_dig_port->port));
 I915_WRITE(pp_on_reg, 0);
 POSTING_READ(pp_on_reg);

 intel_dp->pps_pipe = INVALID_PIPE;
}
2852
/*
 * Steal the power sequencer of @pipe from whichever eDP port currently
 * owns it, so the caller can bind it to a different port.  Must be called
 * with pps_mutex held.  Warns if the current owner is still active.
 */
static void vlv_steal_power_sequencer(struct drm_device *dev,
 enum pipe pipe)
{
 struct drm_i915_private *dev_priv = to_i915(dev);
 struct intel_encoder *encoder;

 lockdep_assert_held(&dev_priv->pps_mutex);

 /* Only pipes A and B have power sequencers on VLV/CHV. */
 if (WARN_ON(pipe != PIPE_A && pipe != PIPE_B))
 return;

 for_each_intel_encoder(dev, encoder) {
 struct intel_dp *intel_dp;
 enum port port;

 if (encoder->type != INTEL_OUTPUT_EDP)
 continue;

 intel_dp = enc_to_intel_dp(&encoder->base);
 port = dp_to_dig_port(intel_dp)->port;

 if (intel_dp->pps_pipe != pipe)
 continue;

 DRM_DEBUG_KMS("stealing pipe %c power sequencer from port %c\n",
 pipe_name(pipe), port_name(port));

 /* Stealing from an actively-driven port is a driver bug. */
 WARN(encoder->base.crtc,
 "stealing pipe %c power sequencer from active eDP port %c\n",
 pipe_name(pipe), port_name(port));

 vlv_detach_power_sequencer(intel_dp);
 }
}
2888
/*
 * Bind the power sequencer of the crtc's pipe to this eDP port: detach
 * any sequencer this port already holds, steal the target pipe's
 * sequencer from its current owner, then (re)program the PPS delays.
 * Must be called with pps_mutex held.
 */
static void vlv_init_panel_power_sequencer(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct intel_encoder *encoder = &intel_dig_port->base;
 struct drm_device *dev = encoder->base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);

 lockdep_assert_held(&dev_priv->pps_mutex);

 /* Only eDP ports use a panel power sequencer. */
 if (!is_edp(intel_dp))
 return;

 /* Nothing to do if we already own the right sequencer. */
 if (intel_dp->pps_pipe == crtc->pipe)
 return;

 /*
 * This port currently owns a different pipe's sequencer;
 * release it before taking the new one.
 */
 if (intel_dp->pps_pipe != INVALID_PIPE)
 vlv_detach_power_sequencer(intel_dp);

 /*
 * Take the target pipe's sequencer away from whichever port
 * holds it now.
 */
 vlv_steal_power_sequencer(dev, crtc->pipe);

 /* Record the new ownership. */
 intel_dp->pps_pipe = crtc->pipe;

 DRM_DEBUG_KMS("initializing pipe %c power sequencer for port %c\n",
 pipe_name(intel_dp->pps_pipe), port_name(intel_dig_port->port));

 /* Reprogram the sequencer timings for the newly-acquired pipe. */
 intel_dp_init_panel_power_sequencer(dev, intel_dp);
 intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);
}
2929
/* VLV pre-enable: PHY setup first, then the common DP enable sequence. */
static void vlv_pre_enable_dp(struct intel_encoder *encoder,
 struct intel_crtc_state *pipe_config,
 struct drm_connector_state *conn_state)
{
 vlv_phy_pre_encoder_enable(encoder);
 intel_enable_dp(encoder, pipe_config);
}
2938
/* VLV pre-PLL: program the port registers, then ready the PHY for the PLL. */
static void vlv_dp_pre_pll_enable(struct intel_encoder *encoder,
 struct intel_crtc_state *pipe_config,
 struct drm_connector_state *conn_state)
{
 intel_dp_prepare(encoder, pipe_config);
 vlv_phy_pre_pll_enable(encoder);
}
2947
/*
 * CHV pre-enable: PHY setup, the common enable sequence, and finally
 * release of the CL2 power-down override taken by the PHY code.
 */
static void chv_pre_enable_dp(struct intel_encoder *encoder,
 struct intel_crtc_state *pipe_config,
 struct drm_connector_state *conn_state)
{
 chv_phy_pre_encoder_enable(encoder);

 intel_enable_dp(encoder, pipe_config);

 chv_phy_release_cl2_override(encoder);
}
2959
/* CHV pre-PLL: program the port registers, then ready the PHY for the PLL. */
static void chv_dp_pre_pll_enable(struct intel_encoder *encoder,
 struct intel_crtc_state *pipe_config,
 struct drm_connector_state *conn_state)
{
 intel_dp_prepare(encoder, pipe_config);
 chv_phy_pre_pll_enable(encoder);
}
2968
/* CHV post-PLL-disable: forward straight to the PHY teardown helper. */
static void chv_dp_post_pll_disable(struct intel_encoder *encoder,
 struct intel_crtc_state *pipe_config,
 struct drm_connector_state *conn_state)
{
 chv_phy_post_pll_disable(encoder);
}
2975
2976
2977
2978
2979
2980bool
2981intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
2982{
2983 return drm_dp_dpcd_read(&intel_dp->aux, DP_LANE0_1_STATUS, link_status,
2984 DP_LINK_STATUS_SIZE) == DP_LINK_STATUS_SIZE;
2985}
2986
2987
/*
 * Return the maximum voltage-swing level the source hardware supports
 * for this port on this platform.  The branch order matters: BXT and
 * gen9+ are checked before the older-platform fallbacks.
 */
uint8_t
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
 struct drm_device *dev = intel_dp_to_dev(intel_dp);
 struct drm_i915_private *dev_priv = to_i915(dev);
 enum port port = dp_to_dig_port(intel_dp)->port;

 if (IS_BROXTON(dev))
 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 else if (INTEL_INFO(dev)->gen >= 9) {
 /* Level 3 only with VBT low-vswing on the eDP (A) port. */
 if (dev_priv->vbt.edp.low_vswing && port == PORT_A)
 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
 } else if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev))
 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 else if (IS_GEN7(dev) && port == PORT_A)
 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
 else if (HAS_PCH_CPT(dev) && port != PORT_A)
 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
 else
 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}
3010
/*
 * Return the maximum pre-emphasis level usable with @voltage_swing on
 * this platform/port.  Across all tables, higher swing allows less
 * pre-emphasis; unknown swing levels fall back to level 0.
 */
uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
 struct drm_device *dev = intel_dp_to_dev(intel_dp);
 enum port port = dp_to_dig_port(intel_dp)->port;

 if (INTEL_INFO(dev)->gen >= 9) {
 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 return DP_TRAIN_PRE_EMPH_LEVEL_3;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 return DP_TRAIN_PRE_EMPH_LEVEL_2;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 return DP_TRAIN_PRE_EMPH_LEVEL_1;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
 return DP_TRAIN_PRE_EMPH_LEVEL_0;
 default:
 return DP_TRAIN_PRE_EMPH_LEVEL_0;
 }
 } else if (IS_HASWELL(dev) || IS_BROADWELL(dev)) {
 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 return DP_TRAIN_PRE_EMPH_LEVEL_3;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 return DP_TRAIN_PRE_EMPH_LEVEL_2;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 return DP_TRAIN_PRE_EMPH_LEVEL_1;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
 default:
 return DP_TRAIN_PRE_EMPH_LEVEL_0;
 }
 } else if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 return DP_TRAIN_PRE_EMPH_LEVEL_3;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 return DP_TRAIN_PRE_EMPH_LEVEL_2;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 return DP_TRAIN_PRE_EMPH_LEVEL_1;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
 default:
 return DP_TRAIN_PRE_EMPH_LEVEL_0;
 }
 } else if (IS_GEN7(dev) && port == PORT_A) {
 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 return DP_TRAIN_PRE_EMPH_LEVEL_2;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 return DP_TRAIN_PRE_EMPH_LEVEL_1;
 default:
 return DP_TRAIN_PRE_EMPH_LEVEL_0;
 }
 } else {
 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 return DP_TRAIN_PRE_EMPH_LEVEL_2;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 return DP_TRAIN_PRE_EMPH_LEVEL_2;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 return DP_TRAIN_PRE_EMPH_LEVEL_1;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
 default:
 return DP_TRAIN_PRE_EMPH_LEVEL_0;
 }
 }
}
3078
/*
 * Translate the negotiated swing/pre-emphasis of lane 0 into the VLV PHY
 * register values and program them via vlv_set_phy_signal_level().  The
 * hex constants are opaque tuning values; an unsupported swing/emphasis
 * combination bails out without touching the PHY.  Always returns 0
 * (VLV does not use the returned signal-levels bits).
 */
static uint32_t vlv_signal_levels(struct intel_dp *intel_dp)
{
 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 unsigned long demph_reg_value, preemph_reg_value,
 uniqtranscale_reg_value;
 uint8_t train_set = intel_dp->train_set[0];

 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
 case DP_TRAIN_PRE_EMPH_LEVEL_0:
 preemph_reg_value = 0x0004000;
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 demph_reg_value = 0x2B405555;
 uniqtranscale_reg_value = 0x552AB83A;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 demph_reg_value = 0x2B404040;
 uniqtranscale_reg_value = 0x5548B83A;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 demph_reg_value = 0x2B245555;
 uniqtranscale_reg_value = 0x5560B83A;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
 demph_reg_value = 0x2B405555;
 uniqtranscale_reg_value = 0x5598DA3A;
 break;
 default:
 return 0;
 }
 break;
 case DP_TRAIN_PRE_EMPH_LEVEL_1:
 preemph_reg_value = 0x0002000;
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 demph_reg_value = 0x2B404040;
 uniqtranscale_reg_value = 0x5552B83A;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 demph_reg_value = 0x2B404848;
 uniqtranscale_reg_value = 0x5580B83A;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 demph_reg_value = 0x2B404040;
 uniqtranscale_reg_value = 0x55ADDA3A;
 break;
 default:
 return 0;
 }
 break;
 case DP_TRAIN_PRE_EMPH_LEVEL_2:
 preemph_reg_value = 0x0000000;
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 demph_reg_value = 0x2B305555;
 uniqtranscale_reg_value = 0x5570B83A;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 demph_reg_value = 0x2B2B4040;
 uniqtranscale_reg_value = 0x55ADDA3A;
 break;
 default:
 return 0;
 }
 break;
 case DP_TRAIN_PRE_EMPH_LEVEL_3:
 preemph_reg_value = 0x0006000;
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 demph_reg_value = 0x1B405555;
 uniqtranscale_reg_value = 0x55ADDA3A;
 break;
 default:
 return 0;
 }
 break;
 default:
 return 0;
 }

 vlv_set_phy_signal_level(encoder, demph_reg_value, preemph_reg_value,
 uniqtranscale_reg_value, 0);

 return 0;
}
3164
/*
 * CHV equivalent of vlv_signal_levels(): map lane-0 swing/pre-emphasis
 * to de-emphasis and margin register values and program the PHY.  The
 * unique transition scale is only enabled for the max-swing/zero-emphasis
 * entry.  Always returns 0 (no signal-levels bits for the port register).
 */
static uint32_t chv_signal_levels(struct intel_dp *intel_dp)
{
 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
 u32 deemph_reg_value, margin_reg_value;
 bool uniq_trans_scale = false;
 uint8_t train_set = intel_dp->train_set[0];

 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
 case DP_TRAIN_PRE_EMPH_LEVEL_0:
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 deemph_reg_value = 128;
 margin_reg_value = 52;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 deemph_reg_value = 128;
 margin_reg_value = 77;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 deemph_reg_value = 128;
 margin_reg_value = 102;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
 deemph_reg_value = 128;
 margin_reg_value = 154;
 uniq_trans_scale = true;
 break;
 default:
 return 0;
 }
 break;
 case DP_TRAIN_PRE_EMPH_LEVEL_1:
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 deemph_reg_value = 85;
 margin_reg_value = 78;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 deemph_reg_value = 85;
 margin_reg_value = 116;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
 deemph_reg_value = 85;
 margin_reg_value = 154;
 break;
 default:
 return 0;
 }
 break;
 case DP_TRAIN_PRE_EMPH_LEVEL_2:
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 deemph_reg_value = 64;
 margin_reg_value = 104;
 break;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
 deemph_reg_value = 64;
 margin_reg_value = 154;
 break;
 default:
 return 0;
 }
 break;
 case DP_TRAIN_PRE_EMPH_LEVEL_3:
 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
 deemph_reg_value = 43;
 margin_reg_value = 154;
 break;
 default:
 return 0;
 }
 break;
 default:
 return 0;
 }

 chv_set_phy_signal_level(encoder, deemph_reg_value,
 margin_reg_value, uniq_trans_scale);

 return 0;
}
3247
3248static uint32_t
3249gen4_signal_levels(uint8_t train_set)
3250{
3251 uint32_t signal_levels = 0;
3252
3253 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
3254 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3255 default:
3256 signal_levels |= DP_VOLTAGE_0_4;
3257 break;
3258 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3259 signal_levels |= DP_VOLTAGE_0_6;
3260 break;
3261 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3262 signal_levels |= DP_VOLTAGE_0_8;
3263 break;
3264 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
3265 signal_levels |= DP_VOLTAGE_1_2;
3266 break;
3267 }
3268 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
3269 case DP_TRAIN_PRE_EMPH_LEVEL_0:
3270 default:
3271 signal_levels |= DP_PRE_EMPHASIS_0;
3272 break;
3273 case DP_TRAIN_PRE_EMPH_LEVEL_1:
3274 signal_levels |= DP_PRE_EMPHASIS_3_5;
3275 break;
3276 case DP_TRAIN_PRE_EMPH_LEVEL_2:
3277 signal_levels |= DP_PRE_EMPHASIS_6;
3278 break;
3279 case DP_TRAIN_PRE_EMPH_LEVEL_3:
3280 signal_levels |= DP_PRE_EMPHASIS_9_5;
3281 break;
3282 }
3283 return signal_levels;
3284}
3285
3286
/*
 * Map combined DPCD swing+pre-emphasis onto the SNB eDP (port A)
 * register encoding.  SNB groups several DPCD combinations onto shared
 * register values; unsupported combinations fall back to the 400/600mV
 * 0dB setting with a debug message.
 */
static uint32_t
gen6_edp_signal_levels(uint8_t train_set)
{
 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
 DP_TRAIN_PRE_EMPHASIS_MASK);
 switch (signal_levels) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
 default:
 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
 "0x%x\n", signal_levels);
 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
 }
}
3313
3314
/*
 * Map combined DPCD swing+pre-emphasis onto the IVB eDP (port A)
 * register encoding.  Unsupported combinations fall back to the 500mV
 * 0dB setting with a debug message.
 */
static uint32_t
gen7_edp_signal_levels(uint8_t train_set)
{
 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
 DP_TRAIN_PRE_EMPHASIS_MASK);
 switch (signal_levels) {
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
 return EDP_LINK_TRAIN_400MV_0DB_IVB;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
 return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
 return EDP_LINK_TRAIN_400MV_6DB_IVB;

 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
 return EDP_LINK_TRAIN_600MV_0DB_IVB;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
 return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
 return EDP_LINK_TRAIN_800MV_0DB_IVB;
 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
 return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

 default:
 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
 "0x%x\n", signal_levels);
 /* NOTE(review): 500MV default differs from the table entries above. */
 return EDP_LINK_TRAIN_500MV_0DB_IVB;
 }
}
3344
/*
 * Compute and apply the voltage-swing / pre-emphasis settings from the
 * current train_set.  On VLV/CHV the helpers program the PHY directly
 * and the mask stays 0; on the other platforms the returned bits are
 * merged into the cached port register under @mask and written out.
 */
void
intel_dp_set_signal_levels(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 enum port port = intel_dig_port->port;
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 uint32_t signal_levels, mask = 0;
 uint8_t train_set = intel_dp->train_set[0];

 if (HAS_DDI(dev)) {
 signal_levels = ddi_signal_levels(intel_dp);

 /* BXT programs levels elsewhere; nothing to merge here. */
 if (IS_BROXTON(dev))
 signal_levels = 0;
 else
 mask = DDI_BUF_EMP_MASK;
 } else if (IS_CHERRYVIEW(dev)) {
 signal_levels = chv_signal_levels(intel_dp);
 } else if (IS_VALLEYVIEW(dev)) {
 signal_levels = vlv_signal_levels(intel_dp);
 } else if (IS_GEN7(dev) && port == PORT_A) {
 signal_levels = gen7_edp_signal_levels(train_set);
 mask = EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
 } else if (IS_GEN6(dev) && port == PORT_A) {
 signal_levels = gen6_edp_signal_levels(train_set);
 mask = EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
 } else {
 signal_levels = gen4_signal_levels(train_set);
 mask = DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK;
 }

 if (mask)
 DRM_DEBUG_KMS("Using signal levels %08x\n", signal_levels);

 DRM_DEBUG_KMS("Using vswing level %d\n",
 train_set & DP_TRAIN_VOLTAGE_SWING_MASK);
 DRM_DEBUG_KMS("Using pre-emphasis level %d\n",
 (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) >>
 DP_TRAIN_PRE_EMPHASIS_SHIFT);

 intel_dp->DP = (intel_dp->DP & ~mask) | signal_levels;

 I915_WRITE(intel_dp->output_reg, intel_dp->DP);
 POSTING_READ(intel_dp->output_reg);
}
3391
3392void
3393intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
3394 uint8_t dp_train_pat)
3395{
3396 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
3397 struct drm_i915_private *dev_priv =
3398 to_i915(intel_dig_port->base.base.dev);
3399
3400 _intel_dp_set_link_train(intel_dp, &intel_dp->DP, dp_train_pat);
3401
3402 I915_WRITE(intel_dp->output_reg, intel_dp->DP);
3403 POSTING_READ(intel_dp->output_reg);
3404}
3405
3406void intel_dp_set_idle_link_train(struct intel_dp *intel_dp)
3407{
3408 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
3409 struct drm_device *dev = intel_dig_port->base.base.dev;
3410 struct drm_i915_private *dev_priv = to_i915(dev);
3411 enum port port = intel_dig_port->port;
3412 uint32_t val;
3413
3414 if (!HAS_DDI(dev))
3415 return;
3416
3417 val = I915_READ(DP_TP_CTL(port));
3418 val &= ~DP_TP_CTL_LINK_TRAIN_MASK;
3419 val |= DP_TP_CTL_LINK_TRAIN_IDLE;
3420 I915_WRITE(DP_TP_CTL(port), val);
3421
3422
3423
3424
3425
3426
3427
3428
3429 if (port == PORT_A)
3430 return;
3431
3432 if (intel_wait_for_register(dev_priv,DP_TP_STATUS(port),
3433 DP_TP_STATUS_IDLE_DONE,
3434 DP_TP_STATUS_IDLE_DONE,
3435 1))
3436 DRM_ERROR("Timed out waiting for DP idle patterns\n");
3437}
3438
/*
 * Take the DP link down on non-DDI platforms: switch to the idle
 * training pattern, then clear the port-enable and audio bits.  On IBX
 * with the port on pipe B, an extra enable/disable dance on pipe A is
 * performed with FIFO underrun reporting temporarily suppressed.
 */
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 struct intel_crtc *crtc = to_intel_crtc(intel_dig_port->base.base.crtc);
 enum port port = intel_dig_port->port;
 struct drm_device *dev = intel_dig_port->base.base.dev;
 struct drm_i915_private *dev_priv = to_i915(dev);
 uint32_t DP = intel_dp->DP;

 /* DDI platforms handle link teardown elsewhere. */
 if (WARN_ON(HAS_DDI(dev)))
 return;

 if (WARN_ON((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0))
 return;

 DRM_DEBUG_KMS("\n");

 /* First select the idle pattern in the platform-specific encoding. */
 if ((IS_GEN7(dev) && port == PORT_A) ||
 (HAS_PCH_CPT(dev) && port != PORT_A)) {
 DP &= ~DP_LINK_TRAIN_MASK_CPT;
 DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
 } else {
 if (IS_CHERRYVIEW(dev))
 DP &= ~DP_LINK_TRAIN_MASK_CHV;
 else
 DP &= ~DP_LINK_TRAIN_MASK;
 DP |= DP_LINK_TRAIN_PAT_IDLE;
 }
 I915_WRITE(intel_dp->output_reg, DP);
 POSTING_READ(intel_dp->output_reg);

 /* Then actually disable the port (and audio) in a second write. */
 DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
 I915_WRITE(intel_dp->output_reg, DP);
 POSTING_READ(intel_dp->output_reg);

 /*
 * IBX + pipe B workaround: briefly re-enable the port on pipe A
 * with training pattern 1, then disable it again.
 * NOTE(review): the original comment explaining the underlying HW
 * erratum was stripped — consult upstream for the full rationale.
 */
 if (HAS_PCH_IBX(dev) && crtc->pipe == PIPE_B && port != PORT_A) {
 /*
 * The dance below can trigger spurious FIFO underruns on
 * pipe A, so suppress reporting around it.
 */
 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

 /* Re-enable on pipe A with TPS1... */
 DP &= ~(DP_PIPEB_SELECT | DP_LINK_TRAIN_MASK);
 DP |= DP_PORT_EN | DP_LINK_TRAIN_PAT_1;
 I915_WRITE(intel_dp->output_reg, DP);
 POSTING_READ(intel_dp->output_reg);

 /* ...and disable again. */
 DP &= ~DP_PORT_EN;
 I915_WRITE(intel_dp->output_reg, DP);
 POSTING_READ(intel_dp->output_reg);

 intel_wait_for_vblank_if_active(&dev_priv->drm, PIPE_A);
 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
 }

 msleep(intel_dp->panel_power_down_delay);

 intel_dp->DP = DP;
}
3507
3508static bool
3509intel_dp_read_dpcd(struct intel_dp *intel_dp)
3510{
3511 if (drm_dp_dpcd_read(&intel_dp->aux, 0x000, intel_dp->dpcd,
3512 sizeof(intel_dp->dpcd)) < 0)
3513 return false;
3514
3515 DRM_DEBUG_KMS("DPCD: %*ph\n", (int) sizeof(intel_dp->dpcd), intel_dp->dpcd);
3516
3517 return intel_dp->dpcd[DP_DPCD_REV] != 0;
3518}
3519
/*
 * One-time DPCD initialization for eDP panels: read the base caps, the
 * PSR capability block, the eDP display-control block, and (for eDP 1.4+)
 * the table of supported link rates.  Returns false only if the base
 * DPCD read fails.
 */
static bool
intel_edp_init_dpcd(struct intel_dp *intel_dp)
{
 struct drm_i915_private *dev_priv =
 to_i915(dp_to_dig_port(intel_dp)->base.base.dev);

 /* This should only run once, before any DPCD has been read. */
 WARN_ON(intel_dp->dpcd[DP_DPCD_REV] != 0);

 if (!intel_dp_read_dpcd(intel_dp))
 return false;

 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
 dev_priv->no_aux_handshake = intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
 DP_NO_AUX_HANDSHAKE_LINK_TRAINING;

 /* Probe the sink's PSR capabilities. */
 drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT,
 intel_dp->psr_dpcd,
 sizeof(intel_dp->psr_dpcd));
 if (intel_dp->psr_dpcd[0] & DP_PSR_IS_SUPPORTED) {
 dev_priv->psr.sink_support = true;
 DRM_DEBUG_KMS("Detected EDP PSR Panel.\n");
 }

 /* PSR2 additionally requires AUX frame sync support on gen9+. */
 if (INTEL_GEN(dev_priv) >= 9 &&
 (intel_dp->psr_dpcd[0] & DP_PSR2_IS_SUPPORTED)) {
 uint8_t frame_sync_cap;

 dev_priv->psr.sink_support = true;
 drm_dp_dpcd_read(&intel_dp->aux,
 DP_SINK_DEVICE_AUX_FRAME_SYNC_CAP,
 &frame_sync_cap, 1);
 dev_priv->psr.aux_frame_sync = frame_sync_cap ? true : false;
 /* PSR2 is enabled only when AUX frame sync is available. */
 dev_priv->psr.psr2_support = dev_priv->psr.aux_frame_sync;
 DRM_DEBUG_KMS("PSR2 %s on sink",
 dev_priv->psr.psr2_support ? "supported" : "not supported");
 }

 /* Read the eDP display-control block if the sink advertises it. */
 if ((intel_dp->dpcd[DP_EDP_CONFIGURATION_CAP] & DP_DPCD_DISPLAY_CONTROL_CAPABLE) &&
 drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV,
 intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) ==
 sizeof(intel_dp->edp_dpcd))
 DRM_DEBUG_KMS("EDP DPCD : %*ph\n", (int) sizeof(intel_dp->edp_dpcd),
 intel_dp->edp_dpcd);

 /* eDP 1.4+ (rev >= 0x03) exposes an explicit link-rate table. */
 if (intel_dp->edp_dpcd[0] >= 0x03) {
 __le16 sink_rates[DP_MAX_SUPPORTED_RATES];
 int i;

 drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
 sink_rates, sizeof(sink_rates));

 for (i = 0; i < ARRAY_SIZE(sink_rates); i++) {
 int val = le16_to_cpu(sink_rates[i]);

 /* A zero entry terminates the rate table. */
 if (val == 0)
 break;

 /* Table entries are in units of 200 kHz; store kHz. */
 intel_dp->sink_rates[i] = (val * 200) / 10;
 }
 intel_dp->num_sink_rates = i;
 }

 return true;
}
3590
3591
/*
 * Refresh the cached DPCD, sink count and (if present) downstream port
 * descriptors.  Returns false when the sink is gone or unreadable —
 * including the "branch device with no display behind it" case — so the
 * caller can report disconnected.
 */
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	if (!intel_dp_read_dpcd(intel_dp))
		return false;

	if (drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT,
			     &intel_dp->sink_count, 1) < 0)
		return false;

	/*
	 * Sink count can change between short-pulse interrupts, so cache the
	 * masked value in intel_dp to track changes (see intel_dp_short_pulse).
	 */
	intel_dp->sink_count = DP_GET_SINK_COUNT(intel_dp->sink_count);

	/*
	 * SINK_COUNT == 0 on a non-eDP port means a branch device may be
	 * present but has no display attached; treat that as "nothing
	 * connected" and skip the remaining downstream-port work.
	 */
	if (!is_edp(intel_dp) && !intel_dp->sink_count)
		return false;

	/* No downstream port: a native DP sink, nothing more to read. */
	if (!(intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
	      DP_DWN_STRM_PORT_PRESENT))
		return true;

	/* DPCD 1.0 has no per-port downstream descriptors. */
	if (intel_dp->dpcd[DP_DPCD_REV] == 0x10)
		return true;

	if (drm_dp_dpcd_read(&intel_dp->aux, DP_DOWNSTREAM_PORT_0,
			     intel_dp->downstream_ports,
			     DP_MAX_DOWNSTREAM_PORTS) < 0)
		return false;

	return true;
}
3633
3634static void
3635intel_dp_probe_oui(struct intel_dp *intel_dp)
3636{
3637 u8 buf[3];
3638
3639 if (!(intel_dp->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
3640 return;
3641
3642 if (drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_OUI, buf, 3) == 3)
3643 DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
3644 buf[0], buf[1], buf[2]);
3645
3646 if (drm_dp_dpcd_read(&intel_dp->aux, DP_BRANCH_OUI, buf, 3) == 3)
3647 DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
3648 buf[0], buf[1], buf[2]);
3649}
3650
3651static bool
3652intel_dp_can_mst(struct intel_dp *intel_dp)
3653{
3654 u8 buf[1];
3655
3656 if (!i915.enable_dp_mst)
3657 return false;
3658
3659 if (!intel_dp->can_mst)
3660 return false;
3661
3662 if (intel_dp->dpcd[DP_DPCD_REV] < 0x12)
3663 return false;
3664
3665 if (drm_dp_dpcd_read(&intel_dp->aux, DP_MSTM_CAP, buf, 1) != 1)
3666 return false;
3667
3668 return buf[0] & DP_MST_CAP;
3669}
3670
3671static void
3672intel_dp_configure_mst(struct intel_dp *intel_dp)
3673{
3674 if (!i915.enable_dp_mst)
3675 return;
3676
3677 if (!intel_dp->can_mst)
3678 return;
3679
3680 intel_dp->is_mst = intel_dp_can_mst(intel_dp);
3681
3682 if (intel_dp->is_mst)
3683 DRM_DEBUG_KMS("Sink is MST capable\n");
3684 else
3685 DRM_DEBUG_KMS("Sink is not MST capable\n");
3686
3687 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
3688 intel_dp->is_mst);
3689}
3690
/*
 * Stop sink CRC calculation (clear DP_TEST_SINK_START) and wait up to ten
 * vblanks for the sink's CRC count to drain back to zero.  Returns 0 on
 * success, -EIO on AUX failure, -ETIMEDOUT if the count never clears.
 *
 * NOTE(review): the out: path re-enables IPS unconditionally, even on the
 * call from intel_dp_sink_crc_start() made *before* IPS was disabled —
 * presumably harmless, but worth confirming.
 */
static int intel_dp_sink_crc_stop(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(dig_port->base.base.crtc);
	u8 buf;
	int ret = 0;
	int count = 0;
	int attempts = 10;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_SINK, &buf) < 0) {
		DRM_DEBUG_KMS("Sink CRC couldn't be stopped properly\n");
		ret = -EIO;
		goto out;
	}

	/* Clear only the start bit, preserving the other TEST_SINK bits. */
	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_SINK,
			       buf & ~DP_TEST_SINK_START) < 0) {
		DRM_DEBUG_KMS("Sink CRC couldn't be stopped properly\n");
		ret = -EIO;
		goto out;
	}

	/* Poll once per vblank until TEST_SINK_MISC reports zero CRCs pending. */
	do {
		intel_wait_for_vblank(dev, intel_crtc->pipe);

		if (drm_dp_dpcd_readb(&intel_dp->aux,
				      DP_TEST_SINK_MISC, &buf) < 0) {
			ret = -EIO;
			goto out;
		}
		count = buf & DP_TEST_COUNT_MASK;
	} while (--attempts && count);

	if (attempts == 0) {
		DRM_DEBUG_KMS("TIMEOUT: Sink CRC counter is not zeroed after calculation is stopped\n");
		ret = -ETIMEDOUT;
	}

 out:
	hsw_enable_ips(intel_crtc);
	return ret;
}
3734
/*
 * Arm sink CRC calculation: check the sink supports TEST_CRC, stop any
 * run already in progress, disable IPS (which would perturb the pipe
 * output) and set DP_TEST_SINK_START.  Waits one vblank so the first CRC
 * is based on a full frame.  Returns 0 or a negative errno.
 */
static int intel_dp_sink_crc_start(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(dig_port->base.base.crtc);
	u8 buf;
	int ret;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_SINK_MISC, &buf) < 0)
		return -EIO;

	if (!(buf & DP_TEST_CRC_SUPPORTED))
		return -ENOTTY;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_SINK, &buf) < 0)
		return -EIO;

	/* A previous CRC run may still be active; stop it cleanly first. */
	if (buf & DP_TEST_SINK_START) {
		ret = intel_dp_sink_crc_stop(intel_dp);
		if (ret)
			return ret;
	}

	hsw_disable_ips(intel_crtc);

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_SINK,
			       buf | DP_TEST_SINK_START) < 0) {
		hsw_enable_ips(intel_crtc);
		return -EIO;
	}

	intel_wait_for_vblank(dev, intel_crtc->pipe);
	return 0;
}
3769
/*
 * intel_dp_sink_crc - read a 6-byte frame CRC from the sink into @crc.
 *
 * Starts sink CRC calculation, polls TEST_SINK_MISC for up to six vblanks
 * until the sink reports at least one CRC available, reads the CRC
 * registers, then stops calculation again.  Returns 0 on success or a
 * negative errno (-EIO on AUX failure, -ETIMEDOUT if no CRC appears).
 */
int intel_dp_sink_crc(struct intel_dp *intel_dp, u8 *crc)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(dig_port->base.base.crtc);
	u8 buf;
	int count, ret;
	int attempts = 6;

	ret = intel_dp_sink_crc_start(intel_dp);
	if (ret)
		return ret;

	/* Wait for the sink to accumulate at least one CRC. */
	do {
		intel_wait_for_vblank(dev, intel_crtc->pipe);

		if (drm_dp_dpcd_readb(&intel_dp->aux,
				      DP_TEST_SINK_MISC, &buf) < 0) {
			ret = -EIO;
			goto stop;
		}
		count = buf & DP_TEST_COUNT_MASK;

	} while (--attempts && count == 0);

	if (attempts == 0) {
		DRM_ERROR("Panel is unable to calculate any CRC after 6 vblanks\n");
		ret = -ETIMEDOUT;
		goto stop;
	}

	/* 6 bytes: R/Cr, G/Y, B/Cb CRCs starting at DP_TEST_CRC_R_CR. */
	if (drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_CRC_R_CR, crc, 6) < 0) {
		ret = -EIO;
		goto stop;
	}

stop:
	intel_dp_sink_crc_stop(intel_dp);
	return ret;
}
3810
3811static bool
3812intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
3813{
3814 return drm_dp_dpcd_read(&intel_dp->aux,
3815 DP_DEVICE_SERVICE_IRQ_VECTOR,
3816 sink_irq_vector, 1) == 1;
3817}
3818
3819static bool
3820intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *sink_irq_vector)
3821{
3822 int ret;
3823
3824 ret = drm_dp_dpcd_read(&intel_dp->aux,
3825 DP_SINK_COUNT_ESI,
3826 sink_irq_vector, 14);
3827 if (ret != 14)
3828 return false;
3829
3830 return true;
3831}
3832
3833static uint8_t intel_dp_autotest_link_training(struct intel_dp *intel_dp)
3834{
3835 uint8_t test_result = DP_TEST_ACK;
3836 return test_result;
3837}
3838
3839static uint8_t intel_dp_autotest_video_pattern(struct intel_dp *intel_dp)
3840{
3841 uint8_t test_result = DP_TEST_NAK;
3842 return test_result;
3843}
3844
/*
 * Handle the DP compliance EDID-read test: write the checksum of the last
 * EDID block read back to the sink and select the compliance resolution —
 * FAILSAFE when the EDID read failed, was corrupt, or had excessive
 * DEFERs; STANDARD otherwise.
 */
static uint8_t intel_dp_autotest_edid(struct intel_dp *intel_dp)
{
	uint8_t test_result = DP_TEST_NAK;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_connector *connector = &intel_connector->base;

	if (intel_connector->detect_edid == NULL ||
	    connector->edid_corrupt ||
	    intel_dp->aux.i2c_defer_count > 6) {
		/*
		 * EDID read failed, was corrupt, or saw too many DEFERs:
		 * log the NACK/DEFER counts and fall back to the failsafe
		 * compliance resolution.
		 */
		if (intel_dp->aux.i2c_nack_count > 0 ||
			intel_dp->aux.i2c_defer_count > 0)
			DRM_DEBUG_KMS("EDID read had %d NACKs, %d DEFERs\n",
				      intel_dp->aux.i2c_nack_count,
				      intel_dp->aux.i2c_defer_count);
		intel_dp->compliance_test_data = INTEL_DP_RESOLUTION_FAILSAFE;
	} else {
		struct edid *block = intel_connector->detect_edid;

		/* The checksum written back must be that of the LAST block
		 * read, so skip past the extension blocks. */
		block += intel_connector->detect_edid->extensions;

		if (!drm_dp_dpcd_write(&intel_dp->aux,
					DP_TEST_EDID_CHECKSUM,
					&block->checksum,
					1))
			DRM_DEBUG_KMS("Failed to write EDID checksum\n");

		test_result = DP_TEST_ACK | DP_TEST_EDID_CHECKSUM_WRITE;
		intel_dp->compliance_test_data = INTEL_DP_RESOLUTION_STANDARD;
	}

	/* Mark the compliance test active so later probes know it's pending. */
	intel_dp->compliance_test_active = 1;

	return test_result;
}
3890
3891static uint8_t intel_dp_autotest_phy_pattern(struct intel_dp *intel_dp)
3892{
3893 uint8_t test_result = DP_TEST_NAK;
3894 return test_result;
3895}
3896
3897static void intel_dp_handle_test_request(struct intel_dp *intel_dp)
3898{
3899 uint8_t response = DP_TEST_NAK;
3900 uint8_t rxdata = 0;
3901 int status = 0;
3902
3903 status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_REQUEST, &rxdata, 1);
3904 if (status <= 0) {
3905 DRM_DEBUG_KMS("Could not read test request from sink\n");
3906 goto update_status;
3907 }
3908
3909 switch (rxdata) {
3910 case DP_TEST_LINK_TRAINING:
3911 DRM_DEBUG_KMS("LINK_TRAINING test requested\n");
3912 intel_dp->compliance_test_type = DP_TEST_LINK_TRAINING;
3913 response = intel_dp_autotest_link_training(intel_dp);
3914 break;
3915 case DP_TEST_LINK_VIDEO_PATTERN:
3916 DRM_DEBUG_KMS("TEST_PATTERN test requested\n");
3917 intel_dp->compliance_test_type = DP_TEST_LINK_VIDEO_PATTERN;
3918 response = intel_dp_autotest_video_pattern(intel_dp);
3919 break;
3920 case DP_TEST_LINK_EDID_READ:
3921 DRM_DEBUG_KMS("EDID test requested\n");
3922 intel_dp->compliance_test_type = DP_TEST_LINK_EDID_READ;
3923 response = intel_dp_autotest_edid(intel_dp);
3924 break;
3925 case DP_TEST_LINK_PHY_TEST_PATTERN:
3926 DRM_DEBUG_KMS("PHY_PATTERN test requested\n");
3927 intel_dp->compliance_test_type = DP_TEST_LINK_PHY_TEST_PATTERN;
3928 response = intel_dp_autotest_phy_pattern(intel_dp);
3929 break;
3930 default:
3931 DRM_DEBUG_KMS("Invalid test request '%02x'\n", rxdata);
3932 break;
3933 }
3934
3935update_status:
3936 status = drm_dp_dpcd_write(&intel_dp->aux,
3937 DP_TEST_RESPONSE,
3938 &response, 1);
3939 if (status <= 0)
3940 DRM_DEBUG_KMS("Could not write test response to sink\n");
3941}
3942
/*
 * Service an MST sink IRQ: read the ESI block, retrain if channel EQ has
 * dropped, hand the event to the MST topology manager, ack the handled
 * bits and loop while the sink keeps reporting events.  Returns the MST
 * manager's result, or -EINVAL if the port isn't in MST mode / the ESI
 * read failed (in which case MST is torn down and a hotplug event sent).
 */
static int
intel_dp_check_mst_status(struct intel_dp *intel_dp)
{
	bool bret;

	if (intel_dp->is_mst) {
		u8 esi[16] = { 0 };
		int ret = 0;
		int retry;
		bool handled;
		bret = intel_dp_get_sink_irq_esi(intel_dp, esi);
go_again:
		if (bret == true) {

			/* Check link status: esi[10..] holds the per-lane
			 * status registers used by drm_dp_channel_eq_ok(). */
			if (intel_dp->active_mst_links &&
			    !drm_dp_channel_eq_ok(&esi[10], intel_dp->lane_count)) {
				DRM_DEBUG_KMS("channel EQ not ok, retraining\n");
				intel_dp_start_link_train(intel_dp);
				intel_dp_stop_link_train(intel_dp);
			}

			DRM_DEBUG_KMS("got esi %3ph\n", esi);
			ret = drm_dp_mst_hpd_irq(&intel_dp->mst_mgr, esi, &handled);

			if (handled) {
				/* Ack the serviced event bytes; retry the AUX
				 * write a few times if it doesn't stick. */
				for (retry = 0; retry < 3; retry++) {
					int wret;
					wret = drm_dp_dpcd_write(&intel_dp->aux,
								 DP_SINK_COUNT_ESI+1,
								 &esi[1], 3);
					if (wret == 3) {
						break;
					}
				}

				/* The sink may have raised more events meanwhile. */
				bret = intel_dp_get_sink_irq_esi(intel_dp, esi);
				if (bret == true) {
					DRM_DEBUG_KMS("got esi2 %3ph\n", esi);
					goto go_again;
				}
			} else
				ret = 0;

			return ret;
		} else {
			struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
			DRM_DEBUG_KMS("failed to get ESI - device may have failed\n");
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
			/* Notify userspace so it re-probes the topology. */
			drm_kms_helper_hotplug_event(intel_dig_port->base.base.dev);
		}
	}
	return -EINVAL;
}
3999
/*
 * Check DPCD link status on an active link and retrain if channel EQ was
 * lost, or unconditionally when a LINK_TRAINING compliance test is
 * pending.  Caller must hold connection_mutex.
 */
static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	u8 link_status[DP_LINK_STATUS_SIZE];

	WARN_ON(!drm_modeset_is_locked(&dev->mode_config.connection_mutex));

	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		DRM_ERROR("Failed to get link status\n");
		return;
	}

	/* Nothing to retrain if the encoder isn't driving an active pipe. */
	if (!intel_encoder->base.crtc)
		return;

	if (!to_intel_crtc(intel_encoder->base.crtc)->active)
		return;

	/* Retrain when requested by a compliance test, or when EQ dropped. */
	if ((intel_dp->compliance_test_type == DP_TEST_LINK_TRAINING) ||
	    (!drm_dp_channel_eq_ok(link_status, intel_dp->lane_count))) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      intel_encoder->base.name);
		intel_dp_start_link_train(intel_dp);
		intel_dp_stop_link_train(intel_dp);
	}
}
4029
4030
4031
4032
4033
4034
4035
4036
4037
4038
4039
4040
4041
4042
/*
 * intel_dp_short_pulse - handle a short HPD pulse without a full detect.
 * @intel_dp: DP struct
 *
 * Re-reads the DPCD, services any pending sink IRQ and re-checks link
 * status.  Returns false when a full detection cycle is required instead
 * (DPCD read failure or a changed sink count), true otherwise.
 */
static bool
intel_dp_short_pulse(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	u8 sink_irq_vector = 0;
	u8 old_sink_count = intel_dp->sink_count;
	bool ret;

	/*
	 * Clear the compliance-test state so values captured for the next
	 * automated test request start from a clean slate.
	 */
	intel_dp->compliance_test_active = 0;
	intel_dp->compliance_test_type = 0;
	intel_dp->compliance_test_data = 0;

	/*
	 * Re-read the DPCD.  If the read fails or the sink count changed
	 * since it was cached, a full detection is needed.
	 */
	ret = intel_dp_get_dpcd(intel_dp);

	if ((old_sink_count != intel_dp->sink_count) || !ret) {
		/* Caller will schedule a full detect; nothing more to do. */
		return false;
	}

	/* Try to read and clear the source of the interrupt. */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector) &&
	    sink_irq_vector != 0) {
		/* Write the vector back to ack/clear the IRQ bits. */
		drm_dp_dpcd_writeb(&intel_dp->aux,
				   DP_DEVICE_SERVICE_IRQ_VECTOR,
				   sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			DRM_DEBUG_DRIVER("Test request in short pulse not handled\n");
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

	drm_modeset_lock(&dev->mode_config.connection_mutex, NULL);
	intel_dp_check_link_status(intel_dp);
	drm_modeset_unlock(&dev->mode_config.connection_mutex);

	return true;
}
4093
4094
/*
 * Classify a port that has something physically attached by interrogating
 * the DPCD: connected, disconnected, or unknown for downstream port types
 * where DDC probing is unreliable.
 */
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	uint8_t *dpcd = intel_dp->dpcd;
	uint8_t type;

	if (!intel_dp_get_dpcd(intel_dp))
		return connector_status_disconnected;

	if (is_edp(intel_dp))
		return connector_status_connected;

	/* If there's no downstream port, we're done. */
	if (!(dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_PRESENT))
		return connector_status_connected;

	/* Downstream port is HPD-capable: trust the sink count. */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {

		return intel_dp->sink_count ?
		connector_status_connected : connector_status_disconnected;
	}

	if (intel_dp_can_mst(intel_dp))
		return connector_status_connected;

	/* No HPD on the downstream port: probe DDC for a display. */
	if (drm_probe_ddc(&intel_dp->aux.ddc))
		return connector_status_connected;

	/* For analog/legacy downstream types, detection is unreliable. */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
		type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
		if (type == DP_DS_PORT_TYPE_VGA ||
		    type == DP_DS_PORT_TYPE_NON_EDID)
			return connector_status_unknown;
	} else {
		type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
			DP_DWN_STRM_PORT_TYPE_MASK;
		if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
		    type == DP_DWN_STRM_PORT_TYPE_OTHER)
			return connector_status_unknown;
	}

	/* Anything else is a broken/out-of-spec branch device. */
	DRM_DEBUG_KMS("Broken DP branch device, ignoring\n");
	return connector_status_disconnected;
}
4144
4145static enum drm_connector_status
4146edp_detect(struct intel_dp *intel_dp)
4147{
4148 struct drm_device *dev = intel_dp_to_dev(intel_dp);
4149 enum drm_connector_status status;
4150
4151 status = intel_panel_detect(dev);
4152 if (status == connector_status_unknown)
4153 status = connector_status_connected;
4154
4155 return status;
4156}
4157
4158static bool ibx_digital_port_connected(struct drm_i915_private *dev_priv,
4159 struct intel_digital_port *port)
4160{
4161 u32 bit;
4162
4163 switch (port->port) {
4164 case PORT_A:
4165 return true;
4166 case PORT_B:
4167 bit = SDE_PORTB_HOTPLUG;
4168 break;
4169 case PORT_C:
4170 bit = SDE_PORTC_HOTPLUG;
4171 break;
4172 case PORT_D:
4173 bit = SDE_PORTD_HOTPLUG;
4174 break;
4175 default:
4176 MISSING_CASE(port->port);
4177 return false;
4178 }
4179
4180 return I915_READ(SDEISR) & bit;
4181}
4182
4183static bool cpt_digital_port_connected(struct drm_i915_private *dev_priv,
4184 struct intel_digital_port *port)
4185{
4186 u32 bit;
4187
4188 switch (port->port) {
4189 case PORT_A:
4190 return true;
4191 case PORT_B:
4192 bit = SDE_PORTB_HOTPLUG_CPT;
4193 break;
4194 case PORT_C:
4195 bit = SDE_PORTC_HOTPLUG_CPT;
4196 break;
4197 case PORT_D:
4198 bit = SDE_PORTD_HOTPLUG_CPT;
4199 break;
4200 case PORT_E:
4201 bit = SDE_PORTE_HOTPLUG_SPT;
4202 break;
4203 default:
4204 MISSING_CASE(port->port);
4205 return false;
4206 }
4207
4208 return I915_READ(SDEISR) & bit;
4209}
4210
4211static bool g4x_digital_port_connected(struct drm_i915_private *dev_priv,
4212 struct intel_digital_port *port)
4213{
4214 u32 bit;
4215
4216 switch (port->port) {
4217 case PORT_B:
4218 bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
4219 break;
4220 case PORT_C:
4221 bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
4222 break;
4223 case PORT_D:
4224 bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
4225 break;
4226 default:
4227 MISSING_CASE(port->port);
4228 return false;
4229 }
4230
4231 return I915_READ(PORT_HOTPLUG_STAT) & bit;
4232}
4233
4234static bool gm45_digital_port_connected(struct drm_i915_private *dev_priv,
4235 struct intel_digital_port *port)
4236{
4237 u32 bit;
4238
4239 switch (port->port) {
4240 case PORT_B:
4241 bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
4242 break;
4243 case PORT_C:
4244 bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
4245 break;
4246 case PORT_D:
4247 bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
4248 break;
4249 default:
4250 MISSING_CASE(port->port);
4251 return false;
4252 }
4253
4254 return I915_READ(PORT_HOTPLUG_STAT) & bit;
4255}
4256
4257static bool bxt_digital_port_connected(struct drm_i915_private *dev_priv,
4258 struct intel_digital_port *intel_dig_port)
4259{
4260 struct intel_encoder *intel_encoder = &intel_dig_port->base;
4261 enum port port;
4262 u32 bit;
4263
4264 intel_hpd_pin_to_port(intel_encoder->hpd_pin, &port);
4265 switch (port) {
4266 case PORT_A:
4267 bit = BXT_DE_PORT_HP_DDIA;
4268 break;
4269 case PORT_B:
4270 bit = BXT_DE_PORT_HP_DDIB;
4271 break;
4272 case PORT_C:
4273 bit = BXT_DE_PORT_HP_DDIC;
4274 break;
4275 default:
4276 MISSING_CASE(port);
4277 return false;
4278 }
4279
4280 return I915_READ(GEN8_DE_PORT_ISR) & bit;
4281}
4282
4283
4284
4285
4286
4287
4288
4289
4290static bool intel_digital_port_connected(struct drm_i915_private *dev_priv,
4291 struct intel_digital_port *port)
4292{
4293 if (HAS_PCH_IBX(dev_priv))
4294 return ibx_digital_port_connected(dev_priv, port);
4295 else if (HAS_PCH_SPLIT(dev_priv))
4296 return cpt_digital_port_connected(dev_priv, port);
4297 else if (IS_BROXTON(dev_priv))
4298 return bxt_digital_port_connected(dev_priv, port);
4299 else if (IS_GM45(dev_priv))
4300 return gm45_digital_port_connected(dev_priv, port);
4301 else
4302 return g4x_digital_port_connected(dev_priv, port);
4303}
4304
4305static struct edid *
4306intel_dp_get_edid(struct intel_dp *intel_dp)
4307{
4308 struct intel_connector *intel_connector = intel_dp->attached_connector;
4309
4310
4311 if (intel_connector->edid) {
4312
4313 if (IS_ERR(intel_connector->edid))
4314 return NULL;
4315
4316 return drm_edid_duplicate(intel_connector->edid);
4317 } else
4318 return drm_get_edid(&intel_connector->base,
4319 &intel_dp->aux.ddc);
4320}
4321
4322static void
4323intel_dp_set_edid(struct intel_dp *intel_dp)
4324{
4325 struct intel_connector *intel_connector = intel_dp->attached_connector;
4326 struct edid *edid;
4327
4328 intel_dp_unset_edid(intel_dp);
4329 edid = intel_dp_get_edid(intel_dp);
4330 intel_connector->detect_edid = edid;
4331
4332 if (intel_dp->force_audio != HDMI_AUDIO_AUTO)
4333 intel_dp->has_audio = intel_dp->force_audio == HDMI_AUDIO_ON;
4334 else
4335 intel_dp->has_audio = drm_detect_monitor_audio(edid);
4336}
4337
4338static void
4339intel_dp_unset_edid(struct intel_dp *intel_dp)
4340{
4341 struct intel_connector *intel_connector = intel_dp->attached_connector;
4342
4343 kfree(intel_connector->detect_edid);
4344 intel_connector->detect_edid = NULL;
4345
4346 intel_dp->has_audio = false;
4347}
4348
/*
 * Full connector probe (long HPD pulse or ->detect): determine whether
 * anything is connected, refresh DPCD-derived state, configure MST, read
 * the EDID and service any pending sink IRQ.  Holds the port's AUX power
 * domain for the duration.
 */
static enum drm_connector_status
intel_dp_long_pulse(struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = connector->dev;
	enum drm_connector_status status;
	enum intel_display_power_domain power_domain;
	u8 sink_irq_vector = 0;

	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_get(to_i915(dev), power_domain);

	/* eDP has its own detect path; DP first checks the live-status bit. */
	if (is_edp(intel_dp))
		status = edp_detect(intel_dp);
	else if (intel_digital_port_connected(to_i915(dev),
					      dp_to_dig_port(intel_dp)))
		status = intel_dp_detect_dpcd(intel_dp);
	else
		status = connector_status_disconnected;

	if (status == connector_status_disconnected) {
		/* Nothing attached: reset stale compliance-test state. */
		intel_dp->compliance_test_active = 0;
		intel_dp->compliance_test_type = 0;
		intel_dp->compliance_test_data = 0;

		if (intel_dp->is_mst) {
			DRM_DEBUG_KMS("MST device may have disappeared %d vs %d\n",
				      intel_dp->is_mst,
				      intel_dp->mst_mgr.mst_state);
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
							intel_dp->is_mst);
		}

		goto out;
	}

	if (intel_encoder->type != INTEL_OUTPUT_EDP)
		intel_encoder->type = INTEL_OUTPUT_DP;

	DRM_DEBUG_KMS("Display Port TPS3 support: source %s, sink %s\n",
		      yesno(intel_dp_source_supports_hbr2(intel_dp)),
		      yesno(drm_dp_tps3_supported(intel_dp->dpcd)));

	intel_dp_print_rates(intel_dp);

	intel_dp_probe_oui(intel_dp);

	intel_dp_print_hw_revision(intel_dp);
	intel_dp_print_sw_revision(intel_dp);

	intel_dp_configure_mst(intel_dp);

	if (intel_dp->is_mst) {
		/*
		 * In MST mode this base connector is reported disconnected;
		 * the MST manager exposes its own per-stream connectors.
		 */
		status = connector_status_disconnected;
		goto out;
	} else if (connector->status == connector_status_connected) {
		/*
		 * Already connected and still connected: just re-check link
		 * status, since link loss can also trigger a long pulse.
		 */
		drm_modeset_lock(&dev->mode_config.connection_mutex, NULL);
		intel_dp_check_link_status(intel_dp);
		drm_modeset_unlock(&dev->mode_config.connection_mutex);
		goto out;
	}

	/*
	 * Reset the AUX NACK/DEFER counters so the EDID compliance handling
	 * (intel_dp_autotest_edid) sees exact values for this EDID read.
	 */
	intel_dp->aux.i2c_nack_count = 0;
	intel_dp->aux.i2c_defer_count = 0;

	intel_dp_set_edid(intel_dp);
	/* eDP counts as connected even without an EDID. */
	if (is_edp(intel_dp) || intel_connector->detect_edid)
		status = connector_status_connected;
	intel_dp->detect_done = true;

	/* Try to read and clear the source of the sink interrupt. */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector) &&
	    sink_irq_vector != 0) {
		/* Writing the vector back acks the IRQ bits. */
		drm_dp_dpcd_writeb(&intel_dp->aux,
				   DP_DEVICE_SERVICE_IRQ_VECTOR,
				   sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

out:
	/* Keep the MST-cached EDID; otherwise drop it when disconnected. */
	if (status != connector_status_connected && !intel_dp->is_mst)
		intel_dp_unset_edid(intel_dp);

	intel_display_power_put(to_i915(dev), power_domain);
	return status;
}
4461
/*
 * ->detect() hook.  If a full detect already ran from the HPD path
 * (detect_done set by intel_dp_long_pulse), reuse connector->status;
 * otherwise run a full probe now.  The cached flag is consumed either way.
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	enum drm_connector_status status = connector->status;

	DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n",
		      connector->base.id, connector->name);

	/* If a full detect has not been performed yet, do one now. */
	if (!intel_dp->detect_done)
		status = intel_dp_long_pulse(intel_dp->attached_connector);

	intel_dp->detect_done = false;

	return status;
}
4479
4480static void
4481intel_dp_force(struct drm_connector *connector)
4482{
4483 struct intel_dp *intel_dp = intel_attached_dp(connector);
4484 struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
4485 struct drm_i915_private *dev_priv = to_i915(intel_encoder->base.dev);
4486 enum intel_display_power_domain power_domain;
4487
4488 DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n",
4489 connector->base.id, connector->name);
4490 intel_dp_unset_edid(intel_dp);
4491
4492 if (connector->status != connector_status_connected)
4493 return;
4494
4495 power_domain = intel_display_port_aux_power_domain(intel_encoder);
4496 intel_display_power_get(dev_priv, power_domain);
4497
4498 intel_dp_set_edid(intel_dp);
4499
4500 intel_display_power_put(dev_priv, power_domain);
4501
4502 if (intel_encoder->type != INTEL_OUTPUT_EDP)
4503 intel_encoder->type = INTEL_OUTPUT_DP;
4504}
4505
4506static int intel_dp_get_modes(struct drm_connector *connector)
4507{
4508 struct intel_connector *intel_connector = to_intel_connector(connector);
4509 struct edid *edid;
4510
4511 edid = intel_connector->detect_edid;
4512 if (edid) {
4513 int ret = intel_connector_update_modes(connector, edid);
4514 if (ret)
4515 return ret;
4516 }
4517
4518
4519 if (is_edp(intel_attached_dp(connector)) &&
4520 intel_connector->panel.fixed_mode) {
4521 struct drm_display_mode *mode;
4522
4523 mode = drm_mode_duplicate(connector->dev,
4524 intel_connector->panel.fixed_mode);
4525 if (mode) {
4526 drm_mode_probed_add(connector, mode);
4527 return 1;
4528 }
4529 }
4530
4531 return 0;
4532}
4533
4534static bool
4535intel_dp_detect_audio(struct drm_connector *connector)
4536{
4537 bool has_audio = false;
4538 struct edid *edid;
4539
4540 edid = to_intel_connector(connector)->detect_edid;
4541 if (edid)
4542 has_audio = drm_detect_monitor_audio(edid);
4543
4544 return has_audio;
4545}
4546
/*
 * Legacy connector property setter: handles the force-audio property,
 * broadcast RGB range selection and (eDP only) the panel scaling mode.
 * A change that affects output jumps to done: and restores the mode on
 * the attached CRTC; unchanged values return 0 without a modeset.
 */
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct intel_encoder *intel_encoder = intel_attached_encoder(connector);
	struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
	int ret;

	ret = drm_object_property_set_value(&connector->base, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		/* AUTO consults the EDID; ON/OFF are forced regardless. */
		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		bool old_auto = intel_dp->color_range_auto;
		bool old_range = intel_dp->limited_color_range;

		switch (val) {
		case INTEL_BROADCAST_RGB_AUTO:
			intel_dp->color_range_auto = true;
			break;
		case INTEL_BROADCAST_RGB_FULL:
			intel_dp->color_range_auto = false;
			intel_dp->limited_color_range = false;
			break;
		case INTEL_BROADCAST_RGB_LIMITED:
			intel_dp->color_range_auto = false;
			intel_dp->limited_color_range = true;
			break;
		default:
			return -EINVAL;
		}

		/* No effective change: skip the modeset. */
		if (old_auto == intel_dp->color_range_auto &&
		    old_range == intel_dp->limited_color_range)
			return 0;

		goto done;
	}

	if (is_edp(intel_dp) &&
	    property == connector->dev->mode_config.scaling_mode_property) {
		if (val == DRM_MODE_SCALE_NONE) {
			DRM_DEBUG_KMS("no scaling not supported\n");
			return -EINVAL;
		}
		/* GMCH platforms can't do centered scaling. */
		if (HAS_GMCH_DISPLAY(dev_priv) &&
		    val == DRM_MODE_SCALE_CENTER) {
			DRM_DEBUG_KMS("centering not supported\n");
			return -EINVAL;
		}

		if (intel_connector->panel.fitting_mode == val) {
			/* The eDP scaling property is unchanged. */
			return 0;
		}
		intel_connector->panel.fitting_mode = val;

		goto done;
	}

	return -EINVAL;

done:
	/* Apply the new setting by restoring the mode on the active CRTC. */
	if (intel_encoder->base.crtc)
		intel_crtc_restore_mode(intel_encoder->base.crtc);

	return 0;
}
4639
4640static int
4641intel_dp_connector_register(struct drm_connector *connector)
4642{
4643 struct intel_dp *intel_dp = intel_attached_dp(connector);
4644 int ret;
4645
4646 ret = intel_connector_register(connector);
4647 if (ret)
4648 return ret;
4649
4650 i915_debugfs_connector_add(connector);
4651
4652 DRM_DEBUG_KMS("registering %s bus for %s\n",
4653 intel_dp->aux.name, connector->kdev->kobj.name);
4654
4655 intel_dp->aux.dev = connector->kdev;
4656 return drm_dp_aux_register(&intel_dp->aux);
4657}
4658
4659static void
4660intel_dp_connector_unregister(struct drm_connector *connector)
4661{
4662 drm_dp_aux_unregister(&intel_attached_dp(connector)->aux);
4663 intel_connector_unregister(connector);
4664}
4665
/* Free everything owned by the connector: cached EDIDs, panel, base. */
static void
intel_dp_connector_destroy(struct drm_connector *connector)
{
	struct intel_connector *intel_connector = to_intel_connector(connector);

	kfree(intel_connector->detect_edid);

	/* The override EDID may be an ERR_PTR sentinel, not a real buffer. */
	if (!IS_ERR_OR_NULL(intel_connector->edid))
		kfree(intel_connector->edid);

	/* Keyed off the connector type rather than is_edp(), since the
	 * encoder may already be gone at this point. */
	if (connector->connector_type == DRM_MODE_CONNECTOR_eDP)
		intel_panel_fini(&intel_connector->panel);

	drm_connector_cleanup(connector);
	kfree(connector);
}
4684
/*
 * Encoder teardown: clean up MST, force eDP VDD off (the delayed VDD-off
 * work may still be pending), drop the reboot notifier, and free the
 * digital port.
 */
void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_digital_port *intel_dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &intel_dig_port->dp;

	intel_dp_mst_encoder_cleanup(intel_dig_port);
	if (is_edp(intel_dp)) {
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		/*
		 * VDD might still be enabled due to the cancelled delayed
		 * VDD-off work; make sure it is actually turned off here,
		 * under the pps lock.
		 */
		pps_lock(intel_dp);
		edp_panel_vdd_off_sync(intel_dp);
		pps_unlock(intel_dp);

		if (intel_dp->edp_notifier.notifier_call) {
			unregister_reboot_notifier(&intel_dp->edp_notifier);
			intel_dp->edp_notifier.notifier_call = NULL;
		}
	}

	intel_dp_aux_fini(intel_dp);

	drm_encoder_cleanup(encoder);
	kfree(intel_dig_port);
}
4712
/* Suspend hook: for eDP, make sure panel VDD really is off. */
void intel_dp_encoder_suspend(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);

	if (!is_edp(intel_dp))
		return;

	/*
	 * VDD might still be enabled due to the pending delayed VDD-off
	 * work; cancel it and force VDD off under the pps lock before
	 * suspending.
	 */
	cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
	pps_lock(intel_dp);
	edp_panel_vdd_off_sync(intel_dp);
	pps_unlock(intel_dp);
}
4729
/*
 * Bring the driver's VDD state tracking in line with hardware when VDD
 * was left enabled (e.g. by the BIOS): take the power-domain reference
 * that the enabled VDD bit implies and schedule the normal delayed
 * VDD-off so the reference isn't held forever.  Caller holds pps_mutex.
 */
static void intel_edp_panel_vdd_sanitize(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!edp_have_panel_vdd(intel_dp))
		return;

	/*
	 * The VDD bit needs a power-domain reference, so grab one here to
	 * match the enabled bit, then let the scheduled VDD-off release it.
	 */
	DRM_DEBUG_KMS("VDD left on by BIOS, adjusting state tracking\n");
	power_domain = intel_display_port_aux_power_domain(&intel_dig_port->base);
	intel_display_power_get(dev_priv, power_domain);

	edp_panel_vdd_schedule_off(intel_dp);
}
4754
/*
 * ->reset() hook (boot/resume): re-read the cached port register on
 * non-DDI platforms, and for eDP re-init the panel power sequencer and
 * sanitize any BIOS-enabled VDD, all under the pps lock.
 */
void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	/* On DDI platforms intel_dp->DP is managed elsewhere. */
	if (!HAS_DDI(dev_priv))
		intel_dp->DP = I915_READ(intel_dp->output_reg);

	if (to_intel_encoder(encoder)->type != INTEL_OUTPUT_EDP)
		return;

	pps_lock(intel_dp);

	/* Reinit the power sequencer; BIOS may have touched it. */
	intel_dp_pps_init(encoder->dev, intel_dp);
	intel_edp_panel_vdd_sanitize(intel_dp);

	pps_unlock(intel_dp);
}
4774
/* Connector vfuncs for DP/eDP, using the atomic helpers for state handling. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_atomic_helper_connector_dpms,
	.detect = intel_dp_detect,
	.force = intel_dp_force,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.atomic_get_property = intel_connector_atomic_get_property,
	.late_register = intel_dp_connector_register,
	.early_unregister = intel_dp_connector_unregister,
	.destroy = intel_dp_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
};
4788
/* Probe helpers: mode enumeration and validation for DP/eDP. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
};
4793
/* Encoder vtable for DP/eDP encoders. */
static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};
4798
/*
 * Hotplug pulse handler for a DP digital port.
 *
 * @long_hpd: true for a long pulse (plug/unplug), false for a short
 *            pulse (link/sink IRQ).
 *
 * Returns IRQ_HANDLED when the event was fully serviced here;
 * IRQ_NONE when a full ->detect() cycle is still required (the
 * detect_done flag is cleared in that case so detect re-probes).
 */
enum irqreturn
intel_dp_hpd_pulse(struct intel_digital_port *intel_dig_port, bool long_hpd)
{
	struct intel_dp *intel_dp = &intel_dig_port->dp;
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	enum irqreturn ret = IRQ_NONE;

	/* DP++ ports may have been flagged HDMI by a previous detect;
	 * reset ambiguous ports back to plain DP. */
	if (intel_dig_port->base.type != INTEL_OUTPUT_EDP &&
	    intel_dig_port->base.type != INTEL_OUTPUT_HDMI)
		intel_dig_port->base.type = INTEL_OUTPUT_DP;

	if (long_hpd && intel_dig_port->base.type == INTEL_OUTPUT_EDP) {
		/*
		 * eDP panels are always connected; a long pulse on an
		 * eDP port doesn't mean hot(un)plug, so just ignore it
		 * rather than kicking off a full reprobe.
		 */
		DRM_DEBUG_KMS("ignoring long hpd on eDP port %c\n",
			      port_name(intel_dig_port->port));
		return IRQ_HANDLED;
	}

	DRM_DEBUG_KMS("got hpd irq on port %c - %s\n",
		      port_name(intel_dig_port->port),
		      long_hpd ? "long" : "short");

	if (long_hpd) {
		/* Force a fresh probe on the next detect. */
		intel_dp->detect_done = false;
		return IRQ_NONE;
	}

	/* AUX transfers below need the port's AUX power domain. */
	power_domain = intel_display_port_aux_power_domain(intel_encoder);
	intel_display_power_get(dev_priv, power_domain);

	if (intel_dp->is_mst) {
		if (intel_dp_check_mst_status(intel_dp) == -EINVAL) {
			/*
			 * If we were in MST mode, and device is not
			 * there, get out of MST mode and let a full
			 * detect run.
			 */
			DRM_DEBUG_KMS("MST device may have disappeared %d vs %d\n",
				      intel_dp->is_mst, intel_dp->mst_mgr.mst_state);
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
							intel_dp->is_mst);
			intel_dp->detect_done = false;
			goto put_power;
		}
	}

	if (!intel_dp->is_mst) {
		/* Short pulse handling failed: schedule a reprobe. */
		if (!intel_dp_short_pulse(intel_dp)) {
			intel_dp->detect_done = false;
			goto put_power;
		}
	}

	ret = IRQ_HANDLED;

put_power:
	intel_display_power_put(dev_priv, power_domain);

	return ret;
}
4867
4868
4869bool intel_dp_is_edp(struct drm_device *dev, enum port port)
4870{
4871 struct drm_i915_private *dev_priv = to_i915(dev);
4872
4873
4874
4875
4876
4877 if (INTEL_INFO(dev)->gen < 5)
4878 return false;
4879
4880 if (port == PORT_A)
4881 return true;
4882
4883 return intel_bios_is_port_edp(dev_priv, port);
4884}
4885
4886void
4887intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
4888{
4889 struct intel_connector *intel_connector = to_intel_connector(connector);
4890
4891 intel_attach_force_audio_property(connector);
4892 intel_attach_broadcast_rgb_property(connector);
4893 intel_dp->color_range_auto = true;
4894
4895 if (is_edp(intel_dp)) {
4896 drm_mode_create_scaling_mode_property(connector->dev);
4897 drm_object_attach_property(
4898 &connector->base,
4899 connector->dev->mode_config.scaling_mode_property,
4900 DRM_MODE_SCALE_ASPECT);
4901 intel_connector->panel.fitting_mode = DRM_MODE_SCALE_ASPECT;
4902 }
4903}
4904
4905static void intel_dp_init_panel_power_timestamps(struct intel_dp *intel_dp)
4906{
4907 intel_dp->panel_power_off_time = ktime_get_boottime();
4908 intel_dp->last_power_on = jiffies;
4909 intel_dp->last_backlight_off = jiffies;
4910}
4911
4912static void
4913intel_pps_readout_hw_state(struct drm_i915_private *dev_priv,
4914 struct intel_dp *intel_dp, struct edp_power_seq *seq)
4915{
4916 u32 pp_on, pp_off, pp_div = 0, pp_ctl = 0;
4917 struct pps_registers regs;
4918
4919 intel_pps_get_registers(dev_priv, intel_dp, ®s);
4920
4921
4922
4923 pp_ctl = ironlake_get_pp_control(intel_dp);
4924
4925 pp_on = I915_READ(regs.pp_on);
4926 pp_off = I915_READ(regs.pp_off);
4927 if (!IS_BROXTON(dev_priv)) {
4928 I915_WRITE(regs.pp_ctrl, pp_ctl);
4929 pp_div = I915_READ(regs.pp_div);
4930 }
4931
4932
4933 seq->t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
4934 PANEL_POWER_UP_DELAY_SHIFT;
4935
4936 seq->t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
4937 PANEL_LIGHT_ON_DELAY_SHIFT;
4938
4939 seq->t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
4940 PANEL_LIGHT_OFF_DELAY_SHIFT;
4941
4942 seq->t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
4943 PANEL_POWER_DOWN_DELAY_SHIFT;
4944
4945 if (IS_BROXTON(dev_priv)) {
4946 u16 tmp = (pp_ctl & BXT_POWER_CYCLE_DELAY_MASK) >>
4947 BXT_POWER_CYCLE_DELAY_SHIFT;
4948 if (tmp > 0)
4949 seq->t11_t12 = (tmp - 1) * 1000;
4950 else
4951 seq->t11_t12 = 0;
4952 } else {
4953 seq->t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
4954 PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;
4955 }
4956}
4957
4958static void
4959intel_pps_dump_state(const char *state_name, const struct edp_power_seq *seq)
4960{
4961 DRM_DEBUG_KMS("%s t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
4962 state_name,
4963 seq->t1_t3, seq->t8, seq->t9, seq->t10, seq->t11_t12);
4964}
4965
4966static void
4967intel_pps_verify_state(struct drm_i915_private *dev_priv,
4968 struct intel_dp *intel_dp)
4969{
4970 struct edp_power_seq hw;
4971 struct edp_power_seq *sw = &intel_dp->pps_delays;
4972
4973 intel_pps_readout_hw_state(dev_priv, intel_dp, &hw);
4974
4975 if (hw.t1_t3 != sw->t1_t3 || hw.t8 != sw->t8 || hw.t9 != sw->t9 ||
4976 hw.t10 != sw->t10 || hw.t11_t12 != sw->t11_t12) {
4977 DRM_ERROR("PPS state mismatch\n");
4978 intel_pps_dump_state("sw", sw);
4979 intel_pps_dump_state("hw", &hw);
4980 }
4981}
4982
/*
 * Compute the final panel power sequencer delays from the current
 * hardware state, the VBT, and the eDP spec upper limits, and cache
 * the derived per-operation delays (in ms) on @intel_dp.
 * Must be called with pps_mutex held.
 */
static void
intel_dp_init_panel_power_sequencer(struct drm_device *dev,
				    struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct edp_power_seq cur, vbt, spec,
		*final = &intel_dp->pps_delays;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* Already initialized? (t11_t12 can never legitimately be 0.) */
	if (final->t11_t12 != 0)
		return;

	intel_pps_readout_hw_state(dev_priv, intel_dp, &cur);

	intel_pps_dump_state("cur", &cur);

	vbt = dev_priv->vbt.edp.pps;

	/*
	 * Upper limits from the eDP spec, in 100 usec units (the same
	 * units the hardware uses for these fields).
	 */
	spec.t1_t3 = 210 * 10;
	spec.t8 = 50 * 10;
	spec.t9 = 50 * 10;
	spec.t10 = 500 * 10;
	/*
	 * This one is special and actually in units of 100ms, but zero
	 * based in the hw (so we need to add 100 ms). But the sw vbt
	 * table multiplies it with 1000 to make it in units of 100usec,
	 * too.
	 */
	spec.t11_t12 = (510 + 100) * 10;

	intel_pps_dump_state("vbt", &vbt);

	/* Use the max of the register settings and vbt. If both are
	 * unset, fall back to the spec limits. */
#define assign_final(field)	final->field = (max(cur.field, vbt.field) == 0 ? \
				       spec.field : \
				       max(cur.field, vbt.field))
	assign_final(t1_t3);
	assign_final(t8);
	assign_final(t9);
	assign_final(t10);
	assign_final(t11_t12);
#undef assign_final

	/* Convert from 100 usec units to ms, rounding up. */
#define get_delay(field)	(DIV_ROUND_UP(final->field, 10))
	intel_dp->panel_power_up_delay = get_delay(t1_t3);
	intel_dp->backlight_on_delay = get_delay(t8);
	intel_dp->backlight_off_delay = get_delay(t9);
	intel_dp->panel_power_down_delay = get_delay(t10);
	intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
#undef get_delay

	DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
		      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
		      intel_dp->panel_power_cycle_delay);

	DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
		      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

	/*
	 * NOTE(review): the hw backlight on/off delays are forced to 1
	 * (their minimum non-zero value) before being programmed into
	 * the registers; the full t8/t9 delays are enforced in software
	 * via backlight_on_delay/backlight_off_delay above — presumably
	 * to avoid the hw adding its own delay on top. Confirm against
	 * the PPS section of the bspec.
	 */
	final->t8 = 1;
	final->t9 = 1;
}
5054
5055static void
5056intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
5057 struct intel_dp *intel_dp)
5058{
5059 struct drm_i915_private *dev_priv = to_i915(dev);
5060 u32 pp_on, pp_off, pp_div, port_sel = 0;
5061 int div = dev_priv->rawclk_freq / 1000;
5062 struct pps_registers regs;
5063 enum port port = dp_to_dig_port(intel_dp)->port;
5064 const struct edp_power_seq *seq = &intel_dp->pps_delays;
5065
5066 lockdep_assert_held(&dev_priv->pps_mutex);
5067
5068 intel_pps_get_registers(dev_priv, intel_dp, ®s);
5069
5070 pp_on = (seq->t1_t3 << PANEL_POWER_UP_DELAY_SHIFT) |
5071 (seq->t8 << PANEL_LIGHT_ON_DELAY_SHIFT);
5072 pp_off = (seq->t9 << PANEL_LIGHT_OFF_DELAY_SHIFT) |
5073 (seq->t10 << PANEL_POWER_DOWN_DELAY_SHIFT);
5074
5075
5076 if (IS_BROXTON(dev)) {
5077 pp_div = I915_READ(regs.pp_ctrl);
5078 pp_div &= ~BXT_POWER_CYCLE_DELAY_MASK;
5079 pp_div |= (DIV_ROUND_UP((seq->t11_t12 + 1), 1000)
5080 << BXT_POWER_CYCLE_DELAY_SHIFT);
5081 } else {
5082 pp_div = ((100 * div)/2 - 1) << PP_REFERENCE_DIVIDER_SHIFT;
5083 pp_div |= (DIV_ROUND_UP(seq->t11_t12, 1000)
5084 << PANEL_POWER_CYCLE_DELAY_SHIFT);
5085 }
5086
5087
5088
5089 if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
5090 port_sel = PANEL_PORT_SELECT_VLV(port);
5091 } else if (HAS_PCH_IBX(dev) || HAS_PCH_CPT(dev)) {
5092 if (port == PORT_A)
5093 port_sel = PANEL_PORT_SELECT_DPA;
5094 else
5095 port_sel = PANEL_PORT_SELECT_DPD;
5096 }
5097
5098 pp_on |= port_sel;
5099
5100 I915_WRITE(regs.pp_on, pp_on);
5101 I915_WRITE(regs.pp_off, pp_off);
5102 if (IS_BROXTON(dev))
5103 I915_WRITE(regs.pp_ctrl, pp_div);
5104 else
5105 I915_WRITE(regs.pp_div, pp_div);
5106
5107 DRM_DEBUG_KMS("panel power sequencer register settings: PP_ON %#x, PP_OFF %#x, PP_DIV %#x\n",
5108 I915_READ(regs.pp_on),
5109 I915_READ(regs.pp_off),
5110 IS_BROXTON(dev) ?
5111 (I915_READ(regs.pp_ctrl) & BXT_POWER_CYCLE_DELAY_MASK) :
5112 I915_READ(regs.pp_div));
5113}
5114
/*
 * Initialize the panel power sequencer for @intel_dp. VLV/CHV have
 * per-pipe power sequencers and use their own setup path; everything
 * else computes the delays and programs the PPS registers directly.
 */
static void intel_dp_pps_init(struct drm_device *dev,
			      struct intel_dp *intel_dp)
{
	if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
		vlv_initial_power_sequencer_setup(intel_dp);
		return;
	}

	intel_dp_init_panel_power_sequencer(dev, intel_dp);
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp);
}
5125
5126
5127
5128
5129
5130
5131
5132
5133
5134
5135
5136
5137
5138
/*
 * Switch the DRRS (dynamic refresh rate switching) state of the eDP
 * panel to the given refresh rate: program either the M1_N1 (high RR)
 * or M2_N2 (low RR) link values on gen8+, or toggle the PIPECONF EDP
 * RR mode switch bit on gen7.
 */
static void intel_dp_set_drrs_state(struct drm_i915_private *dev_priv,
				    struct intel_crtc_state *crtc_state,
				    int refresh_rate)
{
	struct intel_encoder *encoder;
	struct intel_digital_port *dig_port = NULL;
	struct intel_dp *intel_dp = dev_priv->drrs.dp;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc_state->base.crtc);
	enum drrs_refresh_rate_type index = DRRS_HIGH_RR;

	if (refresh_rate <= 0) {
		DRM_DEBUG_KMS("Refresh rate should be positive non-zero.\n");
		return;
	}

	if (intel_dp == NULL) {
		DRM_DEBUG_KMS("DRRS not supported.\n");
		return;
	}

	/* Resolve the CRTC through the DRRS-registered encoder rather
	 * than trusting the passed-in state's crtc pointer. */
	dig_port = dp_to_dig_port(intel_dp);
	encoder = &dig_port->base;
	intel_crtc = to_intel_crtc(encoder->base.crtc);

	if (!intel_crtc) {
		DRM_DEBUG_KMS("DRRS: intel_crtc not initialized\n");
		return;
	}

	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
		DRM_DEBUG_KMS("Only Seamless DRRS supported.\n");
		return;
	}

	/* The requested rate matching the downclock mode means low RR;
	 * anything else is treated as high RR. */
	if (intel_dp->attached_connector->panel.downclock_mode->vrefresh ==
			refresh_rate)
		index = DRRS_LOW_RR;

	if (index == dev_priv->drrs.refresh_rate_type) {
		DRM_DEBUG_KMS(
			"DRRS requested for previously set RR...ignoring\n");
		return;
	}

	if (!crtc_state->base.active) {
		DRM_DEBUG_KMS("eDP encoder disabled. CRTC not Active\n");
		return;
	}

	if (INTEL_GEN(dev_priv) >= 8 && !IS_CHERRYVIEW(dev_priv)) {
		/* Gen8+: pick between the two precomputed M/N sets. */
		switch (index) {
		case DRRS_HIGH_RR:
			intel_dp_set_m_n(intel_crtc, M1_N1);
			break;
		case DRRS_LOW_RR:
			intel_dp_set_m_n(intel_crtc, M2_N2);
			break;
		case DRRS_MAX_RR:
		default:
			DRM_ERROR("Unsupported refreshrate type\n");
		}
	} else if (INTEL_GEN(dev_priv) > 6) {
		/* Gen7: toggle the RR mode switch bit in PIPECONF. */
		i915_reg_t reg = PIPECONF(crtc_state->cpu_transcoder);
		u32 val;

		val = I915_READ(reg);
		if (index > DRRS_HIGH_RR) {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val |= PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val |= PIPECONF_EDP_RR_MODE_SWITCH;
		} else {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH;
		}
		I915_WRITE(reg, val);
	}

	dev_priv->drrs.refresh_rate_type = index;

	DRM_DEBUG_KMS("eDP Refresh Rate set to : %dHz\n", refresh_rate);
}
5228
5229
5230
5231
5232
5233
5234
5235
5236void intel_edp_drrs_enable(struct intel_dp *intel_dp,
5237 struct intel_crtc_state *crtc_state)
5238{
5239 struct drm_device *dev = intel_dp_to_dev(intel_dp);
5240 struct drm_i915_private *dev_priv = to_i915(dev);
5241
5242 if (!crtc_state->has_drrs) {
5243 DRM_DEBUG_KMS("Panel doesn't support DRRS\n");
5244 return;
5245 }
5246
5247 mutex_lock(&dev_priv->drrs.mutex);
5248 if (WARN_ON(dev_priv->drrs.dp)) {
5249 DRM_ERROR("DRRS already enabled\n");
5250 goto unlock;
5251 }
5252
5253 dev_priv->drrs.busy_frontbuffer_bits = 0;
5254
5255 dev_priv->drrs.dp = intel_dp;
5256
5257unlock:
5258 mutex_unlock(&dev_priv->drrs.mutex);
5259}
5260
5261
5262
5263
5264
5265
5266
/*
 * Unregister @intel_dp from DRRS: restore the high (fixed-mode)
 * refresh rate if we were downclocked, clear the DRRS owner, and
 * cancel any pending downclock work.
 */
void intel_edp_drrs_disable(struct intel_dp *intel_dp,
			    struct intel_crtc_state *old_crtc_state)
{
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dev);

	if (!old_crtc_state->has_drrs)
		return;

	mutex_lock(&dev_priv->drrs.mutex);
	if (!dev_priv->drrs.dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	if (dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, old_crtc_state,
			intel_dp->attached_connector->panel.fixed_mode->vrefresh);

	dev_priv->drrs.dp = NULL;
	mutex_unlock(&dev_priv->drrs.mutex);

	/* Must cancel outside the mutex: the work takes the same mutex. */
	cancel_delayed_work_sync(&dev_priv->drrs.work);
}
5291
/*
 * Delayed work that drops the panel to the downclocked refresh rate
 * once the frontbuffer has been idle long enough (scheduled from
 * intel_edp_drrs_flush()).
 */
static void intel_edp_drrs_downclock_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), drrs.work.work);
	struct intel_dp *intel_dp;

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;

	if (!intel_dp)
		goto unlock;

	/*
	 * If a frontbuffer became busy again since this work was
	 * scheduled, stay at the high refresh rate.
	 */
	if (dev_priv->drrs.busy_frontbuffer_bits)
		goto unlock;

	if (dev_priv->drrs.refresh_rate_type != DRRS_LOW_RR) {
		struct drm_crtc *crtc = dp_to_dig_port(intel_dp)->base.base.crtc;

		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
			intel_dp->attached_connector->panel.downclock_mode->vrefresh);
	}

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}
5323
5324
5325
5326
5327
5328
5329
5330
5331
5332
5333
/*
 * Frontbuffer invalidate hook for DRRS: upcoming rendering means the
 * screen is about to change, so mark the affected frontbuffer bits
 * busy and bump back up to the high refresh rate if we had
 * downclocked.
 */
void intel_edp_drrs_invalidate(struct drm_i915_private *dev_priv,
			       unsigned int frontbuffer_bits)
{
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);
	if (!dev_priv->drrs.dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(dev_priv->drrs.dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only track frontbuffers on the DRRS pipe. */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;

	/* Invalidate means busy screen; restore the high refresh rate. */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
			dev_priv->drrs.dp->attached_connector->panel.fixed_mode->vrefresh);

	mutex_unlock(&dev_priv->drrs.mutex);
}
5364
5365
5366
5367
5368
5369
5370
5371
5372
5373
5374
5375
5376
/*
 * Frontbuffer flush hook for DRRS: rendering finished, so clear the
 * busy bits, restore the high refresh rate for the just-updated
 * screen, and schedule the downclock work to kick in after a second
 * of idleness.
 */
void intel_edp_drrs_flush(struct drm_i915_private *dev_priv,
			  unsigned int frontbuffer_bits)
{
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);
	if (!dev_priv->drrs.dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(dev_priv->drrs.dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only track frontbuffers on the DRRS pipe. */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* Flush means busy screen; restore the high refresh rate now. */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
			dev_priv->drrs.dp->attached_connector->panel.fixed_mode->vrefresh);

	/*
	 * Only arm the downclock timer once all frontbuffers on this
	 * pipe have gone idle.
	 */
	if (!dev_priv->drrs.busy_frontbuffer_bits)
		schedule_delayed_work(&dev_priv->drrs.work,
				      msecs_to_jiffies(1000));
	mutex_unlock(&dev_priv->drrs.mutex);
}
5414
5415
5416
5417
5418
5419
5420
5421
5422
5423
5424
5425
5426
5427
5428
5429
5430
5431
5432
5433
5434
5435
5436
5437
5438
5439
5440
5441
5442
5443
5444
5445
5446
5447
5448
5449
5450
5451
5452
5453
5454
5455
5456
5457
5458
5459
5460
5461
5462
5463
5464
/*
 * Probe for DRRS support on an eDP panel: requires gen7+, a VBT that
 * advertises seamless DRRS, and a downclocked variant of @fixed_mode
 * in the panel's EDID.
 *
 * Returns the downclock mode on success, or NULL when DRRS cannot be
 * used (callers treat NULL as "no DRRS").
 */
static struct drm_display_mode *
intel_dp_drrs_init(struct intel_connector *intel_connector,
		   struct drm_display_mode *fixed_mode)
{
	struct drm_connector *connector = &intel_connector->base;
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_display_mode *downclock_mode = NULL;

	/* Work/mutex are set up even when DRRS ends up unsupported. */
	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_edp_drrs_downclock_work);
	mutex_init(&dev_priv->drrs.mutex);

	if (INTEL_INFO(dev)->gen <= 6) {
		DRM_DEBUG_KMS("DRRS supported for Gen7 and above\n");
		return NULL;
	}

	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
		DRM_DEBUG_KMS("VBT doesn't support DRRS\n");
		return NULL;
	}

	downclock_mode = intel_find_panel_downclock
					(dev, fixed_mode, connector);

	if (!downclock_mode) {
		DRM_DEBUG_KMS("Downclock mode is not found. DRRS not supported\n");
		return NULL;
	}

	dev_priv->drrs.type = dev_priv->vbt.drrs_type;

	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
	DRM_DEBUG_KMS("seamless DRRS supported for eDP panel.\n");
	return downclock_mode;
}
5501
/*
 * eDP-specific half of connector init: set up the panel power
 * sequencer, read the DPCD and EDID, pick the fixed (and optional
 * DRRS downclock) mode, and wire up the backlight.
 *
 * Returns true on success (and trivially for non-eDP connectors);
 * false means eDP init failed and the connector should not be
 * registered.
 */
static bool intel_edp_init_connector(struct intel_dp *intel_dp,
				     struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_display_mode *fixed_mode = NULL;
	struct drm_display_mode *downclock_mode = NULL;
	bool has_dpcd;
	struct drm_display_mode *scan;
	struct edid *edid;
	enum pipe pipe = INVALID_PIPE;

	if (!is_edp(intel_dp))
		return true;

	/*
	 * On IBX/CPT systems with an enabled LVDS encoder, the eDP
	 * port would conflict with the internal panel wiring; defer to
	 * LVDS and skip registering eDP here.
	 */
	if (intel_get_lvds_encoder(dev)) {
		WARN_ON(!(HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)));
		DRM_INFO("LVDS was detected, not registering eDP\n");

		return false;
	}

	pps_lock(intel_dp);

	intel_dp_init_panel_power_timestamps(intel_dp);
	intel_dp_pps_init(dev, intel_dp);
	intel_edp_panel_vdd_sanitize(intel_dp);

	pps_unlock(intel_dp);

	/* Cache DPCD and EDID for edp. */
	has_dpcd = intel_edp_init_dpcd(intel_dp);

	if (!has_dpcd) {
		/* if this fails, presume the device is a ghost */
		DRM_INFO("failed to retrieve link info, disabling eDP\n");
		goto out_vdd_off;
	}

	mutex_lock(&dev->mode_config.mutex);
	edid = drm_get_edid(connector, &intel_dp->aux.ddc);
	if (edid) {
		if (drm_add_edid_modes(connector, edid)) {
			drm_mode_connector_update_edid_property(connector,
								edid);
			drm_edid_to_eld(connector, edid);
		} else {
			kfree(edid);
			edid = ERR_PTR(-EINVAL);
		}
	} else {
		edid = ERR_PTR(-ENOENT);
	}
	intel_connector->edid = edid;

	/* prefer fixed mode from EDID if available */
	list_for_each_entry(scan, &connector->probed_modes, head) {
		if ((scan->type & DRM_MODE_TYPE_PREFERRED)) {
			fixed_mode = drm_mode_duplicate(dev, scan);
			downclock_mode = intel_dp_drrs_init(
						intel_connector, fixed_mode);
			break;
		}
	}

	/* fallback to VBT if available for eDP */
	if (!fixed_mode && dev_priv->vbt.lfp_lvds_vbt_mode) {
		fixed_mode = drm_mode_duplicate(dev,
					dev_priv->vbt.lfp_lvds_vbt_mode);
		if (fixed_mode) {
			fixed_mode->type |= DRM_MODE_TYPE_PREFERRED;
			connector->display_info.width_mm = fixed_mode->width_mm;
			connector->display_info.height_mm = fixed_mode->height_mm;
		}
	}
	mutex_unlock(&dev->mode_config.mutex);

	if (IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) {
		intel_dp->edp_notifier.notifier_call = edp_notify_handler;
		register_reboot_notifier(&intel_dp->edp_notifier);

		/*
		 * Figure out the current pipe for the initial backlight
		 * setup. If the current pipe isn't valid, try the PPS
		 * pipe, and if that fails just assume pipe A.
		 */
		if (IS_CHERRYVIEW(dev))
			pipe = DP_PORT_TO_PIPE_CHV(intel_dp->DP);
		else
			pipe = PORT_TO_PIPE(intel_dp->DP);

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = intel_dp->pps_pipe;

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = PIPE_A;

		DRM_DEBUG_KMS("using pipe %c for initial backlight setup\n",
			      pipe_name(pipe));
	}

	intel_panel_init(&intel_connector->panel, fixed_mode, downclock_mode);
	intel_connector->panel.backlight.power = intel_edp_backlight_power;
	intel_panel_setup_backlight(connector, pipe);

	return true;

out_vdd_off:
	cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
	/*
	 * vdd might still be enabled due to the delayed vdd off.
	 * Make sure vdd is actually turned off here.
	 */
	pps_lock(intel_dp);
	edp_panel_vdd_off_sync(intel_dp);
	pps_unlock(intel_dp);

	return false;
}
5630
/*
 * Create and register the DRM connector for a DP/eDP digital port:
 * select per-platform AUX hooks, determine the connector type, hook
 * up HPD, MST and (for eDP) the panel.
 *
 * Returns false on failure, in which case the connector has been
 * cleaned up.
 */
bool
intel_dp_init_connector(struct intel_digital_port *intel_dig_port,
			struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_dp *intel_dp = &intel_dig_port->dp;
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = intel_dig_port->port;
	int type;

	if (WARN(intel_dig_port->max_lanes < 1,
		 "Not enough lanes (%d) for DP on port %c\n",
		 intel_dig_port->max_lanes, port_name(port)))
		return false;

	intel_dp->pps_pipe = INVALID_PIPE;

	/* Per-platform AUX channel clock divider and send-control hooks. */
	if (INTEL_INFO(dev)->gen >= 9)
		intel_dp->get_aux_clock_divider = skl_get_aux_clock_divider;
	else if (IS_HASWELL(dev) || IS_BROADWELL(dev))
		intel_dp->get_aux_clock_divider = hsw_get_aux_clock_divider;
	else if (HAS_PCH_SPLIT(dev))
		intel_dp->get_aux_clock_divider = ilk_get_aux_clock_divider;
	else
		intel_dp->get_aux_clock_divider = g4x_get_aux_clock_divider;

	if (INTEL_INFO(dev)->gen >= 9)
		intel_dp->get_aux_send_ctl = skl_get_aux_send_ctl;
	else
		intel_dp->get_aux_send_ctl = g4x_get_aux_send_ctl;

	if (HAS_DDI(dev))
		intel_dp->prepare_link_retrain = intel_ddi_prepare_link_retrain;

	/* Preserve the current hw state. */
	intel_dp->DP = I915_READ(intel_dp->output_reg);
	intel_dp->attached_connector = intel_connector;

	if (intel_dp_is_edp(dev, port))
		type = DRM_MODE_CONNECTOR_eDP;
	else
		type = DRM_MODE_CONNECTOR_DisplayPort;

	/*
	 * For eDP we always set the encoder type to INTEL_OUTPUT_EDP,
	 * so the detect/mode-set paths can tell eDP apart from
	 * external DP on the same port.
	 */
	if (type == DRM_MODE_CONNECTOR_eDP)
		intel_encoder->type = INTEL_OUTPUT_EDP;

	/* eDP only on port B and/or C on vlv/chv */
	if (WARN_ON((IS_VALLEYVIEW(dev) || IS_CHERRYVIEW(dev)) &&
		    is_edp(intel_dp) && port != PORT_B && port != PORT_C))
		return false;

	DRM_DEBUG_KMS("Adding %s connector on port %c\n",
			type == DRM_MODE_CONNECTOR_eDP ? "eDP" : "DP",
			port_name(port));

	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	intel_dp_aux_init(intel_dp);

	INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
			  edp_panel_vdd_work);

	intel_connector_attach_encoder(intel_connector, intel_encoder);

	if (HAS_DDI(dev))
		intel_connector->get_hw_state = intel_ddi_connector_get_hw_state;
	else
		intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* Set up the hotplug pin. */
	switch (port) {
	case PORT_A:
		intel_encoder->hpd_pin = HPD_PORT_A;
		break;
	case PORT_B:
		intel_encoder->hpd_pin = HPD_PORT_B;
		/* BXT A-step w/a: port B uses port A's hpd pin. */
		if (IS_BXT_REVID(dev, 0, BXT_REVID_A1))
			intel_encoder->hpd_pin = HPD_PORT_A;
		break;
	case PORT_C:
		intel_encoder->hpd_pin = HPD_PORT_C;
		break;
	case PORT_D:
		intel_encoder->hpd_pin = HPD_PORT_D;
		break;
	case PORT_E:
		intel_encoder->hpd_pin = HPD_PORT_E;
		break;
	default:
		BUG();
	}

	/* init MST on ports that can support it */
	if (HAS_DP_MST(dev) && !is_edp(intel_dp) &&
	    (port == PORT_B || port == PORT_C || port == PORT_D))
		intel_dp_mst_encoder_init(intel_dig_port,
					  intel_connector->base.base.id);

	if (!intel_edp_init_connector(intel_dp, intel_connector)) {
		intel_dp_aux_fini(intel_dp);
		intel_dp_mst_encoder_cleanup(intel_dig_port);
		goto fail;
	}

	intel_dp_add_properties(intel_dp, connector);

	/*
	 * For G4x desktop chip, PEG_BAND_GAP_DATA 3:0 must first be
	 * written 0xd. Failure to do so will result in spurious
	 * interrupts being generated on the port when a cable is not
	 * attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}

	return true;

fail:
	drm_connector_cleanup(connector);

	return false;
}
5765
/*
 * Top-level DP output init for @port: allocate the digital port and
 * connector, register the encoder with per-platform modeset hooks,
 * and initialize the connector.
 *
 * Returns false on any failure, with all partially-created objects
 * torn down via the error-unwind labels.
 */
bool intel_dp_init(struct drm_device *dev,
		   i915_reg_t output_reg,
		   enum port port)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_port *intel_dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	intel_dig_port = kzalloc(sizeof(*intel_dig_port), GFP_KERNEL);
	if (!intel_dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &intel_dig_port->base;
	encoder = &intel_encoder->base;

	if (drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
			     DRM_MODE_ENCODER_TMDS, "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->disable = intel_disable_dp;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	/* Per-platform enable/disable sequence hooks. */
	if (IS_CHERRYVIEW(dev)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		if (INTEL_INFO(dev)->gen >= 5)
			intel_encoder->post_disable = ilk_post_disable_dp;
	}

	intel_dig_port->port = port;
	intel_dig_port->dp.output_reg = output_reg;
	intel_dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	/* CHV port D is tied to pipe C; other ports to pipes A/B. */
	if (IS_CHERRYVIEW(dev)) {
		if (port == PORT_D)
			intel_encoder->crtc_mask = 1 << 2;
		else
			intel_encoder->crtc_mask = (1 << 0) | (1 << 1);
	} else {
		intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
	}
	intel_encoder->cloneable = 0;

	intel_dig_port->hpd_pulse = intel_dp_hpd_pulse;
	dev_priv->hotplug.irq_port[port] = intel_dig_port;

	if (!intel_dp_init_connector(intel_dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(intel_dig_port);
	return false;
}
5845
5846void intel_dp_mst_suspend(struct drm_device *dev)
5847{
5848 struct drm_i915_private *dev_priv = to_i915(dev);
5849 int i;
5850
5851
5852 for (i = 0; i < I915_MAX_PORTS; i++) {
5853 struct intel_digital_port *intel_dig_port = dev_priv->hotplug.irq_port[i];
5854
5855 if (!intel_dig_port || !intel_dig_port->dp.can_mst)
5856 continue;
5857
5858 if (intel_dig_port->dp.is_mst)
5859 drm_dp_mst_topology_mgr_suspend(&intel_dig_port->dp.mst_mgr);
5860 }
5861}
5862
5863void intel_dp_mst_resume(struct drm_device *dev)
5864{
5865 struct drm_i915_private *dev_priv = to_i915(dev);
5866 int i;
5867
5868 for (i = 0; i < I915_MAX_PORTS; i++) {
5869 struct intel_digital_port *intel_dig_port = dev_priv->hotplug.irq_port[i];
5870 int ret;
5871
5872 if (!intel_dig_port || !intel_dig_port->dp.can_mst)
5873 continue;
5874
5875 ret = drm_dp_mst_topology_mgr_resume(&intel_dig_port->dp.mst_mgr);
5876 if (ret)
5877 intel_dp_check_mst_status(&intel_dig_port->dp);
5878 }
5879}
5880