1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28#include <linux/export.h>
29#include <linux/i2c.h>
30#include <linux/notifier.h>
31#include <linux/reboot.h>
32#include <linux/slab.h>
33#include <linux/types.h>
34
35#include <asm/byteorder.h>
36
37#include <drm/drm_atomic_helper.h>
38#include <drm/drm_crtc.h>
39#include <drm/drm_dp_helper.h>
40#include <drm/drm_edid.h>
41#include <drm/drm_probe_helper.h>
42
43#include "i915_debugfs.h"
44#include "i915_drv.h"
45#include "i915_trace.h"
46#include "intel_atomic.h"
47#include "intel_audio.h"
48#include "intel_connector.h"
49#include "intel_ddi.h"
50#include "intel_display_types.h"
51#include "intel_dp.h"
52#include "intel_dp_link_training.h"
53#include "intel_dp_mst.h"
54#include "intel_dpio_phy.h"
55#include "intel_fifo_underrun.h"
56#include "intel_hdcp.h"
57#include "intel_hdmi.h"
58#include "intel_hotplug.h"
59#include "intel_lspcon.h"
60#include "intel_lvds.h"
61#include "intel_panel.h"
62#include "intel_psr.h"
63#include "intel_sideband.h"
64#include "intel_tc.h"
65#include "intel_vdsc.h"
66
/* Length of the DPRX Event Status Indicator (ESI) read buffer, in bytes */
#define DP_DPRX_ESI_LEN 14

/*
 * DSC sink throughput limits (kHz), used by intel_dp_dsc_get_slice_count()
 * to pick the minimum slice count for a given mode clock.
 */
#define DP_DSC_PEAK_PIXEL_RATE 2720000
#define DP_DSC_MAX_ENC_THROUGHPUT_0 340000
#define DP_DSC_MAX_ENC_THROUGHPUT_1 400000

/*
 * FEC overhead scaling factor: intel_dp_mode_to_fec_clock() computes
 * mode_clock * 1000000 / DP_DSC_FEC_OVERHEAD_FACTOR, i.e. an effective
 * multiplier of ~1.0285 on the mode clock.
 */
#define DP_DSC_FEC_OVERHEAD_FACTOR 972261

/* Compliance/test resolution preference codes (shift is 0, so plain values) */
#define INTEL_DP_RESOLUTION_SHIFT_MASK 0
#define INTEL_DP_RESOLUTION_PREFERRED (1 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_STANDARD (2 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_FAILSAFE (3 << INTEL_DP_RESOLUTION_SHIFT_MASK)
82
/* Mapping from a DP link rate (kHz) to the DPLL dividers that produce it */
struct dp_link_dpll {
	int clock;		/* link rate in kHz */
	struct dpll dpll;	/* divider settings for that rate */
};

/* G4x DPLL settings for the 1.62 and 2.7 GHz link rates */
static const struct dp_link_dpll g4x_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};

/* PCH (IbexPeak/CougarPoint era) DPLL settings */
static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};

/* Valleyview DPLL settings */
static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};

/*
 * Cherryview DPLL settings.  Note the m2 values are much wider than in the
 * other tables — presumably a fractional-M2 encoding; confirm against the
 * CHV DPLL programming docs before reusing them elsewhere.
 */
static const struct dp_link_dpll chv_dpll[] = {
	{ 162000,
		{ .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,
		{ .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
};
124
125
/* Compressed bpp values intel_dp_dsc_get_output_bpp() may round down to */
static const u8 valid_dsc_bpp[] = {6, 8, 10, 12, 15};

/* Slice counts intel_dp_dsc_get_slice_count() will consider, ascending */
static const u8 valid_dsc_slicecount[] = {1, 2, 4};
132
133
134
135
136
137
138
139
/*
 * intel_dp_is_edp - is the given port attached to an eDP panel (either CPU
 * eDP or PCH eDP)?
 * @intel_dp: DP struct
 *
 * Decided purely by the encoder type set up at init time.
 */
bool intel_dp_is_edp(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

	return dig_port->base.type == INTEL_OUTPUT_EDP;
}
146
/* Forward declarations for helpers defined later in this file */
static void intel_dp_link_down(struct intel_encoder *encoder,
			       const struct intel_crtc_state *old_crtc_state);
static bool edp_panel_vdd_on(struct intel_dp *intel_dp);
static void edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
static void vlv_init_panel_power_sequencer(struct intel_encoder *encoder,
					   const struct intel_crtc_state *crtc_state);
static void vlv_steal_power_sequencer(struct drm_i915_private *dev_priv,
				      enum pipe pipe);
static void intel_dp_unset_edid(struct intel_dp *intel_dp);
156
157
/*
 * Populate intel_dp->sink_rates[] / num_sink_rates from the sink's DPCD.
 *
 * Sinks with the "can do max link rate 3.24 Gbps" quirk get a fixed
 * 162/270/324 MHz table; all others get the standard DP rates clamped to
 * the sink's advertised DP_MAX_LINK_RATE.
 */
static void intel_dp_set_sink_rates(struct intel_dp *intel_dp)
{
	static const int dp_rates[] = {
		162000, 270000, 540000, 810000
	};
	int i, max_rate;

	if (drm_dp_has_quirk(&intel_dp->desc, 0,
			     DP_DPCD_QUIRK_CAN_DO_MAX_LINK_RATE_3_24_GBPS)) {
		/* Quirky sink: advertise only the rates it can really do */
		static const int quirk_rates[] = { 162000, 270000, 324000 };

		memcpy(intel_dp->sink_rates, quirk_rates, sizeof(quirk_rates));
		intel_dp->num_sink_rates = ARRAY_SIZE(quirk_rates);

		return;
	}

	max_rate = drm_dp_bw_code_to_link_rate(intel_dp->dpcd[DP_MAX_LINK_RATE]);

	/* Copy the standard rates up to and including the sink's max */
	for (i = 0; i < ARRAY_SIZE(dp_rates); i++) {
		if (dp_rates[i] > max_rate)
			break;
		intel_dp->sink_rates[i] = dp_rates[i];
	}

	intel_dp->num_sink_rates = i;
}
186
187
/*
 * Return the number of leading entries of the ascending @rates array
 * (length @len) that do not exceed @max_rate, i.e. the usable prefix
 * length.  Returns 0 when every rate is above the limit.
 */
static int intel_dp_rate_limit_len(const int *rates, int len, int max_rate)
{
	int idx;

	/* Scan from the top for the highest rate still within the limit */
	for (idx = len - 1; idx >= 0; idx--) {
		if (rates[idx] <= max_rate)
			return idx + 1;
	}

	return 0;
}
200
201
/*
 * Number of entries in intel_dp->common_rates[] that are <= @max_rate.
 */
static int intel_dp_common_len_rate_limit(const struct intel_dp *intel_dp,
					  int max_rate)
{
	return intel_dp_rate_limit_len(intel_dp->common_rates,
				       intel_dp->num_common_rates, max_rate);
}
208
209
/*
 * Highest rate both source and sink support; common_rates[] is kept in
 * ascending order, so it is the last entry.
 */
static int intel_dp_max_common_rate(struct intel_dp *intel_dp)
{
	return intel_dp->common_rates[intel_dp->num_common_rates - 1];
}
214
215
/*
 * Maximum lane count limited by the source (port), the sink (DPCD) and,
 * on Type-C, the FIA.
 */
static int intel_dp_max_common_lane_count(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	int source_max = dig_port->max_lanes;
	int sink_max = drm_dp_max_lane_count(intel_dp->dpcd);
	int fia_max = intel_tc_port_fia_max_lane_count(dig_port);

	return min3(source_max, sink_max, fia_max);
}
225
/*
 * Current max lane count; may be lower than the common max after link
 * training fallback (see intel_dp_get_link_train_fallback_values()).
 */
int intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	return intel_dp->max_link_lane_count;
}
230
/*
 * Bandwidth (same units as intel_dp_max_data_rate()) required to drive
 * @pixel_clock (kHz) at @bpp bits per pixel, rounded up to whole bytes.
 */
int
intel_dp_link_required(int pixel_clock, int bpp)
{
	/* pixel_clock is in kHz, divide bpp by 8 for bit to Byte conversion */
	return DIV_ROUND_UP(pixel_clock * bpp, 8);
}
237
/*
 * intel_dp_max_data_rate - maximum payload bandwidth of a link
 * @max_link_clock: link symbol clock in kHz
 * @max_lanes: number of lanes
 *
 * The result is the plain product of the two arguments, in the same
 * units as intel_dp_link_required(), so callers can compare the two
 * directly (see intel_dp_mode_valid()).
 */
int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	int rate = max_link_clock;

	rate *= max_lanes;

	return rate;
}
249
/*
 * Max source link rate on Cannonlake, which depends on the PHY voltage
 * level and on which port is used.
 */
static int cnl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;

	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	/* Low voltage SKUs are limited to max of 5.4G */
	if (voltage == VOLTAGE_INFO_0_85V)
		return 540000;

	/* For this SKU 8.1G is supported in all ports */
	if (IS_CNL_WITH_PORT_F(dev_priv))
		return 810000;

	/* For other SKUs, max rate on ports A and D is 5.4G */
	if (port == PORT_A || port == PORT_D)
		return 540000;

	return 810000;
}
272
/*
 * Max source link rate on Icelake+: combo PHYs (except EHL) are limited
 * to 5.4G for external DP; everything else can do 8.1G.
 */
static int icl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port);

	if (intel_phy_is_combo(dev_priv, phy) &&
	    !IS_ELKHARTLAKE(dev_priv) &&
	    !intel_dp_is_edp(intel_dp))
		return 540000;

	return 810000;
}
286
/*
 * Populate intel_dp->source_rates[] / num_source_rates with the link rates
 * this platform's port can drive, optionally truncated by a platform max
 * (gen10+) and by a VBT-specified max.  Must only run once per encoder
 * (the WARN below guards against re-initialization).
 */
static void
intel_dp_set_source_rates(struct intel_dp *intel_dp)
{
	/* Per-platform rate tables, each in ascending order (kHz) */
	static const int cnl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000, 648000, 810000
	};
	static const int bxt_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000
	};
	static const int skl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000
	};
	static const int hsw_rates[] = {
		162000, 270000, 540000
	};
	static const int g4x_rates[] = {
		162000, 270000
	};
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	const int *source_rates;
	int size, max_rate = 0, vbt_max_rate;

	/* This should only be done once */
	drm_WARN_ON(&dev_priv->drm,
		    intel_dp->source_rates || intel_dp->num_source_rates);

	if (INTEL_GEN(dev_priv) >= 10) {
		source_rates = cnl_rates;
		size = ARRAY_SIZE(cnl_rates);
		if (IS_GEN(dev_priv, 10))
			max_rate = cnl_max_source_rate(intel_dp);
		else
			max_rate = icl_max_source_rate(intel_dp);
	} else if (IS_GEN9_LP(dev_priv)) {
		source_rates = bxt_rates;
		size = ARRAY_SIZE(bxt_rates);
	} else if (IS_GEN9_BC(dev_priv)) {
		source_rates = skl_rates;
		size = ARRAY_SIZE(skl_rates);
	} else if ((IS_HASWELL(dev_priv) && !IS_HSW_ULX(dev_priv)) ||
		   IS_BROADWELL(dev_priv)) {
		source_rates = hsw_rates;
		size = ARRAY_SIZE(hsw_rates);
	} else {
		source_rates = g4x_rates;
		size = ARRAY_SIZE(g4x_rates);
	}

	/* Apply the stricter of the platform and VBT limits, if any */
	vbt_max_rate = intel_bios_dp_max_link_rate(encoder);
	if (max_rate && vbt_max_rate)
		max_rate = min(max_rate, vbt_max_rate);
	else if (vbt_max_rate)
		max_rate = vbt_max_rate;

	if (max_rate)
		size = intel_dp_rate_limit_len(source_rates, size, max_rate);

	intel_dp->source_rates = source_rates;
	intel_dp->num_source_rates = size;
}
350
/*
 * Merge-intersect two ascending rate arrays into @common_rates (capped at
 * DP_MAX_SUPPORTED_RATES entries).  Returns the number of common rates.
 */
static int intersect_rates(const int *source_rates, int source_len,
			   const int *sink_rates, int sink_len,
			   int *common_rates)
{
	int i = 0, j = 0, k = 0;

	while (i < source_len && j < sink_len) {
		if (source_rates[i] == sink_rates[j]) {
			if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
				return k;
			common_rates[k] = source_rates[i];
			++k;
			++i;
			++j;
		} else if (source_rates[i] < sink_rates[j]) {
			/* Advance whichever side holds the smaller rate */
			++i;
		} else {
			++j;
		}
	}
	return k;
}
373
374
/*
 * Return the index of @rate in @rates (length @len), or -1 when the rate
 * is not present.
 */
static int intel_dp_rate_index(const int *rates, int len, int rate)
{
	int idx = 0;

	while (idx < len) {
		if (rates[idx] == rate)
			return idx;
		idx++;
	}

	return -1;
}
385
/*
 * Compute the intersection of source and sink rates into
 * intel_dp->common_rates[].  Both input arrays must already be populated.
 * Falls back to RBR (162 MHz) alone if the intersection is unexpectedly
 * empty, so callers always have at least one rate to work with.
 */
static void intel_dp_set_common_rates(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	drm_WARN_ON(&i915->drm,
		    !intel_dp->num_source_rates || !intel_dp->num_sink_rates);

	intel_dp->num_common_rates = intersect_rates(intel_dp->source_rates,
						     intel_dp->num_source_rates,
						     intel_dp->sink_rates,
						     intel_dp->num_sink_rates,
						     intel_dp->common_rates);

	/* Paranoia, there should always be something in common. */
	if (drm_WARN_ON(&i915->drm, intel_dp->num_common_rates == 0)) {
		intel_dp->common_rates[0] = 162000;
		intel_dp->num_common_rates = 1;
	}
}
405
/*
 * Check that @link_rate / @lane_count are non-zero and within the current
 * (possibly fallback-reduced) maxima.  Note this deliberately checks
 * against max_link_rate rather than exact membership in common_rates.
 */
static bool intel_dp_link_params_valid(struct intel_dp *intel_dp, int link_rate,
				       u8 lane_count)
{
	if (link_rate == 0 ||
	    link_rate > intel_dp->max_link_rate)
		return false;

	if (lane_count == 0 ||
	    lane_count > intel_dp_max_lane_count(intel_dp))
		return false;

	return true;
}
424
/*
 * Can the eDP panel's fixed mode still be driven (at a minimal 18 bpp)
 * with the proposed fallback @link_rate / @lane_count?  Used to avoid
 * dropping below the bandwidth the panel needs.
 */
static bool intel_dp_can_link_train_fallback_for_edp(struct intel_dp *intel_dp,
						     int link_rate,
						     u8 lane_count)
{
	const struct drm_display_mode *fixed_mode =
		intel_dp->attached_connector->panel.fixed_mode;
	int mode_rate, max_rate;

	mode_rate = intel_dp_link_required(fixed_mode->clock, 18);
	max_rate = intel_dp_max_data_rate(link_rate, lane_count);
	if (mode_rate > max_rate)
		return false;

	return true;
}
440
/*
 * After a failed link training at @link_rate/@lane_count, lower the max
 * link parameters for the next attempt: first step down through
 * common_rates[], then halve the lane count.  For eDP the fallback is
 * skipped (returning 0, "retry same parameters") when it would leave too
 * little bandwidth for the panel's fixed mode.
 *
 * Returns 0 when a retry is possible, -1 when out of options (or MST,
 * where fallback is not attempted).
 */
int intel_dp_get_link_train_fallback_values(struct intel_dp *intel_dp,
					    int link_rate, u8 lane_count)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int index;

	if (intel_dp->is_mst) {
		drm_err(&i915->drm, "Link Training Unsuccessful\n");
		return -1;
	}

	index = intel_dp_rate_index(intel_dp->common_rates,
				    intel_dp->num_common_rates,
				    link_rate);
	if (index > 0) {
		/* Try the next lower common rate, same lane count */
		if (intel_dp_is_edp(intel_dp) &&
		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
							      intel_dp->common_rates[index - 1],
							      lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "Retrying Link training for eDP with same parameters\n");
			return 0;
		}
		intel_dp->max_link_rate = intel_dp->common_rates[index - 1];
		intel_dp->max_link_lane_count = lane_count;
	} else if (lane_count > 1) {
		/* Already at lowest rate: halve the lanes, reset to max rate */
		if (intel_dp_is_edp(intel_dp) &&
		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
							      intel_dp_max_common_rate(intel_dp),
							      lane_count >> 1)) {
			drm_dbg_kms(&i915->drm,
				    "Retrying Link training for eDP with same parameters\n");
			return 0;
		}
		intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
		intel_dp->max_link_lane_count = lane_count >> 1;
	} else {
		drm_err(&i915->drm, "Link Training Unsuccessful\n");
		return -1;
	}

	return 0;
}
488
/*
 * Scale @mode_clock up to account for FEC overhead:
 * mode_clock * 1e6 / DP_DSC_FEC_OVERHEAD_FACTOR (972261).
 */
u32 intel_dp_mode_to_fec_clock(u32 mode_clock)
{
	return div_u64(mul_u32_u32(mode_clock, 1000000U),
		       DP_DSC_FEC_OVERHEAD_FACTOR);
}
494
/*
 * Small-joiner RAM size in bits: 7680 bytes on gen11+, 6144 bytes before.
 */
static int
small_joiner_ram_size_bits(struct drm_i915_private *i915)
{
	if (INTEL_GEN(i915) >= 11)
		return 7680 * 8;
	else
		return 6144 * 8;
}
503
/*
 * Compute the DSC compressed output bpp achievable for a mode, limited by
 * both the available link bandwidth (FEC-adjusted) and the small-joiner
 * RAM, then rounded down to the nearest entry of valid_dsc_bpp[].
 *
 * Returns the bpp in U6.4 fixed point (value << 4), or 0 if even the
 * smallest valid bpp cannot be met.
 */
static u16 intel_dp_dsc_get_output_bpp(struct drm_i915_private *i915,
				       u32 link_clock, u32 lane_count,
				       u32 mode_clock, u32 mode_hdisplay)
{
	u32 bits_per_pixel, max_bpp_small_joiner_ram;
	int i;

	/*
	 * Link-bandwidth limit: bytes/symbol * 8 bits per lane, divided by
	 * the FEC-adjusted mode clock.
	 */
	bits_per_pixel = (link_clock * lane_count * 8) /
		intel_dp_mode_to_fec_clock(mode_clock);
	drm_dbg_kms(&i915->drm, "Max link bpp: %u\n", bits_per_pixel);

	/* Small-joiner RAM limit, per horizontal pixel */
	max_bpp_small_joiner_ram = small_joiner_ram_size_bits(i915) /
		mode_hdisplay;
	drm_dbg_kms(&i915->drm, "Max small joiner bpp: %u\n",
		    max_bpp_small_joiner_ram);

	/* The stricter of the two limits applies */
	bits_per_pixel = min(bits_per_pixel, max_bpp_small_joiner_ram);

	/* Error out if the max bpp is less than smallest allowed valid bpp */
	if (bits_per_pixel < valid_dsc_bpp[0]) {
		drm_dbg_kms(&i915->drm, "Unsupported BPP %u, min %u\n",
			    bits_per_pixel, valid_dsc_bpp[0]);
		return 0;
	}

	/* Round down to the nearest valid DSC bpp */
	for (i = 0; i < ARRAY_SIZE(valid_dsc_bpp) - 1; i++) {
		if (bits_per_pixel < valid_dsc_bpp[i + 1])
			break;
	}
	bits_per_pixel = valid_dsc_bpp[i];

	/* Callers expect U6.4 fixed point (see the >> 4 at the call sites) */
	return bits_per_pixel << 4;
}
553
/*
 * Pick the smallest DSC slice count that satisfies both the encoder
 * throughput limits (DP_DSC_MAX_ENC_THROUGHPUT_*) and the sink's max
 * slice width, restricted to the counts the sink supports.
 *
 * Returns 0 when no supported slice count works.
 */
static u8 intel_dp_dsc_get_slice_count(struct intel_dp *intel_dp,
				       int mode_clock, int mode_hdisplay)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 min_slice_count, i;
	int max_slice_width;

	/* Higher throughput limit applies above the peak pixel rate */
	if (mode_clock <= DP_DSC_PEAK_PIXEL_RATE)
		min_slice_count = DIV_ROUND_UP(mode_clock,
					       DP_DSC_MAX_ENC_THROUGHPUT_0);
	else
		min_slice_count = DIV_ROUND_UP(mode_clock,
					       DP_DSC_MAX_ENC_THROUGHPUT_1);

	max_slice_width = drm_dp_dsc_sink_max_slice_width(intel_dp->dsc_dpcd);
	if (max_slice_width < DP_DSC_MIN_SLICE_WIDTH_VALUE) {
		drm_dbg_kms(&i915->drm,
			    "Unsupported slice width %d by DP DSC Sink device\n",
			    max_slice_width);
		return 0;
	}
	/* Also enough slices so that each one stays within the sink's width */
	min_slice_count = max_t(u8, min_slice_count,
				DIV_ROUND_UP(mode_hdisplay,
					     max_slice_width));

	/* Find the closest match to the valid slice count values */
	for (i = 0; i < ARRAY_SIZE(valid_dsc_slicecount); i++) {
		if (valid_dsc_slicecount[i] >
		    drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
						    false))
			break;
		if (min_slice_count <= valid_dsc_slicecount[i])
			return valid_dsc_slicecount[i];
	}

	drm_dbg_kms(&i915->drm, "Unsupported Slice Count %d\n",
		    min_slice_count);
	return 0;
}
594
/*
 * Reject hdisplay values the hardware cannot handle: 4096 wide is not
 * usable on pre-DDI platforms.
 */
static bool intel_dp_hdisplay_bad(struct drm_i915_private *dev_priv,
				  int hdisplay)
{
	return hdisplay == 4096 && !HAS_DDI(dev_priv);
}
613
/*
 * Validate @mode against the downstream facing port (DFP) limits: max
 * dotclock and, for HDMI/DVI DFPs, the TMDS clock range.  YCbCr 4:2:0
 * only modes run the TMDS clock at half the dotclock.
 */
static enum drm_mode_status
intel_dp_mode_valid_downstream(struct intel_connector *connector,
			       const struct drm_display_mode *mode,
			       int target_clock)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	int tmds_clock;

	if (intel_dp->dfp.max_dotclock &&
	    target_clock > intel_dp->dfp.max_dotclock)
		return MODE_CLOCK_HIGH;

	/* Assume 8bpc for the DP++/HDMI/DVI TMDS clock check */
	tmds_clock = target_clock;
	if (drm_mode_is_420_only(info, mode))
		tmds_clock /= 2;

	if (intel_dp->dfp.min_tmds_clock &&
	    tmds_clock < intel_dp->dfp.min_tmds_clock)
		return MODE_CLOCK_LOW;
	if (intel_dp->dfp.max_tmds_clock &&
	    tmds_clock > intel_dp->dfp.max_tmds_clock)
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}
641
/*
 * Connector .mode_valid() hook: check @mode against the eDP fixed mode,
 * the available link bandwidth (at a minimal 18 bpp, possibly rescued by
 * DSC on gen10+/GLK), platform dotclock and hdisplay limits, and finally
 * the downstream port and max plane size.
 */
static enum drm_mode_status
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct drm_display_mode *fixed_mode = intel_connector->panel.fixed_mode;
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	int target_clock = mode->clock;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int max_dotclk = dev_priv->max_dotclk_freq;
	u16 dsc_max_output_bpp = 0;
	u8 dsc_slice_count = 0;
	enum drm_mode_status status;

	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return MODE_NO_DBLESCAN;

	/* eDP: never exceed the panel's fixed mode; use its clock */
	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
		if (mode->hdisplay > fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > fixed_mode->vdisplay)
			return MODE_PANEL;

		target_clock = fixed_mode->clock;
	}

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
	/* 18 bpp is the minimum the link must be able to carry */
	mode_rate = intel_dp_link_required(target_clock, 18);

	if (intel_dp_hdisplay_bad(dev_priv, mode->hdisplay))
		return MODE_H_ILLEGAL;

	/*
	 * If the sink supports DSC (gen10+/GLK only), a mode that exceeds
	 * the plain link bandwidth may still be usable compressed.
	 */
	if ((INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) &&
	    drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd)) {
		if (intel_dp_is_edp(intel_dp)) {
			dsc_max_output_bpp =
				drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4;
			dsc_slice_count =
				drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
								true);
		} else if (drm_dp_sink_supports_fec(intel_dp->fec_capable)) {
			dsc_max_output_bpp =
				intel_dp_dsc_get_output_bpp(dev_priv,
							    max_link_clock,
							    max_lanes,
							    target_clock,
							    mode->hdisplay) >> 4;
			dsc_slice_count =
				intel_dp_dsc_get_slice_count(intel_dp,
							     target_clock,
							     mode->hdisplay);
		}
	}

	if ((mode_rate > max_rate && !(dsc_max_output_bpp && dsc_slice_count)) ||
	    target_clock > max_dotclk)
		return MODE_CLOCK_HIGH;

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		return MODE_H_ILLEGAL;

	status = intel_dp_mode_valid_downstream(intel_connector,
						mode, target_clock);
	if (status != MODE_OK)
		return status;

	return intel_mode_valid_max_plane_size(dev_priv, mode);
}
722
723u32 intel_dp_pack_aux(const u8 *src, int src_bytes)
724{
725 int i;
726 u32 v = 0;
727
728 if (src_bytes > 4)
729 src_bytes = 4;
730 for (i = 0; i < src_bytes; i++)
731 v |= ((u32)src[i]) << ((3 - i) * 8);
732 return v;
733}
734
735static void intel_dp_unpack_aux(u32 src, u8 *dst, int dst_bytes)
736{
737 int i;
738 if (dst_bytes > 4)
739 dst_bytes = 4;
740 for (i = 0; i < dst_bytes; i++)
741 dst[i] = src >> ((3-i) * 8);
742}
743
/* Forward declarations for the panel power sequencer (PPS) helpers */
static void
intel_dp_init_panel_power_sequencer(struct intel_dp *intel_dp);
static void
intel_dp_init_panel_power_sequencer_registers(struct intel_dp *intel_dp,
					      bool force_disable_vdd);
static void
intel_dp_pps_init(struct intel_dp *intel_dp);
751
/*
 * Take the PPS mutex while holding the AUX power domain, returning the
 * wakeref to be passed back to pps_unlock().
 */
static intel_wakeref_t
pps_lock(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	intel_wakeref_t wakeref;

	/* Grab the power domain before the mutex */
	wakeref = intel_display_power_get(dev_priv,
					  intel_aux_power_domain(dp_to_dig_port(intel_dp)));

	mutex_lock(&dev_priv->pps_mutex);

	return wakeref;
}
769
/*
 * Release the PPS mutex and the wakeref taken by pps_lock().  Always
 * returns 0 so with_pps_lock()'s loop condition terminates.
 */
static intel_wakeref_t
pps_unlock(struct intel_dp *intel_dp, intel_wakeref_t wakeref)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	mutex_unlock(&dev_priv->pps_mutex);
	intel_display_power_put(dev_priv,
				intel_aux_power_domain(dp_to_dig_port(intel_dp)),
				wakeref);
	return 0;
}
781
/*
 * Run the following block with the PPS lock held; relies on pps_unlock()
 * returning 0 to exit the for-loop after one iteration.
 */
#define with_pps_lock(dp, wf)						\
	for ((wf) = pps_lock(dp); (wf); (wf) = pps_unlock((dp), (wf)))
784
/*
 * Kick the panel power sequencer on VLV/CHV by briefly toggling
 * DP_PORT_EN on intel_dp->pps_pipe with a minimal 1-lane, training
 * pattern 1 port configuration.  The port must currently be disabled;
 * the pipe's PLL is force-enabled around the toggle if it is not
 * already running.
 */
static void
vlv_power_sequencer_kick(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	enum pipe pipe = intel_dp->pps_pipe;
	bool pll_enabled, release_cl_override = false;
	enum dpio_phy phy = DPIO_PHY(pipe);
	enum dpio_channel ch = vlv_pipe_to_channel(pipe);
	u32 DP;

	if (drm_WARN(&dev_priv->drm,
		     intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN,
		     "skipping pipe %c power sequencer kick due to [ENCODER:%d:%s] being active\n",
		     pipe_name(pipe), dig_port->base.base.base.id,
		     dig_port->base.base.name))
		return;

	drm_dbg_kms(&dev_priv->drm,
		    "kicking pipe %c power sequencer for [ENCODER:%d:%s]\n",
		    pipe_name(pipe), dig_port->base.base.base.id,
		    dig_port->base.base.name);

	/* Build a minimal port config: 1 lane, default vswing, TP1 */
	DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;
	DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	DP |= DP_PORT_WIDTH(1);
	DP |= DP_LINK_TRAIN_PAT_1;

	if (IS_CHERRYVIEW(dev_priv))
		DP |= DP_PIPE_SEL_CHV(pipe);
	else
		DP |= DP_PIPE_SEL(pipe);

	pll_enabled = intel_de_read(dev_priv, DPLL(pipe)) & DPLL_VCO_ENABLE;

	/*
	 * The port needs a running PLL for the toggle to take effect;
	 * force it on (on CHV after powergating the common lane) when
	 * it is not already enabled.
	 */
	if (!pll_enabled) {
		release_cl_override = IS_CHERRYVIEW(dev_priv) &&
			!chv_phy_powergate_ch(dev_priv, phy, ch, true);

		if (vlv_force_pll_on(dev_priv, pipe, IS_CHERRYVIEW(dev_priv) ?
				     &chv_dpll[0].dpll : &vlv_dpll[0].dpll)) {
			drm_err(&dev_priv->drm,
				"Failed to force on pll for pipe %c!\n",
				pipe_name(pipe));
			return;
		}
	}

	/* Write the config, then pulse DP_PORT_EN on and off again */
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_de_write(dev_priv, intel_dp->output_reg, DP | DP_PORT_EN);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_de_write(dev_priv, intel_dp->output_reg, DP & ~DP_PORT_EN);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	if (!pll_enabled) {
		vlv_force_pll_off(dev_priv, pipe);

		if (release_cl_override)
			chv_phy_powergate_ch(dev_priv, phy, ch, false);
	}
}
862
/*
 * Find a pipe (A or B) whose power sequencer is not claimed by any DP
 * encoder, either as an eDP pps_pipe or as an active pipe.  Returns
 * INVALID_PIPE when both are taken.
 */
static enum pipe vlv_find_free_pps(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	unsigned int pipes = (1 << PIPE_A) | (1 << PIPE_B);

	for_each_intel_dp(&dev_priv->drm, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		if (encoder->type == INTEL_OUTPUT_EDP) {
			/* eDP may only be active on its own pps_pipe */
			drm_WARN_ON(&dev_priv->drm,
				    intel_dp->active_pipe != INVALID_PIPE &&
				    intel_dp->active_pipe !=
				    intel_dp->pps_pipe);

			if (intel_dp->pps_pipe != INVALID_PIPE)
				pipes &= ~(1 << intel_dp->pps_pipe);
		} else {
			/* Non-eDP encoders never own a pps_pipe */
			drm_WARN_ON(&dev_priv->drm,
				    intel_dp->pps_pipe != INVALID_PIPE);

			if (intel_dp->active_pipe != INVALID_PIPE)
				pipes &= ~(1 << intel_dp->active_pipe);
		}
	}

	if (pipes == 0)
		return INVALID_PIPE;

	return ffs(pipes) - 1;
}
897
/*
 * Return (allocating if necessary) the power sequencer pipe for an eDP
 * port on VLV/CHV.  A free pipe is preferred; otherwise pipe A is stolen.
 * Newly assigned sequencers are initialized and kicked into a known
 * state.  Caller must hold pps_mutex.
 */
static enum pipe
vlv_power_sequencer_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	enum pipe pipe;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* We should never land here with regular DP ports */
	drm_WARN_ON(&dev_priv->drm, !intel_dp_is_edp(intel_dp));

	drm_WARN_ON(&dev_priv->drm, intel_dp->active_pipe != INVALID_PIPE &&
		    intel_dp->active_pipe != intel_dp->pps_pipe);

	if (intel_dp->pps_pipe != INVALID_PIPE)
		return intel_dp->pps_pipe;

	pipe = vlv_find_free_pps(dev_priv);

	/*
	 * Didn't find one — last resort: steal pipe A's sequencer from
	 * whoever currently has it.
	 */
	if (drm_WARN_ON(&dev_priv->drm, pipe == INVALID_PIPE))
		pipe = PIPE_A;

	vlv_steal_power_sequencer(dev_priv, pipe);
	intel_dp->pps_pipe = pipe;

	drm_dbg_kms(&dev_priv->drm,
		    "picked pipe %c power sequencer for [ENCODER:%d:%s]\n",
		    pipe_name(intel_dp->pps_pipe),
		    dig_port->base.base.base.id,
		    dig_port->base.base.name);

	/* init power sequencer on this pipe and port */
	intel_dp_init_panel_power_sequencer(intel_dp);
	intel_dp_init_panel_power_sequencer_registers(intel_dp, true);

	/*
	 * Kick the newly assigned sequencer so it locks onto this port
	 * (see vlv_power_sequencer_kick()).
	 */
	vlv_power_sequencer_kick(intel_dp);

	return intel_dp->pps_pipe;
}
946
/*
 * On GEN9 LP the PPS index follows the VBT backlight controller.  If a
 * PPS reset is pending (pps_reset set), reinitialize the sequencer
 * registers first.  Caller must hold pps_mutex.
 */
static int
bxt_power_sequencer_idx(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int backlight_controller = dev_priv->vbt.backlight.controller;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* We should never land here with regular DP ports */
	drm_WARN_ON(&dev_priv->drm, !intel_dp_is_edp(intel_dp));

	if (!intel_dp->pps_reset)
		return backlight_controller;

	intel_dp->pps_reset = false;

	/*
	 * Only the HW needs to be reprogrammed, the SW state is fixed and
	 * has been setup during connector init.
	 */
	intel_dp_init_panel_power_sequencer_registers(intel_dp, false);

	return backlight_controller;
}
971
/* Predicate used by vlv_initial_pps_pipe() to filter candidate pipes */
typedef bool (*vlv_pipe_check)(struct drm_i915_private *dev_priv,
			       enum pipe pipe);

/* Pipe's panel power is currently on */
static bool vlv_pipe_has_pp_on(struct drm_i915_private *dev_priv,
			       enum pipe pipe)
{
	return intel_de_read(dev_priv, PP_STATUS(pipe)) & PP_ON;
}

/* Pipe's VDD force is currently asserted */
static bool vlv_pipe_has_vdd_on(struct drm_i915_private *dev_priv,
				enum pipe pipe)
{
	return intel_de_read(dev_priv, PP_CONTROL(pipe)) & EDP_FORCE_VDD;
}

/* Accept any pipe */
static bool vlv_pipe_any(struct drm_i915_private *dev_priv,
			 enum pipe pipe)
{
	return true;
}
992
/*
 * Find the pipe (A or B) whose PPS is already wired to @port (per the
 * PANEL_PORT_SELECT field) and which also satisfies @pipe_check.
 * Returns INVALID_PIPE when none matches.
 */
static enum pipe
vlv_initial_pps_pipe(struct drm_i915_private *dev_priv,
		     enum port port,
		     vlv_pipe_check pipe_check)
{
	enum pipe pipe;

	for (pipe = PIPE_A; pipe <= PIPE_B; pipe++) {
		u32 port_sel = intel_de_read(dev_priv, PP_ON_DELAYS(pipe)) &
			PANEL_PORT_SELECT_MASK;

		if (port_sel != PANEL_PORT_SELECT_VLV(port))
			continue;

		if (!pipe_check(dev_priv, pipe))
			continue;

		return pipe;
	}

	return INVALID_PIPE;
}
1015
/*
 * At driver load, adopt a power sequencer the BIOS already attached to
 * this port — preferring one with panel power on, then one with VDD on,
 * then any matching one.  Caller must hold pps_mutex.
 */
static void
vlv_initial_power_sequencer_setup(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	enum port port = dig_port->base.port;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* try to find a pipe with this port selected, in decreasing
	 * preference order: on > vdd-on > any */
	intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
						  vlv_pipe_has_pp_on);

	if (intel_dp->pps_pipe == INVALID_PIPE)
		intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
							  vlv_pipe_has_vdd_on);

	if (intel_dp->pps_pipe == INVALID_PIPE)
		intel_dp->pps_pipe = vlv_initial_pps_pipe(dev_priv, port,
							  vlv_pipe_any);

	/* didn't find one? just let vlv_power_sequencer_pipe() pick one when needed */
	if (intel_dp->pps_pipe == INVALID_PIPE) {
		drm_dbg_kms(&dev_priv->drm,
			    "no initial power sequencer for [ENCODER:%d:%s]\n",
			    dig_port->base.base.base.id,
			    dig_port->base.base.name);
		return;
	}

	drm_dbg_kms(&dev_priv->drm,
		    "initial power sequencer for [ENCODER:%d:%s]: pipe %c\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name,
		    pipe_name(intel_dp->pps_pipe));

	intel_dp_init_panel_power_sequencer(intel_dp);
	intel_dp_init_panel_power_sequencer_registers(intel_dp, false);
}
1056
/*
 * Invalidate every eDP encoder's PPS association after a power-down that
 * clobbers the PPS registers: on GEN9 LP mark them for reprogramming
 * (pps_reset), on VLV/CHV forget the pipe assignment entirely so it is
 * re-picked on next use.  Only meaningful on VLV/CHV/GEN9 LP.
 */
void intel_power_sequencer_reset(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;

	if (drm_WARN_ON(&dev_priv->drm,
			!(IS_VALLEYVIEW(dev_priv) ||
			  IS_CHERRYVIEW(dev_priv) ||
			  IS_GEN9_LP(dev_priv))))
		return;

	for_each_intel_dp(&dev_priv->drm, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		/* No port should be active at this point */
		drm_WARN_ON(&dev_priv->drm,
			    intel_dp->active_pipe != INVALID_PIPE);

		if (encoder->type != INTEL_OUTPUT_EDP)
			continue;

		if (IS_GEN9_LP(dev_priv))
			intel_dp->pps_reset = true;
		else
			intel_dp->pps_pipe = INVALID_PIPE;
	}
}
1092
/* The set of PPS registers for one panel power sequencer instance */
struct pps_registers {
	i915_reg_t pp_ctrl;	/* panel power control */
	i915_reg_t pp_stat;	/* panel power status */
	i915_reg_t pp_on;	/* power-on delays */
	i915_reg_t pp_off;	/* power-off delays */
	i915_reg_t pp_div;	/* divisor; INVALID_MMIO_REG where absent */
};
1100
/*
 * Fill @regs with the PPS registers for this panel, selecting the PPS
 * instance per platform (BXT backlight controller, VLV/CHV pipe, else 0).
 * pp_div does not exist on GEN9 LP or CNP+ PCHs.
 */
static void intel_pps_get_registers(struct intel_dp *intel_dp,
				    struct pps_registers *regs)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int pps_idx = 0;

	memset(regs, 0, sizeof(*regs));

	if (IS_GEN9_LP(dev_priv))
		pps_idx = bxt_power_sequencer_idx(intel_dp);
	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		pps_idx = vlv_power_sequencer_pipe(intel_dp);

	regs->pp_ctrl = PP_CONTROL(pps_idx);
	regs->pp_stat = PP_STATUS(pps_idx);
	regs->pp_on = PP_ON_DELAYS(pps_idx);
	regs->pp_off = PP_OFF_DELAYS(pps_idx);

	/* Cycle delay moved from PP_DIVISOR to PP_CONTROL */
	if (IS_GEN9_LP(dev_priv) || INTEL_PCH_TYPE(dev_priv) >= PCH_CNP)
		regs->pp_div = INVALID_MMIO_REG;
	else
		regs->pp_div = PP_DIVISOR(pps_idx);
}
1125
1126static i915_reg_t
1127_pp_ctrl_reg(struct intel_dp *intel_dp)
1128{
1129 struct pps_registers regs;
1130
1131 intel_pps_get_registers(intel_dp, ®s);
1132
1133 return regs.pp_ctrl;
1134}
1135
1136static i915_reg_t
1137_pp_stat_reg(struct intel_dp *intel_dp)
1138{
1139 struct pps_registers regs;
1140
1141 intel_pps_get_registers(intel_dp, ®s);
1142
1143 return regs.pp_stat;
1144}
1145
1146
1147
/*
 * Reboot notifier for eDP on VLV/CHV: on SYS_RESTART, reprogram the
 * panel power cycle delay to its hardware maximum (0x1f) and unlock the
 * PPS registers, then wait out the cycle delay, so the BIOS finds the
 * panel in a sane state.
 */
static int edp_notify_handler(struct notifier_block *this, unsigned long code,
			      void *unused)
{
	struct intel_dp *intel_dp = container_of(this, typeof(* intel_dp),
						 edp_notifier);
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	intel_wakeref_t wakeref;

	if (!intel_dp_is_edp(intel_dp) || code != SYS_RESTART)
		return 0;

	with_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
			enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);
			i915_reg_t pp_ctrl_reg, pp_div_reg;
			u32 pp_div;

			pp_ctrl_reg = PP_CONTROL(pipe);
			pp_div_reg = PP_DIVISOR(pipe);
			pp_div = intel_de_read(dev_priv, pp_div_reg);
			pp_div &= PP_REFERENCE_DIVIDER_MASK;

			/* 0x1F write to PP_DIV_REG sets max cycle delay */
			intel_de_write(dev_priv, pp_div_reg, pp_div | 0x1F);
			intel_de_write(dev_priv, pp_ctrl_reg,
				       PANEL_UNLOCK_REGS);
			msleep(intel_dp->panel_power_cycle_delay);
		}
	}

	return 0;
}
1180
/*
 * Is panel power (PP_ON) currently asserted?  On VLV/CHV an unassigned
 * pps_pipe means no sequencer, hence no power.  Caller must hold
 * pps_mutex.
 */
static bool edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	lockdep_assert_held(&dev_priv->pps_mutex);

	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
	    intel_dp->pps_pipe == INVALID_PIPE)
		return false;

	return (intel_de_read(dev_priv, _pp_stat_reg(intel_dp)) & PP_ON) != 0;
}
1193
/*
 * Is VDD force (EDP_FORCE_VDD) currently asserted?  Same VLV/CHV
 * unassigned-sequencer caveat as edp_have_panel_power().  Caller must
 * hold pps_mutex.
 */
static bool edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	lockdep_assert_held(&dev_priv->pps_mutex);

	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
	    intel_dp->pps_pipe == INVALID_PIPE)
		return false;

	return intel_de_read(dev_priv, _pp_ctrl_reg(intel_dp)) & EDP_FORCE_VDD;
}
1206
/*
 * Warn (with PPS register dump) if AUX is about to be used on an eDP
 * panel that has neither panel power nor VDD asserted.
 */
static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!intel_dp_is_edp(intel_dp))
		return;

	if (!edp_have_panel_power(intel_dp) && !edp_have_panel_vdd(intel_dp)) {
		drm_WARN(&dev_priv->drm, 1,
			 "eDP powered off while attempting aux channel communication.\n");
		drm_dbg_kms(&dev_priv->drm, "Status 0x%08x Control 0x%08x\n",
			    intel_de_read(dev_priv, _pp_stat_reg(intel_dp)),
			    intel_de_read(dev_priv, _pp_ctrl_reg(intel_dp)));
	}
}
1223
/*
 * Wait (up to 10 ms, woken via gmbus_wait_queue) for the AUX channel's
 * SEND_BUSY bit to clear and return the final control register value.
 * Logs an error on timeout but still returns the last status read.
 */
static u32
intel_dp_aux_wait_done(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	i915_reg_t ch_ctl = intel_dp->aux_ch_ctl_reg(intel_dp);
	const unsigned int timeout_ms = 10;
	u32 status;
	bool done;

#define C (((status = intel_uncore_read_notrace(&i915->uncore, ch_ctl)) & DP_AUX_CH_CTL_SEND_BUSY) == 0)
	done = wait_event_timeout(i915->gmbus_wait_queue, C,
				  msecs_to_jiffies_timeout(timeout_ms));

	/* just trace the final value */
	trace_i915_reg_rw(false, ch_ctl, status, sizeof(status), true);

	if (!done)
		drm_err(&i915->drm,
			"%s: did not complete or timeout within %ums (status 0x%08x)\n",
			intel_dp->aux.name, timeout_ms, status);
#undef C

	return status;
}
1248
/*
 * AUX clock divider for g4x-class hardware: rawclk / 2 MHz.  Only one
 * divider is offered (index 0); further indices return 0 to stop the
 * caller's retry-with-next-divider loop.
 */
static u32 g4x_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (index)
		return 0;

	/*
	 * The clock divider is based off the hrawclk, and would like to run at
	 * 2MHz.  So, take the hrawclk value and divide by 2000 and use that
	 */
	return DIV_ROUND_CLOSEST(RUNTIME_INFO(dev_priv)->rawclk_freq, 2000);
}
1262
1263static u32 ilk_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
1264{
1265 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1266 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1267 u32 freq;
1268
1269 if (index)
1270 return 0;
1271
1272
1273
1274
1275
1276
1277 if (dig_port->aux_ch == AUX_CH_A)
1278 freq = dev_priv->cdclk.hw.cdclk;
1279 else
1280 freq = RUNTIME_INFO(dev_priv)->rawclk_freq;
1281 return DIV_ROUND_CLOSEST(freq, 2000);
1282}
1283
1284static u32 hsw_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
1285{
1286 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1287 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1288
1289 if (dig_port->aux_ch != AUX_CH_A && HAS_PCH_LPT_H(dev_priv)) {
1290
1291 switch (index) {
1292 case 0: return 63;
1293 case 1: return 72;
1294 default: return 0;
1295 }
1296 }
1297
1298 return ilk_get_aux_clock_divider(intel_dp, index);
1299}
1300
1301static u32 skl_get_aux_clock_divider(struct intel_dp *intel_dp, int index)
1302{
1303
1304
1305
1306
1307
1308 return index ? 0 : 1;
1309}
1310
1311static u32 g4x_get_aux_send_ctl(struct intel_dp *intel_dp,
1312 int send_bytes,
1313 u32 aux_clock_divider)
1314{
1315 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1316 struct drm_i915_private *dev_priv =
1317 to_i915(dig_port->base.base.dev);
1318 u32 precharge, timeout;
1319
1320 if (IS_GEN(dev_priv, 6))
1321 precharge = 3;
1322 else
1323 precharge = 5;
1324
1325 if (IS_BROADWELL(dev_priv))
1326 timeout = DP_AUX_CH_CTL_TIME_OUT_600us;
1327 else
1328 timeout = DP_AUX_CH_CTL_TIME_OUT_400us;
1329
1330 return DP_AUX_CH_CTL_SEND_BUSY |
1331 DP_AUX_CH_CTL_DONE |
1332 DP_AUX_CH_CTL_INTERRUPT |
1333 DP_AUX_CH_CTL_TIME_OUT_ERROR |
1334 timeout |
1335 DP_AUX_CH_CTL_RECEIVE_ERROR |
1336 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
1337 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
1338 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT);
1339}
1340
1341static u32 skl_get_aux_send_ctl(struct intel_dp *intel_dp,
1342 int send_bytes,
1343 u32 unused)
1344{
1345 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1346 struct drm_i915_private *i915 =
1347 to_i915(dig_port->base.base.dev);
1348 enum phy phy = intel_port_to_phy(i915, dig_port->base.port);
1349 u32 ret;
1350
1351 ret = DP_AUX_CH_CTL_SEND_BUSY |
1352 DP_AUX_CH_CTL_DONE |
1353 DP_AUX_CH_CTL_INTERRUPT |
1354 DP_AUX_CH_CTL_TIME_OUT_ERROR |
1355 DP_AUX_CH_CTL_TIME_OUT_MAX |
1356 DP_AUX_CH_CTL_RECEIVE_ERROR |
1357 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
1358 DP_AUX_CH_CTL_FW_SYNC_PULSE_SKL(32) |
1359 DP_AUX_CH_CTL_SYNC_PULSE_SKL(32);
1360
1361 if (intel_phy_is_tc(i915, phy) &&
1362 dig_port->tc_mode == TC_PORT_TBT_ALT)
1363 ret |= DP_AUX_CH_CTL_TBT_IO;
1364
1365 return ret;
1366}
1367
/*
 * Perform one raw AUX channel transfer: pack @send into the data
 * registers, kick off the transaction, retry per clock divider until the
 * hardware reports DONE, then unpack the reply into @recv.
 *
 * Returns the number of bytes received, or a negative error code:
 * -EBUSY (channel stuck busy / never done), -E2BIG (payload too large),
 * -EIO (receive error), -ETIMEDOUT (no sink response).
 */
static int
intel_dp_aux_xfer(struct intel_dp *intel_dp,
		  const u8 *send, int send_bytes,
		  u8 *recv, int recv_size,
		  u32 aux_send_ctl_flags)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *i915 =
			to_i915(dig_port->base.base.dev);
	struct intel_uncore *uncore = &i915->uncore;
	enum phy phy = intel_port_to_phy(i915, dig_port->base.port);
	bool is_tc_port = intel_phy_is_tc(i915, phy);
	i915_reg_t ch_ctl, ch_data[5];
	u32 aux_clock_divider;
	enum intel_display_power_domain aux_domain;
	intel_wakeref_t aux_wakeref;
	intel_wakeref_t pps_wakeref;
	int i, ret, recv_bytes;
	int try, clock = 0;
	u32 status;
	bool vdd;

	ch_ctl = intel_dp->aux_ch_ctl_reg(intel_dp);
	for (i = 0; i < ARRAY_SIZE(ch_data); i++)
		ch_data[i] = intel_dp->aux_ch_data_reg(intel_dp, i);

	/* Type-C ports: hold the TC lock so the PHY mode can't change mid-xfer. */
	if (is_tc_port)
		intel_tc_port_lock(dig_port);

	aux_domain = intel_aux_power_domain(dig_port);

	aux_wakeref = intel_display_power_get(i915, aux_domain);
	pps_wakeref = pps_lock(intel_dp);

	/*
	 * On eDP, force panel VDD on for the duration of the transfer.
	 * edp_panel_vdd_on() tells us whether we enabled it here (and thus
	 * must disable it again in the "out" path below).
	 */
	vdd = edp_panel_vdd_on(intel_dp);

	/*
	 * Restrict CPU wakeup latency while the transaction is in flight so
	 * the timing-sensitive AUX protocol isn't disturbed (restored to
	 * PM_QOS_DEFAULT_VALUE on the way out).
	 */
	cpu_latency_qos_update_request(&i915->pm_qos, 0);

	intel_dp_check_edp(intel_dp);

	/* Wait for any previous AUX transaction to finish (3 x 1 ms polls). */
	for (try = 0; try < 3; try++) {
		status = intel_uncore_read_notrace(uncore, ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}
	/* The polls above were untraced; trace only the final status value. */
	trace_i915_reg_rw(false, ch_ctl, status, sizeof(status), true);

	if (try == 3) {
		const u32 status = intel_uncore_read(uncore, ch_ctl);

		/* Only warn once per distinct stuck-busy status value. */
		if (status != intel_dp->aux_busy_last_status) {
			drm_WARN(&i915->drm, 1,
				 "%s: not started (status 0x%08x)\n",
				 intel_dp->aux.name, status);
			intel_dp->aux_busy_last_status = status;
		}

		ret = -EBUSY;
		goto out;
	}

	/* Only 5 data registers (ch_data[]), i.e. at most 20 payload bytes. */
	if (drm_WARN_ON(&i915->drm, send_bytes > 20 || recv_size > 20)) {
		ret = -E2BIG;
		goto out;
	}

	/* Try each divider the platform offers until the transfer succeeds. */
	while ((aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, clock++))) {
		u32 send_ctl = intel_dp->get_aux_send_ctl(intel_dp,
							  send_bytes,
							  aux_clock_divider);

		send_ctl |= aux_send_ctl_flags;

		/* Up to 5 attempts per divider (DP spec mandates >= 3 retries). */
		for (try = 0; try < 5; try++) {
			/* Load the send data into the aux channel data registers */
			for (i = 0; i < send_bytes; i += 4)
				intel_uncore_write(uncore,
						   ch_data[i >> 2],
						   intel_dp_pack_aux(send + i,
								     send_bytes - i));

			/* Send the command and wait for it to complete */
			intel_uncore_write(uncore, ch_ctl, send_ctl);

			status = intel_dp_aux_wait_done(intel_dp);

			/* Clear done status and any errors (write-1-to-clear). */
			intel_uncore_write(uncore,
					   ch_ctl,
					   status |
					   DP_AUX_CH_CTL_DONE |
					   DP_AUX_CH_CTL_TIME_OUT_ERROR |
					   DP_AUX_CH_CTL_RECEIVE_ERROR);

			/*
			 * A ~400us delay is required between attempts after an
			 * error.  Hardware timeouts already satisfy it, so retry
			 * immediately; receive errors sleep explicitly first.
			 */
			if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR)
				continue;

			if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
				usleep_range(400, 500);
				continue;
			}
			if (status & DP_AUX_CH_CTL_DONE)
				goto done;
		}
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		drm_err(&i915->drm, "%s: not done (status 0x%08x)\n",
			intel_dp->aux.name, status);
		ret = -EBUSY;
		goto out;
	}

done:
	/* A receive error on the final attempt is a hard I/O failure. */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		drm_err(&i915->drm, "%s: receive error (status 0x%08x)\n",
			intel_dp->aux.name, status);
		ret = -EIO;
		goto out;
	}

	/*
	 * Timeouts are expected when no sink is connected, so log them at
	 * debug level only rather than filling the kernel log.
	 */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		drm_dbg_kms(&i915->drm, "%s: timeout (status 0x%08x)\n",
			    intel_dp->aux.name, status);
		ret = -ETIMEDOUT;
		goto out;
	}

	/* Number of reply bytes, as reported by the hardware. */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);

	/*
	 * Message sizes of 0 or more than 20 are invalid; we don't know
	 * what happened, so return -EBUSY and let the drm core retry.
	 */
	if (recv_bytes == 0 || recv_bytes > 20) {
		drm_dbg_kms(&i915->drm,
			    "%s: Forbidden recv_bytes = %d on aux transaction\n",
			    intel_dp->aux.name, recv_bytes);
		ret = -EBUSY;
		goto out;
	}

	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	/* Unpack the reply from the data registers into the caller's buffer. */
	for (i = 0; i < recv_bytes; i += 4)
		intel_dp_unpack_aux(intel_uncore_read(uncore, ch_data[i >> 2]),
				    recv + i, recv_bytes - i);

	ret = recv_bytes;
out:
	/* Undo everything acquired above, in reverse order. */
	cpu_latency_qos_update_request(&i915->pm_qos, PM_QOS_DEFAULT_VALUE);

	if (vdd)
		edp_panel_vdd_off(intel_dp, false);

	pps_unlock(intel_dp, pps_wakeref);
	intel_display_power_put_async(i915, aux_domain, aux_wakeref);

	if (is_tc_port)
		intel_tc_port_unlock(dig_port);

	return ret;
}
1560
1561#define BARE_ADDRESS_SIZE 3
1562#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
1563
/*
 * Build the 4-byte AUX request header: 4-bit request opcode plus 20-bit
 * address packed into the first three bytes, then the length byte
 * (encoded as size - 1 per the AUX transaction syntax).
 */
static void
intel_dp_aux_header(u8 txbuf[HEADER_SIZE],
		    const struct drm_dp_aux_msg *msg)
{
	txbuf[0] = (msg->request << 4) | ((msg->address >> 16) & 0xf);
	txbuf[1] = (msg->address >> 8) & 0xff;
	txbuf[2] = msg->address & 0xff;
	txbuf[3] = msg->size - 1;
}
1573
1574static u32 intel_dp_aux_xfer_flags(const struct drm_dp_aux_msg *msg)
1575{
1576
1577
1578
1579
1580
1581 if ((msg->request & ~DP_AUX_I2C_MOT) == DP_AUX_NATIVE_WRITE &&
1582 msg->address == DP_AUX_HDCP_AKSV)
1583 return DP_AUX_CH_CTL_AUX_AKSV_SELECT;
1584
1585 return 0;
1586}
1587
/*
 * drm_dp_aux.transfer() hook: translate a drm AUX message into the fixed
 * 20-byte tx/rx buffers used by intel_dp_aux_xfer() and decode the reply.
 * Returns the payload byte count on success or a negative error code.
 */
static ssize_t
intel_dp_aux_transfer(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
{
	struct intel_dp *intel_dp = container_of(aux, struct intel_dp, aux);
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 txbuf[20], rxbuf[20];
	size_t txsize, rxsize;
	u32 flags = intel_dp_aux_xfer_flags(msg);
	int ret;

	intel_dp_aux_header(txbuf, msg);

	switch (msg->request & ~DP_AUX_I2C_MOT) {
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
		/* Zero-size messages are address-only (3 header bytes). */
		txsize = msg->size ? HEADER_SIZE + msg->size : BARE_ADDRESS_SIZE;
		rxsize = 2;

		if (drm_WARN_ON(&i915->drm, txsize > 20))
			return -E2BIG;

		/* A buffer without a size (or vice versa) is a caller bug. */
		drm_WARN_ON(&i915->drm, !msg->buffer != !msg->size);

		if (msg->buffer)
			memcpy(txbuf + HEADER_SIZE, msg->buffer, msg->size);

		ret = intel_dp_aux_xfer(intel_dp, txbuf, txsize,
					rxbuf, rxsize, flags);
		if (ret > 0) {
			/* Reply code lives in the high nibble of byte 0. */
			msg->reply = rxbuf[0] >> 4;

			if (ret > 1) {
				/* Sink reported how many bytes it accepted. */
				ret = clamp_t(int, rxbuf[1], 0, msg->size);
			} else {
				/* No count byte: assume the full payload went out. */
				ret = msg->size;
			}
		}
		break;

	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ:
		txsize = msg->size ? HEADER_SIZE : BARE_ADDRESS_SIZE;
		rxsize = msg->size + 1; /* +1 for the reply byte */

		if (drm_WARN_ON(&i915->drm, rxsize > 20))
			return -E2BIG;

		ret = intel_dp_aux_xfer(intel_dp, txbuf, txsize,
					rxbuf, rxsize, flags);
		if (ret > 0) {
			msg->reply = rxbuf[0] >> 4;
			/*
			 * Copy the data unconditionally; the caller is
			 * expected to check msg->reply before using it.
			 * Strip the reply byte from the returned count.
			 */
			ret--;
			memcpy(msg->buffer, rxbuf + 1, ret);
		}
		break;

	default:
		ret = -EINVAL;
		break;
	}

	return ret;
}
1660
1661
1662static i915_reg_t g4x_aux_ctl_reg(struct intel_dp *intel_dp)
1663{
1664 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1665 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1666 enum aux_ch aux_ch = dig_port->aux_ch;
1667
1668 switch (aux_ch) {
1669 case AUX_CH_B:
1670 case AUX_CH_C:
1671 case AUX_CH_D:
1672 return DP_AUX_CH_CTL(aux_ch);
1673 default:
1674 MISSING_CASE(aux_ch);
1675 return DP_AUX_CH_CTL(AUX_CH_B);
1676 }
1677}
1678
1679static i915_reg_t g4x_aux_data_reg(struct intel_dp *intel_dp, int index)
1680{
1681 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1682 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1683 enum aux_ch aux_ch = dig_port->aux_ch;
1684
1685 switch (aux_ch) {
1686 case AUX_CH_B:
1687 case AUX_CH_C:
1688 case AUX_CH_D:
1689 return DP_AUX_CH_DATA(aux_ch, index);
1690 default:
1691 MISSING_CASE(aux_ch);
1692 return DP_AUX_CH_DATA(AUX_CH_B, index);
1693 }
1694}
1695
1696static i915_reg_t ilk_aux_ctl_reg(struct intel_dp *intel_dp)
1697{
1698 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1699 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1700 enum aux_ch aux_ch = dig_port->aux_ch;
1701
1702 switch (aux_ch) {
1703 case AUX_CH_A:
1704 return DP_AUX_CH_CTL(aux_ch);
1705 case AUX_CH_B:
1706 case AUX_CH_C:
1707 case AUX_CH_D:
1708 return PCH_DP_AUX_CH_CTL(aux_ch);
1709 default:
1710 MISSING_CASE(aux_ch);
1711 return DP_AUX_CH_CTL(AUX_CH_A);
1712 }
1713}
1714
1715static i915_reg_t ilk_aux_data_reg(struct intel_dp *intel_dp, int index)
1716{
1717 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1718 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1719 enum aux_ch aux_ch = dig_port->aux_ch;
1720
1721 switch (aux_ch) {
1722 case AUX_CH_A:
1723 return DP_AUX_CH_DATA(aux_ch, index);
1724 case AUX_CH_B:
1725 case AUX_CH_C:
1726 case AUX_CH_D:
1727 return PCH_DP_AUX_CH_DATA(aux_ch, index);
1728 default:
1729 MISSING_CASE(aux_ch);
1730 return DP_AUX_CH_DATA(AUX_CH_A, index);
1731 }
1732}
1733
1734static i915_reg_t skl_aux_ctl_reg(struct intel_dp *intel_dp)
1735{
1736 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1737 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1738 enum aux_ch aux_ch = dig_port->aux_ch;
1739
1740 switch (aux_ch) {
1741 case AUX_CH_A:
1742 case AUX_CH_B:
1743 case AUX_CH_C:
1744 case AUX_CH_D:
1745 case AUX_CH_E:
1746 case AUX_CH_F:
1747 case AUX_CH_G:
1748 return DP_AUX_CH_CTL(aux_ch);
1749 default:
1750 MISSING_CASE(aux_ch);
1751 return DP_AUX_CH_CTL(AUX_CH_A);
1752 }
1753}
1754
1755static i915_reg_t skl_aux_data_reg(struct intel_dp *intel_dp, int index)
1756{
1757 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1758 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1759 enum aux_ch aux_ch = dig_port->aux_ch;
1760
1761 switch (aux_ch) {
1762 case AUX_CH_A:
1763 case AUX_CH_B:
1764 case AUX_CH_C:
1765 case AUX_CH_D:
1766 case AUX_CH_E:
1767 case AUX_CH_F:
1768 case AUX_CH_G:
1769 return DP_AUX_CH_DATA(aux_ch, index);
1770 default:
1771 MISSING_CASE(aux_ch);
1772 return DP_AUX_CH_DATA(AUX_CH_A, index);
1773 }
1774}
1775
/* Free resources allocated by intel_dp_aux_init() (the kasprintf'd name). */
static void
intel_dp_aux_fini(struct intel_dp *intel_dp)
{
	kfree(intel_dp->aux.name);
}
1781
1782static void
1783intel_dp_aux_init(struct intel_dp *intel_dp)
1784{
1785 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1786 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1787 struct intel_encoder *encoder = &dig_port->base;
1788
1789 if (INTEL_GEN(dev_priv) >= 9) {
1790 intel_dp->aux_ch_ctl_reg = skl_aux_ctl_reg;
1791 intel_dp->aux_ch_data_reg = skl_aux_data_reg;
1792 } else if (HAS_PCH_SPLIT(dev_priv)) {
1793 intel_dp->aux_ch_ctl_reg = ilk_aux_ctl_reg;
1794 intel_dp->aux_ch_data_reg = ilk_aux_data_reg;
1795 } else {
1796 intel_dp->aux_ch_ctl_reg = g4x_aux_ctl_reg;
1797 intel_dp->aux_ch_data_reg = g4x_aux_data_reg;
1798 }
1799
1800 if (INTEL_GEN(dev_priv) >= 9)
1801 intel_dp->get_aux_clock_divider = skl_get_aux_clock_divider;
1802 else if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
1803 intel_dp->get_aux_clock_divider = hsw_get_aux_clock_divider;
1804 else if (HAS_PCH_SPLIT(dev_priv))
1805 intel_dp->get_aux_clock_divider = ilk_get_aux_clock_divider;
1806 else
1807 intel_dp->get_aux_clock_divider = g4x_get_aux_clock_divider;
1808
1809 if (INTEL_GEN(dev_priv) >= 9)
1810 intel_dp->get_aux_send_ctl = skl_get_aux_send_ctl;
1811 else
1812 intel_dp->get_aux_send_ctl = g4x_get_aux_send_ctl;
1813
1814 drm_dp_aux_init(&intel_dp->aux);
1815
1816
1817 intel_dp->aux.name = kasprintf(GFP_KERNEL, "AUX %c/port %c",
1818 aux_ch_name(dig_port->aux_ch),
1819 port_name(encoder->port));
1820 intel_dp->aux.transfer = intel_dp_aux_transfer;
1821}
1822
1823bool intel_dp_source_supports_hbr2(struct intel_dp *intel_dp)
1824{
1825 int max_rate = intel_dp->source_rates[intel_dp->num_source_rates - 1];
1826
1827 return max_rate >= 540000;
1828}
1829
1830bool intel_dp_source_supports_hbr3(struct intel_dp *intel_dp)
1831{
1832 int max_rate = intel_dp->source_rates[intel_dp->num_source_rates - 1];
1833
1834 return max_rate >= 810000;
1835}
1836
1837static void
1838intel_dp_set_clock(struct intel_encoder *encoder,
1839 struct intel_crtc_state *pipe_config)
1840{
1841 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1842 const struct dp_link_dpll *divisor = NULL;
1843 int i, count = 0;
1844
1845 if (IS_G4X(dev_priv)) {
1846 divisor = g4x_dpll;
1847 count = ARRAY_SIZE(g4x_dpll);
1848 } else if (HAS_PCH_SPLIT(dev_priv)) {
1849 divisor = pch_dpll;
1850 count = ARRAY_SIZE(pch_dpll);
1851 } else if (IS_CHERRYVIEW(dev_priv)) {
1852 divisor = chv_dpll;
1853 count = ARRAY_SIZE(chv_dpll);
1854 } else if (IS_VALLEYVIEW(dev_priv)) {
1855 divisor = vlv_dpll;
1856 count = ARRAY_SIZE(vlv_dpll);
1857 }
1858
1859 if (divisor && count) {
1860 for (i = 0; i < count; i++) {
1861 if (pipe_config->port_clock == divisor[i].clock) {
1862 pipe_config->dpll = divisor[i].dpll;
1863 pipe_config->clock_set = true;
1864 break;
1865 }
1866 }
1867 }
1868}
1869
1870static void snprintf_int_array(char *str, size_t len,
1871 const int *array, int nelem)
1872{
1873 int i;
1874
1875 str[0] = '\0';
1876
1877 for (i = 0; i < nelem; i++) {
1878 int r = snprintf(str, len, "%s%d", i ? ", " : "", array[i]);
1879 if (r >= len)
1880 return;
1881 str += r;
1882 len -= r;
1883 }
1884}
1885
1886static void intel_dp_print_rates(struct intel_dp *intel_dp)
1887{
1888 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1889 char str[128];
1890
1891 if (!drm_debug_enabled(DRM_UT_KMS))
1892 return;
1893
1894 snprintf_int_array(str, sizeof(str),
1895 intel_dp->source_rates, intel_dp->num_source_rates);
1896 drm_dbg_kms(&i915->drm, "source rates: %s\n", str);
1897
1898 snprintf_int_array(str, sizeof(str),
1899 intel_dp->sink_rates, intel_dp->num_sink_rates);
1900 drm_dbg_kms(&i915->drm, "sink rates: %s\n", str);
1901
1902 snprintf_int_array(str, sizeof(str),
1903 intel_dp->common_rates, intel_dp->num_common_rates);
1904 drm_dbg_kms(&i915->drm, "common rates: %s\n", str);
1905}
1906
1907int
1908intel_dp_max_link_rate(struct intel_dp *intel_dp)
1909{
1910 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1911 int len;
1912
1913 len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->max_link_rate);
1914 if (drm_WARN_ON(&i915->drm, len <= 0))
1915 return 162000;
1916
1917 return intel_dp->common_rates[len - 1];
1918}
1919
1920int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
1921{
1922 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1923 int i = intel_dp_rate_index(intel_dp->sink_rates,
1924 intel_dp->num_sink_rates, rate);
1925
1926 if (drm_WARN_ON(&i915->drm, i < 0))
1927 i = 0;
1928
1929 return i;
1930}
1931
1932void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
1933 u8 *link_bw, u8 *rate_select)
1934{
1935
1936 if (intel_dp->use_rate_select) {
1937 *link_bw = 0;
1938 *rate_select =
1939 intel_dp_rate_select(intel_dp, port_clock);
1940 } else {
1941 *link_bw = drm_dp_link_rate_to_bw_code(port_clock);
1942 *rate_select = 0;
1943 }
1944}
1945
1946static bool intel_dp_source_supports_fec(struct intel_dp *intel_dp,
1947 const struct intel_crtc_state *pipe_config)
1948{
1949 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1950
1951
1952 if (INTEL_GEN(dev_priv) >= 12)
1953 return true;
1954
1955 if (IS_GEN(dev_priv, 11) && pipe_config->cpu_transcoder != TRANSCODER_A)
1956 return true;
1957
1958 return false;
1959}
1960
/* FEC is usable only when both the source and the sink support it. */
static bool intel_dp_supports_fec(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *pipe_config)
{
	return intel_dp_source_supports_fec(intel_dp, pipe_config) &&
		drm_dp_sink_supports_fec(intel_dp->fec_capable);
}
1967
1968static bool intel_dp_supports_dsc(struct intel_dp *intel_dp,
1969 const struct intel_crtc_state *crtc_state)
1970{
1971 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1972
1973 if (!intel_dp_is_edp(intel_dp) && !crtc_state->fec_enable)
1974 return false;
1975
1976 return intel_dsc_source_support(encoder, crtc_state) &&
1977 drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd);
1978}
1979
1980static bool intel_dp_hdmi_ycbcr420(struct intel_dp *intel_dp,
1981 const struct intel_crtc_state *crtc_state)
1982{
1983 return crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
1984 (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444 &&
1985 intel_dp->dfp.ycbcr_444_to_420);
1986}
1987
1988static int intel_dp_hdmi_tmds_clock(struct intel_dp *intel_dp,
1989 const struct intel_crtc_state *crtc_state, int bpc)
1990{
1991 int clock = crtc_state->hw.adjusted_mode.crtc_clock * bpc / 8;
1992
1993 if (intel_dp_hdmi_ycbcr420(intel_dp, crtc_state))
1994 clock /= 2;
1995
1996 return clock;
1997}
1998
1999static bool intel_dp_hdmi_tmds_clock_valid(struct intel_dp *intel_dp,
2000 const struct intel_crtc_state *crtc_state, int bpc)
2001{
2002 int tmds_clock = intel_dp_hdmi_tmds_clock(intel_dp, crtc_state, bpc);
2003
2004 if (intel_dp->dfp.min_tmds_clock &&
2005 tmds_clock < intel_dp->dfp.min_tmds_clock)
2006 return false;
2007
2008 if (intel_dp->dfp.max_tmds_clock &&
2009 tmds_clock > intel_dp->dfp.max_tmds_clock)
2010 return false;
2011
2012 return true;
2013}
2014
/*
 * Whether the mode can run at @bpc over the HDMI DFP: both the generic
 * HDMI deep color rules and the DFP's TMDS clock limits must be met.
 */
static bool intel_dp_hdmi_deep_color_possible(struct intel_dp *intel_dp,
					      const struct intel_crtc_state *crtc_state,
					      int bpc)
{
	return intel_hdmi_deep_color_possible(crtc_state, bpc,
					      intel_dp->has_hdmi_sink,
					      intel_dp_hdmi_ycbcr420(intel_dp, crtc_state)) &&
		intel_dp_hdmi_tmds_clock_valid(intel_dp, crtc_state, bpc);
}
2025
2026static int intel_dp_max_bpp(struct intel_dp *intel_dp,
2027 const struct intel_crtc_state *crtc_state)
2028{
2029 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
2030 struct intel_connector *intel_connector = intel_dp->attached_connector;
2031 int bpp, bpc;
2032
2033 bpc = crtc_state->pipe_bpp / 3;
2034
2035 if (intel_dp->dfp.max_bpc)
2036 bpc = min_t(int, bpc, intel_dp->dfp.max_bpc);
2037
2038 if (intel_dp->dfp.min_tmds_clock) {
2039 for (; bpc >= 10; bpc -= 2) {
2040 if (intel_dp_hdmi_deep_color_possible(intel_dp, crtc_state, bpc))
2041 break;
2042 }
2043 }
2044
2045 bpp = bpc * 3;
2046 if (intel_dp_is_edp(intel_dp)) {
2047
2048 if (intel_connector->base.display_info.bpc == 0 &&
2049 dev_priv->vbt.edp.bpp && dev_priv->vbt.edp.bpp < bpp) {
2050 drm_dbg_kms(&dev_priv->drm,
2051 "clamping bpp for eDP panel to BIOS-provided %i\n",
2052 dev_priv->vbt.edp.bpp);
2053 bpp = dev_priv->vbt.edp.bpp;
2054 }
2055 }
2056
2057 return bpp;
2058}
2059
2060
/*
 * Apply DP compliance test overrides to the link config limits: pin the
 * bpp and/or the link rate + lane count to the values requested by the
 * test equipment.
 */
void
intel_dp_adjust_compliance_config(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  struct link_config_limits *limits)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	/* If a bpc was forced by the compliance test, pin min == max bpp. */
	if (intel_dp->compliance.test_data.bpc != 0) {
		int bpp = 3 * intel_dp->compliance.test_data.bpc;

		limits->min_bpp = limits->max_bpp = bpp;
		/* 6 bpc (18 bpp) tests must run without dithering. */
		pipe_config->dither_force_disable = bpp == 6 * 3;

		drm_dbg_kms(&i915->drm, "Setting pipe_bpp to %d\n", bpp);
	}

	/* Use rate/lane values requested by a link training test. */
	if (intel_dp->compliance.test_type == DP_TEST_LINK_TRAINING) {
		int index;

		/*
		 * Only honor the request if the rate/lane-count combination
		 * is actually valid for this link.
		 */
		if (intel_dp_link_params_valid(intel_dp, intel_dp->compliance.test_link_rate,
					       intel_dp->compliance.test_lane_count)) {
			index = intel_dp_rate_index(intel_dp->common_rates,
						    intel_dp->num_common_rates,
						    intel_dp->compliance.test_link_rate);
			if (index >= 0)
				limits->min_clock = limits->max_clock = index;
			limits->min_lane_count = limits->max_lane_count =
				intel_dp->compliance.test_lane_count;
		}
	}
}
2097
2098static int intel_dp_output_bpp(const struct intel_crtc_state *crtc_state, int bpp)
2099{
2100
2101
2102
2103
2104
2105 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2106 bpp /= 2;
2107
2108 return bpp;
2109}
2110
2111
/*
 * Find the first link configuration within @limits that can carry the
 * mode, preferring (in iteration order) the largest bpp, then the lowest
 * link clock, then the smallest lane count ("slow and wide").  Fills in
 * lane_count/pipe_bpp/port_clock on success; returns 0 or -EINVAL when
 * no combination has enough bandwidth.
 */
static int
intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  const struct link_config_limits *limits)
{
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	int bpp, clock, lane_count;
	int mode_rate, link_clock, link_avail;

	/* bpp steps down by one bpc (3 bits per pixel) per iteration. */
	for (bpp = limits->max_bpp; bpp >= limits->min_bpp; bpp -= 2 * 3) {
		int output_bpp = intel_dp_output_bpp(pipe_config, bpp);

		mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
						   output_bpp);

		for (clock = limits->min_clock; clock <= limits->max_clock; clock++) {
			/* Lane counts are powers of two: 1, 2, 4. */
			for (lane_count = limits->min_lane_count;
			     lane_count <= limits->max_lane_count;
			     lane_count <<= 1) {
				link_clock = intel_dp->common_rates[clock];
				link_avail = intel_dp_max_data_rate(link_clock,
								    lane_count);

				if (mode_rate <= link_avail) {
					pipe_config->lane_count = lane_count;
					pipe_config->pipe_bpp = bpp;
					pipe_config->port_clock = link_clock;

					return 0;
				}
			}
		}
	}

	return -EINVAL;
}
2148
2149static int intel_dp_dsc_compute_bpp(struct intel_dp *intel_dp, u8 dsc_max_bpc)
2150{
2151 int i, num_bpc;
2152 u8 dsc_bpc[3] = {0};
2153
2154 num_bpc = drm_dp_dsc_sink_supported_input_bpcs(intel_dp->dsc_dpcd,
2155 dsc_bpc);
2156 for (i = 0; i < num_bpc; i++) {
2157 if (dsc_max_bpc >= dsc_bpc[i])
2158 return dsc_bpc[i] * 3;
2159 }
2160
2161 return 0;
2162}
2163
2164#define DSC_SUPPORTED_VERSION_MIN 1
2165
/*
 * Fill in the sink-dependent fields of the DSC config (slice height,
 * DSC version, RGB conversion capability, line buffer depth, block
 * prediction) from the cached DSC DPCD, then compute the DSC
 * rate-control parameters.  Returns 0 or a negative error code.
 */
static int intel_dp_dsc_compute_params(struct intel_encoder *encoder,
				       struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	u8 line_buf_depth;
	int ret;

	ret = intel_dsc_compute_params(encoder, crtc_state);
	if (ret)
		return ret;

	/* Use the largest slice height (8, 4, 2) that divides pic_height. */
	if (vdsc_cfg->pic_height % 8 == 0)
		vdsc_cfg->slice_height = 8;
	else if (vdsc_cfg->pic_height % 4 == 0)
		vdsc_cfg->slice_height = 4;
	else
		vdsc_cfg->slice_height = 2;

	/* DSC version advertised by the sink in its DPCD. */
	vdsc_cfg->dsc_version_major =
		(intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
		 DP_DSC_MAJOR_MASK) >> DP_DSC_MAJOR_SHIFT;
	/* Never use a minor version newer than what we support. */
	vdsc_cfg->dsc_version_minor =
		min(DSC_SUPPORTED_VERSION_MIN,
		    (intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
		     DP_DSC_MINOR_MASK) >> DP_DSC_MINOR_SHIFT);

	vdsc_cfg->convert_rgb = intel_dp->dsc_dpcd[DP_DSC_DEC_COLOR_FORMAT_CAP - DP_DSC_SUPPORT] &
		DP_DSC_RGB;

	line_buf_depth = drm_dp_dsc_sink_line_buf_depth(intel_dp->dsc_dpcd);
	if (!line_buf_depth) {
		drm_dbg_kms(&i915->drm,
			    "DSC Sink Line Buffer Depth invalid\n");
		return -EINVAL;
	}

	/* Clamp/translate line buffer depth per DSC version. */
	if (vdsc_cfg->dsc_version_minor == 2)
		vdsc_cfg->line_buf_depth = (line_buf_depth == DSC_1_2_MAX_LINEBUF_DEPTH_BITS) ?
			DSC_1_2_MAX_LINEBUF_DEPTH_VAL : line_buf_depth;
	else
		vdsc_cfg->line_buf_depth = (line_buf_depth > DSC_1_1_MAX_LINEBUF_DEPTH_BITS) ?
			DSC_1_1_MAX_LINEBUF_DEPTH_BITS : line_buf_depth;

	vdsc_cfg->block_pred_enable =
		intel_dp->dsc_dpcd[DP_DSC_BLK_PREDICTION_SUPPORT - DP_DSC_SUPPORT] &
		DP_DSC_BLK_PREDICTION_IS_SUPPORTED;

	return drm_dsc_compute_rc_parameters(vdsc_cfg);
}
2222
/*
 * Compute a DSC-compressed link configuration: pick the max pipe bpp the
 * sink can decode, run the link at max rate/lane count, derive the
 * compressed bpp and slice count, and fill in the DSC parameters.
 * Returns 0 on success or a negative error code.
 */
static int intel_dp_dsc_compute_config(struct intel_dp *intel_dp,
				       struct intel_crtc_state *pipe_config,
				       struct drm_connector_state *conn_state,
				       struct link_config_limits *limits)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	u8 dsc_max_bpc;
	int pipe_bpp;
	int ret;

	/* FEC is required alongside DSC on non-eDP links. */
	pipe_config->fec_enable = !intel_dp_is_edp(intel_dp) &&
		intel_dp_supports_fec(intel_dp, pipe_config);

	if (!intel_dp_supports_dsc(intel_dp, pipe_config))
		return -EINVAL;

	/* Max DSC input bpc: 12 on gen12+, 10 before that. */
	if (INTEL_GEN(dev_priv) >= 12)
		dsc_max_bpc = min_t(u8, 12, conn_state->max_requested_bpc);
	else
		dsc_max_bpc = min_t(u8, 10,
				    conn_state->max_requested_bpc);

	pipe_bpp = intel_dp_dsc_compute_bpp(intel_dp, dsc_max_bpc);

	/* 8 bpc (24 bpp) is the minimum supported DSC input. */
	if (pipe_bpp < 8 * 3) {
		drm_dbg_kms(&dev_priv->drm,
			    "No DSC support for less than 8bpc\n");
		return -EINVAL;
	}

	/*
	 * For now enable DSC at max bpp, max link rate and max lane count;
	 * minimizing the link parameters for the mode is a possible later
	 * optimization.
	 */
	pipe_config->pipe_bpp = pipe_bpp;
	pipe_config->port_clock = intel_dp->common_rates[limits->max_clock];
	pipe_config->lane_count = limits->max_lane_count;

	if (intel_dp_is_edp(intel_dp)) {
		/* eDP sinks report their output bpp (in U6.4 format) directly. */
		pipe_config->dsc.compressed_bpp =
			min_t(u16, drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4,
			      pipe_config->pipe_bpp);
		pipe_config->dsc.slice_count =
			drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
							true);
	} else {
		u16 dsc_max_output_bpp;
		u8 dsc_dp_slice_count;

		/* Derive the compressed bpp / slice count from link + mode. */
		dsc_max_output_bpp =
			intel_dp_dsc_get_output_bpp(dev_priv,
						    pipe_config->port_clock,
						    pipe_config->lane_count,
						    adjusted_mode->crtc_clock,
						    adjusted_mode->crtc_hdisplay);
		dsc_dp_slice_count =
			intel_dp_dsc_get_slice_count(intel_dp,
						     adjusted_mode->crtc_clock,
						     adjusted_mode->crtc_hdisplay);
		if (!dsc_max_output_bpp || !dsc_dp_slice_count) {
			drm_dbg_kms(&dev_priv->drm,
				    "Compressed BPP/Slice Count not supported\n");
			return -EINVAL;
		}
		pipe_config->dsc.compressed_bpp = min_t(u16,
							dsc_max_output_bpp >> 4,
							pipe_config->pipe_bpp);
		pipe_config->dsc.slice_count = dsc_dp_slice_count;
	}

	/*
	 * If the pixel rate exceeds the max cdclk, a single VDSC engine
	 * cannot keep up and the stream must be split across two engines —
	 * which requires more than one slice.
	 */
	if (adjusted_mode->crtc_clock > dev_priv->max_cdclk_freq) {
		if (pipe_config->dsc.slice_count > 1) {
			pipe_config->dsc.dsc_split = true;
		} else {
			drm_dbg_kms(&dev_priv->drm,
				    "Cannot split stream to use 2 VDSC instances\n");
			return -EINVAL;
		}
	}

	ret = intel_dp_dsc_compute_params(&dig_port->base, pipe_config);
	if (ret < 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "Cannot compute valid DSC parameters for Input Bpp = %d "
			    "Compressed BPP = %d\n",
			    pipe_config->pipe_bpp,
			    pipe_config->dsc.compressed_bpp);
		return ret;
	}

	pipe_config->dsc.compression_enable = true;
	drm_dbg_kms(&dev_priv->drm, "DP DSC computed with Input Bpp = %d "
		    "Compressed Bpp = %d Slice Count = %d\n",
		    pipe_config->pipe_bpp,
		    pipe_config->dsc.compressed_bpp,
		    pipe_config->dsc.slice_count);

	return 0;
}
2332
2333int intel_dp_min_bpp(const struct intel_crtc_state *crtc_state)
2334{
2335 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_RGB)
2336 return 6 * 3;
2337 else
2338 return 8 * 3;
2339}
2340
/*
 * Top-level DP link configuration: establish clock/lane/bpp limits, try
 * an uncompressed configuration first, and fall back to (or force) DSC.
 * Returns 0 on success or a negative error code.
 */
static int
intel_dp_compute_link_config(struct intel_encoder *encoder,
			     struct intel_crtc_state *pipe_config,
			     struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct link_config_limits limits;
	int common_len;
	int ret;

	common_len = intel_dp_common_len_rate_limit(intel_dp,
						    intel_dp->max_link_rate);

	/* No common link rates between source and sink is a driver bug. */
	drm_WARN_ON(encoder->base.dev, common_len <= 0);

	limits.min_clock = 0;
	limits.max_clock = common_len - 1;

	limits.min_lane_count = 1;
	limits.max_lane_count = intel_dp_max_lane_count(intel_dp);

	limits.min_bpp = intel_dp_min_bpp(pipe_config);
	limits.max_bpp = intel_dp_max_bpp(intel_dp, pipe_config);

	if (intel_dp_is_edp(intel_dp)) {
		/* Pin eDP links to the panel's maximum rate and lane count. */
		limits.min_lane_count = limits.max_lane_count;
		limits.min_clock = limits.max_clock;
	}

	/* Compliance tests may further pin the limits. */
	intel_dp_adjust_compliance_config(intel_dp, pipe_config, &limits);

	drm_dbg_kms(&i915->drm, "DP link computation with max lane count %i "
		    "max rate %d max bpp %d pixel clock %iKHz\n",
		    limits.max_lane_count,
		    intel_dp->common_rates[limits.max_clock],
		    limits.max_bpp, adjusted_mode->crtc_clock);

	/*
	 * "Slow and wide" search: maximize bpp first, then find the lowest
	 * link rate / lane count that carries it.
	 */
	ret = intel_dp_compute_link_config_wide(intel_dp, pipe_config, &limits);

	/* Fall back to DSC if the uncompressed search failed or DSC is forced. */
	drm_dbg_kms(&i915->drm, "Force DSC en = %d\n", intel_dp->force_dsc_en);
	if (ret || intel_dp->force_dsc_en) {
		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits);
		if (ret < 0)
			return ret;
	}

	if (pipe_config->dsc.compression_enable) {
		drm_dbg_kms(&i915->drm,
			    "DP lane count %d clock %d Input bpp %d Compressed bpp %d\n",
			    pipe_config->lane_count, pipe_config->port_clock,
			    pipe_config->pipe_bpp,
			    pipe_config->dsc.compressed_bpp);

		drm_dbg_kms(&i915->drm,
			    "DP link rate required %i available %i\n",
			    intel_dp_link_required(adjusted_mode->crtc_clock,
						   pipe_config->dsc.compressed_bpp),
			    intel_dp_max_data_rate(pipe_config->port_clock,
						   pipe_config->lane_count));
	} else {
		drm_dbg_kms(&i915->drm, "DP lane count %d clock %d bpp %d\n",
			    pipe_config->lane_count, pipe_config->port_clock,
			    pipe_config->pipe_bpp);

		drm_dbg_kms(&i915->drm,
			    "DP link rate required %i available %i\n",
			    intel_dp_link_required(adjusted_mode->crtc_clock,
						   pipe_config->pipe_bpp),
			    intel_dp_max_data_rate(pipe_config->port_clock,
						   pipe_config->lane_count));
	}
	return 0;
}
2431
2432static int
2433intel_dp_ycbcr420_config(struct intel_dp *intel_dp,
2434 struct intel_crtc_state *crtc_state,
2435 const struct drm_connector_state *conn_state)
2436{
2437 struct drm_connector *connector = conn_state->connector;
2438 const struct drm_display_info *info = &connector->display_info;
2439 const struct drm_display_mode *adjusted_mode =
2440 &crtc_state->hw.adjusted_mode;
2441
2442 if (!connector->ycbcr_420_allowed)
2443 return 0;
2444
2445 if (!drm_mode_is_420_only(info, adjusted_mode))
2446 return 0;
2447
2448 if (intel_dp->dfp.ycbcr_444_to_420) {
2449 crtc_state->output_format = INTEL_OUTPUT_FORMAT_YCBCR444;
2450 return 0;
2451 }
2452
2453 crtc_state->output_format = INTEL_OUTPUT_FORMAT_YCBCR420;
2454
2455 return intel_pch_panel_fitting(crtc_state, conn_state);
2456}
2457
2458bool intel_dp_limited_color_range(const struct intel_crtc_state *crtc_state,
2459 const struct drm_connector_state *conn_state)
2460{
2461 const struct intel_digital_connector_state *intel_conn_state =
2462 to_intel_digital_connector_state(conn_state);
2463 const struct drm_display_mode *adjusted_mode =
2464 &crtc_state->hw.adjusted_mode;
2465
2466
2467
2468
2469
2470
2471
2472
2473 if (crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
2474 return false;
2475
2476 if (intel_conn_state->broadcast_rgb == INTEL_BROADCAST_RGB_AUTO) {
2477
2478
2479
2480
2481
2482 return crtc_state->pipe_bpp != 18 &&
2483 drm_default_rgb_quant_range(adjusted_mode) ==
2484 HDMI_QUANTIZATION_RANGE_LIMITED;
2485 } else {
2486 return intel_conn_state->broadcast_rgb ==
2487 INTEL_BROADCAST_RGB_LIMITED;
2488 }
2489}
2490
2491static bool intel_dp_port_has_audio(struct drm_i915_private *dev_priv,
2492 enum port port)
2493{
2494 if (IS_G4X(dev_priv))
2495 return false;
2496 if (INTEL_GEN(dev_priv) < 12 && port == PORT_A)
2497 return false;
2498
2499 return true;
2500}
2501
/*
 * Fill the pixel encoding / colorimetry fields of a VSC SDP from the crtc
 * and connector state. Only revision 0x5 / length 0x13 VSC SDPs carry
 * these fields (per the DP spec VSC SDP definition — see refs).
 */
static void intel_dp_compute_vsc_colorimetry(const struct intel_crtc_state *crtc_state,
					     const struct drm_connector_state *conn_state,
					     struct drm_dp_vsc_sdp *vsc)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	/*
	 * VSC SDP supporting 3D stereo, PSR2 and pixel encoding /
	 * colorimetry format.
	 */
	vsc->revision = 0x5;
	vsc->length = 0x13;

	/* Map the pipe output format to the VSC pixel-encoding field. */
	switch (crtc_state->output_format) {
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		vsc->pixelformat = DP_PIXELFORMAT_YUV444;
		break;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		vsc->pixelformat = DP_PIXELFORMAT_YUV420;
		break;
	case INTEL_OUTPUT_FORMAT_RGB:
	default:
		vsc->pixelformat = DP_PIXELFORMAT_RGB;
	}

	/* Translate the connector "Colorspace" property to the VSC field. */
	switch (conn_state->colorspace) {
	case DRM_MODE_COLORIMETRY_BT709_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_709:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_709;
		break;
	case DRM_MODE_COLORIMETRY_SYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_SYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_OPYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_OPYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_CYCC;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_RGB:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_RGB;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_YCC;
		break;
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_D65:
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_THEATER:
		vsc->colorimetry = DP_COLORIMETRY_DCI_P3_RGB;
		break;
	default:
		/*
		 * No explicit colorspace requested: YCbCr 4:2:0 output
		 * falls back to BT.709, everything else to the default
		 * (sRGB) colorimetry.
		 */
		if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
			vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		else
			vsc->colorimetry = DP_COLORIMETRY_DEFAULT;
		break;
	}

	/* pipe_bpp is the total over three components, bpc is per channel. */
	vsc->bpc = crtc_state->pipe_bpp / 3;

	/* only RGB pixelformat supports 6 bpc */
	drm_WARN_ON(&dev_priv->drm,
		    vsc->bpc == 6 && vsc->pixelformat != DP_PIXELFORMAT_RGB);

	/* all YCbCr are always limited range */
	vsc->dynamic_range = DP_DYNAMIC_RANGE_CTA;
	vsc->content_type = DP_CONTENT_TYPE_NOT_DEFINED;
}
2581
2582static void intel_dp_compute_vsc_sdp(struct intel_dp *intel_dp,
2583 struct intel_crtc_state *crtc_state,
2584 const struct drm_connector_state *conn_state)
2585{
2586 struct drm_dp_vsc_sdp *vsc = &crtc_state->infoframes.vsc;
2587
2588
2589 if (crtc_state->has_psr)
2590 return;
2591
2592 if (!intel_dp_needs_vsc_sdp(crtc_state, conn_state))
2593 return;
2594
2595 crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
2596 vsc->sdp_type = DP_SDP_VSC;
2597 intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
2598 &crtc_state->infoframes.vsc);
2599}
2600
/*
 * Build the VSC SDP used while PSR/PSR2 is active. The revision/length
 * pair selects which VSC SDP layout the sink gets (values per the
 * DP/eDP spec VSC SDP tables — see refs).
 */
void intel_dp_compute_psr_vsc_sdp(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state,
				  struct drm_dp_vsc_sdp *vsc)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	vsc->sdp_type = DP_SDP_VSC;

	if (dev_priv->psr.psr2_enabled) {
		if (dev_priv->psr.colorimetry_support &&
		    intel_dp_needs_vsc_sdp(crtc_state, conn_state)) {
			/* [PSR2, +Colorimetry]: rev 0x5 set by the helper */
			intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
							 vsc);
		} else {
			/*
			 * [PSR2, -Colorimetry]
			 * Revision 0x4 / length 0xe: VSC SDP supporting
			 * 3D stereo + PSR/PSR2 with Y-coordinate.
			 */
			vsc->revision = 0x4;
			vsc->length = 0xe;
		}
	} else {
		/*
		 * [PSR1]
		 * Revision 0x2 / length 0x8: VSC SDP supporting 3D
		 * stereo + PSR.
		 */
		vsc->revision = 0x2;
		vsc->length = 0x8;
	}
}
2636
2637static void
2638intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp *intel_dp,
2639 struct intel_crtc_state *crtc_state,
2640 const struct drm_connector_state *conn_state)
2641{
2642 int ret;
2643 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
2644 struct hdmi_drm_infoframe *drm_infoframe = &crtc_state->infoframes.drm.drm;
2645
2646 if (!conn_state->hdr_output_metadata)
2647 return;
2648
2649 ret = drm_hdmi_infoframe_set_hdr_metadata(drm_infoframe, conn_state);
2650
2651 if (ret) {
2652 drm_dbg_kms(&dev_priv->drm, "couldn't set HDR metadata in infoframe\n");
2653 return;
2654 }
2655
2656 crtc_state->infoframes.enable |=
2657 intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA);
2658}
2659
2660static void
2661intel_dp_drrs_compute_config(struct intel_dp *intel_dp,
2662 struct intel_crtc_state *pipe_config,
2663 int output_bpp, bool constant_n)
2664{
2665 struct intel_connector *intel_connector = intel_dp->attached_connector;
2666 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
2667
2668
2669
2670
2671
2672
2673
2674 if (pipe_config->has_psr)
2675 return;
2676
2677 if (!intel_connector->panel.downclock_mode ||
2678 dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
2679 return;
2680
2681 pipe_config->has_drrs = true;
2682 intel_link_compute_m_n(output_bpp, pipe_config->lane_count,
2683 intel_connector->panel.downclock_mode->clock,
2684 pipe_config->port_clock, &pipe_config->dp_m2_n2,
2685 constant_n, pipe_config->fec_enable);
2686}
2687
/*
 * Main atomic-check hook for DP/eDP: validate the requested mode, pick
 * link parameters, and derive all dependent state (M/N values, PSR,
 * DRRS, SDPs). Returns 0 on success or a negative error code.
 */
int
intel_dp_compute_config(struct intel_encoder *encoder,
			struct intel_crtc_state *pipe_config,
			struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_lspcon *lspcon = enc_to_intel_lspcon(encoder);
	enum port port = encoder->port;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	/* Some sinks need a constant N value in the M/N ratio (DP quirk). */
	bool constant_n = drm_dp_has_quirk(&intel_dp->desc, 0,
					   DP_DPCD_QUIRK_CONSTANT_N);
	int ret = 0, output_bpp;

	/* Non-DDI PCH platforms drive all ports except A through the PCH. */
	if (HAS_PCH_SPLIT(dev_priv) && !HAS_DDI(dev_priv) && port != PORT_A)
		pipe_config->has_pch_encoder = true;

	/* Default to RGB; the YCbCr 4:2:0 handling below may override it. */
	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;

	if (lspcon->active)
		lspcon_ycbcr420_config(&intel_connector->base, pipe_config);
	else
		ret = intel_dp_ycbcr420_config(intel_dp, pipe_config,
					       conn_state);
	if (ret)
		return ret;

	/* Resolve the audio state: hw capability, then user override. */
	if (!intel_dp_port_has_audio(dev_priv, port))
		pipe_config->has_audio = false;
	else if (intel_conn_state->force_audio == HDMI_AUDIO_AUTO)
		pipe_config->has_audio = intel_dp->has_audio;
	else
		pipe_config->has_audio = intel_conn_state->force_audio == HDMI_AUDIO_ON;

	/* eDP panels always run their fixed mode; scale to fit if needed. */
	if (intel_dp_is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
		intel_fixed_panel_mode(intel_connector->panel.fixed_mode,
				       adjusted_mode);

		if (HAS_GMCH(dev_priv))
			ret = intel_gmch_panel_fitting(pipe_config, conn_state);
		else
			ret = intel_pch_panel_fitting(pipe_config, conn_state);
		if (ret)
			return ret;
	}

	/* Reject mode flags this hardware/output combination can't do. */
	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	if (HAS_GMCH(dev_priv) &&
	    adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
		return -EINVAL;

	if (intel_dp_hdisplay_bad(dev_priv, adjusted_mode->crtc_hdisplay))
		return -EINVAL;

	/* Pick lane count / link rate / bpp (and possibly DSC). */
	ret = intel_dp_compute_link_config(encoder, pipe_config, conn_state);
	if (ret < 0)
		return ret;

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	/* The link carries the compressed bpp when DSC is in use. */
	if (pipe_config->dsc.compression_enable)
		output_bpp = pipe_config->dsc.compressed_bpp;
	else
		output_bpp = intel_dp_output_bpp(pipe_config, pipe_config->pipe_bpp);

	intel_link_compute_m_n(output_bpp,
			       pipe_config->lane_count,
			       adjusted_mode->crtc_clock,
			       pipe_config->port_clock,
			       &pipe_config->dp_m_n,
			       constant_n, pipe_config->fec_enable);

	/* Pre-DDI platforms need explicit DPLL divider selection. */
	if (!HAS_DDI(dev_priv))
		intel_dp_set_clock(encoder, pipe_config);

	/*
	 * PSR is computed before DRRS and the VSC SDP since both of
	 * those check pipe_config->has_psr.
	 */
	intel_psr_compute_config(intel_dp, pipe_config);
	intel_dp_drrs_compute_config(intel_dp, pipe_config, output_bpp,
				     constant_n);
	intel_dp_compute_vsc_sdp(intel_dp, pipe_config, conn_state);
	intel_dp_compute_hdr_metadata_infoframe_sdp(intel_dp, pipe_config, conn_state);

	return 0;
}
2780
/*
 * Record the link parameters to be used for the next link training.
 * Clearing link_trained forces a retrain with the new parameters.
 */
void intel_dp_set_link_params(struct intel_dp *intel_dp,
			      int link_rate, u8 lane_count,
			      bool link_mst)
{
	intel_dp->link_trained = false;
	intel_dp->link_rate = link_rate;
	intel_dp->lane_count = lane_count;
	intel_dp->link_mst = link_mst;
}
2790
/*
 * Program the cached DP port register value (intel_dp->DP) for the given
 * pipe config, without enabling the port yet. The register layout differs
 * per platform generation, hence the three branches below.
 * NOTE(review): the bit layout/ordering follows Bspec for each platform —
 * confirm against Bspec before reordering anything here.
 */
static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp, pipe_config->port_clock,
				 pipe_config->lane_count,
				 intel_crtc_has_type(pipe_config,
						     INTEL_OUTPUT_DP_MST));

	/*
	 * Rebuild the register value from scratch, keeping only the
	 * DP_DETECTED bit read back from the hardware.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Initial voltage swing / pre-emphasis; training adjusts these. */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* IVB CPU eDP (port A) has its own pipe-select and sync bits. */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		/* CPT PCH ports: enhanced framing lives in TRANS_DP_CTL. */
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		/* g4x/VLV/CHV style: everything in the port register. */
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}
2874
2875#define IDLE_ON_MASK (PP_ON | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
2876#define IDLE_ON_VALUE (PP_ON | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)
2877
2878#define IDLE_OFF_MASK (PP_ON | PP_SEQUENCE_MASK | 0 | 0)
2879#define IDLE_OFF_VALUE (0 | PP_SEQUENCE_NONE | 0 | 0)
2880
2881#define IDLE_CYCLE_MASK (PP_ON | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
2882#define IDLE_CYCLE_VALUE (0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
2883
2884static void intel_pps_verify_state(struct intel_dp *intel_dp);
2885
/*
 * Poll the panel power status register until (status & mask) == value,
 * giving up (with an error log, not a failure) after 5 seconds.
 * Caller must hold pps_mutex.
 */
static void wait_panel_status(struct intel_dp *intel_dp,
			      u32 mask,
			      u32 value)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	i915_reg_t pp_stat_reg, pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* Sanity-check software PPS state against the hardware first. */
	intel_pps_verify_state(intel_dp);

	pp_stat_reg = _pp_stat_reg(intel_dp);
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	drm_dbg_kms(&dev_priv->drm,
		    "mask %08x value %08x status %08x control %08x\n",
		    mask, value,
		    intel_de_read(dev_priv, pp_stat_reg),
		    intel_de_read(dev_priv, pp_ctrl_reg));

	/* 5000 ms timeout; a timeout is logged but otherwise tolerated. */
	if (intel_de_wait_for_register(dev_priv, pp_stat_reg,
				       mask, value, 5000))
		drm_err(&dev_priv->drm,
			"Panel status timeout: status %08x control %08x\n",
			intel_de_read(dev_priv, pp_stat_reg),
			intel_de_read(dev_priv, pp_ctrl_reg));

	drm_dbg_kms(&dev_priv->drm, "Wait complete\n");
}
2915
/* Wait until the panel power sequencer reports the panel fully on. */
static void wait_panel_on(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	drm_dbg_kms(&i915->drm, "Wait for panel power on\n");
	wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}
2923
/* Wait until the panel power sequencer reports the panel fully off. */
static void wait_panel_off(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	drm_dbg_kms(&i915->drm, "Wait for panel power off time\n");
	wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}
2931
/*
 * Honour the panel's minimum power-cycle delay: if the panel was turned
 * off less than panel_power_cycle_delay ms ago, sleep for the remainder
 * before waiting for the power sequencer to go idle.
 */
static void wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	ktime_t panel_power_on_time;
	s64 panel_power_off_duration;

	drm_dbg_kms(&i915->drm, "Wait for panel power cycle\n");

	/* take the difference of currrent time and panel power off time
	 * and then make panel wait for power_cycle if needed. */
	panel_power_on_time = ktime_get_boottime();
	panel_power_off_duration = ktime_ms_delta(panel_power_on_time, intel_dp->panel_power_off_time);

	/* When we disable the VDD override bit last we have to do the manual
	 * wait. */
	if (panel_power_off_duration < (s64)intel_dp->panel_power_cycle_delay)
		wait_remaining_ms_from_jiffies(jiffies,
				       intel_dp->panel_power_cycle_delay - panel_power_off_duration);

	wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}
2953
/* Enforce the panel's power-on -> backlight-on delay (T8 style delay). */
static void wait_backlight_on(struct intel_dp *intel_dp)
{
	wait_remaining_ms_from_jiffies(intel_dp->last_power_on,
				       intel_dp->backlight_on_delay);
}
2959
/* Enforce the minimum delay after the backlight was last turned off. */
static void edp_wait_backlight_off(struct intel_dp *intel_dp)
{
	wait_remaining_ms_from_jiffies(intel_dp->last_backlight_off,
				       intel_dp->backlight_off_delay);
}
2965
2966
2967
2968
2969
/*
 * Read PP_CONTROL, making sure the register-unlock key is set on
 * pre-DDI hardware so subsequent writes actually take effect.
 * Caller must hold pps_mutex.
 */
static u32 ilk_get_pp_control(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 control;

	lockdep_assert_held(&dev_priv->pps_mutex);

	control = intel_de_read(dev_priv, _pp_ctrl_reg(intel_dp));
	/* WARN if the unlock key got lost (shouldn't happen), then fix it. */
	if (drm_WARN_ON(&dev_priv->drm, !HAS_DDI(dev_priv) &&
			(control & PANEL_UNLOCK_MASK) != PANEL_UNLOCK_REGS)) {
		control &= ~PANEL_UNLOCK_MASK;
		control |= PANEL_UNLOCK_REGS;
	}
	return control;
}
2985
2986
2987
2988
2989
2990
/*
 * Force eDP panel VDD on (needed for AUX transactions before full panel
 * power). Returns true if VDD was not already requested, i.e. the caller
 * is responsible for the matching vdd off. Caller must hold pps_mutex.
 */
static bool edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	u32 pp;
	i915_reg_t pp_stat_reg, pp_ctrl_reg;
	/* Caller owns the disable only if nobody had requested VDD yet. */
	bool need_to_disable = !intel_dp->want_panel_vdd;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!intel_dp_is_edp(intel_dp))
		return false;

	/* Cancel any pending deferred VDD-off while we want it on. */
	cancel_delayed_work(&intel_dp->panel_vdd_work);
	intel_dp->want_panel_vdd = true;

	/* Already on in hardware: nothing more to program. */
	if (edp_have_panel_vdd(intel_dp))
		return need_to_disable;

	intel_display_power_get(dev_priv,
				intel_aux_power_domain(dig_port));

	drm_dbg_kms(&dev_priv->drm, "Turning [ENCODER:%d:%s] VDD on\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name);

	/* Respect the panel power-cycle delay if the panel is off. */
	if (!edp_have_panel_power(intel_dp))
		wait_panel_power_cycle(intel_dp);

	pp = ilk_get_pp_control(intel_dp);
	pp |= EDP_FORCE_VDD;

	pp_stat_reg = _pp_stat_reg(intel_dp);
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	intel_de_write(dev_priv, pp_ctrl_reg, pp);
	intel_de_posting_read(dev_priv, pp_ctrl_reg);
	drm_dbg_kms(&dev_priv->drm, "PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
		    intel_de_read(dev_priv, pp_stat_reg),
		    intel_de_read(dev_priv, pp_ctrl_reg));

	/*
	 * If the panel wasn't on, the VDD rail needs the power-up delay
	 * before AUX transactions are reliable.
	 */
	if (!edp_have_panel_power(intel_dp)) {
		drm_dbg_kms(&dev_priv->drm,
			    "[ENCODER:%d:%s] panel power wasn't enabled\n",
			    dig_port->base.base.base.id,
			    dig_port->base.base.name);
		msleep(intel_dp->panel_power_up_delay);
	}

	return need_to_disable;
}
3044
3045
3046
3047
3048
3049
3050
3051
/*
 * Locked wrapper around edp_panel_vdd_on() for callers that expect to be
 * the first/only VDD user; warns if VDD was already requested.
 */
void intel_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	intel_wakeref_t wakeref;
	bool vdd;

	if (!intel_dp_is_edp(intel_dp))
		return;

	vdd = false;
	with_pps_lock(intel_dp, wakeref)
		vdd = edp_panel_vdd_on(intel_dp);
	I915_STATE_WARN(!vdd, "[ENCODER:%d:%s] VDD already requested on\n",
			dp_to_dig_port(intel_dp)->base.base.base.id,
			dp_to_dig_port(intel_dp)->base.base.name);
}
3067
/*
 * Immediately drop the VDD force bit and release the AUX power domain.
 * Only legal once nobody wants VDD anymore (warns otherwise).
 * Caller must hold pps_mutex.
 */
static void edp_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port =
		dp_to_dig_port(intel_dp);
	u32 pp;
	i915_reg_t pp_stat_reg, pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	drm_WARN_ON(&dev_priv->drm, intel_dp->want_panel_vdd);

	if (!edp_have_panel_vdd(intel_dp))
		return;

	drm_dbg_kms(&dev_priv->drm, "Turning [ENCODER:%d:%s] VDD off\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name);

	pp = ilk_get_pp_control(intel_dp);
	pp &= ~EDP_FORCE_VDD;

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
	pp_stat_reg = _pp_stat_reg(intel_dp);

	intel_de_write(dev_priv, pp_ctrl_reg, pp);
	intel_de_posting_read(dev_priv, pp_ctrl_reg);

	/* Make sure sequencer is idle before allowing subsequent activity */
	drm_dbg_kms(&dev_priv->drm, "PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
		    intel_de_read(dev_priv, pp_stat_reg),
		    intel_de_read(dev_priv, pp_ctrl_reg));

	/*
	 * Dropping VDD with the panel off counts as the start of the
	 * panel power-off period for the power-cycle delay.
	 */
	if ((pp & PANEL_POWER_ON) == 0)
		intel_dp->panel_power_off_time = ktime_get_boottime();

	intel_display_power_put_unchecked(dev_priv,
					  intel_aux_power_domain(dig_port));
}
3107
/*
 * Deferred VDD-off worker: drops VDD only if no one has re-requested it
 * in the meantime.
 */
static void edp_panel_vdd_work(struct work_struct *__work)
{
	struct intel_dp *intel_dp =
		container_of(to_delayed_work(__work),
			     struct intel_dp, panel_vdd_work);
	intel_wakeref_t wakeref;

	with_pps_lock(intel_dp, wakeref) {
		/* Someone may have wanted VDD again since scheduling. */
		if (!intel_dp->want_panel_vdd)
			edp_panel_vdd_off_sync(intel_dp);
	}
}
3120
/*
 * Queue the deferred VDD-off. The delay is a generous multiple of the
 * panel power-cycle delay so bursts of AUX traffic don't cause rapid
 * VDD on/off cycling.
 */
static void edp_panel_vdd_schedule_off(struct intel_dp *intel_dp)
{
	unsigned long delay;

	delay = msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5);
	schedule_delayed_work(&intel_dp->panel_vdd_work, delay);
}
3133
3134
3135
3136
3137
3138
/*
 * Release a VDD reference: either synchronously (sync=true) or via the
 * deferred worker. Warns if VDD wasn't forced on. Caller must hold
 * pps_mutex.
 */
static void edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!intel_dp_is_edp(intel_dp))
		return;

	I915_STATE_WARN(!intel_dp->want_panel_vdd, "[ENCODER:%d:%s] VDD not forced on",
			dp_to_dig_port(intel_dp)->base.base.base.id,
			dp_to_dig_port(intel_dp)->base.base.name);

	intel_dp->want_panel_vdd = false;

	if (sync)
		edp_panel_vdd_off_sync(intel_dp);
	else
		edp_panel_vdd_schedule_off(intel_dp);
}
3159
/*
 * Turn the eDP panel power on through the power sequencer. Caller must
 * hold pps_mutex. The gen5 (ILK) special-casing around PANEL_POWER_RESET
 * looks like a hardware workaround — NOTE(review): confirm against Bspec
 * before touching that ordering.
 */
static void edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(&dev_priv->drm, "Turn [ENCODER:%d:%s] panel power on\n",
		    dp_to_dig_port(intel_dp)->base.base.base.id,
		    dp_to_dig_port(intel_dp)->base.base.name);

	if (drm_WARN(&dev_priv->drm, edp_have_panel_power(intel_dp),
		     "[ENCODER:%d:%s] panel power already on\n",
		     dp_to_dig_port(intel_dp)->base.base.base.id,
		     dp_to_dig_port(intel_dp)->base.base.name))
		return;

	/* Honour the minimum off time before powering back on. */
	wait_panel_power_cycle(intel_dp);

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
	pp = ilk_get_pp_control(intel_dp);
	if (IS_GEN(dev_priv, 5)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		intel_de_write(dev_priv, pp_ctrl_reg, pp);
		intel_de_posting_read(dev_priv, pp_ctrl_reg);
	}

	pp |= PANEL_POWER_ON;
	if (!IS_GEN(dev_priv, 5))
		pp |= PANEL_POWER_RESET;

	intel_de_write(dev_priv, pp_ctrl_reg, pp);
	intel_de_posting_read(dev_priv, pp_ctrl_reg);

	wait_panel_on(intel_dp);
	/* Timestamp used for the power-on -> backlight-on delay. */
	intel_dp->last_power_on = jiffies;

	if (IS_GEN(dev_priv, 5)) {
		pp |= PANEL_POWER_RESET;
		intel_de_write(dev_priv, pp_ctrl_reg, pp);
		intel_de_posting_read(dev_priv, pp_ctrl_reg);
	}
}
3208
/* Locked wrapper: take the PPS lock and power the eDP panel on. */
void intel_edp_panel_on(struct intel_dp *intel_dp)
{
	intel_wakeref_t wakeref;

	if (!intel_dp_is_edp(intel_dp))
		return;

	with_pps_lock(intel_dp, wakeref)
		edp_panel_on(intel_dp);
}
3219
3220
/*
 * Turn the eDP panel power off through the power sequencer. Requires a
 * live VDD reference (warns otherwise) which is consumed here — VDD goes
 * down together with panel power. Caller must hold pps_mutex.
 */
static void edp_panel_off(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	u32 pp;
	i915_reg_t pp_ctrl_reg;

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(&dev_priv->drm, "Turn [ENCODER:%d:%s] panel power off\n",
		    dig_port->base.base.base.id, dig_port->base.base.name);

	drm_WARN(&dev_priv->drm, !intel_dp->want_panel_vdd,
		 "Need [ENCODER:%d:%s] VDD to turn off panel\n",
		 dig_port->base.base.base.id, dig_port->base.base.name);

	pp = ilk_get_pp_control(intel_dp);
	/* Clear power, reset, forced VDD and backlight in one write. */
	pp &= ~(PANEL_POWER_ON | PANEL_POWER_RESET | EDP_FORCE_VDD |
		EDP_BLC_ENABLE);

	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);

	intel_dp->want_panel_vdd = false;

	intel_de_write(dev_priv, pp_ctrl_reg, pp);
	intel_de_posting_read(dev_priv, pp_ctrl_reg);

	wait_panel_off(intel_dp);
	/* Start of the off period for the power-cycle delay accounting. */
	intel_dp->panel_power_off_time = ktime_get_boottime();

	/* We got a reference when we enabled the VDD. */
	intel_display_power_put_unchecked(dev_priv, intel_aux_power_domain(dig_port));
}
3259
/* Locked wrapper: take the PPS lock and power the eDP panel off. */
void intel_edp_panel_off(struct intel_dp *intel_dp)
{
	intel_wakeref_t wakeref;

	if (!intel_dp_is_edp(intel_dp))
		return;

	with_pps_lock(intel_dp, wakeref)
		edp_panel_off(intel_dp);
}
3270
3271
/*
 * Enable the eDP backlight via the PP_CONTROL BLC bit, after honouring
 * the panel's power-on -> backlight-on delay.
 */
static void _intel_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	intel_wakeref_t wakeref;

	/*
	 * Wait outside the PPS lock: the delay can be long and nothing
	 * here needs the lock until we actually touch PP_CONTROL.
	 * NOTE(review): inferred from the call placement — confirm.
	 */
	wait_backlight_on(intel_dp);

	with_pps_lock(intel_dp, wakeref) {
		i915_reg_t pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
		u32 pp;

		pp = ilk_get_pp_control(intel_dp);
		pp |= EDP_BLC_ENABLE;

		intel_de_write(dev_priv, pp_ctrl_reg, pp);
		intel_de_posting_read(dev_priv, pp_ctrl_reg);
	}
}
3296
3297
/*
 * Enable backlight: PWM/brightness control first, then the PPS
 * backlight-enable bit.
 */
void intel_edp_backlight_on(const struct intel_crtc_state *crtc_state,
			    const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(conn_state->best_encoder));
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(&i915->drm, "\n");

	intel_panel_enable_backlight(crtc_state, conn_state);
	_intel_edp_backlight_on(intel_dp);
}
3312
3313
/*
 * Disable the eDP backlight via PP_CONTROL, then record the timestamp
 * and wait the panel's backlight-off delay.
 */
static void _intel_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	intel_wakeref_t wakeref;

	if (!intel_dp_is_edp(intel_dp))
		return;

	with_pps_lock(intel_dp, wakeref) {
		i915_reg_t pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
		u32 pp;

		pp = ilk_get_pp_control(intel_dp);
		pp &= ~EDP_BLC_ENABLE;

		intel_de_write(dev_priv, pp_ctrl_reg, pp);
		intel_de_posting_read(dev_priv, pp_ctrl_reg);
	}

	intel_dp->last_backlight_off = jiffies;
	edp_wait_backlight_off(intel_dp);
}
3336
3337
/*
 * Disable backlight: PPS backlight-enable bit first, then PWM/brightness
 * control (reverse order of intel_edp_backlight_on()).
 */
void intel_edp_backlight_off(const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(old_conn_state->best_encoder));
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(&i915->drm, "\n");

	_intel_edp_backlight_off(intel_dp);
	intel_panel_disable_backlight(old_conn_state);
}
3351
3352
3353
3354
3355
/*
 * Set the PPS backlight-enable state to match 'enable', skipping the
 * write when the hardware already agrees.
 */
static void intel_edp_backlight_power(struct intel_connector *connector,
				      bool enable)
{
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	intel_wakeref_t wakeref;
	bool is_enabled;

	is_enabled = false;
	with_pps_lock(intel_dp, wakeref)
		is_enabled = ilk_get_pp_control(intel_dp) & EDP_BLC_ENABLE;
	if (is_enabled == enable)
		return;

	drm_dbg_kms(&i915->drm, "panel power control backlight %s\n",
		    enable ? "enable" : "disable");

	if (enable)
		_intel_edp_backlight_on(intel_dp);
	else
		_intel_edp_backlight_off(intel_dp);
}
3378
/*
 * State-checker helper: warn if the DP port-enable bit doesn't match the
 * expected state.
 */
static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			onoff(state), onoff(cur_state));
}
3390#define assert_dp_port_disabled(d) assert_dp_port((d), false)
3391
/* State-checker helper: warn if the eDP PLL enable bit doesn't match. */
static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
3400#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
3401#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
3402
/*
 * Enable the ILK-era eDP PLL: program the frequency select first, then
 * set the enable bit. Pipe and port must be disabled, and the PLL off.
 * The udelay values and the ILK vblank wait look like hardware
 * workarounds — NOTE(review): confirm against Bspec before changing.
 */
static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	/* Select 162 or 270 MHz link clock before enabling the PLL. */
	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * On gen5 wait for a vblank on the other pipe (if active) before
	 * enabling the PLL.
	 */
	if (IS_GEN(dev_priv, 5))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}
3442
/*
 * Disable the ILK-era eDP PLL. Pipe and port must already be disabled
 * and the PLL enabled.
 */
static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}
3461
/*
 * DPCD 1.1 branch devices with an HPD-capable downstream port must stay
 * in D0 for downstream hotplug events to propagate (later DPCD revisions
 * can signal HPD from low power states — see the DP spec, refs).
 */
static bool downstream_hpd_needs_d0(struct intel_dp *intel_dp)
{
	return intel_dp->dpcd[DP_DPCD_REV] == 0x11 &&
		drm_dp_is_branch(intel_dp->dpcd) &&
		intel_dp->downstream_ports[0] & DP_DS_PORT_HPD;
}
3476
/*
 * Toggle DSC decompression on the sink via DPCD DP_DSC_ENABLE. A write
 * failure is logged but not propagated.
 */
void intel_dp_sink_set_decompression_state(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state,
					   bool enable)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int ret;

	/* Nothing to program when DSC isn't in use for this state. */
	if (!crtc_state->dsc.compression_enable)
		return;

	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_DSC_ENABLE,
				 enable ? DP_DECOMPRESSION_EN : 0);
	if (ret < 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to %s sink decompression state\n",
			    enable ? "enable" : "disable");
}
3494
3495
/*
 * Put the sink into D0 or D3 via DPCD DP_SET_POWER. D0 entry is retried
 * because sinks may need time to wake after exiting a low power state.
 */
void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int ret, i;

	/* DPCD < 1.1 sinks have no DP_SET_POWER register. */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		/* Keep such branch devices in D0 so downstream HPD works. */
		if (downstream_hpd_needs_d0(intel_dp))
			return;

		ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER,
					 DP_SET_POWER_D3);
	} else {
		struct intel_lspcon *lspcon = dp_to_lspcon(intel_dp);

		/*
		 * Retry the D0 write a few times; the sink may not
		 * respond to AUX immediately after waking.
		 */
		for (i = 0; i < 3; i++) {
			ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER,
						 DP_SET_POWER_D0);
			if (ret == 1)
				break;
			msleep(1);
		}

		if (ret == 1 && lspcon->active)
			lspcon_wait_pcon_mode(lspcon);
	}

	/* drm_dp_dpcd_writeb() returns 1 (bytes written) on success. */
	if (ret != 1)
		drm_dbg_kms(&i915->drm, "failed to %s sink power state\n",
			    mode == DRM_MODE_DPMS_ON ? "enable" : "disable");
}
3534
3535static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
3536 enum port port, enum pipe *pipe)
3537{
3538 enum pipe p;
3539
3540 for_each_pipe(dev_priv, p) {
3541 u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));
3542
3543 if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
3544 *pipe = p;
3545 return true;
3546 }
3547 }
3548
3549 drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
3550 port_name(port));
3551
3552
3553 *pipe = PIPE_A;
3554
3555 return false;
3556}
3557
/*
 * intel_dp_port_enabled - read out DP port enable state and attached pipe
 * @dev_priv: i915 device
 * @dp_reg: the port's DP control register
 * @port: port being queried
 * @pipe: returns the pipe the port is attached to
 *
 * Returns true iff the port is enabled. @pipe is written in every branch
 * (cpt_dp_port_selected() also always writes it), so callers get a
 * defined pipe even for a disabled port.
 */
bool intel_dp_port_enabled(struct drm_i915_private *dev_priv,
			   i915_reg_t dp_reg, enum port port,
			   enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/*
	 * The pipe-select encoding differs per platform and port;
	 * on CPT PCH ports the association must be read from the
	 * transcoder registers instead of the port register.
	 */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}
3581
3582static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
3583 enum pipe *pipe)
3584{
3585 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3586 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3587 intel_wakeref_t wakeref;
3588 bool ret;
3589
3590 wakeref = intel_display_power_get_if_enabled(dev_priv,
3591 encoder->power_domain);
3592 if (!wakeref)
3593 return false;
3594
3595 ret = intel_dp_port_enabled(dev_priv, intel_dp->output_reg,
3596 encoder->port, pipe);
3597
3598 intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
3599
3600 return ret;
3601}
3602
/*
 * Read the current hardware state of the DP port into @pipe_config
 * (state readout for cross checking / takeover from firmware).
 */
static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	/* port A (eDP) never has audio */
	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		/* on CPT the sync polarities live in the transcoder register */
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	/* lane count field is 0-based in the register */
	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	intel_dp_get_m_n(crtc, pipe_config);

	if (port == PORT_A) {
		/* port A link rate comes from the eDP PLL frequency */
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * The firmware programmed a higher pipe bpp than the VBT's
		 * claimed eDP max; trust the hardware and raise the cached
		 * VBT limit so later modesets keep using that bpp.
		 * NOTE(review): this mutates dev_priv->vbt state at readout
		 * time - presumably an intentional firmware-quirk workaround;
		 * confirm against upstream history before changing.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}
3688
/*
 * Common DP disable: audio off, then panel/backlight/sink power-down.
 * The ordering of the eDP panel-power calls below is deliberate.
 */
static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	/*
	 * VDD is forced on first so the AUX channel stays usable for the
	 * sink DPMS-off write while the backlight/panel are sequenced off
	 * - NOTE(review): ordering assumed intentional; confirm against
	 * the eDP panel power sequencing requirements before reordering.
	 */
	intel_edp_panel_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_OFF);
	intel_edp_panel_off(intel_dp);
}
3709
/* g4x disable hook: nothing platform-specific beyond the common disable. */
static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}
3717
/* VLV/CHV disable hook: nothing platform-specific beyond the common disable. */
static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}
3725
static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Take the link down (port disable + idle pattern dance) while
	 * the clocks are still running; only afterwards may the eDP PLL
	 * be shut off for port A.
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only port A (eDP) uses the dedicated eDP PLL */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}
3746
/* VLV post-disable: just take the link down; no eDP PLL to turn off. */
static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}
3754
static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Reset the PHY data lanes, leaving them powered down afterwards */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}
3771
3772static void
3773cpt_set_link_train(struct intel_dp *intel_dp,
3774 u8 dp_train_pat)
3775{
3776 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
3777 u32 *DP = &intel_dp->DP;
3778
3779 *DP &= ~DP_LINK_TRAIN_MASK_CPT;
3780
3781 switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
3782 case DP_TRAINING_PATTERN_DISABLE:
3783 *DP |= DP_LINK_TRAIN_OFF_CPT;
3784 break;
3785 case DP_TRAINING_PATTERN_1:
3786 *DP |= DP_LINK_TRAIN_PAT_1_CPT;
3787 break;
3788 case DP_TRAINING_PATTERN_2:
3789 *DP |= DP_LINK_TRAIN_PAT_2_CPT;
3790 break;
3791 case DP_TRAINING_PATTERN_3:
3792 drm_dbg_kms(&dev_priv->drm,
3793 "TPS3 not supported, using TPS2 instead\n");
3794 *DP |= DP_LINK_TRAIN_PAT_2_CPT;
3795 break;
3796 }
3797
3798 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
3799 intel_de_posting_read(dev_priv, intel_dp->output_reg);
3800}
3801
3802static void
3803g4x_set_link_train(struct intel_dp *intel_dp,
3804 u8 dp_train_pat)
3805{
3806 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
3807 u32 *DP = &intel_dp->DP;
3808
3809 *DP &= ~DP_LINK_TRAIN_MASK;
3810
3811 switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
3812 case DP_TRAINING_PATTERN_DISABLE:
3813 *DP |= DP_LINK_TRAIN_OFF;
3814 break;
3815 case DP_TRAINING_PATTERN_1:
3816 *DP |= DP_LINK_TRAIN_PAT_1;
3817 break;
3818 case DP_TRAINING_PATTERN_2:
3819 *DP |= DP_LINK_TRAIN_PAT_2;
3820 break;
3821 case DP_TRAINING_PATTERN_3:
3822 drm_dbg_kms(&dev_priv->drm,
3823 "TPS3 not supported, using TPS2 instead\n");
3824 *DP |= DP_LINK_TRAIN_PAT_2;
3825 break;
3826 }
3827
3828 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
3829 intel_de_posting_read(dev_priv, intel_dp->output_reg);
3830}
3831
static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/*
	 * Set up TPS1 in the shadow DP value before flipping the port on,
	 * so the port comes up already emitting the first training pattern.
	 */
	intel_dp_program_link_training_pattern(intel_dp, DP_TRAINING_PATTERN_1);

	/*
	 * Enable the port and, when the state asks for it, audio in the
	 * same register write.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (old_crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
3854
3855void intel_dp_configure_protocol_converter(struct intel_dp *intel_dp)
3856{
3857 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3858 u8 tmp;
3859
3860 if (intel_dp->dpcd[DP_DPCD_REV] < 0x13)
3861 return;
3862
3863 if (!drm_dp_is_branch(intel_dp->dpcd))
3864 return;
3865
3866 tmp = intel_dp->has_hdmi_sink ?
3867 DP_HDMI_DVI_OUTPUT_CONFIG : 0;
3868
3869 if (drm_dp_dpcd_writeb(&intel_dp->aux,
3870 DP_PROTOCOL_CONVERTER_CONTROL_0, tmp) != 1)
3871 drm_dbg_kms(&i915->drm, "Failed to set protocol converter HDMI mode to %s\n",
3872 enableddisabled(intel_dp->has_hdmi_sink));
3873
3874 tmp = intel_dp->dfp.ycbcr_444_to_420 ?
3875 DP_CONVERSION_TO_YCBCR420_ENABLE : 0;
3876
3877 if (drm_dp_dpcd_writeb(&intel_dp->aux,
3878 DP_PROTOCOL_CONVERTER_CONTROL_1, tmp) != 1)
3879 drm_dbg_kms(&i915->drm,
3880 "Failed to set protocol converter YCbCr 4:2:0 conversion mode to %s\n",
3881 enableddisabled(intel_dp->dfp.ycbcr_444_to_420));
3882
3883 tmp = 0;
3884
3885 if (drm_dp_dpcd_writeb(&intel_dp->aux,
3886 DP_PROTOCOL_CONVERTER_CONTROL_2, tmp) <= 0)
3887 drm_dbg_kms(&i915->drm,
3888 "Failed to set protocol converter YCbCr 4:2:2 conversion mode to %s\n",
3889 enableddisabled(false));
3890}
3891
/*
 * Common DP enable path: bring the port up (under pps_lock for eDP
 * panel power sequencing), wake the sink, train the link and enable audio.
 */
static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	enum pipe pipe = crtc->pipe;
	intel_wakeref_t wakeref;

	/* the port must not already be enabled at this point */
	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_pps_lock(intel_dp, wakeref) {
		/* VLV/CHV attach a power sequencer to the pipe first */
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_init_panel_power_sequencer(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		/* eDP panel power-on sequence, all under the same pps_lock */
		edp_panel_vdd_on(intel_dp);
		edp_panel_on(intel_dp);
		edp_panel_vdd_off(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		/* CHV powers down the PHY lanes the config doesn't use */
		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_configure_protocol_converter(intel_dp);
	intel_dp_start_link_train(intel_dp);
	intel_dp_stop_link_train(intel_dp);

	if (pipe_config->has_audio) {
		drm_dbg(&dev_priv->drm, "Enabling DP audio on pipe %c\n",
			pipe_name(pipe));
		intel_audio_codec_enable(encoder, pipe_config, conn_state);
	}
}
3939
/* g4x enable hook: common enable, then turn on the eDP backlight. */
static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}
3948
/*
 * VLV/CHV enable hook: only the backlight remains to be turned on here -
 * the port itself is enabled earlier from the pre_enable hooks
 * (vlv/chv_pre_enable_dp call intel_enable_dp()).
 */
static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}
3956
static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only port A (eDP) uses the dedicated eDP PLL */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}
3971
/*
 * Detach the (VLV/CHV) per-pipe panel power sequencer currently assigned
 * to this encoder. Must only be called while the port is inactive
 * (active_pipe == INVALID_PIPE is asserted).
 */
static void vlv_detach_power_sequencer(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum pipe pipe = intel_dp->pps_pipe;
	i915_reg_t pp_on_reg = PP_ON_DELAYS(pipe);

	drm_WARN_ON(&dev_priv->drm, intel_dp->active_pipe != INVALID_PIPE);

	/* only pipes A and B have a panel power sequencer on VLV/CHV */
	if (drm_WARN_ON(&dev_priv->drm, pipe != PIPE_A && pipe != PIPE_B))
		return;

	edp_panel_vdd_off_sync(intel_dp);

	/*
	 * Clearing PP_ON_DELAYS severs the pipe<->port association in the
	 * power sequencer - NOTE(review): presumably because the port
	 * select lives in this register on VLV/CHV; confirm against Bspec
	 * before relying on this elsewhere.
	 */
	drm_dbg_kms(&dev_priv->drm,
		    "detaching pipe %c power sequencer from [ENCODER:%d:%s]\n",
		    pipe_name(pipe), dig_port->base.base.base.id,
		    dig_port->base.base.name);
	intel_de_write(dev_priv, pp_on_reg, 0);
	intel_de_posting_read(dev_priv, pp_on_reg);

	intel_dp->pps_pipe = INVALID_PIPE;
}
4004
/*
 * Detach @pipe's power sequencer from whichever DP encoder currently
 * holds it, so the caller can claim it. Caller must hold pps_mutex.
 */
static void vlv_steal_power_sequencer(struct drm_i915_private *dev_priv,
				      enum pipe pipe)
{
	struct intel_encoder *encoder;

	lockdep_assert_held(&dev_priv->pps_mutex);

	for_each_intel_dp(&dev_priv->drm, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		/* stealing from an encoder that's actively using it is a bug */
		drm_WARN(&dev_priv->drm, intel_dp->active_pipe == pipe,
			 "stealing pipe %c power sequencer from active [ENCODER:%d:%s]\n",
			 pipe_name(pipe), encoder->base.base.id,
			 encoder->base.name);

		if (intel_dp->pps_pipe != pipe)
			continue;

		drm_dbg_kms(&dev_priv->drm,
			    "stealing pipe %c power sequencer from [ENCODER:%d:%s]\n",
			    pipe_name(pipe), encoder->base.base.id,
			    encoder->base.name);

		/* release the PPS from its current owner */
		vlv_detach_power_sequencer(intel_dp);
	}
}
4032
/*
 * Attach the panel power sequencer of the pipe about to be enabled to
 * this encoder (VLV/CHV have one PPS per pipe). Called with pps_mutex
 * held from the enable path, before the port is turned on.
 */
static void vlv_init_panel_power_sequencer(struct intel_encoder *encoder,
					   const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	lockdep_assert_held(&dev_priv->pps_mutex);

	drm_WARN_ON(&dev_priv->drm, intel_dp->active_pipe != INVALID_PIPE);

	if (intel_dp->pps_pipe != INVALID_PIPE &&
	    intel_dp->pps_pipe != crtc->pipe) {
		/*
		 * We're switching pipes: release the PPS we held for the
		 * old pipe before claiming the new one.
		 */
		vlv_detach_power_sequencer(intel_dp);
	}

	/*
	 * The target pipe's PPS may still be attached to another
	 * encoder; take it away first.
	 */
	vlv_steal_power_sequencer(dev_priv, crtc->pipe);

	intel_dp->active_pipe = crtc->pipe;

	/* only eDP actually needs a panel power sequencer */
	if (!intel_dp_is_edp(intel_dp))
		return;

	/* now it's all ours */
	intel_dp->pps_pipe = crtc->pipe;

	drm_dbg_kms(&dev_priv->drm,
		    "initializing pipe %c power sequencer for [ENCODER:%d:%s]\n",
		    pipe_name(intel_dp->pps_pipe), encoder->base.base.id,
		    encoder->base.name);

	/* init power sequencer on this pipe and port */
	intel_dp_init_panel_power_sequencer(intel_dp);
	intel_dp_init_panel_power_sequencer_registers(intel_dp, true);
}
4077
/* VLV pre-enable: program the PHY, then do the full common enable here. */
static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}
4087
/* VLV pre-PLL: prepare port registers, then the PHY's pre-PLL setup. */
static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}
4097
/* CHV pre-enable: PHY setup, common enable, then drop the CL2 override. */
static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}
4110
/* CHV pre-PLL: prepare port registers, then the PHY's pre-PLL setup. */
static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}
4120
/* CHV post-PLL-disable: let the PHY power down its common lanes. */
static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}
4128
4129
4130
4131
4132
4133bool
4134intel_dp_get_link_status(struct intel_dp *intel_dp, u8 link_status[DP_LINK_STATUS_SIZE])
4135{
4136 return drm_dp_dpcd_read(&intel_dp->aux, DP_LANE0_1_STATUS, link_status,
4137 DP_LINK_STATUS_SIZE) == DP_LINK_STATUS_SIZE;
4138}
4139
/* Max supported voltage swing for platforms capped at swing level 2. */
static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}
4144
/* Max supported voltage swing for platforms supporting swing level 3. */
static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}
4149
/* Max supported pre-emphasis for platforms capped at pre-emph level 2. */
static u8 intel_dp_pre_empemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}
4154
/* Max supported pre-emphasis for platforms supporting pre-emph level 3. */
static u8 intel_dp_pre_empemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}
4159
/*
 * Translate the (pre-emphasis, voltage-swing) pair from train_set[0]
 * into VLV PHY register values and program them. The magic constants
 * are hardware-tuned PHY values; invalid combinations (beyond the max
 * swing allowed at a given pre-emphasis) silently return without
 * touching the PHY.
 */
static void vlv_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}
4243
/*
 * Translate the (pre-emphasis, voltage-swing) pair from train_set[0]
 * into CHV PHY de-emphasis/margin values and program them. The unique
 * transition scale is only enabled for the max-swing/no-pre-emph case;
 * invalid combinations silently return without touching the PHY.
 */
static void chv_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, deemph_reg_value,
				 margin_reg_value, uniq_trans_scale);
}
4324
4325static u32 g4x_signal_levels(u8 train_set)
4326{
4327 u32 signal_levels = 0;
4328
4329 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
4330 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
4331 default:
4332 signal_levels |= DP_VOLTAGE_0_4;
4333 break;
4334 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
4335 signal_levels |= DP_VOLTAGE_0_6;
4336 break;
4337 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
4338 signal_levels |= DP_VOLTAGE_0_8;
4339 break;
4340 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
4341 signal_levels |= DP_VOLTAGE_1_2;
4342 break;
4343 }
4344 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
4345 case DP_TRAIN_PRE_EMPH_LEVEL_0:
4346 default:
4347 signal_levels |= DP_PRE_EMPHASIS_0;
4348 break;
4349 case DP_TRAIN_PRE_EMPH_LEVEL_1:
4350 signal_levels |= DP_PRE_EMPHASIS_3_5;
4351 break;
4352 case DP_TRAIN_PRE_EMPH_LEVEL_2:
4353 signal_levels |= DP_PRE_EMPHASIS_6;
4354 break;
4355 case DP_TRAIN_PRE_EMPH_LEVEL_3:
4356 signal_levels |= DP_PRE_EMPHASIS_9_5;
4357 break;
4358 }
4359 return signal_levels;
4360}
4361
4362static void
4363g4x_set_signal_levels(struct intel_dp *intel_dp)
4364{
4365 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4366 u8 train_set = intel_dp->train_set[0];
4367 u32 signal_levels;
4368
4369 signal_levels = g4x_signal_levels(train_set);
4370
4371 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
4372 signal_levels);
4373
4374 intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
4375 intel_dp->DP |= signal_levels;
4376
4377 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
4378 intel_de_posting_read(dev_priv, intel_dp->output_reg);
4379}
4380
4381
4382static u32 snb_cpu_edp_signal_levels(u8 train_set)
4383{
4384 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
4385 DP_TRAIN_PRE_EMPHASIS_MASK);
4386
4387 switch (signal_levels) {
4388 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
4389 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
4390 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
4391 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
4392 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
4393 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
4394 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
4395 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
4396 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
4397 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
4398 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
4399 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
4400 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
4401 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
4402 default:
4403 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
4404 "0x%x\n", signal_levels);
4405 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
4406 }
4407}
4408
4409static void
4410snb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp)
4411{
4412 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4413 u8 train_set = intel_dp->train_set[0];
4414 u32 signal_levels;
4415
4416 signal_levels = snb_cpu_edp_signal_levels(train_set);
4417
4418 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
4419 signal_levels);
4420
4421 intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
4422 intel_dp->DP |= signal_levels;
4423
4424 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
4425 intel_de_posting_read(dev_priv, intel_dp->output_reg);
4426}
4427
4428
4429static u32 ivb_cpu_edp_signal_levels(u8 train_set)
4430{
4431 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
4432 DP_TRAIN_PRE_EMPHASIS_MASK);
4433
4434 switch (signal_levels) {
4435 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
4436 return EDP_LINK_TRAIN_400MV_0DB_IVB;
4437 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
4438 return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
4439 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
4440 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
4441 return EDP_LINK_TRAIN_400MV_6DB_IVB;
4442
4443 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
4444 return EDP_LINK_TRAIN_600MV_0DB_IVB;
4445 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
4446 return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
4447
4448 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
4449 return EDP_LINK_TRAIN_800MV_0DB_IVB;
4450 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
4451 return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
4452
4453 default:
4454 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
4455 "0x%x\n", signal_levels);
4456 return EDP_LINK_TRAIN_500MV_0DB_IVB;
4457 }
4458}
4459
/*
 * Program the IVB CPU eDP voltage-swing/pre-emphasis bits derived from
 * train_set[0] into the DP port register.
 */
static void
ivb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
4478
/*
 * intel_dp_set_signal_levels - apply the current train_set to the hardware
 * @intel_dp: DP encoder
 *
 * Logs the requested vswing/pre-emphasis (and whether the max was
 * reached) and dispatches to the platform-specific implementation.
 */
void intel_dp_set_signal_levels(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];

	drm_dbg_kms(&dev_priv->drm, "Using vswing level %d%s\n",
		    train_set & DP_TRAIN_VOLTAGE_SWING_MASK,
		    train_set & DP_TRAIN_MAX_SWING_REACHED ? " (max)" : "");
	drm_dbg_kms(&dev_priv->drm, "Using pre-emphasis level %d%s\n",
		    (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) >>
		    DP_TRAIN_PRE_EMPHASIS_SHIFT,
		    train_set & DP_TRAIN_MAX_PRE_EMPHASIS_REACHED ?
		    " (max)" : "");

	/* platform-specific vfunc, set up at encoder init */
	intel_dp->set_signal_levels(intel_dp);
}
4495
/*
 * intel_dp_program_link_training_pattern - program the source-side
 * training pattern.
 * @intel_dp: DP encoder
 * @dp_train_pat: DP_TRAINING_PATTERN_* value (possibly with extra flags)
 *
 * Logs the pattern (masked by what the sink's DPCD revision allows) and
 * dispatches to the platform-specific set_link_train vfunc.
 */
void
intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
				       u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_pat_mask = drm_dp_training_pattern_mask(intel_dp->dpcd);

	if (dp_train_pat & train_pat_mask)
		drm_dbg_kms(&dev_priv->drm,
			    "Using DP training pattern TPS%d\n",
			    dp_train_pat & train_pat_mask);

	intel_dp->set_link_train(intel_dp, dp_train_pat);
}
4510
/*
 * Switch the port to emitting the idle pattern, on platforms that have a
 * set_idle_link_train vfunc; a no-op elsewhere.
 */
void intel_dp_set_idle_link_train(struct intel_dp *intel_dp)
{
	if (intel_dp->set_idle_link_train)
		intel_dp->set_idle_link_train(intel_dp);
}
4516
/*
 * Take the DP link down: switch to the idle pattern, then disable the
 * port (and audio). Includes an IBX-specific workaround that briefly
 * re-enables the port on transcoder/pipe A.
 */
static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;
	u32 DP = intel_dp->DP;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	/* first put the link into the idle pattern (encoding per platform) */
	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/* then actually disable the port (and audio) */
	DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * IBX workaround: briefly re-enable the port on pipe A after
	 * disabling it on pipe B - NOTE(review): presumably a HW quirk
	 * needed to fully release the port from pipe B (see the matching
	 * upstream workaround); confirm against Bspec before touching.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * The toggle below causes FIFO underruns on pipe A;
		 * suppress the reporting while we do it.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* re-enable the port on pipe A with training pattern 1 */
		DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->panel_power_down_delay);

	intel_dp->DP = DP;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_pps_lock(intel_dp, wakeref)
			intel_dp->active_pipe = INVALID_PIPE;
	}
}
4589
4590bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
4591{
4592 u8 dprx = 0;
4593
4594 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
4595 &dprx) != 1)
4596 return false;
4597 return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED;
4598}
4599
/* Read and cache the sink's DSC and FEC capabilities from DPCD. */
static void intel_dp_get_dsc_sink_cap(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	/*
	 * Clear the cached DSC caps first: a sink that was DSC capable may
	 * have been replaced by one that is not, so never keep stale data.
	 */
	memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));

	/* Clear fec_capable to avoid using stale values */
	intel_dp->fec_capable = 0;

	/* The DSC DPCD registers exist only from DP 1.4 / eDP 1.4 onwards */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x14 ||
	    intel_dp->edp_dpcd[0] >= DP_EDP_14) {
		if (drm_dp_dpcd_read(&intel_dp->aux, DP_DSC_SUPPORT,
				     intel_dp->dsc_dpcd,
				     sizeof(intel_dp->dsc_dpcd)) < 0)
			drm_err(&i915->drm,
				"Failed to read DPCD register 0x%x\n",
				DP_DSC_SUPPORT);

		drm_dbg_kms(&i915->drm, "DSC DPCD: %*ph\n",
			    (int)sizeof(intel_dp->dsc_dpcd),
			    intel_dp->dsc_dpcd);

		/* FEC capability is only read for external DP (not eDP) */
		if (!intel_dp_is_edp(intel_dp) &&
		    drm_dp_dpcd_readb(&intel_dp->aux, DP_FEC_CAPABILITY,
				      &intel_dp->fec_capable) < 0)
			drm_err(&i915->drm,
				"Failed to read FEC DPCD register\n");

		drm_dbg_kms(&i915->drm, "FEC CAPABILITY: %x\n",
			    intel_dp->fec_capable);
	}
}
4638
/*
 * One-time read of the eDP sink's DPCD: base caps, eDP display control
 * caps, PSR caps, the supported link-rate table and DSC caps.
 * Returns false if the base DPCD read fails, true otherwise.
 */
static bool
intel_edp_init_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv =
		to_i915(dp_to_dig_port(intel_dp)->base.base.dev);

	/* this function is meant to be called only once, on a fresh dpcd */
	drm_WARN_ON(&dev_priv->drm, intel_dp->dpcd[DP_DPCD_REV] != 0);

	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd) != 0)
		return false;

	drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
			 drm_dp_is_branch(intel_dp->dpcd));

	/*
	 * Read the eDP display control registers (DP_EDP_DPCD_REV onwards).
	 * If the read fails, edp_dpcd stays zeroed and the eDP 1.4+ paths
	 * below are simply skipped.
	 */
	if (drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV,
			     intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) ==
	     sizeof(intel_dp->edp_dpcd))
		drm_dbg_kms(&dev_priv->drm, "eDP DPCD: %*ph\n",
			    (int)sizeof(intel_dp->edp_dpcd),
			    intel_dp->edp_dpcd);

	/*
	 * Initialize PSR from DPCD. NOTE(review): called after the
	 * edp_dpcd read above - presumably it depends on those caps.
	 */
	intel_psr_init_dpcd(intel_dp);

	/* eDP 1.4 and up provide an explicit table of supported link rates */
	if (intel_dp->edp_dpcd[0] >= DP_EDP_14) {
		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
		int i;

		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
				 sink_rates, sizeof(sink_rates));

		for (i = 0; i < ARRAY_SIZE(sink_rates); i++) {
			int val = le16_to_cpu(sink_rates[i]);

			/* a zero entry terminates the rate table */
			if (val == 0)
				break;

			/*
			 * Table entries are in 200 kHz units; convert to
			 * the 10 kHz units used for link rates elsewhere
			 * in this file (e.g. 162000 == 1.62 GHz).
			 */
			intel_dp->sink_rates[i] = (val * 200) / 10;
		}
		intel_dp->num_sink_rates = i;
	}

	/*
	 * Prefer the explicit rate table (and LINK_RATE_SET selection) when
	 * the sink provides one; otherwise fall back to the rates derived
	 * from the legacy max link rate field.
	 */
	if (intel_dp->num_sink_rates)
		intel_dp->use_rate_select = true;
	else
		intel_dp_set_sink_rates(intel_dp);

	intel_dp_set_common_rates(intel_dp);

	/* Read the eDP DSC DPCD registers (platforms with DSC hardware) */
	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
		intel_dp_get_dsc_sink_cap(intel_dp);

	return true;
}
4718
4719static bool
4720intel_dp_has_sink_count(struct intel_dp *intel_dp)
4721{
4722 if (!intel_dp->attached_connector)
4723 return false;
4724
4725 return drm_dp_read_sink_count_cap(&intel_dp->attached_connector->base,
4726 intel_dp->dpcd,
4727 &intel_dp->desc);
4728}
4729
/*
 * Re-read the sink's DPCD caps, sink count and downstream port info on
 * detect. Returns false when the sink should be treated as disconnected.
 */
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	int ret;

	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd))
		return false;

	/*
	 * Don't clobber the cached eDP rates/descriptor: for eDP these
	 * were read once by intel_edp_init_dpcd() and don't change.
	 */
	if (!intel_dp_is_edp(intel_dp)) {
		drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
				 drm_dp_is_branch(intel_dp->dpcd));

		intel_dp_set_sink_rates(intel_dp);
		intel_dp_set_common_rates(intel_dp);
	}

	if (intel_dp_has_sink_count(intel_dp)) {
		ret = drm_dp_read_sink_count(&intel_dp->aux);
		if (ret < 0)
			return false;

		/*
		 * Sink count can change between short pulse hpd hence
		 * a member variable in intel_dp will track any changes
		 * between short pulse interrupts.
		 */
		intel_dp->sink_count = ret;

		/*
		 * SINK_COUNT == 0 with a downstream port present means a
		 * dongle is attached but no display is behind it; report
		 * disconnected early and skip the downstream info read.
		 */
		if (!intel_dp->sink_count)
			return false;
	}

	return drm_dp_read_downstream_info(&intel_dp->aux, intel_dp->dpcd,
					   intel_dp->downstream_ports) == 0;
}
4776
4777static bool
4778intel_dp_can_mst(struct intel_dp *intel_dp)
4779{
4780 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4781
4782 return i915->params.enable_dp_mst &&
4783 intel_dp->can_mst &&
4784 drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
4785}
4786
4787static void
4788intel_dp_configure_mst(struct intel_dp *intel_dp)
4789{
4790 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4791 struct intel_encoder *encoder =
4792 &dp_to_dig_port(intel_dp)->base;
4793 bool sink_can_mst = drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
4794
4795 drm_dbg_kms(&i915->drm,
4796 "[ENCODER:%d:%s] MST support: port: %s, sink: %s, modparam: %s\n",
4797 encoder->base.base.id, encoder->base.name,
4798 yesno(intel_dp->can_mst), yesno(sink_can_mst),
4799 yesno(i915->params.enable_dp_mst));
4800
4801 if (!intel_dp->can_mst)
4802 return;
4803
4804 intel_dp->is_mst = sink_can_mst &&
4805 i915->params.enable_dp_mst;
4806
4807 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
4808 intel_dp->is_mst);
4809}
4810
4811static bool
4812intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *sink_irq_vector)
4813{
4814 return drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI,
4815 sink_irq_vector, DP_DPRX_ESI_LEN) ==
4816 DP_DPRX_ESI_LEN;
4817}
4818
4819bool
4820intel_dp_needs_vsc_sdp(const struct intel_crtc_state *crtc_state,
4821 const struct drm_connector_state *conn_state)
4822{
4823
4824
4825
4826
4827
4828 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
4829 return true;
4830
4831 switch (conn_state->colorspace) {
4832 case DRM_MODE_COLORIMETRY_SYCC_601:
4833 case DRM_MODE_COLORIMETRY_OPYCC_601:
4834 case DRM_MODE_COLORIMETRY_BT2020_YCC:
4835 case DRM_MODE_COLORIMETRY_BT2020_RGB:
4836 case DRM_MODE_COLORIMETRY_BT2020_CYCC:
4837 return true;
4838 default:
4839 break;
4840 }
4841
4842 return false;
4843}
4844
/*
 * Pack @vsc into the raw SDP buffer @sdp (which must be at least
 * sizeof(struct dp_sdp) bytes). Returns the packed length, or -ENOSPC
 * when @size is too small.
 */
static ssize_t intel_dp_vsc_sdp_pack(const struct drm_dp_vsc_sdp *vsc,
				     struct dp_sdp *sdp, size_t size)
{
	size_t length = sizeof(struct dp_sdp);

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	/*
	 * VSC SDP header:
	 * HB0 = 0 (Secondary-Data Packet ID for the main stream),
	 * HB1 = packet type, HB2 = revision, HB3 = payload length.
	 */
	sdp->sdp_header.HB0 = 0;
	sdp->sdp_header.HB1 = vsc->sdp_type;
	sdp->sdp_header.HB2 = vsc->revision;
	sdp->sdp_header.HB3 = vsc->length;

	/*
	 * Only revision 0x5 (Pixel Encoding/Colorimetry Format indication)
	 * carries a payload to fill here; other revisions keep the zeroed
	 * data bytes from the memset above.
	 */
	if (vsc->revision != 0x5)
		goto out;

	/* DB16: pixel encoding (upper nibble) and colorimetry (lower) */
	sdp->db[16] = (vsc->pixelformat & 0xf) << 4;
	sdp->db[16] |= vsc->colorimetry & 0xf;

	/* DB17[2:0]: bit depth code */
	switch (vsc->bpc) {
	case 6:
		/* 6bpc uses code 0x0, already zeroed */
		break;
	case 8:
		sdp->db[17] = 0x1;
		break;
	case 10:
		sdp->db[17] = 0x2;
		break;
	case 12:
		sdp->db[17] = 0x3;
		break;
	case 16:
		sdp->db[17] = 0x4;
		break;
	default:
		MISSING_CASE(vsc->bpc);
		break;
	}

	/* DB17[7]: dynamic range (CTA vs VESA) */
	if (vsc->dynamic_range == DP_DYNAMIC_RANGE_CTA)
		sdp->db[17] |= 0x80;

	/* DB18[2:0]: content type */
	sdp->db[18] = vsc->content_type & 0x7;

out:
	return length;
}
4906
/*
 * Pack an HDMI DRM (HDR static metadata) infoframe into a DP SDP.
 * Returns the number of bytes that must be written to the hardware DIP
 * buffer, or -ENOSPC on a size/packing mismatch.
 */
static ssize_t
intel_dp_hdr_metadata_infoframe_sdp_pack(const struct hdmi_drm_infoframe *drm_infoframe,
					 struct dp_sdp *sdp,
					 size_t size)
{
	size_t length = sizeof(struct dp_sdp);
	const int infoframe_size = HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE;
	unsigned char buf[HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE];
	ssize_t len;

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	/* Pack the CTA infoframe first; its payload is copied into the SDP */
	len = hdmi_drm_infoframe_pack_only(drm_infoframe, buf, sizeof(buf));
	if (len < 0) {
		DRM_DEBUG_KMS("buffer size is smaller than hdr metadata infoframe\n");
		return -ENOSPC;
	}

	if (len != infoframe_size) {
		DRM_DEBUG_KMS("wrong static hdr metadata size\n");
		return -ENOSPC;
	}

	/*
	 * SDP header:
	 * HB0: Secondary-Data Packet ID = 0 for the main stream.
	 */
	sdp->sdp_header.HB0 = 0;

	/* HB1: Secondary-data Packet Type = the CTA infoframe type */
	sdp->sdp_header.HB1 = drm_infoframe->type;

	/* HB2: packet-specific header byte, 0x1D for HDR metadata */
	sdp->sdp_header.HB2 = 0x1D;

	/* HB3[7:2]: number of valid data bytes - 1 = 0x13; HB3[1:0] = 0 */
	sdp->sdp_header.HB3 = (0x13 << 2);

	/* DB0: infoframe version */
	sdp->db[0] = drm_infoframe->version;

	/* DB1: length of the following infoframe payload */
	sdp->db[1] = drm_infoframe->length;

	/*
	 * Copy the packed infoframe payload (skipping the CTA header)
	 * into DB2 onwards.
	 */
	BUILD_BUG_ON(sizeof(sdp->db) < HDMI_DRM_INFOFRAME_SIZE + 2);
	memcpy(&sdp->db[2], &buf[HDMI_INFOFRAME_HEADER_SIZE],
	       HDMI_DRM_INFOFRAME_SIZE);

	/*
	 * Size written to hardware: DP SDP header + the two extra header
	 * bytes (DB0/DB1) + the infoframe payload.
	 */
	return sizeof(struct dp_sdp_header) + 2 + HDMI_DRM_INFOFRAME_SIZE;
}
4982
/*
 * Pack and write one DP SDP of @type (VSC or HDR gamut metadata) to the
 * hardware, but only when that infoframe is enabled in @crtc_state.
 */
static void intel_write_dp_sdp(struct intel_encoder *encoder,
			       const struct intel_crtc_state *crtc_state,
			       unsigned int type)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct dp_sdp sdp = {};
	ssize_t len;

	/* Skip if this SDP type is not enabled for the pipe */
	if ((crtc_state->infoframes.enable &
	     intel_hdmi_infoframe_enable(type)) == 0)
		return;

	switch (type) {
	case DP_SDP_VSC:
		len = intel_dp_vsc_sdp_pack(&crtc_state->infoframes.vsc, &sdp,
					    sizeof(sdp));
		break;
	case HDMI_PACKET_TYPE_GAMUT_METADATA:
		len = intel_dp_hdr_metadata_infoframe_sdp_pack(&crtc_state->infoframes.drm.drm,
							       &sdp, sizeof(sdp));
		break;
	default:
		MISSING_CASE(type);
		return;
	}

	if (drm_WARN_ON(&dev_priv->drm, len < 0))
		return;

	dig_port->write_infoframe(encoder, crtc_state, type, &sdp, len);
}
5015
5016void intel_write_dp_vsc_sdp(struct intel_encoder *encoder,
5017 const struct intel_crtc_state *crtc_state,
5018 struct drm_dp_vsc_sdp *vsc)
5019{
5020 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
5021 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5022 struct dp_sdp sdp = {};
5023 ssize_t len;
5024
5025 len = intel_dp_vsc_sdp_pack(vsc, &sdp, sizeof(sdp));
5026
5027 if (drm_WARN_ON(&dev_priv->drm, len < 0))
5028 return;
5029
5030 dig_port->write_infoframe(encoder, crtc_state, DP_SDP_VSC,
5031 &sdp, len);
5032}
5033
/*
 * Enable or disable the DP SDPs for @encoder: clear the video DIP enable
 * bits (leaving the VSC bit untouched while PSR is enabled, since the
 * PSR path uses the VSC SDP), then write the VSC and HDR metadata SDP
 * payloads when enabling.
 */
void intel_dp_set_infoframes(struct intel_encoder *encoder,
			     bool enable,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	i915_reg_t reg = HSW_TVIDEO_DIP_CTL(crtc_state->cpu_transcoder);
	u32 dip_enable = VIDEO_DIP_ENABLE_AVI_HSW | VIDEO_DIP_ENABLE_GCP_HSW |
		VIDEO_DIP_ENABLE_VS_HSW | VIDEO_DIP_ENABLE_GMP_HSW |
		VIDEO_DIP_ENABLE_SPD_HSW | VIDEO_DIP_ENABLE_DRM_GLK;
	u32 val = intel_de_read(dev_priv, reg);

	/* TODO: Add DSC case (DIP_ENABLE_PPS) */
	/* Don't clear the VSC enable bit while PSR owns the VSC SDP */
	if (intel_psr_enabled(intel_dp))
		val &= ~dip_enable;
	else
		val &= ~(dip_enable | VIDEO_DIP_ENABLE_VSC_HSW);

	if (!enable) {
		intel_de_write(dev_priv, reg, val);
		intel_de_posting_read(dev_priv, reg);
		return;
	}

	intel_de_write(dev_priv, reg, val);
	intel_de_posting_read(dev_priv, reg);

	/* When PSR is enabled, the PSR code writes the VSC SDP itself */
	if (!intel_psr_enabled(intel_dp))
		intel_write_dp_sdp(encoder, crtc_state, DP_SDP_VSC);

	intel_write_dp_sdp(encoder, crtc_state, HDMI_PACKET_TYPE_GAMUT_METADATA);
}
5069
5070static int intel_dp_vsc_sdp_unpack(struct drm_dp_vsc_sdp *vsc,
5071 const void *buffer, size_t size)
5072{
5073 const struct dp_sdp *sdp = buffer;
5074
5075 if (size < sizeof(struct dp_sdp))
5076 return -EINVAL;
5077
5078 memset(vsc, 0, size);
5079
5080 if (sdp->sdp_header.HB0 != 0)
5081 return -EINVAL;
5082
5083 if (sdp->sdp_header.HB1 != DP_SDP_VSC)
5084 return -EINVAL;
5085
5086 vsc->sdp_type = sdp->sdp_header.HB1;
5087 vsc->revision = sdp->sdp_header.HB2;
5088 vsc->length = sdp->sdp_header.HB3;
5089
5090 if ((sdp->sdp_header.HB2 == 0x2 && sdp->sdp_header.HB3 == 0x8) ||
5091 (sdp->sdp_header.HB2 == 0x4 && sdp->sdp_header.HB3 == 0xe)) {
5092
5093
5094
5095
5096
5097
5098
5099
5100 return 0;
5101 } else if (sdp->sdp_header.HB2 == 0x5 && sdp->sdp_header.HB3 == 0x13) {
5102
5103
5104
5105
5106
5107 vsc->pixelformat = (sdp->db[16] >> 4) & 0xf;
5108 vsc->colorimetry = sdp->db[16] & 0xf;
5109 vsc->dynamic_range = (sdp->db[17] >> 7) & 0x1;
5110
5111 switch (sdp->db[17] & 0x7) {
5112 case 0x0:
5113 vsc->bpc = 6;
5114 break;
5115 case 0x1:
5116 vsc->bpc = 8;
5117 break;
5118 case 0x2:
5119 vsc->bpc = 10;
5120 break;
5121 case 0x3:
5122 vsc->bpc = 12;
5123 break;
5124 case 0x4:
5125 vsc->bpc = 16;
5126 break;
5127 default:
5128 MISSING_CASE(sdp->db[17] & 0x7);
5129 return -EINVAL;
5130 }
5131
5132 vsc->content_type = sdp->db[18] & 0x7;
5133 } else {
5134 return -EINVAL;
5135 }
5136
5137 return 0;
5138}
5139
5140static int
5141intel_dp_hdr_metadata_infoframe_sdp_unpack(struct hdmi_drm_infoframe *drm_infoframe,
5142 const void *buffer, size_t size)
5143{
5144 int ret;
5145
5146 const struct dp_sdp *sdp = buffer;
5147
5148 if (size < sizeof(struct dp_sdp))
5149 return -EINVAL;
5150
5151 if (sdp->sdp_header.HB0 != 0)
5152 return -EINVAL;
5153
5154 if (sdp->sdp_header.HB1 != HDMI_INFOFRAME_TYPE_DRM)
5155 return -EINVAL;
5156
5157
5158
5159
5160
5161 if (sdp->sdp_header.HB2 != 0x1D)
5162 return -EINVAL;
5163
5164
5165 if ((sdp->sdp_header.HB3 & 0x3) != 0)
5166 return -EINVAL;
5167
5168
5169 if (((sdp->sdp_header.HB3 >> 2) & 0x3f) != 0x13)
5170 return -EINVAL;
5171
5172
5173 if (sdp->db[0] != 1)
5174 return -EINVAL;
5175
5176
5177 if (sdp->db[1] != HDMI_DRM_INFOFRAME_SIZE)
5178 return -EINVAL;
5179
5180 ret = hdmi_drm_infoframe_unpack_only(drm_infoframe, &sdp->db[2],
5181 HDMI_DRM_INFOFRAME_SIZE);
5182
5183 return ret;
5184}
5185
5186static void intel_read_dp_vsc_sdp(struct intel_encoder *encoder,
5187 struct intel_crtc_state *crtc_state,
5188 struct drm_dp_vsc_sdp *vsc)
5189{
5190 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
5191 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
5192 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5193 unsigned int type = DP_SDP_VSC;
5194 struct dp_sdp sdp = {};
5195 int ret;
5196
5197
5198 if (intel_psr_enabled(intel_dp))
5199 return;
5200
5201 if ((crtc_state->infoframes.enable &
5202 intel_hdmi_infoframe_enable(type)) == 0)
5203 return;
5204
5205 dig_port->read_infoframe(encoder, crtc_state, type, &sdp, sizeof(sdp));
5206
5207 ret = intel_dp_vsc_sdp_unpack(vsc, &sdp, sizeof(sdp));
5208
5209 if (ret)
5210 drm_dbg_kms(&dev_priv->drm, "Failed to unpack DP VSC SDP\n");
5211}
5212
5213static void intel_read_dp_hdr_metadata_infoframe_sdp(struct intel_encoder *encoder,
5214 struct intel_crtc_state *crtc_state,
5215 struct hdmi_drm_infoframe *drm_infoframe)
5216{
5217 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
5218 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5219 unsigned int type = HDMI_PACKET_TYPE_GAMUT_METADATA;
5220 struct dp_sdp sdp = {};
5221 int ret;
5222
5223 if ((crtc_state->infoframes.enable &
5224 intel_hdmi_infoframe_enable(type)) == 0)
5225 return;
5226
5227 dig_port->read_infoframe(encoder, crtc_state, type, &sdp,
5228 sizeof(sdp));
5229
5230 ret = intel_dp_hdr_metadata_infoframe_sdp_unpack(drm_infoframe, &sdp,
5231 sizeof(sdp));
5232
5233 if (ret)
5234 drm_dbg_kms(&dev_priv->drm,
5235 "Failed to unpack DP HDR Metadata Infoframe SDP\n");
5236}
5237
5238void intel_read_dp_sdp(struct intel_encoder *encoder,
5239 struct intel_crtc_state *crtc_state,
5240 unsigned int type)
5241{
5242 if (encoder->type != INTEL_OUTPUT_DDI)
5243 return;
5244
5245 switch (type) {
5246 case DP_SDP_VSC:
5247 intel_read_dp_vsc_sdp(encoder, crtc_state,
5248 &crtc_state->infoframes.vsc);
5249 break;
5250 case HDMI_PACKET_TYPE_GAMUT_METADATA:
5251 intel_read_dp_hdr_metadata_infoframe_sdp(encoder, crtc_state,
5252 &crtc_state->infoframes.drm.drm);
5253 break;
5254 default:
5255 MISSING_CASE(type);
5256 break;
5257 }
5258}
5259
/*
 * Handle a DP compliance link-training test request: read the lane count
 * and link rate the sink asks for, validate them against our own link
 * parameters, and stash them in the compliance state for the retrain.
 * Returns DP_TEST_ACK or DP_TEST_NAK.
 */
static u8 intel_dp_autotest_link_training(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int status = 0;
	int test_link_rate;
	u8 test_lane_count, test_link_bw;

	/* Requested lane count (lower bits of TEST_LANE_COUNT) */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LANE_COUNT,
				   &test_lane_count);

	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Lane count read failed\n");
		return DP_TEST_NAK;
	}
	test_lane_count &= DP_MAX_LANE_COUNT_MASK;

	/* Requested link rate, as a BW code to be converted below */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LINK_RATE,
				   &test_link_bw);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Link Rate read failed\n");
		return DP_TEST_NAK;
	}
	test_link_rate = drm_dp_bw_code_to_link_rate(test_link_bw);

	/* NAK anything we can't actually drive */
	if (!intel_dp_link_params_valid(intel_dp, test_link_rate,
					test_lane_count))
		return DP_TEST_NAK;

	intel_dp->compliance.test_lane_count = test_lane_count;
	intel_dp->compliance.test_link_rate = test_link_rate;

	return DP_TEST_ACK;
}
5297
/*
 * Handle a DP compliance video-pattern test request: read the requested
 * pattern, resolution and pixel format from DPCD, accept only the color
 * ramp pattern in RGB/VESA range with 6 or 8 bpc, and record the test
 * parameters. Returns DP_TEST_ACK or DP_TEST_NAK.
 */
static u8 intel_dp_autotest_video_pattern(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 test_pattern;
	u8 test_misc;
	__be16 h_width, v_height;
	int status = 0;

	/* Read the TEST_PATTERN (DP CTS pattern selection) */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_PATTERN,
				   &test_pattern);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Test pattern read failed\n");
		return DP_TEST_NAK;
	}
	/* Only the color ramp pattern is supported */
	if (test_pattern != DP_COLOR_RAMP)
		return DP_TEST_NAK;

	/* Requested width/height are big-endian 16-bit values */
	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_H_WIDTH_HI,
				  &h_width, 2);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "H Width read failed\n");
		return DP_TEST_NAK;
	}

	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_V_HEIGHT_HI,
				  &v_height, 2);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "V Height read failed\n");
		return DP_TEST_NAK;
	}

	/* Color format, dynamic range and bit depth come from TEST_MISC0 */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_MISC0,
				   &test_misc);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "TEST MISC read failed\n");
		return DP_TEST_NAK;
	}
	if ((test_misc & DP_TEST_COLOR_FORMAT_MASK) != DP_COLOR_FORMAT_RGB)
		return DP_TEST_NAK;
	if (test_misc & DP_TEST_DYNAMIC_RANGE_CEA)
		return DP_TEST_NAK;
	switch (test_misc & DP_TEST_BIT_DEPTH_MASK) {
	case DP_TEST_BIT_DEPTH_6:
		intel_dp->compliance.test_data.bpc = 6;
		break;
	case DP_TEST_BIT_DEPTH_8:
		intel_dp->compliance.test_data.bpc = 8;
		break;
	default:
		return DP_TEST_NAK;
	}

	intel_dp->compliance.test_data.video_pattern = test_pattern;
	intel_dp->compliance.test_data.hdisplay = be16_to_cpu(h_width);
	intel_dp->compliance.test_data.vdisplay = be16_to_cpu(v_height);
	/* Set test active flag here so userspace doesn't interrupt things */
	intel_dp->compliance.test_active = true;

	return DP_TEST_ACK;
}
5359
/*
 * Handle a DP compliance EDID-read test request: if the cached EDID is
 * missing/corrupt or the read saw too many I2C defers, report failsafe
 * resolution; otherwise write the last EDID block's checksum back to the
 * sink and report preferred resolution.
 */
static u8 intel_dp_autotest_edid(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 test_result = DP_TEST_ACK;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_connector *connector = &intel_connector->base;

	if (intel_connector->detect_edid == NULL ||
	    connector->edid_corrupt ||
	    intel_dp->aux.i2c_defer_count > 6) {
		/*
		 * EDID read failed or was unreliable (NACKs/DEFERs seen
		 * during the AUX-over-I2C transfer): fall back to the
		 * failsafe resolution for the test.
		 */
		if (intel_dp->aux.i2c_nack_count > 0 ||
		    intel_dp->aux.i2c_defer_count > 0)
			drm_dbg_kms(&i915->drm,
				    "EDID read had %d NACKs, %d DEFERs\n",
				    intel_dp->aux.i2c_nack_count,
				    intel_dp->aux.i2c_defer_count);
		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_FAILSAFE;
	} else {
		struct edid *block = intel_connector->detect_edid;

		/* Advance to the last (extension) EDID block */
		block += intel_connector->detect_edid->extensions;

		/* Report the checksum of that block back to the sink */
		if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_EDID_CHECKSUM,
				       block->checksum) <= 0)
			drm_dbg_kms(&i915->drm,
				    "Failed to write EDID checksum\n");

		test_result = DP_TEST_ACK | DP_TEST_EDID_CHECKSUM_WRITE;
		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_PREFERRED;
	}

	/* Set test active flag here so userspace doesn't interrupt things */
	intel_dp->compliance.test_active = true;

	return test_result;
}
5406
5407static u8 intel_dp_prepare_phytest(struct intel_dp *intel_dp)
5408{
5409 struct drm_dp_phy_test_params *data =
5410 &intel_dp->compliance.test_data.phytest;
5411
5412 if (drm_dp_get_phy_test_pattern(&intel_dp->aux, data)) {
5413 DRM_DEBUG_KMS("DP Phy Test pattern AUX read failure\n");
5414 return DP_TEST_NAK;
5415 }
5416
5417
5418
5419
5420
5421 intel_dp->link_mst = false;
5422
5423 return DP_TEST_ACK;
5424}
5425
/*
 * Program the DDI compliance pattern generator registers for the PHY
 * test pattern requested by the sink (cached in the compliance state).
 */
static void intel_dp_phy_pattern_update(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv =
			to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_dp_phy_test_params *data =
			&intel_dp->compliance.test_data.phytest;
	struct intel_crtc *crtc = to_intel_crtc(dig_port->base.base.crtc);
	enum pipe pipe = crtc->pipe;
	u32 pattern_val;

	switch (data->phy_pattern) {
	case DP_PHY_TEST_PATTERN_NONE:
		DRM_DEBUG_KMS("Disable Phy Test Pattern\n");
		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe), 0x0);
		break;
	case DP_PHY_TEST_PATTERN_D10_2:
		DRM_DEBUG_KMS("Set D10.2 Phy Test Pattern\n");
		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
			       DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_D10_2);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		DRM_DEBUG_KMS("Set Error Count Phy Test Pattern\n");
		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
			       DDI_DP_COMP_CTL_ENABLE |
			       DDI_DP_COMP_CTL_SCRAMBLED_0);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		DRM_DEBUG_KMS("Set PRBS7 Phy Test Pattern\n");
		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
			       DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		/*
		 * The fixed 80-bit custom pattern is loaded into the three
		 * DDI_DP_COMP_PAT registers (LSB first) before enabling
		 * the generator in CUSTOM80 mode.
		 */
		DRM_DEBUG_KMS("Set 80Bit Custom Phy Test Pattern 0x3e0f83e0 0x0f83e0f8 0x0000f83e\n");
		pattern_val = 0x3e0f83e0;
		intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 0), pattern_val);
		pattern_val = 0x0f83e0f8;
		intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 1), pattern_val);
		pattern_val = 0x0000f83e;
		intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 2), pattern_val);
		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
			       DDI_DP_COMP_CTL_ENABLE |
			       DDI_DP_COMP_CTL_CUSTOM80);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/*
		 * HBR2 compliance pattern: 0xFB selects the CP2520
		 * pattern variant in the low bits of the control register.
		 */
		DRM_DEBUG_KMS("Set HBR2 compliance Phy Test Pattern\n");
		pattern_val = 0xFB;
		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
			       DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_HBR2 |
			       pattern_val);
		break;
	default:
		WARN(1, "Invalid Phy Test Pattern\n");
	}
}
5491
/*
 * Temporarily disable the transcoder, pipe and DP transport around a
 * PHY compliance pattern change. NOTE(review): the pipe enum is used to
 * index transcoder registers here - assumes pipe == transcoder on this
 * path; confirm for the platforms that reach it.
 */
static void
intel_dp_autotest_phy_ddi_disable(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *crtc = to_intel_crtc(dig_port->base.base.crtc);
	enum pipe pipe = crtc->pipe;
	u32 trans_ddi_func_ctl_value, trans_conf_value, dp_tp_ctl_value;

	trans_ddi_func_ctl_value = intel_de_read(dev_priv,
						 TRANS_DDI_FUNC_CTL(pipe));
	trans_conf_value = intel_de_read(dev_priv, PIPECONF(pipe));
	dp_tp_ctl_value = intel_de_read(dev_priv, TGL_DP_TP_CTL(pipe));

	/* Clear the function enable and port selection bits */
	trans_ddi_func_ctl_value &= ~(TRANS_DDI_FUNC_ENABLE |
				      TGL_TRANS_DDI_PORT_MASK);
	trans_conf_value &= ~PIPECONF_ENABLE;
	dp_tp_ctl_value &= ~DP_TP_CTL_ENABLE;

	intel_de_write(dev_priv, PIPECONF(pipe), trans_conf_value);
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(pipe),
		       trans_ddi_func_ctl_value);
	intel_de_write(dev_priv, TGL_DP_TP_CTL(pipe), dp_tp_ctl_value);
}
5517
/*
 * Re-enable the transcoder, pipe and DP transport after a PHY compliance
 * pattern change (counterpart of intel_dp_autotest_phy_ddi_disable()).
 * NOTE(review): @lane_cnt is currently unused.
 */
static void
intel_dp_autotest_phy_ddi_enable(struct intel_dp *intel_dp, uint8_t lane_cnt)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = dig_port->base.port;
	struct intel_crtc *crtc = to_intel_crtc(dig_port->base.base.crtc);
	enum pipe pipe = crtc->pipe;
	u32 trans_ddi_func_ctl_value, trans_conf_value, dp_tp_ctl_value;

	trans_ddi_func_ctl_value = intel_de_read(dev_priv,
						 TRANS_DDI_FUNC_CTL(pipe));
	trans_conf_value = intel_de_read(dev_priv, PIPECONF(pipe));
	dp_tp_ctl_value = intel_de_read(dev_priv, TGL_DP_TP_CTL(pipe));

	/* Restore the function enable and the port selection */
	trans_ddi_func_ctl_value |= TRANS_DDI_FUNC_ENABLE |
				    TGL_TRANS_DDI_SELECT_PORT(port);
	trans_conf_value |= PIPECONF_ENABLE;
	dp_tp_ctl_value |= DP_TP_CTL_ENABLE;

	intel_de_write(dev_priv, PIPECONF(pipe), trans_conf_value);
	intel_de_write(dev_priv, TGL_DP_TP_CTL(pipe), dp_tp_ctl_value);
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(pipe),
		       trans_ddi_func_ctl_value);
}
5544
/*
 * Carry out a PHY compliance test request: pick up the sink's requested
 * vswing/pre-emphasis from the link status, reprogram the DDI with the
 * requested test pattern, and mirror the pattern selection to the sink.
 */
void intel_dp_process_phy_request(struct intel_dp *intel_dp)
{
	struct drm_dp_phy_test_params *data =
		&intel_dp->compliance.test_data.phytest;
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		DRM_DEBUG_KMS("failed to get link status\n");
		return;
	}

	/* retrieve vswing & pre-emphasis setting */
	intel_dp_get_adjust_train(intel_dp, link_status);

	/* The DDI must be disabled while the pattern/levels change */
	intel_dp_autotest_phy_ddi_disable(intel_dp);

	intel_dp_set_signal_levels(intel_dp);

	intel_dp_phy_pattern_update(intel_dp);

	intel_dp_autotest_phy_ddi_enable(intel_dp, data->num_lanes);

	/* Tell the sink which pattern we are now transmitting */
	drm_dp_set_phy_test_pattern(&intel_dp->aux, data,
				    link_status[DP_DPCD_REV]);
}
5570
5571static u8 intel_dp_autotest_phy_pattern(struct intel_dp *intel_dp)
5572{
5573 u8 test_result;
5574
5575 test_result = intel_dp_prepare_phytest(intel_dp);
5576 if (test_result != DP_TEST_ACK)
5577 DRM_ERROR("Phy test preparation failed\n");
5578
5579 intel_dp_process_phy_request(intel_dp);
5580
5581 return test_result;
5582}
5583
/*
 * Service a DP compliance TEST_REQUEST from the sink: dispatch to the
 * matching autotest handler and write the ACK/NAK back to TEST_RESPONSE.
 */
static void intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 response = DP_TEST_NAK;
	u8 request = 0;
	int status;

	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_REQUEST, &request);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm,
			    "Could not read test request from sink\n");
		goto update_status;
	}

	switch (request) {
	case DP_TEST_LINK_TRAINING:
		drm_dbg_kms(&i915->drm, "LINK_TRAINING test requested\n");
		response = intel_dp_autotest_link_training(intel_dp);
		break;
	case DP_TEST_LINK_VIDEO_PATTERN:
		drm_dbg_kms(&i915->drm, "TEST_PATTERN test requested\n");
		response = intel_dp_autotest_video_pattern(intel_dp);
		break;
	case DP_TEST_LINK_EDID_READ:
		drm_dbg_kms(&i915->drm, "EDID test requested\n");
		response = intel_dp_autotest_edid(intel_dp);
		break;
	case DP_TEST_LINK_PHY_TEST_PATTERN:
		drm_dbg_kms(&i915->drm, "PHY_PATTERN test requested\n");
		response = intel_dp_autotest_phy_pattern(intel_dp);
		break;
	default:
		drm_dbg_kms(&i915->drm, "Invalid test request '%02x'\n",
			    request);
		break;
	}

	/* Remember the acknowledged test type for later processing */
	if (response & DP_TEST_ACK)
		intel_dp->compliance.test_type = request;

update_status:
	status = drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_RESPONSE, response);
	if (status <= 0)
		drm_dbg_kms(&i915->drm,
			    "Could not write test response to sink\n");
}
5630
5631
5632
5633
5634
5635
5636
5637
5638
5639
5640
5641
5642
5643
/*
 * Service MST sink interrupts: repeatedly read the ESI vector, check
 * channel equalization, hand topology/up-request events to the MST
 * manager, and ack the handled ESI bits (retrying the ack write up to
 * 3 times). Returns false when the link needs retraining or the sink
 * stopped responding.
 */
static bool
intel_dp_check_mst_status(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	bool link_ok = true;

	drm_WARN_ON_ONCE(&i915->drm, intel_dp->active_mst_links < 0);

	for (;;) {
		u8 esi[DP_DPRX_ESI_LEN] = {};
		bool handled;
		int retry;

		if (!intel_dp_get_sink_irq_esi(intel_dp, esi)) {
			drm_dbg_kms(&i915->drm,
				    "failed to get ESI - device may have failed\n");
			link_ok = false;

			break;
		}

		/* Lane/channel status lives at offset 10 within the ESI */
		if (intel_dp->active_mst_links > 0 && link_ok &&
		    !drm_dp_channel_eq_ok(&esi[10], intel_dp->lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "channel EQ not ok, retraining\n");
			link_ok = false;
		}

		drm_dbg_kms(&i915->drm, "got esi %3ph\n", esi);

		drm_dp_mst_hpd_irq(&intel_dp->mst_mgr, esi, &handled);
		if (!handled)
			break;

		/* Ack the serviced event bits; retry the write a few times */
		for (retry = 0; retry < 3; retry++) {
			int wret;

			wret = drm_dp_dpcd_write(&intel_dp->aux,
						 DP_SINK_COUNT_ESI+1,
						 &esi[1], 3);
			if (wret == 3)
				break;
		}
	}

	return link_ok;
}
5692
/*
 * Decide whether the trained link has degraded and needs retraining,
 * based on the sink-reported channel equalization status.
 */
static bool
intel_dp_needs_link_retrain(struct intel_dp *intel_dp)
{
	u8 link_status[DP_LINK_STATUS_SIZE];

	/* Never retrain a link we have not trained in the first place */
	if (!intel_dp->link_trained)
		return false;

	/*
	 * While PSR is active the link status is not reliable for this
	 * check, so skip retraining (the PSR code manages the link).
	 * NOTE(review): based on the code only - confirm the exact PSR
	 * interaction against the PSR implementation.
	 */
	if (intel_psr_enabled(intel_dp))
		return false;

	if (!intel_dp_get_link_status(intel_dp, link_status))
		return false;

	/*
	 * Validate the cached values of intel_dp->link_rate and
	 * intel_dp->lane_count before attempting to retrain.
	 */
	if (!intel_dp_link_params_valid(intel_dp, intel_dp->link_rate,
					intel_dp->lane_count))
		return false;

	/* Retrain if Channel EQ or CR not ok */
	return !drm_dp_channel_eq_ok(link_status, intel_dp->lane_count);
}
5726
5727static bool intel_dp_has_connector(struct intel_dp *intel_dp,
5728 const struct drm_connector_state *conn_state)
5729{
5730 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5731 struct intel_encoder *encoder;
5732 enum pipe pipe;
5733
5734 if (!conn_state->best_encoder)
5735 return false;
5736
5737
5738 encoder = &dp_to_dig_port(intel_dp)->base;
5739 if (conn_state->best_encoder == &encoder->base)
5740 return true;
5741
5742
5743 for_each_pipe(i915, pipe) {
5744 encoder = &intel_dp->mst_encoders[pipe]->base;
5745 if (conn_state->best_encoder == &encoder->base)
5746 return true;
5747 }
5748
5749 return false;
5750}
5751
/*
 * Collect (and lock) the CRTCs currently driving this DP port that
 * would be affected by a link retrain. On return *crtc_mask holds the
 * affected CRTC mask (0 if no retrain is needed); a non-zero return is
 * a modeset-lock error (e.g. -EDEADLK) for the caller to handle.
 */
static int intel_dp_prep_link_retrain(struct intel_dp *intel_dp,
				      struct drm_modeset_acquire_ctx *ctx,
				      u32 *crtc_mask)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct drm_connector_list_iter conn_iter;
	struct intel_connector *connector;
	int ret = 0;

	*crtc_mask = 0;

	if (!intel_dp_needs_link_retrain(intel_dp))
		return 0;

	drm_connector_list_iter_begin(&i915->drm, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state =
			connector->base.state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!intel_dp_has_connector(intel_dp, conn_state))
			continue;

		crtc = to_intel_crtc(conn_state->crtc);
		if (!crtc)
			continue;

		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
		if (ret)
			break;

		crtc_state = to_intel_crtc_state(crtc->base.state);

		drm_WARN_ON(&i915->drm, !intel_crtc_has_dp_encoder(crtc_state));

		if (!crtc_state->hw.active)
			continue;

		/* Skip CRTCs with a commit still in flight */
		if (conn_state->commit &&
		    !try_wait_for_completion(&conn_state->commit->hw_done))
			continue;

		*crtc_mask |= drm_crtc_mask(&crtc->base);
	}
	drm_connector_list_iter_end(&conn_iter);

	/* Re-check: the link may have recovered while we were locking */
	if (!intel_dp_needs_link_retrain(intel_dp))
		*crtc_mask = 0;

	return ret;
}
5804
5805static bool intel_dp_is_connected(struct intel_dp *intel_dp)
5806{
5807 struct intel_connector *connector = intel_dp->attached_connector;
5808
5809 return connector->base.status == connector_status_connected ||
5810 intel_dp->is_mst;
5811}
5812
/*
 * Retrain the DP link if it has degraded: lock the affected CRTCs,
 * suppress FIFO underrun reporting across the retrain, run link
 * training, then wait a vblank per CRTC before re-enabling underrun
 * reporting. Returns 0 or a modeset-lock error (-EDEADLK etc.).
 */
int intel_dp_retrain_link(struct intel_encoder *encoder,
			  struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc;
	u32 crtc_mask;
	int ret;

	if (!intel_dp_is_connected(intel_dp))
		return 0;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	ret = intel_dp_prep_link_retrain(intel_dp, ctx, &crtc_mask);
	if (ret)
		return ret;

	/* Nothing to retrain */
	if (crtc_mask == 0)
		return 0;

	drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s] retraining link\n",
		    encoder->base.base.id, encoder->base.name);

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* Suppress underruns caused by re-training */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, false);
		if (crtc_state->has_pch_encoder)
			intel_set_pch_fifo_underrun_reporting(dev_priv,
							      intel_crtc_pch_transcoder(crtc), false);
	}

	intel_dp_start_link_train(intel_dp);
	intel_dp_stop_link_train(intel_dp);

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* Keep underrun reporting disabled until things are stable */
		intel_wait_for_vblank(dev_priv, crtc->pipe);

		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, true);
		if (crtc_state->has_pch_encoder)
			intel_set_pch_fifo_underrun_reporting(dev_priv,
							      intel_crtc_pch_transcoder(crtc), true);
	}

	return 0;
}
5869
5870
5871
5872
5873
5874
5875
5876
5877
5878
5879
5880
5881
5882static enum intel_hotplug_state
5883intel_dp_hotplug(struct intel_encoder *encoder,
5884 struct intel_connector *connector)
5885{
5886 struct drm_modeset_acquire_ctx ctx;
5887 enum intel_hotplug_state state;
5888 int ret;
5889
5890 state = intel_encoder_hotplug(encoder, connector);
5891
5892 drm_modeset_acquire_init(&ctx, 0);
5893
5894 for (;;) {
5895 ret = intel_dp_retrain_link(encoder, &ctx);
5896
5897 if (ret == -EDEADLK) {
5898 drm_modeset_backoff(&ctx);
5899 continue;
5900 }
5901
5902 break;
5903 }
5904
5905 drm_modeset_drop_locks(&ctx);
5906 drm_modeset_acquire_fini(&ctx);
5907 drm_WARN(encoder->base.dev, ret,
5908 "Acquiring modeset locks failed with %i\n", ret);
5909
5910
5911
5912
5913
5914 if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
5915 state = INTEL_HOTPLUG_RETRY;
5916
5917 return state;
5918}
5919
5920static void intel_dp_check_service_irq(struct intel_dp *intel_dp)
5921{
5922 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5923 u8 val;
5924
5925 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
5926 return;
5927
5928 if (drm_dp_dpcd_readb(&intel_dp->aux,
5929 DP_DEVICE_SERVICE_IRQ_VECTOR, &val) != 1 || !val)
5930 return;
5931
5932 drm_dp_dpcd_writeb(&intel_dp->aux, DP_DEVICE_SERVICE_IRQ_VECTOR, val);
5933
5934 if (val & DP_AUTOMATED_TEST_REQUEST)
5935 intel_dp_handle_test_request(intel_dp);
5936
5937 if (val & DP_CP_IRQ)
5938 intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
5939
5940 if (val & DP_SINK_SPECIFIC_IRQ)
5941 drm_dbg_kms(&i915->drm, "Sink specific irq unhandled\n");
5942}
5943
5944
5945
5946
5947
5948
5949
5950
5951
5952
5953
5954
5955
5956
/*
 * Handle a short HPD pulse (IRQ_HPD) from the sink.
 *
 * Returns true when the pulse was fully handled, false when a full
 * connector reprobe/retrain is needed (sink count changed, DPCD read
 * failed, or the link needs retraining).
 */
static bool
intel_dp_short_pulse(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 old_sink_count = intel_dp->sink_count;
	bool ret;

	/*
	 * Clearing compliance test variables to allow capturing
	 * of values for next automated test request.
	 */
	memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));

	/*
	 * Now read the DPCD to see if it's actually running. If the sink
	 * count has changed, or the read fails, punt to a full detect cycle
	 * by returning false.
	 */
	ret = intel_dp_get_dpcd(intel_dp);

	if ((old_sink_count != intel_dp->sink_count) || !ret) {
		/* No need to proceed if we are going to do full detect */
		return false;
	}

	intel_dp_check_service_irq(intel_dp);

	/* Handle CEC interrupts, if any */
	drm_dp_cec_irq(&intel_dp->aux);

	/* defer to the hotplug work for link retraining if needed */
	if (intel_dp_needs_link_retrain(intel_dp))
		return false;

	intel_psr_short_pulse(intel_dp);

	if (intel_dp->compliance.test_type == DP_TEST_LINK_TRAINING) {
		drm_dbg_kms(&dev_priv->drm,
			    "Link Training Compliance Test requested\n");
		/* Send a Hotplug Uevent to userspace to start modeset */
		drm_kms_helper_hotplug_event(&dev_priv->drm);
	}

	return true;
}
6003
6004
/*
 * Determine connector status by probing the sink's DPCD.
 * Not valid for eDP (eDP is always connected).
 */
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_lspcon *lspcon = dp_to_lspcon(intel_dp);
	u8 *dpcd = intel_dp->dpcd;
	u8 type;

	if (drm_WARN_ON(&i915->drm, intel_dp_is_edp(intel_dp)))
		return connector_status_connected;

	if (lspcon->active)
		lspcon_resume(lspcon);

	if (!intel_dp_get_dpcd(intel_dp))
		return connector_status_disconnected;

	/* if there's no downstream port, we're done */
	if (!drm_dp_is_branch(dpcd))
		return connector_status_connected;

	/* If we're HPD-aware, SINK_COUNT changes dynamically */
	if (intel_dp_has_sink_count(intel_dp) &&
	    intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
		return intel_dp->sink_count ?
		connector_status_connected : connector_status_disconnected;
	}

	if (intel_dp_can_mst(intel_dp))
		return connector_status_connected;

	/* If no HPD, poke DDC gently */
	if (drm_probe_ddc(&intel_dp->aux.ddc))
		return connector_status_connected;

	/* Well we tried, say unknown for unreliable port types */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
		type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
		if (type == DP_DS_PORT_TYPE_VGA ||
		    type == DP_DS_PORT_TYPE_NON_EDID)
			return connector_status_unknown;
	} else {
		type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
			DP_DWN_STRM_PORT_TYPE_MASK;
		if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
		    type == DP_DWN_STRM_PORT_TYPE_OTHER)
			return connector_status_unknown;
	}

	/* Anything else is out of spec, warn and ignore */
	drm_dbg_kms(&i915->drm, "Broken DP branch device, ignoring\n");
	return connector_status_disconnected;
}
6058
/* eDP panels are always connected once registered. */
static enum drm_connector_status
edp_detect(struct intel_dp *intel_dp)
{
	return connector_status_connected;
}
6064
6065static bool ibx_digital_port_connected(struct intel_encoder *encoder)
6066{
6067 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
6068 u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];
6069
6070 return intel_de_read(dev_priv, SDEISR) & bit;
6071}
6072
/*
 * Live-state detection for G4X: check the per-port HPD live status bit
 * in PORT_HOTPLUG_STAT. Only ports B/C/D have HPD on this platform.
 */
static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}
6095
/*
 * Live-state detection for GM45: same scheme as g4x but the live status
 * bits live at different positions in PORT_HOTPLUG_STAT.
 */
static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}
6118
/*
 * Live-state detection for ILK+ CPU-side ports: check this encoder's
 * HPD bit in DEISR.
 */
static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}
6126
6127
6128
6129
6130
6131
6132
6133
6134
6135
6136
6137
/*
 * intel_digital_port_connected - is the specified port connected?
 * @encoder: intel_encoder
 *
 * Reads the platform-specific live-state register via the port's
 * ->connected() hook, holding a DISPLAY_CORE power wakeref for the
 * duration of the register access.
 *
 * Return: true if the port is connected, false otherwise.
 */
bool intel_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_connected = false;
	intel_wakeref_t wakeref;

	with_intel_display_power(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref)
		is_connected = dig_port->connected(encoder);

	return is_connected;
}
6150
6151static struct edid *
6152intel_dp_get_edid(struct intel_dp *intel_dp)
6153{
6154 struct intel_connector *intel_connector = intel_dp->attached_connector;
6155
6156
6157 if (intel_connector->edid) {
6158
6159 if (IS_ERR(intel_connector->edid))
6160 return NULL;
6161
6162 return drm_edid_duplicate(intel_connector->edid);
6163 } else
6164 return drm_get_edid(&intel_connector->base,
6165 &intel_dp->aux.ddc);
6166}
6167
/*
 * Cache the downstream facing port (DFP) capabilities — max bpc, max
 * dotclock and TMDS clock range — from the branch device's DPCD and the
 * sink's EDID, for later use in mode validation/compute_config.
 */
static void
intel_dp_update_dfp(struct intel_dp *intel_dp,
		    const struct edid *edid)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;

	intel_dp->dfp.max_bpc =
		drm_dp_downstream_max_bpc(intel_dp->dpcd,
					  intel_dp->downstream_ports, edid);

	intel_dp->dfp.max_dotclock =
		drm_dp_downstream_max_dotclock(intel_dp->dpcd,
					       intel_dp->downstream_ports);

	intel_dp->dfp.min_tmds_clock =
		drm_dp_downstream_min_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 edid);
	intel_dp->dfp.max_tmds_clock =
		drm_dp_downstream_max_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 edid);

	drm_dbg_kms(&i915->drm,
		    "[CONNECTOR:%d:%s] DFP max bpc %d, max dotclock %d, TMDS clock %d-%d\n",
		    connector->base.base.id, connector->base.name,
		    intel_dp->dfp.max_bpc,
		    intel_dp->dfp.max_dotclock,
		    intel_dp->dfp.min_tmds_clock,
		    intel_dp->dfp.max_tmds_clock);
}
6200
/*
 * Work out the YCbCr 4:2:0 capabilities for this connector: whether
 * 4:2:0 modes are allowed at all, and whether the branch device must do
 * the 4:4:4 -> 4:2:0 conversion for us.
 */
static void
intel_dp_update_420(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	bool is_branch, ycbcr_420_passthrough, ycbcr_444_to_420;

	/* No YCbCr output support on gmch platforms */
	if (HAS_GMCH(i915))
		return;

	/*
	 * ILK doesn't seem capable of DP YCbCr output. The
	 * displayed image is severly corrupted. SNB+ is fine.
	 */
	if (IS_GEN(i915, 5))
		return;

	is_branch = drm_dp_is_branch(intel_dp->dpcd);
	ycbcr_420_passthrough =
		drm_dp_downstream_420_passthrough(intel_dp->dpcd,
						  intel_dp->downstream_ports);
	ycbcr_444_to_420 =
		drm_dp_downstream_444_to_420_conversion(intel_dp->dpcd,
							intel_dp->downstream_ports);

	if (INTEL_GEN(i915) >= 11) {
		/* Prefer 4:2:0 passthrough over 4:4:4->4:2:0 conversion */
		intel_dp->dfp.ycbcr_444_to_420 =
			ycbcr_444_to_420 && !ycbcr_420_passthrough;

		connector->base.ycbcr_420_allowed =
			!is_branch || ycbcr_444_to_420 || ycbcr_420_passthrough;
	} else {
		/* 4:4:4->4:2:0 conversion is the only way */
		intel_dp->dfp.ycbcr_444_to_420 = ycbcr_444_to_420;

		connector->base.ycbcr_420_allowed = ycbcr_444_to_420;
	}

	drm_dbg_kms(&i915->drm,
		    "[CONNECTOR:%d:%s] YCbCr 4:2:0 allowed? %s, YCbCr 4:4:4->4:2:0 conversion? %s\n",
		    connector->base.base.id, connector->base.name,
		    yesno(connector->base.ycbcr_420_allowed),
		    yesno(intel_dp->dfp.ycbcr_444_to_420));
}
6247
/*
 * Fetch a fresh EDID for the sink and derive all EDID-dependent state:
 * DFP capabilities, 4:2:0 support, HDMI/audio sink flags, CEC and
 * EDID quirks. Any previously cached EDID state is dropped first.
 */
static void
intel_dp_set_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct edid *edid;

	intel_dp_unset_edid(intel_dp);
	edid = intel_dp_get_edid(intel_dp);
	connector->detect_edid = edid;

	intel_dp_update_dfp(intel_dp, edid);
	intel_dp_update_420(intel_dp);

	if (edid && edid->input & DRM_EDID_INPUT_DIGITAL) {
		intel_dp->has_hdmi_sink = drm_detect_hdmi_monitor(edid);
		intel_dp->has_audio = drm_detect_monitor_audio(edid);
	}

	drm_dp_cec_set_edid(&intel_dp->aux, edid);
	intel_dp->edid_quirks = drm_dp_get_edid_quirks(edid);
}
6269
/*
 * Drop the cached EDID and reset every piece of state that was derived
 * from it (HDMI/audio flags, quirks, DFP limits, 4:2:0 support).
 */
static void
intel_dp_unset_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	drm_dp_cec_unset_edid(&intel_dp->aux);
	kfree(connector->detect_edid);
	connector->detect_edid = NULL;

	intel_dp->has_hdmi_sink = false;
	intel_dp->has_audio = false;
	intel_dp->edid_quirks = 0;

	intel_dp->dfp.max_bpc = 0;
	intel_dp->dfp.max_dotclock = 0;
	intel_dp->dfp.min_tmds_clock = 0;
	intel_dp->dfp.max_tmds_clock = 0;

	intel_dp->dfp.ycbcr_444_to_420 = false;
	connector->base.ycbcr_420_allowed = false;
}
6291
/*
 * Connector ->detect_ctx hook: determine whether a sink is present and
 * refresh all sink-derived state (DSC caps, MST mode, link params, EDID).
 * Must be called with connection_mutex held; may take further locks via
 * @ctx and return -EDEADLK for the caller to back off and retry.
 */
static int
intel_dp_detect(struct drm_connector *connector,
		struct drm_modeset_acquire_ctx *ctx,
		bool force)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	enum drm_connector_status status;

	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.id, connector->name);
	drm_WARN_ON(&dev_priv->drm,
		    !drm_modeset_is_locked(&dev_priv->drm.mode_config.connection_mutex));

	if (!INTEL_DISPLAY_ENABLED(dev_priv))
		return connector_status_disconnected;

	/* Can't disconnect eDP */
	if (intel_dp_is_edp(intel_dp))
		status = edp_detect(intel_dp);
	else if (intel_digital_port_connected(encoder))
		status = intel_dp_detect_dpcd(intel_dp);
	else
		status = connector_status_disconnected;

	if (status == connector_status_disconnected) {
		memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
		memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));

		/* Tear down a stale MST topology, if any. */
		if (intel_dp->is_mst) {
			drm_dbg_kms(&dev_priv->drm,
				    "MST device may have disappeared %d vs %d\n",
				    intel_dp->is_mst,
				    intel_dp->mst_mgr.mst_state);
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
							intel_dp->is_mst);
		}

		goto out;
	}

	/* Read DP Sink DSC Cap DPCD regs for DP v1.4 */
	if (INTEL_GEN(dev_priv) >= 11)
		intel_dp_get_dsc_sink_cap(intel_dp);

	intel_dp_configure_mst(intel_dp);

	/*
	 * If we are in MST mode then this connector
	 * won't appear connected or have anything
	 * with EDID on it
	 */
	if (intel_dp->reset_link_params || intel_dp->is_mst) {
		/* Initial max link lane count */
		intel_dp->max_link_lane_count = intel_dp_max_common_lane_count(intel_dp);

		/* Initial max link rate */
		intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);

		intel_dp->reset_link_params = false;
	}

	intel_dp_print_rates(intel_dp);

	if (intel_dp->is_mst) {
		/*
		 * If we are in MST mode then this connector
		 * won't appear connected or have anything
		 * with EDID on it
		 */
		status = connector_status_disconnected;
		goto out;
	}

	/*
	 * Some external monitors do not signal loss of link synchronization
	 * with an IRQ_HPD, so force a link status check. Doesn't apply to
	 * eDP where the link is expected to stay up.
	 */
	if (!intel_dp_is_edp(intel_dp)) {
		int ret;

		ret = intel_dp_retrain_link(encoder, ctx);
		if (ret)
			return ret;
	}

	/*
	 * Clearing NACK and defer counts to get their exact values
	 * while reading EDID which are required by Compliance tests
	 * 4.2.2.4 and 4.2.2.5
	 */
	intel_dp->aux.i2c_nack_count = 0;
	intel_dp->aux.i2c_defer_count = 0;

	intel_dp_set_edid(intel_dp);
	if (intel_dp_is_edp(intel_dp) ||
	    to_intel_connector(connector)->detect_edid)
		status = connector_status_connected;

	intel_dp_check_service_irq(intel_dp);

out:
	if (status != connector_status_connected && !intel_dp->is_mst)
		intel_dp_unset_edid(intel_dp);

	/*
	 * Make sure the refs for power wells enabled during detect are
	 * dropped to avoid a new detect cycle triggered by HPD polling.
	 */
	intel_display_power_flush_work(dev_priv);

	if (!intel_dp_is_edp(intel_dp))
		drm_dp_set_subconnector_property(connector,
						 status,
						 intel_dp->dpcd,
						 intel_dp->downstream_ports);
	return status;
}
6412
/*
 * Connector ->force hook: re-read the EDID for a connector whose status
 * has been forced by the user, taking the AUX power domain for the DDC
 * transfer. Skipped when the forced status isn't "connected".
 */
static void
intel_dp_force(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(intel_encoder->base.dev);
	enum intel_display_power_domain aux_domain =
		intel_aux_power_domain(dig_port);
	intel_wakeref_t wakeref;

	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.id, connector->name);
	intel_dp_unset_edid(intel_dp);

	if (connector->status != connector_status_connected)
		return;

	wakeref = intel_display_power_get(dev_priv, aux_domain);

	intel_dp_set_edid(intel_dp);

	intel_display_power_put(dev_priv, aux_domain, wakeref);
}
6437
/*
 * Connector ->get_modes hook. Preference order: modes from the cached
 * EDID; the eDP fixed panel mode; and, lacking an EDID entirely, a
 * fallback mode derived from the branch device's DPCD.
 * Returns the number of modes added.
 */
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct edid *edid;

	edid = intel_connector->detect_edid;
	if (edid) {
		int ret = intel_connector_update_modes(connector, edid);
		if (ret)
			return ret;
	}

	/* if eDP has no EDID, fall back to fixed mode */
	if (intel_dp_is_edp(intel_attached_dp(intel_connector)) &&
	    intel_connector->panel.fixed_mode) {
		struct drm_display_mode *mode;

		mode = drm_mode_duplicate(connector->dev,
					  intel_connector->panel.fixed_mode);
		if (mode) {
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	if (!edid) {
		struct intel_dp *intel_dp = intel_attached_dp(intel_connector);
		struct drm_display_mode *mode;

		/* e.g. a fixed-mode DP->VGA converter without EDID */
		mode = drm_dp_downstream_mode(connector->dev,
					      intel_dp->dpcd,
					      intel_dp->downstream_ports);
		if (mode) {
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	return 0;
}
6478
6479static int
6480intel_dp_connector_register(struct drm_connector *connector)
6481{
6482 struct drm_i915_private *i915 = to_i915(connector->dev);
6483 struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
6484 int ret;
6485
6486 ret = intel_connector_register(connector);
6487 if (ret)
6488 return ret;
6489
6490 drm_dbg_kms(&i915->drm, "registering %s bus for %s\n",
6491 intel_dp->aux.name, connector->kdev->kobj.name);
6492
6493 intel_dp->aux.dev = connector->kdev;
6494 ret = drm_dp_aux_register(&intel_dp->aux);
6495 if (!ret)
6496 drm_dp_cec_register_connector(&intel_dp->aux, connector);
6497 return ret;
6498}
6499
/*
 * Connector ->early_unregister hook: tear down in reverse order of
 * registration — CEC first, then the AUX bus, then the connector itself.
 */
static void
intel_dp_connector_unregister(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));

	drm_dp_cec_unregister_connector(&intel_dp->aux);
	drm_dp_aux_unregister(&intel_dp->aux);
	intel_connector_unregister(connector);
}
6509
/*
 * Flush pending work and release resources attached to the DP encoder
 * prior to destruction: MST state, the delayed panel-VDD-off work (eDP),
 * the reboot notifier (eDP), and the AUX channel.
 */
void intel_dp_encoder_flush_work(struct drm_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));
	struct intel_dp *intel_dp = &dig_port->dp;

	intel_dp_mst_encoder_cleanup(dig_port);
	if (intel_dp_is_edp(intel_dp)) {
		intel_wakeref_t wakeref;

		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		/*
		 * vdd might still be enabled due to the delayed vdd off.
		 * Make sure vdd is actually turned off here.
		 */
		with_pps_lock(intel_dp, wakeref)
			edp_panel_vdd_off_sync(intel_dp);

		if (intel_dp->edp_notifier.notifier_call) {
			unregister_reboot_notifier(&intel_dp->edp_notifier);
			intel_dp->edp_notifier.notifier_call = NULL;
		}
	}

	intel_dp_aux_fini(intel_dp);
}
6535
/* Encoder ->destroy hook: flush outstanding work, then free the port. */
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}
6543
/*
 * Suspend hook for the DP encoder. Only eDP needs work here: cancel the
 * delayed panel-VDD-off and force VDD off synchronously so the panel is
 * properly powered down before suspend.
 */
void intel_dp_encoder_suspend(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);
	intel_wakeref_t wakeref;

	if (!intel_dp_is_edp(intel_dp))
		return;

	/*
	 * vdd might still be enabled due to the delayed vdd off.
	 * Make sure vdd is actually turned off here.
	 */
	cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
	with_pps_lock(intel_dp, wakeref)
		edp_panel_vdd_off_sync(intel_dp);
}
6560
/*
 * Reconcile driver state with a panel-VDD left enabled by the BIOS:
 * grab the matching AUX power reference so our refcounting is balanced,
 * and schedule the normal delayed VDD off. Requires pps_mutex.
 */
static void intel_edp_panel_vdd_sanitize(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

	lockdep_assert_held(&dev_priv->pps_mutex);

	if (!edp_have_panel_vdd(intel_dp))
		return;

	/*
	 * The VDD bit needs a power domain reference, so if the bit is
	 * already enabled when we boot or resume, grab this reference and
	 * schedule a vdd off, so we don't hold on to the reference
	 * indefinitely.
	 */
	drm_dbg_kms(&dev_priv->drm,
		    "VDD left on by BIOS, adjusting state tracking\n");
	intel_display_power_get(dev_priv, intel_aux_power_domain(dig_port));

	edp_panel_vdd_schedule_off(intel_dp);
}
6583
6584static enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
6585{
6586 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
6587 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
6588 enum pipe pipe;
6589
6590 if (intel_dp_port_enabled(dev_priv, intel_dp->output_reg,
6591 encoder->port, &pipe))
6592 return pipe;
6593
6594 return INVALID_PIPE;
6595}
6596
/*
 * Encoder ->reset hook (boot/resume): re-read cached hardware state,
 * resume the LSPCON if present, force link-parameter renegotiation, and
 * on VLV/CHV/eDP re-sync PPS state with the hardware.
 */
void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));
	struct intel_lspcon *lspcon = dp_to_lspcon(intel_dp);
	intel_wakeref_t wakeref;

	if (!HAS_DDI(dev_priv))
		intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	if (lspcon->active)
		lspcon_resume(lspcon);

	intel_dp->reset_link_params = true;

	/* Only VLV/CHV and eDP need the PPS work below. */
	if (!IS_VALLEYVIEW(dev_priv) && !IS_CHERRYVIEW(dev_priv) &&
	    !intel_dp_is_edp(intel_dp))
		return;

	with_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			intel_dp->active_pipe = vlv_active_pipe(intel_dp);

		if (intel_dp_is_edp(intel_dp)) {
			/*
			 * Reinit the power sequencer, in case BIOS did
			 * something nasty with it.
			 */
			intel_dp_pps_init(intel_dp);
			intel_edp_panel_vdd_sanitize(intel_dp);
		}
	}
}
6630
/*
 * Add every connector belonging to @tile_group_id (and the planes of
 * their CRTCs) to the atomic state, marking the CRTCs for a full
 * modeset so all tiles of a tiled display are updated together.
 */
static int intel_modeset_tile_group(struct intel_atomic_state *state,
				    int tile_group_id)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct drm_connector_list_iter conn_iter;
	struct drm_connector *connector;
	int ret = 0;

	drm_connector_list_iter_begin(&dev_priv->drm, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!connector->has_tile ||
		    connector->tile_group->id != tile_group_id)
			continue;

		conn_state = drm_atomic_get_connector_state(&state->base,
							    connector);
		if (IS_ERR(conn_state)) {
			ret = PTR_ERR(conn_state);
			break;
		}

		crtc = to_intel_crtc(conn_state->crtc);

		if (!crtc)
			continue;

		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}
6672
/*
 * Pull every enabled CRTC whose cpu_transcoder is in @transcoders into
 * the atomic state and mark it for a modeset, together with its affected
 * connectors and planes. Warns if any requested transcoder was not found
 * on an enabled CRTC.
 */
static int intel_modeset_affected_transcoders(struct intel_atomic_state *state, u8 transcoders)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	if (transcoders == 0)
		return 0;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state;
		int ret;

		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);

		if (!crtc_state->hw.enable)
			continue;

		if (!(transcoders & BIT(crtc_state->cpu_transcoder)))
			continue;

		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_connectors(&state->base, &crtc->base);
		if (ret)
			return ret;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			return ret;

		/* Each transcoder maps to at most one enabled CRTC. */
		transcoders &= ~BIT(crtc_state->cpu_transcoder);
	}

	drm_WARN_ON(&dev_priv->drm, transcoders != 0);

	return 0;
}
6712
/*
 * For a connector on a port-sync'd CRTC, add every transcoder in its
 * sync group (slaves plus master) to the atomic state so the whole
 * group is modeset together.
 */
static int intel_modeset_synced_crtcs(struct intel_atomic_state *state,
				      struct drm_connector *connector)
{
	const struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(&state->base, connector);
	const struct intel_crtc_state *old_crtc_state;
	struct intel_crtc *crtc;
	u8 transcoders;

	crtc = to_intel_crtc(old_conn_state->crtc);
	if (!crtc)
		return 0;

	old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc);

	if (!old_crtc_state->hw.active)
		return 0;

	transcoders = old_crtc_state->sync_mode_slaves_mask;
	if (old_crtc_state->master_transcoder != INVALID_TRANSCODER)
		transcoders |= BIT(old_crtc_state->master_transcoder);

	return intel_modeset_affected_transcoders(state,
						  transcoders);
}
6738
/*
 * Connector ->atomic_check hook: after the generic digital-connector
 * check, extend the state to cover tiled-display siblings and
 * port-sync'd CRTCs when this connector needs a modeset (gen9+ only).
 */
static int intel_dp_connector_atomic_check(struct drm_connector *conn,
					   struct drm_atomic_state *_state)
{
	struct drm_i915_private *dev_priv = to_i915(conn->dev);
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	int ret;

	ret = intel_digital_connector_atomic_check(conn, &state->base);
	if (ret)
		return ret;

	/*
	 * Tile group and port sync handling below only applies to
	 * gen9+ platforms.
	 */
	if (INTEL_GEN(dev_priv) < 9)
		return 0;

	if (!intel_connector_needs_modeset(state, conn))
		return 0;

	if (conn->has_tile) {
		ret = intel_modeset_tile_group(state, conn->tile_group->id);
		if (ret)
			return ret;
	}

	return intel_modeset_synced_crtcs(state, conn);
}
6768
/* DRM connector function table for DP connectors. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.force = intel_dp_force,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = intel_dp_connector_register,
	.early_unregister = intel_dp_connector_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
6780
/* Probe/atomic helper hooks; detect_ctx is used instead of plain detect. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.detect_ctx = intel_dp_detect,
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.atomic_check = intel_dp_connector_atomic_check,
};
6787
/* DRM encoder function table for DP encoders. */
static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};
6792
6793static bool intel_edp_have_power(struct intel_dp *intel_dp)
6794{
6795 intel_wakeref_t wakeref;
6796 bool have_power = false;
6797
6798 with_pps_lock(intel_dp, wakeref) {
6799 have_power = edp_have_panel_power(intel_dp) &&
6800 edp_have_panel_vdd(intel_dp);
6801 }
6802
6803 return have_power;
6804}
6805
/*
 * intel_dp_hpd_pulse - handle a HPD pulse on a DP port
 * @dig_port: the digital port that received the pulse
 * @long_hpd: true for a long pulse, false for a short (IRQ) pulse
 *
 * Return: IRQ_HANDLED if the pulse was consumed here, IRQ_NONE if
 * further processing (full hotplug work) is required.
 */
enum irqreturn
intel_dp_hpd_pulse(struct intel_digital_port *dig_port, bool long_hpd)
{
	struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
	struct intel_dp *intel_dp = &dig_port->dp;

	if (dig_port->base.type == INTEL_OUTPUT_EDP &&
	    (long_hpd || !intel_edp_have_power(intel_dp))) {
		/*
		 * vdd off can generate a long/short pulse on eDP which
		 * would require vdd on to handle it, and thus we
		 * would end up in an endless cycle of
		 * "vdd off -> long/short hpd -> vdd on -> detect ->
		 *  vdd off -> ..."
		 */
		drm_dbg_kms(&i915->drm,
			    "ignoring %s hpd on eDP [ENCODER:%d:%s]\n",
			    long_hpd ? "long" : "short",
			    dig_port->base.base.base.id,
			    dig_port->base.base.name);
		return IRQ_HANDLED;
	}

	drm_dbg_kms(&i915->drm, "got hpd irq on [ENCODER:%d:%s] - %s\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name,
		    long_hpd ? "long" : "short");

	/* Long pulse: defer to the hotplug work for a full reprobe. */
	if (long_hpd) {
		intel_dp->reset_link_params = true;
		return IRQ_NONE;
	}

	if (intel_dp->is_mst) {
		if (!intel_dp_check_mst_status(intel_dp))
			return IRQ_NONE;
	} else if (!intel_dp_short_pulse(intel_dp)) {
		return IRQ_NONE;
	}

	return IRQ_HANDLED;
}
6847
6848
6849bool intel_dp_is_port_edp(struct drm_i915_private *dev_priv, enum port port)
6850{
6851
6852
6853
6854
6855 if (INTEL_GEN(dev_priv) < 5)
6856 return false;
6857
6858 if (INTEL_GEN(dev_priv) < 9 && port == PORT_A)
6859 return true;
6860
6861 return intel_bios_is_port_edp(dev_priv, port);
6862}
6863
/*
 * Attach the DRM properties supported by DP connectors: subconnector
 * (external DP only), force-audio, broadcast RGB, max bpc, colorspace,
 * HDR metadata (GLK/gen11+), and scaling mode for eDP panels.
 */
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	enum port port = dp_to_dig_port(intel_dp)->base.port;

	if (!intel_dp_is_edp(intel_dp))
		drm_connector_attach_dp_subconnector_property(connector);

	if (!IS_G4X(dev_priv) && port != PORT_A)
		intel_attach_force_audio_property(connector);

	intel_attach_broadcast_rgb_property(connector);
	if (HAS_GMCH(dev_priv))
		drm_connector_attach_max_bpc_property(connector, 6, 10);
	else if (INTEL_GEN(dev_priv) >= 5)
		drm_connector_attach_max_bpc_property(connector, 6, 12);

	intel_attach_colorspace_property(connector);

	if (IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 11)
		drm_object_attach_property(&connector->base,
					   connector->dev->mode_config.hdr_output_metadata_property,
					   0);

	if (intel_dp_is_edp(intel_dp)) {
		u32 allowed_scalers;

		allowed_scalers = BIT(DRM_MODE_SCALE_ASPECT) | BIT(DRM_MODE_SCALE_FULLSCREEN);
		if (!HAS_GMCH(dev_priv))
			allowed_scalers |= BIT(DRM_MODE_SCALE_CENTER);

		drm_connector_attach_scaling_mode_property(connector, allowed_scalers);

		/* Default to aspect-preserving scaling. */
		connector->state->scaling_mode = DRM_MODE_SCALE_ASPECT;

	}
}
6902
/* Initialize the PPS delay-enforcement timestamps to "now". */
static void intel_dp_init_panel_power_timestamps(struct intel_dp *intel_dp)
{
	intel_dp->panel_power_off_time = ktime_get_boottime();
	intel_dp->last_power_on = jiffies;
	intel_dp->last_backlight_off = jiffies;
}
6909
/*
 * Read the current panel power sequencer delays from the hardware PPS
 * registers into @seq (values in 100us units; t11_t12 is converted
 * from its coarser hardware encoding).
 */
static void
intel_pps_readout_hw_state(struct intel_dp *intel_dp, struct edp_power_seq *seq)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 pp_on, pp_off, pp_ctl;
	struct pps_registers regs;

	intel_pps_get_registers(intel_dp, &regs);

	pp_ctl = ilk_get_pp_control(intel_dp);

	/* Ensure PPS is unlocked */
	if (!HAS_DDI(dev_priv))
		intel_de_write(dev_priv, regs.pp_ctrl, pp_ctl);

	pp_on = intel_de_read(dev_priv, regs.pp_on);
	pp_off = intel_de_read(dev_priv, regs.pp_off);

	/* Pull timing values out of registers */
	seq->t1_t3 = REG_FIELD_GET(PANEL_POWER_UP_DELAY_MASK, pp_on);
	seq->t8 = REG_FIELD_GET(PANEL_LIGHT_ON_DELAY_MASK, pp_on);
	seq->t9 = REG_FIELD_GET(PANEL_LIGHT_OFF_DELAY_MASK, pp_off);
	seq->t10 = REG_FIELD_GET(PANEL_POWER_DOWN_DELAY_MASK, pp_off);

	if (i915_mmio_reg_valid(regs.pp_div)) {
		u32 pp_div;

		pp_div = intel_de_read(dev_priv, regs.pp_div);

		seq->t11_t12 = REG_FIELD_GET(PANEL_POWER_CYCLE_DELAY_MASK, pp_div) * 1000;
	} else {
		seq->t11_t12 = REG_FIELD_GET(BXT_POWER_CYCLE_DELAY_MASK, pp_ctl) * 1000;
	}
}
6944
/* Dump a set of PPS delays to the kernel log, tagged with @state_name. */
static void
intel_pps_dump_state(const char *state_name, const struct edp_power_seq *seq)
{
	DRM_DEBUG_KMS("%s t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
		      state_name,
		      seq->t1_t3, seq->t8, seq->t9, seq->t10, seq->t11_t12);
}
6952
/*
 * Compare the PPS delays tracked in software against what the hardware
 * registers currently hold, and complain loudly on any mismatch.
 */
static void
intel_pps_verify_state(struct intel_dp *intel_dp)
{
	struct edp_power_seq hw;
	struct edp_power_seq *sw = &intel_dp->pps_delays;

	intel_pps_readout_hw_state(intel_dp, &hw);

	if (hw.t1_t3 != sw->t1_t3 || hw.t8 != sw->t8 || hw.t9 != sw->t9 ||
	    hw.t10 != sw->t10 || hw.t11_t12 != sw->t11_t12) {
		DRM_ERROR("PPS state mismatch\n");
		intel_pps_dump_state("sw", sw);
		intel_pps_dump_state("hw", &hw);
	}
}
6968
/*
 * Compute the final panel power sequencer delays from the maximum of the
 * current hardware values and the VBT values, falling back to eDP spec
 * values where both are zero. Internal units are 100us. Requires
 * pps_mutex; runs only once (t11_t12 != 0 marks it initialized).
 */
static void
intel_dp_init_panel_power_sequencer(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct edp_power_seq cur, vbt, spec,
		*final = &intel_dp->pps_delays;

	lockdep_assert_held(&dev_priv->pps_mutex);

	/* already initialized? */
	if (final->t11_t12 != 0)
		return;

	intel_pps_readout_hw_state(intel_dp, &cur);

	intel_pps_dump_state("cur", &cur);

	vbt = dev_priv->vbt.edp.pps;

	/*
	 * On Toshiba Satellite P50-C-18C system the VBT T12 delay
	 * of 500ms appears to be too short. Ocassionally the panel
	 * just fails to power back on. Increasing the delay to 800ms
	 * seems sufficient to avoid this problem.
	 */
	if (dev_priv->quirks & QUIRK_INCREASE_T12_DELAY) {
		vbt.t11_t12 = max_t(u16, vbt.t11_t12, 1300 * 10);
		drm_dbg_kms(&dev_priv->drm,
			    "Increasing T12 panel delay as per the quirk to %d\n",
			    vbt.t11_t12);
	}

	/*
	 * T11_T12 delay is special and actually in units of 100ms, but zero
	 * based in the hw (so we need to add 100 ms). But the sw vbt
	 * table multiplies it with 1000 to make it in units of 100usec,
	 * too.
	 */
	vbt.t11_t12 += 100 * 10;

	/*
	 * Upper limits from eDP 1.3 spec. Note that we use the clunky
	 * units of our hw here (100usec).
	 */
	spec.t1_t3 = 210 * 10;
	spec.t8 = 50 * 10;
	spec.t9 = 50 * 10;
	spec.t10 = 500 * 10;

	/*
	 * This one is special and actually in units of 100ms, but zero
	 * based in the hw (so we need to add 100 ms). But the sw vbt
	 * table multiplies it with 1000 to make it in units of 100usec,
	 * too.
	 */
	spec.t11_t12 = (510 + 100) * 10;

	intel_pps_dump_state("vbt", &vbt);

	/*
	 * Use the max of the register settings and vbt. If both are
	 * unset, fall back to the spec limits
	 */
#define assign_final(field)	final->field = (max(cur.field, vbt.field) == 0 ? \
				       spec.field : \
				       max(cur.field, vbt.field))
	assign_final(t1_t3);
	assign_final(t8);
	assign_final(t9);
	assign_final(t10);
	assign_final(t11_t12);
#undef assign_final

	/* Convert from 100us units to ms for the driver-side delays. */
#define get_delay(field)	(DIV_ROUND_UP(final->field, 10))
	intel_dp->panel_power_up_delay = get_delay(t1_t3);
	intel_dp->backlight_on_delay = get_delay(t8);
	intel_dp->backlight_off_delay = get_delay(t9);
	intel_dp->panel_power_down_delay = get_delay(t10);
	intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
#undef get_delay

	drm_dbg_kms(&dev_priv->drm,
		    "panel power up delay %d, power down delay %d, power cycle delay %d\n",
		    intel_dp->panel_power_up_delay,
		    intel_dp->panel_power_down_delay,
		    intel_dp->panel_power_cycle_delay);

	drm_dbg_kms(&dev_priv->drm, "backlight on delay %d, off delay %d\n",
		    intel_dp->backlight_on_delay,
		    intel_dp->backlight_off_delay);

	/*
	 * We override the HW backlight delays to 1 because we do manual waits
	 * on them. For T8, even BSpec recommends doing it. For T9, if we
	 * don't do this, we'll end up waiting for the backlight off delay
	 * twice: once when we do the manual sleep, and once when we disable
	 * the panel and wait for the PP_STATUS bit to become zero.
	 */
	final->t8 = 1;
	final->t9 = 1;

	/*
	 * HW has only a 100msec granularity for t11_t12 so round it up
	 * accordingly.
	 */
	final->t11_t12 = roundup(final->t11_t12, 100 * 10);
}
7064
7065static void
7066intel_dp_init_panel_power_sequencer_registers(struct intel_dp *intel_dp,
7067 bool force_disable_vdd)
7068{
7069 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
7070 u32 pp_on, pp_off, port_sel = 0;
7071 int div = RUNTIME_INFO(dev_priv)->rawclk_freq / 1000;
7072 struct pps_registers regs;
7073 enum port port = dp_to_dig_port(intel_dp)->base.port;
7074 const struct edp_power_seq *seq = &intel_dp->pps_delays;
7075
7076 lockdep_assert_held(&dev_priv->pps_mutex);
7077
7078 intel_pps_get_registers(intel_dp, ®s);
7079
7080
7081
7082
7083
7084
7085
7086
7087
7088
7089
7090
7091
7092 if (force_disable_vdd) {
7093 u32 pp = ilk_get_pp_control(intel_dp);
7094
7095 drm_WARN(&dev_priv->drm, pp & PANEL_POWER_ON,
7096 "Panel power already on\n");
7097
7098 if (pp & EDP_FORCE_VDD)
7099 drm_dbg_kms(&dev_priv->drm,
7100 "VDD already on, disabling first\n");
7101
7102 pp &= ~EDP_FORCE_VDD;
7103
7104 intel_de_write(dev_priv, regs.pp_ctrl, pp);
7105 }
7106
7107 pp_on = REG_FIELD_PREP(PANEL_POWER_UP_DELAY_MASK, seq->t1_t3) |
7108 REG_FIELD_PREP(PANEL_LIGHT_ON_DELAY_MASK, seq->t8);
7109 pp_off = REG_FIELD_PREP(PANEL_LIGHT_OFF_DELAY_MASK, seq->t9) |
7110 REG_FIELD_PREP(PANEL_POWER_DOWN_DELAY_MASK, seq->t10);
7111
7112
7113
7114 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
7115 port_sel = PANEL_PORT_SELECT_VLV(port);
7116 } else if (HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)) {
7117 switch (port) {
7118 case PORT_A:
7119 port_sel = PANEL_PORT_SELECT_DPA;
7120 break;
7121 case PORT_C:
7122 port_sel = PANEL_PORT_SELECT_DPC;
7123 break;
7124 case PORT_D:
7125 port_sel = PANEL_PORT_SELECT_DPD;
7126 break;
7127 default:
7128 MISSING_CASE(port);
7129 break;
7130 }
7131 }
7132
7133 pp_on |= port_sel;
7134
7135 intel_de_write(dev_priv, regs.pp_on, pp_on);
7136 intel_de_write(dev_priv, regs.pp_off, pp_off);
7137
7138
7139
7140
7141 if (i915_mmio_reg_valid(regs.pp_div)) {
7142 intel_de_write(dev_priv, regs.pp_div,
7143 REG_FIELD_PREP(PP_REFERENCE_DIVIDER_MASK, (100 * div) / 2 - 1) | REG_FIELD_PREP(PANEL_POWER_CYCLE_DELAY_MASK, DIV_ROUND_UP(seq->t11_t12, 1000)));
7144 } else {
7145 u32 pp_ctl;
7146
7147 pp_ctl = intel_de_read(dev_priv, regs.pp_ctrl);
7148 pp_ctl &= ~BXT_POWER_CYCLE_DELAY_MASK;
7149 pp_ctl |= REG_FIELD_PREP(BXT_POWER_CYCLE_DELAY_MASK, DIV_ROUND_UP(seq->t11_t12, 1000));
7150 intel_de_write(dev_priv, regs.pp_ctrl, pp_ctl);
7151 }
7152
7153 drm_dbg_kms(&dev_priv->drm,
7154 "panel power sequencer register settings: PP_ON %#x, PP_OFF %#x, PP_DIV %#x\n",
7155 intel_de_read(dev_priv, regs.pp_on),
7156 intel_de_read(dev_priv, regs.pp_off),
7157 i915_mmio_reg_valid(regs.pp_div) ?
7158 intel_de_read(dev_priv, regs.pp_div) :
7159 (intel_de_read(dev_priv, regs.pp_ctrl) & BXT_POWER_CYCLE_DELAY_MASK));
7160}
7161
7162static void intel_dp_pps_init(struct intel_dp *intel_dp)
7163{
7164 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
7165
7166 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
7167 vlv_initial_power_sequencer_setup(intel_dp);
7168 } else {
7169 intel_dp_init_panel_power_sequencer(intel_dp);
7170 intel_dp_init_panel_power_sequencer_registers(intel_dp, false);
7171 }
7172}
7173
7174
7175
7176
7177
7178
7179
7180
7181
7182
7183
7184
7185
7186
/*
 * intel_dp_set_drrs_state - program registers for a refresh rate switch
 * @dev_priv: i915 device
 * @crtc_state: state of the crtc driving the DRRS-capable eDP panel
 * @refresh_rate: target refresh rate in Hz
 *
 * Selects DRRS_LOW_RR when @refresh_rate matches the panel's downclock
 * mode, DRRS_HIGH_RR otherwise, then makes the switch effective: via the
 * M/N divider set (gen8+, except CHV) or the PIPECONF EDP RR mode bit
 * (gen7+).  Bails out early on invalid input, missing DRRS state, or a
 * request for the already-active rate.
 */
static void intel_dp_set_drrs_state(struct drm_i915_private *dev_priv,
				    const struct intel_crtc_state *crtc_state,
				    int refresh_rate)
{
	struct intel_dp *intel_dp = dev_priv->drrs.dp;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum drrs_refresh_rate_type index = DRRS_HIGH_RR;

	if (refresh_rate <= 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "Refresh rate should be positive non-zero.\n");
		return;
	}

	if (intel_dp == NULL) {
		drm_dbg_kms(&dev_priv->drm, "DRRS not supported.\n");
		return;
	}

	if (!intel_crtc) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS: intel_crtc not initialized\n");
		return;
	}

	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "Only Seamless DRRS supported.\n");
		return;
	}

	/* Requested rate matching the downclock mode means low RR. */
	if (drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode) ==
	    refresh_rate)
		index = DRRS_LOW_RR;

	if (index == dev_priv->drrs.refresh_rate_type) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS requested for previously set RR...ignoring\n");
		return;
	}

	if (!crtc_state->hw.active) {
		drm_dbg_kms(&dev_priv->drm,
			    "eDP encoder disabled. CRTC not Active\n");
		return;
	}

	if (INTEL_GEN(dev_priv) >= 8 && !IS_CHERRYVIEW(dev_priv)) {
		/* Gen8+: switch rate by selecting the M1/N1 or M2/N2 set. */
		switch (index) {
		case DRRS_HIGH_RR:
			intel_dp_set_m_n(crtc_state, M1_N1);
			break;
		case DRRS_LOW_RR:
			intel_dp_set_m_n(crtc_state, M2_N2);
			break;
		case DRRS_MAX_RR:
		default:
			drm_err(&dev_priv->drm,
				"Unsupported refreshrate type\n");
		}
	} else if (INTEL_GEN(dev_priv) > 6) {
		/* Gen7: toggle the EDP RR mode switch bit in PIPECONF. */
		i915_reg_t reg = PIPECONF(crtc_state->cpu_transcoder);
		u32 val;

		val = intel_de_read(dev_priv, reg);
		if (index > DRRS_HIGH_RR) {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val |= PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val |= PIPECONF_EDP_RR_MODE_SWITCH;
		} else {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH;
		}
		intel_de_write(dev_priv, reg, val);
	}

	dev_priv->drrs.refresh_rate_type = index;

	drm_dbg_kms(&dev_priv->drm, "eDP Refresh Rate set to : %dHz\n",
		    refresh_rate);
}
7270
7271static void
7272intel_edp_drrs_enable_locked(struct intel_dp *intel_dp)
7273{
7274 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
7275
7276 dev_priv->drrs.busy_frontbuffer_bits = 0;
7277 dev_priv->drrs.dp = intel_dp;
7278}
7279
7280
7281
7282
7283
7284
7285
7286
7287void intel_edp_drrs_enable(struct intel_dp *intel_dp,
7288 const struct intel_crtc_state *crtc_state)
7289{
7290 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
7291
7292 if (!crtc_state->has_drrs)
7293 return;
7294
7295 drm_dbg_kms(&dev_priv->drm, "Enabling DRRS\n");
7296
7297 mutex_lock(&dev_priv->drrs.mutex);
7298
7299 if (dev_priv->drrs.dp) {
7300 drm_warn(&dev_priv->drm, "DRRS already enabled\n");
7301 goto unlock;
7302 }
7303
7304 intel_edp_drrs_enable_locked(intel_dp);
7305
7306unlock:
7307 mutex_unlock(&dev_priv->drrs.mutex);
7308}
7309
7310static void
7311intel_edp_drrs_disable_locked(struct intel_dp *intel_dp,
7312 const struct intel_crtc_state *crtc_state)
7313{
7314 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
7315
7316 if (dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR) {
7317 int refresh;
7318
7319 refresh = drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode);
7320 intel_dp_set_drrs_state(dev_priv, crtc_state, refresh);
7321 }
7322
7323 dev_priv->drrs.dp = NULL;
7324}
7325
7326
7327
7328
7329
7330
7331
7332void intel_edp_drrs_disable(struct intel_dp *intel_dp,
7333 const struct intel_crtc_state *old_crtc_state)
7334{
7335 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
7336
7337 if (!old_crtc_state->has_drrs)
7338 return;
7339
7340 mutex_lock(&dev_priv->drrs.mutex);
7341 if (!dev_priv->drrs.dp) {
7342 mutex_unlock(&dev_priv->drrs.mutex);
7343 return;
7344 }
7345
7346 intel_edp_drrs_disable_locked(intel_dp, old_crtc_state);
7347 mutex_unlock(&dev_priv->drrs.mutex);
7348
7349 cancel_delayed_work_sync(&dev_priv->drrs.work);
7350}
7351
7352
7353
7354
7355
7356
7357
7358
7359
7360
7361void
7362intel_edp_drrs_update(struct intel_dp *intel_dp,
7363 const struct intel_crtc_state *crtc_state)
7364{
7365 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
7366
7367 if (dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
7368 return;
7369
7370 mutex_lock(&dev_priv->drrs.mutex);
7371
7372
7373 if (crtc_state->has_drrs == !!dev_priv->drrs.dp)
7374 goto unlock;
7375
7376 if (crtc_state->has_drrs)
7377 intel_edp_drrs_enable_locked(intel_dp);
7378 else
7379 intel_edp_drrs_disable_locked(intel_dp, crtc_state);
7380
7381unlock:
7382 mutex_unlock(&dev_priv->drrs.mutex);
7383}
7384
7385static void intel_edp_drrs_downclock_work(struct work_struct *work)
7386{
7387 struct drm_i915_private *dev_priv =
7388 container_of(work, typeof(*dev_priv), drrs.work.work);
7389 struct intel_dp *intel_dp;
7390
7391 mutex_lock(&dev_priv->drrs.mutex);
7392
7393 intel_dp = dev_priv->drrs.dp;
7394
7395 if (!intel_dp)
7396 goto unlock;
7397
7398
7399
7400
7401
7402
7403 if (dev_priv->drrs.busy_frontbuffer_bits)
7404 goto unlock;
7405
7406 if (dev_priv->drrs.refresh_rate_type != DRRS_LOW_RR) {
7407 struct drm_crtc *crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
7408
7409 intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
7410 drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode));
7411 }
7412
7413unlock:
7414 mutex_unlock(&dev_priv->drrs.mutex);
7415}
7416
7417
7418
7419
7420
7421
7422
7423
7424
7425
7426
/*
 * intel_edp_drrs_invalidate - react to frontbuffer rendering activity
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 *
 * Cancels any pending downclock work, records the busy bits for the DRRS
 * pipe, and — if the panel is currently at the low refresh rate — bumps
 * it back up to the fixed (high) rate so the new content is displayed
 * promptly.
 */
void intel_edp_drrs_invalidate(struct drm_i915_private *dev_priv,
			       unsigned int frontbuffer_bits)
{
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;
	if (!intel_dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only bits belonging to the DRRS pipe are relevant. */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;

	/* Invalidating on our pipe while downclocked: go back to high RR. */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
					drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));

	mutex_unlock(&dev_priv->drrs.mutex);
}
7460
7461
7462
7463
7464
7465
7466
7467
7468
7469
7470
7471
7472
/*
 * intel_edp_drrs_flush - react to completed frontbuffer flushes
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 *
 * Clears the flushed bits from the DRRS busy tracking.  A flush on the
 * DRRS pipe while downclocked forces the high refresh rate (like
 * invalidate); once no busy bits remain, the downclock work is scheduled
 * to drop back to the low rate after 1 second of idleness.
 */
void intel_edp_drrs_flush(struct drm_i915_private *dev_priv,
			  unsigned int frontbuffer_bits)
{
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;
	if (!intel_dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only bits belonging to the DRRS pipe are relevant. */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* Flushing on our pipe while downclocked: go back to high RR. */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
					drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));

	/*
	 * All planes idle again: arm the downclock work so we return to
	 * the low refresh rate after a second of inactivity.
	 */
	if (!dev_priv->drrs.busy_frontbuffer_bits)
		schedule_delayed_work(&dev_priv->drrs.work,
				      msecs_to_jiffies(1000));
	mutex_unlock(&dev_priv->drrs.mutex);
}
7513
7514
7515
7516
7517
7518
7519
7520
7521
7522
7523
7524
7525
7526
7527
7528
7529
7530
7531
7532
7533
7534
7535
7536
7537
7538
7539
7540
7541
7542
7543
7544
7545
7546
7547
7548
7549
7550
7551
7552
7553
7554
7555
7556
7557
7558
7559
7560
7561
7562
7563
/*
 * intel_dp_drrs_init - DRRS (dynamic refresh rate switching) init
 * @connector: eDP connector
 * @fixed_mode: the panel's preferred (high refresh rate) mode
 *
 * Sets up the per-device DRRS work and mutex, then checks the support
 * requirements: gen7+, VBT advertising seamless DRRS, and an
 * EDID-derived downclock mode.
 *
 * Returns the downclock mode to register with the panel, or NULL when
 * DRRS cannot be supported.
 */
static struct drm_display_mode *
intel_dp_drrs_init(struct intel_connector *connector,
		   struct drm_display_mode *fixed_mode)
{
	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
	struct drm_display_mode *downclock_mode = NULL;

	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_edp_drrs_downclock_work);
	mutex_init(&dev_priv->drrs.mutex);

	if (INTEL_GEN(dev_priv) <= 6) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS supported for Gen7 and above\n");
		return NULL;
	}

	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "VBT doesn't support DRRS\n");
		return NULL;
	}

	downclock_mode = intel_panel_edid_downclock_mode(connector, fixed_mode);
	if (!downclock_mode) {
		drm_dbg_kms(&dev_priv->drm,
			    "Downclock mode is not found. DRRS not supported\n");
		return NULL;
	}

	dev_priv->drrs.type = dev_priv->vbt.drrs_type;

	/* Panels start out at their fixed (high) refresh rate. */
	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
	drm_dbg_kms(&dev_priv->drm,
		    "seamless DRRS supported for eDP panel.\n");
	return downclock_mode;
}
7599
/*
 * Finish connector initialization for eDP: power sequencer setup, DPCD
 * and EDID readout, fixed/downclock mode discovery, backlight and panel
 * orientation setup.  Returns true on success (or trivially for
 * non-eDP), false when eDP must not be registered.
 */
static bool intel_edp_init_connector(struct intel_dp *intel_dp,
				     struct intel_connector *intel_connector)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct drm_device *dev = &dev_priv->drm;
	struct drm_connector *connector = &intel_connector->base;
	struct drm_display_mode *fixed_mode = NULL;
	struct drm_display_mode *downclock_mode = NULL;
	bool has_dpcd;
	enum pipe pipe = INVALID_PIPE;
	intel_wakeref_t wakeref;
	struct edid *edid;

	/* Nothing to do for external DP connectors. */
	if (!intel_dp_is_edp(intel_dp))
		return true;

	INIT_DELAYED_WORK(&intel_dp->panel_vdd_work, edp_panel_vdd_work);

	/*
	 * An active LVDS encoder means the internal panel is driven by
	 * LVDS, so don't register eDP for it.  NOTE(review): the WARN
	 * implies this combination is only expected on IBX/CPT PCHs --
	 * confirm against the original changelog.
	 */
	if (intel_get_lvds_encoder(dev_priv)) {
		drm_WARN_ON(dev,
			    !(HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)));
		drm_info(&dev_priv->drm,
			 "LVDS was detected, not registering eDP\n");

		return false;
	}

	/* PPS state must be sane before any AUX/VDD use below. */
	with_pps_lock(intel_dp, wakeref) {
		intel_dp_init_panel_power_timestamps(intel_dp);
		intel_dp_pps_init(intel_dp);
		intel_edp_panel_vdd_sanitize(intel_dp);
	}

	/* Cache DPCD and EDID for edp panel */
	has_dpcd = intel_edp_init_dpcd(intel_dp);

	if (!has_dpcd) {
		/* if this fails, presume the device is a ghost */
		drm_info(&dev_priv->drm,
			 "failed to retrieve link info, disabling eDP\n");
		goto out_vdd_off;
	}

	mutex_lock(&dev->mode_config.mutex);
	edid = drm_get_edid(connector, &intel_dp->aux.ddc);
	if (edid) {
		if (drm_add_edid_modes(connector, edid)) {
			drm_connector_update_edid_property(connector, edid);
			intel_dp->edid_quirks = drm_dp_get_edid_quirks(edid);
		} else {
			/* EDID present but no usable modes: record the error. */
			kfree(edid);
			edid = ERR_PTR(-EINVAL);
		}
	} else {
		edid = ERR_PTR(-ENOENT);
	}
	intel_connector->edid = edid;

	fixed_mode = intel_panel_edid_fixed_mode(intel_connector);
	if (fixed_mode)
		downclock_mode = intel_dp_drrs_init(intel_connector, fixed_mode);

	/* Fall back to the VBT mode when EDID yields no fixed mode. */
	if (!fixed_mode)
		fixed_mode = intel_panel_vbt_fixed_mode(intel_connector);
	mutex_unlock(&dev->mode_config.mutex);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_dp->edp_notifier.notifier_call = edp_notify_handler;
		register_reboot_notifier(&intel_dp->edp_notifier);

		/*
		 * Pick a pipe for initial backlight setup: prefer the
		 * currently active pipe, then the power sequencer's pipe,
		 * finally pipe A as a last resort.
		 */
		pipe = vlv_active_pipe(intel_dp);

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = intel_dp->pps_pipe;

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = PIPE_A;

		drm_dbg_kms(&dev_priv->drm,
			    "using pipe %c for initial backlight setup\n",
			    pipe_name(pipe));
	}

	intel_panel_init(&intel_connector->panel, fixed_mode, downclock_mode);
	intel_connector->panel.backlight.power = intel_edp_backlight_power;
	intel_panel_setup_backlight(connector, pipe);

	if (fixed_mode) {
		drm_connector_set_panel_orientation_with_quirk(connector,
				dev_priv->vbt.orientation,
				fixed_mode->hdisplay, fixed_mode->vdisplay);
	}

	return true;

out_vdd_off:
	cancel_delayed_work_sync(&intel_dp->panel_vdd_work);

	/*
	 * VDD may have been enabled during the DPCD probe; drop it
	 * synchronously under the pps lock before bailing out.
	 */
	with_pps_lock(intel_dp, wakeref)
		edp_panel_vdd_off_sync(intel_dp);

	return false;
}
7718
7719static void intel_dp_modeset_retry_work_fn(struct work_struct *work)
7720{
7721 struct intel_connector *intel_connector;
7722 struct drm_connector *connector;
7723
7724 intel_connector = container_of(work, typeof(*intel_connector),
7725 modeset_retry_work);
7726 connector = &intel_connector->base;
7727 DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n", connector->base.id,
7728 connector->name);
7729
7730
7731 mutex_lock(&connector->dev->mode_config.mutex);
7732
7733
7734
7735 drm_connector_set_link_status_property(connector,
7736 DRM_MODE_LINK_STATUS_BAD);
7737 mutex_unlock(&connector->dev->mode_config.mutex);
7738
7739 drm_kms_helper_hotplug_event(connector->dev);
7740}
7741
/*
 * Create and wire up the DRM connector for a DP/eDP digital port:
 * connector type selection, AUX init, MST encoder init, eDP panel init,
 * properties, and optional HDCP.  Returns false on any failure, after
 * cleaning up the connector.
 */
bool
intel_dp_init_connector(struct intel_digital_port *dig_port,
			struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_encoder *intel_encoder = &dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = intel_encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	int type;

	/* Initialize the work for modeset in case of link train failure */
	INIT_WORK(&intel_connector->modeset_retry_work,
		  intel_dp_modeset_retry_work_fn);

	if (drm_WARN(dev, dig_port->max_lanes < 1,
		     "Not enough lanes (%d) for DP on [ENCODER:%d:%s]\n",
		     dig_port->max_lanes, intel_encoder->base.base.id,
		     intel_encoder->base.name))
		return false;

	intel_dp_set_source_rates(intel_dp);

	intel_dp->reset_link_params = true;
	intel_dp->pps_pipe = INVALID_PIPE;
	intel_dp->active_pipe = INVALID_PIPE;

	/* Preserve the current hw state. */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
	intel_dp->attached_connector = intel_connector;

	if (intel_dp_is_port_edp(dev_priv, port)) {
		/* eDP is never expected on a Type-C capable PHY. */
		drm_WARN_ON(dev, intel_phy_is_tc(dev_priv, phy));
		type = DRM_MODE_CONNECTOR_eDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		intel_dp->active_pipe = vlv_active_pipe(intel_dp);

	/*
	 * Reflect the connector type in the encoder type so encoder-level
	 * code can distinguish eDP from external DP.
	 */
	if (type == DRM_MODE_CONNECTOR_eDP)
		intel_encoder->type = INTEL_OUTPUT_EDP;

	/* eDP only on port B and/or C on vlv/chv */
	if (drm_WARN_ON(dev, (IS_VALLEYVIEW(dev_priv) ||
			      IS_CHERRYVIEW(dev_priv)) &&
			intel_dp_is_edp(intel_dp) &&
			port != PORT_B && port != PORT_C))
		return false;

	drm_dbg_kms(&dev_priv->drm,
		    "Adding %s connector on [ENCODER:%d:%s]\n",
		    type == DRM_MODE_CONNECTOR_eDP ? "eDP" : "DP",
		    intel_encoder->base.base.id, intel_encoder->base.name);

	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	if (!HAS_GMCH(dev_priv))
		connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	intel_connector->polled = DRM_CONNECTOR_POLL_HPD;

	intel_dp_aux_init(intel_dp);

	intel_connector_attach_encoder(intel_connector, intel_encoder);

	if (HAS_DDI(dev_priv))
		intel_connector->get_hw_state = intel_ddi_connector_get_hw_state;
	else
		intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* init MST on ports that can support it */
	intel_dp_mst_encoder_init(dig_port,
				  intel_connector->base.base.id);

	if (!intel_edp_init_connector(intel_dp, intel_connector)) {
		intel_dp_aux_fini(intel_dp);
		intel_dp_mst_encoder_cleanup(dig_port);
		goto fail;
	}

	intel_dp_add_properties(intel_dp, connector);

	/* HDCP only for external DP, not for eDP panels. */
	if (is_hdcp_supported(dev_priv, port) && !intel_dp_is_edp(intel_dp)) {
		int ret = intel_dp_init_hdcp(dig_port, intel_connector);
		if (ret)
			drm_dbg_kms(&dev_priv->drm,
				    "HDCP init failed, skipping.\n");
	}

	/*
	 * G45 band gap tweak.  NOTE(review): presumably required for
	 * stable DP output on G45 -- confirm against hw documentation.
	 */
	if (IS_G45(dev_priv)) {
		u32 temp = intel_de_read(dev_priv, PEG_BAND_GAP_DATA);
		intel_de_write(dev_priv, PEG_BAND_GAP_DATA,
			       (temp & ~0xf) | 0xd);
	}

	return true;

fail:
	drm_connector_cleanup(connector);

	return false;
}
7863
/*
 * Allocate and initialize a DP digital port on @output_reg/@port:
 * encoder registration, per-platform enable/disable/link-training/
 * signal-level hooks, port detection hooks, and finally the connector.
 * Returns true on success; on failure unwinds all allocations.
 */
bool intel_dp_init(struct drm_i915_private *dev_priv,
		   i915_reg_t output_reg,
		   enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->update_pipe = intel_panel_update_backlight;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	/* Platform-specific modeset sequence hooks. */
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	/* CPT PCH ports (and IVB port A) use the CPT link training style. */
	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	/* Platform-specific vswing/pre-emphasis programming. */
	if (IS_CHERRYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		dig_port->dp.set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_GEN(dev_priv, 6) && port == PORT_A)
		dig_port->dp.set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = g4x_set_signal_levels;

	/* Maximum supported vswing/pre-emphasis levels per platform. */
	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_pre_empemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_pre_empemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;
	dig_port->dp.regs.dp_tp_ctl = DP_TP_CTL(port);
	dig_port->dp.regs.dp_tp_status = DP_TP_STATUS(port);

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	/* CHV: port D is tied to pipe C, ports B/C to pipes A/B. */
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	/* Hotplug/live-status detection hooks per platform and port. */
	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}
7993
7994void intel_dp_mst_suspend(struct drm_i915_private *dev_priv)
7995{
7996 struct intel_encoder *encoder;
7997
7998 for_each_intel_encoder(&dev_priv->drm, encoder) {
7999 struct intel_dp *intel_dp;
8000
8001 if (encoder->type != INTEL_OUTPUT_DDI)
8002 continue;
8003
8004 intel_dp = enc_to_intel_dp(encoder);
8005
8006 if (!intel_dp->can_mst)
8007 continue;
8008
8009 if (intel_dp->is_mst)
8010 drm_dp_mst_topology_mgr_suspend(&intel_dp->mst_mgr);
8011 }
8012}
8013
8014void intel_dp_mst_resume(struct drm_i915_private *dev_priv)
8015{
8016 struct intel_encoder *encoder;
8017
8018 for_each_intel_encoder(&dev_priv->drm, encoder) {
8019 struct intel_dp *intel_dp;
8020 int ret;
8021
8022 if (encoder->type != INTEL_OUTPUT_DDI)
8023 continue;
8024
8025 intel_dp = enc_to_intel_dp(encoder);
8026
8027 if (!intel_dp->can_mst)
8028 continue;
8029
8030 ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst_mgr,
8031 true);
8032 if (ret) {
8033 intel_dp->is_mst = false;
8034 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
8035 false);
8036 }
8037 }
8038}
8039