1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28#include <linux/export.h>
29#include <linux/i2c.h>
30#include <linux/notifier.h>
31#include <linux/slab.h>
32#include <linux/types.h>
33
34#include <asm/byteorder.h>
35
36#include <drm/drm_atomic_helper.h>
37#include <drm/drm_crtc.h>
38#include <drm/drm_dp_helper.h>
39#include <drm/drm_edid.h>
40#include <drm/drm_probe_helper.h>
41
42#include "i915_debugfs.h"
43#include "i915_drv.h"
44#include "intel_atomic.h"
45#include "intel_audio.h"
46#include "intel_connector.h"
47#include "intel_ddi.h"
48#include "intel_display_types.h"
49#include "intel_dp.h"
50#include "intel_dp_aux.h"
51#include "intel_dp_link_training.h"
52#include "intel_dp_mst.h"
53#include "intel_dpio_phy.h"
54#include "intel_fifo_underrun.h"
55#include "intel_hdcp.h"
56#include "intel_hdmi.h"
57#include "intel_hotplug.h"
58#include "intel_lspcon.h"
59#include "intel_lvds.h"
60#include "intel_panel.h"
61#include "intel_pps.h"
62#include "intel_psr.h"
63#include "intel_sideband.h"
64#include "intel_tc.h"
65#include "intel_vdsc.h"
66#include "intel_vrr.h"
67
#define DP_DPRX_ESI_LEN 14

/*
 * DSC encoder throughput limits used by the slice-count computation
 * below. NOTE(review): values presumably in kPixels/s — confirm against
 * the DP DSC spec / hardware docs.
 */
#define DP_DSC_PEAK_PIXEL_RATE 2720000
#define DP_DSC_MAX_ENC_THROUGHPUT_0 340000
#define DP_DSC_MAX_ENC_THROUGHPUT_1 400000

/*
 * FEC overhead factor scaled by 10^6: intel_dp_mode_to_fec_clock()
 * divides (mode_clock * 10^6) by this value, i.e. accounts for an
 * effective FEC overhead of 1/0.972261.
 */
#define DP_DSC_FEC_OVERHEAD_FACTOR 972261

/* Compliance test status bits */
#define INTEL_DP_RESOLUTION_SHIFT_MASK 0
#define INTEL_DP_RESOLUTION_PREFERRED (1 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_STANDARD (2 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_FAILSAFE (3 << INTEL_DP_RESOLUTION_SHIFT_MASK)
83
/* Maps a DP link clock (in kHz) to the DPLL divider values producing it. */
struct dp_link_dpll {
	int clock;		/* link clock in kHz */
	struct dpll dpll;	/* divider settings for that clock */
};
88
/* DPLL dividers for the RBR (1.62 GHz) and HBR (2.7 GHz) rates on G4X. */
static const struct dp_link_dpll g4x_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};
95
/* DPLL dividers for RBR/HBR on PCH-split platforms (ILK/SNB/IVB). */
static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};
102
/* DPLL dividers for RBR/HBR on Valleyview. */
static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};
109
110
111
112
113
/*
 * DPLL dividers for RBR/HBR on Cherryview.
 *
 * NOTE(review): the large hex m2 values suggest the CHV PHY takes m2 in
 * a fractional fixed-point format rather than as a plain integer —
 * confirm against the CHV PLL programming documentation.
 */
static const struct dp_link_dpll chv_dpll[] = {
	{ 162000,
		{ .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,
		{ .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
};
125
/* Return the RBR (162000 kHz) DPLL settings for VLV or CHV as appropriate. */
const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0].dpll : &vlv_dpll[0].dpll;
}
130
131
/* Compressed output bpp values the driver will pick for DSC, ascending. */
static const u8 valid_dsc_bpp[] = {6, 8, 10, 12, 15};
133
134
135
136
/* DSC slice counts the source hardware supports, ascending. */
static const u8 valid_dsc_slicecount[] = {1, 2, 4};
138
139
140
141
142
143
144
145
/**
 * intel_dp_is_edp - is the given port attached to an eDP panel
 * @intel_dp: DP struct
 *
 * Return %true if @intel_dp's encoder is an eDP output.
 */
bool intel_dp_is_edp(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

	return dig_port->base.type == INTEL_OUTPUT_EDP;
}
152
/* Forward declarations for helpers defined later in this file. */
static void intel_dp_link_down(struct intel_encoder *encoder,
			       const struct intel_crtc_state *old_crtc_state);
static void intel_dp_unset_edid(struct intel_dp *intel_dp);
156
157
/*
 * Populate intel_dp->sink_rates[] (ascending) from the sink's DPCD
 * MAX_LINK_RATE, clamped by any LTTPR limit, and honour the quirk for
 * sinks that can only do a max link rate of 3.24 Gbps.
 */
static void intel_dp_set_sink_rates(struct intel_dp *intel_dp)
{
	/* Standard DP link rates in kHz, ascending. */
	static const int dp_rates[] = {
		162000, 270000, 540000, 810000
	};
	int i, max_rate;
	int max_lttpr_rate;

	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CAN_DO_MAX_LINK_RATE_3_24_GBPS)) {
		/* Quirked sinks top out at 3.24 Gbps (324000 kHz). */
		static const int quirk_rates[] = { 162000, 270000, 324000 };

		memcpy(intel_dp->sink_rates, quirk_rates, sizeof(quirk_rates));
		intel_dp->num_sink_rates = ARRAY_SIZE(quirk_rates);

		return;
	}

	max_rate = drm_dp_bw_code_to_link_rate(intel_dp->dpcd[DP_MAX_LINK_RATE]);
	max_lttpr_rate = drm_dp_lttpr_max_link_rate(intel_dp->lttpr_common_caps);
	if (max_lttpr_rate)
		max_rate = min(max_rate, max_lttpr_rate);

	/* Keep only the standard rates the sink (and LTTPRs) can do. */
	for (i = 0; i < ARRAY_SIZE(dp_rates); i++) {
		if (dp_rates[i] > max_rate)
			break;
		intel_dp->sink_rates[i] = dp_rates[i];
	}

	intel_dp->num_sink_rates = i;
}
189
190
/*
 * Given an ascending array of link rates, return how many leading
 * entries are <= @max_rate (i.e. the usable length of the table).
 * Returns 0 when even the lowest rate exceeds @max_rate.
 */
static int intel_dp_rate_limit_len(const int *rates, int len, int max_rate)
{
	int idx;

	/* Scan backwards: the first entry that fits bounds the length. */
	for (idx = len; idx > 0; idx--) {
		if (rates[idx - 1] <= max_rate)
			return idx;
	}

	return 0;
}
203
204
/* Number of common (source ∩ sink) rates that are <= @max_rate. */
static int intel_dp_common_len_rate_limit(const struct intel_dp *intel_dp,
					  int max_rate)
{
	return intel_dp_rate_limit_len(intel_dp->common_rates,
				       intel_dp->num_common_rates, max_rate);
}
211
212
/* Highest rate both source and sink support (common_rates is ascending). */
static int intel_dp_max_common_rate(struct intel_dp *intel_dp)
{
	return intel_dp->common_rates[intel_dp->num_common_rates - 1];
}
217
218
/*
 * Maximum lane count usable on this link: the minimum of what the
 * source port, the sink (clamped by any LTTPR) and the Type-C FIA
 * support.
 */
static int intel_dp_max_common_lane_count(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	int source_max = dig_port->max_lanes;
	int sink_max = drm_dp_max_lane_count(intel_dp->dpcd);
	int fia_max = intel_tc_port_fia_max_lane_count(dig_port);
	int lttpr_max = drm_dp_lttpr_max_lane_count(intel_dp->lttpr_common_caps);

	if (lttpr_max)
		sink_max = min(sink_max, lttpr_max);

	return min3(source_max, sink_max, fia_max);
}
232
/* Current maximum lane count (may have been reduced by link-train fallback). */
int intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	return intel_dp->max_link_lane_count;
}
237
/* Bandwidth (in kBytes/s units) needed for @pixel_clock kHz at @bpp. */
int
intel_dp_link_required(int pixel_clock, int bpp)
{
	/* pixel_clock is in kHz, divide bpp by 8 for bit to Byte conversion */
	return DIV_ROUND_UP(pixel_clock * bpp, 8);
}
244
/* Maximum data rate for the given link parameters. */
int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	/*
	 * max_link_clock is the link symbol clock (LS_Clk) in kHz, not the
	 * raw bit rate. 8 bits of data are transmitted per LS_Clk per lane,
	 * so the 8b/10b channel-coding overhead is already accounted for
	 * and the product is directly comparable to
	 * intel_dp_link_required()'s kBytes/s result.
	 */
	return max_link_clock * max_lanes;
}
256
/*
 * Big joiner (splitting one mode across two pipes) is available on
 * gen12+, and on gen11 on every port except port A.
 */
bool intel_dp_can_bigjoiner(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	return INTEL_GEN(dev_priv) >= 12 ||
		(INTEL_GEN(dev_priv) == 11 &&
		 encoder->port != PORT_A);
}
267
/* Max source link rate on Cannonlake, depending on SKU voltage and port. */
static int cnl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;

	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	/* Low voltage SKUs are limited to max of 5.4G */
	if (voltage == VOLTAGE_INFO_0_85V)
		return 540000;

	/* For this SKU 8.1G is supported in all ports */
	if (IS_CNL_WITH_PORT_F(dev_priv))
		return 810000;

	/* For other SKUs, max rate on ports A and D is 5.4G */
	if (port == PORT_A || port == PORT_D)
		return 540000;

	return 810000;
}
290
/*
 * Max source link rate on Icelake: combo PHY ports driving external
 * (non-eDP) displays are limited to HBR2 (5.4G); everything else can
 * do HBR3 (8.1G).
 */
static int icl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port);

	if (intel_phy_is_combo(dev_priv, phy) &&
	    !intel_dp_is_edp(intel_dp))
		return 540000;

	return 810000;
}
303
/* Max source link rate on Elkhartlake: eDP limited to HBR2, else HBR3. */
static int ehl_max_source_rate(struct intel_dp *intel_dp)
{
	if (intel_dp_is_edp(intel_dp))
		return 540000;

	return 810000;
}
311
/*
 * Select the platform-specific source rate table for this port, then
 * clamp it by the platform/port max rate and any VBT-imposed limit.
 * Must only be called once per intel_dp (enforced by the WARN below).
 */
static void
intel_dp_set_source_rates(struct intel_dp *intel_dp)
{
	/* All rate tables must be kept sorted in ascending order. */
	static const int cnl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000, 648000, 810000
	};
	static const int bxt_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000
	};
	static const int skl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000
	};
	static const int hsw_rates[] = {
		162000, 270000, 540000
	};
	static const int g4x_rates[] = {
		162000, 270000
	};
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	const int *source_rates;
	int size, max_rate = 0, vbt_max_rate;

	/* This should only be done once */
	drm_WARN_ON(&dev_priv->drm,
		    intel_dp->source_rates || intel_dp->num_source_rates);

	if (INTEL_GEN(dev_priv) >= 10) {
		source_rates = cnl_rates;
		size = ARRAY_SIZE(cnl_rates);
		if (IS_GEN(dev_priv, 10))
			max_rate = cnl_max_source_rate(intel_dp);
		else if (IS_JSL_EHL(dev_priv))
			max_rate = ehl_max_source_rate(intel_dp);
		else
			max_rate = icl_max_source_rate(intel_dp);
	} else if (IS_GEN9_LP(dev_priv)) {
		source_rates = bxt_rates;
		size = ARRAY_SIZE(bxt_rates);
	} else if (IS_GEN9_BC(dev_priv)) {
		source_rates = skl_rates;
		size = ARRAY_SIZE(skl_rates);
	} else if ((IS_HASWELL(dev_priv) && !IS_HSW_ULX(dev_priv)) ||
		   IS_BROADWELL(dev_priv)) {
		source_rates = hsw_rates;
		size = ARRAY_SIZE(hsw_rates);
	} else {
		source_rates = g4x_rates;
		size = ARRAY_SIZE(g4x_rates);
	}

	/* The VBT may further limit the max link rate for this port. */
	vbt_max_rate = intel_bios_dp_max_link_rate(encoder);
	if (max_rate && vbt_max_rate)
		max_rate = min(max_rate, vbt_max_rate);
	else if (vbt_max_rate)
		max_rate = vbt_max_rate;

	if (max_rate)
		size = intel_dp_rate_limit_len(source_rates, size, max_rate);

	intel_dp->source_rates = source_rates;
	intel_dp->num_source_rates = size;
}
377
378static int intersect_rates(const int *source_rates, int source_len,
379 const int *sink_rates, int sink_len,
380 int *common_rates)
381{
382 int i = 0, j = 0, k = 0;
383
384 while (i < source_len && j < sink_len) {
385 if (source_rates[i] == sink_rates[j]) {
386 if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
387 return k;
388 common_rates[k] = source_rates[i];
389 ++k;
390 ++i;
391 ++j;
392 } else if (source_rates[i] < sink_rates[j]) {
393 ++i;
394 } else {
395 ++j;
396 }
397 }
398 return k;
399}
400
401
/*
 * Return the index of @rate in @rates[0..len), or -1 when the rate is
 * not present in the table.
 */
static int intel_dp_rate_index(const int *rates, int len, int rate)
{
	int pos = 0;

	while (pos < len) {
		if (rates[pos] == rate)
			return pos;
		pos++;
	}

	return -1;
}
412
/*
 * Compute the intersection of source and sink rate tables. Both must
 * already be populated. Falls back to RBR alone if (unexpectedly) no
 * rate is common, so the link always has something to train at.
 */
static void intel_dp_set_common_rates(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	drm_WARN_ON(&i915->drm,
		    !intel_dp->num_source_rates || !intel_dp->num_sink_rates);

	intel_dp->num_common_rates = intersect_rates(intel_dp->source_rates,
						     intel_dp->num_source_rates,
						     intel_dp->sink_rates,
						     intel_dp->num_sink_rates,
						     intel_dp->common_rates);

	/* Paranoia, there should always be something in common. */
	if (drm_WARN_ON(&i915->drm, intel_dp->num_common_rates == 0)) {
		intel_dp->common_rates[0] = 162000;
		intel_dp->num_common_rates = 1;
	}
}
432
/*
 * Sanity-check a (rate, lane count) pair against the currently allowed
 * maxima. Note this only bounds the values; it does not verify the rate
 * is actually in the common rate table.
 */
static bool intel_dp_link_params_valid(struct intel_dp *intel_dp, int link_rate,
				       u8 lane_count)
{
	if (link_rate == 0 ||
	    link_rate > intel_dp->max_link_rate)
		return false;

	if (lane_count == 0 ||
	    lane_count > intel_dp_max_lane_count(intel_dp))
		return false;

	return true;
}
451
/*
 * For eDP, a link-training fallback is only acceptable if the panel's
 * fixed mode still fits at the reduced link parameters, assuming the
 * minimum 18 bpp (6 bpc RGB).
 */
static bool intel_dp_can_link_train_fallback_for_edp(struct intel_dp *intel_dp,
						     int link_rate,
						     u8 lane_count)
{
	const struct drm_display_mode *fixed_mode =
		intel_dp->attached_connector->panel.fixed_mode;
	int mode_rate, max_rate;

	mode_rate = intel_dp_link_required(fixed_mode->clock, 18);
	max_rate = intel_dp_max_data_rate(link_rate, lane_count);
	if (mode_rate > max_rate)
		return false;

	return true;
}
467
/*
 * Pick reduced link parameters after a failed link training, preferring
 * to drop the link rate first, then halve the lane count. Returns 0 if
 * a retry should be attempted, -1 when no further fallback exists.
 */
int intel_dp_get_link_train_fallback_values(struct intel_dp *intel_dp,
					    int link_rate, u8 lane_count)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int index;

	/* MST does not support per-link fallback; just report the failure. */
	if (intel_dp->is_mst) {
		drm_err(&i915->drm, "Link Training Unsuccessful\n");
		return -1;
	}

	/* For eDP, first retry with the maximum (known-good) parameters. */
	if (intel_dp_is_edp(intel_dp) && !intel_dp->use_max_params) {
		drm_dbg_kms(&i915->drm,
			    "Retrying Link training for eDP with max parameters\n");
		intel_dp->use_max_params = true;
		return 0;
	}

	index = intel_dp_rate_index(intel_dp->common_rates,
				    intel_dp->num_common_rates,
				    link_rate);
	if (index > 0) {
		/* Try the next lower common rate at the same lane count. */
		if (intel_dp_is_edp(intel_dp) &&
		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
							      intel_dp->common_rates[index - 1],
							      lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "Retrying Link training for eDP with same parameters\n");
			return 0;
		}
		intel_dp->max_link_rate = intel_dp->common_rates[index - 1];
		intel_dp->max_link_lane_count = lane_count;
	} else if (lane_count > 1) {
		/* Lowest rate already; halve the lane count at max rate. */
		if (intel_dp_is_edp(intel_dp) &&
		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
							      intel_dp_max_common_rate(intel_dp),
							      lane_count >> 1)) {
			drm_dbg_kms(&i915->drm,
				    "Retrying Link training for eDP with same parameters\n");
			return 0;
		}
		intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
		intel_dp->max_link_lane_count = lane_count >> 1;
	} else {
		drm_err(&i915->drm, "Link Training Unsuccessful\n");
		return -1;
	}

	return 0;
}
522
/*
 * Scale a mode clock by the FEC overhead: effectively
 * mode_clock / 0.972261 (DP_DSC_FEC_OVERHEAD_FACTOR is 0.972261 * 10^6).
 */
u32 intel_dp_mode_to_fec_clock(u32 mode_clock)
{
	return div_u64(mul_u32_u32(mode_clock, 1000000U),
		       DP_DSC_FEC_OVERHEAD_FACTOR);
}
528
/* Small-joiner RAM size in bits: 7680 bytes on gen11+, 6144 bytes before. */
static int
small_joiner_ram_size_bits(struct drm_i915_private *i915)
{
	if (INTEL_GEN(i915) >= 11)
		return 7680 * 8;
	else
		return 6144 * 8;
}
537
/*
 * Compute the maximum DSC compressed output bpp for the given link and
 * mode, bounded by link bandwidth, small-joiner RAM and (optionally)
 * big-joiner throughput, then snapped down to a value in valid_dsc_bpp[].
 *
 * Returns the bpp in U6.4 fixed point (hence the << 4), or 0 when even
 * the lowest valid DSC bpp cannot be sustained.
 */
static u16 intel_dp_dsc_get_output_bpp(struct drm_i915_private *i915,
				       u32 link_clock, u32 lane_count,
				       u32 mode_clock, u32 mode_hdisplay,
				       bool bigjoiner)
{
	u32 bits_per_pixel, max_bpp_small_joiner_ram;
	int i;

	/*
	 * Available link bandwidth (bits, incl. 8b symbols => * 8) divided
	 * by the FEC-adjusted pixel clock gives the max link-limited bpp.
	 */
	bits_per_pixel = (link_clock * lane_count * 8) /
		intel_dp_mode_to_fec_clock(mode_clock);
	drm_dbg_kms(&i915->drm, "Max link bpp: %u\n", bits_per_pixel);

	/* Small joiner RAM caps the bpp per horizontal pixel. */
	max_bpp_small_joiner_ram = small_joiner_ram_size_bits(i915) /
		mode_hdisplay;

	/* With big joiner each pipe handles half the line, doubling the cap. */
	if (bigjoiner)
		max_bpp_small_joiner_ram *= 2;

	drm_dbg_kms(&i915->drm, "Max small joiner bpp: %u\n",
		    max_bpp_small_joiner_ram);

	bits_per_pixel = min(bits_per_pixel, max_bpp_small_joiner_ram);

	if (bigjoiner) {
		/* Big joiner throughput is bounded by CD clock. */
		u32 max_bpp_bigjoiner =
			i915->max_cdclk_freq * 48 /
			intel_dp_mode_to_fec_clock(mode_clock);

		DRM_DEBUG_KMS("Max big joiner bpp: %u\n", max_bpp_bigjoiner);
		bits_per_pixel = min(bits_per_pixel, max_bpp_bigjoiner);
	}

	/* Error out if the max bpp is less than smallest allowed valid bpp */
	if (bits_per_pixel < valid_dsc_bpp[0]) {
		drm_dbg_kms(&i915->drm, "Unsupported BPP %u, min %u\n",
			    bits_per_pixel, valid_dsc_bpp[0]);
		return 0;
	}

	/* Snap down to the largest valid_dsc_bpp[] entry not exceeding it. */
	for (i = 0; i < ARRAY_SIZE(valid_dsc_bpp) - 1; i++) {
		if (bits_per_pixel < valid_dsc_bpp[i + 1])
			break;
	}
	bits_per_pixel = valid_dsc_bpp[i];

	/* Return the bpp in U6.4 format. */
	return bits_per_pixel << 4;
}
601
/*
 * Pick the smallest valid DSC slice count that satisfies both the
 * encoder throughput limits and the sink's maximum slice width/count.
 * Returns 0 when no supported slice count works.
 */
static u8 intel_dp_dsc_get_slice_count(struct intel_dp *intel_dp,
				       int mode_clock, int mode_hdisplay,
				       bool bigjoiner)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 min_slice_count, i;
	int max_slice_width;

	/* Minimum slices needed to stay within per-slice throughput. */
	if (mode_clock <= DP_DSC_PEAK_PIXEL_RATE)
		min_slice_count = DIV_ROUND_UP(mode_clock,
					       DP_DSC_MAX_ENC_THROUGHPUT_0);
	else
		min_slice_count = DIV_ROUND_UP(mode_clock,
					       DP_DSC_MAX_ENC_THROUGHPUT_1);

	max_slice_width = drm_dp_dsc_sink_max_slice_width(intel_dp->dsc_dpcd);
	if (max_slice_width < DP_DSC_MIN_SLICE_WIDTH_VALUE) {
		drm_dbg_kms(&i915->drm,
			    "Unsupported slice width %d by DP DSC Sink device\n",
			    max_slice_width);
		return 0;
	}

	/* Also enough slices so each stays within the sink's max width. */
	min_slice_count = max_t(u8, min_slice_count,
				DIV_ROUND_UP(mode_hdisplay,
					     max_slice_width));

	/* Find the closest match to the valid slice count values */
	for (i = 0; i < ARRAY_SIZE(valid_dsc_slicecount); i++) {
		u8 test_slice_count = valid_dsc_slicecount[i] << bigjoiner;

		if (test_slice_count >
		    drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd, false))
			break;

		/* big joiner needs at least 2 slices per pipe (4 total). */
		if (bigjoiner && test_slice_count < 4)
			continue;

		if (min_slice_count <= test_slice_count)
			return test_slice_count;
	}

	drm_dbg_kms(&i915->drm, "Unsupported Slice Count %d\n",
		    min_slice_count);
	return 0;
}
649
/*
 * Choose the pipe output format for @mode. RGB is used unless the mode
 * is 4:2:0-only, in which case the format depends on what conversions
 * the downstream facing port (DFP) can do for us.
 */
static enum intel_output_format
intel_dp_output_format(struct drm_connector *connector,
		       const struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	const struct drm_display_info *info = &connector->display_info;

	if (!connector->ycbcr_420_allowed ||
	    !drm_mode_is_420_only(info, mode))
		return INTEL_OUTPUT_FORMAT_RGB;

	/* DFP can convert RGB -> YCbCr -> 4:2:0: output plain RGB. */
	if (intel_dp->dfp.rgb_to_ycbcr &&
	    intel_dp->dfp.ycbcr_444_to_420)
		return INTEL_OUTPUT_FORMAT_RGB;

	/* DFP downsamples 4:4:4 -> 4:2:0; otherwise we output 4:2:0 ourselves. */
	if (intel_dp->dfp.ycbcr_444_to_420)
		return INTEL_OUTPUT_FORMAT_YCBCR444;
	else
		return INTEL_OUTPUT_FORMAT_YCBCR420;
}
670
/* Minimum pipe bpp: 6 bpc for RGB, 8 bpc for YCbCr formats. */
int intel_dp_min_bpp(enum intel_output_format output_format)
{
	if (output_format == INTEL_OUTPUT_FORMAT_RGB)
		return 6 * 3;
	else
		return 8 * 3;
}
678
/*
 * Convert pipe bpp to the effective bpp on the link: YCbCr 4:2:0
 * carries half the data of the equivalent 4:4:4 stream.
 */
static int intel_dp_output_bpp(enum intel_output_format output_format, int bpp)
{
	if (output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		bpp /= 2;

	return bpp;
}
691
/* Minimum link bpp for @mode, given the output format we'd pick for it. */
static int
intel_dp_mode_min_output_bpp(struct drm_connector *connector,
			     const struct drm_display_mode *mode)
{
	enum intel_output_format output_format =
		intel_dp_output_format(connector, mode);

	return intel_dp_output_bpp(output_format, intel_dp_min_bpp(output_format));
}
701
/*
 * Reject hdisplay == 4096 on pre-DDI platforms.
 *
 * NOTE(review): presumably a hardware limitation of the older DP ports
 * with 4096-wide timings — confirm the exact failure mode against the
 * platform errata before relaxing this.
 */
static bool intel_dp_hdisplay_bad(struct drm_i915_private *dev_priv,
				  int hdisplay)
{
	return hdisplay == 4096 && !HAS_DDI(dev_priv);
}
720
/*
 * Validate @mode against the limits of any downstream facing port
 * (DP-to-HDMI protocol converter etc.): FRL bandwidth for PCONs,
 * max dotclock, and the TMDS clock range for HDMI outputs.
 */
static enum drm_mode_status
intel_dp_mode_valid_downstream(struct intel_connector *connector,
			       const struct drm_display_mode *mode,
			       int target_clock)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	int tmds_clock;

	/* If PCON supports FRL MODE, check FRL bandwidth constraints */
	if (intel_dp->dfp.pcon_max_frl_bw) {
		int target_bw;
		int max_frl_bw;
		int bpp = intel_dp_mode_min_output_bpp(&connector->base, mode);

		target_bw = bpp * target_clock;

		max_frl_bw = intel_dp->dfp.pcon_max_frl_bw;

		/* converting bw from Gbps to Kbps */
		max_frl_bw = max_frl_bw * 1000000;

		if (target_bw > max_frl_bw)
			return MODE_CLOCK_HIGH;

		return MODE_OK;
	}

	if (intel_dp->dfp.max_dotclock &&
	    target_clock > intel_dp->dfp.max_dotclock)
		return MODE_CLOCK_HIGH;

	/* Assume 8bpc; 4:2:0 output halves the TMDS clock. */
	tmds_clock = target_clock;
	if (drm_mode_is_420_only(info, mode))
		tmds_clock /= 2;

	if (intel_dp->dfp.min_tmds_clock &&
	    tmds_clock < intel_dp->dfp.min_tmds_clock)
		return MODE_CLOCK_LOW;
	if (intel_dp->dfp.max_tmds_clock &&
	    tmds_clock > intel_dp->dfp.max_tmds_clock)
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}
767
/*
 * drm_connector_helper_funcs .mode_valid() hook: check @mode against
 * panel limits, dotclock, link bandwidth (possibly with big joiner
 * and/or DSC), known-bad timings, downstream port limits and plane size.
 */
static enum drm_mode_status
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct drm_display_mode *fixed_mode = intel_connector->panel.fixed_mode;
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	int target_clock = mode->clock;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int max_dotclk = dev_priv->max_dotclk_freq;
	u16 dsc_max_output_bpp = 0;
	u8 dsc_slice_count = 0;
	enum drm_mode_status status;
	bool dsc = false, bigjoiner = false;

	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return MODE_NO_DBLESCAN;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		return MODE_H_ILLEGAL;

	/* eDP panels must not exceed the fixed mode's active area. */
	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
		if (mode->hdisplay > fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > fixed_mode->vdisplay)
			return MODE_PANEL;

		target_clock = fixed_mode->clock;
	}

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	/* Big joiner doubles the usable dotclock when available. */
	if ((target_clock > max_dotclk || mode->hdisplay > 5120) &&
	    intel_dp_can_bigjoiner(intel_dp)) {
		bigjoiner = true;
		max_dotclk *= 2;
	}
	if (target_clock > max_dotclk)
		return MODE_CLOCK_HIGH;

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
	mode_rate = intel_dp_link_required(target_clock,
					   intel_dp_mode_min_output_bpp(connector, mode));

	if (intel_dp_hdisplay_bad(dev_priv, mode->hdisplay))
		return MODE_H_ILLEGAL;

	/*
	 * If the mode doesn't fit uncompressed, see whether DSC could make
	 * it fit (gen10+/GLK with a DSC-capable sink).
	 */
	if ((INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) &&
	    drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd)) {
		if (intel_dp_is_edp(intel_dp)) {
			dsc_max_output_bpp =
				drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4;
			dsc_slice_count =
				drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
								true);
		} else if (drm_dp_sink_supports_fec(intel_dp->fec_capable)) {
			dsc_max_output_bpp =
				intel_dp_dsc_get_output_bpp(dev_priv,
							    max_link_clock,
							    max_lanes,
							    target_clock,
							    mode->hdisplay,
							    bigjoiner) >> 4;
			dsc_slice_count =
				intel_dp_dsc_get_slice_count(intel_dp,
							     target_clock,
							     mode->hdisplay,
							     bigjoiner);
		}

		dsc = dsc_max_output_bpp && dsc_slice_count;
	}

	/* Big joiner modes are only feasible with DSC. */
	if (bigjoiner && !dsc)
		return MODE_CLOCK_HIGH;

	if (mode_rate > max_rate && !dsc)
		return MODE_CLOCK_HIGH;

	status = intel_dp_mode_valid_downstream(intel_connector,
						mode, target_clock);
	if (status != MODE_OK)
		return status;

	return intel_mode_valid_max_plane_size(dev_priv, mode, bigjoiner);
}
865
/* Does the source support HBR2 (5.4 GHz)? source_rates is ascending. */
bool intel_dp_source_supports_hbr2(struct intel_dp *intel_dp)
{
	int max_rate = intel_dp->source_rates[intel_dp->num_source_rates - 1];

	return max_rate >= 540000;
}
872
/* Does the source support HBR3 (8.1 GHz)? source_rates is ascending. */
bool intel_dp_source_supports_hbr3(struct intel_dp *intel_dp)
{
	int max_rate = intel_dp->source_rates[intel_dp->num_source_rates - 1];

	return max_rate >= 810000;
}
879
/*
 * On pre-shared-DPLL platforms, look up fixed DPLL dividers for the
 * selected port clock and store them in the crtc state. No-op (leaves
 * clock_set false) when the platform or clock has no table entry.
 */
static void
intel_dp_set_clock(struct intel_encoder *encoder,
		   struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dp_link_dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].clock) {
				pipe_config->dpll = divisor[i].dpll;
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}
912
/*
 * Format @nelem ints from @array into @str as a comma-separated list,
 * stopping silently once the buffer (of size @len) would overflow.
 */
static void snprintf_int_array(char *str, size_t len,
			       const int *array, int nelem)
{
	int i;

	str[0] = '\0';

	for (i = 0; i < nelem; i++) {
		const char *sep = i ? ", " : "";
		int written = snprintf(str, len, "%s%d", sep, array[i]);

		/* snprintf reports the would-be length; bail on truncation. */
		if (written >= len)
			return;

		str += written;
		len -= written;
	}
}
928
/* Dump source/sink/common rate tables to the KMS debug log. */
static void intel_dp_print_rates(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	char str[128]; /* FIXME: too big for stack? */

	if (!drm_debug_enabled(DRM_UT_KMS))
		return;

	snprintf_int_array(str, sizeof(str),
			   intel_dp->source_rates, intel_dp->num_source_rates);
	drm_dbg_kms(&i915->drm, "source rates: %s\n", str);

	snprintf_int_array(str, sizeof(str),
			   intel_dp->sink_rates, intel_dp->num_sink_rates);
	drm_dbg_kms(&i915->drm, "sink rates: %s\n", str);

	snprintf_int_array(str, sizeof(str),
			   intel_dp->common_rates, intel_dp->num_common_rates);
	drm_dbg_kms(&i915->drm, "common rates: %s\n", str);
}
949
/*
 * Highest common rate not exceeding the current max_link_rate limit.
 * Falls back to RBR (162000) if the limit excludes every common rate,
 * which should not happen (hence the WARN).
 */
int
intel_dp_max_link_rate(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int len;

	len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->max_link_rate);
	if (drm_WARN_ON(&i915->drm, len <= 0))
		return 162000;

	return intel_dp->common_rates[len - 1];
}
962
/*
 * Index of @rate in the sink rate table, as written to the sink's
 * DP_LINK_RATE_SET register (eDP 1.4 rate-select). WARNs and uses
 * index 0 if the rate is not in the table.
 */
int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int i = intel_dp_rate_index(intel_dp->sink_rates,
				    intel_dp->num_sink_rates, rate);

	if (drm_WARN_ON(&i915->drm, i < 0))
		i = 0;

	return i;
}
974
/*
 * Translate a port clock into the values to program into the sink:
 * either an eDP 1.4 rate-select index (link_bw = 0) or a classic DP
 * link BW code (rate_select = 0) — the two schemes are exclusive.
 */
void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
			   u8 *link_bw, u8 *rate_select)
{
	/* eDP 1.4 rate select method. */
	if (intel_dp->use_rate_select) {
		*link_bw = 0;
		*rate_select =
			intel_dp_rate_select(intel_dp, port_clock);
	} else {
		*link_bw = drm_dp_link_rate_to_bw_code(port_clock);
		*rate_select = 0;
	}
}
988
/*
 * FEC support on the source side: all pipes on gen12+, and on gen11
 * every transcoder except transcoder A.
 */
static bool intel_dp_source_supports_fec(struct intel_dp *intel_dp,
					 const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (INTEL_GEN(dev_priv) >= 12)
		return true;

	if (IS_GEN(dev_priv, 11) && pipe_config->cpu_transcoder != TRANSCODER_A)
		return true;

	return false;
}
1003
/* FEC is usable only when both source and sink support it. */
static bool intel_dp_supports_fec(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *pipe_config)
{
	return intel_dp_source_supports_fec(intel_dp, pipe_config) &&
		drm_dp_sink_supports_fec(intel_dp->fec_capable);
}
1010
/*
 * DSC is usable when source and sink both support it; on DP SST
 * (non-eDP) it additionally requires FEC to be enabled.
 */
static bool intel_dp_supports_dsc(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP) && !crtc_state->fec_enable)
		return false;

	return intel_dsc_source_support(crtc_state) &&
		drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd);
}
1020
/*
 * Will the HDMI sink end up receiving 4:2:0? Either we output 4:2:0
 * directly, or we output 4:4:4 and the DFP downsamples it for us.
 */
static bool intel_dp_hdmi_ycbcr420(struct intel_dp *intel_dp,
				   const struct intel_crtc_state *crtc_state)
{
	return crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
		(crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444 &&
		 intel_dp->dfp.ycbcr_444_to_420);
}
1028
/*
 * TMDS clock the downstream HDMI link will run at: scaled by bpc/8 for
 * deep color and halved when the sink receives 4:2:0.
 */
static int intel_dp_hdmi_tmds_clock(struct intel_dp *intel_dp,
				    const struct intel_crtc_state *crtc_state, int bpc)
{
	int clock = crtc_state->hw.adjusted_mode.crtc_clock * bpc / 8;

	if (intel_dp_hdmi_ycbcr420(intel_dp, crtc_state))
		clock /= 2;

	return clock;
}
1039
/* Is the resulting TMDS clock within the DFP's advertised min/max range? */
static bool intel_dp_hdmi_tmds_clock_valid(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state, int bpc)
{
	int tmds_clock = intel_dp_hdmi_tmds_clock(intel_dp, crtc_state, bpc);

	if (intel_dp->dfp.min_tmds_clock &&
	    tmds_clock < intel_dp->dfp.min_tmds_clock)
		return false;

	if (intel_dp->dfp.max_tmds_clock &&
	    tmds_clock > intel_dp->dfp.max_tmds_clock)
		return false;

	return true;
}
1055
/*
 * Can the downstream HDMI sink accept @bpc deep color for this state?
 * Requires both the generic HDMI deep-color constraints and a TMDS
 * clock within the DFP's range.
 */
static bool intel_dp_hdmi_deep_color_possible(struct intel_dp *intel_dp,
					      const struct intel_crtc_state *crtc_state,
					      int bpc)
{
	return intel_hdmi_deep_color_possible(crtc_state, bpc,
					      intel_dp->has_hdmi_sink,
					      intel_dp_hdmi_ycbcr420(intel_dp, crtc_state)) &&
		intel_dp_hdmi_tmds_clock_valid(intel_dp, crtc_state, bpc);
}
1066
/*
 * Maximum pipe bpp for this state: starts from the pipe bpp, clamped by
 * the DFP's max bpc, reduced until the downstream HDMI TMDS limits are
 * met, and for eDP possibly clamped to a VBT-provided panel bpp.
 */
static int intel_dp_max_bpp(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	int bpp, bpc;

	bpc = crtc_state->pipe_bpp / 3;

	if (intel_dp->dfp.max_bpc)
		bpc = min_t(int, bpc, intel_dp->dfp.max_bpc);

	if (intel_dp->dfp.min_tmds_clock) {
		/* Step deep color down (12 -> 10) until the TMDS clock fits;
		 * falls through to 8 bpc if neither works. */
		for (; bpc >= 10; bpc -= 2) {
			if (intel_dp_hdmi_deep_color_possible(intel_dp, crtc_state, bpc))
				break;
		}
	}

	bpp = bpc * 3;
	if (intel_dp_is_edp(intel_dp)) {
		/* Get bpp from vbt only for panels that don't report bpc. */
		if (intel_connector->base.display_info.bpc == 0 &&
		    dev_priv->vbt.edp.bpp && dev_priv->vbt.edp.bpp < bpp) {
			drm_dbg_kms(&dev_priv->drm,
				    "clamping bpp for eDP panel to BIOS-provided %i\n",
				    dev_priv->vbt.edp.bpp);
			bpp = dev_priv->vbt.edp.bpp;
		}
	}

	return bpp;
}
1100
1101
/*
 * If a DP compliance test is in progress, override the link config
 * limits (bpp and/or link rate + lane count) with the values the test
 * requested, so the normal link computation picks them.
 */
void
intel_dp_adjust_compliance_config(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  struct link_config_limits *limits)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	/* Pin the bpp requested by the EDID compliance test. */
	if (intel_dp->compliance.test_data.bpc != 0) {
		int bpp = 3 * intel_dp->compliance.test_data.bpc;

		limits->min_bpp = limits->max_bpp = bpp;
		pipe_config->dither_force_disable = bpp == 6 * 3;

		drm_dbg_kms(&i915->drm, "Setting pipe_bpp to %d\n", bpp);
	}

	/* Use values requested by Compliance Test Request */
	if (intel_dp->compliance.test_type == DP_TEST_LINK_TRAINING) {
		int index;

		/* Only honour validated (rate, lane count) requests. */
		if (intel_dp_link_params_valid(intel_dp, intel_dp->compliance.test_link_rate,
					       intel_dp->compliance.test_lane_count)) {
			index = intel_dp_rate_index(intel_dp->common_rates,
						    intel_dp->num_common_rates,
						    intel_dp->compliance.test_link_rate);
			if (index >= 0)
				limits->min_clock = limits->max_clock = index;
			limits->min_lane_count = limits->max_lane_count =
				intel_dp->compliance.test_lane_count;
		}
	}
}
1138
1139
/*
 * Optimize link config in order of: max bpp, min clock, min lanes.
 * Returns 0 and fills lane_count/pipe_bpp/port_clock on success,
 * -EINVAL if no combination within the limits carries the mode.
 */
static int
intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  const struct link_config_limits *limits)
{
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	int bpp, clock, lane_count;
	int mode_rate, link_clock, link_avail;

	for (bpp = limits->max_bpp; bpp >= limits->min_bpp; bpp -= 2 * 3) {
		int output_bpp = intel_dp_output_bpp(pipe_config->output_format, bpp);

		mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
						   output_bpp);

		for (clock = limits->min_clock; clock <= limits->max_clock; clock++) {
			for (lane_count = limits->min_lane_count;
			     lane_count <= limits->max_lane_count;
			     lane_count <<= 1) {
				link_clock = intel_dp->common_rates[clock];
				link_avail = intel_dp_max_data_rate(link_clock,
								    lane_count);

				if (mode_rate <= link_avail) {
					pipe_config->lane_count = lane_count;
					pipe_config->pipe_bpp = bpp;
					pipe_config->port_clock = link_clock;

					return 0;
				}
			}
		}
	}

	return -EINVAL;
}
1176
1177
/*
 * Optimize link config in order of: max bpp, min lanes, min clock
 * (lane loop outermost, unlike the _wide variant above). Returns 0 on
 * success, -EINVAL if nothing within the limits carries the mode.
 */
static int
intel_dp_compute_link_config_fast(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  const struct link_config_limits *limits)
{
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	int bpp, clock, lane_count;
	int mode_rate, link_clock, link_avail;

	for (bpp = limits->max_bpp; bpp >= limits->min_bpp; bpp -= 2 * 3) {
		int output_bpp = intel_dp_output_bpp(pipe_config->output_format, bpp);

		mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
						   output_bpp);

		for (lane_count = limits->min_lane_count;
		     lane_count <= limits->max_lane_count;
		     lane_count <<= 1) {
			for (clock = limits->min_clock; clock <= limits->max_clock; clock++) {
				link_clock = intel_dp->common_rates[clock];
				link_avail = intel_dp_max_data_rate(link_clock,
								    lane_count);

				if (mode_rate <= link_avail) {
					pipe_config->lane_count = lane_count;
					pipe_config->pipe_bpp = bpp;
					pipe_config->port_clock = link_clock;

					return 0;
				}
			}
		}
	}

	return -EINVAL;
}
1214
/*
 * Pick the highest sink-supported DSC input bpc not exceeding
 * @dsc_max_bpc and return the corresponding pipe bpp (bpc * 3), or 0
 * when none fits. dsc_bpc[] is returned by the helper in descending
 * preference order.
 */
static int intel_dp_dsc_compute_bpp(struct intel_dp *intel_dp, u8 dsc_max_bpc)
{
	int i, num_bpc;
	u8 dsc_bpc[3] = {0};

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(intel_dp->dsc_dpcd,
						       dsc_bpc);
	for (i = 0; i < num_bpc; i++) {
		if (dsc_max_bpc >= dsc_bpc[i])
			return dsc_bpc[i] * 3;
	}

	return 0;
}
1229
/* Minimum DSC minor version the source supports (capped against the sink). */
#define DSC_SUPPORTED_VERSION_MIN		1
1231
/*
 * Fill in the DSC configuration (struct drm_dsc_config) for this crtc
 * state from the sink's DSC DPCD capabilities and the source's
 * constraints, then compute the rate-control parameters.
 * Returns 0 on success or a negative error code.
 */
static int intel_dp_dsc_compute_params(struct intel_encoder *encoder,
				       struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	u8 line_buf_depth;
	int ret;

	vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST;

	ret = intel_dsc_compute_params(encoder, crtc_state);
	if (ret)
		return ret;

	/*
	 * Pick the largest slice height (8/4/2) that evenly divides the
	 * picture height.
	 */
	if (vdsc_cfg->pic_height % 8 == 0)
		vdsc_cfg->slice_height = 8;
	else if (vdsc_cfg->pic_height % 4 == 0)
		vdsc_cfg->slice_height = 4;
	else
		vdsc_cfg->slice_height = 2;

	/* DSC version: sink's major, minor capped to what we support. */
	vdsc_cfg->dsc_version_major =
		(intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
		 DP_DSC_MAJOR_MASK) >> DP_DSC_MAJOR_SHIFT;
	vdsc_cfg->dsc_version_minor =
		min(DSC_SUPPORTED_VERSION_MIN,
		    (intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
		     DP_DSC_MINOR_MASK) >> DP_DSC_MINOR_SHIFT);

	vdsc_cfg->convert_rgb = intel_dp->dsc_dpcd[DP_DSC_DEC_COLOR_FORMAT_CAP - DP_DSC_SUPPORT] &
		DP_DSC_RGB;

	line_buf_depth = drm_dp_dsc_sink_line_buf_depth(intel_dp->dsc_dpcd);
	if (!line_buf_depth) {
		drm_dbg_kms(&i915->drm,
			    "DSC Sink Line Buffer Depth invalid\n");
		return -EINVAL;
	}

	/* Clamp the line buffer depth to the per-DSC-version maximum. */
	if (vdsc_cfg->dsc_version_minor == 2)
		vdsc_cfg->line_buf_depth = (line_buf_depth == DSC_1_2_MAX_LINEBUF_DEPTH_BITS) ?
			DSC_1_2_MAX_LINEBUF_DEPTH_VAL : line_buf_depth;
	else
		vdsc_cfg->line_buf_depth = (line_buf_depth > DSC_1_1_MAX_LINEBUF_DEPTH_BITS) ?
			DSC_1_1_MAX_LINEBUF_DEPTH_BITS : line_buf_depth;

	vdsc_cfg->block_pred_enable =
		intel_dp->dsc_dpcd[DP_DSC_BLK_PREDICTION_SUPPORT - DP_DSC_SUPPORT] &
		DP_DSC_BLK_PREDICTION_IS_SUPPORTED;

	return drm_dsc_compute_rc_parameters(vdsc_cfg);
}
1296
/*
 * Compute a DSC-compressed link configuration when the uncompressed path
 * failed or DSC is forced. Picks max link rate/lane count from @limits,
 * derives compressed bpp and slice count from sink caps, and validates
 * the DSC engine split requirements. Returns 0 on success.
 */
static int intel_dp_dsc_compute_config(struct intel_dp *intel_dp,
				       struct intel_crtc_state *pipe_config,
				       struct drm_connector_state *conn_state,
				       struct link_config_limits *limits)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	u8 dsc_max_bpc;
	int pipe_bpp;
	int ret;

	/* FEC is required for DSC on DP (but not on eDP). */
	pipe_config->fec_enable = !intel_dp_is_edp(intel_dp) &&
		intel_dp_supports_fec(intel_dp, pipe_config);

	if (!intel_dp_supports_dsc(intel_dp, pipe_config))
		return -EINVAL;

	/* Source bpc cap: 12 on gen12+, 10 before that. */
	if (INTEL_GEN(dev_priv) >= 12)
		dsc_max_bpc = min_t(u8, 12, conn_state->max_requested_bpc);
	else
		dsc_max_bpc = min_t(u8, 10,
				    conn_state->max_requested_bpc);

	pipe_bpp = intel_dp_dsc_compute_bpp(intel_dp, dsc_max_bpc);

	/* Minimum DSC input is 8 bpc (24 bpp). */
	if (pipe_bpp < 8 * 3) {
		drm_dbg_kms(&dev_priv->drm,
			    "No DSC support for less than 8bpc\n");
		return -EINVAL;
	}

	/*
	 * DSC is used as a fallback, so simply run the link at the
	 * maximum rate and lane count allowed by @limits.
	 */
	pipe_config->pipe_bpp = pipe_bpp;
	pipe_config->port_clock = intel_dp->common_rates[limits->max_clock];
	pipe_config->lane_count = limits->max_lane_count;

	if (intel_dp_is_edp(intel_dp)) {
		/* eDP: sink reports output bpp directly (in 1/16 bpp units). */
		pipe_config->dsc.compressed_bpp =
			min_t(u16, drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4,
			      pipe_config->pipe_bpp);
		pipe_config->dsc.slice_count =
			drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
							true);
	} else {
		u16 dsc_max_output_bpp;
		u8 dsc_dp_slice_count;

		/* DP: derive bpp/slice count from link and mode parameters. */
		dsc_max_output_bpp =
			intel_dp_dsc_get_output_bpp(dev_priv,
						    pipe_config->port_clock,
						    pipe_config->lane_count,
						    adjusted_mode->crtc_clock,
						    adjusted_mode->crtc_hdisplay,
						    pipe_config->bigjoiner);
		dsc_dp_slice_count =
			intel_dp_dsc_get_slice_count(intel_dp,
						     adjusted_mode->crtc_clock,
						     adjusted_mode->crtc_hdisplay,
						     pipe_config->bigjoiner);
		if (!dsc_max_output_bpp || !dsc_dp_slice_count) {
			drm_dbg_kms(&dev_priv->drm,
				    "Compressed BPP/Slice Count not supported\n");
			return -EINVAL;
		}
		pipe_config->dsc.compressed_bpp = min_t(u16,
							dsc_max_output_bpp >> 4,
							pipe_config->pipe_bpp);
		pipe_config->dsc.slice_count = dsc_dp_slice_count;
	}

	/*
	 * Pixel clocks above the cdclk (or bigjoiner use) need the stream
	 * split across two VDSC engines, which requires >= 2 slices.
	 */
	if (adjusted_mode->crtc_clock > dev_priv->max_cdclk_freq ||
	    pipe_config->bigjoiner) {
		if (pipe_config->dsc.slice_count < 2) {
			drm_dbg_kms(&dev_priv->drm,
				    "Cannot split stream to use 2 VDSC instances\n");
			return -EINVAL;
		}

		pipe_config->dsc.dsc_split = true;
	}

	ret = intel_dp_dsc_compute_params(&dig_port->base, pipe_config);
	if (ret < 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "Cannot compute valid DSC parameters for Input Bpp = %d "
			    "Compressed BPP = %d\n",
			    pipe_config->pipe_bpp,
			    pipe_config->dsc.compressed_bpp);
		return ret;
	}

	pipe_config->dsc.compression_enable = true;
	drm_dbg_kms(&dev_priv->drm, "DP DSC computed with Input Bpp = %d "
		    "Compressed Bpp = %d Slice Count = %d\n",
		    pipe_config->pipe_bpp,
		    pipe_config->dsc.compressed_bpp,
		    pipe_config->dsc.slice_count);

	return 0;
}
1409
/*
 * Top-level link configuration: establish the clock/lane/bpp search
 * limits, run the uncompressed search (fast path for eDP, wide path
 * otherwise), and fall back to DSC when that fails, DSC is forced, or
 * bigjoiner is needed. Returns 0 on success, negative error otherwise.
 */
static int
intel_dp_compute_link_config(struct intel_encoder *encoder,
			     struct intel_crtc_state *pipe_config,
			     struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct link_config_limits limits;
	int common_len;
	int ret;

	common_len = intel_dp_common_len_rate_limit(intel_dp,
						    intel_dp->max_link_rate);

	/* No common rates between source and sink would be a driver bug. */
	drm_WARN_ON(encoder->base.dev, common_len <= 0);

	limits.min_clock = 0;
	limits.max_clock = common_len - 1;

	limits.min_lane_count = 1;
	limits.max_lane_count = intel_dp_max_lane_count(intel_dp);

	limits.min_bpp = intel_dp_min_bpp(pipe_config->output_format);
	limits.max_bpp = intel_dp_max_bpp(intel_dp, pipe_config);

	if (intel_dp->use_max_params) {
		/*
		 * Pin the search to the maximum lane count and link rate
		 * (bpp may still vary), for sinks that require training
		 * at their advertised maximum parameters.
		 */
		limits.min_lane_count = limits.max_lane_count;
		limits.min_clock = limits.max_clock;
	}

	intel_dp_adjust_compliance_config(intel_dp, pipe_config, &limits);

	drm_dbg_kms(&i915->drm, "DP link computation with max lane count %i "
		    "max rate %d max bpp %d pixel clock %iKHz\n",
		    limits.max_lane_count,
		    intel_dp->common_rates[limits.max_clock],
		    limits.max_bpp, adjusted_mode->crtc_clock);

	/* Modes beyond the dotclock or 5120 pixels wide need bigjoiner. */
	if ((adjusted_mode->crtc_clock > i915->max_dotclk_freq ||
	     adjusted_mode->crtc_hdisplay > 5120) &&
	    intel_dp_can_bigjoiner(intel_dp))
		pipe_config->bigjoiner = true;

	if (intel_dp_is_edp(intel_dp))
		/* eDP: keep the panel's preferred link params, vary bpp. */
		ret = intel_dp_compute_link_config_fast(intel_dp, pipe_config, &limits);
	else
		/* External DP: full "wide" search over bpp/lanes/rate. */
		ret = intel_dp_compute_link_config_wide(intel_dp, pipe_config, &limits);

	/* Fall back to (or force) DSC compression. */
	drm_dbg_kms(&i915->drm, "Force DSC en = %d\n", intel_dp->force_dsc_en);
	if (ret || intel_dp->force_dsc_en || pipe_config->bigjoiner) {
		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits);
		if (ret < 0)
			return ret;
	}

	if (pipe_config->dsc.compression_enable) {
		drm_dbg_kms(&i915->drm,
			    "DP lane count %d clock %d Input bpp %d Compressed bpp %d\n",
			    pipe_config->lane_count, pipe_config->port_clock,
			    pipe_config->pipe_bpp,
			    pipe_config->dsc.compressed_bpp);

		drm_dbg_kms(&i915->drm,
			    "DP link rate required %i available %i\n",
			    intel_dp_link_required(adjusted_mode->crtc_clock,
						   pipe_config->dsc.compressed_bpp),
			    intel_dp_max_data_rate(pipe_config->port_clock,
						   pipe_config->lane_count));
	} else {
		drm_dbg_kms(&i915->drm, "DP lane count %d clock %d bpp %d\n",
			    pipe_config->lane_count, pipe_config->port_clock,
			    pipe_config->pipe_bpp);

		drm_dbg_kms(&i915->drm,
			    "DP link rate required %i available %i\n",
			    intel_dp_link_required(adjusted_mode->crtc_clock,
						   pipe_config->pipe_bpp),
			    intel_dp_max_data_rate(pipe_config->port_clock,
						   pipe_config->lane_count));
	}
	return 0;
}
1517
1518bool intel_dp_limited_color_range(const struct intel_crtc_state *crtc_state,
1519 const struct drm_connector_state *conn_state)
1520{
1521 const struct intel_digital_connector_state *intel_conn_state =
1522 to_intel_digital_connector_state(conn_state);
1523 const struct drm_display_mode *adjusted_mode =
1524 &crtc_state->hw.adjusted_mode;
1525
1526
1527
1528
1529
1530
1531
1532
1533 if (crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
1534 return false;
1535
1536 if (intel_conn_state->broadcast_rgb == INTEL_BROADCAST_RGB_AUTO) {
1537
1538
1539
1540
1541
1542 return crtc_state->pipe_bpp != 18 &&
1543 drm_default_rgb_quant_range(adjusted_mode) ==
1544 HDMI_QUANTIZATION_RANGE_LIMITED;
1545 } else {
1546 return intel_conn_state->broadcast_rgb ==
1547 INTEL_BROADCAST_RGB_LIMITED;
1548 }
1549}
1550
1551static bool intel_dp_port_has_audio(struct drm_i915_private *dev_priv,
1552 enum port port)
1553{
1554 if (IS_G4X(dev_priv))
1555 return false;
1556 if (INTEL_GEN(dev_priv) < 12 && port == PORT_A)
1557 return false;
1558
1559 return true;
1560}
1561
/*
 * Fill the pixel format / colorimetry fields of a VSC SDP from the crtc
 * and connector state. Uses SDP revision 0x5, length 0x13 — the variant
 * carrying Pixel Encoding/Colorimetry information (DP 1.4a).
 */
static void intel_dp_compute_vsc_colorimetry(const struct intel_crtc_state *crtc_state,
					     const struct drm_connector_state *conn_state,
					     struct drm_dp_vsc_sdp *vsc)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	/* Revision/length for the colorimetry-capable VSC SDP format. */
	vsc->revision = 0x5;
	vsc->length = 0x13;

	/* Map the pipe output format to the DP pixel format encoding. */
	switch (crtc_state->output_format) {
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		vsc->pixelformat = DP_PIXELFORMAT_YUV444;
		break;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		vsc->pixelformat = DP_PIXELFORMAT_YUV420;
		break;
	case INTEL_OUTPUT_FORMAT_RGB:
	default:
		vsc->pixelformat = DP_PIXELFORMAT_RGB;
	}

	/* Translate the connector's Colorspace property to DP colorimetry. */
	switch (conn_state->colorspace) {
	case DRM_MODE_COLORIMETRY_BT709_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_709:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_709;
		break;
	case DRM_MODE_COLORIMETRY_SYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_SYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_OPYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_OPYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_CYCC;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_RGB:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_RGB;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_YCC;
		break;
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_D65:
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_THEATER:
		vsc->colorimetry = DP_COLORIMETRY_DCI_P3_RGB;
		break;
	default:
		/*
		 * No (or unsupported) colorspace property set: use BT.709
		 * for YCbCr 4:2:0 output, otherwise the sRGB default.
		 */
		if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
			vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		else
			vsc->colorimetry = DP_COLORIMETRY_DEFAULT;
		break;
	}

	vsc->bpc = crtc_state->pipe_bpp / 3;

	/* 6bpc is only expected together with RGB output. */
	drm_WARN_ON(&dev_priv->drm,
		    vsc->bpc == 6 && vsc->pixelformat != DP_PIXELFORMAT_RGB);

	/* Dynamic range / content type are left at their defaults. */
	vsc->dynamic_range = DP_DYNAMIC_RANGE_CTA;
	vsc->content_type = DP_CONTENT_TYPE_NOT_DEFINED;
}
1641
/*
 * Compute the VSC SDP for a non-PSR pipe and mark it enabled in the
 * infoframes state, when one is needed for the output format.
 */
static void intel_dp_compute_vsc_sdp(struct intel_dp *intel_dp,
				     struct intel_crtc_state *crtc_state,
				     const struct drm_connector_state *conn_state)
{
	struct drm_dp_vsc_sdp *vsc = &crtc_state->infoframes.vsc;

	/* With PSR enabled the VSC SDP is produced by the PSR code instead. */
	if (crtc_state->has_psr)
		return;

	if (!intel_dp_needs_vsc_sdp(crtc_state, conn_state))
		return;

	crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
	vsc->sdp_type = DP_SDP_VSC;
	intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
					 &crtc_state->infoframes.vsc);
}
1660
/*
 * Build the VSC SDP used while PSR is active. The revision/length pair
 * selects the SDP variant: full colorimetry (0x5/0x13) when PSR2 needs
 * it, plain PSR2 (0x4/0xe), or PSR1 (0x2/0x8).
 */
void intel_dp_compute_psr_vsc_sdp(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state,
				  struct drm_dp_vsc_sdp *vsc)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	vsc->sdp_type = DP_SDP_VSC;

	if (dev_priv->psr.psr2_enabled) {
		if (dev_priv->psr.colorimetry_support &&
		    intel_dp_needs_vsc_sdp(crtc_state, conn_state)) {
			/* PSR2 + colorimetry: sets revision 0x5, length 0x13. */
			intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
							 vsc);
		} else {
			/* PSR2 without colorimetry information. */
			vsc->revision = 0x4;
			vsc->length = 0xe;
		}
	} else {
		/* PSR1. */
		vsc->revision = 0x2;
		vsc->length = 0x8;
	}
}
1696
1697static void
1698intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp *intel_dp,
1699 struct intel_crtc_state *crtc_state,
1700 const struct drm_connector_state *conn_state)
1701{
1702 int ret;
1703 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1704 struct hdmi_drm_infoframe *drm_infoframe = &crtc_state->infoframes.drm.drm;
1705
1706 if (!conn_state->hdr_output_metadata)
1707 return;
1708
1709 ret = drm_hdmi_infoframe_set_hdr_metadata(drm_infoframe, conn_state);
1710
1711 if (ret) {
1712 drm_dbg_kms(&dev_priv->drm, "couldn't set HDR metadata in infoframe\n");
1713 return;
1714 }
1715
1716 crtc_state->infoframes.enable |=
1717 intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA);
1718}
1719
/*
 * Enable seamless DRRS (downclocked refresh rate switching) when the
 * panel provides a downclock mode and nothing else rules it out, and
 * precompute the M2/N2 link values for the downclocked mode.
 */
static void
intel_dp_drrs_compute_config(struct intel_dp *intel_dp,
			     struct intel_crtc_state *pipe_config,
			     int output_bpp, bool constant_n)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* VRR and DRRS are mutually exclusive. */
	if (pipe_config->vrr.enable)
		return;

	/* DRRS is not used together with PSR. */
	if (pipe_config->has_psr)
		return;

	/* Need a panel downclock mode and seamless DRRS support. */
	if (!intel_connector->panel.downclock_mode ||
	    dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
		return;

	pipe_config->has_drrs = true;
	intel_link_compute_m_n(output_bpp, pipe_config->lane_count,
			       intel_connector->panel.downclock_mode->clock,
			       pipe_config->port_clock, &pipe_config->dp_m2_n2,
			       constant_n, pipe_config->fec_enable);
}
1750
/*
 * Encoder ->compute_config() hook for DP/eDP: validates the mode,
 * resolves audio/panel-fitting/output format, computes the link
 * configuration (possibly with DSC), and derives M/N values plus the
 * VRR/PSR/DRRS/SDP state. Returns 0 on success or a negative error.
 */
int
intel_dp_compute_config(struct intel_encoder *encoder,
			struct intel_crtc_state *pipe_config,
			struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	bool constant_n = drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CONSTANT_N);
	int ret = 0, output_bpp;

	/* Non-DDI PCH platforms route everything but port A through the PCH. */
	if (HAS_PCH_SPLIT(dev_priv) && !HAS_DDI(dev_priv) && port != PORT_A)
		pipe_config->has_pch_encoder = true;

	pipe_config->output_format = intel_dp_output_format(&intel_connector->base,
							    adjusted_mode);

	/* YCbCr 4:2:0 output needs the pipe scaler set up. */
	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
		ret = intel_pch_panel_fitting(pipe_config, conn_state);
		if (ret)
			return ret;
	}

	/* Resolve audio: hardware capability, then the force_audio property. */
	if (!intel_dp_port_has_audio(dev_priv, port))
		pipe_config->has_audio = false;
	else if (intel_conn_state->force_audio == HDMI_AUDIO_AUTO)
		pipe_config->has_audio = intel_dp->has_audio;
	else
		pipe_config->has_audio = intel_conn_state->force_audio == HDMI_AUDIO_ON;

	/* eDP: always drive the panel's fixed mode and scale to fit. */
	if (intel_dp_is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
		intel_fixed_panel_mode(intel_connector->panel.fixed_mode,
				       adjusted_mode);

		if (HAS_GMCH(dev_priv))
			ret = intel_gmch_panel_fitting(pipe_config, conn_state);
		else
			ret = intel_pch_panel_fitting(pipe_config, conn_state);
		if (ret)
			return ret;
	}

	/* Reject mode flags this hardware cannot produce on DP. */
	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	if (HAS_GMCH(dev_priv) &&
	    adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
		return -EINVAL;

	if (intel_dp_hdisplay_bad(dev_priv, adjusted_mode->crtc_hdisplay))
		return -EINVAL;

	ret = intel_dp_compute_link_config(encoder, pipe_config, conn_state);
	if (ret < 0)
		return ret;

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	/* Link payload bpp: compressed bpp when DSC is in use. */
	if (pipe_config->dsc.compression_enable)
		output_bpp = pipe_config->dsc.compressed_bpp;
	else
		output_bpp = intel_dp_output_bpp(pipe_config->output_format,
						 pipe_config->pipe_bpp);

	intel_link_compute_m_n(output_bpp,
			       pipe_config->lane_count,
			       adjusted_mode->crtc_clock,
			       pipe_config->port_clock,
			       &pipe_config->dp_m_n,
			       constant_n, pipe_config->fec_enable);

	/* Pre-DDI platforms derive the DPLL dividers here. */
	if (!HAS_DDI(dev_priv))
		intel_dp_set_clock(encoder, pipe_config);

	/* Order matters: PSR state feeds the VSC SDP and DRRS decisions. */
	intel_vrr_compute_config(pipe_config, conn_state);
	intel_psr_compute_config(intel_dp, pipe_config);
	intel_dp_drrs_compute_config(intel_dp, pipe_config, output_bpp,
				     constant_n);
	intel_dp_compute_vsc_sdp(intel_dp, pipe_config, conn_state);
	intel_dp_compute_hdr_metadata_infoframe_sdp(intel_dp, pipe_config, conn_state);

	return 0;
}
1842
/*
 * Record the link rate and lane count to be trained, and mark the link
 * as untrained so the next enable performs link training.
 */
void intel_dp_set_link_params(struct intel_dp *intel_dp,
			      int link_rate, int lane_count)
{
	intel_dp->link_trained = false;
	intel_dp->link_rate = link_rate;
	intel_dp->lane_count = lane_count;
}
1850
/*
 * Prepare the DP port register value (intel_dp->DP) for enabling:
 * voltage/pre-emphasis defaults, lane count, sync polarity, enhanced
 * framing and pipe selection. The register layout differs between
 * IVB port A (CPU eDP), CPT PCH ports (where sync/framing live in
 * TRANS_DP_CTL instead) and the older g4x-style layout.
 */
static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/* Start from the current register, keeping only the detect bit. */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Initial training levels: lowest voltage swing / no pre-emphasis. */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		/* IVB CPU eDP: CPT-style training bits, IVB pipe select. */
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		/* CPT PCH ports: enhanced framing lives in TRANS_DP_CTL. */
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		/* g4x-style layout (also VLV/CHV). */
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}
1933
1934
1935
/* Enable the eDP panel backlight (no-op for non-eDP connectors). */
void intel_edp_backlight_on(const struct intel_crtc_state *crtc_state,
			    const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(conn_state->best_encoder));
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(&i915->drm, "\n");

	/* Program the backlight controller, then flip the PPS enable bit. */
	intel_panel_enable_backlight(crtc_state, conn_state);
	intel_pps_backlight_on(intel_dp);
}
1950
1951
/* Disable the eDP panel backlight (no-op for non-eDP connectors). */
void intel_edp_backlight_off(const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(old_conn_state->best_encoder));
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(&i915->drm, "\n");

	/* Reverse order of backlight_on: PPS off first, then controller. */
	intel_pps_backlight_off(intel_dp);
	intel_panel_disable_backlight(old_conn_state);
}
1965
/* Warn if the DP port enable bit does not match the expected @state. */
static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			onoff(state), onoff(cur_state));
}
1977#define assert_dp_port_disabled(d) assert_dp_port((d), false)
1978
/* Warn if the eDP PLL enable bit in DP_A does not match @state. */
static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
1987#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
1988#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
1989
/*
 * Enable the ILK-style eDP PLL: program the frequency select first,
 * let it settle, then set the enable bit. Pipe, port and PLL must all
 * be off on entry (asserted below).
 */
static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	/* Select 162 vs 270 MHz link clock before enabling the PLL. */
	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * On gen5 additionally wait for a vblank on the other pipe before
	 * enabling the PLL (workaround carried over from the original
	 * code; exact hardware rationale not visible here).
	 */
	if (IS_GEN(dev_priv, 5))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}
2029
/*
 * Disable the ILK-style eDP PLL. Pipe and port must already be off,
 * and the PLL must currently be on (asserted below).
 */
static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}
2048
/*
 * True for DPCD 1.1 branch devices whose downstream port provides HPD:
 * such sinks must be kept in D0 power state, otherwise downstream
 * hotplug events could be missed (DPCD 1.2+ handles this differently).
 */
static bool downstream_hpd_needs_d0(struct intel_dp *intel_dp)
{
	return intel_dp->dpcd[DP_DPCD_REV] == 0x11 &&
		drm_dp_is_branch(intel_dp->dpcd) &&
		intel_dp->downstream_ports[0] & DP_DS_PORT_HPD;
}
2063
/*
 * Enable or disable DSC decompression on the sink via DP_DSC_ENABLE.
 * No-op unless the crtc state actually uses DSC; a failed AUX write is
 * only logged.
 */
void intel_dp_sink_set_decompression_state(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state,
					   bool enable)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int ret;

	if (!crtc_state->dsc.compression_enable)
		return;

	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_DSC_ENABLE,
				 enable ? DP_DECOMPRESSION_EN : 0);
	if (ret < 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to %s sink decompression state\n",
			    enable ? "enable" : "disable");
}
2081
/*
 * Write the source OUI to the eDP sink. With @careful set, first read
 * back the current value and skip the write if it already matches —
 * presumably to avoid redundant writes the panel may react to (TODO:
 * confirm rationale against callers).
 */
static void
intel_edp_init_source_oui(struct intel_dp *intel_dp, bool careful)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 oui[] = { 0x00, 0xaa, 0x01 };
	u8 buf[3] = { 0 };

	if (careful) {
		if (drm_dp_dpcd_read(&intel_dp->aux, DP_SOURCE_OUI, buf, sizeof(buf)) < 0)
			drm_err(&i915->drm, "Failed to read source OUI\n");

		if (memcmp(oui, buf, sizeof(oui)) == 0)
			return;
	}

	if (drm_dp_dpcd_write(&intel_dp->aux, DP_SOURCE_OUI, oui, sizeof(oui)) < 0)
		drm_err(&i915->drm, "Failed to write source OUI\n");
}
2104
2105
/*
 * Set the sink power state via DP_SET_POWER. Going to D3 is skipped for
 * sinks that must stay in D0 for downstream HPD. Going to D0 also
 * resumes any LSPCON, rewrites the source OUI for eDP, and retries the
 * write a few times since a waking sink may not ack immediately.
 */
void intel_dp_set_power(struct intel_dp *intel_dp, u8 mode)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	int ret, i;

	/* DP_SET_POWER does not exist before DPCD 1.1. */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DP_SET_POWER_D0) {
		if (downstream_hpd_needs_d0(intel_dp))
			return;

		ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
	} else {
		struct intel_lspcon *lspcon = dp_to_lspcon(intel_dp);

		lspcon_resume(dp_to_dig_port(intel_dp));

		/* Re-init the source OUI early on wake-up (eDP only). */
		if (intel_dp_is_edp(intel_dp))
			intel_edp_init_source_oui(intel_dp, false);

		/*
		 * A just-woken sink may not reply right away; retry up to
		 * three times with a short sleep in between.
		 */
		for (i = 0; i < 3; i++) {
			ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
			if (ret == 1)
				break;
			msleep(1);
		}

		if (ret == 1 && lspcon->active)
			lspcon_wait_pcon_mode(lspcon);
	}

	if (ret != 1)
		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] Set power to %s failed\n",
			    encoder->base.base.id, encoder->base.name,
			    mode == DP_SET_POWER_D0 ? "D0" : "D3");
}
2150
2151static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
2152 enum port port, enum pipe *pipe)
2153{
2154 enum pipe p;
2155
2156 for_each_pipe(dev_priv, p) {
2157 u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));
2158
2159 if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
2160 *pipe = p;
2161 return true;
2162 }
2163 }
2164
2165 drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
2166 port_name(port));
2167
2168
2169 *pipe = PIPE_A;
2170
2171 return false;
2172}
2173
/*
 * Read whether the DP port at @dp_reg is enabled and which pipe drives
 * it. The pipe-select encoding varies by platform (IVB port A, CPT PCH
 * ports, CHV, or the classic layout). On CPT the result is additionally
 * gated on a transcoder actually selecting this port.
 */
bool intel_dp_port_enabled(struct drm_i915_private *dev_priv,
			   i915_reg_t dp_reg, enum port port,
			   enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* Decode the pipe select field per platform. */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}
2197
/*
 * Encoder ->get_hw_state() hook: report whether the port is enabled and
 * on which pipe, while holding a display power reference so the
 * register read is valid. Returns false if the power domain is off.
 */
static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = intel_dp_port_enabled(dev_priv, intel_dp->output_reg,
				    encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}
2218
/*
 * Encoder ->get_config() hook: read back the current pipe configuration
 * (audio, sync flags, color range, lane count, M/N, port clock) from
 * the hardware into @pipe_config for state verification.
 */
static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	/* Sync polarity lives in TRANS_DP_CTL on CPT PCH ports. */
	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	intel_dp_get_m_n(crtc, pipe_config);

	/* Port A's link clock comes from the eDP PLL frequency select. */
	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	/*
	 * If the BIOS programmed the pipe with more bpp than the VBT
	 * claims the eDP panel supports, trust the hardware and raise
	 * the cached VBT limit so our own state checks stay consistent.
	 */
	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}
2304
2305static bool
2306intel_dp_get_dpcd(struct intel_dp *intel_dp);
2307
2308
2309
2310
2311
2312
2313
2314
2315
/*
 * Sync driver software state to the hardware state taken over from the
 * BIOS: make sure the DPCD has been read at least once, then derive the
 * maximum common link rate/lane count from it.
 */
void intel_dp_sync_state(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	/* Only read the DPCD if it hasn't been cached yet (rev 0 = unread). */
	if (intel_dp->dpcd[DP_DPCD_REV] == 0)
		intel_dp_get_dpcd(intel_dp);

	intel_dp->max_link_lane_count = intel_dp_max_common_lane_count(intel_dp);
	intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
}
2331
/*
 * Decide whether the BIOS-programmed state can be taken over without a
 * full modeset. Returns false (forcing a full modeset) when the link
 * rate is not one this source supports, when DSC is enabled (its state
 * readout is incomplete), or when PSR state must be computed.
 */
bool intel_dp_initial_fastset_check(struct intel_encoder *encoder,
				    struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	/* The BIOS may have trained a rate this source doesn't list. */
	if (intel_dp_rate_index(intel_dp->source_rates, intel_dp->num_source_rates,
				crtc_state->port_clock) < 0) {
		drm_dbg_kms(&i915->drm, "Forcing full modeset due to unsupported link rate\n");
		crtc_state->uapi.connectors_changed = true;
		return false;
	}

	/* DSC hardware state cannot be fully read back yet. */
	if (crtc_state->dsc.compression_enable) {
		drm_dbg_kms(&i915->drm, "Forcing full modeset due to DSC being enabled\n");
		crtc_state->uapi.mode_changed = true;
		return false;
	}

	if (CAN_PSR(i915) && intel_dp_is_edp(intel_dp)) {
		drm_dbg_kms(&i915->drm, "Forcing full modeset to compute PSR state\n");
		crtc_state->uapi.mode_changed = true;
		return false;
	}

	return true;
}
2370
/*
 * Common DP disable path: stop audio first, then power the panel down in
 * the required order (backlight off, sink to D3, panel power off) while
 * holding VDD so AUX transfers keep working throughout.
 */
static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
	/* Any trained PCON FRL link goes down with the port. */
	intel_dp->frl.is_trained = false;
	intel_dp->frl.trained_rate_gbps = 0;
}
2393
/* g4x: the full port teardown happens in the disable hook. */
static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}
2401
/* VLV/CHV: common disable; link down is deferred to post_disable. */
static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}
2409
static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}
2430
/* VLV: drop the link after the pipe has already been disabled. */
static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}
2438
/* CHV: link down, then put the PHY data lanes back into reset. */
static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}
2455
/*
 * Select the link training pattern in the CPT/PPT DP port register
 * (the _CPT variants of the train bits) and flush it to the hardware.
 */
static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 *DP = &intel_dp->DP;

	*DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		*DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		*DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		*DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	case DP_TRAINING_PATTERN_3:
		/* No TPS3 support in this hardware; TPS2 is the closest. */
		drm_dbg_kms(&dev_priv->drm,
			    "TPS3 not supported, using TPS2 instead\n");
		*DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
2486
/*
 * Read and cache the PCON's DSC encoder capability block
 * (DP_PCON_DSC_ENCODER) for later use when configuring DSC pass-through.
 */
static void intel_dp_get_pcon_dsc_cap(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	/* Clear the cached register set to avoid using stale values */
	memset(intel_dp->pcon_dsc_dpcd, 0, sizeof(intel_dp->pcon_dsc_dpcd));

	if (drm_dp_dpcd_read(&intel_dp->aux, DP_PCON_DSC_ENCODER,
			     intel_dp->pcon_dsc_dpcd,
			     sizeof(intel_dp->pcon_dsc_dpcd)) < 0)
		drm_err(&i915->drm, "Failed to read DPCD register 0x%x\n",
			DP_PCON_DSC_ENCODER);

	drm_dbg_kms(&i915->drm, "PCON ENCODER DSC DPCD: %*ph\n",
		    (int)sizeof(intel_dp->pcon_dsc_dpcd), intel_dp->pcon_dsc_dpcd);
}
2504
2505static int intel_dp_pcon_get_frl_mask(u8 frl_bw_mask)
2506{
2507 int bw_gbps[] = {9, 18, 24, 32, 40, 48};
2508 int i;
2509
2510 for (i = ARRAY_SIZE(bw_gbps) - 1; i >= 0; i--) {
2511 if (frl_bw_mask & (1 << i))
2512 return bw_gbps[i];
2513 }
2514 return 0;
2515}
2516
2517static int intel_dp_pcon_set_frl_mask(int max_frl)
2518{
2519 switch (max_frl) {
2520 case 48:
2521 return DP_PCON_FRL_BW_MASK_48GBPS;
2522 case 40:
2523 return DP_PCON_FRL_BW_MASK_40GBPS;
2524 case 32:
2525 return DP_PCON_FRL_BW_MASK_32GBPS;
2526 case 24:
2527 return DP_PCON_FRL_BW_MASK_24GBPS;
2528 case 18:
2529 return DP_PCON_FRL_BW_MASK_18GBPS;
2530 case 9:
2531 return DP_PCON_FRL_BW_MASK_9GBPS;
2532 }
2533
2534 return 0;
2535}
2536
2537static int intel_dp_hdmi_sink_max_frl(struct intel_dp *intel_dp)
2538{
2539 struct intel_connector *intel_connector = intel_dp->attached_connector;
2540 struct drm_connector *connector = &intel_connector->base;
2541 int max_frl_rate;
2542 int max_lanes, rate_per_lane;
2543 int max_dsc_lanes, dsc_rate_per_lane;
2544
2545 max_lanes = connector->display_info.hdmi.max_lanes;
2546 rate_per_lane = connector->display_info.hdmi.max_frl_rate_per_lane;
2547 max_frl_rate = max_lanes * rate_per_lane;
2548
2549 if (connector->display_info.hdmi.dsc_cap.v_1p2) {
2550 max_dsc_lanes = connector->display_info.hdmi.dsc_cap.max_lanes;
2551 dsc_rate_per_lane = connector->display_info.hdmi.dsc_cap.max_frl_rate_per_lane;
2552 if (max_dsc_lanes && dsc_rate_per_lane)
2553 max_frl_rate = min(max_frl_rate, max_dsc_lanes * dsc_rate_per_lane);
2554 }
2555
2556 return max_frl_rate;
2557}
2558
/*
 * Train the PCON -> HDMI FRL link: reset any previous FRL config, pick
 * the highest bandwidth both the PCON and the HDMI sink support, run the
 * sequential/normal-mode training sequence, and record the trained rate.
 * Returns 0 on success or a negative error code.
 */
static int intel_dp_pcon_start_frl_training(struct intel_dp *intel_dp)
{
#define PCON_EXTENDED_TRAIN_MODE (1 > 0)
#define PCON_CONCURRENT_MODE (1 > 0)
#define PCON_SEQUENTIAL_MODE !PCON_CONCURRENT_MODE
#define PCON_NORMAL_TRAIN_MODE !PCON_EXTENDED_TRAIN_MODE
#define TIMEOUT_FRL_READY_MS 500
#define TIMEOUT_HDMI_LINK_ACTIVE_MS 1000

	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int max_frl_bw, max_pcon_frl_bw, max_edid_frl_bw, ret;
	u8 max_frl_bw_mask = 0, frl_trained_mask;
	bool is_active;

	ret = drm_dp_pcon_reset_frl_config(&intel_dp->aux);
	if (ret < 0)
		return ret;

	max_pcon_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
	drm_dbg(&i915->drm, "PCON max rate = %d Gbps\n", max_pcon_frl_bw);

	max_edid_frl_bw = intel_dp_hdmi_sink_max_frl(intel_dp);
	drm_dbg(&i915->drm, "Sink max rate from EDID = %d Gbps\n", max_edid_frl_bw);

	/* Train at the highest rate both the PCON and the sink can do. */
	max_frl_bw = min(max_edid_frl_bw, max_pcon_frl_bw);

	if (max_frl_bw <= 0)
		return -EINVAL;

	ret = drm_dp_pcon_frl_prepare(&intel_dp->aux, false);
	if (ret < 0)
		return ret;

	/* Wait for the PCON to report FRL-ready before configuring. */
	wait_for(is_active = drm_dp_pcon_is_frl_ready(&intel_dp->aux) == true, TIMEOUT_FRL_READY_MS);

	if (!is_active)
		return -ETIMEDOUT;

	max_frl_bw_mask = intel_dp_pcon_set_frl_mask(max_frl_bw);
	ret = drm_dp_pcon_frl_configure_1(&intel_dp->aux, max_frl_bw, PCON_SEQUENTIAL_MODE);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_configure_2(&intel_dp->aux, max_frl_bw_mask, PCON_NORMAL_TRAIN_MODE);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_enable(&intel_dp->aux);
	if (ret < 0)
		return ret;

	/*
	 * Wait for FRL training to complete:
	 * check that the HDMI link is up and active.
	 */
	wait_for(is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux) == true, TIMEOUT_HDMI_LINK_ACTIVE_MS);

	if (!is_active)
		return -ETIMEDOUT;

	/* Verify the HDMI link configuration actually shows FRL mode. */
	if (drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, &frl_trained_mask) !=
	    DP_PCON_HDMI_MODE_FRL) {
		drm_dbg(&i915->drm, "HDMI couldn't be trained in FRL Mode\n");
		return -EINVAL;
	}
	drm_dbg(&i915->drm, "MAX_FRL_MASK = %u, FRL_TRAINED_MASK = %u\n", max_frl_bw_mask, frl_trained_mask);

	intel_dp->frl.trained_rate_gbps = intel_dp_pcon_get_frl_mask(frl_trained_mask);
	intel_dp->frl.is_trained = true;
	drm_dbg(&i915->drm, "FRL trained with : %d Gbps\n", intel_dp->frl.trained_rate_gbps);

	return 0;
}
2630
2631static bool intel_dp_is_hdmi_2_1_sink(struct intel_dp *intel_dp)
2632{
2633 if (drm_dp_is_branch(intel_dp->dpcd) &&
2634 intel_dp->has_hdmi_sink &&
2635 intel_dp_hdmi_sink_max_frl(intel_dp) > 0)
2636 return true;
2637
2638 return false;
2639}
2640
/*
 * Train the PCON FRL link to the HDMI sink if needed; on training
 * failure fall back to (and verify) TMDS mode on the PCON.
 */
void intel_dp_check_frl_training(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* Nothing to do without an HDMI 2.1 sink, or if already trained. */
	if (!intel_dp_is_hdmi_2_1_sink(intel_dp) ||
	    intel_dp->frl.is_trained)
		return;

	if (intel_dp_pcon_start_frl_training(intel_dp) < 0) {
		int ret, mode;

		drm_dbg(&dev_priv->drm, "Couldnt set FRL mode, continuing with TMDS mode\n");
		ret = drm_dp_pcon_reset_frl_config(&intel_dp->aux);
		mode = drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, NULL);

		if (ret < 0 || mode != DP_PCON_HDMI_MODE_TMDS)
			drm_dbg(&dev_priv->drm, "Issue with PCON, cannot set TMDS mode\n");
	} else {
		drm_dbg(&dev_priv->drm, "FRL training Completed\n");
	}
}
2663
2664static int
2665intel_dp_pcon_dsc_enc_slice_height(const struct intel_crtc_state *crtc_state)
2666{
2667 int vactive = crtc_state->hw.adjusted_mode.vdisplay;
2668
2669 return intel_hdmi_dsc_get_slice_height(vactive);
2670}
2671
/*
 * Number of DSC slices for the PCON encoder: the most restrictive of the
 * PCON's and the HDMI sink's slice capabilities for this mode.
 */
static int
intel_dp_pcon_dsc_enc_slices(struct intel_dp *intel_dp,
			     const struct intel_crtc_state *crtc_state)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_connector *connector = &intel_connector->base;
	int hdmi_throughput = connector->display_info.hdmi.dsc_cap.clk_per_slice;
	int hdmi_max_slices = connector->display_info.hdmi.dsc_cap.max_slices;
	int pcon_max_slices = drm_dp_pcon_dsc_max_slices(intel_dp->pcon_dsc_dpcd);
	int pcon_max_slice_width = drm_dp_pcon_dsc_max_slice_width(intel_dp->pcon_dsc_dpcd);

	return intel_hdmi_dsc_get_num_slices(crtc_state, pcon_max_slices,
					     pcon_max_slice_width,
					     hdmi_max_slices, hdmi_throughput);
}
2687
/*
 * Target DSC bits-per-pixel for the PCON encoder, honouring the PCON's
 * fractional-bpp increment and the HDMI sink's all-bpp / chunk-size
 * limits.
 */
static int
intel_dp_pcon_dsc_enc_bpp(struct intel_dp *intel_dp,
			  const struct intel_crtc_state *crtc_state,
			  int num_slices, int slice_width)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_connector *connector = &intel_connector->base;
	int output_format = crtc_state->output_format;
	bool hdmi_all_bpp = connector->display_info.hdmi.dsc_cap.all_bpp;
	int pcon_fractional_bpp = drm_dp_pcon_dsc_bpp_incr(intel_dp->pcon_dsc_dpcd);
	/* Sink advertises the chunk limit in kbytes; convert to bytes. */
	int hdmi_max_chunk_bytes =
		connector->display_info.hdmi.dsc_cap.total_chunk_kbytes * 1024;

	return intel_hdmi_dsc_get_bpp(pcon_fractional_bpp, slice_width,
				      num_slices, output_format, hdmi_all_bpp,
				      hdmi_max_chunk_bytes);
}
2705
/*
 * Configure DSC on the PCON for DP -> HDMI 2.1 pass-through: compute the
 * slice height/width, slice count and bpp from the mode and the combined
 * PCON/sink capabilities, then override the PCON's PPS parameters.
 * Bails out silently whenever any prerequisite is missing.
 */
void
intel_dp_pcon_dsc_configure(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *crtc_state)
{
	u8 pps_param[6];
	int slice_height;
	int slice_width;
	int num_slices;
	int bits_per_pixel;
	int ret;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct drm_connector *connector;
	bool hdmi_is_dsc_1_2;

	if (!intel_dp_is_hdmi_2_1_sink(intel_dp))
		return;

	if (!intel_connector)
		return;
	connector = &intel_connector->base;
	hdmi_is_dsc_1_2 = connector->display_info.hdmi.dsc_cap.v_1p2;

	/* Both the PCON encoder and the HDMI sink must support DSC 1.2. */
	if (!drm_dp_pcon_enc_is_dsc_1_2(intel_dp->pcon_dsc_dpcd) ||
	    !hdmi_is_dsc_1_2)
		return;

	slice_height = intel_dp_pcon_dsc_enc_slice_height(crtc_state);
	if (!slice_height)
		return;

	num_slices = intel_dp_pcon_dsc_enc_slices(intel_dp, crtc_state);
	if (!num_slices)
		return;

	slice_width = DIV_ROUND_UP(crtc_state->hw.adjusted_mode.hdisplay,
				   num_slices);

	bits_per_pixel = intel_dp_pcon_dsc_enc_bpp(intel_dp, crtc_state,
						   num_slices, slice_width);
	if (!bits_per_pixel)
		return;

	/* Pack slice height, slice width and bpp little-endian for the PPS override. */
	pps_param[0] = slice_height & 0xFF;
	pps_param[1] = slice_height >> 8;
	pps_param[2] = slice_width & 0xFF;
	pps_param[3] = slice_width >> 8;
	pps_param[4] = bits_per_pixel & 0xFF;
	pps_param[5] = (bits_per_pixel >> 8) & 0x3;

	ret = drm_dp_pcon_pps_override_param(&intel_dp->aux, pps_param);
	if (ret < 0)
		drm_dbg_kms(&i915->drm, "Failed to set pcon DSC\n");
}
2760
/*
 * Select the link training pattern in the g4x DP port register and flush
 * it to the hardware (non-CPT variant of the train bits).
 */
static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 *DP = &intel_dp->DP;

	*DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		*DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		*DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		*DP |= DP_LINK_TRAIN_PAT_2;
		break;
	case DP_TRAINING_PATTERN_3:
		/* No TPS3 support in this hardware; TPS2 is the closest. */
		drm_dbg_kms(&dev_priv->drm,
			    "TPS3 not supported, using TPS2 instead\n");
		*DP |= DP_LINK_TRAIN_PAT_2;
		break;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
2791
/*
 * Turn on the DP port with training pattern 1 already selected, as the
 * start of link training requires.
 */
static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */
	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
2815
/*
 * Program a DP -> HDMI/DVI protocol converter (branch device) for the
 * current output: HDMI vs DVI mode, YCbCr 4:4:4 -> 4:2:0 conversion,
 * and the RGB -> YCbCr conversion colorspace.
 */
void intel_dp_configure_protocol_converter(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 tmp;

	/* The converter control registers require DPCD rev 1.3+. */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x13)
		return;

	if (!drm_dp_is_branch(intel_dp->dpcd))
		return;

	tmp = intel_dp->has_hdmi_sink ?
		DP_HDMI_DVI_OUTPUT_CONFIG : 0;

	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_PROTOCOL_CONVERTER_CONTROL_0, tmp) != 1)
		drm_dbg_kms(&i915->drm, "Failed to set protocol converter HDMI mode to %s\n",
			    enableddisabled(intel_dp->has_hdmi_sink));

	tmp = crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444 &&
		intel_dp->dfp.ycbcr_444_to_420 ? DP_CONVERSION_TO_YCBCR420_ENABLE : 0;

	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_PROTOCOL_CONVERTER_CONTROL_1, tmp) != 1)
		drm_dbg_kms(&i915->drm,
			    "Failed to set protocol converter YCbCr 4:2:0 conversion mode to %s\n",
			    enableddisabled(intel_dp->dfp.ycbcr_444_to_420));

	tmp = 0;
	if (intel_dp->dfp.rgb_to_ycbcr) {
		bool bt2020, bt709;

		/*
		 * FIXME: If the requested colorimetry is BT2020/BT709 but the
		 * PCON only supports RGB->YCbCr conversion for BT601, we
		 * still go ahead with BT601 as the default.
		 */
		tmp = DP_CONVERSION_BT601_RGB_YCBCR_ENABLE;

		bt2020 = drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
								   intel_dp->downstream_ports,
								   DP_DS_HDMI_BT2020_RGB_YCBCR_CONV);
		bt709 = drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
								  intel_dp->downstream_ports,
								  DP_DS_HDMI_BT709_RGB_YCBCR_CONV);
		switch (crtc_state->infoframes.vsc.colorimetry) {
		case DP_COLORIMETRY_BT2020_RGB:
		case DP_COLORIMETRY_BT2020_YCC:
			if (bt2020)
				tmp = DP_CONVERSION_BT2020_RGB_YCBCR_ENABLE;
			break;
		case DP_COLORIMETRY_BT709_YCC:
		case DP_COLORIMETRY_XVYCC_709:
			if (bt709)
				tmp = DP_CONVERSION_BT709_RGB_YCBCR_ENABLE;
			break;
		default:
			break;
		}
	}

	if (drm_dp_pcon_convert_rgb_to_ycbcr(&intel_dp->aux, tmp) < 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to set protocol converter RGB->YCbCr conversion mode to %s\n",
			    enableddisabled(tmp ? true : false));
}
2883
/*
 * Common DP enable path: bring up the port under the PPS lock, wait for
 * the PHY on VLV/CHV, power up the sink, configure any protocol
 * converter, run link training, and finally enable audio.
 */
static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	enum pipe pipe = crtc->pipe;
	intel_wakeref_t wakeref;

	/* The port must be off at this point; bail if it isn't. */
	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		/* Panel power on, with VDD held only for the duration. */
		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);

	if (pipe_config->has_audio) {
		drm_dbg(&dev_priv->drm, "Enabling DP audio on pipe %c\n",
			pipe_name(pipe));
		intel_audio_codec_enable(encoder, pipe_config, conn_state);
	}
}
2933
/* g4x: enable the port, then turn on the eDP backlight (if any). */
static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}
2942
/*
 * VLV/CHV: the port itself is enabled from the pre_enable hook, so only
 * the backlight is left to do here.
 */
static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}
2950
/* g4x/ilk: program the port register and, for eDP, turn the PLL on. */
static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}
2965
/* VLV: set up the PHY, then run the full enable (port + link training). */
static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}
2975
/* VLV: program the port register before the PHY PLL is brought up. */
static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}
2985
/* CHV: set up the PHY, enable the port, then drop the CL2 override. */
static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}
2998
/* CHV: program the port register before the PHY PLL is brought up. */
static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}
3008
/* CHV: PHY cleanup once the PLL has been shut down. */
static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}
3016
/* Link training hook: platforms whose max voltage swing is level 2. */
static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}
3022
/* Link training hook: platforms whose max voltage swing is level 3. */
static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}
3028
/* Link training hook: platforms whose max pre-emphasis is level 2. */
static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}
3033
/* Link training hook: platforms whose max pre-emphasis is level 3. */
static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}
3038
/*
 * Program the VLV PHY for the requested vswing/pre-emphasis combination
 * from train_set[0]. The demph/preemph/uniqtranscale register values are
 * opaque tuning magic; combinations not listed below silently leave the
 * PHY untouched (the default: return branches).
 */
static void vlv_set_signal_levels(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}
3124
/*
 * Program the CHV PHY for the requested vswing/pre-emphasis combination
 * from train_set[0]. Only the max-swing/zero-emphasis setting uses the
 * unique transition scale. Unsupported combinations silently leave the
 * PHY untouched (the default: return branches).
 */
static void chv_set_signal_levels(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}
3207
3208static u32 g4x_signal_levels(u8 train_set)
3209{
3210 u32 signal_levels = 0;
3211
3212 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
3213 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
3214 default:
3215 signal_levels |= DP_VOLTAGE_0_4;
3216 break;
3217 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
3218 signal_levels |= DP_VOLTAGE_0_6;
3219 break;
3220 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
3221 signal_levels |= DP_VOLTAGE_0_8;
3222 break;
3223 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
3224 signal_levels |= DP_VOLTAGE_1_2;
3225 break;
3226 }
3227 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
3228 case DP_TRAIN_PRE_EMPH_LEVEL_0:
3229 default:
3230 signal_levels |= DP_PRE_EMPHASIS_0;
3231 break;
3232 case DP_TRAIN_PRE_EMPH_LEVEL_1:
3233 signal_levels |= DP_PRE_EMPHASIS_3_5;
3234 break;
3235 case DP_TRAIN_PRE_EMPH_LEVEL_2:
3236 signal_levels |= DP_PRE_EMPHASIS_6;
3237 break;
3238 case DP_TRAIN_PRE_EMPH_LEVEL_3:
3239 signal_levels |= DP_PRE_EMPHASIS_9_5;
3240 break;
3241 }
3242 return signal_levels;
3243}
3244
/* Write the g4x vswing/pre-emphasis bits straight into the port register. */
static void
g4x_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
3264
3265
/*
 * SNB CPU eDP voltage swing and pre-emphasis control. Several requested
 * vswing/pre-emphasis combinations share a register setting; unsupported
 * ones fall back to 400-600mV/0dB with a debug message.
 */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}
3292
/* Write the SNB CPU eDP signal levels into the port register. */
static void
snb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
3312
3313
/*
 * IVB CPU eDP voltage swing and pre-emphasis control. Unsupported
 * combinations fall back to the 500mV/0dB setting with a debug message.
 */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}
3344
/* Write the IVB CPU eDP signal levels into the port register. */
static void
ivb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
3364
3365static char dp_training_pattern_name(u8 train_pat)
3366{
3367 switch (train_pat) {
3368 case DP_TRAINING_PATTERN_1:
3369 case DP_TRAINING_PATTERN_2:
3370 case DP_TRAINING_PATTERN_3:
3371 return '0' + train_pat;
3372 case DP_TRAINING_PATTERN_4:
3373 return '4';
3374 default:
3375 MISSING_CASE(train_pat);
3376 return '?';
3377 }
3378}
3379
/*
 * intel_dp_program_link_training_pattern - program a DP link training pattern
 * @intel_dp: DP struct
 * @crtc_state: crtc state being trained for
 * @dp_train_pat: training pattern byte in DP_TRAINING_PATTERN_SET format
 *
 * Logs the pattern (unless training is being disabled) and hands it to the
 * platform-specific ->set_link_train() hook for actual programming.
 */
void
intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
				       const struct intel_crtc_state *crtc_state,
				       u8 dp_train_pat)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u8 train_pat = intel_dp_training_pattern_symbol(dp_train_pat);

	if (train_pat != DP_TRAINING_PATTERN_DISABLE)
		drm_dbg_kms(&dev_priv->drm,
			    "[ENCODER:%d:%s] Using DP training pattern TPS%c\n",
			    encoder->base.base.id, encoder->base.name,
			    dp_training_pattern_name(train_pat));

	intel_dp->set_link_train(intel_dp, crtc_state, dp_train_pat);
}
3397
/*
 * Turn off the DP port, bringing the link down.
 *
 * The register write sequence is order-sensitive: the port is first put into
 * the idle training pattern, then disabled. On IBX an extra workaround
 * round-trip through pipe A is performed (see below).
 */
static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;
	u32 DP = intel_dp->DP;

	/* The port must currently be enabled for a link-down to make sense. */
	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	/* Put the port into the idle pattern before disabling it.
	 * CPT/IVB port A use a different link-train field layout. */
	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/* Now actually disable the port (and audio). */
	DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * IBX hardware workaround: after disabling the port on pipe B,
	 * briefly re-enable it on pipe A with training pattern 1 and disable
	 * it again, so the port's pipe select is left pointing at pipe A.
	 * NOTE(review): rationale inferred from the register sequence below -
	 * confirm against Bspec / original commit before relying on it.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * Suppress the FIFO underruns this toggle provokes on pipe A;
		 * reporting is restored after the workaround.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* Re-enable on pipe A with training pattern 1... */
		DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		/* ...and disable again, leaving pipe A selected. */
		DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	/* Give the panel time to power down (eDP PPS delay). */
	msleep(intel_dp->pps.panel_power_down_delay);

	/* Cache the final register value for the next enable. */
	intel_dp->DP = DP;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		/* The port no longer drives any pipe; clear the PPS tracking. */
		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}
3470
3471bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
3472{
3473 u8 dprx = 0;
3474
3475 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
3476 &dprx) != 1)
3477 return false;
3478 return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED;
3479}
3480
3481static void intel_dp_get_dsc_sink_cap(struct intel_dp *intel_dp)
3482{
3483 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3484
3485
3486
3487
3488
3489 memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));
3490
3491
3492 intel_dp->fec_capable = 0;
3493
3494
3495 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x14 ||
3496 intel_dp->edp_dpcd[0] >= DP_EDP_14) {
3497 if (drm_dp_dpcd_read(&intel_dp->aux, DP_DSC_SUPPORT,
3498 intel_dp->dsc_dpcd,
3499 sizeof(intel_dp->dsc_dpcd)) < 0)
3500 drm_err(&i915->drm,
3501 "Failed to read DPCD register 0x%x\n",
3502 DP_DSC_SUPPORT);
3503
3504 drm_dbg_kms(&i915->drm, "DSC DPCD: %*ph\n",
3505 (int)sizeof(intel_dp->dsc_dpcd),
3506 intel_dp->dsc_dpcd);
3507
3508
3509 if (!intel_dp_is_edp(intel_dp) &&
3510 drm_dp_dpcd_readb(&intel_dp->aux, DP_FEC_CAPABILITY,
3511 &intel_dp->fec_capable) < 0)
3512 drm_err(&i915->drm,
3513 "Failed to read FEC DPCD register\n");
3514
3515 drm_dbg_kms(&i915->drm, "FEC CAPABILITY: %x\n",
3516 intel_dp->fec_capable);
3517 }
3518}
3519
/*
 * One-time DPCD initialization for eDP panels.
 *
 * Reads the base receiver caps, branch descriptor, eDP display-control
 * capability block and (for eDP 1.4+) the supported-link-rate table, then
 * derives the common source/sink link rates. Returns false if the base
 * DPCD read fails.
 */
static bool
intel_edp_init_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv =
		to_i915(dp_to_dig_port(intel_dp)->base.base.dev);

	/* Expected to run only once; the cached caps should still be unset. */
	drm_WARN_ON(&dev_priv->drm, intel_dp->dpcd[DP_DPCD_REV] != 0);

	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd) != 0)
		return false;

	drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
			 drm_dp_is_branch(intel_dp->dpcd));

	/*
	 * Read the eDP display control capability registers starting at
	 * DP_EDP_DPCD_REV. On a short read edp_dpcd stays (partially) zero,
	 * which simply keeps the eDP 1.4+ paths below disabled.
	 */
	if (drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV,
			     intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) ==
	    sizeof(intel_dp->edp_dpcd))
		drm_dbg_kms(&dev_priv->drm, "eDP DPCD: %*ph\n",
			    (int)sizeof(intel_dp->edp_dpcd),
			    intel_dp->edp_dpcd);

	/*
	 * PSR caps are derived from the DPCD just read; initialize them
	 * before anything that may consult them.
	 */
	intel_psr_init_dpcd(intel_dp);

	/* eDP 1.4+ panels can expose an explicit table of link rates. */
	if (intel_dp->edp_dpcd[0] >= DP_EDP_14) {
		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
		int i;

		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
				 sink_rates, sizeof(sink_rates));

		for (i = 0; i < ARRAY_SIZE(sink_rates); i++) {
			int val = le16_to_cpu(sink_rates[i]);

			/* A zero entry terminates the table. */
			if (val == 0)
				break;

			/*
			 * Scale the raw table entry into the driver's link
			 * rate representation (assumes entries are in 200 kHz
			 * units, stored here as 10 kHz units - matches the
			 * eDP 1.4 DP_SUPPORTED_LINK_RATES definition).
			 */
			intel_dp->sink_rates[i] = (val * 200) / 10;
		}
		intel_dp->num_sink_rates = i;
	}

	/*
	 * Prefer the explicit link-rate table (selected via rate select)
	 * when the panel provides one; otherwise fall back to the legacy
	 * max-link-rate based sink rates.
	 */
	if (intel_dp->num_sink_rates)
		intel_dp->use_rate_select = true;
	else
		intel_dp_set_sink_rates(intel_dp);

	intel_dp_set_common_rates(intel_dp);

	/* Read the eDP DSC DPCD registers on platforms that support DSC. */
	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
		intel_dp_get_dsc_sink_cap(intel_dp);

	/*
	 * Write the source OUI during init; eDP panels are fixed, so there
	 * is no later hotplug event to trigger this.
	 */
	intel_edp_init_source_oui(intel_dp, true);

	return true;
}
3605
3606static bool
3607intel_dp_has_sink_count(struct intel_dp *intel_dp)
3608{
3609 if (!intel_dp->attached_connector)
3610 return false;
3611
3612 return drm_dp_read_sink_count_cap(&intel_dp->attached_connector->base,
3613 intel_dp->dpcd,
3614 &intel_dp->desc);
3615}
3616
/*
 * Read and cache the sink's DPCD state for detection.
 *
 * Initializes LTTPR/DPRX caps, (for external DP) the branch descriptor and
 * link-rate tables, the sink count and the downstream port info. Returns
 * false on AUX failure or when a branch device reports zero sinks.
 */
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	int ret;

	if (intel_dp_init_lttpr_and_dprx_caps(intel_dp) < 0)
		return false;

	/*
	 * eDP reads the descriptor and rate tables once during
	 * intel_edp_init_dpcd(); don't re-read them on every detect.
	 */
	if (!intel_dp_is_edp(intel_dp)) {
		drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
				 drm_dp_is_branch(intel_dp->dpcd));

		intel_dp_set_sink_rates(intel_dp);
		intel_dp_set_common_rates(intel_dp);
	}

	if (intel_dp_has_sink_count(intel_dp)) {
		ret = drm_dp_read_sink_count(&intel_dp->aux);
		if (ret < 0)
			return false;

		/*
		 * Cache the sink count so later short-pulse handling can
		 * notice when it changes.
		 */
		intel_dp->sink_count = ret;

		/*
		 * A sink count of zero means the branch device has nothing
		 * connected downstream; treat the port as disconnected.
		 */
		if (!intel_dp->sink_count)
			return false;
	}

	return drm_dp_read_downstream_info(&intel_dp->aux, intel_dp->dpcd,
					   intel_dp->downstream_ports) == 0;
}
3663
3664static bool
3665intel_dp_can_mst(struct intel_dp *intel_dp)
3666{
3667 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3668
3669 return i915->params.enable_dp_mst &&
3670 intel_dp->can_mst &&
3671 drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
3672}
3673
3674static void
3675intel_dp_configure_mst(struct intel_dp *intel_dp)
3676{
3677 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3678 struct intel_encoder *encoder =
3679 &dp_to_dig_port(intel_dp)->base;
3680 bool sink_can_mst = drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
3681
3682 drm_dbg_kms(&i915->drm,
3683 "[ENCODER:%d:%s] MST support: port: %s, sink: %s, modparam: %s\n",
3684 encoder->base.base.id, encoder->base.name,
3685 yesno(intel_dp->can_mst), yesno(sink_can_mst),
3686 yesno(i915->params.enable_dp_mst));
3687
3688 if (!intel_dp->can_mst)
3689 return;
3690
3691 intel_dp->is_mst = sink_can_mst &&
3692 i915->params.enable_dp_mst;
3693
3694 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
3695 intel_dp->is_mst);
3696}
3697
3698static bool
3699intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *sink_irq_vector)
3700{
3701 return drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI,
3702 sink_irq_vector, DP_DPRX_ESI_LEN) ==
3703 DP_DPRX_ESI_LEN;
3704}
3705
3706bool
3707intel_dp_needs_vsc_sdp(const struct intel_crtc_state *crtc_state,
3708 const struct drm_connector_state *conn_state)
3709{
3710
3711
3712
3713
3714
3715 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
3716 return true;
3717
3718 switch (conn_state->colorspace) {
3719 case DRM_MODE_COLORIMETRY_SYCC_601:
3720 case DRM_MODE_COLORIMETRY_OPYCC_601:
3721 case DRM_MODE_COLORIMETRY_BT2020_YCC:
3722 case DRM_MODE_COLORIMETRY_BT2020_RGB:
3723 case DRM_MODE_COLORIMETRY_BT2020_CYCC:
3724 return true;
3725 default:
3726 break;
3727 }
3728
3729 return false;
3730}
3731
/*
 * Pack a drm_dp_vsc_sdp into the raw DP SDP wire format.
 *
 * Returns the packed length (always sizeof(struct dp_sdp)) or -ENOSPC if
 * @size is too small. Only revision 0x5 SDPs carry the pixel-format /
 * colorimetry payload bytes; other revisions get just the header.
 */
static ssize_t intel_dp_vsc_sdp_pack(const struct drm_dp_vsc_sdp *vsc,
				     struct dp_sdp *sdp, size_t size)
{
	size_t length = sizeof(struct dp_sdp);

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	/*
	 * SDP header: HB0 = secondary-data packet ID (0), HB1 = packet type,
	 * HB2 = revision, HB3 = number of valid data bytes.
	 */
	sdp->sdp_header.HB0 = 0;
	sdp->sdp_header.HB1 = vsc->sdp_type;
	sdp->sdp_header.HB2 = vsc->revision;
	sdp->sdp_header.HB3 = vsc->length;

	/*
	 * Only revision 0x5 (pixel encoding / colorimetry format SDP)
	 * carries payload bytes; other revisions are header-only here.
	 */
	if (vsc->revision != 0x5)
		goto out;

	/* DB16[7:4]: pixel encoding, DB16[3:0]: colorimetry format. */
	sdp->db[16] = (vsc->pixelformat & 0xf) << 4;
	sdp->db[16] |= vsc->colorimetry & 0xf;

	/* DB17[2:0]: bit depth encoding. */
	switch (vsc->bpc) {
	case 6:
		/* 6bpc is encoded as 0x0, already zeroed by the memset. */
		break;
	case 8:
		sdp->db[17] = 0x1;
		break;
	case 10:
		sdp->db[17] = 0x2;
		break;
	case 12:
		sdp->db[17] = 0x3;
		break;
	case 16:
		sdp->db[17] = 0x4;
		break;
	default:
		MISSING_CASE(vsc->bpc);
		break;
	}

	/* DB17[7]: dynamic range (CTA vs VESA). */
	if (vsc->dynamic_range == DP_DYNAMIC_RANGE_CTA)
		sdp->db[17] |= 0x80;

	/* DB18[2:0]: content type. */
	sdp->db[18] = vsc->content_type & 0x7;

out:
	return length;
}
3793
/*
 * Pack an HDMI DRM (HDR static metadata) infoframe into a DP SDP.
 *
 * The infoframe payload is packed with the generic HDMI helper first and
 * then re-wrapped with a DP SDP header. Returns the number of bytes
 * written into @sdp, or -ENOSPC on a size mismatch.
 */
static ssize_t
intel_dp_hdr_metadata_infoframe_sdp_pack(const struct hdmi_drm_infoframe *drm_infoframe,
					 struct dp_sdp *sdp,
					 size_t size)
{
	size_t length = sizeof(struct dp_sdp);
	const int infoframe_size = HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE;
	unsigned char buf[HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE];
	ssize_t len;

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	/* Pack header + payload into a scratch buffer first. */
	len = hdmi_drm_infoframe_pack_only(drm_infoframe, buf, sizeof(buf));
	if (len < 0) {
		DRM_DEBUG_KMS("buffer size is smaller than hdr metadata infoframe\n");
		return -ENOSPC;
	}

	/* The DRM infoframe has a fixed size; anything else is malformed. */
	if (len != infoframe_size) {
		DRM_DEBUG_KMS("wrong static hdr metadata size\n");
		return -ENOSPC;
	}

	/* Secondary-data packet ID: only one GMP packet is sent, so 0. */
	sdp->sdp_header.HB0 = 0;
	/* Packet type mirrors the HDMI infoframe type (DRM). */
	sdp->sdp_header.HB1 = drm_infoframe->type;
	/*
	 * 0x1D: header byte 2 value the unpack side also checks
	 * (NOTE(review): presumably "payload bytes - 1" per the DP spec's
	 * HDR metadata infoframe SDP definition - confirm against spec).
	 */
	sdp->sdp_header.HB2 = 0x1D;
	/* HB3[7:2] = number of valid data bytes (0x13), HB3[1:0] = 0. */
	sdp->sdp_header.HB3 = (0x13 << 2);
	/* DB0: infoframe version. */
	sdp->db[0] = drm_infoframe->version;
	/* DB1: infoframe payload length. */
	sdp->db[1] = drm_infoframe->length;

	/*
	 * Copy the payload (without the HDMI packet header) into DB2
	 * onwards; the BUILD_BUG_ON guards the destination capacity.
	 */
	BUILD_BUG_ON(sizeof(sdp->db) < HDMI_DRM_INFOFRAME_SIZE + 2);
	memcpy(&sdp->db[2], &buf[HDMI_INFOFRAME_HEADER_SIZE],
	       HDMI_DRM_INFOFRAME_SIZE);

	/* Bytes actually used: SDP header + DB0/DB1 + payload. */
	return sizeof(struct dp_sdp_header) + 2 + HDMI_DRM_INFOFRAME_SIZE;
}
3869
3870static void intel_write_dp_sdp(struct intel_encoder *encoder,
3871 const struct intel_crtc_state *crtc_state,
3872 unsigned int type)
3873{
3874 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
3875 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3876 struct dp_sdp sdp = {};
3877 ssize_t len;
3878
3879 if ((crtc_state->infoframes.enable &
3880 intel_hdmi_infoframe_enable(type)) == 0)
3881 return;
3882
3883 switch (type) {
3884 case DP_SDP_VSC:
3885 len = intel_dp_vsc_sdp_pack(&crtc_state->infoframes.vsc, &sdp,
3886 sizeof(sdp));
3887 break;
3888 case HDMI_PACKET_TYPE_GAMUT_METADATA:
3889 len = intel_dp_hdr_metadata_infoframe_sdp_pack(&crtc_state->infoframes.drm.drm,
3890 &sdp, sizeof(sdp));
3891 break;
3892 default:
3893 MISSING_CASE(type);
3894 return;
3895 }
3896
3897 if (drm_WARN_ON(&dev_priv->drm, len < 0))
3898 return;
3899
3900 dig_port->write_infoframe(encoder, crtc_state, type, &sdp, len);
3901}
3902
3903void intel_write_dp_vsc_sdp(struct intel_encoder *encoder,
3904 const struct intel_crtc_state *crtc_state,
3905 struct drm_dp_vsc_sdp *vsc)
3906{
3907 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
3908 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3909 struct dp_sdp sdp = {};
3910 ssize_t len;
3911
3912 len = intel_dp_vsc_sdp_pack(vsc, &sdp, sizeof(sdp));
3913
3914 if (drm_WARN_ON(&dev_priv->drm, len < 0))
3915 return;
3916
3917 dig_port->write_infoframe(encoder, crtc_state, DP_SDP_VSC,
3918 &sdp, len);
3919}
3920
3921void intel_dp_set_infoframes(struct intel_encoder *encoder,
3922 bool enable,
3923 const struct intel_crtc_state *crtc_state,
3924 const struct drm_connector_state *conn_state)
3925{
3926 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3927 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3928 i915_reg_t reg = HSW_TVIDEO_DIP_CTL(crtc_state->cpu_transcoder);
3929 u32 dip_enable = VIDEO_DIP_ENABLE_AVI_HSW | VIDEO_DIP_ENABLE_GCP_HSW |
3930 VIDEO_DIP_ENABLE_VS_HSW | VIDEO_DIP_ENABLE_GMP_HSW |
3931 VIDEO_DIP_ENABLE_SPD_HSW | VIDEO_DIP_ENABLE_DRM_GLK;
3932 u32 val = intel_de_read(dev_priv, reg);
3933
3934
3935
3936 if (intel_psr_enabled(intel_dp))
3937 val &= ~dip_enable;
3938 else
3939 val &= ~(dip_enable | VIDEO_DIP_ENABLE_VSC_HSW);
3940
3941 if (!enable) {
3942 intel_de_write(dev_priv, reg, val);
3943 intel_de_posting_read(dev_priv, reg);
3944 return;
3945 }
3946
3947 intel_de_write(dev_priv, reg, val);
3948 intel_de_posting_read(dev_priv, reg);
3949
3950
3951 if (!intel_psr_enabled(intel_dp))
3952 intel_write_dp_sdp(encoder, crtc_state, DP_SDP_VSC);
3953
3954 intel_write_dp_sdp(encoder, crtc_state, HDMI_PACKET_TYPE_GAMUT_METADATA);
3955}
3956
3957static int intel_dp_vsc_sdp_unpack(struct drm_dp_vsc_sdp *vsc,
3958 const void *buffer, size_t size)
3959{
3960 const struct dp_sdp *sdp = buffer;
3961
3962 if (size < sizeof(struct dp_sdp))
3963 return -EINVAL;
3964
3965 memset(vsc, 0, size);
3966
3967 if (sdp->sdp_header.HB0 != 0)
3968 return -EINVAL;
3969
3970 if (sdp->sdp_header.HB1 != DP_SDP_VSC)
3971 return -EINVAL;
3972
3973 vsc->sdp_type = sdp->sdp_header.HB1;
3974 vsc->revision = sdp->sdp_header.HB2;
3975 vsc->length = sdp->sdp_header.HB3;
3976
3977 if ((sdp->sdp_header.HB2 == 0x2 && sdp->sdp_header.HB3 == 0x8) ||
3978 (sdp->sdp_header.HB2 == 0x4 && sdp->sdp_header.HB3 == 0xe)) {
3979
3980
3981
3982
3983
3984
3985
3986
3987 return 0;
3988 } else if (sdp->sdp_header.HB2 == 0x5 && sdp->sdp_header.HB3 == 0x13) {
3989
3990
3991
3992
3993
3994 vsc->pixelformat = (sdp->db[16] >> 4) & 0xf;
3995 vsc->colorimetry = sdp->db[16] & 0xf;
3996 vsc->dynamic_range = (sdp->db[17] >> 7) & 0x1;
3997
3998 switch (sdp->db[17] & 0x7) {
3999 case 0x0:
4000 vsc->bpc = 6;
4001 break;
4002 case 0x1:
4003 vsc->bpc = 8;
4004 break;
4005 case 0x2:
4006 vsc->bpc = 10;
4007 break;
4008 case 0x3:
4009 vsc->bpc = 12;
4010 break;
4011 case 0x4:
4012 vsc->bpc = 16;
4013 break;
4014 default:
4015 MISSING_CASE(sdp->db[17] & 0x7);
4016 return -EINVAL;
4017 }
4018
4019 vsc->content_type = sdp->db[18] & 0x7;
4020 } else {
4021 return -EINVAL;
4022 }
4023
4024 return 0;
4025}
4026
/*
 * Unpack an HDR metadata infoframe SDP back into an hdmi_drm_infoframe.
 *
 * Validates every header byte against the values written by the pack side
 * before handing the payload to the generic HDMI unpack helper. Returns 0
 * on success or a negative error code.
 */
static int
intel_dp_hdr_metadata_infoframe_sdp_unpack(struct hdmi_drm_infoframe *drm_infoframe,
					   const void *buffer, size_t size)
{
	int ret;

	const struct dp_sdp *sdp = buffer;

	if (size < sizeof(struct dp_sdp))
		return -EINVAL;

	/* HB0: secondary-data packet ID must be 0. */
	if (sdp->sdp_header.HB0 != 0)
		return -EINVAL;

	/* HB1: packet type must be the DRM infoframe type. */
	if (sdp->sdp_header.HB1 != HDMI_INFOFRAME_TYPE_DRM)
		return -EINVAL;

	/*
	 * HB2 must be 0x1D - the value the pack side writes for this
	 * packet type.
	 */
	if (sdp->sdp_header.HB2 != 0x1D)
		return -EINVAL;

	/* HB3[1:0] are reserved and must be zero. */
	if ((sdp->sdp_header.HB3 & 0x3) != 0)
		return -EINVAL;

	/* HB3[7:2]: number of valid data bytes, must be 0x13. */
	if (((sdp->sdp_header.HB3 >> 2) & 0x3f) != 0x13)
		return -EINVAL;

	/* DB0: infoframe version, only version 1 is handled. */
	if (sdp->db[0] != 1)
		return -EINVAL;

	/* DB1: payload length must match the DRM infoframe size. */
	if (sdp->db[1] != HDMI_DRM_INFOFRAME_SIZE)
		return -EINVAL;

	ret = hdmi_drm_infoframe_unpack_only(drm_infoframe, &sdp->db[2],
					     HDMI_DRM_INFOFRAME_SIZE);

	return ret;
}
4072
4073static void intel_read_dp_vsc_sdp(struct intel_encoder *encoder,
4074 struct intel_crtc_state *crtc_state,
4075 struct drm_dp_vsc_sdp *vsc)
4076{
4077 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
4078 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4079 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
4080 unsigned int type = DP_SDP_VSC;
4081 struct dp_sdp sdp = {};
4082 int ret;
4083
4084
4085 if (intel_psr_enabled(intel_dp))
4086 return;
4087
4088 if ((crtc_state->infoframes.enable &
4089 intel_hdmi_infoframe_enable(type)) == 0)
4090 return;
4091
4092 dig_port->read_infoframe(encoder, crtc_state, type, &sdp, sizeof(sdp));
4093
4094 ret = intel_dp_vsc_sdp_unpack(vsc, &sdp, sizeof(sdp));
4095
4096 if (ret)
4097 drm_dbg_kms(&dev_priv->drm, "Failed to unpack DP VSC SDP\n");
4098}
4099
4100static void intel_read_dp_hdr_metadata_infoframe_sdp(struct intel_encoder *encoder,
4101 struct intel_crtc_state *crtc_state,
4102 struct hdmi_drm_infoframe *drm_infoframe)
4103{
4104 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
4105 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
4106 unsigned int type = HDMI_PACKET_TYPE_GAMUT_METADATA;
4107 struct dp_sdp sdp = {};
4108 int ret;
4109
4110 if ((crtc_state->infoframes.enable &
4111 intel_hdmi_infoframe_enable(type)) == 0)
4112 return;
4113
4114 dig_port->read_infoframe(encoder, crtc_state, type, &sdp,
4115 sizeof(sdp));
4116
4117 ret = intel_dp_hdr_metadata_infoframe_sdp_unpack(drm_infoframe, &sdp,
4118 sizeof(sdp));
4119
4120 if (ret)
4121 drm_dbg_kms(&dev_priv->drm,
4122 "Failed to unpack DP HDR Metadata Infoframe SDP\n");
4123}
4124
4125void intel_read_dp_sdp(struct intel_encoder *encoder,
4126 struct intel_crtc_state *crtc_state,
4127 unsigned int type)
4128{
4129 if (encoder->type != INTEL_OUTPUT_DDI)
4130 return;
4131
4132 switch (type) {
4133 case DP_SDP_VSC:
4134 intel_read_dp_vsc_sdp(encoder, crtc_state,
4135 &crtc_state->infoframes.vsc);
4136 break;
4137 case HDMI_PACKET_TYPE_GAMUT_METADATA:
4138 intel_read_dp_hdr_metadata_infoframe_sdp(encoder, crtc_state,
4139 &crtc_state->infoframes.drm.drm);
4140 break;
4141 default:
4142 MISSING_CASE(type);
4143 break;
4144 }
4145}
4146
/*
 * Handle a DP_TEST_LINK_TRAINING compliance request.
 *
 * Reads the requested lane count and link rate from the sink, validates
 * them against what this link supports, and stashes them for the actual
 * retrain. Returns DP_TEST_ACK or DP_TEST_NAK.
 */
static u8 intel_dp_autotest_link_training(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int status = 0;
	int test_link_rate;
	u8 test_lane_count, test_link_bw;

	/* Read the requested lane count from the test-request registers. */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LANE_COUNT,
				   &test_lane_count);

	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Lane count read failed\n");
		return DP_TEST_NAK;
	}
	test_lane_count &= DP_MAX_LANE_COUNT_MASK;

	/* Read the requested link rate (BW code). */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LINK_RATE,
				   &test_link_bw);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Link Rate read failed\n");
		return DP_TEST_NAK;
	}
	test_link_rate = drm_dp_bw_code_to_link_rate(test_link_bw);

	/* NAK parameters this link can't actually drive. */
	if (!intel_dp_link_params_valid(intel_dp, test_link_rate,
					test_lane_count))
		return DP_TEST_NAK;

	intel_dp->compliance.test_lane_count = test_lane_count;
	intel_dp->compliance.test_link_rate = test_link_rate;

	return DP_TEST_ACK;
}
4184
/*
 * Handle a DP_TEST_LINK_VIDEO_PATTERN compliance request.
 *
 * Reads the requested pattern, geometry and pixel format, NAKing anything
 * outside the supported subset (color-ramp, RGB, VESA range, 6/8 bpc).
 * On success the parameters are cached and test_active is set so the
 * display code can apply them. Returns DP_TEST_ACK or DP_TEST_NAK.
 */
static u8 intel_dp_autotest_video_pattern(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 test_pattern;
	u8 test_misc;
	__be16 h_width, v_height;
	int status = 0;

	/* Read the TEST_PATTERN (DP CTS 1.2 Core r1.1 3.1.5) */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_PATTERN,
				   &test_pattern);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Test pattern read failed\n");
		return DP_TEST_NAK;
	}
	/* Only the color-ramp pattern is supported. */
	if (test_pattern != DP_COLOR_RAMP)
		return DP_TEST_NAK;

	/* Requested width/height are big-endian 16-bit values. */
	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_H_WIDTH_HI,
				  &h_width, 2);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "H Width read failed\n");
		return DP_TEST_NAK;
	}

	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_V_HEIGHT_HI,
				  &v_height, 2);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "V Height read failed\n");
		return DP_TEST_NAK;
	}

	/* Pixel format and dynamic range live in TEST_MISC0. */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_MISC0,
				   &test_misc);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "TEST MISC read failed\n");
		return DP_TEST_NAK;
	}
	/* Only RGB with VESA dynamic range at 6 or 8 bpc is supported. */
	if ((test_misc & DP_TEST_COLOR_FORMAT_MASK) != DP_COLOR_FORMAT_RGB)
		return DP_TEST_NAK;
	if (test_misc & DP_TEST_DYNAMIC_RANGE_CEA)
		return DP_TEST_NAK;
	switch (test_misc & DP_TEST_BIT_DEPTH_MASK) {
	case DP_TEST_BIT_DEPTH_6:
		intel_dp->compliance.test_data.bpc = 6;
		break;
	case DP_TEST_BIT_DEPTH_8:
		intel_dp->compliance.test_data.bpc = 8;
		break;
	default:
		return DP_TEST_NAK;
	}

	intel_dp->compliance.test_data.video_pattern = test_pattern;
	intel_dp->compliance.test_data.hdisplay = be16_to_cpu(h_width);
	intel_dp->compliance.test_data.vdisplay = be16_to_cpu(v_height);

	/* Set test active flag here so userspace doesn't interrupt things */
	intel_dp->compliance.test_active = true;

	return DP_TEST_ACK;
}
4246
/*
 * Handle a DP_TEST_LINK_EDID_READ compliance request.
 *
 * If the cached EDID read looked unreliable (missing, corrupt, or too many
 * I2C defers), the failsafe resolution is requested; otherwise the last
 * EDID block's checksum is written back to the sink and the preferred
 * resolution is requested.
 */
static u8 intel_dp_autotest_edid(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 test_result = DP_TEST_ACK;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_connector *connector = &intel_connector->base;

	if (intel_connector->detect_edid == NULL ||
	    connector->edid_corrupt ||
	    intel_dp->aux.i2c_defer_count > 6) {
		/*
		 * The EDID read was unreliable; fall back to the failsafe
		 * resolution. Log the NACK/DEFER counters that led here.
		 */
		if (intel_dp->aux.i2c_nack_count > 0 ||
		    intel_dp->aux.i2c_defer_count > 0)
			drm_dbg_kms(&i915->drm,
				    "EDID read had %d NACKs, %d DEFERs\n",
				    intel_dp->aux.i2c_nack_count,
				    intel_dp->aux.i2c_defer_count);
		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_FAILSAFE;
	} else {
		struct edid *block = intel_connector->detect_edid;

		/* Advance to the last EDID extension block. */
		block += intel_connector->detect_edid->extensions;

		/* Report that block's checksum back to the sink. */
		if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_EDID_CHECKSUM,
				       block->checksum) <= 0)
			drm_dbg_kms(&i915->drm,
				    "Failed to write EDID checksum\n");

		test_result = DP_TEST_ACK | DP_TEST_EDID_CHECKSUM_WRITE;
		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_PREFERRED;
	}

	/* Set test active flag here so userspace doesn't interrupt things */
	intel_dp->compliance.test_active = true;

	return test_result;
}
4293
4294static void intel_dp_phy_pattern_update(struct intel_dp *intel_dp,
4295 const struct intel_crtc_state *crtc_state)
4296{
4297 struct drm_i915_private *dev_priv =
4298 to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
4299 struct drm_dp_phy_test_params *data =
4300 &intel_dp->compliance.test_data.phytest;
4301 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
4302 enum pipe pipe = crtc->pipe;
4303 u32 pattern_val;
4304
4305 switch (data->phy_pattern) {
4306 case DP_PHY_TEST_PATTERN_NONE:
4307 DRM_DEBUG_KMS("Disable Phy Test Pattern\n");
4308 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe), 0x0);
4309 break;
4310 case DP_PHY_TEST_PATTERN_D10_2:
4311 DRM_DEBUG_KMS("Set D10.2 Phy Test Pattern\n");
4312 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
4313 DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_D10_2);
4314 break;
4315 case DP_PHY_TEST_PATTERN_ERROR_COUNT:
4316 DRM_DEBUG_KMS("Set Error Count Phy Test Pattern\n");
4317 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
4318 DDI_DP_COMP_CTL_ENABLE |
4319 DDI_DP_COMP_CTL_SCRAMBLED_0);
4320 break;
4321 case DP_PHY_TEST_PATTERN_PRBS7:
4322 DRM_DEBUG_KMS("Set PRBS7 Phy Test Pattern\n");
4323 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
4324 DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_PRBS7);
4325 break;
4326 case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
4327
4328
4329
4330
4331
4332 DRM_DEBUG_KMS("Set 80Bit Custom Phy Test Pattern 0x3e0f83e0 0x0f83e0f8 0x0000f83e\n");
4333 pattern_val = 0x3e0f83e0;
4334 intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 0), pattern_val);
4335 pattern_val = 0x0f83e0f8;
4336 intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 1), pattern_val);
4337 pattern_val = 0x0000f83e;
4338 intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 2), pattern_val);
4339 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
4340 DDI_DP_COMP_CTL_ENABLE |
4341 DDI_DP_COMP_CTL_CUSTOM80);
4342 break;
4343 case DP_PHY_TEST_PATTERN_CP2520:
4344
4345
4346
4347
4348
4349 DRM_DEBUG_KMS("Set HBR2 compliance Phy Test Pattern\n");
4350 pattern_val = 0xFB;
4351 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
4352 DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_HBR2 |
4353 pattern_val);
4354 break;
4355 default:
4356 WARN(1, "Invalid Phy Test Pattern\n");
4357 }
4358}
4359
/*
 * Temporarily disable the transcoder, pipe and DP transport for PHY
 * compliance testing (re-enabled by intel_dp_autotest_phy_ddi_enable()).
 * Note the write order: pipe first, then transcoder, then DP TP.
 */
static void
intel_dp_autotest_phy_ddi_disable(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *crtc = to_intel_crtc(dig_port->base.base.crtc);
	enum pipe pipe = crtc->pipe;
	u32 trans_ddi_func_ctl_value, trans_conf_value, dp_tp_ctl_value;

	/* Read-modify-write: only the enable bits are touched. */
	trans_ddi_func_ctl_value = intel_de_read(dev_priv,
						 TRANS_DDI_FUNC_CTL(pipe));
	trans_conf_value = intel_de_read(dev_priv, PIPECONF(pipe));
	dp_tp_ctl_value = intel_de_read(dev_priv, TGL_DP_TP_CTL(pipe));

	trans_ddi_func_ctl_value &= ~(TRANS_DDI_FUNC_ENABLE |
				      TGL_TRANS_DDI_PORT_MASK);
	trans_conf_value &= ~PIPECONF_ENABLE;
	dp_tp_ctl_value &= ~DP_TP_CTL_ENABLE;

	intel_de_write(dev_priv, PIPECONF(pipe), trans_conf_value);
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(pipe),
		       trans_ddi_func_ctl_value);
	intel_de_write(dev_priv, TGL_DP_TP_CTL(pipe), dp_tp_ctl_value);
}
4386
/*
 * Re-enable the pipe, DP transport and transcoder after PHY compliance
 * pattern programming (counterpart of intel_dp_autotest_phy_ddi_disable()).
 * The port selection is restored in the transcoder control register, which
 * is written last.
 */
static void
intel_dp_autotest_phy_ddi_enable(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = dig_port->base.port;
	struct intel_crtc *crtc = to_intel_crtc(dig_port->base.base.crtc);
	enum pipe pipe = crtc->pipe;
	u32 trans_ddi_func_ctl_value, trans_conf_value, dp_tp_ctl_value;

	/* Read-modify-write: only enable bits and port select are touched. */
	trans_ddi_func_ctl_value = intel_de_read(dev_priv,
						 TRANS_DDI_FUNC_CTL(pipe));
	trans_conf_value = intel_de_read(dev_priv, PIPECONF(pipe));
	dp_tp_ctl_value = intel_de_read(dev_priv, TGL_DP_TP_CTL(pipe));

	trans_ddi_func_ctl_value |= TRANS_DDI_FUNC_ENABLE |
				    TGL_TRANS_DDI_SELECT_PORT(port);
	trans_conf_value |= PIPECONF_ENABLE;
	dp_tp_ctl_value |= DP_TP_CTL_ENABLE;

	intel_de_write(dev_priv, PIPECONF(pipe), trans_conf_value);
	intel_de_write(dev_priv, TGL_DP_TP_CTL(pipe), dp_tp_ctl_value);
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(pipe),
		       trans_ddi_func_ctl_value);
}
4414
4415static void intel_dp_process_phy_request(struct intel_dp *intel_dp,
4416 const struct intel_crtc_state *crtc_state)
4417{
4418 struct drm_dp_phy_test_params *data =
4419 &intel_dp->compliance.test_data.phytest;
4420 u8 link_status[DP_LINK_STATUS_SIZE];
4421
4422 if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
4423 link_status) < 0) {
4424 DRM_DEBUG_KMS("failed to get link status\n");
4425 return;
4426 }
4427
4428
4429 intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX,
4430 link_status);
4431
4432 intel_dp_autotest_phy_ddi_disable(intel_dp, crtc_state);
4433
4434 intel_dp_set_signal_levels(intel_dp, crtc_state, DP_PHY_DPRX);
4435
4436 intel_dp_phy_pattern_update(intel_dp, crtc_state);
4437
4438 intel_dp_autotest_phy_ddi_enable(intel_dp, crtc_state);
4439
4440 drm_dp_set_phy_test_pattern(&intel_dp->aux, data,
4441 link_status[DP_DPCD_REV]);
4442}
4443
4444static u8 intel_dp_autotest_phy_pattern(struct intel_dp *intel_dp)
4445{
4446 struct drm_dp_phy_test_params *data =
4447 &intel_dp->compliance.test_data.phytest;
4448
4449 if (drm_dp_get_phy_test_pattern(&intel_dp->aux, data)) {
4450 DRM_DEBUG_KMS("DP Phy Test pattern AUX read failure\n");
4451 return DP_TEST_NAK;
4452 }
4453
4454
4455 intel_dp->compliance.test_active = true;
4456
4457 return DP_TEST_ACK;
4458}
4459
/*
 * Service a sink compliance test request (DP_TEST_REQUEST), dispatching to
 * the per-test handler and writing the ACK/NAK back to DP_TEST_RESPONSE.
 */
static void intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 response = DP_TEST_NAK;
	u8 request = 0;
	int status;

	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_REQUEST, &request);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm,
			    "Could not read test request from sink\n");
		goto update_status;
	}

	switch (request) {
	case DP_TEST_LINK_TRAINING:
		drm_dbg_kms(&i915->drm, "LINK_TRAINING test requested\n");
		response = intel_dp_autotest_link_training(intel_dp);
		break;
	case DP_TEST_LINK_VIDEO_PATTERN:
		drm_dbg_kms(&i915->drm, "TEST_PATTERN test requested\n");
		response = intel_dp_autotest_video_pattern(intel_dp);
		break;
	case DP_TEST_LINK_EDID_READ:
		drm_dbg_kms(&i915->drm, "EDID test requested\n");
		response = intel_dp_autotest_edid(intel_dp);
		break;
	case DP_TEST_LINK_PHY_TEST_PATTERN:
		drm_dbg_kms(&i915->drm, "PHY_PATTERN test requested\n");
		response = intel_dp_autotest_phy_pattern(intel_dp);
		break;
	default:
		drm_dbg_kms(&i915->drm, "Invalid test request '%02x'\n",
			    request);
		break;
	}

	/* Remember which test was accepted for later processing. */
	if (response & DP_TEST_ACK)
		intel_dp->compliance.test_type = request;

update_status:
	status = drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_RESPONSE, response);
	if (status <= 0)
		drm_dbg_kms(&i915->drm,
			    "Could not write test response to sink\n");
}
4506
4507static void
4508intel_dp_mst_hpd_irq(struct intel_dp *intel_dp, u8 *esi, bool *handled)
4509{
4510 drm_dp_mst_hpd_irq(&intel_dp->mst_mgr, esi, handled);
4511
4512 if (esi[1] & DP_CP_IRQ) {
4513 intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
4514 *handled = true;
4515 }
4516}
4517
4518
4519
4520
4521
4522
4523
4524
4525
4526
4527
4528
4529
4530
4531static bool
4532intel_dp_check_mst_status(struct intel_dp *intel_dp)
4533{
4534 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4535 bool link_ok = true;
4536
4537 drm_WARN_ON_ONCE(&i915->drm, intel_dp->active_mst_links < 0);
4538
4539 for (;;) {
4540 u8 esi[DP_DPRX_ESI_LEN] = {};
4541 bool handled;
4542 int retry;
4543
4544 if (!intel_dp_get_sink_irq_esi(intel_dp, esi)) {
4545 drm_dbg_kms(&i915->drm,
4546 "failed to get ESI - device may have failed\n");
4547 link_ok = false;
4548
4549 break;
4550 }
4551
4552
4553 if (intel_dp->active_mst_links > 0 && link_ok &&
4554 !drm_dp_channel_eq_ok(&esi[10], intel_dp->lane_count)) {
4555 drm_dbg_kms(&i915->drm,
4556 "channel EQ not ok, retraining\n");
4557 link_ok = false;
4558 }
4559
4560 drm_dbg_kms(&i915->drm, "got esi %3ph\n", esi);
4561
4562 intel_dp_mst_hpd_irq(intel_dp, esi, &handled);
4563
4564 if (!handled)
4565 break;
4566
4567 for (retry = 0; retry < 3; retry++) {
4568 int wret;
4569
4570 wret = drm_dp_dpcd_write(&intel_dp->aux,
4571 DP_SINK_COUNT_ESI+1,
4572 &esi[1], 3);
4573 if (wret == 3)
4574 break;
4575 }
4576 }
4577
4578 return link_ok;
4579}
4580
/*
 * Handle the PCON's HDMI link status change IRQ: if the FRL link we had
 * trained went down, disable the HDMI link at the PCON, dump the FRL
 * error counters and attempt a retrain via intel_dp_check_frl_training()
 * (which falls back to TMDS mode if FRL training fails).
 */
static void
intel_dp_handle_hdmi_link_status_change(struct intel_dp *intel_dp)
{
	bool is_active;
	u8 buf = 0;

	is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux);
	if (intel_dp->frl.is_trained && !is_active) {
		/* read-modify-write to clear only the HDMI link enable bit */
		if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, &buf) < 0)
			return;

		buf &= ~DP_PCON_ENABLE_HDMI_LINK;
		if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf) < 0)
			return;

		drm_dp_pcon_hdmi_frl_link_error_count(&intel_dp->aux, &intel_dp->attached_connector->base);

		/* Restart FRL training or fall back to TMDS mode */
		intel_dp_check_frl_training(intel_dp);
	}
}
4602
/*
 * Check whether the currently trained link has lost CR/channel EQ and
 * should be retrained. Returns false when retraining is impossible or
 * pointless (never trained, PSR active, stale link params, AUX failure).
 */
static bool
intel_dp_needs_link_retrain(struct intel_dp *intel_dp)
{
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (!intel_dp->link_trained)
		return false;

	/*
	 * While PSR source HW is enabled, it will control main-link sending
	 * frames, enabling and disabling it so trying to do a retrain will
	 * fail as the link would or not be on or it could mix training
	 * patterns and frame data at the same time causing retrain to fail.
	 * Also when exiting PSR, HW will retrain the link anyways fixing
	 * any link status error.
	 */
	if (intel_psr_enabled(intel_dp))
		return false;

	if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
					     link_status) < 0)
		return false;

	/*
	 * Validate the cached values of intel_dp->link_rate and
	 * intel_dp->lane_count before attempting to retrain.
	 *
	 * FIXME would be nice to use the crtc state here, but since
	 * we need to call this from the short HPD handler that seems
	 * a bit hard.
	 */
	if (!intel_dp_link_params_valid(intel_dp, intel_dp->link_rate,
					intel_dp->lane_count))
		return false;

	/* Retrain if channel EQ (which includes CR) is no longer ok */
	return !drm_dp_channel_eq_ok(link_status, intel_dp->lane_count);
}
4641
4642static bool intel_dp_has_connector(struct intel_dp *intel_dp,
4643 const struct drm_connector_state *conn_state)
4644{
4645 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4646 struct intel_encoder *encoder;
4647 enum pipe pipe;
4648
4649 if (!conn_state->best_encoder)
4650 return false;
4651
4652
4653 encoder = &dp_to_dig_port(intel_dp)->base;
4654 if (conn_state->best_encoder == &encoder->base)
4655 return true;
4656
4657
4658 for_each_pipe(i915, pipe) {
4659 encoder = &intel_dp->mst_encoders[pipe]->base;
4660 if (conn_state->best_encoder == &encoder->base)
4661 return true;
4662 }
4663
4664 return false;
4665}
4666
/*
 * Collect the mask of active CRTCs driven by @intel_dp that should take
 * part in link retraining, taking each CRTC's modeset lock via @ctx.
 * CRTCs that are inactive or still have a commit in flight are skipped.
 *
 * Returns 0 on success (possibly with *crtc_mask == 0 when no retrain
 * is needed), or a negative error (e.g. -EDEADLK) from locking.
 */
static int intel_dp_prep_link_retrain(struct intel_dp *intel_dp,
				      struct drm_modeset_acquire_ctx *ctx,
				      u32 *crtc_mask)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct drm_connector_list_iter conn_iter;
	struct intel_connector *connector;
	int ret = 0;

	*crtc_mask = 0;

	if (!intel_dp_needs_link_retrain(intel_dp))
		return 0;

	drm_connector_list_iter_begin(&i915->drm, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state =
			connector->base.state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!intel_dp_has_connector(intel_dp, conn_state))
			continue;

		crtc = to_intel_crtc(conn_state->crtc);
		if (!crtc)
			continue;

		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
		if (ret)
			break;

		crtc_state = to_intel_crtc_state(crtc->base.state);

		drm_WARN_ON(&i915->drm, !intel_crtc_has_dp_encoder(crtc_state));

		if (!crtc_state->hw.active)
			continue;

		/* Don't race against an in-flight commit on this CRTC. */
		if (conn_state->commit &&
		    !try_wait_for_completion(&conn_state->commit->hw_done))
			continue;

		*crtc_mask |= drm_crtc_mask(&crtc->base);
	}
	drm_connector_list_iter_end(&conn_iter);

	/* Recheck: the link may have recovered while scanning connectors. */
	if (!intel_dp_needs_link_retrain(intel_dp))
		*crtc_mask = 0;

	return ret;
}
4719
4720static bool intel_dp_is_connected(struct intel_dp *intel_dp)
4721{
4722 struct intel_connector *connector = intel_dp->attached_connector;
4723
4724 return connector->base.status == connector_status_connected ||
4725 intel_dp->is_mst;
4726}
4727
/*
 * intel_dp_retrain_link - retrain the DP link if it has lost sync
 * @encoder: encoder whose link to retrain
 * @ctx: modeset acquire context (caller handles -EDEADLK backoff)
 *
 * Retrains the link with FIFO underrun reporting suppressed, since
 * retraining briefly disturbs the active display. Returns 0 or a
 * negative error, notably -EDEADLK which the caller must back off on.
 */
int intel_dp_retrain_link(struct intel_encoder *encoder,
			  struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc;
	u32 crtc_mask;
	int ret;

	if (!intel_dp_is_connected(intel_dp))
		return 0;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	ret = intel_dp_prep_link_retrain(intel_dp, ctx, &crtc_mask);
	if (ret)
		return ret;

	if (crtc_mask == 0)
		return 0;

	drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s] retraining link\n",
		    encoder->base.base.id, encoder->base.name);

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* Suppress underruns caused by re-training */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, false);
		if (crtc_state->has_pch_encoder)
			intel_set_pch_fifo_underrun_reporting(dev_priv,
							      intel_crtc_pch_transcoder(crtc), false);
	}

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* retrain on the MST master transcoder */
		if (INTEL_GEN(dev_priv) >= 12 &&
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST) &&
		    !intel_dp_mst_is_master_trans(crtc_state))
			continue;

		intel_dp_check_frl_training(intel_dp);
		intel_dp_pcon_dsc_configure(intel_dp, crtc_state);
		intel_dp_start_link_train(intel_dp, crtc_state);
		intel_dp_stop_link_train(intel_dp, crtc_state);
		break;
	}

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* Keep underrun reporting disabled until things are stable */
		intel_wait_for_vblank(dev_priv, crtc->pipe);

		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, true);
		if (crtc_state->has_pch_encoder)
			intel_set_pch_fifo_underrun_reporting(dev_priv,
							      intel_crtc_pch_transcoder(crtc), true);
	}

	return 0;
}
4798
/*
 * Like intel_dp_prep_link_retrain(): collect (and lock via @ctx) the
 * active CRTCs driven by @intel_dp that a DP PHY compliance test should
 * run on. Returns 0 or a negative error (e.g. -EDEADLK) from locking.
 */
static int intel_dp_prep_phy_test(struct intel_dp *intel_dp,
				  struct drm_modeset_acquire_ctx *ctx,
				  u32 *crtc_mask)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct drm_connector_list_iter conn_iter;
	struct intel_connector *connector;
	int ret = 0;

	*crtc_mask = 0;

	drm_connector_list_iter_begin(&i915->drm, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state =
			connector->base.state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!intel_dp_has_connector(intel_dp, conn_state))
			continue;

		crtc = to_intel_crtc(conn_state->crtc);
		if (!crtc)
			continue;

		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
		if (ret)
			break;

		crtc_state = to_intel_crtc_state(crtc->base.state);

		drm_WARN_ON(&i915->drm, !intel_crtc_has_dp_encoder(crtc_state));

		if (!crtc_state->hw.active)
			continue;

		/* Don't race against an in-flight commit on this CRTC. */
		if (conn_state->commit &&
		    !try_wait_for_completion(&conn_state->commit->hw_done))
			continue;

		*crtc_mask |= drm_crtc_mask(&crtc->base);
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}
4845
/*
 * Execute the pending DP PHY compliance test pattern on the (master)
 * transcoder driving this encoder. The caller handles -EDEADLK backoff.
 */
static int intel_dp_do_phy_test(struct intel_encoder *encoder,
				struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc;
	u32 crtc_mask;
	int ret;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	ret = intel_dp_prep_phy_test(intel_dp, ctx, &crtc_mask);
	if (ret)
		return ret;

	if (crtc_mask == 0)
		return 0;

	drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s] PHY test\n",
		    encoder->base.base.id, encoder->base.name);

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* test on the MST master transcoder */
		if (INTEL_GEN(dev_priv) >= 12 &&
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST) &&
		    !intel_dp_mst_is_master_trans(crtc_state))
			continue;

		intel_dp_process_phy_request(intel_dp, crtc_state);
		break;
	}

	return 0;
}
4886
4887void intel_dp_phy_test(struct intel_encoder *encoder)
4888{
4889 struct drm_modeset_acquire_ctx ctx;
4890 int ret;
4891
4892 drm_modeset_acquire_init(&ctx, 0);
4893
4894 for (;;) {
4895 ret = intel_dp_do_phy_test(encoder, &ctx);
4896
4897 if (ret == -EDEADLK) {
4898 drm_modeset_backoff(&ctx);
4899 continue;
4900 }
4901
4902 break;
4903 }
4904
4905 drm_modeset_drop_locks(&ctx);
4906 drm_modeset_acquire_fini(&ctx);
4907 drm_WARN(encoder->base.dev, ret,
4908 "Acquiring modeset locks failed with %i\n", ret);
4909}
4910
4911
4912
4913
4914
4915
4916
4917
4918
4919
4920
4921
4922
/*
 * If display is now connected check links status,
 * there has been known issues of link loss triggering
 * long pulse.
 *
 * Some sinks (eg. ASUS PB287Q) seem to perform some
 * weird HPD handling, which means we need to retrain
 * the link now. In DP 1.3 the HPD handling has been
 * updated to avoid these problems.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug(): retry detection once if nothing changed.
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}
4968
/*
 * Read, ack and dispatch the sink's DEVICE_SERVICE_IRQ_VECTOR
 * (automated test requests, HDCP CP_IRQ, sink specific IRQs).
 */
static void intel_dp_check_device_service_irq(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 val;

	/* The IRQ vector only exists on DPCD 1.1+ sinks. */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	/* != 1 means the AUX read failed; !val means no IRQ pending. */
	if (drm_dp_dpcd_readb(&intel_dp->aux,
			      DP_DEVICE_SERVICE_IRQ_VECTOR, &val) != 1 || !val)
		return;

	/* Clear interrupt source by writing back the handled bits */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_DEVICE_SERVICE_IRQ_VECTOR, val);

	if (val & DP_AUTOMATED_TEST_REQUEST)
		intel_dp_handle_test_request(intel_dp);

	if (val & DP_CP_IRQ)
		intel_hdcp_handle_cp_irq(intel_dp->attached_connector);

	if (val & DP_SINK_SPECIFIC_IRQ)
		drm_dbg_kms(&i915->drm, "Sink specific irq unhandled\n");
}
4992
4993static void intel_dp_check_link_service_irq(struct intel_dp *intel_dp)
4994{
4995 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4996 u8 val;
4997
4998 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
4999 return;
5000
5001 if (drm_dp_dpcd_readb(&intel_dp->aux,
5002 DP_LINK_SERVICE_IRQ_VECTOR_ESI0, &val) != 1 || !val) {
5003 drm_dbg_kms(&i915->drm, "Error in reading link service irq vector\n");
5004 return;
5005 }
5006
5007 if (drm_dp_dpcd_writeb(&intel_dp->aux,
5008 DP_LINK_SERVICE_IRQ_VECTOR_ESI0, val) != 1) {
5009 drm_dbg_kms(&i915->drm, "Error in writing link service irq vector\n");
5010 return;
5011 }
5012
5013 if (val & HDMI_LINK_STATUS_CHANGED)
5014 intel_dp_handle_hdmi_link_status_change(intel_dp);
5015}
5016
5017
5018
5019
5020
5021
5022
5023
5024
5025
5026
5027
5028
5029
/*
 * intel_dp_short_pulse - handles short pulse interrupts
 * when full detection is not required.
 * Returns %true if short pulse is handled and full detection
 * is NOT required and %false otherwise.
 */
static bool
intel_dp_short_pulse(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 old_sink_count = intel_dp->sink_count;
	bool ret;

	/*
	 * Clearing compliance test variables to allow capturing
	 * of values for next automated test request.
	 */
	memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));

	/*
	 * Now read the DPCD to see if it's actually running
	 * If the current value of sink count doesn't match with
	 * the value that was stored earlier or dpcd read failed
	 * we need to do full detection
	 */
	ret = intel_dp_get_dpcd(intel_dp);

	if ((old_sink_count != intel_dp->sink_count) || !ret) {
		/* No need to proceed if we are going to do full detect */
		return false;
	}

	intel_dp_check_device_service_irq(intel_dp);
	intel_dp_check_link_service_irq(intel_dp);

	/* Handle CEC interrupts, if any */
	drm_dp_cec_irq(&intel_dp->aux);

	/* defer to the hotplug work for link retraining if needed */
	if (intel_dp_needs_link_retrain(intel_dp))
		return false;

	intel_psr_short_pulse(intel_dp);

	switch (intel_dp->compliance.test_type) {
	case DP_TEST_LINK_TRAINING:
		drm_dbg_kms(&dev_priv->drm,
			    "Link Training Compliance Test requested\n");
		/* Send a Hotplug Uevent to userspace to start modeset */
		drm_kms_helper_hotplug_event(&dev_priv->drm);
		break;
	case DP_TEST_LINK_PHY_TEST_PATTERN:
		drm_dbg_kms(&dev_priv->drm,
			    "PHY test pattern Compliance Test requested\n");
		/*
		 * Returning false here triggers the full (long pulse)
		 * detection path, where the PHY test is actually run
		 * (see intel_dp_hotplug()).
		 */
		return false;
	}

	return true;
}
5089
5090
/*
 * Assess connector status for a DP (non-eDP) port whose HPD reports
 * the port as live, based on the DPCD and branch device capabilities.
 */
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	u8 *dpcd = intel_dp->dpcd;
	u8 type;

	/* Can't disconnect eDP */
	if (drm_WARN_ON(&i915->drm, intel_dp_is_edp(intel_dp)))
		return connector_status_connected;

	lspcon_resume(dig_port);

	if (!intel_dp_get_dpcd(intel_dp))
		return connector_status_disconnected;

	/* if there's no downstream port, we're done */
	if (!drm_dp_is_branch(dpcd))
		return connector_status_connected;

	/* If we're HPD-aware, SINK_COUNT changes dynamically */
	if (intel_dp_has_sink_count(intel_dp) &&
	    intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
		return intel_dp->sink_count ?
		connector_status_connected : connector_status_disconnected;
	}

	if (intel_dp_can_mst(intel_dp))
		return connector_status_connected;

	/* If no HPD, poke DDC gently */
	if (drm_probe_ddc(&intel_dp->aux.ddc))
		return connector_status_connected;

	/* Well we tried, say unknown for unreliable port types */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
		type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
		if (type == DP_DS_PORT_TYPE_VGA ||
		    type == DP_DS_PORT_TYPE_NON_EDID)
			return connector_status_unknown;
	} else {
		type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
			DP_DWN_STRM_PORT_TYPE_MASK;
		if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
		    type == DP_DWN_STRM_PORT_TYPE_OTHER)
			return connector_status_unknown;
	}

	/* Anything else is out of spec, warn and ignore */
	drm_dbg_kms(&i915->drm, "Broken DP branch device, ignoring\n");
	return connector_status_disconnected;
}
5143
5144static enum drm_connector_status
5145edp_detect(struct intel_dp *intel_dp)
5146{
5147 return connector_status_connected;
5148}
5149
5150static bool ibx_digital_port_connected(struct intel_encoder *encoder)
5151{
5152 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5153 u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];
5154
5155 return intel_de_read(dev_priv, SDEISR) & bit;
5156}
5157
5158static bool g4x_digital_port_connected(struct intel_encoder *encoder)
5159{
5160 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5161 u32 bit;
5162
5163 switch (encoder->hpd_pin) {
5164 case HPD_PORT_B:
5165 bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
5166 break;
5167 case HPD_PORT_C:
5168 bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
5169 break;
5170 case HPD_PORT_D:
5171 bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
5172 break;
5173 default:
5174 MISSING_CASE(encoder->hpd_pin);
5175 return false;
5176 }
5177
5178 return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
5179}
5180
5181static bool gm45_digital_port_connected(struct intel_encoder *encoder)
5182{
5183 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5184 u32 bit;
5185
5186 switch (encoder->hpd_pin) {
5187 case HPD_PORT_B:
5188 bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
5189 break;
5190 case HPD_PORT_C:
5191 bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
5192 break;
5193 case HPD_PORT_D:
5194 bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
5195 break;
5196 default:
5197 MISSING_CASE(encoder->hpd_pin);
5198 return false;
5199 }
5200
5201 return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
5202}
5203
5204static bool ilk_digital_port_connected(struct intel_encoder *encoder)
5205{
5206 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5207 u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];
5208
5209 return intel_de_read(dev_priv, DEISR) & bit;
5210}
5211
5212
5213
5214
5215
5216
5217
5218
5219
5220
5221
5222
/*
 * intel_digital_port_connected - is the specified port connected?
 * @encoder: intel_encoder
 *
 * In cases where there's a connector physically connected but it can't
 * be used by our hardware we also return false, since the rest of the
 * driver should pretty much treat the port as disconnected (relevant
 * for type-C ports where ownership is involved).
 *
 * Return: %true if the port is connected, %false otherwise.
 */
bool intel_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_connected = false;
	intel_wakeref_t wakeref;

	/* the platform-specific ->connected() hook needs display power */
	with_intel_display_power(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref)
		is_connected = dig_port->connected(encoder);

	return is_connected;
}
5235
5236static struct edid *
5237intel_dp_get_edid(struct intel_dp *intel_dp)
5238{
5239 struct intel_connector *intel_connector = intel_dp->attached_connector;
5240
5241
5242 if (intel_connector->edid) {
5243
5244 if (IS_ERR(intel_connector->edid))
5245 return NULL;
5246
5247 return drm_edid_duplicate(intel_connector->edid);
5248 } else
5249 return drm_get_edid(&intel_connector->base,
5250 &intel_dp->aux.ddc);
5251}
5252
/*
 * Cache the downstream facing port (DFP) capabilities - max bpc, max
 * dotclock, TMDS clock range and PCON max FRL bandwidth - from the
 * branch device's DPCD and the sink's EDID, then read the PCON DSC
 * capabilities. Called from intel_dp_set_edid() on every detect.
 */
static void
intel_dp_update_dfp(struct intel_dp *intel_dp,
		    const struct edid *edid)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;

	intel_dp->dfp.max_bpc =
		drm_dp_downstream_max_bpc(intel_dp->dpcd,
					  intel_dp->downstream_ports, edid);

	intel_dp->dfp.max_dotclock =
		drm_dp_downstream_max_dotclock(intel_dp->dpcd,
					       intel_dp->downstream_ports);

	intel_dp->dfp.min_tmds_clock =
		drm_dp_downstream_min_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 edid);
	intel_dp->dfp.max_tmds_clock =
		drm_dp_downstream_max_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 edid);

	intel_dp->dfp.pcon_max_frl_bw =
		drm_dp_get_pcon_max_frl_bw(intel_dp->dpcd,
					   intel_dp->downstream_ports);

	drm_dbg_kms(&i915->drm,
		    "[CONNECTOR:%d:%s] DFP max bpc %d, max dotclock %d, TMDS clock %d-%d, PCON Max FRL BW %dGbps\n",
		    connector->base.base.id, connector->base.name,
		    intel_dp->dfp.max_bpc,
		    intel_dp->dfp.max_dotclock,
		    intel_dp->dfp.min_tmds_clock,
		    intel_dp->dfp.max_tmds_clock,
		    intel_dp->dfp.pcon_max_frl_bw);

	intel_dp_get_pcon_dsc_cap(intel_dp);
}
5292
/*
 * Work out the YCbCr 4:2:0 strategy for the current sink/DFP
 * (passthrough, 4:4:4->4:2:0 conversion, RGB->YCbCr conversion),
 * cache it in intel_dp->dfp and update connector->ycbcr_420_allowed.
 */
static void
intel_dp_update_420(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	bool is_branch, ycbcr_420_passthrough, ycbcr_444_to_420, rgb_to_ycbcr;

	/* No YCbCr output support on gmch platforms */
	if (HAS_GMCH(i915))
		return;

	/*
	 * ILK doesn't seem capable of DP YCbCr output. The
	 * displayed image is severly corrupted. SNB+ is fine.
	 */
	if (IS_GEN(i915, 5))
		return;

	is_branch = drm_dp_is_branch(intel_dp->dpcd);
	ycbcr_420_passthrough =
		drm_dp_downstream_420_passthrough(intel_dp->dpcd,
						  intel_dp->downstream_ports);
	/* on-board LSPCON always assumed to support 4:4:4->4:2:0 conversion */
	ycbcr_444_to_420 =
		dp_to_dig_port(intel_dp)->lspcon.active ||
		drm_dp_downstream_444_to_420_conversion(intel_dp->dpcd,
							intel_dp->downstream_ports);
	rgb_to_ycbcr = drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
								 intel_dp->downstream_ports,
								 DP_DS_HDMI_BT601_RGB_YCBCR_CONV |
								 DP_DS_HDMI_BT709_RGB_YCBCR_CONV |
								 DP_DS_HDMI_BT2020_RGB_YCBCR_CONV);

	if (INTEL_GEN(i915) >= 11) {
		/* Let the PCON convert from RGB->YCbCr if it can */
		if (is_branch && rgb_to_ycbcr && ycbcr_444_to_420) {
			intel_dp->dfp.rgb_to_ycbcr = true;
			intel_dp->dfp.ycbcr_444_to_420 = true;
			connector->base.ycbcr_420_allowed = true;
		} else {
			/* Prefer 4:2:0 passthrough over 4:4:4->4:2:0 conversion */
			intel_dp->dfp.ycbcr_444_to_420 =
				ycbcr_444_to_420 && !ycbcr_420_passthrough;

			connector->base.ycbcr_420_allowed =
				!is_branch || ycbcr_444_to_420 || ycbcr_420_passthrough;
		}
	} else {
		/* 4:4:4->4:2:0 conversion is the only way pre-gen11 */
		intel_dp->dfp.ycbcr_444_to_420 = ycbcr_444_to_420;

		connector->base.ycbcr_420_allowed = ycbcr_444_to_420;
	}

	drm_dbg_kms(&i915->drm,
		    "[CONNECTOR:%d:%s] RGB->YcbCr conversion? %s, YCbCr 4:2:0 allowed? %s, YCbCr 4:4:4->4:2:0 conversion? %s\n",
		    connector->base.base.id, connector->base.name,
		    yesno(intel_dp->dfp.rgb_to_ycbcr),
		    yesno(connector->base.ycbcr_420_allowed),
		    yesno(intel_dp->dfp.ycbcr_444_to_420));
}
5354
/*
 * Probe and cache the sink's EDID, then refresh everything derived
 * from it: DFP capabilities, YCbCr 4:2:0 support, HDMI-sink/audio
 * detection and the CEC address.
 */
static void
intel_dp_set_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct edid *edid;

	/* drop any previously cached EDID and derived state first */
	intel_dp_unset_edid(intel_dp);
	edid = intel_dp_get_edid(intel_dp);
	connector->detect_edid = edid;

	intel_dp_update_dfp(intel_dp, edid);
	intel_dp_update_420(intel_dp);

	if (edid && edid->input & DRM_EDID_INPUT_DIGITAL) {
		intel_dp->has_hdmi_sink = drm_detect_hdmi_monitor(edid);
		intel_dp->has_audio = drm_detect_monitor_audio(edid);
	}

	drm_dp_cec_set_edid(&intel_dp->aux, edid);
}
5375
/*
 * Drop the cached EDID and reset all state derived from it.
 * Safe to call with no EDID cached (kfree(NULL) is a no-op).
 */
static void
intel_dp_unset_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	drm_dp_cec_unset_edid(&intel_dp->aux);
	kfree(connector->detect_edid);
	connector->detect_edid = NULL;

	intel_dp->has_hdmi_sink = false;
	intel_dp->has_audio = false;

	intel_dp->dfp.max_bpc = 0;
	intel_dp->dfp.max_dotclock = 0;
	intel_dp->dfp.min_tmds_clock = 0;
	intel_dp->dfp.max_tmds_clock = 0;

	intel_dp->dfp.pcon_max_frl_bw = 0;

	/*
	 * NOTE(review): dfp.rgb_to_ycbcr (set in intel_dp_update_420()) is
	 * not reset here - verify whether stale state can leak across a
	 * replug of a different sink.
	 */
	intel_dp->dfp.ycbcr_444_to_420 = false;
	connector->base.ycbcr_420_allowed = false;
}
5398
/*
 * .detect_ctx connector hook: full (long pulse) detection. Determines
 * connector status, refreshes DPCD-derived state (DSC caps, MST, link
 * params), re-reads the EDID and retrains the link if needed.
 *
 * Returns a drm_connector_status, or a negative error such as -EDEADLK
 * from the retrain path (caller backs off and retries).
 */
static int
intel_dp_detect(struct drm_connector *connector,
		struct drm_modeset_acquire_ctx *ctx,
		bool force)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	enum drm_connector_status status;

	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.id, connector->name);
	drm_WARN_ON(&dev_priv->drm,
		    !drm_modeset_is_locked(&dev_priv->drm.mode_config.connection_mutex));

	if (!INTEL_DISPLAY_ENABLED(dev_priv))
		return connector_status_disconnected;

	/* Can't disconnect eDP */
	if (intel_dp_is_edp(intel_dp))
		status = edp_detect(intel_dp);
	else if (intel_digital_port_connected(encoder))
		status = intel_dp_detect_dpcd(intel_dp);
	else
		status = connector_status_disconnected;

	if (status == connector_status_disconnected) {
		memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
		memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));

		/* tear down MST topology state if the device vanished */
		if (intel_dp->is_mst) {
			drm_dbg_kms(&dev_priv->drm,
				    "MST device may have disappeared %d vs %d\n",
				    intel_dp->is_mst,
				    intel_dp->mst_mgr.mst_state);
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
							intel_dp->is_mst);
		}

		goto out;
	}

	/* Read DP Sink DSC Cap DPCD regs for DP v1.4 */
	if (INTEL_GEN(dev_priv) >= 11)
		intel_dp_get_dsc_sink_cap(intel_dp);

	intel_dp_configure_mst(intel_dp);

	/*
	 * Reset the max link params whenever requested, and also when in
	 * MST mode since MST doesn't support link training fallback params.
	 */
	if (intel_dp->reset_link_params || intel_dp->is_mst) {
		/* Initial max link lane count */
		intel_dp->max_link_lane_count = intel_dp_max_common_lane_count(intel_dp);

		/* Initial max link rate */
		intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);

		intel_dp->reset_link_params = false;
	}

	intel_dp_print_rates(intel_dp);

	if (intel_dp->is_mst) {
		/*
		 * If we are in MST mode then this connector
		 * won't appear connected or have anything
		 * with EDID on it
		 */
		status = connector_status_disconnected;
		goto out;
	}

	/*
	 * Some external monitors do not signal loss of link synchronization
	 * with an IRQ_HPD, so force a link status check.
	 */
	if (!intel_dp_is_edp(intel_dp)) {
		int ret;

		ret = intel_dp_retrain_link(encoder, ctx);
		if (ret)
			return ret;
	}

	/*
	 * Clearing NACK and defer counts to get their exact values
	 * while reading EDID which are required by Compliance tests
	 * 4.2.2.4 and 4.2.2.5
	 */
	intel_dp->aux.i2c_nack_count = 0;
	intel_dp->aux.i2c_defer_count = 0;

	intel_dp_set_edid(intel_dp);
	if (intel_dp_is_edp(intel_dp) ||
	    to_intel_connector(connector)->detect_edid)
		status = connector_status_connected;

	intel_dp_check_device_service_irq(intel_dp);

out:
	if (status != connector_status_connected && !intel_dp->is_mst)
		intel_dp_unset_edid(intel_dp);

	/*
	 * Make sure the refs for power wells enabled during detect are
	 * dropped to avoid a new detect cycle triggered by HPD polling.
	 */
	intel_display_power_flush_work(dev_priv);

	if (!intel_dp_is_edp(intel_dp))
		drm_dp_set_subconnector_property(connector,
						 status,
						 intel_dp->dpcd,
						 intel_dp->downstream_ports);
	return status;
}
5519
/*
 * .force connector hook: refresh the EDID for a connector whose status
 * was forced by the user, without running a live detect cycle. Grabs
 * the AUX power domain for the duration of the DDC/AUX transactions.
 */
static void
intel_dp_force(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(intel_encoder->base.dev);
	enum intel_display_power_domain aux_domain =
		intel_aux_power_domain(dig_port);
	intel_wakeref_t wakeref;

	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.id, connector->name);
	intel_dp_unset_edid(intel_dp);

	if (connector->status != connector_status_connected)
		return;

	wakeref = intel_display_power_get(dev_priv, aux_domain);

	intel_dp_set_edid(intel_dp);

	intel_display_power_put(dev_priv, aux_domain, wakeref);
}
5544
/*
 * .get_modes connector hook. Modes come from the cached EDID when
 * available; otherwise fall back to the eDP fixed mode or, failing
 * that, the downstream facing port's reported mode.
 * Returns the number of modes added.
 */
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct edid *edid;

	edid = intel_connector->detect_edid;
	if (edid) {
		int ret = intel_connector_update_modes(connector, edid);

		if (intel_vrr_is_capable(connector))
			drm_connector_set_vrr_capable_property(connector,
							       true);
		if (ret)
			return ret;
	}

	/* if eDP has no EDID, fall back to fixed mode */
	if (intel_dp_is_edp(intel_attached_dp(intel_connector)) &&
	    intel_connector->panel.fixed_mode) {
		struct drm_display_mode *mode;

		mode = drm_mode_duplicate(connector->dev,
					  intel_connector->panel.fixed_mode);
		if (mode) {
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	if (!edid) {
		struct intel_dp *intel_dp = intel_attached_dp(intel_connector);
		struct drm_display_mode *mode;

		/* no EDID at all: try the DFP's fixed mode (e.g. DP->VGA) */
		mode = drm_dp_downstream_mode(connector->dev,
					      intel_dp->dpcd,
					      intel_dp->downstream_ports);
		if (mode) {
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	return 0;
}
5589
/*
 * .late_register connector hook: register the base connector, the DP
 * AUX channel (sysfs node under the connector device), the CEC
 * adapter, and - when an LSPCON is present - its HDR property.
 */
static int
intel_dp_connector_register(struct drm_connector *connector)
{
	struct drm_i915_private *i915 = to_i915(connector->dev);
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_lspcon *lspcon = &dig_port->lspcon;
	int ret;

	ret = intel_connector_register(connector);
	if (ret)
		return ret;

	drm_dbg_kms(&i915->drm, "registering %s bus for %s\n",
		    intel_dp->aux.name, connector->kdev->kobj.name);

	intel_dp->aux.dev = connector->kdev;
	ret = drm_dp_aux_register(&intel_dp->aux);
	if (!ret)
		drm_dp_cec_register_connector(&intel_dp->aux, connector);

	if (!intel_bios_is_lspcon_present(i915, dig_port->base.port))
		return ret;

	/*
	 * NOTE(review): lspcon_init() at registration time looks like a
	 * workaround; confirm whether init/resume handling can be unified.
	 */
	if (lspcon_init(dig_port)) {
		lspcon_detect_hdr_capability(lspcon);
		if (lspcon->hdr_supported)
			drm_object_attach_property(&connector->base,
						   connector->dev->mode_config.hdr_output_metadata_property,
						   0);
	}

	return ret;
}
5628
/*
 * .early_unregister hook: tear down in reverse order of
 * intel_dp_connector_register() - CEC adapter first, then the AUX
 * channel, then the base connector.
 */
static void
intel_dp_connector_unregister(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));

	drm_dp_cec_unregister_connector(&intel_dp->aux);
	drm_dp_aux_unregister(&intel_dp->aux);
	intel_connector_unregister(connector);
}
5638
/*
 * Flush/tear down everything that may still be pending for the
 * encoder: MST state, any async panel-VDD-off work (synchronously),
 * and finally the AUX channel.
 */
void intel_dp_encoder_flush_work(struct drm_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));
	struct intel_dp *intel_dp = &dig_port->dp;

	intel_dp_mst_encoder_cleanup(dig_port);

	/* sync pending VDD-off work before the AUX channel goes away */
	intel_pps_vdd_off_sync(intel_dp);

	intel_dp_aux_fini(intel_dp);
}
5650
/* .destroy encoder hook: flush pending work, then free the dig port. */
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	/* the digital port embeds the encoder, so it must be freed last */
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}
5658
/* Synchronously drop any pending panel VDD reference before suspend. */
void intel_dp_encoder_suspend(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);

	intel_pps_vdd_off_sync(intel_dp);
}
5665
/*
 * On shutdown, wait out the panel power cycle delay so a subsequent
 * driver load / reboot can power the panel back up immediately.
 */
void intel_dp_encoder_shutdown(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);

	intel_pps_wait_power_cycle(intel_dp);
}
5672
5673static enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
5674{
5675 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
5676 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
5677 enum pipe pipe;
5678
5679 if (intel_dp_port_enabled(dev_priv, intel_dp->output_reg,
5680 encoder->port, &pipe))
5681 return pipe;
5682
5683 return INVALID_PIPE;
5684}
5685
/*
 * .reset encoder hook: resync cached software state (port register
 * value, PPS pipe) with the hardware and force link params to be
 * re-read on the next detect.
 */
void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	/* on non-DDI platforms we cache the port control register value */
	if (!HAS_DDI(dev_priv))
		intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		/* re-read which pipe drives the port for PPS bookkeeping */
		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}
5705
/*
 * Add every connector belonging to @tile_group_id (plus its CRTC's
 * planes) to the atomic state and force a modeset on those CRTCs, so
 * that all tiles of a tiled display are updated together.
 */
static int intel_modeset_tile_group(struct intel_atomic_state *state,
				    int tile_group_id)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct drm_connector_list_iter conn_iter;
	struct drm_connector *connector;
	int ret = 0;

	drm_connector_list_iter_begin(&dev_priv->drm, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!connector->has_tile ||
		    connector->tile_group->id != tile_group_id)
			continue;

		conn_state = drm_atomic_get_connector_state(&state->base,
							    connector);
		if (IS_ERR(conn_state)) {
			ret = PTR_ERR(conn_state);
			break;
		}

		crtc = to_intel_crtc(conn_state->crtc);

		if (!crtc)
			continue;

		/*
		 * NOTE(review): assumes the CRTC state is already part of
		 * @state at this point (get_new_crtc_state doesn't add it) -
		 * confirm against the callers' state acquisition.
		 */
		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}
5747
/*
 * Force a modeset on every enabled CRTC whose transcoder is in
 * @transcoders (a bitmask of enum transcoder), pulling the affected
 * connectors and planes into the atomic state as well. Warns if any
 * requested transcoder was not found on an enabled CRTC.
 */
static int intel_modeset_affected_transcoders(struct intel_atomic_state *state, u8 transcoders)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	if (transcoders == 0)
		return 0;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state;
		int ret;

		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);

		if (!crtc_state->hw.enable)
			continue;

		if (!(transcoders & BIT(crtc_state->cpu_transcoder)))
			continue;

		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_connectors(&state->base, &crtc->base);
		if (ret)
			return ret;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			return ret;

		/* mark this transcoder as handled */
		transcoders &= ~BIT(crtc_state->cpu_transcoder);
	}

	drm_WARN_ON(&dev_priv->drm, transcoders != 0);

	return 0;
}
5787
/*
 * When @connector was part of a port-synced (master/slave transcoder)
 * configuration, force a modeset on all transcoders in that sync group so
 * they are reprogrammed together.
 *
 * Returns 0 if the connector had no active CRTC (nothing to do), otherwise
 * the result of intel_modeset_affected_transcoders().
 */
static int intel_modeset_synced_crtcs(struct intel_atomic_state *state,
				      struct drm_connector *connector)
{
	const struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(&state->base, connector);
	const struct intel_crtc_state *old_crtc_state;
	struct intel_crtc *crtc;
	u8 transcoders;

	crtc = to_intel_crtc(old_conn_state->crtc);
	if (!crtc)
		return 0;

	old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc);

	if (!old_crtc_state->hw.active)
		return 0;

	/* Collect the slaves plus (if any) the master transcoder */
	transcoders = old_crtc_state->sync_mode_slaves_mask;
	if (old_crtc_state->master_transcoder != INVALID_TRANSCODER)
		transcoders |= BIT(old_crtc_state->master_transcoder);

	return intel_modeset_affected_transcoders(state,
						  transcoders);
}
5813
/*
 * Atomic check hook for DP connectors: runs the common digital-connector
 * check, then, on gen9+ platforms, propagates a modeset on this connector
 * to any sibling tiles and port-synced transcoders.
 */
static int intel_dp_connector_atomic_check(struct drm_connector *conn,
					   struct drm_atomic_state *_state)
{
	struct drm_i915_private *dev_priv = to_i915(conn->dev);
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	int ret;

	ret = intel_digital_connector_atomic_check(conn, &state->base);
	if (ret)
		return ret;

	/*
	 * Tile-group / port-sync propagation below is only applied on
	 * gen9+; older platforms return early with just the common check.
	 */
	if (INTEL_GEN(dev_priv) < 9)
		return 0;

	if (!intel_connector_needs_modeset(state, conn))
		return 0;

	if (conn->has_tile) {
		ret = intel_modeset_tile_group(state, conn->tile_group->id);
		if (ret)
			return ret;
	}

	return intel_modeset_synced_crtcs(state, conn);
}
5843
/* Connector vfuncs shared by DP and eDP connectors. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.force = intel_dp_force,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = intel_dp_connector_register,
	.early_unregister = intel_dp_connector_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
5855
/* Probe/modeset helper vfuncs for DP connectors. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.detect_ctx = intel_dp_detect,
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.atomic_check = intel_dp_connector_atomic_check,
};
5862
/* Encoder vfuncs for the g4x/VLV/CHV-style DP encoder created below. */
static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};
5867
/*
 * Handle an HPD pulse on a DP port.
 *
 * Long pulses only flag the link parameters for re-computation (the caller
 * performs the actual re-detection); short pulses are serviced inline via
 * the MST or SST short-pulse handlers.
 *
 * Returns IRQ_HANDLED when the pulse was fully dealt with here, IRQ_NONE
 * when the caller needs to do further processing (e.g. full detection).
 */
enum irqreturn
intel_dp_hpd_pulse(struct intel_digital_port *dig_port, bool long_hpd)
{
	struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
	struct intel_dp *intel_dp = &dig_port->dp;

	if (dig_port->base.type == INTEL_OUTPUT_EDP &&
	    (long_hpd || !intel_pps_have_power(intel_dp))) {
		/*
		 * Ignore long/unpowered HPD on eDP: servicing it would need
		 * panel power, and presumably powering up in response could
		 * retrigger HPD and loop — NOTE(review): rationale inferred,
		 * confirm against eDP PPS behavior.
		 */
		drm_dbg_kms(&i915->drm,
			    "ignoring %s hpd on eDP [ENCODER:%d:%s]\n",
			    long_hpd ? "long" : "short",
			    dig_port->base.base.base.id,
			    dig_port->base.base.name);
		return IRQ_HANDLED;
	}

	drm_dbg_kms(&i915->drm, "got hpd irq on [ENCODER:%d:%s] - %s\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name,
		    long_hpd ? "long" : "short");

	if (long_hpd) {
		/* Full re-detect needed; let the caller handle it */
		intel_dp->reset_link_params = true;
		return IRQ_NONE;
	}

	if (intel_dp->is_mst) {
		if (!intel_dp_check_mst_status(intel_dp))
			return IRQ_NONE;
	} else if (!intel_dp_short_pulse(intel_dp)) {
		return IRQ_NONE;
	}

	return IRQ_HANDLED;
}
5909
5910
5911bool intel_dp_is_port_edp(struct drm_i915_private *dev_priv, enum port port)
5912{
5913
5914
5915
5916
5917 if (INTEL_GEN(dev_priv) < 5)
5918 return false;
5919
5920 if (INTEL_GEN(dev_priv) < 9 && port == PORT_A)
5921 return true;
5922
5923 return intel_bios_is_port_edp(dev_priv, port);
5924}
5925
/*
 * Attach all drm properties supported by this DP/eDP connector:
 * subconnector, audio, broadcast RGB, max bpc, colorspace/content-type,
 * HDR metadata, eDP scaling modes and VRR capability, gated on platform
 * capabilities and output type.
 */
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	enum port port = dp_to_dig_port(intel_dp)->base.port;

	/* The DP subconnector property only makes sense for external DP */
	if (!intel_dp_is_edp(intel_dp))
		drm_connector_attach_dp_subconnector_property(connector);

	if (!IS_G4X(dev_priv) && port != PORT_A)
		intel_attach_force_audio_property(connector);

	intel_attach_broadcast_rgb_property(connector);
	/* GMCH platforms cap at 10 bpc, gen5+ at 12 bpc */
	if (HAS_GMCH(dev_priv))
		drm_connector_attach_max_bpc_property(connector, 6, 10);
	else if (INTEL_GEN(dev_priv) >= 5)
		drm_connector_attach_max_bpc_property(connector, 6, 12);

	/* LSPCON converts to HDMI, so expose HDMI-style properties there */
	if (intel_bios_is_lspcon_present(dev_priv, port)) {
		drm_connector_attach_content_type_property(connector);
		intel_attach_hdmi_colorspace_property(connector);
	} else {
		intel_attach_dp_colorspace_property(connector);
	}

	if (IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 11)
		drm_object_attach_property(&connector->base,
					   connector->dev->mode_config.hdr_output_metadata_property,
					   0);

	if (intel_dp_is_edp(intel_dp)) {
		u32 allowed_scalers;

		allowed_scalers = BIT(DRM_MODE_SCALE_ASPECT) | BIT(DRM_MODE_SCALE_FULLSCREEN);
		if (!HAS_GMCH(dev_priv))
			allowed_scalers |= BIT(DRM_MODE_SCALE_CENTER);

		drm_connector_attach_scaling_mode_property(connector, allowed_scalers);

		/* Default to aspect-preserving scaling */
		connector->state->scaling_mode = DRM_MODE_SCALE_ASPECT;

	}

	if (HAS_VRR(dev_priv))
		drm_connector_attach_vrr_capable_property(connector);
}
5973
5974
5975
5976
5977
5978
5979
5980
5981
5982
5983
5984
5985
5986
/*
 * intel_dp_set_drrs_state - program a DRRS refresh-rate switch
 * @dev_priv: i915 device
 * @crtc_state: CRTC state of the eDP pipe
 * @refresh_rate: target refresh rate in Hz (must be > 0)
 *
 * Selects DRRS_LOW_RR when @refresh_rate matches the panel's downclock
 * mode, otherwise DRRS_HIGH_RR, and programs the switch: via the M/N
 * divider sets (M1_N1/M2_N2) on gen8+ (except CHV), or via the PIPECONF
 * RR-mode-switch bit on gen7+.
 *
 * Caller context: presumably called with drrs.mutex held — callers in
 * this file hold it; confirm before adding new call sites.
 */
static void intel_dp_set_drrs_state(struct drm_i915_private *dev_priv,
				    const struct intel_crtc_state *crtc_state,
				    int refresh_rate)
{
	struct intel_dp *intel_dp = dev_priv->drrs.dp;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum drrs_refresh_rate_type index = DRRS_HIGH_RR;

	if (refresh_rate <= 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "Refresh rate should be positive non-zero.\n");
		return;
	}

	if (intel_dp == NULL) {
		drm_dbg_kms(&dev_priv->drm, "DRRS not supported.\n");
		return;
	}

	if (!intel_crtc) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS: intel_crtc not initialized\n");
		return;
	}

	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "Only Seamless DRRS supported.\n");
		return;
	}

	/* Requesting the downclock mode's rate means switching to low RR */
	if (drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode) ==
	    refresh_rate)
		index = DRRS_LOW_RR;

	if (index == dev_priv->drrs.refresh_rate_type) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS requested for previously set RR...ignoring\n");
		return;
	}

	if (!crtc_state->hw.active) {
		drm_dbg_kms(&dev_priv->drm,
			    "eDP encoder disabled. CRTC not Active\n");
		return;
	}

	if (INTEL_GEN(dev_priv) >= 8 && !IS_CHERRYVIEW(dev_priv)) {
		/* gen8+: switch by selecting the M/N divider set */
		switch (index) {
		case DRRS_HIGH_RR:
			intel_dp_set_m_n(crtc_state, M1_N1);
			break;
		case DRRS_LOW_RR:
			intel_dp_set_m_n(crtc_state, M2_N2);
			break;
		case DRRS_MAX_RR:
		default:
			drm_err(&dev_priv->drm,
				"Unsupported refreshrate type\n");
		}
	} else if (INTEL_GEN(dev_priv) > 6) {
		/* gen7: toggle the PIPECONF RR-mode-switch bit */
		i915_reg_t reg = PIPECONF(crtc_state->cpu_transcoder);
		u32 val;

		val = intel_de_read(dev_priv, reg);
		if (index > DRRS_HIGH_RR) {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val |= PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val |= PIPECONF_EDP_RR_MODE_SWITCH;
		} else {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH;
		}
		intel_de_write(dev_priv, reg, val);
	}

	dev_priv->drrs.refresh_rate_type = index;

	drm_dbg_kms(&dev_priv->drm, "eDP Refresh Rate set to : %dHz\n",
		    refresh_rate);
}
6070
/*
 * Mark DRRS active for @intel_dp and clear any stale frontbuffer-busy
 * bits. Caller must hold drrs.mutex (hence "_locked").
 */
static void
intel_edp_drrs_enable_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	dev_priv->drrs.busy_frontbuffer_bits = 0;
	dev_priv->drrs.dp = intel_dp;
}
6079
6080
6081
6082
6083
6084
6085
6086
6087void intel_edp_drrs_enable(struct intel_dp *intel_dp,
6088 const struct intel_crtc_state *crtc_state)
6089{
6090 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
6091
6092 if (!crtc_state->has_drrs)
6093 return;
6094
6095 drm_dbg_kms(&dev_priv->drm, "Enabling DRRS\n");
6096
6097 mutex_lock(&dev_priv->drrs.mutex);
6098
6099 if (dev_priv->drrs.dp) {
6100 drm_warn(&dev_priv->drm, "DRRS already enabled\n");
6101 goto unlock;
6102 }
6103
6104 intel_edp_drrs_enable_locked(intel_dp);
6105
6106unlock:
6107 mutex_unlock(&dev_priv->drrs.mutex);
6108}
6109
/*
 * Tear down DRRS tracking: if the panel is currently downclocked, switch
 * back to the fixed (high) refresh rate first, then clear the active
 * encoder. Caller must hold drrs.mutex.
 */
static void
intel_edp_drrs_disable_locked(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR) {
		int refresh;

		refresh = drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode);
		intel_dp_set_drrs_state(dev_priv, crtc_state, refresh);
	}

	dev_priv->drrs.dp = NULL;
}
6125
6126
6127
6128
6129
6130
6131
6132void intel_edp_drrs_disable(struct intel_dp *intel_dp,
6133 const struct intel_crtc_state *old_crtc_state)
6134{
6135 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
6136
6137 if (!old_crtc_state->has_drrs)
6138 return;
6139
6140 mutex_lock(&dev_priv->drrs.mutex);
6141 if (!dev_priv->drrs.dp) {
6142 mutex_unlock(&dev_priv->drrs.mutex);
6143 return;
6144 }
6145
6146 intel_edp_drrs_disable_locked(intel_dp, old_crtc_state);
6147 mutex_unlock(&dev_priv->drrs.mutex);
6148
6149 cancel_delayed_work_sync(&dev_priv->drrs.work);
6150}
6151
6152
6153
6154
6155
6156
6157
6158
6159
6160
/*
 * intel_edp_drrs_update - reconcile DRRS tracking with a new CRTC state
 * @intel_dp: the eDP encoder
 * @crtc_state: new CRTC state
 *
 * Used on fastsets: enables or disables DRRS tracking so that it matches
 * @crtc_state->has_drrs, without a full modeset.
 */
void
intel_edp_drrs_update(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
		return;

	mutex_lock(&dev_priv->drrs.mutex);

	/* Already in the desired state (enabled iff drrs.dp is set)? */
	if (crtc_state->has_drrs == !!dev_priv->drrs.dp)
		goto unlock;

	if (crtc_state->has_drrs)
		intel_edp_drrs_enable_locked(intel_dp);
	else
		intel_edp_drrs_disable_locked(intel_dp, crtc_state);

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}
6184
/*
 * Delayed work that downclocks the panel after a period of frontbuffer
 * idleness: if DRRS is active, nothing has marked the frontbuffer busy,
 * and we are not already at the low refresh rate, switch to the panel's
 * downclock mode.
 */
static void intel_edp_drrs_downclock_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), drrs.work.work);
	struct intel_dp *intel_dp;

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;

	if (!intel_dp)
		goto unlock;

	/*
	 * Stay at high RR while any frontbuffer bit is busy; the flush
	 * path reschedules this work once activity ceases.
	 */
	if (dev_priv->drrs.busy_frontbuffer_bits)
		goto unlock;

	if (dev_priv->drrs.refresh_rate_type != DRRS_LOW_RR) {
		struct drm_crtc *crtc = dp_to_dig_port(intel_dp)->base.base.crtc;

		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
					drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode));
	}

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}
6216
6217
6218
6219
6220
6221
6222
6223
6224
6225
6226
/*
 * intel_edp_drrs_invalidate - frontbuffer-invalidate hook for DRRS
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer planes being written
 *
 * Marks the DRRS pipe's frontbuffer bits busy and immediately restores
 * the high refresh rate if the panel was downclocked, so updates are not
 * displayed at the reduced rate.
 */
void intel_edp_drrs_invalidate(struct drm_i915_private *dev_priv,
			       unsigned int frontbuffer_bits)
{
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	/* Activity seen: no point downclocking for now */
	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;
	if (!intel_dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only bits belonging to the DRRS pipe matter */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;

	/* Upclock right away if we were at the low refresh rate */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
					drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));

	mutex_unlock(&dev_priv->drrs.mutex);
}
6260
6261
6262
6263
6264
6265
6266
6267
6268
6269
6270
6271
6272
/*
 * intel_edp_drrs_flush - frontbuffer-flush hook for DRRS
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer planes just flushed
 *
 * Clears the flushed bits from the busy mask, upclocks if we were at the
 * low refresh rate (so the flushed content shows at full rate), and — once
 * no bits remain busy — schedules the downclock work to kick in after 1s
 * of idleness.
 */
void intel_edp_drrs_flush(struct drm_i915_private *dev_priv,
			  unsigned int frontbuffer_bits)
{
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;
	if (!intel_dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only bits belonging to the DRRS pipe matter */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* Show the flushed update at the full refresh rate */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
					drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));

	/*
	 * All planes idle: arm the downclock timer; it will switch to the
	 * low refresh rate unless new activity cancels it first.
	 */
	if (!dev_priv->drrs.busy_frontbuffer_bits)
		schedule_delayed_work(&dev_priv->drrs.work,
				      msecs_to_jiffies(1000));
	mutex_unlock(&dev_priv->drrs.mutex);
}
6313
6314
6315
6316
6317
6318
6319
6320
6321
6322
6323
6324
6325
6326
6327
6328
6329
6330
6331
6332
6333
6334
6335
6336
6337
6338
6339
6340
6341
6342
6343
6344
6345
6346
6347
6348
6349
6350
6351
6352
6353
6354
6355
6356
6357
6358
6359
6360
6361
6362
6363
/*
 * intel_dp_drrs_init - probe DRRS support for an eDP panel
 * @connector: the eDP connector
 * @fixed_mode: the panel's preferred (high refresh rate) mode
 *
 * Initializes the DRRS work/mutex, then checks the platform (gen7+), the
 * VBT (seamless DRRS) and the EDID (a downclock mode must exist).
 *
 * Returns the downclock mode to register with the panel, or NULL if DRRS
 * cannot be supported.
 */
static struct drm_display_mode *
intel_dp_drrs_init(struct intel_connector *connector,
		   struct drm_display_mode *fixed_mode)
{
	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
	struct drm_display_mode *downclock_mode = NULL;

	/* Work/mutex are set up even if DRRS ends up unsupported */
	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_edp_drrs_downclock_work);
	mutex_init(&dev_priv->drrs.mutex);

	if (INTEL_GEN(dev_priv) <= 6) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS supported for Gen7 and above\n");
		return NULL;
	}

	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "VBT doesn't support DRRS\n");
		return NULL;
	}

	downclock_mode = intel_panel_edid_downclock_mode(connector, fixed_mode);
	if (!downclock_mode) {
		drm_dbg_kms(&dev_priv->drm,
			    "Downclock mode is not found. DRRS not supported\n");
		return NULL;
	}

	dev_priv->drrs.type = dev_priv->vbt.drrs_type;

	/* Start out at the full refresh rate */
	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
	drm_dbg_kms(&dev_priv->drm,
		    "seamless DRRS supported for eDP panel.\n");
	return downclock_mode;
}
6399
/*
 * intel_edp_init_connector - eDP-specific connector initialization
 * @intel_dp: the encoder's DP state
 * @intel_connector: the connector being initialized
 *
 * For eDP connectors: initializes panel power sequencing, reads the DPCD
 * and EDID, determines the fixed (and optional DRRS downclock) mode, and
 * sets up the panel and backlight. A no-op (returning true) for non-eDP.
 *
 * Returns false if eDP cannot be registered (LVDS conflict or no DPCD),
 * in which case the caller tears the connector down.
 */
static bool intel_edp_init_connector(struct intel_dp *intel_dp,
				     struct intel_connector *intel_connector)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct drm_device *dev = &dev_priv->drm;
	struct drm_connector *connector = &intel_connector->base;
	struct drm_display_mode *fixed_mode = NULL;
	struct drm_display_mode *downclock_mode = NULL;
	bool has_dpcd;
	enum pipe pipe = INVALID_PIPE;
	struct edid *edid;

	if (!intel_dp_is_edp(intel_dp))
		return true;

	/*
	 * An LVDS panel and eDP are mutually exclusive here; if LVDS was
	 * already registered, skip eDP (only expected on IBX/CPT PCHs).
	 */
	if (intel_get_lvds_encoder(dev_priv)) {
		drm_WARN_ON(dev,
			    !(HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)));
		drm_info(&dev_priv->drm,
			 "LVDS was detected, not registering eDP\n");

		return false;
	}

	intel_pps_init(intel_dp);

	/* Cache the panel's DPCD; without it eDP is unusable */
	has_dpcd = intel_edp_init_dpcd(intel_dp);

	if (!has_dpcd) {
		/* Panel did not respond on AUX: disable eDP entirely */
		drm_info(&dev_priv->drm,
			 "failed to retrieve link info, disabling eDP\n");
		goto out_vdd_off;
	}

	mutex_lock(&dev->mode_config.mutex);
	edid = drm_get_edid(connector, &intel_dp->aux.ddc);
	if (edid) {
		if (drm_add_edid_modes(connector, edid)) {
			drm_connector_update_edid_property(connector, edid);
		} else {
			/* EDID present but yields no modes: treat as invalid */
			kfree(edid);
			edid = ERR_PTR(-EINVAL);
		}
	} else {
		edid = ERR_PTR(-ENOENT);
	}
	intel_connector->edid = edid;

	fixed_mode = intel_panel_edid_fixed_mode(intel_connector);
	if (fixed_mode)
		downclock_mode = intel_dp_drrs_init(intel_connector, fixed_mode);

	/* Fall back to the VBT mode when the EDID provided none */
	if (!fixed_mode)
		fixed_mode = intel_panel_vbt_fixed_mode(intel_connector);
	mutex_unlock(&dev->mode_config.mutex);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		/*
		 * Pick the pipe used for the initial backlight setup:
		 * prefer the currently active pipe, then the PPS pipe,
		 * and finally default to pipe A.
		 */
		pipe = vlv_active_pipe(intel_dp);

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = intel_dp->pps.pps_pipe;

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = PIPE_A;

		drm_dbg_kms(&dev_priv->drm,
			    "using pipe %c for initial backlight setup\n",
			    pipe_name(pipe));
	}

	intel_panel_init(&intel_connector->panel, fixed_mode, downclock_mode);
	intel_connector->panel.backlight.power = intel_pps_backlight_power;
	intel_panel_setup_backlight(connector, pipe);

	if (fixed_mode) {
		drm_connector_set_panel_orientation_with_quirk(connector,
				dev_priv->vbt.orientation,
				fixed_mode->hdisplay, fixed_mode->vdisplay);
	}

	return true;

out_vdd_off:
	intel_pps_vdd_off_sync(intel_dp);

	return false;
}
6501
/*
 * Work item run after a link-training failure: marks the connector's link
 * status as BAD and sends a hotplug event so userspace re-probes and
 * retries the modeset (typically at reduced link parameters).
 */
static void intel_dp_modeset_retry_work_fn(struct work_struct *work)
{
	struct intel_connector *intel_connector;
	struct drm_connector *connector;

	intel_connector = container_of(work, typeof(*intel_connector),
				       modeset_retry_work);
	connector = &intel_connector->base;
	DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n", connector->base.id,
		      connector->name);

	/* Link status updates are protected by the mode_config mutex */
	mutex_lock(&connector->dev->mode_config.mutex);

	/* BAD link status prompts userspace to issue a new modeset */
	drm_connector_set_link_status_property(connector,
					       DRM_MODE_LINK_STATUS_BAD);
	mutex_unlock(&connector->dev->mode_config.mutex);

	/* Notify userspace so it notices the changed link status */
	drm_kms_helper_hotplug_event(connector->dev);
}
6524
/*
 * intel_dp_init_connector - create and wire up the DP/eDP connector
 * @dig_port: the digital port the connector belongs to
 * @intel_connector: pre-allocated connector to initialize
 *
 * Determines the connector type (DP vs eDP), registers the drm connector
 * with its funcs/helpers, initializes AUX, MST and (for eDP) the panel,
 * and attaches the supported properties. Optionally sets up HDCP.
 *
 * Returns true on success; false on failure, with the connector cleaned
 * up (the caller still owns the encoder).
 */
bool
intel_dp_init_connector(struct intel_digital_port *dig_port,
			struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_encoder *intel_encoder = &dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = intel_encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	int type;

	/* Work used to retry a modeset after link-training failure */
	INIT_WORK(&intel_connector->modeset_retry_work,
		  intel_dp_modeset_retry_work_fn);

	if (drm_WARN(dev, dig_port->max_lanes < 1,
		     "Not enough lanes (%d) for DP on [ENCODER:%d:%s]\n",
		     dig_port->max_lanes, intel_encoder->base.base.id,
		     intel_encoder->base.name))
		return false;

	intel_dp_set_source_rates(intel_dp);

	intel_dp->reset_link_params = true;
	intel_dp->pps.pps_pipe = INVALID_PIPE;
	intel_dp->pps.active_pipe = INVALID_PIPE;

	/* Cache the current port register value */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
	intel_dp->attached_connector = intel_connector;

	if (intel_dp_is_port_edp(dev_priv, port)) {
		/*
		 * eDP is never expected on a Type-C capable PHY —
		 * NOTE(review): inferred from the WARN below; confirm.
		 */
		drm_WARN_ON(dev, intel_phy_is_tc(dev_priv, phy));
		type = DRM_MODE_CONNECTOR_eDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);

	/*
	 * The encoder was created as generic DP; narrow it to eDP once the
	 * connector type is known.
	 */
	if (type == DRM_MODE_CONNECTOR_eDP)
		intel_encoder->type = INTEL_OUTPUT_EDP;

	/* On VLV/CHV eDP is only supported on ports B and C */
	if (drm_WARN_ON(dev, (IS_VALLEYVIEW(dev_priv) ||
			      IS_CHERRYVIEW(dev_priv)) &&
			intel_dp_is_edp(intel_dp) &&
			port != PORT_B && port != PORT_C))
		return false;

	drm_dbg_kms(&dev_priv->drm,
		    "Adding %s connector on [ENCODER:%d:%s]\n",
		    type == DRM_MODE_CONNECTOR_eDP ? "eDP" : "DP",
		    intel_encoder->base.base.id, intel_encoder->base.name);

	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	if (!HAS_GMCH(dev_priv))
		connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	intel_connector->polled = DRM_CONNECTOR_POLL_HPD;

	intel_dp_aux_init(intel_dp);

	intel_connector_attach_encoder(intel_connector, intel_encoder);

	if (HAS_DDI(dev_priv))
		intel_connector->get_hw_state = intel_ddi_connector_get_hw_state;
	else
		intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* MST encoder setup; a no-op where MST isn't applicable */
	intel_dp_mst_encoder_init(dig_port,
				  intel_connector->base.base.id);

	if (!intel_edp_init_connector(intel_dp, intel_connector)) {
		intel_dp_aux_fini(intel_dp);
		intel_dp_mst_encoder_cleanup(dig_port);
		goto fail;
	}

	intel_dp_add_properties(intel_dp, connector);

	if (is_hdcp_supported(dev_priv, port) && !intel_dp_is_edp(intel_dp)) {
		int ret = intel_dp_init_hdcp(dig_port, intel_connector);
		if (ret)
			drm_dbg_kms(&dev_priv->drm,
				    "HDCP init failed, skipping.\n");
	}

	/*
	 * G45 band-gap tweak for DP — NOTE(review): magic value 0xd taken
	 * as-is; presumably a hardware workaround, confirm against the
	 * original upstream comment/spec.
	 */
	if (IS_G45(dev_priv)) {
		u32 temp = intel_de_read(dev_priv, PEG_BAND_GAP_DATA);
		intel_de_write(dev_priv, PEG_BAND_GAP_DATA,
			       (temp & ~0xf) | 0xd);
	}

	intel_dp->frl.is_trained = false;
	intel_dp->frl.trained_rate_gbps = 0;

	return true;

fail:
	drm_connector_cleanup(connector);

	return false;
}
6649
/*
 * intel_dp_init - create a DP digital port (pre-DDI platforms)
 * @dev_priv: i915 device
 * @output_reg: the port's control register
 * @port: the port being initialized
 *
 * Allocates the digital port and connector, registers the drm encoder,
 * and wires up the platform-specific encoder/link-training/signal-level
 * vfuncs (g4x / SNB / IVB / VLV / CHV variants), then initializes the
 * connector itself.
 *
 * Returns true on success; on failure everything allocated here is freed
 * and false is returned.
 */
bool intel_dp_init(struct drm_i915_private *dev_priv,
		   i915_reg_t output_reg,
		   enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_panel_update_backlight;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	/* Enable/disable sequence differs per platform family */
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	/* CPT-style link training for PCH ports and IVB eDP (port A) */
	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		dig_port->dp.set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_GEN(dev_priv, 6) && port == PORT_A)
		dig_port->dp.set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	/* On CHV, port D is restricted to pipe C; B/C to pipes A and B */
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	/* Live-status detection differs between GMCH and PCH platforms */
	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}
6780
6781void intel_dp_mst_suspend(struct drm_i915_private *dev_priv)
6782{
6783 struct intel_encoder *encoder;
6784
6785 for_each_intel_encoder(&dev_priv->drm, encoder) {
6786 struct intel_dp *intel_dp;
6787
6788 if (encoder->type != INTEL_OUTPUT_DDI)
6789 continue;
6790
6791 intel_dp = enc_to_intel_dp(encoder);
6792
6793 if (!intel_dp->can_mst)
6794 continue;
6795
6796 if (intel_dp->is_mst)
6797 drm_dp_mst_topology_mgr_suspend(&intel_dp->mst_mgr);
6798 }
6799}
6800
6801void intel_dp_mst_resume(struct drm_i915_private *dev_priv)
6802{
6803 struct intel_encoder *encoder;
6804
6805 for_each_intel_encoder(&dev_priv->drm, encoder) {
6806 struct intel_dp *intel_dp;
6807 int ret;
6808
6809 if (encoder->type != INTEL_OUTPUT_DDI)
6810 continue;
6811
6812 intel_dp = enc_to_intel_dp(encoder);
6813
6814 if (!intel_dp->can_mst)
6815 continue;
6816
6817 ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst_mgr,
6818 true);
6819 if (ret) {
6820 intel_dp->is_mst = false;
6821 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
6822 false);
6823 }
6824 }
6825}
6826