1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24#include <drm/drm_atomic_helper.h>
25
26#include "i915_drv.h"
27#include "intel_dp.h"
28#include "intel_drv.h"
29#include "intel_psr.h"
30#include "intel_sprite.h"
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62static bool psr_global_enabled(u32 debug)
63{
64 switch (debug & I915_PSR_DEBUG_MODE_MASK) {
65 case I915_PSR_DEBUG_DEFAULT:
66 return i915_modparams.enable_psr;
67 case I915_PSR_DEBUG_DISABLE:
68 return false;
69 default:
70 return true;
71 }
72}
73
74static bool intel_psr2_enabled(struct drm_i915_private *dev_priv,
75 const struct intel_crtc_state *crtc_state)
76{
77
78 WARN_ON(crtc_state->dsc_params.compression_enable &&
79 crtc_state->has_psr2);
80
81 switch (dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
82 case I915_PSR_DEBUG_DISABLE:
83 case I915_PSR_DEBUG_FORCE_PSR1:
84 return false;
85 default:
86 return crtc_state->has_psr2;
87 }
88}
89
90static int edp_psr_shift(enum transcoder cpu_transcoder)
91{
92 switch (cpu_transcoder) {
93 case TRANSCODER_A:
94 return EDP_PSR_TRANSCODER_A_SHIFT;
95 case TRANSCODER_B:
96 return EDP_PSR_TRANSCODER_B_SHIFT;
97 case TRANSCODER_C:
98 return EDP_PSR_TRANSCODER_C_SHIFT;
99 default:
100 MISSING_CASE(cpu_transcoder);
101
102 case TRANSCODER_EDP:
103 return EDP_PSR_TRANSCODER_EDP_SHIFT;
104 }
105}
106
/*
 * intel_psr_irq_control - select which PSR interrupts are unmasked
 * @dev_priv: i915 device
 * @debug: PSR debug flags (I915_PSR_DEBUG_*)
 *
 * Error interrupts are always unmasked for every PSR-capable transcoder;
 * the pre-entry/post-exit debug interrupts are only unmasked when
 * I915_PSR_DEBUG_IRQ is set in @debug.
 */
void intel_psr_irq_control(struct drm_i915_private *dev_priv, u32 debug)
{
	u32 debug_mask, mask;
	enum transcoder cpu_transcoder;
	u32 transcoders = BIT(TRANSCODER_EDP);

	/* Gen8+ also have PSR interrupt bits for transcoders A-C */
	if (INTEL_GEN(dev_priv) >= 8)
		transcoders |= BIT(TRANSCODER_A) |
			       BIT(TRANSCODER_B) |
			       BIT(TRANSCODER_C);

	debug_mask = 0;
	mask = 0;
	for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
		int shift = edp_psr_shift(cpu_transcoder);

		mask |= EDP_PSR_ERROR(shift);
		debug_mask |= EDP_PSR_POST_EXIT(shift) |
			      EDP_PSR_PRE_ENTRY(shift);
	}

	if (debug & I915_PSR_DEBUG_IRQ)
		mask |= debug_mask;

	/* IMR is an inverted mask register: 0 bits are unmasked */
	I915_WRITE(EDP_PSR_IMR, ~mask);
}
133
/*
 * Decode a PSR_EVENT register value into human-readable debug messages.
 * @val: raw PSR_EVENT register contents
 * @psr2_enabled: whether PSR2 is currently in use; the PSR2-disabled and
 *                PSR1-disabled events are only printed in the matching mode
 */
static void psr_event_print(u32 val, bool psr2_enabled)
{
	DRM_DEBUG_KMS("PSR exit events: 0x%x\n", val);
	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
		DRM_DEBUG_KMS("\tPSR2 watchdog timer expired\n");
	if ((val & PSR_EVENT_PSR2_DISABLED) && psr2_enabled)
		DRM_DEBUG_KMS("\tPSR2 disabled\n");
	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
		DRM_DEBUG_KMS("\tSU dirty FIFO underrun\n");
	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
		DRM_DEBUG_KMS("\tSU CRC FIFO underrun\n");
	if (val & PSR_EVENT_GRAPHICS_RESET)
		DRM_DEBUG_KMS("\tGraphics reset\n");
	if (val & PSR_EVENT_PCH_INTERRUPT)
		DRM_DEBUG_KMS("\tPCH interrupt\n");
	if (val & PSR_EVENT_MEMORY_UP)
		DRM_DEBUG_KMS("\tMemory up\n");
	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
		DRM_DEBUG_KMS("\tFront buffer modification\n");
	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
		DRM_DEBUG_KMS("\tPSR watchdog timer expired\n");
	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
		DRM_DEBUG_KMS("\tPIPE registers updated\n");
	if (val & PSR_EVENT_REGISTER_UPDATE)
		DRM_DEBUG_KMS("\tRegister updated\n");
	if (val & PSR_EVENT_HDCP_ENABLE)
		DRM_DEBUG_KMS("\tHDCP enabled\n");
	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
		DRM_DEBUG_KMS("\tKVMR session enabled\n");
	if (val & PSR_EVENT_VBI_ENABLE)
		DRM_DEBUG_KMS("\tVBI enabled\n");
	if (val & PSR_EVENT_LPSP_MODE_EXIT)
		DRM_DEBUG_KMS("\tLPSP mode exited\n");
	if ((val & PSR_EVENT_PSR_DISABLE) && !psr2_enabled)
		DRM_DEBUG_KMS("\tPSR disabled\n");
}
170
/*
 * intel_psr_irq_handler - handle the PSR bits of a display interrupt
 * @dev_priv: i915 device
 * @psr_iir: EDP_PSR_IIR value read by the caller
 *
 * Records entry-attempt/exit timestamps for debugfs, decodes PSR exit
 * reasons on gen9+, and on an AUX error masks further error interrupts
 * and schedules the PSR work to recover.
 */
void intel_psr_irq_handler(struct drm_i915_private *dev_priv, u32 psr_iir)
{
	u32 transcoders = BIT(TRANSCODER_EDP);
	enum transcoder cpu_transcoder;
	ktime_t time_ns = ktime_get();
	u32 mask = 0;

	/* Gen8+ also have PSR interrupt bits for transcoders A-C */
	if (INTEL_GEN(dev_priv) >= 8)
		transcoders |= BIT(TRANSCODER_A) |
			BIT(TRANSCODER_B) |
			BIT(TRANSCODER_C);

	for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
		int shift = edp_psr_shift(cpu_transcoder);

		if (psr_iir & EDP_PSR_ERROR(shift)) {
			DRM_WARN("[transcoder %s] PSR aux error\n",
				 transcoder_name(cpu_transcoder));

			dev_priv->psr.irq_aux_error = true;

			/*
			 * Mask this error here: if left unmasked it can fire
			 * so fast that the scheduled work never gets to run.
			 * The error itself is handled from the work
			 * (see intel_psr_work/intel_psr_handle_irq).
			 */
			mask |= EDP_PSR_ERROR(shift);
		}

		if (psr_iir & EDP_PSR_PRE_ENTRY(shift)) {
			dev_priv->psr.last_entry_attempt = time_ns;
			DRM_DEBUG_KMS("[transcoder %s] PSR entry attempt in 2 vblanks\n",
				      transcoder_name(cpu_transcoder));
		}

		if (psr_iir & EDP_PSR_POST_EXIT(shift)) {
			dev_priv->psr.last_exit = time_ns;
			DRM_DEBUG_KMS("[transcoder %s] PSR exit completed\n",
				      transcoder_name(cpu_transcoder));

			/* Gen9+ expose the exit reason in PSR_EVENT (W1C) */
			if (INTEL_GEN(dev_priv) >= 9) {
				u32 val = I915_READ(PSR_EVENT(cpu_transcoder));
				bool psr2_enabled = dev_priv->psr.psr2_enabled;

				I915_WRITE(PSR_EVENT(cpu_transcoder), val);
				psr_event_print(val, psr2_enabled);
			}
		}
	}

	if (mask) {
		mask |= I915_READ(EDP_PSR_IMR);
		I915_WRITE(EDP_PSR_IMR, mask);

		schedule_work(&dev_priv->psr.work);
	}
}
231
232static bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
233{
234 u8 dprx = 0;
235
236 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
237 &dprx) != 1)
238 return false;
239 return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED;
240}
241
242static bool intel_dp_get_alpm_status(struct intel_dp *intel_dp)
243{
244 u8 alpm_caps = 0;
245
246 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_RECEIVER_ALPM_CAP,
247 &alpm_caps) != 1)
248 return false;
249 return alpm_caps & DP_ALPM_CAP;
250}
251
252static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
253{
254 u8 val = 8;
255
256 if (drm_dp_dpcd_readb(&intel_dp->aux,
257 DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
258 val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
259 else
260 DRM_DEBUG_KMS("Unable to get sink synchronization latency, assuming 8 frames\n");
261 return val;
262}
263
264static u16 intel_dp_get_su_x_granulartiy(struct intel_dp *intel_dp)
265{
266 u16 val;
267 ssize_t r;
268
269
270
271
272
273 if (!(intel_dp->psr_dpcd[1] & DP_PSR2_SU_GRANULARITY_REQUIRED))
274 return 4;
275
276 r = drm_dp_dpcd_read(&intel_dp->aux, DP_PSR2_SU_X_GRANULARITY, &val, 2);
277 if (r != 2)
278 DRM_DEBUG_KMS("Unable to read DP_PSR2_SU_X_GRANULARITY\n");
279
280
281
282
283
284 if (r != 2 || val == 0)
285 val = 4;
286
287 return val;
288}
289
/*
 * intel_psr_init_dpcd - read and cache the sink's PSR capabilities
 * @intel_dp: the eDP encoder's DP struct
 *
 * Reads DP_PSR_SUPPORT and related DPCD fields, applies quirks, and
 * records the sink's PSR/PSR2 capabilities in dev_priv->psr. Registers
 * @intel_dp as the single supported PSR DP.
 */
void intel_psr_init_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv =
		to_i915(dp_to_dig_port(intel_dp)->base.base.dev);

	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
			 sizeof(intel_dp->psr_dpcd));

	/* psr_dpcd[0] == 0 means the sink advertises no PSR support */
	if (!intel_dp->psr_dpcd[0])
		return;
	DRM_DEBUG_KMS("eDP panel supports PSR version %x\n",
		      intel_dp->psr_dpcd[0]);

	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
		DRM_DEBUG_KMS("PSR support not currently available for this panel\n");
		return;
	}

	/* Waking the sink on PSR exit needs DPCD power state control */
	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
		DRM_DEBUG_KMS("Panel lacks power state control, PSR cannot be enabled\n");
		return;
	}

	dev_priv->psr.sink_support = true;
	dev_priv->psr.sink_sync_latency =
		intel_dp_get_sink_sync_latency(intel_dp);

	/* Only one PSR-capable panel is supported per device */
	WARN_ON(dev_priv->psr.dp);
	dev_priv->psr.dp = intel_dp;

	if (INTEL_GEN(dev_priv) >= 9 &&
	    (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_IS_SUPPORTED)) {
		bool y_req = intel_dp->psr_dpcd[1] &
			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
		bool alpm = intel_dp_get_alpm_status(intel_dp);

		/*
		 * PSR2 is only reported as supported when the sink both
		 * requires the Y-coordinate in the VSC SDP and supports
		 * ALPM; other PSR2 flavours are not handled here.
		 */
		dev_priv->psr.sink_psr2_support = y_req && alpm;
		DRM_DEBUG_KMS("PSR2 %ssupported\n",
			      dev_priv->psr.sink_psr2_support ? "" : "not ");

		if (dev_priv->psr.sink_psr2_support) {
			dev_priv->psr.colorimetry_support =
				intel_dp_get_colorimetry_status(intel_dp);
			dev_priv->psr.su_x_granularity =
				intel_dp_get_su_x_granulartiy(intel_dp);
		}
	}
}
349
350static void intel_psr_setup_vsc(struct intel_dp *intel_dp,
351 const struct intel_crtc_state *crtc_state)
352{
353 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
354 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
355 struct edp_vsc_psr psr_vsc;
356
357 if (dev_priv->psr.psr2_enabled) {
358
359 memset(&psr_vsc, 0, sizeof(psr_vsc));
360 psr_vsc.sdp_header.HB0 = 0;
361 psr_vsc.sdp_header.HB1 = 0x7;
362 if (dev_priv->psr.colorimetry_support) {
363 psr_vsc.sdp_header.HB2 = 0x5;
364 psr_vsc.sdp_header.HB3 = 0x13;
365 } else {
366 psr_vsc.sdp_header.HB2 = 0x4;
367 psr_vsc.sdp_header.HB3 = 0xe;
368 }
369 } else {
370
371 memset(&psr_vsc, 0, sizeof(psr_vsc));
372 psr_vsc.sdp_header.HB0 = 0;
373 psr_vsc.sdp_header.HB1 = 0x7;
374 psr_vsc.sdp_header.HB2 = 0x2;
375 psr_vsc.sdp_header.HB3 = 0x8;
376 }
377
378 intel_dig_port->write_infoframe(&intel_dig_port->base,
379 crtc_state,
380 DP_SDP_VSC, &psr_vsc, sizeof(psr_vsc));
381}
382
/*
 * Pre-program the PSR AUX registers with a canned native AUX write that
 * sets the sink power state to D0, so the hardware can wake the sink on
 * its own when exiting PSR, without software involvement.
 */
static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 aux_clock_divider, aux_ctl;
	int i;
	/* Native AUX write: DP_SET_POWER <- DP_SET_POWER_D0, 1 data byte */
	static const u8 aux_msg[] = {
		[0] = DP_AUX_NATIVE_WRITE << 4,
		[1] = DP_SET_POWER >> 8,
		[2] = DP_SET_POWER & 0xff,
		[3] = 1 - 1,	/* AUX header length field is (length - 1) */
		[4] = DP_SET_POWER_D0,
	};
	u32 psr_aux_mask = EDP_PSR_AUX_CTL_TIME_OUT_MASK |
			   EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
			   EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
			   EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;

	/* The PSR AUX data registers hold at most 20 message bytes */
	BUILD_BUG_ON(sizeof(aux_msg) > 20);
	for (i = 0; i < sizeof(aux_msg); i += 4)
		I915_WRITE(EDP_PSR_AUX_DATA(i >> 2),
			   intel_dp_pack_aux(&aux_msg[i], sizeof(aux_msg) - i));

	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);

	/* Start from the control value used for regular AUX transfers */
	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
					     aux_clock_divider);

	/* Keep only the fields that are valid in EDP_PSR_AUX_CTL */
	aux_ctl &= psr_aux_mask;
	I915_WRITE(EDP_PSR_AUX_CTL, aux_ctl);
}
415
/*
 * Enable PSR on the sink side via DPCD and make sure the sink is in the
 * D0 power state.
 */
static void intel_psr_enable_sink(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 dpcd_val = DP_PSR_ENABLE;

	/* PSR2 needs ALPM enabled at the sink */
	if (dev_priv->psr.psr2_enabled) {
		drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG,
				   DP_ALPM_ENABLE);
		dpcd_val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
	} else {
		if (dev_priv->psr.link_standby)
			dpcd_val |= DP_PSR_MAIN_LINK_ACTIVE;

		/* Sink CRC verification is only requested on gen8+ */
		if (INTEL_GEN(dev_priv) >= 8)
			dpcd_val |= DP_PSR_CRC_VERIFICATION;
	}

	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, dpcd_val);

	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
}
438
/*
 * Build the training-pattern wakeup-time fields of EDP_PSR_CTL from the
 * VBT-provided TP1 and TP2/TP3 wakeup times, rounding each up to the
 * next supported hardware step, and select TP3 vs TP2 after TP1 based
 * on source/sink capability.
 */
static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val = 0;

	/* Gen11+ additionally program a 0us TP4 time */
	if (INTEL_GEN(dev_priv) >= 11)
		val |= EDP_PSR_TP4_TIME_0US;

	if (dev_priv->vbt.psr.tp1_wakeup_time_us == 0)
		val |= EDP_PSR_TP1_TIME_0us;
	else if (dev_priv->vbt.psr.tp1_wakeup_time_us <= 100)
		val |= EDP_PSR_TP1_TIME_100us;
	else if (dev_priv->vbt.psr.tp1_wakeup_time_us <= 500)
		val |= EDP_PSR_TP1_TIME_500us;
	else
		val |= EDP_PSR_TP1_TIME_2500us;

	if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us == 0)
		val |= EDP_PSR_TP2_TP3_TIME_0us;
	else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR_TP2_TP3_TIME_100us;
	else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR_TP2_TP3_TIME_500us;
	else
		val |= EDP_PSR_TP2_TP3_TIME_2500us;

	/* TP3 requires HBR2 support on the source and TPS3 on the sink */
	if (intel_dp_source_supports_hbr2(intel_dp) &&
	    drm_dp_tps3_supported(intel_dp->dpcd))
		val |= EDP_PSR_TP1_TP3_SEL;
	else
		val |= EDP_PSR_TP1_TP2_SEL;

	return val;
}
473
/* Program EDP_PSR_CTL and arm PSR1 on the source. */
static void hsw_activate_psr1(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 max_sleep_time = 0x1f;
	u32 val = EDP_PSR_ENABLE;

	/*
	 * Use at least 6 idle frames regardless of what the VBT asks for.
	 * NOTE(review): presumably covers hardware off-by-one issues seen
	 * with smaller values — confirm against bspec/VBT docs.
	 */
	int idle_frames = max(6, dev_priv->vbt.psr.idle_frames);

	/*
	 * The sink needs more idle frames than its resync latency before
	 * it can enter PSR; add one frame of margin.
	 */
	idle_frames = max(idle_frames, dev_priv->psr.sink_sync_latency + 1);
	val |= idle_frames << EDP_PSR_IDLE_FRAME_SHIFT;

	val |= max_sleep_time << EDP_PSR_MAX_SLEEP_TIME_SHIFT;
	if (IS_HASWELL(dev_priv))
		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;

	if (dev_priv->psr.link_standby)
		val |= EDP_PSR_LINK_STANDBY;

	val |= intel_psr1_get_tp_time(intel_dp);

	if (INTEL_GEN(dev_priv) >= 8)
		val |= EDP_PSR_CRC_ENABLE;

	/* Preserve the hardware-owned context-restore bits */
	val |= I915_READ(EDP_PSR_CTL) & EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK;
	I915_WRITE(EDP_PSR_CTL, val);
}
506
/* Program EDP_PSR2_CTL and arm PSR2 (selective update) on the source. */
static void hsw_activate_psr2(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val;

	/* Use at least 6 idle frames; same rationale as hsw_activate_psr1() */
	int idle_frames = max(6, dev_priv->vbt.psr.idle_frames);

	idle_frames = max(idle_frames, dev_priv->psr.sink_sync_latency + 1);
	val = idle_frames << EDP_PSR2_IDLE_FRAME_SHIFT;

	val |= EDP_PSR2_ENABLE | EDP_SU_TRACK_ENABLE;
	/* GLK and gen10+ carry the Y-coordinate in the VSC SDP */
	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
		val |= EDP_Y_COORDINATE_ENABLE;

	val |= EDP_PSR2_FRAME_BEFORE_SU(dev_priv->psr.sink_sync_latency + 1);

	/*
	 * NOTE(review): if psr2_tp2_tp3_wakeup_time_us is an unsigned type
	 * the ">= 0" half of this condition is always true — confirm the
	 * field's type and intended semantics.
	 */
	if (dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
	    dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
		val |= EDP_PSR2_TP2_TIME_50us;
	else if (dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR2_TP2_TIME_100us;
	else if (dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR2_TP2_TIME_500us;
	else
		val |= EDP_PSR2_TP2_TIME_2500us;

	/*
	 * PSR1 must not be enabled while PSR2 is active: clear EDP_PSR_CTL
	 * before writing the PSR2 control register.
	 */
	I915_WRITE(EDP_PSR_CTL, 0);

	I915_WRITE(EDP_PSR2_CTL, val);
}
544
/*
 * Check whether this crtc configuration allows PSR2: sink PSR2 support,
 * no DSC, resolution within the per-generation PSR2 limits, hdisplay
 * aligned to the sink's selective-update X granularity, and no pipe CRC
 * in use.
 */
static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
				    struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int crtc_hdisplay = crtc_state->base.adjusted_mode.crtc_hdisplay;
	int crtc_vdisplay = crtc_state->base.adjusted_mode.crtc_vdisplay;
	int psr_max_h = 0, psr_max_v = 0;

	if (!dev_priv->psr.sink_psr2_support)
		return false;

	/* DSC and PSR2 cannot be enabled simultaneously */
	if (crtc_state->dsc_params.compression_enable) {
		DRM_DEBUG_KMS("PSR2 cannot be enabled since DSC is enabled\n");
		return false;
	}

	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) {
		psr_max_h = 4096;
		psr_max_v = 2304;
	} else if (IS_GEN(dev_priv, 9)) {
		psr_max_h = 3640;
		psr_max_v = 2304;
	}
	/* On other platforms the limits stay 0, so the check below fails */

	if (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v) {
		DRM_DEBUG_KMS("PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
			      crtc_hdisplay, crtc_vdisplay,
			      psr_max_h, psr_max_v);
		return false;
	}

	/*
	 * The active width must be a multiple of the sink's required
	 * selective-update X granularity, otherwise SU regions cannot be
	 * expressed.
	 */
	if (crtc_hdisplay % dev_priv->psr.su_x_granularity) {
		DRM_DEBUG_KMS("PSR2 not enabled, hdisplay(%d) not multiple of %d\n",
			      crtc_hdisplay, dev_priv->psr.su_x_granularity);
		return false;
	}

	/* Pipe CRC and PSR2 are mutually exclusive */
	if (crtc_state->crc_enabled) {
		DRM_DEBUG_KMS("PSR2 not enabled because it would inhibit pipe CRC calculation\n");
		return false;
	}

	return true;
}
600
/*
 * intel_psr_compute_config - decide PSR/PSR2 eligibility for a crtc state
 * @intel_dp: DP encoder being configured
 * @crtc_state: atomic crtc state to fill in
 *
 * Sets crtc_state->has_psr/has_psr2 when all source and sink constraints
 * are met; otherwise leaves them unset and logs the failed condition.
 */
void intel_psr_compute_config(struct intel_dp *intel_dp,
			      struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->base.adjusted_mode;
	int psr_setup_time;

	if (!CAN_PSR(dev_priv))
		return;

	/* Only the registered PSR DP (single panel) can use PSR */
	if (intel_dp != dev_priv->psr.dp)
		return;

	/*
	 * PSR is only wired up for port A here. NOTE(review): presumably a
	 * hardware restriction on HSW and a single-instance simplification
	 * on newer platforms — confirm against bspec.
	 */
	if (dig_port->base.port != PORT_A) {
		DRM_DEBUG_KMS("PSR condition failed: Port not supported\n");
		return;
	}

	/* Sink flagged unreliable after earlier PSR errors */
	if (dev_priv->psr.sink_not_reliable) {
		DRM_DEBUG_KMS("PSR sink implementation is not reliable\n");
		return;
	}

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		DRM_DEBUG_KMS("PSR condition failed: Interlaced mode enabled\n");
		return;
	}

	psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
	if (psr_setup_time < 0) {
		DRM_DEBUG_KMS("PSR condition failed: Invalid PSR setup time (0x%02x)\n",
			      intel_dp->psr_dpcd[1]);
		return;
	}

	/* The sink's setup time must fit in the vblank (minus one line) */
	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
		DRM_DEBUG_KMS("PSR condition failed: PSR setup time (%d us) too long\n",
			      psr_setup_time);
		return;
	}

	crtc_state->has_psr = true;
	crtc_state->has_psr2 = intel_psr2_config_valid(intel_dp, crtc_state);
}
655
/*
 * Arm PSR on the source in the currently selected mode (PSR1 or PSR2).
 * Must be called with the PSR lock held and PSR not already active.
 */
static void intel_psr_activate(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* Sanity check: neither PSR mode may already be enabled in HW */
	if (INTEL_GEN(dev_priv) >= 9)
		WARN_ON(I915_READ(EDP_PSR2_CTL) & EDP_PSR2_ENABLE);
	WARN_ON(I915_READ(EDP_PSR_CTL) & EDP_PSR_ENABLE);
	WARN_ON(dev_priv->psr.active);
	lockdep_assert_held(&dev_priv->psr.lock);

	if (dev_priv->psr.psr2_enabled)
		hsw_activate_psr2(intel_dp);
	else
		hsw_activate_psr1(intel_dp);

	dev_priv->psr.active = true;
}
674
/*
 * Return the CHICKEN_TRANS register for a gen9+ cpu transcoder, falling
 * back (with a warning) to transcoder A's register for out-of-range or
 * unmapped transcoders.
 */
static i915_reg_t gen9_chicken_trans_reg(struct drm_i915_private *dev_priv,
					 enum transcoder cpu_transcoder)
{
	static const i915_reg_t regs[] = {
		[TRANSCODER_A] = CHICKEN_TRANS_A,
		[TRANSCODER_B] = CHICKEN_TRANS_B,
		[TRANSCODER_C] = CHICKEN_TRANS_C,
		[TRANSCODER_EDP] = CHICKEN_TRANS_EDP,
	};

	WARN_ON(INTEL_GEN(dev_priv) < 9);

	/* A zero register offset means the slot was never initialized */
	if (WARN_ON(cpu_transcoder >= ARRAY_SIZE(regs) ||
		    !regs[cpu_transcoder].reg))
		cpu_transcoder = TRANSCODER_A;

	return regs[cpu_transcoder];
}
693
/*
 * Program the source-side PSR plumbing that is independent of the PSR
 * mode: AUX wake registers (HSW/BDW only), gen9 chicken bits for PSR2,
 * and the EDP_PSR_DEBUG mask of events that force a PSR exit.
 */
static void intel_psr_enable_source(struct intel_dp *intel_dp,
				    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 mask;

	/*
	 * Only HSW and BDW need the PSR AUX registers programmed by
	 * software; later platforms do not go through this path.
	 */
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
		hsw_psr_setup_aux(intel_dp);

	/* Gen9 (but not GLK) PSR2 needs extra chicken bits set */
	if (dev_priv->psr.psr2_enabled && (IS_GEN(dev_priv, 9) &&
					   !IS_GEMINILAKE(dev_priv))) {
		i915_reg_t reg = gen9_chicken_trans_reg(dev_priv,
							cpu_transcoder);
		u32 chicken = I915_READ(reg);

		chicken |= PSR2_VSC_ENABLE_PROG_HEADER |
			   PSR2_ADD_VERTICAL_LINE_COUNT;
		I915_WRITE(reg, chicken);
	}

	/*
	 * Mask events that should NOT trigger a PSR exit: memory-up, HPD,
	 * LPSP, max-sleep-time expiry and (pre-gen11) display register
	 * writes.
	 */
	mask = EDP_PSR_DEBUG_MASK_MEMUP |
	       EDP_PSR_DEBUG_MASK_HPD |
	       EDP_PSR_DEBUG_MASK_LPSP |
	       EDP_PSR_DEBUG_MASK_MAX_SLEEP;

	if (INTEL_GEN(dev_priv) < 11)
		mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;

	I915_WRITE(EDP_PSR_DEBUG, mask);
}
734
/*
 * Enable PSR end to end (sink DPCD, VSC SDP, source registers) and
 * activate it. Caller must hold the PSR lock and PSR must currently be
 * disabled.
 */
static void intel_psr_enable_locked(struct drm_i915_private *dev_priv,
				    const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = dev_priv->psr.dp;

	WARN_ON(dev_priv->psr.enabled);

	dev_priv->psr.psr2_enabled = intel_psr2_enabled(dev_priv, crtc_state);
	dev_priv->psr.busy_frontbuffer_bits = 0;
	dev_priv->psr.pipe = to_intel_crtc(crtc_state->base.crtc)->pipe;

	DRM_DEBUG_KMS("Enabling PSR%s\n",
		      dev_priv->psr.psr2_enabled ? "2" : "1");
	intel_psr_setup_vsc(intel_dp, crtc_state);
	intel_psr_enable_sink(intel_dp);
	intel_psr_enable_source(intel_dp, crtc_state);
	dev_priv->psr.enabled = true;

	intel_psr_activate(intel_dp);
}
755
756
757
758
759
760
761
762
/**
 * intel_psr_enable - Enable PSR
 * @intel_dp: Intel DP
 * @crtc_state: new CRTC state
 *
 * Enables PSR for the given crtc state unless it is globally disabled
 * via the debug flag. No-op when the state does not request PSR.
 */
void intel_psr_enable(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!crtc_state->has_psr)
		return;

	if (WARN_ON(!CAN_PSR(dev_priv)))
		return;

	/* PSR and DRRS are mutually exclusive on the same panel */
	WARN_ON(dev_priv->drrs.dp);

	mutex_lock(&dev_priv->psr.lock);

	if (!psr_global_enabled(dev_priv->psr.debug)) {
		DRM_DEBUG_KMS("PSR disabled by flag\n");
		goto unlock;
	}

	intel_psr_enable_locked(dev_priv, crtc_state);

unlock:
	mutex_unlock(&dev_priv->psr.lock);
}
788
/*
 * Clear the PSR1/PSR2 enable bit on the source so the hardware leaves
 * self-refresh. No-op (with sanity checks) when PSR is not active.
 */
static void intel_psr_exit(struct drm_i915_private *dev_priv)
{
	u32 val;

	if (!dev_priv->psr.active) {
		/* HW enable bits must agree with the software state */
		if (INTEL_GEN(dev_priv) >= 9)
			WARN_ON(I915_READ(EDP_PSR2_CTL) & EDP_PSR2_ENABLE);
		WARN_ON(I915_READ(EDP_PSR_CTL) & EDP_PSR_ENABLE);
		return;
	}

	if (dev_priv->psr.psr2_enabled) {
		val = I915_READ(EDP_PSR2_CTL);
		WARN_ON(!(val & EDP_PSR2_ENABLE));
		I915_WRITE(EDP_PSR2_CTL, val & ~EDP_PSR2_ENABLE);
	} else {
		val = I915_READ(EDP_PSR_CTL);
		WARN_ON(!(val & EDP_PSR_ENABLE));
		I915_WRITE(EDP_PSR_CTL, val & ~EDP_PSR_ENABLE);
	}
	dev_priv->psr.active = false;
}
811
/*
 * Disable PSR on both source and sink: exit PSR, wait for the hardware
 * to report idle, then clear the sink's PSR enable in DPCD. Caller must
 * hold the PSR lock; no-op when PSR is already disabled.
 */
static void intel_psr_disable_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	i915_reg_t psr_status;
	u32 psr_status_mask;

	lockdep_assert_held(&dev_priv->psr.lock);

	if (!dev_priv->psr.enabled)
		return;

	DRM_DEBUG_KMS("Disabling PSR%s\n",
		      dev_priv->psr.psr2_enabled ? "2" : "1");

	intel_psr_exit(dev_priv);

	if (dev_priv->psr.psr2_enabled) {
		psr_status = EDP_PSR2_STATUS;
		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
	} else {
		psr_status = EDP_PSR_STATUS;
		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
	}

	/* Wait till PSR is idle */
	if (intel_wait_for_register(&dev_priv->uncore,
				    psr_status, psr_status_mask, 0, 2000))
		DRM_ERROR("Timed out waiting PSR idle state\n");

	/* Disable PSR on the sink */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);

	dev_priv->psr.enabled = false;
}
846
847
848
849
850
851
852
853
/**
 * intel_psr_disable - Disable PSR
 * @intel_dp: Intel DP
 * @old_crtc_state: old CRTC state
 *
 * Disables PSR if it was enabled for @old_crtc_state and cancels any
 * pending PSR work.
 */
void intel_psr_disable(struct intel_dp *intel_dp,
		       const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!old_crtc_state->has_psr)
		return;

	if (WARN_ON(!CAN_PSR(dev_priv)))
		return;

	mutex_lock(&dev_priv->psr.lock);

	intel_psr_disable_locked(intel_dp);

	mutex_unlock(&dev_priv->psr.lock);
	/* Outside the lock: the work itself takes psr.lock */
	cancel_work_sync(&dev_priv->psr.work);
}
872
/*
 * Force a PSR exit through the hardware's own frontbuffer tracking by
 * issuing a dummy write to CURSURFLIVE on the PSR pipe.
 *
 * NOTE(review): relies on CURSURFLIVE writes being treated as display
 * activity that kicks the hardware out of PSR — confirm against the
 * relevant display workaround in bspec.
 */
static void psr_force_hw_tracking_exit(struct drm_i915_private *dev_priv)
{
	I915_WRITE(CURSURFLIVE(dev_priv->psr.pipe), 0);
}
886
887
888
889
890
891
892
893
894
895
/**
 * intel_psr_update - Update PSR state for a new crtc state
 * @intel_dp: Intel DP
 * @crtc_state: new CRTC state
 *
 * Re-evaluates the desired PSR/PSR2 state and toggles PSR accordingly.
 * When the state is unchanged but pipe CRC was just enabled, a hardware
 * tracking exit is forced so CRC capture is not inhibited.
 */
void intel_psr_update(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct i915_psr *psr = &dev_priv->psr;
	bool enable, psr2_enable;

	if (!CAN_PSR(dev_priv) || READ_ONCE(psr->dp) != intel_dp)
		return;

	mutex_lock(&dev_priv->psr.lock);

	enable = crtc_state->has_psr && psr_global_enabled(psr->debug);
	psr2_enable = intel_psr2_enabled(dev_priv, crtc_state);

	if (enable == psr->enabled && psr2_enable == psr->psr2_enabled) {
		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
		if (crtc_state->crc_enabled && psr->enabled)
			psr_force_hw_tracking_exit(dev_priv);

		goto unlock;
	}

	if (psr->enabled)
		intel_psr_disable_locked(intel_dp);

	if (enable)
		intel_psr_enable_locked(dev_priv, crtc_state);

unlock:
	mutex_unlock(&dev_priv->psr.lock);
}
928
929
930
931
932
933
934
935
936
937
938
/**
 * intel_psr_wait_for_idle - wait for PSR1 to reach the IDLE state
 * @new_crtc_state: new CRTC state
 * @out_value: last EDP_PSR_STATUS value on timeout
 *
 * Returns 0 when PSR is disabled, not requested, or reaches IDLE in
 * time; a negative error code on timeout. Only PSR1 is waited for.
 */
int intel_psr_wait_for_idle(const struct intel_crtc_state *new_crtc_state,
			    u32 *out_value)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (!dev_priv->psr.enabled || !new_crtc_state->has_psr)
		return 0;

	/* FIXME: PSR2 is not handled here; nothing is waited for */
	if (READ_ONCE(dev_priv->psr.psr2_enabled))
		return 0;

	/*
	 * 50 ms upper bound: NOTE(review): presumably covers the maximum
	 * PSR exit latency (refresh period + link training + AUX
	 * handshake) with margin — confirm against bspec.
	 */
	return __intel_wait_for_register(&dev_priv->uncore, EDP_PSR_STATUS,
					 EDP_PSR_STATUS_STATE_MASK,
					 EDP_PSR_STATUS_STATE_IDLE, 2, 50,
					 out_value);
}
964
/*
 * Wait (with the PSR lock temporarily dropped) for the active PSR mode
 * to report an idle state. Returns true when idle was reached and PSR
 * is still enabled after re-taking the lock; false otherwise.
 */
static bool __psr_wait_for_idle_locked(struct drm_i915_private *dev_priv)
{
	i915_reg_t reg;
	u32 mask;
	int err;

	if (!dev_priv->psr.enabled)
		return false;

	if (dev_priv->psr.psr2_enabled) {
		reg = EDP_PSR2_STATUS;
		mask = EDP_PSR2_STATUS_STATE_MASK;
	} else {
		reg = EDP_PSR_STATUS;
		mask = EDP_PSR_STATUS_STATE_MASK;
	}

	/* Drop the lock while polling so other PSR paths are not blocked */
	mutex_unlock(&dev_priv->psr.lock);

	err = intel_wait_for_register(&dev_priv->uncore, reg, mask, 0, 50);
	if (err)
		DRM_ERROR("Timed out waiting for PSR Idle for re-enable\n");

	/* PSR state may have changed while unlocked: caller must recheck */
	mutex_lock(&dev_priv->psr.lock);
	return err == 0 && dev_priv->psr.enabled;
}
992
/*
 * Force a modeset on the first active PSR-capable crtc so a changed PSR
 * debug mode takes effect. Uses the standard atomic retry dance on
 * -EDEADLK. Returns 0 on success or a negative error code.
 */
static int intel_psr_fastset_force(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct drm_modeset_acquire_ctx ctx;
	struct drm_atomic_state *state;
	struct drm_crtc *crtc;
	int err;

	state = drm_atomic_state_alloc(dev);
	if (!state)
		return -ENOMEM;

	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
	state->acquire_ctx = &ctx;

retry:
	drm_for_each_crtc(crtc, dev) {
		struct drm_crtc_state *crtc_state;
		struct intel_crtc_state *intel_crtc_state;

		crtc_state = drm_atomic_get_crtc_state(state, crtc);
		if (IS_ERR(crtc_state)) {
			err = PTR_ERR(crtc_state);
			goto error;
		}

		intel_crtc_state = to_intel_crtc_state(crtc_state);

		/* Only one crtc can have PSR; flag it for a full modeset */
		if (crtc_state->active && intel_crtc_state->has_psr) {
			crtc_state->mode_changed = true;
			break;
		}
	}

	err = drm_atomic_commit(state);

error:
	if (err == -EDEADLK) {
		drm_atomic_state_clear(state);
		err = drm_modeset_backoff(&ctx);
		if (!err)
			goto retry;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_atomic_state_put(state);

	return err;
}
1044
/*
 * intel_psr_debug_set - set the PSR debug control value (from debugfs)
 * @dev_priv: i915 device
 * @val: new I915_PSR_DEBUG_* value
 *
 * Validates @val, stores it, reprograms the PSR interrupt mask, and —
 * when the mode actually changed — forces a modeset so the new mode
 * takes effect. Returns 0 or a negative error code.
 */
int intel_psr_debug_set(struct drm_i915_private *dev_priv, u64 val)
{
	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
	u32 old_mode;
	int ret;

	/* Reject unknown flag bits and out-of-range modes */
	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_MODE_MASK) ||
	    mode > I915_PSR_DEBUG_FORCE_PSR1) {
		DRM_DEBUG_KMS("Invalid debug mask %llx\n", val);
		return -EINVAL;
	}

	ret = mutex_lock_interruptible(&dev_priv->psr.lock);
	if (ret)
		return ret;

	old_mode = dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK;
	dev_priv->psr.debug = val;
	intel_psr_irq_control(dev_priv, dev_priv->psr.debug);

	mutex_unlock(&dev_priv->psr.lock);

	/* Only a mode change (not just the IRQ flag) needs a modeset */
	if (old_mode != mode)
		ret = intel_psr_fastset_force(dev_priv);

	return ret;
}
1072
/*
 * Recover from a PSR AUX error reported by the interrupt handler:
 * disable PSR, mark the sink unreliable so it is not re-enabled, and
 * put the sink back into D0. Called from the PSR work with the lock
 * held.
 */
static void intel_psr_handle_irq(struct drm_i915_private *dev_priv)
{
	struct i915_psr *psr = &dev_priv->psr;

	intel_psr_disable_locked(psr->dp);
	psr->sink_not_reliable = true;

	/* let's make sure that sink is awaken */
	drm_dp_dpcd_writeb(&psr->dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
}
1082
/*
 * Deferred PSR work: handles AUX errors flagged by the interrupt
 * handler and re-activates PSR once the hardware is idle again after a
 * frontbuffer-triggered exit.
 */
static void intel_psr_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), psr.work);

	mutex_lock(&dev_priv->psr.lock);

	if (!dev_priv->psr.enabled)
		goto unlock;

	if (READ_ONCE(dev_priv->psr.irq_aux_error))
		intel_psr_handle_irq(dev_priv);

	/*
	 * PSR must be idle before it can be re-armed, otherwise it stays
	 * disabled until the next full enable/disable cycle. Note this
	 * helper drops and re-takes the lock while waiting.
	 */
	if (!__psr_wait_for_idle_locked(dev_priv))
		goto unlock;

	/*
	 * Recheck: an invalidate may have raced in while the lock was
	 * dropped, or PSR may already have been re-activated.
	 */
	if (dev_priv->psr.busy_frontbuffer_bits || dev_priv->psr.active)
		goto unlock;

	intel_psr_activate(dev_priv->psr.dp);
unlock:
	mutex_unlock(&dev_priv->psr.lock);
}
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
/**
 * intel_psr_invalidate - Invalidate PSR
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 * @origin: which operation caused the invalidate
 *
 * Called when a frontbuffer plane is about to be written to (outside of
 * page flips, which the hardware tracks itself). Marks the planes busy
 * and exits PSR so the write becomes visible.
 */
void intel_psr_invalidate(struct drm_i915_private *dev_priv,
			  unsigned frontbuffer_bits, enum fb_op_origin origin)
{
	if (!CAN_PSR(dev_priv))
		return;

	/* Page flips are handled by hardware frontbuffer tracking */
	if (origin == ORIGIN_FLIP)
		return;

	mutex_lock(&dev_priv->psr.lock);
	if (!dev_priv->psr.enabled) {
		mutex_unlock(&dev_priv->psr.lock);
		return;
	}

	/* Only the PSR pipe's planes are relevant */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(dev_priv->psr.pipe);
	dev_priv->psr.busy_frontbuffer_bits |= frontbuffer_bits;

	if (frontbuffer_bits)
		intel_psr_exit(dev_priv);

	mutex_unlock(&dev_priv->psr.lock);
}
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
/**
 * intel_psr_flush - Flush PSR
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 * @origin: which operation caused the flush
 *
 * Called when frontbuffer writes have completed. Clears the busy bits,
 * forces a hardware-tracking PSR exit so the new content is displayed,
 * and schedules the work to re-activate PSR once idle.
 */
void intel_psr_flush(struct drm_i915_private *dev_priv,
		     unsigned frontbuffer_bits, enum fb_op_origin origin)
{
	if (!CAN_PSR(dev_priv))
		return;

	/* Page flips are handled by hardware frontbuffer tracking */
	if (origin == ORIGIN_FLIP)
		return;

	mutex_lock(&dev_priv->psr.lock);
	if (!dev_priv->psr.enabled) {
		mutex_unlock(&dev_priv->psr.lock);
		return;
	}

	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(dev_priv->psr.pipe);
	dev_priv->psr.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* By definition flush = invalidate + flush */
	if (frontbuffer_bits)
		psr_force_hw_tracking_exit(dev_priv);

	if (!dev_priv->psr.active && !dev_priv->psr.busy_frontbuffer_bits)
		schedule_work(&dev_priv->psr.work);
	mutex_unlock(&dev_priv->psr.lock);
}
1194
1195
1196
1197
1198
1199
1200
1201
/**
 * intel_psr_init - Init basic PSR work and mutex.
 * @dev_priv: i915 device private
 *
 * Resolves the enable_psr module parameter default, checks for a stale
 * PSR error left over from before a driver reload, chooses the link
 * standby policy, and sets up the PSR work and lock.
 */
void intel_psr_init(struct drm_i915_private *dev_priv)
{
	u32 val;

	if (!HAS_PSR(dev_priv))
		return;

	dev_priv->psr_mmio_base = IS_HASWELL(dev_priv) ?
		HSW_EDP_PSR_BASE : BDW_EDP_PSR_BASE;

	if (!dev_priv->psr.sink_support)
		return;

	/* enable_psr == -1 means "auto": off unless gen9+ and VBT says so */
	if (i915_modparams.enable_psr == -1)
		if (INTEL_GEN(dev_priv) < 9 || !dev_priv->vbt.psr.enable)
			i915_modparams.enable_psr = 0;

	/*
	 * A PSR error that happened before a driver reload can leave the
	 * error bit set in EDP_PSR_IIR; treat the sink as unreliable in
	 * that case so PSR is not re-enabled against a broken sink.
	 */
	val = I915_READ(EDP_PSR_IIR);
	val &= EDP_PSR_ERROR(edp_psr_shift(TRANSCODER_EDP));
	if (val) {
		DRM_DEBUG_KMS("PSR interruption error set\n");
		dev_priv->psr.sink_not_reliable = true;
	}

	/* Set link_standby x link_off defaults */
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
		/* HSW and BDW require workarounds that we don't implement. */
		dev_priv->psr.link_standby = false;
	else
		/* For new platforms let's respect VBT back again */
		dev_priv->psr.link_standby = dev_priv->vbt.psr.full_link;

	INIT_WORK(&dev_priv->psr.work, intel_psr_work);
	mutex_init(&dev_priv->psr.lock);
}
1245
/*
 * intel_psr_short_pulse - handle a short HPD pulse while PSR is enabled
 * @intel_dp: Intel DP
 *
 * Reads the sink's PSR status and error registers over DPCD. On an
 * internal sink error or any recognized error bit, PSR is disabled and
 * the sink is flagged as unreliable. Handled errors are acked back to
 * the sink.
 */
void intel_psr_short_pulse(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct i915_psr *psr = &dev_priv->psr;
	u8 val;
	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
			  DP_PSR_LINK_CRC_ERROR;

	if (!CAN_PSR(dev_priv) || !intel_dp_is_edp(intel_dp))
		return;

	mutex_lock(&psr->lock);

	if (!psr->enabled || psr->dp != intel_dp)
		goto exit;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_STATUS, &val) != 1) {
		DRM_ERROR("PSR_STATUS dpcd read failed\n");
		goto exit;
	}

	if ((val & DP_PSR_SINK_STATE_MASK) == DP_PSR_SINK_INTERNAL_ERROR) {
		DRM_DEBUG_KMS("PSR sink internal error, disabling PSR\n");
		intel_psr_disable_locked(intel_dp);
		psr->sink_not_reliable = true;
	}

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ERROR_STATUS, &val) != 1) {
		DRM_ERROR("PSR_ERROR_STATUS dpcd read failed\n");
		goto exit;
	}

	if (val & DP_PSR_RFB_STORAGE_ERROR)
		DRM_DEBUG_KMS("PSR RFB storage error, disabling PSR\n");
	if (val & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
		DRM_DEBUG_KMS("PSR VSC SDP uncorrectable error, disabling PSR\n");
	if (val & DP_PSR_LINK_CRC_ERROR)
		DRM_ERROR("PSR Link CRC error, disabling PSR\n");

	if (val & ~errors)
		DRM_ERROR("PSR_ERROR_STATUS unhandled errors %x\n",
			  val & ~errors);
	if (val & errors) {
		intel_psr_disable_locked(intel_dp);
		psr->sink_not_reliable = true;
	}
	/* clear status register */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, val);
exit:
	mutex_unlock(&psr->lock);
}
1298
1299bool intel_psr_enabled(struct intel_dp *intel_dp)
1300{
1301 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1302 bool ret;
1303
1304 if (!CAN_PSR(dev_priv) || !intel_dp_is_edp(intel_dp))
1305 return false;
1306
1307 mutex_lock(&dev_priv->psr.lock);
1308 ret = (dev_priv->psr.dp == intel_dp && dev_priv->psr.enabled);
1309 mutex_unlock(&dev_priv->psr.lock);
1310
1311 return ret;
1312}
1313