/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OF THE
 * SOFTWARE.
 */

/**
 * DOC: Panel Self Refresh (PSR/SRD)
 *
 * Since Haswell the display controller supports Panel Self-Refresh on
 * panels that implement a remote frame buffer (RFB) according to the PSR
 * spec in eDP 1.3. PSR saves power when the system is idle but the display
 * is on: as long as the frame buffer is unchanged, the panel refreshes
 * itself from the RFB, so the source can shut down the main link and
 * eliminate display refresh traffic to memory entirely.
 *
 * Panel Self Refresh must be supported by both hardware (source) and
 * panel (sink).
 *
 * The hardware-based PSR support enters and exits self-refresh mode
 * automatically and keeps track of frontbuffer changes on its own. Because
 * the hardware tracking has gaps, the driver integrates with the software
 * frontbuffer tracking instead: intel_psr_invalidate() and intel_psr_flush()
 * get called by the frontbuffer tracking code so that no screen update is
 * missed. Because of locking constraints, re-enabling self-refresh is done
 * from a work queue, which must be correctly synchronized/cancelled when
 * shutting down the pipe.
 */

#include <drm/drmP.h>

#include "intel_drv.h"
#include "i915_drv.h"

static bool psr_global_enabled(u32 debug)
{
	switch (debug & I915_PSR_DEBUG_MODE_MASK) {
	case I915_PSR_DEBUG_DEFAULT:
		return i915_modparams.enable_psr;
	case I915_PSR_DEBUG_DISABLE:
		return false;
	default:
		return true;
	}
}

static bool intel_psr2_enabled(struct drm_i915_private *dev_priv,
			       const struct intel_crtc_state *crtc_state)
{
	switch (dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
	case I915_PSR_DEBUG_FORCE_PSR1:
		return false;
	default:
		return crtc_state->has_psr2;
	}
}

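/*
 * PSR error interrupts stay unmasked for every supported transcoder; the
 * pre-entry/post-exit debug interrupts are only unmasked when the
 * I915_PSR_DEBUG_IRQ flag is set.
 */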
void intel_psr_irq_control(struct drm_i915_private *dev_priv, u32 debug)
{
	u32 debug_mask, mask;

	mask = EDP_PSR_ERROR(TRANSCODER_EDP);
	debug_mask = EDP_PSR_POST_EXIT(TRANSCODER_EDP) |
		     EDP_PSR_PRE_ENTRY(TRANSCODER_EDP);

	if (INTEL_GEN(dev_priv) >= 8) {
		mask |= EDP_PSR_ERROR(TRANSCODER_A) |
			EDP_PSR_ERROR(TRANSCODER_B) |
			EDP_PSR_ERROR(TRANSCODER_C);

		debug_mask |= EDP_PSR_POST_EXIT(TRANSCODER_A) |
			      EDP_PSR_PRE_ENTRY(TRANSCODER_A) |
			      EDP_PSR_POST_EXIT(TRANSCODER_B) |
			      EDP_PSR_PRE_ENTRY(TRANSCODER_B) |
			      EDP_PSR_POST_EXIT(TRANSCODER_C) |
			      EDP_PSR_PRE_ENTRY(TRANSCODER_C);
	}

	if (debug & I915_PSR_DEBUG_IRQ)
		mask |= debug_mask;

	I915_WRITE(EDP_PSR_IMR, ~mask);
}

static void psr_event_print(u32 val, bool psr2_enabled)
{
	DRM_DEBUG_KMS("PSR exit events: 0x%x\n", val);
	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
		DRM_DEBUG_KMS("\tPSR2 watchdog timer expired\n");
	if ((val & PSR_EVENT_PSR2_DISABLED) && psr2_enabled)
		DRM_DEBUG_KMS("\tPSR2 disabled\n");
	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
		DRM_DEBUG_KMS("\tSU dirty FIFO underrun\n");
	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
		DRM_DEBUG_KMS("\tSU CRC FIFO underrun\n");
	if (val & PSR_EVENT_GRAPHICS_RESET)
		DRM_DEBUG_KMS("\tGraphics reset\n");
	if (val & PSR_EVENT_PCH_INTERRUPT)
		DRM_DEBUG_KMS("\tPCH interrupt\n");
	if (val & PSR_EVENT_MEMORY_UP)
		DRM_DEBUG_KMS("\tMemory up\n");
	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
		DRM_DEBUG_KMS("\tFront buffer modification\n");
	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
		DRM_DEBUG_KMS("\tPSR watchdog timer expired\n");
	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
		DRM_DEBUG_KMS("\tPIPE registers updated\n");
	if (val & PSR_EVENT_REGISTER_UPDATE)
		DRM_DEBUG_KMS("\tRegister updated\n");
	if (val & PSR_EVENT_HDCP_ENABLE)
		DRM_DEBUG_KMS("\tHDCP enabled\n");
	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
		DRM_DEBUG_KMS("\tKVMR session enabled\n");
	if (val & PSR_EVENT_VBI_ENABLE)
		DRM_DEBUG_KMS("\tVBI enabled\n");
	if (val & PSR_EVENT_LPSP_MODE_EXIT)
		DRM_DEBUG_KMS("\tLPSP mode exited\n");
	if ((val & PSR_EVENT_PSR_DISABLE) && !psr2_enabled)
		DRM_DEBUG_KMS("\tPSR disabled\n");
}

void intel_psr_irq_handler(struct drm_i915_private *dev_priv, u32 psr_iir)
{
	u32 transcoders = BIT(TRANSCODER_EDP);
	enum transcoder cpu_transcoder;
	ktime_t time_ns = ktime_get();

	if (INTEL_GEN(dev_priv) >= 8)
		transcoders |= BIT(TRANSCODER_A) |
			       BIT(TRANSCODER_B) |
			       BIT(TRANSCODER_C);

	for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
		if (psr_iir & EDP_PSR_ERROR(cpu_transcoder))
			DRM_DEBUG_KMS("[transcoder %s] PSR aux error\n",
				      transcoder_name(cpu_transcoder));

		if (psr_iir & EDP_PSR_PRE_ENTRY(cpu_transcoder)) {
			dev_priv->psr.last_entry_attempt = time_ns;
			DRM_DEBUG_KMS("[transcoder %s] PSR entry attempt in 2 vblanks\n",
				      transcoder_name(cpu_transcoder));
		}

		if (psr_iir & EDP_PSR_POST_EXIT(cpu_transcoder)) {
			dev_priv->psr.last_exit = time_ns;
			DRM_DEBUG_KMS("[transcoder %s] PSR exit completed\n",
				      transcoder_name(cpu_transcoder));

			if (INTEL_GEN(dev_priv) >= 9) {
				u32 val = I915_READ(PSR_EVENT(cpu_transcoder));
				bool psr2_enabled = dev_priv->psr.psr2_enabled;

				I915_WRITE(PSR_EVENT(cpu_transcoder), val);
				psr_event_print(val, psr2_enabled);
			}
		}
	}
}

static bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
{
	uint8_t dprx = 0;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
			      &dprx) != 1)
		return false;
	return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED;
}

static bool intel_dp_get_alpm_status(struct intel_dp *intel_dp)
{
	uint8_t alpm_caps = 0;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_RECEIVER_ALPM_CAP,
			      &alpm_caps) != 1)
		return false;
	return alpm_caps & DP_ALPM_CAP;
}

static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
{
	u8 val = 8; /* assume the worst case if the DPCD read fails */

	if (drm_dp_dpcd_readb(&intel_dp->aux,
			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
	else
		DRM_DEBUG_KMS("Unable to get sink synchronization latency, assuming 8 frames\n");
	return val;
}

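/*
 * Cache the sink's PSR DPCD capabilities at detection time and work out
 * whether PSR1/PSR2 can be supported on this panel.
 */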
void intel_psr_init_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv =
		to_i915(dp_to_dig_port(intel_dp)->base.base.dev);

	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
			 sizeof(intel_dp->psr_dpcd));

	if (!intel_dp->psr_dpcd[0])
		return;
	DRM_DEBUG_KMS("eDP panel supports PSR version %x\n",
		      intel_dp->psr_dpcd[0]);

	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
		DRM_DEBUG_KMS("Panel lacks power state control, PSR cannot be enabled\n");
		return;
	}
	dev_priv->psr.sink_support = true;
	dev_priv->psr.sink_sync_latency =
		intel_dp_get_sink_sync_latency(intel_dp);

	WARN_ON(dev_priv->psr.dp);
	dev_priv->psr.dp = intel_dp;

	if (INTEL_GEN(dev_priv) >= 9 &&
	    (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_IS_SUPPORTED)) {
		bool y_req = intel_dp->psr_dpcd[1] &
			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
		bool alpm = intel_dp_get_alpm_status(intel_dp);

		/*
		 * Sink PSR2 support requires ALPM and, on these platforms,
		 * that the panel requires Y-coordinates in the VSC SDP,
		 * since that is the only configuration we program.
		 */
		dev_priv->psr.sink_psr2_support = y_req && alpm;
		DRM_DEBUG_KMS("PSR2 %ssupported\n",
			      dev_priv->psr.sink_psr2_support ? "" : "not ");

		if (dev_priv->psr.sink_psr2_support) {
			dev_priv->psr.colorimetry_support =
				intel_dp_get_colorimetry_status(intel_dp);
		}
	}
}

static void intel_psr_setup_vsc(struct intel_dp *intel_dp,
				const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct edp_vsc_psr psr_vsc;

	if (dev_priv->psr.psr2_enabled) {
		/* Prepare VSC Header for SU as per eDP 1.4 spec, Table 6.11 */
		memset(&psr_vsc, 0, sizeof(psr_vsc));
		psr_vsc.sdp_header.HB0 = 0;
		psr_vsc.sdp_header.HB1 = 0x7;
		if (dev_priv->psr.colorimetry_support) {
			psr_vsc.sdp_header.HB2 = 0x5;
			psr_vsc.sdp_header.HB3 = 0x13;
		} else {
			psr_vsc.sdp_header.HB2 = 0x4;
			psr_vsc.sdp_header.HB3 = 0xe;
		}
	} else {
		/* Prepare VSC packet as per eDP 1.3 spec, Table 3.10 */
		memset(&psr_vsc, 0, sizeof(psr_vsc));
		psr_vsc.sdp_header.HB0 = 0;
		psr_vsc.sdp_header.HB1 = 0x7;
		psr_vsc.sdp_header.HB2 = 0x2;
		psr_vsc.sdp_header.HB3 = 0x8;
	}

	intel_dig_port->write_infoframe(&intel_dig_port->base.base, crtc_state,
					DP_SDP_VSC, &psr_vsc, sizeof(psr_vsc));
}

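/*
 * Pre-fill the AUX registers with the message the hardware will send on
 * PSR exit: a native AUX write of DP_SET_POWER_D0 to wake the sink.
 */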
static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 aux_clock_divider, aux_ctl;
	int i;
	static const uint8_t aux_msg[] = {
		[0] = DP_AUX_NATIVE_WRITE << 4,
		[1] = DP_SET_POWER >> 8,
		[2] = DP_SET_POWER & 0xff,
		[3] = 1 - 1, /* message length is encoded as length - 1 */
		[4] = DP_SET_POWER_D0,
	};
	u32 psr_aux_mask = EDP_PSR_AUX_CTL_TIME_OUT_MASK |
			   EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
			   EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
			   EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;

	BUILD_BUG_ON(sizeof(aux_msg) > 20);
	for (i = 0; i < sizeof(aux_msg); i += 4)
		I915_WRITE(EDP_PSR_AUX_DATA(i >> 2),
			   intel_dp_pack_aux(&aux_msg[i], sizeof(aux_msg) - i));

	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);

	/* Start with bits set for DDI_AUX_CTL register */
	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
					     aux_clock_divider);

	/* Select only valid bits for SRD_AUX_CTL */
	aux_ctl &= psr_aux_mask;
	I915_WRITE(EDP_PSR_AUX_CTL, aux_ctl);
}

static void intel_psr_enable_sink(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 dpcd_val = DP_PSR_ENABLE;

	/* Enable ALPM at sink for psr2 */
	if (dev_priv->psr.psr2_enabled) {
		drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG,
				   DP_ALPM_ENABLE);
		dpcd_val |= DP_PSR_ENABLE_PSR2;
	}

	if (dev_priv->psr.link_standby)
		dpcd_val |= DP_PSR_MAIN_LINK_ACTIVE;
	if (!dev_priv->psr.psr2_enabled && INTEL_GEN(dev_priv) >= 8)
		dpcd_val |= DP_PSR_CRC_VERIFICATION;
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, dpcd_val);

	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
}

static void hsw_activate_psr1(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 max_sleep_time = 0x1f;
	u32 val = EDP_PSR_ENABLE;

	/*
	 * Let's use 6 as the minimum to cover all known cases including the
	 * off-by-one issue that HW has in some cases.
	 */
	int idle_frames = max(6, dev_priv->vbt.psr.idle_frames);

	/*
	 * sink_sync_latency of 8 means the source has to wait for more than
	 * 8 frames, so go with 9 frames.
	 */
	idle_frames = max(idle_frames, dev_priv->psr.sink_sync_latency + 1);
	val |= idle_frames << EDP_PSR_IDLE_FRAME_SHIFT;

	val |= max_sleep_time << EDP_PSR_MAX_SLEEP_TIME_SHIFT;
	if (IS_HASWELL(dev_priv))
		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;

	if (dev_priv->psr.link_standby)
		val |= EDP_PSR_LINK_STANDBY;

	if (dev_priv->vbt.psr.tp1_wakeup_time_us == 0)
		val |= EDP_PSR_TP1_TIME_0us;
	else if (dev_priv->vbt.psr.tp1_wakeup_time_us <= 100)
		val |= EDP_PSR_TP1_TIME_100us;
	else if (dev_priv->vbt.psr.tp1_wakeup_time_us <= 500)
		val |= EDP_PSR_TP1_TIME_500us;
	else
		val |= EDP_PSR_TP1_TIME_2500us;

	if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us == 0)
		val |= EDP_PSR_TP2_TP3_TIME_0us;
	else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR_TP2_TP3_TIME_100us;
	else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR_TP2_TP3_TIME_500us;
	else
		val |= EDP_PSR_TP2_TP3_TIME_2500us;

	if (intel_dp_source_supports_hbr2(intel_dp) &&
	    drm_dp_tps3_supported(intel_dp->dpcd))
		val |= EDP_PSR_TP1_TP3_SEL;
	else
		val |= EDP_PSR_TP1_TP2_SEL;

	if (INTEL_GEN(dev_priv) >= 8)
		val |= EDP_PSR_CRC_ENABLE;

	val |= I915_READ(EDP_PSR_CTL) & EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK;
	I915_WRITE(EDP_PSR_CTL, val);
}

static void hsw_activate_psr2(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val;

	/*
	 * Let's use 6 as the minimum to cover all known cases including the
	 * off-by-one issue that HW has in some cases.
	 */
	int idle_frames = max(6, dev_priv->vbt.psr.idle_frames);

	idle_frames = max(idle_frames, dev_priv->psr.sink_sync_latency + 1);
	val = idle_frames << EDP_PSR2_IDLE_FRAME_SHIFT;

	/*
	 * FIXME: selective update doesn't mesh with our software
	 * frontbuffer tracking, and the hardware tracking alone isn't
	 * good enough.
	 */
	val |= EDP_PSR2_ENABLE | EDP_SU_TRACK_ENABLE;
	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
		val |= EDP_Y_COORDINATE_ENABLE;

	val |= EDP_PSR2_FRAME_BEFORE_SU(dev_priv->psr.sink_sync_latency + 1);

	if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us >= 0 &&
	    dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 50)
		val |= EDP_PSR2_TP2_TIME_50us;
	else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR2_TP2_TIME_100us;
	else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR2_TP2_TIME_500us;
	else
		val |= EDP_PSR2_TP2_TIME_2500us;

	I915_WRITE(EDP_PSR2_CTL, val);
}

static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
				    struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int crtc_hdisplay = crtc_state->base.adjusted_mode.crtc_hdisplay;
	int crtc_vdisplay = crtc_state->base.adjusted_mode.crtc_vdisplay;
	int psr_max_h = 0, psr_max_v = 0;

	/*
	 * FIXME psr2_support is messed up. It's both computed
	 * dynamically during PSR enable, and extracted from sink
	 * caps during eDP detection.
	 */
	if (!dev_priv->psr.sink_psr2_support)
		return false;

	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) {
		psr_max_h = 4096;
		psr_max_v = 2304;
	} else if (IS_GEN9(dev_priv)) {
		psr_max_h = 3640;
		psr_max_v = 2304;
	}

	if (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v) {
		DRM_DEBUG_KMS("PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
			      crtc_hdisplay, crtc_vdisplay,
			      psr_max_h, psr_max_v);
		return false;
	}

	return true;
}

void intel_psr_compute_config(struct intel_dp *intel_dp,
			      struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->base.adjusted_mode;
	int psr_setup_time;

	if (!CAN_PSR(dev_priv))
		return;

	if (intel_dp != dev_priv->psr.dp)
		return;

	/*
	 * HSW spec explicitly says PSR is tied to port A.
	 * BDW+ platforms have a per-transcoder PSR register set in the DDI
	 * implementation, but we only implement the transcoder EDP ones.
	 * Since by display design transcoder EDP is tied to port A, we can
	 * safely bail out based on the port here.
	 */
	if (dig_port->base.port != PORT_A) {
		DRM_DEBUG_KMS("PSR condition failed: Port not supported\n");
		return;
	}

	if (IS_HASWELL(dev_priv) &&
	    I915_READ(HSW_STEREO_3D_CTL(crtc_state->cpu_transcoder)) &
	    S3D_ENABLE) {
		DRM_DEBUG_KMS("PSR condition failed: Stereo 3D is enabled\n");
		return;
	}

	if (IS_HASWELL(dev_priv) &&
	    adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		DRM_DEBUG_KMS("PSR condition failed: Interlaced mode is enabled\n");
		return;
	}

	psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
	if (psr_setup_time < 0) {
		DRM_DEBUG_KMS("PSR condition failed: Invalid PSR setup time (0x%02x)\n",
			      intel_dp->psr_dpcd[1]);
		return;
	}

	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
		DRM_DEBUG_KMS("PSR condition failed: PSR setup time (%d us) too long\n",
			      psr_setup_time);
		return;
	}

	crtc_state->has_psr = true;
	crtc_state->has_psr2 = intel_psr2_config_valid(intel_dp, crtc_state);
}

static void intel_psr_activate(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (INTEL_GEN(dev_priv) >= 9)
		WARN_ON(I915_READ(EDP_PSR2_CTL) & EDP_PSR2_ENABLE);
	WARN_ON(I915_READ(EDP_PSR_CTL) & EDP_PSR_ENABLE);
	WARN_ON(dev_priv->psr.active);
	lockdep_assert_held(&dev_priv->psr.lock);

	/* psr1 and psr2 are mutually exclusive. */
	if (dev_priv->psr.psr2_enabled)
		hsw_activate_psr2(intel_dp);
	else
		hsw_activate_psr1(intel_dp);

	dev_priv->psr.active = true;
}

static void intel_psr_enable_source(struct intel_dp *intel_dp,
				    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * Only HSW and BDW have PSR AUX registers that need to be setup.
	 * SKL+ use hardcoded values for PSR AUX transactions.
	 */
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
		hsw_psr_setup_aux(intel_dp);

	if (dev_priv->psr.psr2_enabled) {
		u32 chicken = I915_READ(CHICKEN_TRANS(cpu_transcoder));

		if (INTEL_GEN(dev_priv) == 9 && !IS_GEMINILAKE(dev_priv))
			chicken |= (PSR2_VSC_ENABLE_PROG_HEADER
				   | PSR2_ADD_VERTICAL_LINE_COUNT);
		else
			chicken &= ~VSC_DATA_SEL_SOFTWARE_CONTROL;
		I915_WRITE(CHICKEN_TRANS(cpu_transcoder), chicken);

		I915_WRITE(EDP_PSR_DEBUG,
			   EDP_PSR_DEBUG_MASK_MEMUP |
			   EDP_PSR_DEBUG_MASK_HPD |
			   EDP_PSR_DEBUG_MASK_LPSP |
			   EDP_PSR_DEBUG_MASK_MAX_SLEEP |
			   EDP_PSR_DEBUG_MASK_DISP_REG_WRITE);
	} else {
		/*
		 * Per spec: avoid continuous PSR exit by masking MEMUP and
		 * HPD. Also mask LPSP to avoid a dependency on other drivers
		 * that might block runtime_pm besides preventing other hw
		 * tracking issues, now that we can rely on frontbuffer
		 * tracking.
		 */
		I915_WRITE(EDP_PSR_DEBUG,
			   EDP_PSR_DEBUG_MASK_MEMUP |
			   EDP_PSR_DEBUG_MASK_HPD |
			   EDP_PSR_DEBUG_MASK_LPSP |
			   EDP_PSR_DEBUG_MASK_DISP_REG_WRITE |
			   EDP_PSR_DEBUG_MASK_MAX_SLEEP);
	}
}

static void intel_psr_enable_locked(struct drm_i915_private *dev_priv,
				    const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = dev_priv->psr.dp;

	if (dev_priv->psr.enabled)
		return;

	DRM_DEBUG_KMS("Enabling PSR%s\n",
		      dev_priv->psr.psr2_enabled ? "2" : "1");
	intel_psr_setup_vsc(intel_dp, crtc_state);
	intel_psr_enable_sink(intel_dp);
	intel_psr_enable_source(intel_dp, crtc_state);
	dev_priv->psr.enabled = true;

	intel_psr_activate(intel_dp);
}

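/**
 * intel_psr_enable - Enable PSR
 * @intel_dp: Intel DP
 * @crtc_state: new CRTC state
 *
 * This function can only be called after the pipe is fully trained and
 * enabled.
 */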
void intel_psr_enable(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!crtc_state->has_psr)
		return;

	if (WARN_ON(!CAN_PSR(dev_priv)))
		return;

	WARN_ON(dev_priv->drrs.dp);

	mutex_lock(&dev_priv->psr.lock);
	if (dev_priv->psr.prepared) {
		DRM_DEBUG_KMS("PSR already in use\n");
		goto unlock;
	}

	dev_priv->psr.psr2_enabled = intel_psr2_enabled(dev_priv, crtc_state);
	dev_priv->psr.busy_frontbuffer_bits = 0;
	dev_priv->psr.prepared = true;

	if (psr_global_enabled(dev_priv->psr.debug))
		intel_psr_enable_locked(dev_priv, crtc_state);
	else
		DRM_DEBUG_KMS("PSR disabled by flag\n");

unlock:
	mutex_unlock(&dev_priv->psr.lock);
}

static void
intel_psr_disable_source(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (dev_priv->psr.active) {
		i915_reg_t psr_status;
		u32 psr_status_mask;

		if (dev_priv->psr.psr2_enabled) {
			psr_status = EDP_PSR2_STATUS;
			psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;

			I915_WRITE(EDP_PSR2_CTL,
				   I915_READ(EDP_PSR2_CTL) &
				   ~(EDP_PSR2_ENABLE | EDP_SU_TRACK_ENABLE));
		} else {
			psr_status = EDP_PSR_STATUS;
			psr_status_mask = EDP_PSR_STATUS_STATE_MASK;

			I915_WRITE(EDP_PSR_CTL,
				   I915_READ(EDP_PSR_CTL) & ~EDP_PSR_ENABLE);
		}

		/* Wait till PSR is idle */
		if (intel_wait_for_register(dev_priv,
					    psr_status, psr_status_mask, 0,
					    2000))
			DRM_ERROR("Timed out waiting for PSR Idle State\n");

		dev_priv->psr.active = false;
	} else {
		if (dev_priv->psr.psr2_enabled)
			WARN_ON(I915_READ(EDP_PSR2_CTL) & EDP_PSR2_ENABLE);
		else
			WARN_ON(I915_READ(EDP_PSR_CTL) & EDP_PSR_ENABLE);
	}
}

static void intel_psr_disable_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	lockdep_assert_held(&dev_priv->psr.lock);

	if (!dev_priv->psr.enabled)
		return;

	DRM_DEBUG_KMS("Disabling PSR%s\n",
		      dev_priv->psr.psr2_enabled ? "2" : "1");
	intel_psr_disable_source(intel_dp);

	/* Disable PSR on Sink */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);

	dev_priv->psr.enabled = false;
}

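/**
 * intel_psr_disable - Disable PSR
 * @intel_dp: Intel DP
 * @old_crtc_state: old CRTC state
 *
 * This function needs to be called before disabling pipe.
 */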
void intel_psr_disable(struct intel_dp *intel_dp,
		       const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!old_crtc_state->has_psr)
		return;

	if (WARN_ON(!CAN_PSR(dev_priv)))
		return;

	mutex_lock(&dev_priv->psr.lock);
	if (!dev_priv->psr.prepared) {
		mutex_unlock(&dev_priv->psr.lock);
		return;
	}

	intel_psr_disable_locked(intel_dp);

	dev_priv->psr.prepared = false;
	mutex_unlock(&dev_priv->psr.lock);
	cancel_work_sync(&dev_priv->psr.work);
}

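/**
 * intel_psr_wait_for_idle - wait for PSR1 to idle
 * @new_crtc_state: new CRTC state
 * @out_value: PSR status in case of failure
 *
 * This function is expected to be called from pipe_update_start() where it is
 * not expected to race with PSR enable or disable.
 *
 * Returns: 0 on success or -ETIMEDOUT if PSR status does not idle.
 */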
int intel_psr_wait_for_idle(const struct intel_crtc_state *new_crtc_state,
			    u32 *out_value)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (!dev_priv->psr.enabled || !new_crtc_state->has_psr)
		return 0;

	/* FIXME: Update this for PSR2 if we need to wait for idle */
	if (READ_ONCE(dev_priv->psr.psr2_enabled))
		return 0;

	/*
	 * From bspec: Panel Self Refresh (BDW+)
	 * Max. time for PSR to idle = inverse of the refresh rate + 6 ms of
	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
	 * defensive enough to cover everything.
	 */
	return __intel_wait_for_register(dev_priv, EDP_PSR_STATUS,
					 EDP_PSR_STATUS_STATE_MASK,
					 EDP_PSR_STATUS_STATE_IDLE, 2, 50,
					 out_value);
}

static bool __psr_wait_for_idle_locked(struct drm_i915_private *dev_priv)
{
	i915_reg_t reg;
	u32 mask;
	int err;

	if (!dev_priv->psr.enabled)
		return false;

	if (dev_priv->psr.psr2_enabled) {
		reg = EDP_PSR2_STATUS;
		mask = EDP_PSR2_STATUS_STATE_MASK;
	} else {
		reg = EDP_PSR_STATUS;
		mask = EDP_PSR_STATUS_STATE_MASK;
	}

	mutex_unlock(&dev_priv->psr.lock);

	err = intel_wait_for_register(dev_priv, reg, mask, 0, 50);
	if (err)
		DRM_ERROR("Timed out waiting for PSR Idle for re-enable\n");

	/* After the unlocked wait, verify that PSR is still wanted! */
	mutex_lock(&dev_priv->psr.lock);
	return err == 0 && dev_priv->psr.enabled;
}

static bool switching_psr(struct drm_i915_private *dev_priv,
			  struct intel_crtc_state *crtc_state,
			  u32 mode)
{
	/* Can't switch psr state anyway if PSR2 is not supported. */
	if (!crtc_state || !crtc_state->has_psr2)
		return false;

	if (dev_priv->psr.psr2_enabled && mode == I915_PSR_DEBUG_FORCE_PSR1)
		return true;

	if (!dev_priv->psr.psr2_enabled && mode != I915_PSR_DEBUG_FORCE_PSR1)
		return true;

	return false;
}

int intel_psr_set_debugfs_mode(struct drm_i915_private *dev_priv,
			       struct drm_modeset_acquire_ctx *ctx,
			       u64 val)
{
	struct drm_device *dev = &dev_priv->drm;
	struct drm_connector_state *conn_state;
	struct intel_crtc_state *crtc_state = NULL;
	struct drm_crtc_commit *commit;
	struct drm_crtc *crtc;
	struct intel_dp *dp;
	int ret;
	bool enable;
	u32 mode = val & I915_PSR_DEBUG_MODE_MASK;

	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_MODE_MASK) ||
	    mode > I915_PSR_DEBUG_FORCE_PSR1) {
		DRM_DEBUG_KMS("Invalid debug mask %llx\n", val);
		return -EINVAL;
	}

	ret = drm_modeset_lock(&dev->mode_config.connection_mutex, ctx);
	if (ret)
		return ret;

	/* dev_priv->psr.dp is set once during init and does not change */
	dp = READ_ONCE(dev_priv->psr.dp);
	conn_state = dp->attached_connector->base.state;
	crtc = conn_state->crtc;
	if (crtc) {
		ret = drm_modeset_lock(&crtc->mutex, ctx);
		if (ret)
			return ret;

		crtc_state = to_intel_crtc_state(crtc->state);
		commit = crtc_state->base.commit;
	} else {
		commit = conn_state->commit;
	}
	if (commit) {
		ret = wait_for_completion_interruptible(&commit->hw_done);
		if (ret)
			return ret;
	}

	ret = mutex_lock_interruptible(&dev_priv->psr.lock);
	if (ret)
		return ret;

	enable = psr_global_enabled(val);

	if (!enable || switching_psr(dev_priv, crtc_state, mode))
		intel_psr_disable_locked(dev_priv->psr.dp);

	dev_priv->psr.debug = val;
	if (crtc)
		dev_priv->psr.psr2_enabled = intel_psr2_enabled(dev_priv, crtc_state);

	intel_psr_irq_control(dev_priv, dev_priv->psr.debug);

	if (dev_priv->psr.prepared && enable)
		intel_psr_enable_locked(dev_priv, crtc_state);

	mutex_unlock(&dev_priv->psr.lock);
	return ret;
}

static void intel_psr_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), psr.work);

	mutex_lock(&dev_priv->psr.lock);

	if (!dev_priv->psr.enabled)
		goto unlock;

	/*
	 * We have to make sure PSR is ready for re-enable, otherwise it
	 * stays disabled until the next full enable/disable cycle.
	 * PSR might take some time to get fully disabled and be ready
	 * for re-enable.
	 */
	if (!__psr_wait_for_idle_locked(dev_priv))
		goto unlock;

	/*
	 * The delayed work can race with an invalidate, hence we need to
	 * recheck. Since psr_flush first clears this and then reschedules we
	 * won't ever miss a flush when bailing out here.
	 */
	if (dev_priv->psr.busy_frontbuffer_bits || dev_priv->psr.active)
		goto unlock;

	intel_psr_activate(dev_priv->psr.dp);
unlock:
	mutex_unlock(&dev_priv->psr.lock);
}

static void intel_psr_exit(struct drm_i915_private *dev_priv)
{
	u32 val;

	if (!dev_priv->psr.active)
		return;

	if (dev_priv->psr.psr2_enabled) {
		val = I915_READ(EDP_PSR2_CTL);
		WARN_ON(!(val & EDP_PSR2_ENABLE));
		I915_WRITE(EDP_PSR2_CTL, val & ~EDP_PSR2_ENABLE);
	} else {
		val = I915_READ(EDP_PSR_CTL);
		WARN_ON(!(val & EDP_PSR_ENABLE));
		I915_WRITE(EDP_PSR_CTL, val & ~EDP_PSR_ENABLE);
	}
	dev_priv->psr.active = false;
}

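/**
 * intel_psr_invalidate - Invalidate PSR
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 * @origin: which operation caused the invalidate
 *
 * Since the hardware frontbuffer tracking has gaps we need to integrate
 * with the software frontbuffer tracking. This function gets called every
 * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
 * disabled if the frontbuffer mask contains a buffer relevant to PSR.
 *
 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
 */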
void intel_psr_invalidate(struct drm_i915_private *dev_priv,
			  unsigned frontbuffer_bits, enum fb_op_origin origin)
{
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (!CAN_PSR(dev_priv))
		return;

	if (origin == ORIGIN_FLIP)
		return;

	mutex_lock(&dev_priv->psr.lock);
	if (!dev_priv->psr.enabled) {
		mutex_unlock(&dev_priv->psr.lock);
		return;
	}

	crtc = dp_to_dig_port(dev_priv->psr.dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->psr.busy_frontbuffer_bits |= frontbuffer_bits;

	if (frontbuffer_bits)
		intel_psr_exit(dev_priv);

	mutex_unlock(&dev_priv->psr.lock);
}

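/**
 * intel_psr_flush - Flush PSR
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 * @origin: which operation caused the flush
 *
 * Since the hardware frontbuffer tracking has gaps we need to integrate
 * with the software frontbuffer tracking. This function gets called every
 * time frontbuffer rendering has completed and flushed out to memory. PSR
 * can be enabled again if no other frontbuffer relevant to PSR is dirty.
 *
 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
 */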
void intel_psr_flush(struct drm_i915_private *dev_priv,
		     unsigned frontbuffer_bits, enum fb_op_origin origin)
{
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (!CAN_PSR(dev_priv))
		return;

	if (origin == ORIGIN_FLIP)
		return;

	mutex_lock(&dev_priv->psr.lock);
	if (!dev_priv->psr.enabled) {
		mutex_unlock(&dev_priv->psr.lock);
		return;
	}

	crtc = dp_to_dig_port(dev_priv->psr.dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->psr.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* By definition flush = invalidate + flush */
	if (frontbuffer_bits) {
		if (dev_priv->psr.psr2_enabled) {
			intel_psr_exit(dev_priv);
		} else {
			/*
			 * Display WA #0884: all
			 * This documented workaround for BXT can be safely
			 * applied broadly: writing CURSURFLIVE forces the
			 * hardware tracking to exit PSR, avoiding a full
			 * disable/re-enable cycle.
			 */
			I915_WRITE(CURSURFLIVE(pipe), 0);
		}
	}

	if (!dev_priv->psr.active && !dev_priv->psr.busy_frontbuffer_bits)
		schedule_work(&dev_priv->psr.work);
	mutex_unlock(&dev_priv->psr.lock);
}

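/**
 * intel_psr_init - Init basic PSR work and mutex.
 * @dev_priv: i915 device private
 *
 * This function is called only once at driver load to initialize basic
 * PSR stuff.
 */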
void intel_psr_init(struct drm_i915_private *dev_priv)
{
	if (!HAS_PSR(dev_priv))
		return;

	dev_priv->psr_mmio_base = IS_HASWELL(dev_priv) ?
		HSW_EDP_PSR_BASE : BDW_EDP_PSR_BASE;

	if (!dev_priv->psr.sink_support)
		return;

	if (i915_modparams.enable_psr == -1) {
		i915_modparams.enable_psr = dev_priv->vbt.psr.enable;

		/* Per platform default: all disabled. */
		i915_modparams.enable_psr = 0;
	}

	/* Set link_standby x link_off defaults */
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
		/* HSW and BDW require workarounds that we don't implement. */
		dev_priv->psr.link_standby = false;
	else
		/* For new platforms let's respect VBT back again */
		dev_priv->psr.link_standby = dev_priv->vbt.psr.full_link;

	INIT_WORK(&dev_priv->psr.work, intel_psr_work);
	mutex_init(&dev_priv->psr.lock);
}

void intel_psr_short_pulse(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct i915_psr *psr = &dev_priv->psr;
	u8 val;
	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
			  DP_PSR_LINK_CRC_ERROR;

	if (!CAN_PSR(dev_priv) || !intel_dp_is_edp(intel_dp))
		return;

	mutex_lock(&psr->lock);

	if (!psr->enabled || psr->dp != intel_dp)
		goto exit;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_STATUS, &val) != 1) {
		DRM_ERROR("PSR_STATUS dpcd read failed\n");
		goto exit;
	}

	if ((val & DP_PSR_SINK_STATE_MASK) == DP_PSR_SINK_INTERNAL_ERROR) {
		DRM_DEBUG_KMS("PSR sink internal error, disabling PSR\n");
		intel_psr_disable_locked(intel_dp);
	}

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ERROR_STATUS, &val) != 1) {
		DRM_ERROR("PSR_ERROR_STATUS dpcd read failed\n");
		goto exit;
	}

	if (val & DP_PSR_RFB_STORAGE_ERROR)
		DRM_DEBUG_KMS("PSR RFB storage error, disabling PSR\n");
	if (val & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
		DRM_DEBUG_KMS("PSR VSC SDP uncorrectable error, disabling PSR\n");
	if (val & DP_PSR_LINK_CRC_ERROR)
		DRM_ERROR("PSR Link CRC error, disabling PSR\n");

	if (val & ~errors)
		DRM_ERROR("PSR_ERROR_STATUS unhandled errors %x\n",
			  val & ~errors);
	if (val & errors)
		intel_psr_disable_locked(intel_dp);
	/* clear status register */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, val);

	/* TODO: handle PSR2 errors */
exit:
	mutex_unlock(&psr->lock);
}