1
2
3
4
5
6#include <linux/vgaarb.h>
7
8#include "display/intel_crt.h"
9#include "display/intel_dp.h"
10
11#include "i915_drv.h"
12#include "i915_irq.h"
13#include "intel_cdclk.h"
14#include "intel_combo_phy.h"
15#include "intel_csr.h"
16#include "intel_dpio_phy.h"
17#include "intel_drv.h"
18#include "intel_hotplug.h"
19#include "intel_sideband.h"
20
21bool intel_display_power_well_is_enabled(struct drm_i915_private *dev_priv,
22 enum i915_power_well_id power_well_id);
23
/**
 * intel_display_power_domain_str - map a display power domain to its name
 * @domain: power domain enumerator
 *
 * Returns a human-readable string for @domain, for use in debugfs and
 * error/debug messages. An unknown value triggers MISSING_CASE() and
 * yields "?".
 */
const char *
intel_display_power_domain_str(enum intel_display_power_domain domain)
{
	switch (domain) {
	case POWER_DOMAIN_DISPLAY_CORE:
		return "DISPLAY_CORE";
	case POWER_DOMAIN_PIPE_A:
		return "PIPE_A";
	case POWER_DOMAIN_PIPE_B:
		return "PIPE_B";
	case POWER_DOMAIN_PIPE_C:
		return "PIPE_C";
	case POWER_DOMAIN_PIPE_A_PANEL_FITTER:
		return "PIPE_A_PANEL_FITTER";
	case POWER_DOMAIN_PIPE_B_PANEL_FITTER:
		return "PIPE_B_PANEL_FITTER";
	case POWER_DOMAIN_PIPE_C_PANEL_FITTER:
		return "PIPE_C_PANEL_FITTER";
	case POWER_DOMAIN_TRANSCODER_A:
		return "TRANSCODER_A";
	case POWER_DOMAIN_TRANSCODER_B:
		return "TRANSCODER_B";
	case POWER_DOMAIN_TRANSCODER_C:
		return "TRANSCODER_C";
	case POWER_DOMAIN_TRANSCODER_EDP:
		return "TRANSCODER_EDP";
	case POWER_DOMAIN_TRANSCODER_EDP_VDSC:
		return "TRANSCODER_EDP_VDSC";
	case POWER_DOMAIN_TRANSCODER_DSI_A:
		return "TRANSCODER_DSI_A";
	case POWER_DOMAIN_TRANSCODER_DSI_C:
		return "TRANSCODER_DSI_C";
	case POWER_DOMAIN_PORT_DDI_A_LANES:
		return "PORT_DDI_A_LANES";
	case POWER_DOMAIN_PORT_DDI_B_LANES:
		return "PORT_DDI_B_LANES";
	case POWER_DOMAIN_PORT_DDI_C_LANES:
		return "PORT_DDI_C_LANES";
	case POWER_DOMAIN_PORT_DDI_D_LANES:
		return "PORT_DDI_D_LANES";
	case POWER_DOMAIN_PORT_DDI_E_LANES:
		return "PORT_DDI_E_LANES";
	case POWER_DOMAIN_PORT_DDI_F_LANES:
		return "PORT_DDI_F_LANES";
	case POWER_DOMAIN_PORT_DDI_A_IO:
		return "PORT_DDI_A_IO";
	case POWER_DOMAIN_PORT_DDI_B_IO:
		return "PORT_DDI_B_IO";
	case POWER_DOMAIN_PORT_DDI_C_IO:
		return "PORT_DDI_C_IO";
	case POWER_DOMAIN_PORT_DDI_D_IO:
		return "PORT_DDI_D_IO";
	case POWER_DOMAIN_PORT_DDI_E_IO:
		return "PORT_DDI_E_IO";
	case POWER_DOMAIN_PORT_DDI_F_IO:
		return "PORT_DDI_F_IO";
	case POWER_DOMAIN_PORT_DSI:
		return "PORT_DSI";
	case POWER_DOMAIN_PORT_CRT:
		return "PORT_CRT";
	case POWER_DOMAIN_PORT_OTHER:
		return "PORT_OTHER";
	case POWER_DOMAIN_VGA:
		return "VGA";
	case POWER_DOMAIN_AUDIO:
		return "AUDIO";
	case POWER_DOMAIN_AUX_A:
		return "AUX_A";
	case POWER_DOMAIN_AUX_B:
		return "AUX_B";
	case POWER_DOMAIN_AUX_C:
		return "AUX_C";
	case POWER_DOMAIN_AUX_D:
		return "AUX_D";
	case POWER_DOMAIN_AUX_E:
		return "AUX_E";
	case POWER_DOMAIN_AUX_F:
		return "AUX_F";
	case POWER_DOMAIN_AUX_IO_A:
		return "AUX_IO_A";
	case POWER_DOMAIN_AUX_TBT1:
		return "AUX_TBT1";
	case POWER_DOMAIN_AUX_TBT2:
		return "AUX_TBT2";
	case POWER_DOMAIN_AUX_TBT3:
		return "AUX_TBT3";
	case POWER_DOMAIN_AUX_TBT4:
		return "AUX_TBT4";
	case POWER_DOMAIN_GMBUS:
		return "GMBUS";
	case POWER_DOMAIN_INIT:
		return "INIT";
	case POWER_DOMAIN_MODESET:
		return "MODESET";
	case POWER_DOMAIN_GT_IRQ:
		return "GT_IRQ";
	default:
		MISSING_CASE(domain);
		return "?";
	}
}
125
126static void intel_power_well_enable(struct drm_i915_private *dev_priv,
127 struct i915_power_well *power_well)
128{
129 DRM_DEBUG_KMS("enabling %s\n", power_well->desc->name);
130 power_well->desc->ops->enable(dev_priv, power_well);
131 power_well->hw_enabled = true;
132}
133
134static void intel_power_well_disable(struct drm_i915_private *dev_priv,
135 struct i915_power_well *power_well)
136{
137 DRM_DEBUG_KMS("disabling %s\n", power_well->desc->name);
138 power_well->hw_enabled = false;
139 power_well->desc->ops->disable(dev_priv, power_well);
140}
141
142static void intel_power_well_get(struct drm_i915_private *dev_priv,
143 struct i915_power_well *power_well)
144{
145 if (!power_well->count++)
146 intel_power_well_enable(dev_priv, power_well);
147}
148
149static void intel_power_well_put(struct drm_i915_private *dev_priv,
150 struct i915_power_well *power_well)
151{
152 WARN(!power_well->count, "Use count on power well %s is already zero",
153 power_well->desc->name);
154
155 if (!--power_well->count)
156 intel_power_well_disable(dev_priv, power_well);
157}
158
159
160
161
162
163
164
165
166
167
168
169
170
171bool __intel_display_power_is_enabled(struct drm_i915_private *dev_priv,
172 enum intel_display_power_domain domain)
173{
174 struct i915_power_well *power_well;
175 bool is_enabled;
176
177 if (dev_priv->runtime_pm.suspended)
178 return false;
179
180 is_enabled = true;
181
182 for_each_power_domain_well_reverse(dev_priv, power_well, BIT_ULL(domain)) {
183 if (power_well->desc->always_on)
184 continue;
185
186 if (!power_well->hw_enabled) {
187 is_enabled = false;
188 break;
189 }
190 }
191
192 return is_enabled;
193}
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212bool intel_display_power_is_enabled(struct drm_i915_private *dev_priv,
213 enum intel_display_power_domain domain)
214{
215 struct i915_power_domains *power_domains;
216 bool ret;
217
218 power_domains = &dev_priv->power_domains;
219
220 mutex_lock(&power_domains->lock);
221 ret = __intel_display_power_is_enabled(dev_priv, domain);
222 mutex_unlock(&power_domains->lock);
223
224 return ret;
225}
226
227
228
229
230
231
232
/*
 * Fixups run right after a HSW+ power well has come up: kick the VGA
 * plane back off (a well power cycle can re-enable it) and re-enable the
 * pipe interrupts that live in the well.
 */
static void hsw_power_well_post_enable(struct drm_i915_private *dev_priv,
				       u8 irq_pipe_mask, bool has_vga)
{
	struct pci_dev *pdev = dev_priv->drm.pdev;

	/*
	 * Dummy read/write cycle of the VGA MSR register to force the VGA
	 * plane off again after the well power-up. The legacy VGA I/O range
	 * must be held via the VGA arbiter while touching these ports.
	 * NOTE(review): rationale inferred from the vgaarb usage and the
	 * has_vga flag — confirm against the platform display init sequence.
	 */
	if (has_vga) {
		vga_get_uninterruptible(pdev, VGA_RSRC_LEGACY_IO);
		outb(inb(VGA_MSR_READ), VGA_MSR_WRITE);
		vga_put(pdev, VGA_RSRC_LEGACY_IO);
	}

	/* Re-enable the pipe interrupts backed by this well. */
	if (irq_pipe_mask)
		gen8_irq_power_well_post_enable(dev_priv, irq_pipe_mask);
}

/* Mask the well's pipe interrupts before the well itself goes down. */
static void hsw_power_well_pre_disable(struct drm_i915_private *dev_priv,
				       u8 irq_pipe_mask)
{
	if (irq_pipe_mask)
		gen8_irq_power_well_pre_disable(dev_priv, irq_pipe_mask);
}
264
/*
 * Poll the driver's power-well control register until the well's STATE
 * bit reads back as set (1 ms timeout); warn if the well never came up.
 */
static void hsw_wait_for_power_well_enable(struct drm_i915_private *dev_priv,
					   struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	int pw_idx = power_well->desc->hsw.idx;

	WARN_ON(intel_wait_for_register(&dev_priv->uncore,
					regs->driver,
					HSW_PWR_WELL_CTL_STATE(pw_idx),
					HSW_PWR_WELL_CTL_STATE(pw_idx),
					1));
}
278
279static u32 hsw_power_well_requesters(struct drm_i915_private *dev_priv,
280 const struct i915_power_well_regs *regs,
281 int pw_idx)
282{
283 u32 req_mask = HSW_PWR_WELL_CTL_REQ(pw_idx);
284 u32 ret;
285
286 ret = I915_READ(regs->bios) & req_mask ? 1 : 0;
287 ret |= I915_READ(regs->driver) & req_mask ? 2 : 0;
288 if (regs->kvmr.reg)
289 ret |= I915_READ(regs->kvmr) & req_mask ? 4 : 0;
290 ret |= I915_READ(regs->debug) & req_mask ? 8 : 0;
291
292 return ret;
293}
294
/*
 * Wait for a power well to actually go down after the driver dropped its
 * request. The well legitimately stays on while any other agent (BIOS,
 * KVMR, debug) still requests it, so poll for either the STATE bit to
 * clear or for a remaining requester to show up, then log who is keeping
 * the well forced on.
 */
static void hsw_wait_for_power_well_disable(struct drm_i915_private *dev_priv,
					    struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	int pw_idx = power_well->desc->hsw.idx;
	bool disabled;
	u32 reqs;

	/*
	 * Note that the wait_for() condition deliberately assigns to
	 * 'disabled' and 'reqs' as a side effect; both are valid once the
	 * loop exits (1 ms timeout).
	 */
	wait_for((disabled = !(I915_READ(regs->driver) &
			       HSW_PWR_WELL_CTL_STATE(pw_idx))) ||
		 (reqs = hsw_power_well_requesters(dev_priv, regs, pw_idx)), 1);
	if (disabled)
		return;

	DRM_DEBUG_KMS("%s forced on (bios:%d driver:%d kvmr:%d debug:%d)\n",
		      power_well->desc->name,
		      !!(reqs & 1), !!(reqs & 2), !!(reqs & 4), !!(reqs & 8));
}
322
/*
 * Wait (1 ms max) for the fuse distribution of power gate @pg to report
 * done in SKL_FUSE_STATUS; warn on timeout.
 */
static void gen9_wait_for_power_well_fuses(struct drm_i915_private *dev_priv,
					   enum skl_power_gate pg)
{
	WARN_ON(intel_wait_for_register(&dev_priv->uncore, SKL_FUSE_STATUS,
					SKL_FUSE_PG_DIST_STATUS(pg),
					SKL_FUSE_PG_DIST_STATUS(pg), 1));
}
331
/*
 * Enable a HSW+ style power well: optionally wait for the parent power
 * gate's fuses, set the driver REQ bit, wait for the well's STATE bit,
 * apply the CNL AUX workaround, wait for the well's own fuses and run the
 * post-enable fixups (VGA / pipe IRQs).
 */
static void hsw_power_well_enable(struct drm_i915_private *dev_priv,
				  struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	int pw_idx = power_well->desc->hsw.idx;
	bool wait_fuses = power_well->desc->hsw.has_fuses;
	enum skl_power_gate uninitialized_var(pg);
	u32 val;

	if (wait_fuses) {
		pg = INTEL_GEN(dev_priv) >= 11 ? ICL_PW_CTL_IDX_TO_PG(pw_idx) :
						 SKL_PW_CTL_IDX_TO_PG(pw_idx);
		/*
		 * For PW1 we have to wait both for the PW0/PG0 fuse state
		 * before enabling the power well and PW1/PG1's own fuse
		 * state after the enabling. For all other power wells with
		 * fuses we only have to wait for that PW/PG's fuse state
		 * after the enabling.
		 */
		if (pg == SKL_PG1)
			gen9_wait_for_power_well_fuses(dev_priv, SKL_PG0);
	}

	val = I915_READ(regs->driver);
	I915_WRITE(regs->driver, val | HSW_PWR_WELL_CTL_REQ(pw_idx));
	hsw_wait_for_power_well_enable(dev_priv, power_well);

	/*
	 * AUX LDO bypass override for the CNL AUX power wells.
	 * NOTE(review): looks like Display WA #1178 — confirm the WA number.
	 */
	if (IS_CANNONLAKE(dev_priv) &&
	    pw_idx >= GLK_PW_CTL_IDX_AUX_B &&
	    pw_idx <= CNL_PW_CTL_IDX_AUX_F) {
		val = I915_READ(CNL_AUX_ANAOVRD1(pw_idx));
		val |= CNL_AUX_ANAOVRD1_ENABLE | CNL_AUX_ANAOVRD1_LDO_BYPASS;
		I915_WRITE(CNL_AUX_ANAOVRD1(pw_idx), val);
	}

	if (wait_fuses)
		gen9_wait_for_power_well_fuses(dev_priv, pg);

	hsw_power_well_post_enable(dev_priv,
				   power_well->desc->hsw.irq_pipe_mask,
				   power_well->desc->hsw.has_vga);
}
375
/*
 * Disable a HSW+ style power well: mask the well's pipe interrupts first,
 * then clear the driver REQ bit and wait for the well to actually go down
 * (it may be kept on by other requesters).
 */
static void hsw_power_well_disable(struct drm_i915_private *dev_priv,
				   struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	int pw_idx = power_well->desc->hsw.idx;
	u32 val;

	hsw_power_well_pre_disable(dev_priv,
				   power_well->desc->hsw.irq_pipe_mask);

	val = I915_READ(regs->driver);
	I915_WRITE(regs->driver, val & ~HSW_PWR_WELL_CTL_REQ(pw_idx));
	hsw_wait_for_power_well_disable(dev_priv, power_well);
}
390
/* Map an ICL AUX power well index to its combo PHY port. */
#define ICL_AUX_PW_TO_PORT(pw_idx) ((pw_idx) - ICL_PW_CTL_IDX_AUX_A)

/*
 * Enable an ICL combo-PHY AUX power well: request the well, enable the
 * PHY's AUX lane, wait for the well to come up, and apply the AUX LDO
 * bypass override on non-eDP ports.
 */
static void
icl_combo_phy_aux_power_well_enable(struct drm_i915_private *dev_priv,
				    struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	int pw_idx = power_well->desc->hsw.idx;
	enum port port = ICL_AUX_PW_TO_PORT(pw_idx);
	u32 val;

	val = I915_READ(regs->driver);
	I915_WRITE(regs->driver, val | HSW_PWR_WELL_CTL_REQ(pw_idx));

	/* Enable the AUX lane in the combo PHY before waiting for the well. */
	val = I915_READ(ICL_PORT_CL_DW12(port));
	I915_WRITE(ICL_PORT_CL_DW12(port), val | ICL_LANE_ENABLE_AUX);

	hsw_wait_for_power_well_enable(dev_priv, power_well);

	/*
	 * AUX LDO bypass override for AUX A/B on non-eDP ports.
	 * NOTE(review): looks like Display WA #1178 for icl — confirm.
	 */
	if (IS_ICELAKE(dev_priv) &&
	    pw_idx >= ICL_PW_CTL_IDX_AUX_A && pw_idx <= ICL_PW_CTL_IDX_AUX_B &&
	    !intel_bios_is_port_edp(dev_priv, port)) {
		val = I915_READ(ICL_AUX_ANAOVRD1(pw_idx));
		val |= ICL_AUX_ANAOVRD1_ENABLE | ICL_AUX_ANAOVRD1_LDO_BYPASS;
		I915_WRITE(ICL_AUX_ANAOVRD1(pw_idx), val);
	}
}
419
/*
 * Disable an ICL combo-PHY AUX power well: turn off the PHY's AUX lane
 * first, then drop the driver request and wait for the well to go down —
 * the reverse order of icl_combo_phy_aux_power_well_enable().
 */
static void
icl_combo_phy_aux_power_well_disable(struct drm_i915_private *dev_priv,
				     struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	int pw_idx = power_well->desc->hsw.idx;
	enum port port = ICL_AUX_PW_TO_PORT(pw_idx);
	u32 val;

	val = I915_READ(ICL_PORT_CL_DW12(port));
	I915_WRITE(ICL_PORT_CL_DW12(port), val & ~ICL_LANE_ENABLE_AUX);

	val = I915_READ(regs->driver);
	I915_WRITE(regs->driver, val & ~HSW_PWR_WELL_CTL_REQ(pw_idx));

	hsw_wait_for_power_well_disable(dev_priv, power_well);
}
437
/* Map an ICL AUX power well index to its legacy AUX channel. */
#define ICL_AUX_PW_TO_CH(pw_idx) \
	((pw_idx) - ICL_PW_CTL_IDX_AUX_A + AUX_CH_A)

/* Map an ICL Thunderbolt AUX power well index to its AUX channel. */
#define ICL_TBT_AUX_PW_TO_CH(pw_idx) \
	((pw_idx) - ICL_PW_CTL_IDX_AUX_TBT1 + AUX_CH_C)

/*
 * Enable an ICL Type-C PHY AUX power well: program the AUX channel's
 * TBT IO mode (set for Thunderbolt, cleared otherwise) before the
 * standard well enable sequence.
 */
static void
icl_tc_phy_aux_power_well_enable(struct drm_i915_private *dev_priv,
				 struct i915_power_well *power_well)
{
	int pw_idx = power_well->desc->hsw.idx;
	bool is_tbt = power_well->desc->hsw.is_tc_tbt;
	enum aux_ch aux_ch;
	u32 val;

	aux_ch = is_tbt ? ICL_TBT_AUX_PW_TO_CH(pw_idx) :
			  ICL_AUX_PW_TO_CH(pw_idx);
	val = I915_READ(DP_AUX_CH_CTL(aux_ch));
	val &= ~DP_AUX_CH_CTL_TBT_IO;
	if (is_tbt)
		val |= DP_AUX_CH_CTL_TBT_IO;
	I915_WRITE(DP_AUX_CH_CTL(aux_ch), val);

	hsw_power_well_enable(dev_priv, power_well);
}
463
464
465
466
467
468
/*
 * Report whether a HSW+ power well is enabled: both the driver's REQ bit
 * and the STATE bit must be set.
 */
static bool hsw_power_well_enabled(struct drm_i915_private *dev_priv,
				   struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	enum i915_power_well_id id = power_well->desc->id;
	int pw_idx = power_well->desc->hsw.idx;
	u32 mask = HSW_PWR_WELL_CTL_REQ(pw_idx) |
		   HSW_PWR_WELL_CTL_STATE(pw_idx);
	u32 val;

	val = I915_READ(regs->driver);

	/*
	 * On GEN9 big core the driver's request bits for PW1 and MISC IO
	 * may not be restored, so also take the BIOS's own request bits
	 * into account for these wells.
	 * NOTE(review): presumably a DMC restore quirk on DC5/6 exit —
	 * confirm against the gen9 DMC errata.
	 */
	if (IS_GEN(dev_priv, 9) && !IS_GEN9_LP(dev_priv) &&
	    (id == SKL_DISP_PW_1 || id == SKL_DISP_PW_MISC_IO))
		val |= I915_READ(regs->bios);

	return (val & mask) == mask;
}
493
/*
 * Sanity-check the preconditions for entering DC9: DC9 not already
 * enabled, DC5 disabled, power well 2 off and interrupts disabled.
 * All checks are WARN_ONCE-only; they don't block the transition.
 */
static void assert_can_enable_dc9(struct drm_i915_private *dev_priv)
{
	WARN_ONCE((I915_READ(DC_STATE_EN) & DC_STATE_EN_DC9),
		  "DC9 already programmed to be enabled.\n");
	WARN_ONCE(I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5,
		  "DC5 still not disabled to enable DC9.\n");
	WARN_ONCE(I915_READ(HSW_PWR_WELL_CTL2) &
		  HSW_PWR_WELL_CTL_REQ(SKL_PW_CTL_IDX_PW_2),
		  "Power well 2 on.\n");
	WARN_ONCE(intel_irqs_enabled(dev_priv),
		  "Interrupts not disabled yet.\n");

	/*
	 * TODO: check for the following to verify the conditions to enter
	 * DC9 state are satisfied:
	 * 1] Check relevant display engine registers to verify if mode set
	 *    disable sequence was followed.
	 * 2] Check if display uninitialize sequence is initialized.
	 */
}

/*
 * Sanity-check the preconditions for leaving DC9: interrupts still
 * disabled and DC5 not enabled. WARN_ONCE-only, non-blocking.
 */
static void assert_can_disable_dc9(struct drm_i915_private *dev_priv)
{
	WARN_ONCE(intel_irqs_enabled(dev_priv),
		  "Interrupts not disabled yet.\n");
	WARN_ONCE(I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5,
		  "DC5 still not disabled.\n");

	/*
	 * TODO: check for the following to verify DC9 state was indeed
	 * entered before programming to disable it:
	 * 1] Check relevant display engine registers to verify if mode
	 *    set disable sequence was followed.
	 * 2] Check if display uninitialize sequence is initialized.
	 */
}
530
/*
 * Write DC_STATE_EN and make the value stick. The write has been
 * observed not to latch reliably (the DMC can return the old value), so
 * rewrite until the register reads back stable: require several
 * consecutive matching reads, and give up after a bounded number of
 * rewrites.
 */
static void gen9_write_dc_state(struct drm_i915_private *dev_priv,
				u32 state)
{
	int rewrites = 0;
	int rereads = 0;
	u32 v;

	I915_WRITE(DC_STATE_EN, state);

	/*
	 * Keep re-reading; any mismatch triggers a rewrite and restarts
	 * the stable-read count. Exit once 6 consecutive reads match, or
	 * after 100 rewrites.
	 */
	do {
		v = I915_READ(DC_STATE_EN);

		if (v != state) {
			I915_WRITE(DC_STATE_EN, state);
			rewrites++;
			rereads = 0;
		} else if (rereads++ > 5) {
			break;
		}

	} while (rewrites < 100);

	if (v != state)
		DRM_ERROR("Writing dc state to 0x%x failed, now 0x%x\n",
			  state, v);

	/* Most of the times we need one retry, avoid spam */
	if (rewrites > 1)
		DRM_DEBUG_KMS("Rewrote dc state to 0x%x %d times\n",
			      state, rewrites);
}
567
568static u32 gen9_dc_mask(struct drm_i915_private *dev_priv)
569{
570 u32 mask;
571
572 mask = DC_STATE_EN_UPTO_DC5;
573 if (INTEL_GEN(dev_priv) >= 11)
574 mask |= DC_STATE_EN_UPTO_DC6 | DC_STATE_EN_DC9;
575 else if (IS_GEN9_LP(dev_priv))
576 mask |= DC_STATE_EN_DC9;
577 else
578 mask |= DC_STATE_EN_UPTO_DC6;
579
580 return mask;
581}
582
583void gen9_sanitize_dc_state(struct drm_i915_private *dev_priv)
584{
585 u32 val;
586
587 val = I915_READ(DC_STATE_EN) & gen9_dc_mask(dev_priv);
588
589 DRM_DEBUG_KMS("Resetting DC state tracking from %02x to %02x\n",
590 dev_priv->csr.dc_state, val);
591 dev_priv->csr.dc_state = val;
592}
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
/*
 * Program the target display C-state. @state is clamped to the states
 * allowed on this platform (with a one-time warning if the caller asked
 * for more), the hardware's current state is cross-checked against the
 * driver's tracking, and the new value is written via the sticky-write
 * helper. The cached dc_state is updated to match.
 */
static void gen9_set_dc_state(struct drm_i915_private *dev_priv, u32 state)
{
	u32 val;
	u32 mask;

	if (WARN_ON_ONCE(state & ~dev_priv->csr.allowed_dc_mask))
		state &= dev_priv->csr.allowed_dc_mask;

	val = I915_READ(DC_STATE_EN);
	mask = gen9_dc_mask(dev_priv);
	DRM_DEBUG_KMS("Setting DC state from %02x to %02x\n",
		      val & mask, state);

	/* Hardware and driver tracking should agree at this point. */
	if ((val & mask) != dev_priv->csr.dc_state)
		DRM_ERROR("DC state mismatch (0x%x -> 0x%x)\n",
			  dev_priv->csr.dc_state, val & mask);

	val &= ~mask;
	val |= state;

	gen9_write_dc_state(dev_priv, val);

	dev_priv->csr.dc_state = val & mask;
}
642
/* Enter DC9 (deepest display-off state) on BXT/GLK style platforms. */
void bxt_enable_dc9(struct drm_i915_private *dev_priv)
{
	assert_can_enable_dc9(dev_priv);

	DRM_DEBUG_KMS("Enabling DC9\n");

	/*
	 * Power sequencer reset is not needed on platforms with South
	 * Display Engine on PCH, because PPS registers are always on.
	 */
	if (!HAS_PCH_SPLIT(dev_priv))
		intel_power_sequencer_reset(dev_priv);
	gen9_set_dc_state(dev_priv, DC_STATE_EN_DC9);
}

/* Leave DC9 and reapply the PPS register-unlock workaround. */
void bxt_disable_dc9(struct drm_i915_private *dev_priv)
{
	assert_can_disable_dc9(dev_priv);

	DRM_DEBUG_KMS("Disabling DC9\n");

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	intel_pps_unlock_regs_wa(dev_priv);
}
668
/*
 * Warn (once per condition) if the CSR/DMC firmware doesn't appear to be
 * loaded — DC5/DC6 must not be enabled without it.
 */
static void assert_csr_loaded(struct drm_i915_private *dev_priv)
{
	WARN_ONCE(!I915_READ(CSR_PROGRAM(0)),
		  "CSR program storage start is NULL\n");
	WARN_ONCE(!I915_READ(CSR_SSP_BASE), "CSR SSP Base Not fine\n");
	WARN_ONCE(!I915_READ(CSR_HTP_SKL), "CSR HTP Not fine\n");
}
676
/*
 * Find the power well with the given ID on this platform.
 *
 * Must not fail for a correctly configured platform; if it does, warn
 * and fall back to the first power well rather than returning NULL —
 * callers (display init sequences and the like) have no sensible way to
 * handle a missing well, and the WARN gets the bug reported.
 */
static struct i915_power_well *
lookup_power_well(struct drm_i915_private *dev_priv,
		  enum i915_power_well_id power_well_id)
{
	struct i915_power_well *power_well;

	for_each_power_well(dev_priv, power_well)
		if (power_well->desc->id == power_well_id)
			return power_well;

	WARN(1, "Power well %d not defined for this platform\n", power_well_id);
	return &dev_priv->power_domains.power_wells[0];
}
697
/*
 * Sanity-check the preconditions for DC5: power well 2 off, DC5 not
 * already enabled, a runtime PM wakelock held and the CSR/DMC firmware
 * loaded. WARN-only, non-blocking.
 */
static void assert_can_enable_dc5(struct drm_i915_private *dev_priv)
{
	bool pg2_enabled = intel_display_power_well_is_enabled(dev_priv,
							       SKL_DISP_PW_2);

	WARN_ONCE(pg2_enabled, "PG2 not disabled to enable DC5.\n");

	WARN_ONCE((I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5),
		  "DC5 already programmed to be enabled.\n");
	assert_rpm_wakelock_held(&dev_priv->runtime_pm);

	assert_csr_loaded(dev_priv);
}

/* Enter the DC5 display C-state. */
void gen9_enable_dc5(struct drm_i915_private *dev_priv)
{
	assert_can_enable_dc5(dev_priv);

	DRM_DEBUG_KMS("Enabling DC5\n");

	/* Wa Display #1183: skl,kbl,cfl */
	if (IS_GEN9_BC(dev_priv))
		I915_WRITE(GEN8_CHICKEN_DCPR_1, I915_READ(GEN8_CHICKEN_DCPR_1) |
			   SKL_SELECT_ALTERNATE_DC_EXIT);

	gen9_set_dc_state(dev_priv, DC_STATE_EN_UPTO_DC5);
}
725
/*
 * Sanity-check the preconditions for DC6: backlight/utility pin off, DC6
 * not already enabled and the CSR/DMC firmware loaded. WARN-only.
 */
static void assert_can_enable_dc6(struct drm_i915_private *dev_priv)
{
	WARN_ONCE(I915_READ(UTIL_PIN_CTL) & UTIL_PIN_ENABLE,
		  "Backlight is not disabled.\n");
	WARN_ONCE((I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC6),
		  "DC6 already programmed to be enabled.\n");

	assert_csr_loaded(dev_priv);
}

/* Enter the DC6 display C-state. */
void skl_enable_dc6(struct drm_i915_private *dev_priv)
{
	assert_can_enable_dc6(dev_priv);

	DRM_DEBUG_KMS("Enabling DC6\n");

	/* Wa Display #1183: skl,kbl,cfl */
	if (IS_GEN9_BC(dev_priv))
		I915_WRITE(GEN8_CHICKEN_DCPR_1, I915_READ(GEN8_CHICKEN_DCPR_1) |
			   SKL_SELECT_ALTERNATE_DC_EXIT);

	gen9_set_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
}
749
/*
 * Sync driver state with a well the BIOS left requested: take over the
 * BIOS's request with the driver's own request bit, then clear the BIOS
 * request so the driver becomes the sole owner.
 */
static void hsw_power_well_sync_hw(struct drm_i915_private *dev_priv,
				   struct i915_power_well *power_well)
{
	const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
	int pw_idx = power_well->desc->hsw.idx;
	u32 mask = HSW_PWR_WELL_CTL_REQ(pw_idx);
	u32 bios_req = I915_READ(regs->bios);

	/* Take over the request bit if set by BIOS. */
	if (bios_req & mask) {
		u32 drv_req = I915_READ(regs->driver);

		if (!(drv_req & mask))
			I915_WRITE(regs->driver, drv_req | mask);
		I915_WRITE(regs->bios, bios_req & ~mask);
	}
}
767
/* BXT DPIO common power wells map directly onto DDI PHY init/uninit. */
static void bxt_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
					   struct i915_power_well *power_well)
{
	bxt_ddi_phy_init(dev_priv, power_well->desc->bxt.phy);
}

static void bxt_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
					    struct i915_power_well *power_well)
{
	bxt_ddi_phy_uninit(dev_priv, power_well->desc->bxt.phy);
}

static bool bxt_dpio_cmn_power_well_enabled(struct drm_i915_private *dev_priv,
					    struct i915_power_well *power_well)
{
	return bxt_ddi_phy_is_enabled(dev_priv, power_well->desc->bxt.phy);
}
785
786static void bxt_verify_ddi_phy_power_wells(struct drm_i915_private *dev_priv)
787{
788 struct i915_power_well *power_well;
789
790 power_well = lookup_power_well(dev_priv, BXT_DISP_PW_DPIO_CMN_A);
791 if (power_well->count > 0)
792 bxt_ddi_phy_verify_state(dev_priv, power_well->desc->bxt.phy);
793
794 power_well = lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
795 if (power_well->count > 0)
796 bxt_ddi_phy_verify_state(dev_priv, power_well->desc->bxt.phy);
797
798 if (IS_GEMINILAKE(dev_priv)) {
799 power_well = lookup_power_well(dev_priv,
800 GLK_DISP_PW_DPIO_CMN_C);
801 if (power_well->count > 0)
802 bxt_ddi_phy_verify_state(dev_priv,
803 power_well->desc->bxt.phy);
804 }
805}
806
807static bool gen9_dc_off_power_well_enabled(struct drm_i915_private *dev_priv,
808 struct i915_power_well *power_well)
809{
810 return (I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5_DC6_MASK) == 0;
811}
812
813static void gen9_assert_dbuf_enabled(struct drm_i915_private *dev_priv)
814{
815 u32 tmp = I915_READ(DBUF_CTL);
816
817 WARN((tmp & (DBUF_POWER_STATE | DBUF_POWER_REQUEST)) !=
818 (DBUF_POWER_STATE | DBUF_POWER_REQUEST),
819 "Unexpected DBuf power power state (0x%08x)\n", tmp);
820}
821
/*
 * "Enable" the DC-off well, i.e. exit any DC5/DC6 state, and verify the
 * hardware state the DMC should have restored on exit: CDCLK, DBuf and
 * (on BXT/GLK) the DDI PHYs. On gen11+ also re-init the combo PHYs.
 */
static void gen9_dc_off_power_well_enable(struct drm_i915_private *dev_priv,
					  struct i915_power_well *power_well)
{
	struct intel_cdclk_state cdclk_state = {};

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	dev_priv->display.get_cdclk(dev_priv, &cdclk_state);

	/* The CDCLK the DMC restored must match what we programmed. */
	WARN_ON(intel_cdclk_needs_modeset(&dev_priv->cdclk.hw, &cdclk_state));

	gen9_assert_dbuf_enabled(dev_priv);

	if (IS_GEN9_LP(dev_priv))
		bxt_verify_ddi_phy_power_wells(dev_priv);

	if (INTEL_GEN(dev_priv) >= 11)
		/*
		 * Re-init the combo PHYs after DC exit.
		 * NOTE(review): presumably because DMC toggles some PHY
		 * init state during DC transitions — confirm upstream.
		 */
		intel_combo_phy_init(dev_priv);
}
846
847static void gen9_dc_off_power_well_disable(struct drm_i915_private *dev_priv,
848 struct i915_power_well *power_well)
849{
850 if (!dev_priv->csr.dmc_payload)
851 return;
852
853 if (dev_priv->csr.allowed_dc_mask & DC_STATE_EN_UPTO_DC6)
854 skl_enable_dc6(dev_priv);
855 else if (dev_priv->csr.allowed_dc_mask & DC_STATE_EN_UPTO_DC5)
856 gen9_enable_dc5(dev_priv);
857}
858
/* No-op sync_hw for wells with no HW state to reconcile. */
static void i9xx_power_well_sync_hw_noop(struct drm_i915_private *dev_priv,
					 struct i915_power_well *power_well)
{
}

/* No-op enable/disable for always-on wells. */
static void i9xx_always_on_power_well_noop(struct drm_i915_private *dev_priv,
					   struct i915_power_well *power_well)
{
}

/* Always-on wells are, by definition, always enabled. */
static bool i9xx_always_on_power_well_enabled(struct drm_i915_private *dev_priv,
					      struct i915_power_well *power_well)
{
	return true;
}
874
/*
 * i830 "pipes" power well: models the requirement that both pipes stay
 * enabled. Enable turns on any pipe not already running; disable turns
 * both off (B before A).
 */
static void i830_pipes_power_well_enable(struct drm_i915_private *dev_priv,
					 struct i915_power_well *power_well)
{
	if ((I915_READ(PIPECONF(PIPE_A)) & PIPECONF_ENABLE) == 0)
		i830_enable_pipe(dev_priv, PIPE_A);
	if ((I915_READ(PIPECONF(PIPE_B)) & PIPECONF_ENABLE) == 0)
		i830_enable_pipe(dev_priv, PIPE_B);
}

static void i830_pipes_power_well_disable(struct drm_i915_private *dev_priv,
					  struct i915_power_well *power_well)
{
	i830_disable_pipe(dev_priv, PIPE_B);
	i830_disable_pipe(dev_priv, PIPE_A);
}

/* The well counts as enabled only when both pipes are enabled. */
static bool i830_pipes_power_well_enabled(struct drm_i915_private *dev_priv,
					  struct i915_power_well *power_well)
{
	return I915_READ(PIPECONF(PIPE_A)) & PIPECONF_ENABLE &&
	       I915_READ(PIPECONF(PIPE_B)) & PIPECONF_ENABLE;
}

/* Bring the HW in line with the well's current reference count. */
static void i830_pipes_power_well_sync_hw(struct drm_i915_private *dev_priv,
					  struct i915_power_well *power_well)
{
	if (power_well->count > 0)
		i830_pipes_power_well_enable(dev_priv, power_well);
	else
		i830_pipes_power_well_disable(dev_priv, power_well);
}
906
/*
 * Set a VLV/CHV power well on or off through the Punit power-gate
 * control interface. Skips the request if the status register already
 * reports the desired state, otherwise programs the control register
 * and polls (100 ms max) for the status to follow.
 */
static void vlv_set_power_well(struct drm_i915_private *dev_priv,
			       struct i915_power_well *power_well, bool enable)
{
	int pw_idx = power_well->desc->vlv.idx;
	u32 mask;
	u32 state;
	u32 ctrl;

	mask = PUNIT_PWRGT_MASK(pw_idx);
	state = enable ? PUNIT_PWRGT_PWR_ON(pw_idx) :
			 PUNIT_PWRGT_PWR_GATE(pw_idx);

	vlv_punit_get(dev_priv);

/* True once the Punit reports the well in the requested state. */
#define COND \
	((vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask) == state)

	if (COND)
		goto out;

	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL);
	ctrl &= ~mask;
	ctrl |= state;
	vlv_punit_write(dev_priv, PUNIT_REG_PWRGT_CTRL, ctrl);

	if (wait_for(COND, 100))
		DRM_ERROR("timeout setting power well state %08x (%08x)\n",
			  state,
			  vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL));

#undef COND

out:
	vlv_punit_put(dev_priv);
}
942
/* Thin enable/disable wrappers around vlv_set_power_well(). */
static void vlv_power_well_enable(struct drm_i915_private *dev_priv,
				  struct i915_power_well *power_well)
{
	vlv_set_power_well(dev_priv, power_well, true);
}

static void vlv_power_well_disable(struct drm_i915_private *dev_priv,
				   struct i915_power_well *power_well)
{
	vlv_set_power_well(dev_priv, power_well, false);
}
954
/*
 * Query a VLV/CHV power well's state from the Punit status register, and
 * cross-check that the control register agrees — a mismatch would mean
 * some other agent is poking the power controls, or the state is mid-
 * transition.
 */
static bool vlv_power_well_enabled(struct drm_i915_private *dev_priv,
				   struct i915_power_well *power_well)
{
	int pw_idx = power_well->desc->vlv.idx;
	bool enabled = false;
	u32 mask;
	u32 state;
	u32 ctrl;

	mask = PUNIT_PWRGT_MASK(pw_idx);
	ctrl = PUNIT_PWRGT_PWR_ON(pw_idx);

	vlv_punit_get(dev_priv);

	state = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask;

	/*
	 * We only ever set the power-on and power-gate states, anything
	 * else is unexpected.
	 */
	WARN_ON(state != PUNIT_PWRGT_PWR_ON(pw_idx) &&
		state != PUNIT_PWRGT_PWR_GATE(pw_idx));
	if (state == ctrl)
		enabled = true;

	/*
	 * The control register should match the status register once a
	 * requested transition has completed; warn if they disagree.
	 */
	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL) & mask;
	WARN_ON(ctrl != state);

	vlv_punit_put(dev_priv);

	return enabled;
}
990
/*
 * Program VLV/CHV display clock gating and related init-time registers:
 * clock gate disables, trickle feed, CBR and the rawclk divider.
 */
static void vlv_init_display_clock_gating(struct drm_i915_private *dev_priv)
{
	u32 val;

	/*
	 * On driver load, a pipe may be active and driving a DSI display.
	 * Preserve DPOUNIT_CLOCK_GATE_DISABLE to avoid the pipe getting
	 * stuck in that case; all other bits are dropped and only
	 * VRHUNIT_CLOCK_GATE_DISABLE is added back.
	 */
	val = I915_READ(DSPCLK_GATE_D);
	val &= DPOUNIT_CLOCK_GATE_DISABLE;
	val |= VRHUNIT_CLOCK_GATE_DISABLE;
	I915_WRITE(DSPCLK_GATE_D, val);

	/* Disable trickle feed; clear CBR1. */
	I915_WRITE(MI_ARB_VLV, MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE);
	I915_WRITE(CBR1_VLV, 0);

	/* rawclk must be known before programming the divider below. */
	WARN_ON(dev_priv->rawclk_freq == 0);

	/* Program the rawclk frequency in MHz (rawclk_freq is in kHz). */
	I915_WRITE(RAWCLK_FREQ_VLV,
		   DIV_ROUND_CLOSEST(dev_priv->rawclk_freq, 1000));
}
1017
/*
 * Re-initialize display state after the VLV display power well came up:
 * DPLL reference/CRI clocks, clock gating, display IRQs, and — outside
 * of initial driver init — hotplug, CRT, VGA and PPS state that would
 * otherwise be restored by the normal init path anyway.
 */
static void vlv_display_power_well_init(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	enum pipe pipe;

	/*
	 * Enable the CRI clock source so we can get at the display and
	 * the reference clock for VGA hotplug / manual detection.
	 * NOTE(review): DPLL_VGA_MODE_DIS is set unconditionally —
	 * presumably VGA mode causes issues on these DPLLs; confirm.
	 */
	for_each_pipe(dev_priv, pipe) {
		u32 val = I915_READ(DPLL(pipe));

		val |= DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
		if (pipe != PIPE_A)
			val |= DPLL_INTEGRATED_CRI_CLK_VLV;

		I915_WRITE(DPLL(pipe), val);
	}

	vlv_init_display_clock_gating(dev_priv);

	spin_lock_irq(&dev_priv->irq_lock);
	valleyview_enable_display_irqs(dev_priv);
	spin_unlock_irq(&dev_priv->irq_lock);

	/*
	 * During driver initialization/resume we can avoid restoring the
	 * part of the HW/SW state that will be inited anyway explicitly.
	 */
	if (dev_priv->power_domains.initializing)
		return;

	intel_hpd_init(dev_priv);

	/* Re-enable the ADPA, if we have one. */
	for_each_intel_encoder(&dev_priv->drm, encoder) {
		if (encoder->type == INTEL_OUTPUT_ANALOG)
			intel_crt_reset(&encoder->base);
	}

	i915_redisable_vga_power_on(dev_priv);

	intel_pps_unlock_regs_wa(dev_priv);
}
1066
/*
 * Tear down display state before the VLV display power well goes down:
 * disable display IRQs, reset the power sequencer, and switch hotplug
 * detection to polling (which doesn't need the well) unless the system
 * is suspending.
 */
static void vlv_display_power_well_deinit(struct drm_i915_private *dev_priv)
{
	spin_lock_irq(&dev_priv->irq_lock);
	valleyview_disable_display_irqs(dev_priv);
	spin_unlock_irq(&dev_priv->irq_lock);

	/* make sure we're done processing display irqs */
	synchronize_irq(dev_priv->drm.irq);

	intel_power_sequencer_reset(dev_priv);

	/* Prevent us from re-enabling polling on regular systems */
	if (!dev_priv->drm.dev->power.is_suspended)
		intel_hpd_poll_init(dev_priv);
}
1082
/* Power up the display well, then re-init the display state it backs. */
static void vlv_display_power_well_enable(struct drm_i915_private *dev_priv,
					  struct i915_power_well *power_well)
{
	vlv_set_power_well(dev_priv, power_well, true);

	vlv_display_power_well_init(dev_priv);
}

/* Tear down display state first, then power the well down. */
static void vlv_display_power_well_disable(struct drm_i915_private *dev_priv,
					   struct i915_power_well *power_well)
{
	vlv_display_power_well_deinit(dev_priv);

	vlv_set_power_well(dev_priv, power_well, false);
}
1098
/*
 * Power up the VLV DPIO common lane well and de-assert the common-lane
 * reset afterwards.
 */
static void vlv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
					   struct i915_power_well *power_well)
{
	/*
	 * Small settle delay before powering the well.
	 * NOTE(review): presumably to satisfy cmnreset/sidereset timing
	 * after the ref/CRI clock was enabled — confirm against the VLV
	 * DPIO programming notes.
	 */
	udelay(1);

	vlv_set_power_well(dev_priv, power_well, true);

	/* De-assert the common lane reset (DPIO_CMNRST). */
	I915_WRITE(DPIO_CTL, I915_READ(DPIO_CTL) | DPIO_CMNRST);
}

/*
 * Power down the VLV DPIO common lane well: all PLLs must already be
 * disabled, and the common lane reset is asserted before the well goes
 * down.
 */
static void vlv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
					    struct i915_power_well *power_well)
{
	enum pipe pipe;

	for_each_pipe(dev_priv, pipe)
		assert_pll_disabled(dev_priv, pipe);

	/* Assert common lane reset while the well is still powered. */
	I915_WRITE(DPIO_CTL, I915_READ(DPIO_CTL) & ~DPIO_CMNRST);

	vlv_set_power_well(dev_priv, power_well, false);
}
1134
/* Mask covering every defined power domain bit. */
#define POWER_DOMAIN_MASK (GENMASK_ULL(POWER_DOMAIN_NUM - 1, 0))

/* True iff all of @bits are set in @val. */
#define BITS_SET(val, bits) (((val) & (bits)) == (bits))

/*
 * Cross-check the CHV DISPLAY_PHY_STATUS register against the expected
 * value reconstructed from dev_priv->chv_phy_control and the DPIO common
 * well states. Any disagreement (after a short settle wait) indicates
 * the PHY power state tracking has gone wrong.
 */
static void assert_chv_phy_status(struct drm_i915_private *dev_priv)
{
	struct i915_power_well *cmn_bc =
		lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
	struct i915_power_well *cmn_d =
		lookup_power_well(dev_priv, CHV_DISP_PW_DPIO_CMN_D);
	u32 phy_control = dev_priv->chv_phy_control;
	u32 phy_status = 0;
	u32 phy_status_mask = 0xffffffff;

	/*
	 * The BIOS can leave the PHY in a weird state where it doesn't
	 * fully power down some parts. Skip the asserts for a PHY until it
	 * has been fully reset at least once (chv_phy_assert[] flag).
	 */
	if (!dev_priv->chv_phy_assert[DPIO_PHY0])
		phy_status_mask &= ~(PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH0) |
				     PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 0) |
				     PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 1) |
				     PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH1) |
				     PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 0) |
				     PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 1));

	if (!dev_priv->chv_phy_assert[DPIO_PHY1])
		phy_status_mask &= ~(PHY_STATUS_CMN_LDO(DPIO_PHY1, DPIO_CH0) |
				     PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 0) |
				     PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 1));

	if (cmn_bc->desc->ops->is_enabled(dev_priv, cmn_bc)) {
		phy_status |= PHY_POWERGOOD(DPIO_PHY0);

		/* Without the override, all lanes count as powered up. */
		if ((phy_control & PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH0)) == 0)
			phy_control |= PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH0);

		if ((phy_control & PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH1)) == 0)
			phy_control |= PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH1);

		/* CL1 is on whenever anything is on in either channel. */
		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH0) |
			     PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH1)))
			phy_status |= PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH0);

		/*
		 * CL2 is powered down whenever all the lanes of the second
		 * channel are powered down, except for the pipe B + port A
		 * case where the DPLL keeps it up — hence the DPLL check.
		 */
		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH1)) &&
		    (I915_READ(DPLL(PIPE_B)) & DPLL_VCO_ENABLE) == 0)
			phy_status |= PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH1);

		/* Per-spline LDO status: lanes 0/1 and lanes 2/3 per channel. */
		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0x3, DPIO_PHY0, DPIO_CH0)))
			phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 0);
		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0xc, DPIO_PHY0, DPIO_CH0)))
			phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 1);

		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0x3, DPIO_PHY0, DPIO_CH1)))
			phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 0);
		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0xc, DPIO_PHY0, DPIO_CH1)))
			phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 1);
	}

	if (cmn_d->desc->ops->is_enabled(dev_priv, cmn_d)) {
		phy_status |= PHY_POWERGOOD(DPIO_PHY1);

		/* Without the override, all lanes count as powered up. */
		if ((phy_control & PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY1, DPIO_CH0)) == 0)
			phy_control |= PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY1, DPIO_CH0);

		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY1, DPIO_CH0)))
			phy_status |= PHY_STATUS_CMN_LDO(DPIO_PHY1, DPIO_CH0);

		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0x3, DPIO_PHY1, DPIO_CH0)))
			phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 0);
		if (BITS_SET(phy_control,
			     PHY_CH_POWER_DOWN_OVRD(0xc, DPIO_PHY1, DPIO_CH0)))
			phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 1);
	}

	phy_status &= phy_status_mask;

	/*
	 * The PHY may be busy with some initial calibration and whatnot,
	 * so the power state can take a while to actually change.
	 */
	if (intel_wait_for_register(&dev_priv->uncore,
				    DISPLAY_PHY_STATUS,
				    phy_status_mask,
				    phy_status,
				    10))
		DRM_ERROR("Unexpected PHY_STATUS 0x%08x, expected 0x%08x (PHY_CONTROL=0x%08x)\n",
			  I915_READ(DISPLAY_PHY_STATUS) & phy_status_mask,
			  phy_status, dev_priv->chv_phy_control);
}

#undef BITS_SET
1246
/*
 * Power up a CHV DPIO common lane power well and bring the corresponding
 * PHY out of reset. Only the BC (PHY0) and D (PHY1) common wells exist.
 */
static void chv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
					   struct i915_power_well *power_well)
{
	enum dpio_phy phy;
	enum pipe pipe;
	u32 tmp;

	WARN_ON_ONCE(power_well->desc->id != VLV_DISP_PW_DPIO_CMN_BC &&
		     power_well->desc->id != CHV_DISP_PW_DPIO_CMN_D);

	/* Pick the PHY and the pipe used to address its DPIO registers. */
	if (power_well->desc->id == VLV_DISP_PW_DPIO_CMN_BC) {
		pipe = PIPE_A;
		phy = DPIO_PHY0;
	} else {
		pipe = PIPE_C;
		phy = DPIO_PHY1;
	}

	/*
	 * Brief settle delay before turning the well on.
	 * NOTE(review): exact requirement not visible in this file —
	 * presumably a hardware timing constraint; confirm against the
	 * CHV display PHY documentation.
	 */
	udelay(1);
	vlv_set_power_well(dev_priv, power_well, true);

	/* Poll for the PHY to report power good. */
	if (intel_wait_for_register(&dev_priv->uncore,
				    DISPLAY_PHY_STATUS,
				    PHY_POWERGOOD(phy),
				    PHY_POWERGOOD(phy),
				    1))
		DRM_ERROR("Display PHY %d is not power up\n", phy);

	vlv_dpio_get(dev_priv);

	/* Enable dynamic power down for CH0 and the common lane (CL1). */
	tmp = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW28);
	tmp |= DPIO_DYNPWRDOWNEN_CH0 | DPIO_CL1POWERDOWNEN |
		DPIO_SUS_CLK_CONFIG_GATE_CLKREQ;
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW28, tmp);

	if (power_well->desc->id == VLV_DISP_PW_DPIO_CMN_BC) {
		/* PHY0 has a second channel: enable its dynamic power down too. */
		tmp = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW6_CH1);
		tmp |= DPIO_DYNPWRDOWNEN_CH1;
		vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW6_CH1, tmp);
	} else {
		/*
		 * Single-channel PHY1: set the CL2 LDO fuse power enable
		 * override. NOTE(review): rationale not visible here —
		 * confirm against the CHV display PHY documentation.
		 */
		tmp = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW30);
		tmp |= DPIO_CL2_LDOFUSE_PWRENB;
		vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW30, tmp);
	}

	vlv_dpio_put(dev_priv);

	/* Deassert common lane reset and latch the new PHY control value. */
	dev_priv->chv_phy_control |= PHY_COM_LANE_RESET_DEASSERT(phy);
	I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);

	DRM_DEBUG_KMS("Enabled DPIO PHY%d (PHY_CONTROL=0x%08x)\n",
		      phy, dev_priv->chv_phy_control);

	assert_chv_phy_status(dev_priv);
}
1310
/*
 * Power down a CHV DPIO common lane power well: assert the common lane
 * reset, turn the well off, and re-arm the PHY state asserts.
 */
static void chv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
					    struct i915_power_well *power_well)
{
	enum dpio_phy phy;

	WARN_ON_ONCE(power_well->desc->id != VLV_DISP_PW_DPIO_CMN_BC &&
		     power_well->desc->id != CHV_DISP_PW_DPIO_CMN_D);

	/* All PLLs fed by this PHY must already be off. */
	if (power_well->desc->id == VLV_DISP_PW_DPIO_CMN_BC) {
		phy = DPIO_PHY0;
		assert_pll_disabled(dev_priv, PIPE_A);
		assert_pll_disabled(dev_priv, PIPE_B);
	} else {
		phy = DPIO_PHY1;
		assert_pll_disabled(dev_priv, PIPE_C);
	}

	/* Assert the common lane reset before cutting power. */
	dev_priv->chv_phy_control &= ~PHY_COM_LANE_RESET_DEASSERT(phy);
	I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);

	vlv_set_power_well(dev_priv, power_well, false);

	DRM_DEBUG_KMS("Disabled DPIO PHY%d (PHY_CONTROL=0x%08x)\n",
		      phy, dev_priv->chv_phy_control);

	/* PHY is fully reset now, so the PHY state asserts can be enabled. */
	dev_priv->chv_phy_assert[phy] = true;

	assert_chv_phy_status(dev_priv);
}
1341
/*
 * Sanity check the per-lane power down status bits of one PHY channel
 * against what the current override settings (@override, @mask) say
 * they should be. WARNs on any mismatch.
 */
static void assert_chv_phy_powergate(struct drm_i915_private *dev_priv, enum dpio_phy phy,
				     enum dpio_channel ch, bool override, unsigned int mask)
{
	enum pipe pipe = phy == DPIO_PHY0 ? PIPE_A : PIPE_C;
	u32 reg, val, expected, actual;

	/*
	 * Skip the check until the PHY has been fully reset at least once
	 * (chv_phy_assert[] is only set when the common lane well gets
	 * disabled) — before that the PHY may be in a state this driver
	 * never programmed (e.g. as left by the BIOS).
	 */
	if (!dev_priv->chv_phy_assert[phy])
		return;

	if (ch == DPIO_CH0)
		reg = _CHV_CMN_DW0_CH0;
	else
		reg = _CHV_CMN_DW6_CH1;

	vlv_dpio_get(dev_priv);
	val = vlv_dpio_read(dev_priv, pipe, reg);
	vlv_dpio_put(dev_priv);

	/*
	 * With no override, or with an all-lanes (0xf) override, every
	 * data lane is expected to have powered down.
	 */
	if (!override || mask == 0xf) {
		expected = DPIO_ALLDL_POWERDOWN | DPIO_ANYDL_POWERDOWN;

		/*
		 * A register value of zero is taken to mean the whole
		 * channel has shut down, in which case the individual
		 * lane status bits can't be read — accept it as fully
		 * powered down. NOTE(review): only applied for CH1 here;
		 * confirm CH0 cannot hit the same condition.
		 */
		if (ch == DPIO_CH1 && val == 0)
			expected = 0;
	} else if (mask != 0x0) {
		/* Partial override: at least one lane powered down. */
		expected = DPIO_ANYDL_POWERDOWN;
	} else {
		expected = 0;
	}

	/* Extract the ALLDL/ANYDL status bits for this channel. */
	if (ch == DPIO_CH0)
		actual = val >> DPIO_ANYDL_POWERDOWN_SHIFT_CH0;
	else
		actual = val >> DPIO_ANYDL_POWERDOWN_SHIFT_CH1;
	actual &= DPIO_ALLDL_POWERDOWN | DPIO_ANYDL_POWERDOWN;

	WARN(actual != expected,
	     "Unexpected DPIO lane power down: all %d, any %d. Expected: all %d, any %d. (0x%x = 0x%08x)\n",
	     !!(actual & DPIO_ALLDL_POWERDOWN), !!(actual & DPIO_ANYDL_POWERDOWN),
	     !!(expected & DPIO_ALLDL_POWERDOWN), !!(expected & DPIO_ANYDL_POWERDOWN),
	     reg, val);
}
1403
/*
 * Enable or disable the power down override for a whole PHY channel.
 * Returns the previous override state so the caller can restore it.
 */
bool chv_phy_powergate_ch(struct drm_i915_private *dev_priv, enum dpio_phy phy,
			  enum dpio_channel ch, bool override)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	bool was_override;

	mutex_lock(&power_domains->lock);

	was_override = dev_priv->chv_phy_control & PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);

	/* Nothing to do if the override is already in the requested state. */
	if (override == was_override)
		goto out;

	if (override)
		dev_priv->chv_phy_control |= PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
	else
		dev_priv->chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);

	I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);

	DRM_DEBUG_KMS("Power gating DPIO PHY%d CH%d (DPIO_PHY_CONTROL=0x%08x)\n",
		      phy, ch, dev_priv->chv_phy_control);

	assert_chv_phy_status(dev_priv);

out:
	mutex_unlock(&power_domains->lock);

	return was_override;
}
1434
/*
 * Program the per-lane power down override mask for the encoder's PHY
 * channel, enable/disable the override, and sanity check the resulting
 * PHY state.
 */
void chv_phy_powergate_lanes(struct intel_encoder *encoder,
			     bool override, unsigned int mask)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	enum dpio_phy phy = vlv_dport_to_phy(enc_to_dig_port(&encoder->base));
	enum dpio_channel ch = vlv_dport_to_channel(enc_to_dig_port(&encoder->base));

	mutex_lock(&power_domains->lock);

	/* Replace the channel's lane mask with the new one. */
	dev_priv->chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD(0xf, phy, ch);
	dev_priv->chv_phy_control |= PHY_CH_POWER_DOWN_OVRD(mask, phy, ch);

	if (override)
		dev_priv->chv_phy_control |= PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
	else
		dev_priv->chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);

	I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);

	DRM_DEBUG_KMS("Power gating DPIO PHY%d CH%d lanes 0x%x (PHY_CONTROL=0x%08x)\n",
		      phy, ch, mask, dev_priv->chv_phy_control);

	assert_chv_phy_status(dev_priv);

	assert_chv_phy_powergate(dev_priv, phy, ch, override, mask);

	mutex_unlock(&power_domains->lock);
}
1464
/* Read back the pipe A power well state from the punit. */
static bool chv_pipe_power_well_enabled(struct drm_i915_private *dev_priv,
					struct i915_power_well *power_well)
{
	enum pipe pipe = PIPE_A;
	bool enabled;
	u32 state, ctrl;

	vlv_punit_get(dev_priv);

	state = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) & DP_SSS_MASK(pipe);
	/*
	 * We only ever set the power-on and power-gate states, anything
	 * else is unexpected.
	 */
	WARN_ON(state != DP_SSS_PWR_ON(pipe) && state != DP_SSS_PWR_GATE(pipe));
	enabled = state == DP_SSS_PWR_ON(pipe);

	/*
	 * The control field should mirror the status field; a mismatch
	 * at this point would mean some other party is poking at the
	 * power controls too.
	 */
	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) & DP_SSC_MASK(pipe);
	WARN_ON(ctrl << 16 != state);

	vlv_punit_put(dev_priv);

	return enabled;
}
1493
/* Request a new pipe A power well state from the punit and wait for it. */
static void chv_set_pipe_power_well(struct drm_i915_private *dev_priv,
				    struct i915_power_well *power_well,
				    bool enable)
{
	enum pipe pipe = PIPE_A;
	u32 state;
	u32 ctrl;

	state = enable ? DP_SSS_PWR_ON(pipe) : DP_SSS_PWR_GATE(pipe);

	vlv_punit_get(dev_priv);

	/* True when the punit status field matches the requested state. */
#define COND \
	((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) & DP_SSS_MASK(pipe)) == state)

	if (COND)
		goto out;

	/* Write the request into the control field... */
	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
	ctrl &= ~DP_SSC_MASK(pipe);
	ctrl |= enable ? DP_SSC_PWR_ON(pipe) : DP_SSC_PWR_GATE(pipe);
	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, ctrl);

	/* ...and wait for the punit to acknowledge it in the status field. */
	if (wait_for(COND, 100))
		DRM_ERROR("timeout setting power well state %08x (%08x)\n",
			  state,
			  vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM));

#undef COND

out:
	vlv_punit_put(dev_priv);
}
1527
/* Enable the pipe A power well, then re-initialize display state. */
static void chv_pipe_power_well_enable(struct drm_i915_private *dev_priv,
				       struct i915_power_well *power_well)
{
	chv_set_pipe_power_well(dev_priv, power_well, true);

	vlv_display_power_well_init(dev_priv);
}
1535
/* Tear down display state first, then gate the pipe A power well. */
static void chv_pipe_power_well_disable(struct drm_i915_private *dev_priv,
					struct i915_power_well *power_well)
{
	vlv_display_power_well_deinit(dev_priv);

	chv_set_pipe_power_well(dev_priv, power_well, false);
}
1543
1544static u64 __async_put_domains_mask(struct i915_power_domains *power_domains)
1545{
1546 return power_domains->async_put_domains[0] |
1547 power_domains->async_put_domains[1];
1548}
1549
1550#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
1551
1552static bool
1553assert_async_put_domain_masks_disjoint(struct i915_power_domains *power_domains)
1554{
1555 return !WARN_ON(power_domains->async_put_domains[0] &
1556 power_domains->async_put_domains[1]);
1557}
1558
/*
 * Check the invariants of the async-put bookkeeping: the two pending
 * masks are disjoint, the worker wakeref is held iff something is
 * queued, and every queued domain holds exactly one reference.
 * Returns true when everything is consistent.
 */
static bool
__async_put_domains_state_ok(struct i915_power_domains *power_domains)
{
	enum intel_display_power_domain domain;
	bool err = false;

	err |= !assert_async_put_domain_masks_disjoint(power_domains);
	/* The worker wakeref must be held exactly when domains are queued. */
	err |= WARN_ON(!!power_domains->async_put_wakeref !=
		       !!__async_put_domains_mask(power_domains));

	/* Queued domains must each hold exactly one remaining reference. */
	for_each_power_domain(domain, __async_put_domains_mask(power_domains))
		err |= WARN_ON(power_domains->domain_use_count[domain] != 1);

	return !err;
}
1574
/* Dump the domains in @mask with their use counts, under @prefix. */
static void print_power_domains(struct i915_power_domains *power_domains,
				const char *prefix, u64 mask)
{
	enum intel_display_power_domain domain;

	DRM_DEBUG_DRIVER("%s (%lu):\n", prefix, hweight64(mask));
	for_each_power_domain(domain, mask)
		DRM_DEBUG_DRIVER("%s use_count %d\n",
				 intel_display_power_domain_str(domain),
				 power_domains->domain_use_count[domain]);
}
1586
/* Dump the complete async-put bookkeeping state for debugging. */
static void
print_async_put_domains_state(struct i915_power_domains *power_domains)
{
	DRM_DEBUG_DRIVER("async_put_wakeref %u\n",
			 power_domains->async_put_wakeref);

	print_power_domains(power_domains, "async_put_domains[0]",
			    power_domains->async_put_domains[0]);
	print_power_domains(power_domains, "async_put_domains[1]",
			    power_domains->async_put_domains[1]);
}
1598
/* Dump the async-put bookkeeping when its invariants don't hold. */
static void
verify_async_put_domains_state(struct i915_power_domains *power_domains)
{
	if (__async_put_domains_state_ok(power_domains))
		return;

	print_async_put_domains_state(power_domains);
}
1605
1606#else
1607
/*
 * No-op stub when CONFIG_DRM_I915_DEBUG_RUNTIME_PM is disabled. Note
 * the debug variant returns bool; the return value is only consumed
 * inside the debug-only code, so a void stub suffices here.
 */
static void
assert_async_put_domain_masks_disjoint(struct i915_power_domains *power_domains)
{
}
1612
/* No-op stub when CONFIG_DRM_I915_DEBUG_RUNTIME_PM is disabled. */
static void
verify_async_put_domains_state(struct i915_power_domains *power_domains)
{
}
1617
1618#endif
1619
/* Union of both pending async-put masks, with an invariant check. */
static u64 async_put_domains_mask(struct i915_power_domains *power_domains)
{
	assert_async_put_domain_masks_disjoint(power_domains);

	return __async_put_domains_mask(power_domains);
}
1626
1627static void
1628async_put_domains_clear_domain(struct i915_power_domains *power_domains,
1629 enum intel_display_power_domain domain)
1630{
1631 assert_async_put_domain_masks_disjoint(power_domains);
1632
1633 power_domains->async_put_domains[0] &= ~BIT_ULL(domain);
1634 power_domains->async_put_domains[1] &= ~BIT_ULL(domain);
1635}
1636
/*
 * Try to transfer a pending async-put reference for @domain to the
 * caller. If @domain is queued for async disabling it is removed from
 * the queue; when that empties the queue the delayed worker is
 * cancelled and the wakeref it was holding is dropped. Callers in this
 * file hold power_domains->lock.
 *
 * Returns true iff a reference was grabbed.
 */
static bool
intel_display_power_grab_async_put_ref(struct drm_i915_private *dev_priv,
				       enum intel_display_power_domain domain)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	bool ret = false;

	if (!(async_put_domains_mask(power_domains) & BIT_ULL(domain)))
		goto out_verify;

	async_put_domains_clear_domain(power_domains, domain);

	ret = true;

	/* Other domains still queued: the worker keeps its wakeref. */
	if (async_put_domains_mask(power_domains))
		goto out_verify;

	cancel_delayed_work(&power_domains->async_put_work);
	intel_runtime_pm_put_raw(&dev_priv->runtime_pm,
				 fetch_and_zero(&power_domains->async_put_wakeref));
out_verify:
	verify_async_put_domains_state(power_domains);

	return ret;
}
1662
/*
 * Grab a reference to @domain: first try to steal a pending async-put
 * reference; otherwise enable every power well covering the domain and
 * bump the use count. Callers hold power_domains->lock.
 */
static void
__intel_display_power_get_domain(struct drm_i915_private *dev_priv,
				 enum intel_display_power_domain domain)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *power_well;

	/* A stolen async-put ref already accounts for the use count. */
	if (intel_display_power_grab_async_put_ref(dev_priv, domain))
		return;

	for_each_power_domain_well(dev_priv, power_well, BIT_ULL(domain))
		intel_power_well_get(dev_priv, power_well);

	power_domains->domain_use_count[domain]++;
}
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
/**
 * intel_display_power_get - grab a power domain reference
 * @dev_priv: i915 device instance
 * @domain: power domain to reference
 *
 * This function grabs a power domain reference for @domain and ensures
 * that the power wells covering the domain are enabled.
 *
 * Any power domain reference obtained by this function must have a
 * symmetric call to intel_display_power_put() to release the reference
 * again.
 *
 * Returns a wakeref tracking handle to pass to the corresponding put.
 */
intel_wakeref_t intel_display_power_get(struct drm_i915_private *dev_priv,
					enum intel_display_power_domain domain)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	intel_wakeref_t wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);

	mutex_lock(&power_domains->lock);
	__intel_display_power_get_domain(dev_priv, domain);
	mutex_unlock(&power_domains->lock);

	return wakeref;
}
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716intel_wakeref_t
1717intel_display_power_get_if_enabled(struct drm_i915_private *dev_priv,
1718 enum intel_display_power_domain domain)
1719{
1720 struct i915_power_domains *power_domains = &dev_priv->power_domains;
1721 intel_wakeref_t wakeref;
1722 bool is_enabled;
1723
1724 wakeref = intel_runtime_pm_get_if_in_use(&dev_priv->runtime_pm);
1725 if (!wakeref)
1726 return false;
1727
1728 mutex_lock(&power_domains->lock);
1729
1730 if (__intel_display_power_is_enabled(dev_priv, domain)) {
1731 __intel_display_power_get_domain(dev_priv, domain);
1732 is_enabled = true;
1733 } else {
1734 is_enabled = false;
1735 }
1736
1737 mutex_unlock(&power_domains->lock);
1738
1739 if (!is_enabled) {
1740 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
1741 wakeref = 0;
1742 }
1743
1744 return wakeref;
1745}
1746
/*
 * Drop one reference to @domain: decrement the use count and release
 * the covering power wells in reverse order. WARNs if the count is
 * already zero or if an async disable is still pending for the domain.
 * Callers hold power_domains->lock.
 */
static void
__intel_display_power_put_domain(struct drm_i915_private *dev_priv,
				 enum intel_display_power_domain domain)
{
	struct i915_power_domains *power_domains;
	struct i915_power_well *power_well;
	const char *name = intel_display_power_domain_str(domain);

	power_domains = &dev_priv->power_domains;

	WARN(!power_domains->domain_use_count[domain],
	     "Use count on domain %s is already zero\n",
	     name);
	WARN(async_put_domains_mask(power_domains) & BIT_ULL(domain),
	     "Async disabling of domain %s is pending\n",
	     name);

	power_domains->domain_use_count[domain]--;

	/* Release wells in the reverse order of how they were acquired. */
	for_each_power_domain_well_reverse(dev_priv, power_well, BIT_ULL(domain))
		intel_power_well_put(dev_priv, power_well);
}
1769
/* Locked wrapper around __intel_display_power_put_domain(). */
static void __intel_display_power_put(struct drm_i915_private *dev_priv,
				      enum intel_display_power_domain domain)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;

	mutex_lock(&power_domains->lock);
	__intel_display_power_put_domain(dev_priv, domain);
	mutex_unlock(&power_domains->lock);
}
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
/**
 * intel_display_power_put_unchecked - release a power domain reference
 * @dev_priv: i915 device instance
 * @domain: power domain to release
 *
 * This function drops the power domain reference obtained by
 * intel_display_power_get() without a wakeref tracking handle, and
 * drops the corresponding runtime PM reference unchecked.
 */
void intel_display_power_put_unchecked(struct drm_i915_private *dev_priv,
				       enum intel_display_power_domain domain)
{
	__intel_display_power_put(dev_priv, domain);
	intel_runtime_pm_put_unchecked(&dev_priv->runtime_pm);
}
1799
/*
 * Hand @wakeref over to the delayed async-put worker and schedule it.
 * There must be no worker wakeref outstanding (WARNs otherwise);
 * callers in this file hold power_domains->lock.
 */
static void
queue_async_put_domains_work(struct i915_power_domains *power_domains,
			     intel_wakeref_t wakeref)
{
	WARN_ON(power_domains->async_put_wakeref);
	power_domains->async_put_wakeref = wakeref;
	/* The work must not already be queued at this point. */
	WARN_ON(!queue_delayed_work(system_unbound_wq,
				    &power_domains->async_put_work,
				    msecs_to_jiffies(100)));
}
1810
/*
 * Release the reference of every domain in @mask that was queued for
 * async disabling.
 */
static void
release_async_put_domains(struct i915_power_domains *power_domains, u64 mask)
{
	struct drm_i915_private *dev_priv =
		container_of(power_domains, struct drm_i915_private,
			     power_domains);
	struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
	enum intel_display_power_domain domain;
	intel_wakeref_t wakeref;

	/*
	 * The caller must already hold a raw wakeref; take a full
	 * (tracked) wakeref on top of it for the duration of the HW
	 * access done while disabling the power wells.
	 */
	assert_rpm_raw_wakeref_held(rpm);
	wakeref = intel_runtime_pm_get(rpm);

	for_each_power_domain(domain, mask) {
		/* Clear before put, so the domain's own WARN doesn't fire. */
		async_put_domains_clear_domain(power_domains, domain);
		__intel_display_power_put_domain(dev_priv, domain);
	}

	intel_runtime_pm_put(rpm, wakeref);
}
1837
/*
 * Delayed worker doing the actual disabling for
 * __intel_display_power_put_async().
 *
 * Drops the references queued in async_put_domains[0]; any domains
 * queued meanwhile in slot [1] are promoted to slot [0] and the worker
 * is requeued with a fresh wakeref.
 */
static void
intel_display_power_put_async_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, struct drm_i915_private,
			     power_domains.async_put_work.work);
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
	intel_wakeref_t new_work_wakeref = intel_runtime_pm_get_raw(rpm);
	intel_wakeref_t old_work_wakeref = 0;

	mutex_lock(&power_domains->lock);

	/*
	 * Bail out if the work's wakeref was already taken back, i.e.
	 * all pending refs were grabbed by subsequent gets or a flush.
	 */
	old_work_wakeref = fetch_and_zero(&power_domains->async_put_wakeref);
	if (!old_work_wakeref)
		goto out_verify;

	release_async_put_domains(power_domains,
				  power_domains->async_put_domains[0]);

	/* Requeue the work if more domains were async-put meanwhile. */
	if (power_domains->async_put_domains[1]) {
		power_domains->async_put_domains[0] =
			fetch_and_zero(&power_domains->async_put_domains[1]);
		queue_async_put_domains_work(power_domains,
					     fetch_and_zero(&new_work_wakeref));
	}

out_verify:
	verify_async_put_domains_state(power_domains);

	mutex_unlock(&power_domains->lock);

	/* Drop whichever raw wakerefs weren't handed to a queued work. */
	if (old_work_wakeref)
		intel_runtime_pm_put_raw(rpm, old_work_wakeref);
	if (new_work_wakeref)
		intel_runtime_pm_put_raw(rpm, new_work_wakeref);
}
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889
1890
/**
 * __intel_display_power_put_async - release a power domain reference asynchronously
 * @i915: i915 device instance
 * @domain: power domain to release
 * @wakeref: wakeref acquired for the reference that is being released
 *
 * This function drops the power domain reference obtained by
 * intel_display_power_get*(); if this is the last reference the actual
 * disabling is deferred to a delayed work.
 */
void __intel_display_power_put_async(struct drm_i915_private *i915,
				     enum intel_display_power_domain domain,
				     intel_wakeref_t wakeref)
{
	struct i915_power_domains *power_domains = &i915->power_domains;
	struct intel_runtime_pm *rpm = &i915->runtime_pm;
	intel_wakeref_t work_wakeref = intel_runtime_pm_get_raw(rpm);

	mutex_lock(&power_domains->lock);

	/* Not the last reference: drop it synchronously, nothing to defer. */
	if (power_domains->domain_use_count[domain] > 1) {
		__intel_display_power_put_domain(i915, domain);

		goto out_verify;
	}

	WARN_ON(power_domains->domain_use_count[domain] != 1);

	/* Let a pending work requeue itself or queue a new one. */
	if (power_domains->async_put_wakeref) {
		power_domains->async_put_domains[1] |= BIT_ULL(domain);
	} else {
		power_domains->async_put_domains[0] |= BIT_ULL(domain);
		queue_async_put_domains_work(power_domains,
					     fetch_and_zero(&work_wakeref));
	}

out_verify:
	verify_async_put_domains_state(power_domains);

	mutex_unlock(&power_domains->lock);

	/* Drop the raw wakeref unless it was handed to the queued work. */
	if (work_wakeref)
		intel_runtime_pm_put_raw(rpm, work_wakeref);

	intel_runtime_pm_put(rpm, wakeref);
}
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940
/**
 * intel_display_power_flush_work - flushes the async display power disabling work
 * @i915: i915 device instance
 *
 * Releases all power domain references queued by preceding
 * __intel_display_power_put_async() calls and cancels the delayed
 * worker. Note that the work handler function may still be running
 * after this function returns — see
 * intel_display_power_flush_work_sync().
 */
void intel_display_power_flush_work(struct drm_i915_private *i915)
{
	struct i915_power_domains *power_domains = &i915->power_domains;
	intel_wakeref_t work_wakeref;

	mutex_lock(&power_domains->lock);

	/* Nothing pending if the worker holds no wakeref. */
	work_wakeref = fetch_and_zero(&power_domains->async_put_wakeref);
	if (!work_wakeref)
		goto out_verify;

	release_async_put_domains(power_domains,
				  async_put_domains_mask(power_domains));
	cancel_delayed_work(&power_domains->async_put_work);

out_verify:
	verify_async_put_domains_state(power_domains);

	mutex_unlock(&power_domains->lock);

	if (work_wakeref)
		intel_runtime_pm_put_raw(&i915->runtime_pm, work_wakeref);
}
1964
1965
1966
1967
1968
1969
1970
1971
/*
 * Like intel_display_power_flush_work(), but also make sure the async
 * work handler is no longer running after this function returns.
 */
static void
intel_display_power_flush_work_sync(struct drm_i915_private *i915)
{
	struct i915_power_domains *power_domains = &i915->power_domains;

	intel_display_power_flush_work(i915);
	cancel_delayed_work_sync(&power_domains->async_put_work);

	verify_async_put_domains_state(power_domains);

	WARN_ON(power_domains->async_put_wakeref);
}
1984
1985#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
1986
1987
1988
1989
1990
1991
1992
1993
1994
1995
/**
 * intel_display_power_put - release a power domain reference
 * @dev_priv: i915 device instance
 * @domain: power domain to release
 * @wakeref: wakeref acquired for the reference that is being released
 *
 * This function drops the power domain reference obtained by
 * intel_display_power_get() along with its tracked runtime PM wakeref.
 * (Wakeref-tracking variant, built only with
 * CONFIG_DRM_I915_DEBUG_RUNTIME_PM.)
 */
void intel_display_power_put(struct drm_i915_private *dev_priv,
			     enum intel_display_power_domain domain,
			     intel_wakeref_t wakeref)
{
	__intel_display_power_put(dev_priv, domain);
	intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
}
2003#endif
2004
/* Power domain masks owned by each power well, per platform. */

#define I830_PIPES_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PIPE_A) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

#define VLV_DISPLAY_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_DISPLAY_CORE) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_A) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DSI) |	\
	BIT_ULL(POWER_DOMAIN_PORT_CRT) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_GMBUS) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

#define VLV_DPIO_CMN_BC_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_CRT) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

/* VLV per-lane-pair TX wells: lanes 0-1 / 2-3 of DDI B and C. */
#define VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

#define VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

#define VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

#define VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2060
/* CHV power well -> power domain mappings. */
#define CHV_DISPLAY_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_DISPLAY_CORE) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_A) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DSI) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_AUX_D) |	\
	BIT_ULL(POWER_DOMAIN_GMBUS) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

#define CHV_DPIO_CMN_BC_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

#define CHV_DPIO_CMN_D_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_D) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2095
/* HSW/BDW power well -> power domain mappings. */
#define HSW_DISPLAY_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_CRT) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

/* BDW: like HSW but without the pipe A panel fitter. */
#define BDW_DISPLAY_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_CRT) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2128
/* SKL power well -> power domain mappings. */
#define SKL_DISPLAY_POWERWELL_2_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_E_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_AUX_D) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define SKL_DISPLAY_DDI_IO_A_E_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_E_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define SKL_DISPLAY_DDI_IO_B_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define SKL_DISPLAY_DDI_IO_C_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define SKL_DISPLAY_DDI_IO_D_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
/* DC-off superset: everything in PW2 plus the always-on-ish extras. */
#define SKL_DISPLAY_DC_OFF_POWER_DOMAINS (	\
	SKL_DISPLAY_POWERWELL_2_POWER_DOMAINS |	\
	BIT_ULL(POWER_DOMAIN_GT_IRQ) |	\
	BIT_ULL(POWER_DOMAIN_MODESET) |	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2166
/* BXT power well -> power domain mappings. */
#define BXT_DISPLAY_POWERWELL_2_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define BXT_DISPLAY_DC_OFF_POWER_DOMAINS (	\
	BXT_DISPLAY_POWERWELL_2_POWER_DOMAINS |	\
	BIT_ULL(POWER_DOMAIN_GT_IRQ) |	\
	BIT_ULL(POWER_DOMAIN_MODESET) |	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_GMBUS) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define BXT_DPIO_CMN_A_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_A_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define BXT_DPIO_CMN_BC_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2199
/* GLK power well -> power domain mappings. */
#define GLK_DISPLAY_POWERWELL_2_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define GLK_DISPLAY_DDI_IO_A_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO))
#define GLK_DISPLAY_DDI_IO_B_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO))
#define GLK_DISPLAY_DDI_IO_C_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO))
#define GLK_DPIO_CMN_A_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_A_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define GLK_DPIO_CMN_B_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define GLK_DPIO_CMN_C_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define GLK_DISPLAY_AUX_A_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_AUX_IO_A) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define GLK_DISPLAY_AUX_B_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define GLK_DISPLAY_AUX_C_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define GLK_DISPLAY_DC_OFF_POWER_DOMAINS (	\
	GLK_DISPLAY_POWERWELL_2_POWER_DOMAINS |	\
	BIT_ULL(POWER_DOMAIN_GT_IRQ) |	\
	BIT_ULL(POWER_DOMAIN_MODESET) |	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_GMBUS) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2250
/* CNL power well -> power domain mappings. */
#define CNL_DISPLAY_POWERWELL_2_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |	\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_F_LANES) |	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_AUX_D) |	\
	BIT_ULL(POWER_DOMAIN_AUX_F) |	\
	BIT_ULL(POWER_DOMAIN_AUDIO) |	\
	BIT_ULL(POWER_DOMAIN_VGA) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_DDI_A_IO_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_DDI_B_IO_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_DDI_C_IO_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_DDI_D_IO_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_AUX_A_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_AUX_IO_A) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_AUX_B_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_B) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_AUX_C_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_C) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_AUX_D_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_D) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_AUX_F_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_AUX_F) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_DDI_F_IO_POWER_DOMAINS (	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_F_IO) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
#define CNL_DISPLAY_DC_OFF_POWER_DOMAINS (	\
	CNL_DISPLAY_POWERWELL_2_POWER_DOMAINS |	\
	BIT_ULL(POWER_DOMAIN_GT_IRQ) |	\
	BIT_ULL(POWER_DOMAIN_MODESET) |	\
	BIT_ULL(POWER_DOMAIN_AUX_A) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2307
2308
2309
2310
2311
2312
2313
2314
2315
2316
2317
2318
2319
2320
2321
/* ICL: power well 4 feeds pipe C and its panel fitter. */
#define ICL_PW_4_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_PIPE_C) |			\
	BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_INIT))

/*
 * ICL: power well 3 is a superset of power well 4; it additionally feeds
 * pipe B, transcoders A-C, DDI B-F lanes/IO, AUX B-F, the TBT AUX
 * channels, VGA and audio.
 */
#define ICL_PW_3_POWER_DOMAINS (			\
	ICL_PW_4_POWER_DOMAINS |			\
	BIT_ULL(POWER_DOMAIN_PIPE_B) |			\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_A) |		\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_B) |		\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_C) |		\
	BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO) |		\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO) |		\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO) |		\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_E_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_E_IO) |		\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_F_LANES) |	\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_F_IO) |		\
	BIT_ULL(POWER_DOMAIN_AUX_B) |			\
	BIT_ULL(POWER_DOMAIN_AUX_C) |			\
	BIT_ULL(POWER_DOMAIN_AUX_D) |			\
	BIT_ULL(POWER_DOMAIN_AUX_E) |			\
	BIT_ULL(POWER_DOMAIN_AUX_F) |			\
	BIT_ULL(POWER_DOMAIN_AUX_TBT1) |		\
	BIT_ULL(POWER_DOMAIN_AUX_TBT2) |		\
	BIT_ULL(POWER_DOMAIN_AUX_TBT3) |		\
	BIT_ULL(POWER_DOMAIN_AUX_TBT4) |		\
	BIT_ULL(POWER_DOMAIN_VGA) |			\
	BIT_ULL(POWER_DOMAIN_AUDIO) |			\
	BIT_ULL(POWER_DOMAIN_INIT))
2356
2357
2358
2359
/* ICL: power well 2 adds the eDP transcoder VDSC on top of power well 3. */
#define ICL_PW_2_POWER_DOMAINS (			\
	ICL_PW_3_POWER_DOMAINS |			\
	BIT_ULL(POWER_DOMAIN_TRANSCODER_EDP_VDSC) |	\
	BIT_ULL(POWER_DOMAIN_INIT))
2364
2365
2366
/*
 * ICL: domains that keep the "DC off" well enabled, blocking DC states:
 * everything behind power well 2 plus modeset and AUX A.
 */
#define ICL_DISPLAY_DC_OFF_POWER_DOMAINS (		\
	ICL_PW_2_POWER_DOMAINS |			\
	BIT_ULL(POWER_DOMAIN_MODESET) |			\
	BIT_ULL(POWER_DOMAIN_AUX_A) |			\
	BIT_ULL(POWER_DOMAIN_INIT))

/* ICL: per-DDI IO power wells (single domain each, no INIT bit). */
#define ICL_DDI_IO_A_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO))
#define ICL_DDI_IO_B_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO))
#define ICL_DDI_IO_C_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO))
#define ICL_DDI_IO_D_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO))
#define ICL_DDI_IO_E_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_E_IO))
#define ICL_DDI_IO_F_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_PORT_DDI_F_IO))

/* ICL: per-AUX-channel power wells; AUX A also covers its IO domain. */
#define ICL_AUX_A_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_IO_A) |		\
	BIT_ULL(POWER_DOMAIN_AUX_A))
#define ICL_AUX_B_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_B))
#define ICL_AUX_C_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_C))
#define ICL_AUX_D_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_D))
#define ICL_AUX_E_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_E))
#define ICL_AUX_F_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_F))
/* ICL: AUX power wells for the Thunderbolt ports. */
#define ICL_AUX_TBT1_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_TBT1))
#define ICL_AUX_TBT2_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_TBT2))
#define ICL_AUX_TBT3_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_TBT3))
#define ICL_AUX_TBT4_IO_POWER_DOMAINS (			\
	BIT_ULL(POWER_DOMAIN_AUX_TBT4))
2407
/* Ops for wells that are always on: enable/disable are no-ops. */
static const struct i915_power_well_ops i9xx_always_on_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = i9xx_always_on_power_well_noop,
	.disable = i9xx_always_on_power_well_noop,
	.is_enabled = i9xx_always_on_power_well_enabled,
};
2414
/* Ops for the CHV per-pipe power well. */
static const struct i915_power_well_ops chv_pipe_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = chv_pipe_power_well_enable,
	.disable = chv_pipe_power_well_disable,
	.is_enabled = chv_pipe_power_well_enabled,
};
2421
/* Ops for the CHV DPIO common-lane wells; status is read via the VLV path. */
static const struct i915_power_well_ops chv_dpio_cmn_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = chv_dpio_cmn_power_well_enable,
	.disable = chv_dpio_cmn_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
2428
/*
 * Fallback power well list for platforms with no real power wells:
 * a single always-on well covering every domain.
 */
static const struct i915_power_well_desc i9xx_always_on_power_well[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
};
2438
/* Ops for the i830 combined pipes power well. */
static const struct i915_power_well_ops i830_pipes_power_well_ops = {
	.sync_hw = i830_pipes_power_well_sync_hw,
	.enable = i830_pipes_power_well_enable,
	.disable = i830_pipes_power_well_disable,
	.is_enabled = i830_pipes_power_well_enabled,
};
2445
/* i830: always-on well plus one well covering both pipes. */
static const struct i915_power_well_desc i830_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "pipes",
		.domains = I830_PIPES_POWER_DOMAINS,
		.ops = &i830_pipes_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
};
2461
/* Generic ops for HSW-style (register-bank driven) power wells. */
static const struct i915_power_well_ops hsw_power_well_ops = {
	.sync_hw = hsw_power_well_sync_hw,
	.enable = hsw_power_well_enable,
	.disable = hsw_power_well_disable,
	.is_enabled = hsw_power_well_enabled,
};
2468
/* Ops for the virtual "DC off" well that gates DC state entry on gen9+. */
static const struct i915_power_well_ops gen9_dc_off_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = gen9_dc_off_power_well_enable,
	.disable = gen9_dc_off_power_well_disable,
	.is_enabled = gen9_dc_off_power_well_enabled,
};
2475
/* Ops for the BXT/GLK DPIO common-lane (PHY) power wells. */
static const struct i915_power_well_ops bxt_dpio_cmn_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = bxt_dpio_cmn_power_well_enable,
	.disable = bxt_dpio_cmn_power_well_disable,
	.is_enabled = bxt_dpio_cmn_power_well_enabled,
};
2482
/* HSW power well control register bank: one register per requester. */
static const struct i915_power_well_regs hsw_power_well_regs = {
	.bios	= HSW_PWR_WELL_CTL1,
	.driver	= HSW_PWR_WELL_CTL2,
	.kvmr	= HSW_PWR_WELL_CTL3,
	.debug	= HSW_PWR_WELL_CTL4,
};
2489
/* HSW: always-on well plus the single global display well. */
static const struct i915_power_well_desc hsw_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "display",
		.domains = HSW_DISPLAY_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = HSW_DISP_PW_GLOBAL,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = HSW_PW_CTL_IDX_GLOBAL,
			.hsw.has_vga = true,
		},
	},
};
2510
/* BDW: like HSW, but the global well also routes pipe B/C interrupts. */
static const struct i915_power_well_desc bdw_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "display",
		.domains = BDW_DISPLAY_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = HSW_DISP_PW_GLOBAL,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = HSW_PW_CTL_IDX_GLOBAL,
			.hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
			.hsw.has_vga = true,
		},
	},
};
2532
/* Ops for the VLV display (DISP2D) power well. */
static const struct i915_power_well_ops vlv_display_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = vlv_display_power_well_enable,
	.disable = vlv_display_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
2539
/* Ops for the VLV DPIO common-lane power well. */
static const struct i915_power_well_ops vlv_dpio_cmn_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = vlv_dpio_cmn_power_well_enable,
	.disable = vlv_dpio_cmn_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
2546
/* Generic ops for VLV Punit-controlled power wells (DPIO TX lanes). */
static const struct i915_power_well_ops vlv_dpio_power_well_ops = {
	.sync_hw = i9xx_power_well_sync_hw_noop,
	.enable = vlv_power_well_enable,
	.disable = vlv_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
2553
/*
 * VLV power well list. Note every dpio-tx-* well claims all four TX lane
 * domain masks (B 01/23 and C 01/23), so any lane use powers all of them.
 */
static const struct i915_power_well_desc vlv_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "display",
		.domains = VLV_DISPLAY_POWER_DOMAINS,
		.ops = &vlv_display_power_well_ops,
		.id = VLV_DISP_PW_DISP2D,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DISP2D,
		},
	},
	{
		.name = "dpio-tx-b-01",
		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
		.ops = &vlv_dpio_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_B_LANES_01,
		},
	},
	{
		.name = "dpio-tx-b-23",
		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
		.ops = &vlv_dpio_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_B_LANES_23,
		},
	},
	{
		.name = "dpio-tx-c-01",
		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
		.ops = &vlv_dpio_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_C_LANES_01,
		},
	},
	{
		.name = "dpio-tx-c-23",
		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
		.ops = &vlv_dpio_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_C_LANES_23,
		},
	},
	{
		.name = "dpio-common",
		.domains = VLV_DPIO_CMN_BC_POWER_DOMAINS,
		.ops = &vlv_dpio_cmn_power_well_ops,
		.id = VLV_DISP_PW_DPIO_CMN_BC,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_BC,
		},
	},
};
2629
/* CHV power well list. */
static const struct i915_power_well_desc chv_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "display",
		/*
		 * Pipe A power well is the new disp2d well. Pipe B and C
		 * power wells don't actually exist. Pipe A power well is
		 * required for any pipe to work.
		 */
		.domains = CHV_DISPLAY_POWER_DOMAINS,
		.ops = &chv_pipe_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "dpio-common-bc",
		.domains = CHV_DPIO_CMN_BC_POWER_DOMAINS,
		.ops = &chv_dpio_cmn_power_well_ops,
		.id = VLV_DISP_PW_DPIO_CMN_BC,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_BC,
		},
	},
	{
		.name = "dpio-common-d",
		.domains = CHV_DPIO_CMN_D_POWER_DOMAINS,
		.ops = &chv_dpio_cmn_power_well_ops,
		.id = CHV_DISP_PW_DPIO_CMN_D,
		{
			.vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_D,
		},
	},
};
2668
2669bool intel_display_power_well_is_enabled(struct drm_i915_private *dev_priv,
2670 enum i915_power_well_id power_well_id)
2671{
2672 struct i915_power_well *power_well;
2673 bool ret;
2674
2675 power_well = lookup_power_well(dev_priv, power_well_id);
2676 ret = power_well->desc->ops->is_enabled(dev_priv, power_well);
2677
2678 return ret;
2679}
2680
/* SKL power well list. */
static const struct i915_power_well_desc skl_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 1",
		/* Handled by the DMC firmware */
		.always_on = true,
		.domains = 0,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_1,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_1,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "MISC IO power well",
		/* Handled by the DMC firmware */
		.always_on = true,
		.domains = 0,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_MISC_IO,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_MISC_IO,
		},
	},
	{
		.name = "DC off",
		.domains = SKL_DISPLAY_DC_OFF_POWER_DOMAINS,
		.ops = &gen9_dc_off_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 2",
		.domains = SKL_DISPLAY_POWERWELL_2_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_2,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_2,
			.hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
			.hsw.has_vga = true,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "DDI A/E IO power well",
		.domains = SKL_DISPLAY_DDI_IO_A_E_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_A_E,
		},
	},
	{
		.name = "DDI B IO power well",
		.domains = SKL_DISPLAY_DDI_IO_B_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_B,
		},
	},
	{
		.name = "DDI C IO power well",
		.domains = SKL_DISPLAY_DDI_IO_C_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_C,
		},
	},
	{
		.name = "DDI D IO power well",
		.domains = SKL_DISPLAY_DDI_IO_D_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_D,
		},
	},
};
2774
/* BXT power well list. */
static const struct i915_power_well_desc bxt_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 1",
		/* Handled by the DMC firmware */
		.always_on = true,
		.domains = 0,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_1,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_1,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "DC off",
		.domains = BXT_DISPLAY_DC_OFF_POWER_DOMAINS,
		.ops = &gen9_dc_off_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 2",
		.domains = BXT_DISPLAY_POWERWELL_2_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_2,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_2,
			.hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
			.hsw.has_vga = true,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "dpio-common-a",
		.domains = BXT_DPIO_CMN_A_POWER_DOMAINS,
		.ops = &bxt_dpio_cmn_power_well_ops,
		.id = BXT_DISP_PW_DPIO_CMN_A,
		{
			.bxt.phy = DPIO_PHY1,
		},
	},
	{
		.name = "dpio-common-bc",
		.domains = BXT_DPIO_CMN_BC_POWER_DOMAINS,
		.ops = &bxt_dpio_cmn_power_well_ops,
		.id = VLV_DISP_PW_DPIO_CMN_BC,
		{
			.bxt.phy = DPIO_PHY0,
		},
	},
};
2834
/* GLK power well list. */
static const struct i915_power_well_desc glk_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 1",
		/* Handled by the DMC firmware */
		.always_on = true,
		.domains = 0,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_1,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_1,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "DC off",
		.domains = GLK_DISPLAY_DC_OFF_POWER_DOMAINS,
		.ops = &gen9_dc_off_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 2",
		.domains = GLK_DISPLAY_POWERWELL_2_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_2,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_2,
			.hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
			.hsw.has_vga = true,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "dpio-common-a",
		.domains = GLK_DPIO_CMN_A_POWER_DOMAINS,
		.ops = &bxt_dpio_cmn_power_well_ops,
		.id = BXT_DISP_PW_DPIO_CMN_A,
		{
			.bxt.phy = DPIO_PHY1,
		},
	},
	{
		.name = "dpio-common-b",
		.domains = GLK_DPIO_CMN_B_POWER_DOMAINS,
		.ops = &bxt_dpio_cmn_power_well_ops,
		.id = VLV_DISP_PW_DPIO_CMN_BC,
		{
			.bxt.phy = DPIO_PHY0,
		},
	},
	{
		.name = "dpio-common-c",
		.domains = GLK_DPIO_CMN_C_POWER_DOMAINS,
		.ops = &bxt_dpio_cmn_power_well_ops,
		.id = GLK_DISP_PW_DPIO_CMN_C,
		{
			.bxt.phy = DPIO_PHY2,
		},
	},
	{
		.name = "AUX A",
		.domains = GLK_DISPLAY_AUX_A_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_AUX_A,
		},
	},
	{
		.name = "AUX B",
		.domains = GLK_DISPLAY_AUX_B_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_AUX_B,
		},
	},
	{
		.name = "AUX C",
		.domains = GLK_DISPLAY_AUX_C_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_AUX_C,
		},
	},
	{
		.name = "DDI A IO power well",
		.domains = GLK_DISPLAY_DDI_IO_A_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_DDI_A,
		},
	},
	{
		.name = "DDI B IO power well",
		.domains = GLK_DISPLAY_DDI_IO_B_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_B,
		},
	},
	{
		.name = "DDI C IO power well",
		.domains = GLK_DISPLAY_DDI_IO_C_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_C,
		},
	},
};
2963
/*
 * CNL power well list. NOTE: "DDI F IO power well" and "AUX F" must stay
 * the last two entries — intel_power_domains_init() trims them off for
 * SKUs without port F.
 */
static const struct i915_power_well_desc cnl_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 1",
		/* Handled by the DMC firmware */
		.always_on = true,
		.domains = 0,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_1,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_1,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "AUX A",
		.domains = CNL_DISPLAY_AUX_A_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_AUX_A,
		},
	},
	{
		.name = "AUX B",
		.domains = CNL_DISPLAY_AUX_B_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_AUX_B,
		},
	},
	{
		.name = "AUX C",
		.domains = CNL_DISPLAY_AUX_C_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_AUX_C,
		},
	},
	{
		.name = "AUX D",
		.domains = CNL_DISPLAY_AUX_D_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = CNL_PW_CTL_IDX_AUX_D,
		},
	},
	{
		.name = "DC off",
		.domains = CNL_DISPLAY_DC_OFF_POWER_DOMAINS,
		.ops = &gen9_dc_off_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 2",
		.domains = CNL_DISPLAY_POWERWELL_2_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_2,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_PW_2,
			.hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
			.hsw.has_vga = true,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "DDI A IO power well",
		.domains = CNL_DISPLAY_DDI_A_IO_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = GLK_PW_CTL_IDX_DDI_A,
		},
	},
	{
		.name = "DDI B IO power well",
		.domains = CNL_DISPLAY_DDI_B_IO_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_B,
		},
	},
	{
		.name = "DDI C IO power well",
		.domains = CNL_DISPLAY_DDI_C_IO_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_C,
		},
	},
	{
		.name = "DDI D IO power well",
		.domains = CNL_DISPLAY_DDI_D_IO_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = SKL_PW_CTL_IDX_DDI_D,
		},
	},
	{
		.name = "DDI F IO power well",
		.domains = CNL_DISPLAY_DDI_F_IO_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = CNL_PW_CTL_IDX_DDI_F,
		},
	},
	{
		.name = "AUX F",
		.domains = CNL_DISPLAY_AUX_F_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = CNL_PW_CTL_IDX_AUX_F,
		},
	},
};
3105
/* Ops for ICL combo PHY AUX power wells (ports A/B). */
static const struct i915_power_well_ops icl_combo_phy_aux_power_well_ops = {
	.sync_hw = hsw_power_well_sync_hw,
	.enable = icl_combo_phy_aux_power_well_enable,
	.disable = icl_combo_phy_aux_power_well_disable,
	.is_enabled = hsw_power_well_enabled,
};
3112
/* Ops for ICL Type-C PHY AUX power wells; only enable is TC-specific. */
static const struct i915_power_well_ops icl_tc_phy_aux_power_well_ops = {
	.sync_hw = hsw_power_well_sync_hw,
	.enable = icl_tc_phy_aux_power_well_enable,
	.disable = hsw_power_well_disable,
	.is_enabled = hsw_power_well_enabled,
};
3119
/* ICL AUX power well control register bank (no KVMR register). */
static const struct i915_power_well_regs icl_aux_power_well_regs = {
	.bios	= ICL_PWR_WELL_CTL_AUX1,
	.driver	= ICL_PWR_WELL_CTL_AUX2,
	.debug	= ICL_PWR_WELL_CTL_AUX4,
};
3125
/* ICL DDI IO power well control register bank (no KVMR register). */
static const struct i915_power_well_regs icl_ddi_power_well_regs = {
	.bios	= ICL_PWR_WELL_CTL_DDI1,
	.driver	= ICL_PWR_WELL_CTL_DDI2,
	.debug	= ICL_PWR_WELL_CTL_DDI4,
};
3131
/* ICL power well list. */
static const struct i915_power_well_desc icl_power_wells[] = {
	{
		.name = "always-on",
		.always_on = true,
		.domains = POWER_DOMAIN_MASK,
		.ops = &i9xx_always_on_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 1",
		/* Handled by the DMC firmware */
		.always_on = true,
		.domains = 0,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_1,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_PW_1,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "DC off",
		.domains = ICL_DISPLAY_DC_OFF_POWER_DOMAINS,
		.ops = &gen9_dc_off_power_well_ops,
		.id = DISP_PW_ID_NONE,
	},
	{
		.name = "power well 2",
		.domains = ICL_PW_2_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = SKL_DISP_PW_2,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_PW_2,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "power well 3",
		.domains = ICL_PW_3_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_PW_3,
			.hsw.irq_pipe_mask = BIT(PIPE_B),
			.hsw.has_vga = true,
			.hsw.has_fuses = true,
		},
	},
	{
		.name = "DDI A IO",
		.domains = ICL_DDI_IO_A_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_ddi_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_DDI_A,
		},
	},
	{
		.name = "DDI B IO",
		.domains = ICL_DDI_IO_B_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_ddi_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_DDI_B,
		},
	},
	{
		.name = "DDI C IO",
		.domains = ICL_DDI_IO_C_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_ddi_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_DDI_C,
		},
	},
	{
		.name = "DDI D IO",
		.domains = ICL_DDI_IO_D_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_ddi_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_DDI_D,
		},
	},
	{
		.name = "DDI E IO",
		.domains = ICL_DDI_IO_E_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_ddi_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_DDI_E,
		},
	},
	{
		.name = "DDI F IO",
		.domains = ICL_DDI_IO_F_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_ddi_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_DDI_F,
		},
	},
	{
		.name = "AUX A",
		.domains = ICL_AUX_A_IO_POWER_DOMAINS,
		.ops = &icl_combo_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_A,
		},
	},
	{
		.name = "AUX B",
		.domains = ICL_AUX_B_IO_POWER_DOMAINS,
		.ops = &icl_combo_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_B,
		},
	},
	/* AUX C-F drive the Type-C ports in legacy (non-TBT) mode. */
	{
		.name = "AUX C",
		.domains = ICL_AUX_C_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_C,
			.hsw.is_tc_tbt = false,
		},
	},
	{
		.name = "AUX D",
		.domains = ICL_AUX_D_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_D,
			.hsw.is_tc_tbt = false,
		},
	},
	{
		.name = "AUX E",
		.domains = ICL_AUX_E_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_E,
			.hsw.is_tc_tbt = false,
		},
	},
	{
		.name = "AUX F",
		.domains = ICL_AUX_F_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_F,
			.hsw.is_tc_tbt = false,
		},
	},
	/* AUX TBT1-4 drive the Type-C ports in Thunderbolt mode. */
	{
		.name = "AUX TBT1",
		.domains = ICL_AUX_TBT1_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_TBT1,
			.hsw.is_tc_tbt = true,
		},
	},
	{
		.name = "AUX TBT2",
		.domains = ICL_AUX_TBT2_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_TBT2,
			.hsw.is_tc_tbt = true,
		},
	},
	{
		.name = "AUX TBT3",
		.domains = ICL_AUX_TBT3_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_TBT3,
			.hsw.is_tc_tbt = true,
		},
	},
	{
		.name = "AUX TBT4",
		.domains = ICL_AUX_TBT4_IO_POWER_DOMAINS,
		.ops = &icl_tc_phy_aux_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &icl_aux_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_AUX_TBT4,
			.hsw.is_tc_tbt = true,
		},
	},
	{
		.name = "power well 4",
		.domains = ICL_PW_4_POWER_DOMAINS,
		.ops = &hsw_power_well_ops,
		.id = DISP_PW_ID_NONE,
		{
			.hsw.regs = &hsw_power_well_regs,
			.hsw.idx = ICL_PW_CTL_IDX_PW_4,
			.hsw.has_fuses = true,
			.hsw.irq_pipe_mask = BIT(PIPE_C),
		},
	},
};
3364
/*
 * Normalize the disable_power_well module parameter to a strict 0/1:
 * any negative value means "use the default" (enabled, i.e. 1), any
 * non-negative value is collapsed to a boolean.
 */
static int
sanitize_disable_power_well_option(const struct drm_i915_private *dev_priv,
				   int disable_power_well)
{
	if (disable_power_well < 0)
		return 1;

	return disable_power_well != 0;
}
3374
/*
 * Compute the mask of DC states the driver may enter, from the platform's
 * capabilities and the enable_dc module parameter (-1 = max supported,
 * 0/1/2 = cap at that DC level; other values are adjusted with a warning).
 */
static u32 get_allowed_dc_mask(const struct drm_i915_private *dev_priv,
			       int enable_dc)
{
	u32 mask;
	int requested_dc;
	int max_dc;

	if (INTEL_GEN(dev_priv) >= 11) {
		max_dc = 2;
		/*
		 * DC9 has a separate HW flow from the rest of the DC states,
		 * not depending on the DMC firmware. It's needed by system
		 * suspend/resume, so allow it unconditionally.
		 */
		mask = DC_STATE_EN_DC9;
	} else if (IS_GEN(dev_priv, 10) || IS_GEN9_BC(dev_priv)) {
		max_dc = 2;
		mask = 0;
	} else if (IS_GEN9_LP(dev_priv)) {
		max_dc = 1;
		mask = DC_STATE_EN_DC9;
	} else {
		max_dc = 0;
		mask = 0;
	}

	/* With power wells kept on, no DC state can be entered. */
	if (!i915_modparams.disable_power_well)
		max_dc = 0;

	if (enable_dc >= 0 && enable_dc <= max_dc) {
		requested_dc = enable_dc;
	} else if (enable_dc == -1) {
		requested_dc = max_dc;
	} else if (enable_dc > max_dc && enable_dc <= 2) {
		DRM_DEBUG_KMS("Adjusting requested max DC state (%d->%d)\n",
			      enable_dc, max_dc);
		requested_dc = max_dc;
	} else {
		DRM_ERROR("Unexpected value for enable_dc (%d)\n", enable_dc);
		requested_dc = max_dc;
	}

	/* DC6 implies DC5 being allowed as well. */
	if (requested_dc > 1)
		mask |= DC_STATE_EN_UPTO_DC6;
	if (requested_dc > 0)
		mask |= DC_STATE_EN_UPTO_DC5;

	DRM_DEBUG_KMS("Allowed DC state mask %02x\n", mask);

	return mask;
}
3426
3427static int
3428__set_power_wells(struct i915_power_domains *power_domains,
3429 const struct i915_power_well_desc *power_well_descs,
3430 int power_well_count)
3431{
3432 u64 power_well_ids = 0;
3433 int i;
3434
3435 power_domains->power_well_count = power_well_count;
3436 power_domains->power_wells =
3437 kcalloc(power_well_count,
3438 sizeof(*power_domains->power_wells),
3439 GFP_KERNEL);
3440 if (!power_domains->power_wells)
3441 return -ENOMEM;
3442
3443 for (i = 0; i < power_well_count; i++) {
3444 enum i915_power_well_id id = power_well_descs[i].id;
3445
3446 power_domains->power_wells[i].desc = &power_well_descs[i];
3447
3448 if (id == DISP_PW_ID_NONE)
3449 continue;
3450
3451 WARN_ON(id >= sizeof(power_well_ids) * 8);
3452 WARN_ON(power_well_ids & BIT_ULL(id));
3453 power_well_ids |= BIT_ULL(id);
3454 }
3455
3456 return 0;
3457}
3458
/* Convenience wrapper deriving the well count from the array itself. */
#define set_power_wells(power_domains, __power_well_descs) \
	__set_power_wells(power_domains, __power_well_descs, \
			  ARRAY_SIZE(__power_well_descs))
3462
3463
3464
3465
3466
3467
3468
3469
/**
 * intel_power_domains_init - initializes the power domain structures
 * @dev_priv: i915 device instance
 *
 * Initializes the power domain structures for @dev_priv depending upon the
 * supported platform. Returns 0 on success or a negative errno.
 */
int intel_power_domains_init(struct drm_i915_private *dev_priv)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	int err;

	/* Sanitize the module parameters before anything uses them. */
	i915_modparams.disable_power_well =
		sanitize_disable_power_well_option(dev_priv,
						   i915_modparams.disable_power_well);
	dev_priv->csr.allowed_dc_mask =
		get_allowed_dc_mask(dev_priv, i915_modparams.enable_dc);

	/* Domain masks are u64 bitmaps; all domains must fit. */
	BUILD_BUG_ON(POWER_DOMAIN_NUM > 64);

	mutex_init(&power_domains->lock);

	INIT_DELAYED_WORK(&power_domains->async_put_work,
			  intel_display_power_put_async_work);

	/*
	 * The enabling order will be from lower to higher indexed wells,
	 * the disabling order is reversed. Check the most specific
	 * platforms first.
	 */
	if (IS_GEN(dev_priv, 11)) {
		err = set_power_wells(power_domains, icl_power_wells);
	} else if (IS_CANNONLAKE(dev_priv)) {
		err = set_power_wells(power_domains, cnl_power_wells);

		/*
		 * DDI and Aux IO are getting enabled for all ports
		 * regardless the presence or use. So, in order to avoid
		 * timeouts, lets remove them from the list for the SKUs
		 * without port F. This relies on "DDI F IO" and "AUX F"
		 * being the last two entries of cnl_power_wells[].
		 */
		if (!IS_CNL_WITH_PORT_F(dev_priv))
			power_domains->power_well_count -= 2;
	} else if (IS_GEMINILAKE(dev_priv)) {
		err = set_power_wells(power_domains, glk_power_wells);
	} else if (IS_BROXTON(dev_priv)) {
		err = set_power_wells(power_domains, bxt_power_wells);
	} else if (IS_GEN9_BC(dev_priv)) {
		err = set_power_wells(power_domains, skl_power_wells);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		err = set_power_wells(power_domains, chv_power_wells);
	} else if (IS_BROADWELL(dev_priv)) {
		err = set_power_wells(power_domains, bdw_power_wells);
	} else if (IS_HASWELL(dev_priv)) {
		err = set_power_wells(power_domains, hsw_power_wells);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		err = set_power_wells(power_domains, vlv_power_wells);
	} else if (IS_I830(dev_priv)) {
		err = set_power_wells(power_domains, i830_power_wells);
	} else {
		err = set_power_wells(power_domains, i9xx_always_on_power_well);
	}

	return err;
}
3527
/**
 * intel_power_domains_cleanup - clean up power domain structures
 * @dev_priv: i915 device instance
 *
 * Frees the power well array allocated by intel_power_domains_init().
 */
void intel_power_domains_cleanup(struct drm_i915_private *dev_priv)
{
	kfree(dev_priv->power_domains.power_wells);
}
3538
/*
 * Synchronize the software tracking with the current hardware state of all
 * power wells: run each well's sync_hw hook and record whether the well is
 * actually enabled in hardware.
 */
static void intel_power_domains_sync_hw(struct drm_i915_private *dev_priv)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *power_well;

	mutex_lock(&power_domains->lock);
	for_each_power_well(dev_priv, power_well) {
		power_well->desc->ops->sync_hw(dev_priv, power_well);
		power_well->hw_enabled =
			power_well->desc->ops->is_enabled(dev_priv, power_well);
	}
	mutex_unlock(&power_domains->lock);
}
3552
3553static inline
3554bool intel_dbuf_slice_set(struct drm_i915_private *dev_priv,
3555 i915_reg_t reg, bool enable)
3556{
3557 u32 val, status;
3558
3559 val = I915_READ(reg);
3560 val = enable ? (val | DBUF_POWER_REQUEST) : (val & ~DBUF_POWER_REQUEST);
3561 I915_WRITE(reg, val);
3562 POSTING_READ(reg);
3563 udelay(10);
3564
3565 status = I915_READ(reg) & DBUF_POWER_STATE;
3566 if ((enable && !status) || (!enable && status)) {
3567 DRM_ERROR("DBus power %s timeout!\n",
3568 enable ? "enable" : "disable");
3569 return false;
3570 }
3571 return true;
3572}
3573
/* Power up the single gen9 DBUF slice. */
static void gen9_dbuf_enable(struct drm_i915_private *dev_priv)
{
	intel_dbuf_slice_set(dev_priv, DBUF_CTL, true);
}
3578
/* Power down the single gen9 DBUF slice. */
static void gen9_dbuf_disable(struct drm_i915_private *dev_priv)
{
	intel_dbuf_slice_set(dev_priv, DBUF_CTL, false);
}
3583
3584static u8 intel_dbuf_max_slices(struct drm_i915_private *dev_priv)
3585{
3586 if (INTEL_GEN(dev_priv) < 11)
3587 return 1;
3588 return 2;
3589}
3590
3591void icl_dbuf_slices_update(struct drm_i915_private *dev_priv,
3592 u8 req_slices)
3593{
3594 const u8 hw_enabled_slices = dev_priv->wm.skl_hw.ddb.enabled_slices;
3595 bool ret;
3596
3597 if (req_slices > intel_dbuf_max_slices(dev_priv)) {
3598 DRM_ERROR("Invalid number of dbuf slices requested\n");
3599 return;
3600 }
3601
3602 if (req_slices == hw_enabled_slices || req_slices == 0)
3603 return;
3604
3605 if (req_slices > hw_enabled_slices)
3606 ret = intel_dbuf_slice_set(dev_priv, DBUF_CTL_S2, true);
3607 else
3608 ret = intel_dbuf_slice_set(dev_priv, DBUF_CTL_S2, false);
3609
3610 if (ret)
3611 dev_priv->wm.skl_hw.ddb.enabled_slices = req_slices;
3612}
3613
/*
 * Power up both ICL DBUF slices and verify (after a 10us settle delay) that
 * both report the power state.
 */
static void icl_dbuf_enable(struct drm_i915_private *dev_priv)
{
	I915_WRITE(DBUF_CTL_S1, I915_READ(DBUF_CTL_S1) | DBUF_POWER_REQUEST);
	I915_WRITE(DBUF_CTL_S2, I915_READ(DBUF_CTL_S2) | DBUF_POWER_REQUEST);
	POSTING_READ(DBUF_CTL_S2);

	udelay(10);

	if (!(I915_READ(DBUF_CTL_S1) & DBUF_POWER_STATE) ||
	    !(I915_READ(DBUF_CTL_S2) & DBUF_POWER_STATE))
		DRM_ERROR("DBuf power enable timeout\n");
	else
		/*
		 * FIXME: both slices were just enabled, yet the tracked
		 * count is set to 1 — presumably the rest of the driver
		 * only handles a single slice here. TODO confirm.
		 */
		dev_priv->wm.skl_hw.ddb.enabled_slices = 1;
}
3632
/*
 * Power down both ICL DBUF slices and verify (after a 10us settle delay)
 * that both dropped the power state.
 */
static void icl_dbuf_disable(struct drm_i915_private *dev_priv)
{
	I915_WRITE(DBUF_CTL_S1, I915_READ(DBUF_CTL_S1) & ~DBUF_POWER_REQUEST);
	I915_WRITE(DBUF_CTL_S2, I915_READ(DBUF_CTL_S2) & ~DBUF_POWER_REQUEST);
	POSTING_READ(DBUF_CTL_S2);

	udelay(10);

	if ((I915_READ(DBUF_CTL_S1) & DBUF_POWER_STATE) ||
	    (I915_READ(DBUF_CTL_S2) & DBUF_POWER_STATE))
		DRM_ERROR("DBuf power disable timeout!\n");
	else
		/*
		 * FIXME: the slices were just disabled, yet the tracked
		 * count is left at 1 rather than 0 — appears intentional
		 * ("pretend one slice is always on"). TODO confirm.
		 */
		dev_priv->wm.skl_hw.ddb.enabled_slices = 1;
}
3651
/*
 * Program the MBus ABOX arbitration credits. The specific credit values are
 * presumably taken from the hardware programming guide — TODO confirm
 * against BSpec.
 */
static void icl_mbus_init(struct drm_i915_private *dev_priv)
{
	u32 val;

	val = MBUS_ABOX_BT_CREDIT_POOL1(16) |
	      MBUS_ABOX_BT_CREDIT_POOL2(16) |
	      MBUS_ABOX_B_CREDIT(1) |
	      MBUS_ABOX_BW_CREDIT(1);

	I915_WRITE(MBUS_ABOX_CTL, val);
}
3663
/*
 * Sanity check the LCPLL/CDCLK state left by the BIOS: CDCLK must be sourced
 * from a locked, non-SSC-referenced LCPLL. Only reports errors; makes no
 * attempt to fix the state.
 */
static void hsw_assert_cdclk(struct drm_i915_private *dev_priv)
{
	u32 val = I915_READ(LCPLL_CTL);

	/* CDCLK must come from LCPLL, not from FCLK. */
	if (val & LCPLL_CD_SOURCE_FCLK)
		DRM_ERROR("CDCLK source is not LCPLL\n");

	if (val & LCPLL_PLL_DISABLE)
		DRM_ERROR("LCPLL is disabled\n");

	if ((val & LCPLL_REF_MASK) != LCPLL_REF_NON_SSC)
		DRM_ERROR("LCPLL not using non-SSC reference\n");
}
3683
/*
 * Assert that every consumer of LCPLL-derived clocks is already off: all
 * CRTCs, the display power well, SPLL/WRPLLs, panel power, backlight PWMs,
 * the utility pin and PCH GTC. Violations are reported via I915_STATE_WARN;
 * nothing is actively disabled here.
 */
static void assert_can_disable_lcpll(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_crtc *crtc;

	for_each_intel_crtc(dev, crtc)
		I915_STATE_WARN(crtc->active, "CRTC for pipe %c enabled\n",
				pipe_name(crtc->pipe));

	I915_STATE_WARN(I915_READ(HSW_PWR_WELL_CTL2),
			"Display power well on\n");
	I915_STATE_WARN(I915_READ(SPLL_CTL) & SPLL_PLL_ENABLE,
			"SPLL enabled\n");
	I915_STATE_WARN(I915_READ(WRPLL_CTL(0)) & WRPLL_PLL_ENABLE,
			"WRPLL1 enabled\n");
	I915_STATE_WARN(I915_READ(WRPLL_CTL(1)) & WRPLL_PLL_ENABLE,
			"WRPLL2 enabled\n");
	I915_STATE_WARN(I915_READ(PP_STATUS(0)) & PP_ON,
			"Panel power on\n");
	I915_STATE_WARN(I915_READ(BLC_PWM_CPU_CTL2) & BLM_PWM_ENABLE,
			"CPU PWM1 enabled\n");
	/* PWM2 only exists on Haswell. */
	if (IS_HASWELL(dev_priv))
		I915_STATE_WARN(I915_READ(HSW_BLC_PWM2_CTL) & BLM_PWM_ENABLE,
				"CPU PWM2 enabled\n");
	I915_STATE_WARN(I915_READ(BLC_PWM_PCH_CTL1) & BLM_PCH_PWM_ENABLE,
			"PCH PWM1 enabled\n");
	I915_STATE_WARN(I915_READ(UTIL_PIN_CTL) & UTIL_PIN_ENABLE,
			"Utility pin enabled\n");
	I915_STATE_WARN(I915_READ(PCH_GTC_CTL) & PCH_GTC_ENABLE,
			"PCH GTC enabled\n");

	/*
	 * Interrupts must already be disabled before LCPLL goes down;
	 * interrupt delivery depends on clocks derived from it.
	 * NOTE(review): the dependency is inferred from this assert — the
	 * authoritative reason lives in the HSW display sequences.
	 */
	I915_STATE_WARN(intel_irqs_enabled(dev_priv), "IRQs enabled\n");
}
3723
3724static u32 hsw_read_dcomp(struct drm_i915_private *dev_priv)
3725{
3726 if (IS_HASWELL(dev_priv))
3727 return I915_READ(D_COMP_HSW);
3728 else
3729 return I915_READ(D_COMP_BDW);
3730}
3731
/*
 * Write the D_COMP register. On Haswell this must go through the pcode
 * mailbox (GEN6_PCODE_WRITE_D_COMP); Broadwell exposes it as a plain MMIO
 * register.
 */
static void hsw_write_dcomp(struct drm_i915_private *dev_priv, u32 val)
{
	if (IS_HASWELL(dev_priv)) {
		if (sandybridge_pcode_write(dev_priv,
					    GEN6_PCODE_WRITE_D_COMP, val))
			DRM_DEBUG_KMS("Failed to write to D_COMP\n");
	} else {
		I915_WRITE(D_COMP_BDW, val);
		POSTING_READ(D_COMP_BDW);
	}
}
3743
/*
 * hsw_disable_lcpll - disable LCPLL
 * @dev_priv: i915 device instance
 * @switch_to_fclk: move CDCLK onto FCLK before disabling the PLL
 * @allow_power_down: set LCPLL_POWER_DOWN_ALLOW once the PLL is unlocked
 *
 * Disable the LCPLL: optionally reclock CDCLK to FCLK, disable and wait for
 * the PLL to unlock, disable D_COMP, and optionally allow the power-down.
 * The exact order of register writes and waits follows the hardware
 * sequence; callers must have ensured nothing uses LCPLL-derived clocks
 * (see assert_can_disable_lcpll()).
 */
static void hsw_disable_lcpll(struct drm_i915_private *dev_priv,
			      bool switch_to_fclk, bool allow_power_down)
{
	u32 val;

	assert_can_disable_lcpll(dev_priv);

	val = I915_READ(LCPLL_CTL);

	if (switch_to_fclk) {
		val |= LCPLL_CD_SOURCE_FCLK;
		I915_WRITE(LCPLL_CTL, val);

		if (wait_for_us(I915_READ(LCPLL_CTL) &
				LCPLL_CD_SOURCE_FCLK_DONE, 1))
			DRM_ERROR("Switching to FCLK failed\n");

		val = I915_READ(LCPLL_CTL);
	}

	val |= LCPLL_PLL_DISABLE;
	I915_WRITE(LCPLL_CTL, val);
	POSTING_READ(LCPLL_CTL);

	if (intel_wait_for_register(&dev_priv->uncore, LCPLL_CTL,
				    LCPLL_PLL_LOCK, 0, 1))
		DRM_ERROR("LCPLL still locked\n");

	val = hsw_read_dcomp(dev_priv);
	val |= D_COMP_COMP_DISABLE;
	hsw_write_dcomp(dev_priv, val);
	ndelay(100);

	if (wait_for((hsw_read_dcomp(dev_priv) &
		      D_COMP_RCOMP_IN_PROGRESS) == 0, 1))
		DRM_ERROR("D_COMP RCOMP still in progress\n");

	if (allow_power_down) {
		val = I915_READ(LCPLL_CTL);
		val |= LCPLL_POWER_DOWN_ALLOW;
		I915_WRITE(LCPLL_CTL, val);
		POSTING_READ(LCPLL_CTL);
	}
}
3796
/*
 * hsw_restore_lcpll - fully restore LCPLL and CDCLK after disabling it
 * @dev_priv: i915 device instance
 *
 * Inverse of hsw_disable_lcpll(): disallow power-down, re-enable D_COMP,
 * re-enable and wait for the PLL lock, and switch CDCLK back from FCLK.
 * Bails out early if the PLL is already locked with no FCLK/power-down
 * bits set.
 */
static void hsw_restore_lcpll(struct drm_i915_private *dev_priv)
{
	u32 val;

	val = I915_READ(LCPLL_CTL);

	/* Already fully up: locked, no FCLK source, power-down disallowed. */
	if ((val & (LCPLL_PLL_LOCK | LCPLL_PLL_DISABLE | LCPLL_CD_SOURCE_FCLK |
		    LCPLL_POWER_DOWN_ALLOW)) == LCPLL_PLL_LOCK)
		return;

	/*
	 * Hold forcewake for the whole sequence.
	 * NOTE(review): presumably to keep register access from hitting
	 * deep power states mid-sequence — confirm against the HSW
	 * display sequences.
	 */
	intel_uncore_forcewake_get(&dev_priv->uncore, FORCEWAKE_ALL);

	if (val & LCPLL_POWER_DOWN_ALLOW) {
		val &= ~LCPLL_POWER_DOWN_ALLOW;
		I915_WRITE(LCPLL_CTL, val);
		POSTING_READ(LCPLL_CTL);
	}

	val = hsw_read_dcomp(dev_priv);
	val |= D_COMP_COMP_FORCE;
	val &= ~D_COMP_COMP_DISABLE;
	hsw_write_dcomp(dev_priv, val);

	val = I915_READ(LCPLL_CTL);
	val &= ~LCPLL_PLL_DISABLE;
	I915_WRITE(LCPLL_CTL, val);

	if (intel_wait_for_register(&dev_priv->uncore, LCPLL_CTL,
				    LCPLL_PLL_LOCK, LCPLL_PLL_LOCK, 5))
		DRM_ERROR("LCPLL not locked yet\n");

	if (val & LCPLL_CD_SOURCE_FCLK) {
		val = I915_READ(LCPLL_CTL);
		val &= ~LCPLL_CD_SOURCE_FCLK;
		I915_WRITE(LCPLL_CTL, val);

		if (wait_for_us((I915_READ(LCPLL_CTL) &
				 LCPLL_CD_SOURCE_FCLK_DONE) == 0, 1))
			DRM_ERROR("Switching back to LCPLL failed\n");
	}

	intel_uncore_forcewake_put(&dev_priv->uncore, FORCEWAKE_ALL);

	/* CDCLK changed source; re-read and log the resulting state. */
	intel_update_cdclk(dev_priv);
	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
}
3851
/*
 * hsw_enable_pc8 - allow the system to enter Package C8+
 * @dev_priv: i915 device instance
 *
 * Prepare display for Package C8+: on LPT-LP drop the PCH clock-gating
 * partition-level disable, turn off the DP clock output and disable LCPLL
 * (switching CDCLK to FCLK and allowing the PLL power-down). Reversed by
 * hsw_disable_pc8().
 */
void hsw_enable_pc8(struct drm_i915_private *dev_priv)
{
	u32 val;

	DRM_DEBUG_KMS("Enabling package C8+\n");

	if (HAS_PCH_LPT_LP(dev_priv)) {
		val = I915_READ(SOUTH_DSPCLK_GATE_D);
		val &= ~PCH_LP_PARTITION_LEVEL_DISABLE;
		I915_WRITE(SOUTH_DSPCLK_GATE_D, val);
	}

	lpt_disable_clkout_dp(dev_priv);
	hsw_disable_lcpll(dev_priv, true, true);
}
3890
/*
 * hsw_disable_pc8 - undo the Package C8+ preparation
 * @dev_priv: i915 device instance
 *
 * Restore LCPLL/CDCLK, reprogram the PCH reference clock and, on LPT-LP,
 * re-assert the partition-level clock-gating disable.
 */
void hsw_disable_pc8(struct drm_i915_private *dev_priv)
{
	u32 val;

	DRM_DEBUG_KMS("Disabling package C8+\n");

	hsw_restore_lcpll(dev_priv);
	intel_init_pch_refclk(dev_priv);

	if (HAS_PCH_LPT_LP(dev_priv)) {
		val = I915_READ(SOUTH_DSPCLK_GATE_D);
		val |= PCH_LP_PARTITION_LEVEL_DISABLE;
		I915_WRITE(SOUTH_DSPCLK_GATE_D, val);
	}
}
3906
3907static void intel_pch_reset_handshake(struct drm_i915_private *dev_priv,
3908 bool enable)
3909{
3910 i915_reg_t reg;
3911 u32 reset_bits, val;
3912
3913 if (IS_IVYBRIDGE(dev_priv)) {
3914 reg = GEN7_MSG_CTL;
3915 reset_bits = WAIT_FOR_PCH_FLR_ACK | WAIT_FOR_PCH_RESET_ACK;
3916 } else {
3917 reg = HSW_NDE_RSTWRN_OPT;
3918 reset_bits = RESET_PCH_HANDSHAKE_ENABLE;
3919 }
3920
3921 val = I915_READ(reg);
3922
3923 if (enable)
3924 val |= reset_bits;
3925 else
3926 val &= ~reset_bits;
3927
3928 I915_WRITE(reg, val);
3929}
3930
/*
 * Bring up the SKL display core: clear any DC state, enable the PCH reset
 * handshake, power up PG1 and Misc I/O, initialize CDCLK, enable DBUF and,
 * on resume, reload the DMC firmware. The step order mirrors the hardware
 * power-up sequence and must not be changed.
 */
static void skl_display_core_init(struct drm_i915_private *dev_priv,
				  bool resume)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	/* Enable PCH reset handshake (skipped when there is no PCH). */
	intel_pch_reset_handshake(dev_priv, !HAS_PCH_NOP(dev_priv));

	/* Enable PG1 and Misc I/O. */
	mutex_lock(&power_domains->lock);

	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_enable(dev_priv, well);

	well = lookup_power_well(dev_priv, SKL_DISP_PW_MISC_IO);
	intel_power_well_enable(dev_priv, well);

	mutex_unlock(&power_domains->lock);

	intel_cdclk_init(dev_priv);

	gen9_dbuf_enable(dev_priv);

	if (resume && dev_priv->csr.dmc_payload)
		intel_csr_load_program(dev_priv);
}
3960
/*
 * Power down the SKL display core, reversing skl_display_core_init():
 * clear DC state, disable DBUF and CDCLK, then drop the PG1 request.
 */
static void skl_display_core_uninit(struct drm_i915_private *dev_priv)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	gen9_dbuf_disable(dev_priv);

	intel_cdclk_uninit(dev_priv);

	/* The PCH reset handshake flag is intentionally left as-is. */
	mutex_lock(&power_domains->lock);

	/*
	 * Only PG1 is released here; the Misc I/O well enabled in
	 * skl_display_core_init() is deliberately kept on.
	 * NOTE(review): presumably required by the power-down sequence —
	 * confirm against the SKL display sequences.
	 */
	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_disable(dev_priv, well);

	mutex_unlock(&power_domains->lock);

	/* Short settle delay after dropping the power well request. */
	usleep_range(10, 30);
}
3990
/*
 * Bring up the BXT/GLK display core: clear DC state, keep the PCH reset
 * handshake disabled, power up PG1, initialize CDCLK, enable DBUF and,
 * on resume, reload the DMC firmware.
 */
void bxt_display_core_init(struct drm_i915_private *dev_priv,
			   bool resume)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	/*
	 * The PCH reset handshake is explicitly disabled here (unlike SKL).
	 * NOTE(review): presumably because these SoCs have no PCH to
	 * acknowledge the handshake — confirm against BSpec.
	 */
	intel_pch_reset_handshake(dev_priv, false);

	/* Enable PG1. */
	mutex_lock(&power_domains->lock);

	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_enable(dev_priv, well);

	mutex_unlock(&power_domains->lock);

	intel_cdclk_init(dev_priv);

	gen9_dbuf_enable(dev_priv);

	if (resume && dev_priv->csr.dmc_payload)
		intel_csr_load_program(dev_priv);
}
4022
/*
 * Power down the BXT/GLK display core, reversing bxt_display_core_init():
 * clear DC state, disable DBUF and CDCLK, then drop the PG1 request.
 */
void bxt_display_core_uninit(struct drm_i915_private *dev_priv)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	gen9_dbuf_disable(dev_priv);

	intel_cdclk_uninit(dev_priv);

	/*
	 * Only the driver's own PG1 request is dropped; the well may stay
	 * physically enabled if other requesters (e.g. firmware) hold it.
	 * NOTE(review): inferred from the request/disable split — confirm.
	 */
	mutex_lock(&power_domains->lock);

	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_disable(dev_priv, well);

	mutex_unlock(&power_domains->lock);

	/* Short settle delay after dropping the power well request. */
	usleep_range(10, 30);
}
4050
/*
 * Bring up the CNL display core following the fixed sequence: reset
 * handshake, combo PHY init, PG1, CDCLK, DBUF, then (on resume) reload the
 * DMC firmware. Reversed by cnl_display_core_uninit().
 */
static void cnl_display_core_init(struct drm_i915_private *dev_priv, bool resume)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	/* 1. Enable PCH reset handshake. */
	intel_pch_reset_handshake(dev_priv, !HAS_PCH_NOP(dev_priv));

	/* 2. Initialize all combo phys. */
	intel_combo_phy_init(dev_priv);

	/* 3. Enable power well 1 (PG1). */
	mutex_lock(&power_domains->lock);
	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_enable(dev_priv, well);
	mutex_unlock(&power_domains->lock);

	/* 4. Enable CD clock. */
	intel_cdclk_init(dev_priv);

	/* 5. Enable DBUF. */
	gen9_dbuf_enable(dev_priv);

	if (resume && dev_priv->csr.dmc_payload)
		intel_csr_load_program(dev_priv);
}
4082
/*
 * Power down the CNL display core in the reverse order of
 * cnl_display_core_init(): DC state, DBUF, CDCLK, PG1, combo PHYs.
 */
static void cnl_display_core_uninit(struct drm_i915_private *dev_priv)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	/* 1. Disable DBUF. */
	gen9_dbuf_disable(dev_priv);

	/* 2. Disable CD clock. */
	intel_cdclk_uninit(dev_priv);

	/* 3. Disable power well 1 (PG1). */
	mutex_lock(&power_domains->lock);
	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_disable(dev_priv, well);
	mutex_unlock(&power_domains->lock);

	/* Short settle delay after dropping the power well request. */
	usleep_range(10, 30);

	/* 4. Uninitialize the combo phys. */
	intel_combo_phy_uninit(dev_priv);
}
4113
/*
 * Bring up the ICL display core following the fixed sequence: reset
 * handshake, combo PHY init, PG1, CDCLK, DBUF (both slices), MBus credits,
 * then (on resume) reload the DMC firmware. Reversed by
 * icl_display_core_uninit().
 */
void icl_display_core_init(struct drm_i915_private *dev_priv,
			   bool resume)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	/* 1. Enable PCH reset handshake. */
	intel_pch_reset_handshake(dev_priv, !HAS_PCH_NOP(dev_priv));

	/* 2. Initialize all combo phys. */
	intel_combo_phy_init(dev_priv);

	/* 3. Enable power well 1 (PG1). */
	mutex_lock(&power_domains->lock);
	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_enable(dev_priv, well);
	mutex_unlock(&power_domains->lock);

	/* 4. Enable CD clock. */
	intel_cdclk_init(dev_priv);

	/* 5. Enable both DBUF slices. */
	icl_dbuf_enable(dev_priv);

	/* 6. Program MBus credits. */
	icl_mbus_init(dev_priv);

	if (resume && dev_priv->csr.dmc_payload)
		intel_csr_load_program(dev_priv);
}
4149
/*
 * Power down the ICL display core in the reverse order of
 * icl_display_core_init(): DC state, DBUF, CDCLK, PG1, combo PHYs.
 */
void icl_display_core_uninit(struct drm_i915_private *dev_priv)
{
	struct i915_power_domains *power_domains = &dev_priv->power_domains;
	struct i915_power_well *well;

	gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);

	/* 1. Disable both DBUF slices. */
	icl_dbuf_disable(dev_priv);

	/* 2. Disable CD clock. */
	intel_cdclk_uninit(dev_priv);

	/* 3. Disable power well 1 (PG1). */
	mutex_lock(&power_domains->lock);
	well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
	intel_power_well_disable(dev_priv, well);
	mutex_unlock(&power_domains->lock);

	/* 4. Uninitialize the combo phys. */
	intel_combo_phy_uninit(dev_priv);
}
4178
/*
 * Reconstruct the software copy of DISPLAY_PHY_CONTROL from the current
 * hardware state of the CHV DPIO common-lane power wells.
 *
 * The driver keeps dev_priv->chv_phy_control as the authoritative value and
 * rewrites the full register from it; here the initial value is rebuilt so
 * the first write doesn't clobber lanes the BIOS already configured.
 * NOTE(review): the rationale for shadowing rather than read-modify-write
 * is presumably a hardware erratum on reads of this register — confirm.
 */
static void chv_phy_control_init(struct drm_i915_private *dev_priv)
{
	struct i915_power_well *cmn_bc =
		lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
	struct i915_power_well *cmn_d =
		lookup_power_well(dev_priv, CHV_DISP_PW_DPIO_CMN_D);

	/* Baseline: LDO sequencing delays and deep-PSR mode on all channels. */
	dev_priv->chv_phy_control =
		PHY_LDO_SEQ_DELAY(PHY_LDO_DELAY_600NS, DPIO_PHY0) |
		PHY_LDO_SEQ_DELAY(PHY_LDO_DELAY_600NS, DPIO_PHY1) |
		PHY_CH_POWER_MODE(PHY_CH_DEEP_PSR, DPIO_PHY0, DPIO_CH0) |
		PHY_CH_POWER_MODE(PHY_CH_DEEP_PSR, DPIO_PHY0, DPIO_CH1) |
		PHY_CH_POWER_MODE(PHY_CH_DEEP_PSR, DPIO_PHY1, DPIO_CH0);

	/*
	 * If PHY0 (ports B/C) is powered, derive the per-channel lane
	 * override state from the DPLL port-ready bits: all four lanes
	 * ready (0xf) means no override needed; anything else enables the
	 * power-down override with the partial lane mask.
	 */
	if (cmn_bc->desc->ops->is_enabled(dev_priv, cmn_bc)) {
		u32 status = I915_READ(DPLL(PIPE_A));
		unsigned int mask;

		mask = status & DPLL_PORTB_READY_MASK;
		if (mask == 0xf)
			mask = 0x0;
		else
			dev_priv->chv_phy_control |=
				PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH0);

		dev_priv->chv_phy_control |=
			PHY_CH_POWER_DOWN_OVRD(mask, DPIO_PHY0, DPIO_CH0);

		mask = (status & DPLL_PORTC_READY_MASK) >> 4;
		if (mask == 0xf)
			mask = 0x0;
		else
			dev_priv->chv_phy_control |=
				PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH1);

		dev_priv->chv_phy_control |=
			PHY_CH_POWER_DOWN_OVRD(mask, DPIO_PHY0, DPIO_CH1);

		dev_priv->chv_phy_control |= PHY_COM_LANE_RESET_DEASSERT(DPIO_PHY0);

		dev_priv->chv_phy_assert[DPIO_PHY0] = false;
	} else {
		dev_priv->chv_phy_assert[DPIO_PHY0] = true;
	}

	/* Same derivation for PHY1 (port D), which has a single channel. */
	if (cmn_d->desc->ops->is_enabled(dev_priv, cmn_d)) {
		u32 status = I915_READ(DPIO_PHY_STATUS);
		unsigned int mask;

		mask = status & DPLL_PORTD_READY_MASK;

		if (mask == 0xf)
			mask = 0x0;
		else
			dev_priv->chv_phy_control |=
				PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY1, DPIO_CH0);

		dev_priv->chv_phy_control |=
			PHY_CH_POWER_DOWN_OVRD(mask, DPIO_PHY1, DPIO_CH0);

		dev_priv->chv_phy_control |= PHY_COM_LANE_RESET_DEASSERT(DPIO_PHY1);

		dev_priv->chv_phy_assert[DPIO_PHY1] = false;
	} else {
		dev_priv->chv_phy_assert[DPIO_PHY1] = true;
	}

	I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);

	DRM_DEBUG_KMS("Initial PHY_CONTROL=0x%08x\n",
		      dev_priv->chv_phy_control);
}
4265
/*
 * VLV common-lane workaround: if the BIOS left the PHY in an inconsistent
 * state (common-lane well, DISP2D well and DPIO common reset not all
 * asserted together), toggle the display PHY side reset by enabling DISP2D
 * and dropping the common-lane well.
 * NOTE(review): the enable/disable pairing looks asymmetric by design —
 * the subsequent normal power-well handling presumably re-enables the
 * common lanes; confirm against the VLV DPIO programming notes.
 */
static void vlv_cmnlane_wa(struct drm_i915_private *dev_priv)
{
	struct i915_power_well *cmn =
		lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
	struct i915_power_well *disp2d =
		lookup_power_well(dev_priv, VLV_DISP_PW_DISP2D);

	/* Nothing to do if the PHY is already in a consistent state. */
	if (cmn->desc->ops->is_enabled(dev_priv, cmn) &&
	    disp2d->desc->ops->is_enabled(dev_priv, disp2d) &&
	    I915_READ(DPIO_CTL) & DPIO_CMNRST)
		return;

	DRM_DEBUG_KMS("toggling display PHY side reset\n");

	/* cmnlane needs DISP2D on to be able to latch the reset. */
	disp2d->desc->ops->enable(dev_priv, disp2d);

	/* Dropping the common-lane well asserts the PHY side reset. */
	cmn->desc->ops->disable(dev_priv, cmn);
}
4293
4294static bool vlv_punit_is_power_gated(struct drm_i915_private *dev_priv, u32 reg0)
4295{
4296 bool ret;
4297
4298 vlv_punit_get(dev_priv);
4299 ret = (vlv_punit_read(dev_priv, reg0) & SSPM0_SSC_MASK) == SSPM0_SSC_PWR_GATE;
4300 vlv_punit_put(dev_priv);
4301
4302 return ret;
4303}
4304
/* Warn if the VED (video decode engine) subsystem is not power gated. */
static void assert_ved_power_gated(struct drm_i915_private *dev_priv)
{
	WARN(!vlv_punit_is_power_gated(dev_priv, PUNIT_REG_VEDSSPM0),
	     "VED not power gated\n");
}
4310
/*
 * Warn if the ISP subsystem is not power gated. Skipped when an ISP PCI
 * device is present — presumably another driver owns it then; the listed
 * IDs (0x0f38, 0x22b8) are the BYT/CHT ISP devices — TODO confirm.
 */
static void assert_isp_power_gated(struct drm_i915_private *dev_priv)
{
	static const struct pci_device_id isp_ids[] = {
		{PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x0f38)},
		{PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x22b8)},
		{}
	};

	WARN(!pci_dev_present(isp_ids) &&
	     !vlv_punit_is_power_gated(dev_priv, PUNIT_REG_ISPSSPM0),
	     "ISP not power gated\n");
}
4323
4324static void intel_power_domains_verify_state(struct drm_i915_private *dev_priv);
4325
/**
 * intel_power_domains_init_hw - initialize hardware power domain state
 * @i915: i915 device instance
 * @resume: called from a resume path or not
 *
 * Initializes the hardware power domain state: runs the platform display
 * core init sequence (or the platform workarounds/asserts where no such
 * sequence exists), takes a long-lived INIT power domain reference that is
 * dropped in intel_power_domains_enable(), and synchronizes the software
 * tracking with the hardware state of all power wells.
 */
void intel_power_domains_init_hw(struct drm_i915_private *i915, bool resume)
{
	struct i915_power_domains *power_domains = &i915->power_domains;

	power_domains->initializing = true;

	if (INTEL_GEN(i915) >= 11) {
		icl_display_core_init(i915, resume);
	} else if (IS_CANNONLAKE(i915)) {
		cnl_display_core_init(i915, resume);
	} else if (IS_GEN9_BC(i915)) {
		skl_display_core_init(i915, resume);
	} else if (IS_GEN9_LP(i915)) {
		bxt_display_core_init(i915, resume);
	} else if (IS_CHERRYVIEW(i915)) {
		mutex_lock(&power_domains->lock);
		chv_phy_control_init(i915);
		mutex_unlock(&power_domains->lock);
		assert_isp_power_gated(i915);
	} else if (IS_VALLEYVIEW(i915)) {
		mutex_lock(&power_domains->lock);
		vlv_cmnlane_wa(i915);
		mutex_unlock(&power_domains->lock);
		assert_ved_power_gated(i915);
		assert_isp_power_gated(i915);
	} else if (IS_BROADWELL(i915) || IS_HASWELL(i915)) {
		hsw_assert_cdclk(i915);
		intel_pch_reset_handshake(i915, !HAS_PCH_NOP(i915));
	} else if (IS_IVYBRIDGE(i915)) {
		intel_pch_reset_handshake(i915, !HAS_PCH_NOP(i915));
	}

	/*
	 * Keep all power wells enabled during initialization and until the
	 * BIOS-enabled display HW is read out; this wakeref is released by
	 * intel_power_domains_enable().
	 */
	power_domains->wakeref =
		intel_display_power_get(i915, POWER_DOMAIN_INIT);

	/* Hold an extra INIT reference if power-well toggling is disabled. */
	if (!i915_modparams.disable_power_well)
		intel_display_power_get(i915, POWER_DOMAIN_INIT);
	intel_power_domains_sync_hw(i915);

	power_domains->initializing = false;
}
4390
/**
 * intel_power_domains_fini_hw - deinitialize hw power domain state
 * @i915: i915 device instance
 *
 * Releases the INIT references taken in intel_power_domains_init_hw(),
 * flushes pending async power-well work and verifies the final state.
 */
void intel_power_domains_fini_hw(struct drm_i915_private *i915)
{
	intel_wakeref_t wakeref __maybe_unused =
		fetch_and_zero(&i915->power_domains.wakeref);

	/* Drop the extra INIT reference from intel_power_domains_init_hw(). */
	if (!i915_modparams.disable_power_well)
		intel_display_power_put_unchecked(i915, POWER_DOMAIN_INIT);

	intel_display_power_flush_work_sync(i915);

	intel_power_domains_verify_state(i915);

	/* Release the long-lived INIT wakeref last. */
	intel_runtime_pm_put(&i915->runtime_pm, wakeref);
}
4418
/**
 * intel_power_domains_enable - enable toggling of display power wells
 * @i915: i915 device instance
 *
 * Drops the INIT power domain reference held since
 * intel_power_domains_init_hw() (or intel_power_domains_disable()),
 * allowing unused power wells to be turned off on demand.
 */
void intel_power_domains_enable(struct drm_i915_private *i915)
{
	intel_wakeref_t wakeref __maybe_unused =
		fetch_and_zero(&i915->power_domains.wakeref);

	intel_display_power_put(i915, POWER_DOMAIN_INIT, wakeref);
	intel_power_domains_verify_state(i915);
}
4439
/**
 * intel_power_domains_disable - disable toggling of display power wells
 * @i915: i915 device instance
 *
 * Takes an INIT power domain reference, keeping all power wells enabled
 * until intel_power_domains_enable() is called again.
 */
void intel_power_domains_disable(struct drm_i915_private *i915)
{
	struct i915_power_domains *power_domains = &i915->power_domains;

	/* A wakeref must not already be held here. */
	WARN_ON(power_domains->wakeref);
	power_domains->wakeref =
		intel_display_power_get(i915, POWER_DOMAIN_INIT);

	intel_power_domains_verify_state(i915);
}
4457
/**
 * intel_power_domains_suspend - suspend power domain state
 * @i915: i915 device instance
 * @suspend_mode: the target suspend mode (S0ix vs deeper states)
 *
 * Releases the INIT reference and, unless an S0ix shortcut applies, drops
 * the remaining power-well references and runs the platform display core
 * uninit sequence. Paired with intel_power_domains_resume().
 */
void intel_power_domains_suspend(struct drm_i915_private *i915,
				 enum i915_drm_suspend_mode suspend_mode)
{
	struct i915_power_domains *power_domains = &i915->power_domains;
	intel_wakeref_t wakeref __maybe_unused =
		fetch_and_zero(&power_domains->wakeref);

	intel_display_power_put(i915, POWER_DOMAIN_INIT, wakeref);

	/*
	 * For suspend-to-idle with DMC firmware loaded and no DC9 support,
	 * skip the display core uninit: the firmware stays active and
	 * handles the HW power-down itself.
	 * NOTE(review): condition reconstructed from the code — confirm
	 * the exact S0ix rationale against the platform docs.
	 */
	if (!(i915->csr.allowed_dc_mask & DC_STATE_EN_DC9) &&
	    suspend_mode == I915_DRM_SUSPEND_IDLE &&
	    i915->csr.dmc_payload) {
		intel_display_power_flush_work(i915);
		intel_power_domains_verify_state(i915);
		return;
	}

	/*
	 * Even with power-well toggling disabled, the extra INIT reference
	 * must be dropped so the display core can actually power down.
	 */
	if (!i915_modparams.disable_power_well)
		intel_display_power_put_unchecked(i915, POWER_DOMAIN_INIT);

	intel_display_power_flush_work(i915);
	intel_power_domains_verify_state(i915);

	if (INTEL_GEN(i915) >= 11)
		icl_display_core_uninit(i915);
	else if (IS_CANNONLAKE(i915))
		cnl_display_core_uninit(i915);
	else if (IS_GEN9_BC(i915))
		skl_display_core_uninit(i915);
	else if (IS_GEN9_LP(i915))
		bxt_display_core_uninit(i915);

	power_domains->display_core_suspended = true;
}
4514
/**
 * intel_power_domains_resume - resume power domain state
 * @i915: i915 device instance
 *
 * Undoes intel_power_domains_suspend(): re-runs the full HW init when the
 * display core was powered down, otherwise just re-acquires the INIT
 * reference that suspend released.
 */
void intel_power_domains_resume(struct drm_i915_private *i915)
{
	struct i915_power_domains *power_domains = &i915->power_domains;

	if (power_domains->display_core_suspended) {
		intel_power_domains_init_hw(i915, true);
		power_domains->display_core_suspended = false;
	} else {
		WARN_ON(power_domains->wakeref);
		power_domains->wakeref =
			intel_display_power_get(i915, POWER_DOMAIN_INIT);
	}

	intel_power_domains_verify_state(i915);
}
4540
4541#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
4542
/*
 * Debug helper: dump every power well's refcount followed by the use count
 * of each domain belonging to it.
 */
static void intel_power_domains_dump_info(struct drm_i915_private *i915)
{
	struct i915_power_domains *power_domains = &i915->power_domains;
	struct i915_power_well *power_well;

	for_each_power_well(i915, power_well) {
		enum intel_display_power_domain domain;

		DRM_DEBUG_DRIVER("%-25s %d\n",
				 power_well->desc->name, power_well->count);

		for_each_power_domain(domain, power_well->desc->domains)
			DRM_DEBUG_DRIVER("  %-23s %d\n",
					 intel_display_power_domain_str(domain),
					 power_domains->domain_use_count[domain]);
	}
}
4560
/**
 * intel_power_domains_verify_state - verify the HW/SW state for all power wells
 * @i915: i915 device instance
 *
 * Verifies that each power well's refcount matches its HW enabled state and
 * the summed use counts of the domains it belongs to. Mismatches are logged,
 * and the full domain table is dumped once on the first inconsistency.
 */
static void intel_power_domains_verify_state(struct drm_i915_private *i915)
{
	struct i915_power_domains *power_domains = &i915->power_domains;
	struct i915_power_well *power_well;
	bool dump_domain_info;

	mutex_lock(&power_domains->lock);

	verify_async_put_domains_state(power_domains);

	dump_domain_info = false;
	for_each_power_well(i915, power_well) {
		enum intel_display_power_domain domain;
		int domains_count;
		bool enabled;

		/* A well must be on iff referenced or marked always-on. */
		enabled = power_well->desc->ops->is_enabled(i915, power_well);
		if ((power_well->count || power_well->desc->always_on) !=
		    enabled)
			DRM_ERROR("power well %s state mismatch (refcount %d/enabled %d)",
				  power_well->desc->name,
				  power_well->count, enabled);

		/* The well refcount must equal the sum of its domains'. */
		domains_count = 0;
		for_each_power_domain(domain, power_well->desc->domains)
			domains_count += power_domains->domain_use_count[domain];

		if (power_well->count != domains_count) {
			DRM_ERROR("power well %s refcount/domain refcount mismatch "
				  "(refcount %d/domains refcount %d)\n",
				  power_well->desc->name, power_well->count,
				  domains_count);
			dump_domain_info = true;
		}
	}

	if (dump_domain_info) {
		static bool dumped;

		/* Dump the table only once per boot to avoid log spam. */
		if (!dumped) {
			intel_power_domains_dump_info(i915);
			dumped = true;
		}
	}

	mutex_unlock(&power_domains->lock);
}
4618
#else

/* State verification is compiled out without CONFIG_DRM_I915_DEBUG_RUNTIME_PM. */
static void intel_power_domains_verify_state(struct drm_i915_private *i915)
{
}

#endif
4626