#include <linux/pm_runtime.h>
#include <linux/vgaarb.h>

#include <drm/drm_print.h>

#include "i915_drv.h"
#include "intel_drv.h"
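
/*
 * High-level summary: this file implements display power well / power
 * domain handling and runtime pm support for i915. Power wells are
 * reference counted and enabled on demand, and the device can be runtime
 * suspended once no wakeref is held. With
 * CONFIG_DRM_I915_DEBUG_RUNTIME_PM enabled, every runtime pm wakeref is
 * also tracked with a stack trace (via the stack depot) so that leaked
 * or unbalanced references can be reported.
 */
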
54#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
55
56#include <linux/sort.h>
57
58#define STACKDEPTH 8
59
60static noinline depot_stack_handle_t __save_depot_stack(void)
61{
62 unsigned long entries[STACKDEPTH];
63 struct stack_trace trace = {
64 .entries = entries,
65 .max_entries = ARRAY_SIZE(entries),
66 .skip = 1,
67 };
68
69 save_stack_trace(&trace);
70 if (trace.nr_entries &&
71 trace.entries[trace.nr_entries - 1] == ULONG_MAX)
72 trace.nr_entries--;
73
74 return depot_save_stack(&trace, GFP_NOWAIT | __GFP_NOWARN);
75}
76
77static void __print_depot_stack(depot_stack_handle_t stack,
78 char *buf, int sz, int indent)
79{
80 unsigned long entries[STACKDEPTH];
81 struct stack_trace trace = {
82 .entries = entries,
83 .max_entries = ARRAY_SIZE(entries),
84 };
85
86 depot_fetch_stack(stack, &trace);
87 snprint_stack_trace(buf, sz, &trace, indent);
88}
89
90static void init_intel_runtime_pm_wakeref(struct drm_i915_private *i915)
91{
92 struct i915_runtime_pm *rpm = &i915->runtime_pm;
93
94 spin_lock_init(&rpm->debug.lock);
95}
96
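/*
 * Wakeref tracking helpers (debug build only):
 *
 *  - track_intel_runtime_pm_wakeref() bumps the wakeref count and, when
 *    runtime pm is available, saves the caller's stack in the stack depot,
 *    returning the depot handle as the wakeref cookie (-1 when no stack
 *    could be captured).
 *  - cancel_intel_runtime_pm_wakeref() removes one previously recorded
 *    cookie without touching the wakeref count.
 *  - untrack_intel_runtime_pm_wakeref() drops the wakeref count and, on
 *    the final put, prints the stacks of any wakerefs still on record.
 */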
97static noinline depot_stack_handle_t
98track_intel_runtime_pm_wakeref(struct drm_i915_private *i915)
99{
100 struct i915_runtime_pm *rpm = &i915->runtime_pm;
101 depot_stack_handle_t stack, *stacks;
102 unsigned long flags;
103
104 atomic_inc(&rpm->wakeref_count);
105 assert_rpm_wakelock_held(i915);
106
107 if (!HAS_RUNTIME_PM(i915))
108 return -1;
109
110 stack = __save_depot_stack();
111 if (!stack)
112 return -1;
113
114 spin_lock_irqsave(&rpm->debug.lock, flags);
115
116 if (!rpm->debug.count)
117 rpm->debug.last_acquire = stack;
118
119 stacks = krealloc(rpm->debug.owners,
120 (rpm->debug.count + 1) * sizeof(*stacks),
121 GFP_NOWAIT | __GFP_NOWARN);
122 if (stacks) {
123 stacks[rpm->debug.count++] = stack;
124 rpm->debug.owners = stacks;
125 } else {
126 stack = -1;
127 }
128
129 spin_unlock_irqrestore(&rpm->debug.lock, flags);
130
131 return stack;
132}
133
134static void cancel_intel_runtime_pm_wakeref(struct drm_i915_private *i915,
135 depot_stack_handle_t stack)
136{
137 struct i915_runtime_pm *rpm = &i915->runtime_pm;
138 unsigned long flags, n;
139 bool found = false;
140
141 if (unlikely(stack == -1))
142 return;
143
144 spin_lock_irqsave(&rpm->debug.lock, flags);
145 for (n = rpm->debug.count; n--; ) {
146 if (rpm->debug.owners[n] == stack) {
147 memmove(rpm->debug.owners + n,
148 rpm->debug.owners + n + 1,
149 (--rpm->debug.count - n) * sizeof(stack));
150 found = true;
151 break;
152 }
153 }
154 spin_unlock_irqrestore(&rpm->debug.lock, flags);
155
156 if (WARN(!found,
157 "Unmatched wakeref (tracking %lu), count %u\n",
158 rpm->debug.count, atomic_read(&rpm->wakeref_count))) {
159 char *buf;
160
161 buf = kmalloc(PAGE_SIZE, GFP_KERNEL);
162 if (!buf)
163 return;
164
165 __print_depot_stack(stack, buf, PAGE_SIZE, 2);
166 DRM_DEBUG_DRIVER("wakeref %x from\n%s", stack, buf);
167
168 stack = READ_ONCE(rpm->debug.last_release);
169 if (stack) {
170 __print_depot_stack(stack, buf, PAGE_SIZE, 2);
171 DRM_DEBUG_DRIVER("wakeref last released at\n%s", buf);
172 }
173
174 kfree(buf);
175 }
176}
177
178static int cmphandle(const void *_a, const void *_b)
179{
180 const depot_stack_handle_t * const a = _a, * const b = _b;
181
182 if (*a < *b)
183 return -1;
184 else if (*a > *b)
185 return 1;
186 else
187 return 0;
188}
189
190static void
191__print_intel_runtime_pm_wakeref(struct drm_printer *p,
192 const struct intel_runtime_pm_debug *dbg)
193{
194 unsigned long i;
195 char *buf;
196
197 buf = kmalloc(PAGE_SIZE, GFP_KERNEL);
198 if (!buf)
199 return;
200
201 if (dbg->last_acquire) {
202 __print_depot_stack(dbg->last_acquire, buf, PAGE_SIZE, 2);
203 drm_printf(p, "Wakeref last acquired:\n%s", buf);
204 }
205
206 if (dbg->last_release) {
207 __print_depot_stack(dbg->last_release, buf, PAGE_SIZE, 2);
208 drm_printf(p, "Wakeref last released:\n%s", buf);
209 }
210
211 drm_printf(p, "Wakeref count: %lu\n", dbg->count);
212
213 sort(dbg->owners, dbg->count, sizeof(*dbg->owners), cmphandle, NULL);
214
215 for (i = 0; i < dbg->count; i++) {
216 depot_stack_handle_t stack = dbg->owners[i];
217 unsigned long rep;
218
219 rep = 1;
220 while (i + 1 < dbg->count && dbg->owners[i + 1] == stack)
221 rep++, i++;
222 __print_depot_stack(stack, buf, PAGE_SIZE, 2);
223 drm_printf(p, "Wakeref x%lu taken at:\n%s", rep, buf);
224 }
225
226 kfree(buf);
227}
228
229static noinline void
230untrack_intel_runtime_pm_wakeref(struct drm_i915_private *i915)
231{
232 struct i915_runtime_pm *rpm = &i915->runtime_pm;
233 struct intel_runtime_pm_debug dbg = {};
234 struct drm_printer p;
235 unsigned long flags;
236
237 assert_rpm_wakelock_held(i915);
238 if (atomic_dec_and_lock_irqsave(&rpm->wakeref_count,
239 &rpm->debug.lock,
240 flags)) {
241 dbg = rpm->debug;
242
243 rpm->debug.owners = NULL;
244 rpm->debug.count = 0;
245 rpm->debug.last_release = __save_depot_stack();
246
247 spin_unlock_irqrestore(&rpm->debug.lock, flags);
248 }
249 if (!dbg.count)
250 return;
251
252 p = drm_debug_printer("i915");
253 __print_intel_runtime_pm_wakeref(&p, &dbg);
254
255 kfree(dbg.owners);
256}
257
258void print_intel_runtime_pm_wakeref(struct drm_i915_private *i915,
259 struct drm_printer *p)
260{
261 struct intel_runtime_pm_debug dbg = {};
262
263 do {
264 struct i915_runtime_pm *rpm = &i915->runtime_pm;
265 unsigned long alloc = dbg.count;
266 depot_stack_handle_t *s;
267
268 spin_lock_irq(&rpm->debug.lock);
269 dbg.count = rpm->debug.count;
270 if (dbg.count <= alloc) {
271 memcpy(dbg.owners,
272 rpm->debug.owners,
273 dbg.count * sizeof(*s));
274 }
275 dbg.last_acquire = rpm->debug.last_acquire;
276 dbg.last_release = rpm->debug.last_release;
277 spin_unlock_irq(&rpm->debug.lock);
278 if (dbg.count <= alloc)
279 break;
280
281 s = krealloc(dbg.owners, dbg.count * sizeof(*s), GFP_KERNEL);
282 if (!s)
283 goto out;
284
285 dbg.owners = s;
286 } while (1);
287
288 __print_intel_runtime_pm_wakeref(p, &dbg);
289
290out:
291 kfree(dbg.owners);
292}
293
294#else
295
296static void init_intel_runtime_pm_wakeref(struct drm_i915_private *i915)
297{
298}
299
300static depot_stack_handle_t
301track_intel_runtime_pm_wakeref(struct drm_i915_private *i915)
302{
303 atomic_inc(&i915->runtime_pm.wakeref_count);
304 assert_rpm_wakelock_held(i915);
305 return -1;
306}
307
308static void untrack_intel_runtime_pm_wakeref(struct drm_i915_private *i915)
309{
310 assert_rpm_wakelock_held(i915);
311 atomic_dec(&i915->runtime_pm.wakeref_count);
312}
313
314#endif
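
/*
 * Typical calling pattern for the helpers above (sketch; the actual
 * callers are the runtime pm get/put functions of this file):
 *
 *	wakeref = intel_runtime_pm_get(i915);
 *	... access the hardware ...
 *	intel_runtime_pm_put(i915, wakeref);
 *
 * With CONFIG_DRM_I915_DEBUG_RUNTIME_PM the wakeref identifies the
 * acquiring call site; without it the cookie is simply -1.
 */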
315
316bool intel_display_power_well_is_enabled(struct drm_i915_private *dev_priv,
317 enum i915_power_well_id power_well_id);
318
319const char *
320intel_display_power_domain_str(enum intel_display_power_domain domain)
321{
322 switch (domain) {
323 case POWER_DOMAIN_PIPE_A:
324 return "PIPE_A";
325 case POWER_DOMAIN_PIPE_B:
326 return "PIPE_B";
327 case POWER_DOMAIN_PIPE_C:
328 return "PIPE_C";
329 case POWER_DOMAIN_PIPE_A_PANEL_FITTER:
330 return "PIPE_A_PANEL_FITTER";
331 case POWER_DOMAIN_PIPE_B_PANEL_FITTER:
332 return "PIPE_B_PANEL_FITTER";
333 case POWER_DOMAIN_PIPE_C_PANEL_FITTER:
334 return "PIPE_C_PANEL_FITTER";
335 case POWER_DOMAIN_TRANSCODER_A:
336 return "TRANSCODER_A";
337 case POWER_DOMAIN_TRANSCODER_B:
338 return "TRANSCODER_B";
339 case POWER_DOMAIN_TRANSCODER_C:
340 return "TRANSCODER_C";
341 case POWER_DOMAIN_TRANSCODER_EDP:
342 return "TRANSCODER_EDP";
343 case POWER_DOMAIN_TRANSCODER_EDP_VDSC:
344 return "TRANSCODER_EDP_VDSC";
345 case POWER_DOMAIN_TRANSCODER_DSI_A:
346 return "TRANSCODER_DSI_A";
347 case POWER_DOMAIN_TRANSCODER_DSI_C:
348 return "TRANSCODER_DSI_C";
349 case POWER_DOMAIN_PORT_DDI_A_LANES:
350 return "PORT_DDI_A_LANES";
351 case POWER_DOMAIN_PORT_DDI_B_LANES:
352 return "PORT_DDI_B_LANES";
353 case POWER_DOMAIN_PORT_DDI_C_LANES:
354 return "PORT_DDI_C_LANES";
355 case POWER_DOMAIN_PORT_DDI_D_LANES:
356 return "PORT_DDI_D_LANES";
357 case POWER_DOMAIN_PORT_DDI_E_LANES:
358 return "PORT_DDI_E_LANES";
359 case POWER_DOMAIN_PORT_DDI_F_LANES:
360 return "PORT_DDI_F_LANES";
361 case POWER_DOMAIN_PORT_DDI_A_IO:
362 return "PORT_DDI_A_IO";
363 case POWER_DOMAIN_PORT_DDI_B_IO:
364 return "PORT_DDI_B_IO";
365 case POWER_DOMAIN_PORT_DDI_C_IO:
366 return "PORT_DDI_C_IO";
367 case POWER_DOMAIN_PORT_DDI_D_IO:
368 return "PORT_DDI_D_IO";
369 case POWER_DOMAIN_PORT_DDI_E_IO:
370 return "PORT_DDI_E_IO";
371 case POWER_DOMAIN_PORT_DDI_F_IO:
372 return "PORT_DDI_F_IO";
373 case POWER_DOMAIN_PORT_DSI:
374 return "PORT_DSI";
375 case POWER_DOMAIN_PORT_CRT:
376 return "PORT_CRT";
377 case POWER_DOMAIN_PORT_OTHER:
378 return "PORT_OTHER";
379 case POWER_DOMAIN_VGA:
380 return "VGA";
381 case POWER_DOMAIN_AUDIO:
382 return "AUDIO";
383 case POWER_DOMAIN_PLLS:
384 return "PLLS";
385 case POWER_DOMAIN_AUX_A:
386 return "AUX_A";
387 case POWER_DOMAIN_AUX_B:
388 return "AUX_B";
389 case POWER_DOMAIN_AUX_C:
390 return "AUX_C";
391 case POWER_DOMAIN_AUX_D:
392 return "AUX_D";
393 case POWER_DOMAIN_AUX_E:
394 return "AUX_E";
395 case POWER_DOMAIN_AUX_F:
396 return "AUX_F";
397 case POWER_DOMAIN_AUX_IO_A:
398 return "AUX_IO_A";
399 case POWER_DOMAIN_AUX_TBT1:
400 return "AUX_TBT1";
401 case POWER_DOMAIN_AUX_TBT2:
402 return "AUX_TBT2";
403 case POWER_DOMAIN_AUX_TBT3:
404 return "AUX_TBT3";
405 case POWER_DOMAIN_AUX_TBT4:
406 return "AUX_TBT4";
407 case POWER_DOMAIN_GMBUS:
408 return "GMBUS";
409 case POWER_DOMAIN_INIT:
410 return "INIT";
411 case POWER_DOMAIN_MODESET:
412 return "MODESET";
413 case POWER_DOMAIN_GT_IRQ:
414 return "GT_IRQ";
415 default:
416 MISSING_CASE(domain);
417 return "?";
418 }
419}
420
421static void intel_power_well_enable(struct drm_i915_private *dev_priv,
422 struct i915_power_well *power_well)
423{
424 DRM_DEBUG_KMS("enabling %s\n", power_well->desc->name);
425 power_well->desc->ops->enable(dev_priv, power_well);
426 power_well->hw_enabled = true;
427}
428
429static void intel_power_well_disable(struct drm_i915_private *dev_priv,
430 struct i915_power_well *power_well)
431{
432 DRM_DEBUG_KMS("disabling %s\n", power_well->desc->name);
433 power_well->hw_enabled = false;
434 power_well->desc->ops->disable(dev_priv, power_well);
435}
436
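/*
 * Power wells are reference counted: the hardware enable hook runs on the
 * first get and the disable hook on the last put.
 */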
437static void intel_power_well_get(struct drm_i915_private *dev_priv,
438 struct i915_power_well *power_well)
439{
440 if (!power_well->count++)
441 intel_power_well_enable(dev_priv, power_well);
442}
443
444static void intel_power_well_put(struct drm_i915_private *dev_priv,
445 struct i915_power_well *power_well)
446{
447 WARN(!power_well->count, "Use count on power well %s is already zero",
448 power_well->desc->name);
449
450 if (!--power_well->count)
451 intel_power_well_disable(dev_priv, power_well);
452}

/**
 * __intel_display_power_is_enabled - unlocked check for a power domain
 * @dev_priv: i915 device instance
 * @domain: power domain to check
 *
 * Unlocked variant of intel_display_power_is_enabled(): walks the power
 * wells backing @domain and reports whether all of them (and runtime pm)
 * are currently enabled. Callers must either hold the power domain lock
 * or be able to tolerate a racy answer.
 *
 * Returns:
 * True when the power domain is enabled, false otherwise.
 */
466bool __intel_display_power_is_enabled(struct drm_i915_private *dev_priv,
467 enum intel_display_power_domain domain)
468{
469 struct i915_power_well *power_well;
470 bool is_enabled;
471
472 if (dev_priv->runtime_pm.suspended)
473 return false;
474
475 is_enabled = true;
476
477 for_each_power_domain_well_reverse(dev_priv, power_well, BIT_ULL(domain)) {
478 if (power_well->desc->always_on)
479 continue;
480
481 if (!power_well->hw_enabled) {
482 is_enabled = false;
483 break;
484 }
485 }
486
487 return is_enabled;
488}

/**
 * intel_display_power_is_enabled - check for a power domain
 * @dev_priv: i915 device instance
 * @domain: power domain to check
 *
 * This function can be used to check the hw power domain state. It is mostly
 * used in hardware state readout functions. Everywhere else code should rely
 * upon explicit power domain reference counting to ensure that the hardware
 * block is powered up before accessing it.
 *
 * Callers must hold the relevant modesetting locks to ensure that concurrent
 * threads can't disable the power well while the caller tries to read a few
 * registers.
 *
 * Returns:
 * True when the power domain is enabled, false otherwise.
 */
507bool intel_display_power_is_enabled(struct drm_i915_private *dev_priv,
508 enum intel_display_power_domain domain)
509{
510 struct i915_power_domains *power_domains;
511 bool ret;
512
513 power_domains = &dev_priv->power_domains;
514
515 mutex_lock(&power_domains->lock);
516 ret = __intel_display_power_is_enabled(dev_priv, domain);
517 mutex_unlock(&power_domains->lock);
518
519 return ret;
520}

/*
 * Starting with Haswell, display power wells can be powered down when not
 * in use. Four request registers (BIOS, driver, KVMR and debug) can each
 * request a well to be enabled; the well is only powered down once none
 * of them is requesting it.
 */
528static void hsw_power_well_post_enable(struct drm_i915_private *dev_priv,
529 u8 irq_pipe_mask, bool has_vga)
530{
531 struct pci_dev *pdev = dev_priv->drm.pdev;
532
533
534
535
536
537
538
539
540
541
542
543 if (has_vga) {
544 vga_get_uninterruptible(pdev, VGA_RSRC_LEGACY_IO);
545 outb(inb(VGA_MSR_READ), VGA_MSR_WRITE);
546 vga_put(pdev, VGA_RSRC_LEGACY_IO);
547 }
548
549 if (irq_pipe_mask)
550 gen8_irq_power_well_post_enable(dev_priv, irq_pipe_mask);
551}
552
553static void hsw_power_well_pre_disable(struct drm_i915_private *dev_priv,
554 u8 irq_pipe_mask)
555{
556 if (irq_pipe_mask)
557 gen8_irq_power_well_pre_disable(dev_priv, irq_pipe_mask);
558}
559
560
561static void hsw_wait_for_power_well_enable(struct drm_i915_private *dev_priv,
562 struct i915_power_well *power_well)
563{
564 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
565 int pw_idx = power_well->desc->hsw.idx;
566
567
568 WARN_ON(intel_wait_for_register(dev_priv,
569 regs->driver,
570 HSW_PWR_WELL_CTL_STATE(pw_idx),
571 HSW_PWR_WELL_CTL_STATE(pw_idx),
572 1));
573}
574
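/*
 * Returns a bitmask of the control registers that currently request this
 * power well: bit 0 = BIOS, bit 1 = driver, bit 2 = KVMR (if present),
 * bit 3 = debug.
 */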
575static u32 hsw_power_well_requesters(struct drm_i915_private *dev_priv,
576 const struct i915_power_well_regs *regs,
577 int pw_idx)
578{
579 u32 req_mask = HSW_PWR_WELL_CTL_REQ(pw_idx);
580 u32 ret;
581
582 ret = I915_READ(regs->bios) & req_mask ? 1 : 0;
583 ret |= I915_READ(regs->driver) & req_mask ? 2 : 0;
584 if (regs->kvmr.reg)
585 ret |= I915_READ(regs->kvmr) & req_mask ? 4 : 0;
586 ret |= I915_READ(regs->debug) & req_mask ? 8 : 0;
587
588 return ret;
589}
590
591static void hsw_wait_for_power_well_disable(struct drm_i915_private *dev_priv,
592 struct i915_power_well *power_well)
593{
594 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
595 int pw_idx = power_well->desc->hsw.idx;
596 bool disabled;
597 u32 reqs;
598
599
600
601
602
603
604
605
606
607
608 wait_for((disabled = !(I915_READ(regs->driver) &
609 HSW_PWR_WELL_CTL_STATE(pw_idx))) ||
610 (reqs = hsw_power_well_requesters(dev_priv, regs, pw_idx)), 1);
611 if (disabled)
612 return;
613
614 DRM_DEBUG_KMS("%s forced on (bios:%d driver:%d kvmr:%d debug:%d)\n",
615 power_well->desc->name,
616 !!(reqs & 1), !!(reqs & 2), !!(reqs & 4), !!(reqs & 8));
617}
618
619static void gen9_wait_for_power_well_fuses(struct drm_i915_private *dev_priv,
620 enum skl_power_gate pg)
621{
622
623 WARN_ON(intel_wait_for_register(dev_priv, SKL_FUSE_STATUS,
624 SKL_FUSE_PG_DIST_STATUS(pg),
625 SKL_FUSE_PG_DIST_STATUS(pg), 1));
626}
627
628static void hsw_power_well_enable(struct drm_i915_private *dev_priv,
629 struct i915_power_well *power_well)
630{
631 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
632 int pw_idx = power_well->desc->hsw.idx;
633 bool wait_fuses = power_well->desc->hsw.has_fuses;
634 enum skl_power_gate uninitialized_var(pg);
635 u32 val;
636
637 if (wait_fuses) {
638 pg = INTEL_GEN(dev_priv) >= 11 ? ICL_PW_CTL_IDX_TO_PG(pw_idx) :
639 SKL_PW_CTL_IDX_TO_PG(pw_idx);
640
641
642
643
644
645
646
647 if (pg == SKL_PG1)
648 gen9_wait_for_power_well_fuses(dev_priv, SKL_PG0);
649 }
650
651 val = I915_READ(regs->driver);
652 I915_WRITE(regs->driver, val | HSW_PWR_WELL_CTL_REQ(pw_idx));
653 hsw_wait_for_power_well_enable(dev_priv, power_well);
654
655
656 if (IS_CANNONLAKE(dev_priv) &&
657 pw_idx >= GLK_PW_CTL_IDX_AUX_B &&
658 pw_idx <= CNL_PW_CTL_IDX_AUX_F) {
659 val = I915_READ(CNL_AUX_ANAOVRD1(pw_idx));
660 val |= CNL_AUX_ANAOVRD1_ENABLE | CNL_AUX_ANAOVRD1_LDO_BYPASS;
661 I915_WRITE(CNL_AUX_ANAOVRD1(pw_idx), val);
662 }
663
664 if (wait_fuses)
665 gen9_wait_for_power_well_fuses(dev_priv, pg);
666
667 hsw_power_well_post_enable(dev_priv,
668 power_well->desc->hsw.irq_pipe_mask,
669 power_well->desc->hsw.has_vga);
670}
671
672static void hsw_power_well_disable(struct drm_i915_private *dev_priv,
673 struct i915_power_well *power_well)
674{
675 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
676 int pw_idx = power_well->desc->hsw.idx;
677 u32 val;
678
679 hsw_power_well_pre_disable(dev_priv,
680 power_well->desc->hsw.irq_pipe_mask);
681
682 val = I915_READ(regs->driver);
683 I915_WRITE(regs->driver, val & ~HSW_PWR_WELL_CTL_REQ(pw_idx));
684 hsw_wait_for_power_well_disable(dev_priv, power_well);
685}
686
687#define ICL_AUX_PW_TO_PORT(pw_idx) ((pw_idx) - ICL_PW_CTL_IDX_AUX_A)
688
689static void
690icl_combo_phy_aux_power_well_enable(struct drm_i915_private *dev_priv,
691 struct i915_power_well *power_well)
692{
693 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
694 int pw_idx = power_well->desc->hsw.idx;
695 enum port port = ICL_AUX_PW_TO_PORT(pw_idx);
696 u32 val;
697
698 val = I915_READ(regs->driver);
699 I915_WRITE(regs->driver, val | HSW_PWR_WELL_CTL_REQ(pw_idx));
700
701 val = I915_READ(ICL_PORT_CL_DW12(port));
702 I915_WRITE(ICL_PORT_CL_DW12(port), val | ICL_LANE_ENABLE_AUX);
703
704 hsw_wait_for_power_well_enable(dev_priv, power_well);
705
706
707 if (IS_ICELAKE(dev_priv) &&
708 pw_idx >= ICL_PW_CTL_IDX_AUX_A && pw_idx <= ICL_PW_CTL_IDX_AUX_B &&
709 !intel_bios_is_port_edp(dev_priv, port)) {
710 val = I915_READ(ICL_AUX_ANAOVRD1(pw_idx));
711 val |= ICL_AUX_ANAOVRD1_ENABLE | ICL_AUX_ANAOVRD1_LDO_BYPASS;
712 I915_WRITE(ICL_AUX_ANAOVRD1(pw_idx), val);
713 }
714}
715
716static void
717icl_combo_phy_aux_power_well_disable(struct drm_i915_private *dev_priv,
718 struct i915_power_well *power_well)
719{
720 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
721 int pw_idx = power_well->desc->hsw.idx;
722 enum port port = ICL_AUX_PW_TO_PORT(pw_idx);
723 u32 val;
724
725 val = I915_READ(ICL_PORT_CL_DW12(port));
726 I915_WRITE(ICL_PORT_CL_DW12(port), val & ~ICL_LANE_ENABLE_AUX);
727
728 val = I915_READ(regs->driver);
729 I915_WRITE(regs->driver, val & ~HSW_PWR_WELL_CTL_REQ(pw_idx));
730
731 hsw_wait_for_power_well_disable(dev_priv, power_well);
732}
733
734#define ICL_AUX_PW_TO_CH(pw_idx) \
735 ((pw_idx) - ICL_PW_CTL_IDX_AUX_A + AUX_CH_A)
736
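/*
 * Type-C AUX power wells additionally select between the thunderbolt and
 * the non-thunderbolt AUX path via DP_AUX_CH_CTL_TBT_IO before the well
 * itself is enabled.
 */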
737static void
738icl_tc_phy_aux_power_well_enable(struct drm_i915_private *dev_priv,
739 struct i915_power_well *power_well)
740{
741 enum aux_ch aux_ch = ICL_AUX_PW_TO_CH(power_well->desc->hsw.idx);
742 u32 val;
743
744 val = I915_READ(DP_AUX_CH_CTL(aux_ch));
745 val &= ~DP_AUX_CH_CTL_TBT_IO;
746 if (power_well->desc->hsw.is_tc_tbt)
747 val |= DP_AUX_CH_CTL_TBT_IO;
748 I915_WRITE(DP_AUX_CH_CTL(aux_ch), val);
749
750 hsw_power_well_enable(dev_priv, power_well);
751}
752

/*
 * Only report a power well as enabled when the hardware state bit is set
 * and the driver has actually requested it to be enabled.
 */
758static bool hsw_power_well_enabled(struct drm_i915_private *dev_priv,
759 struct i915_power_well *power_well)
760{
761 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
762 enum i915_power_well_id id = power_well->desc->id;
763 int pw_idx = power_well->desc->hsw.idx;
764 u32 mask = HSW_PWR_WELL_CTL_REQ(pw_idx) |
765 HSW_PWR_WELL_CTL_STATE(pw_idx);
766 u32 val;
767
768 val = I915_READ(regs->driver);
769
770
771
772
773
774
775
776 if (IS_GEN(dev_priv, 9) && !IS_GEN9_LP(dev_priv) &&
777 (id == SKL_DISP_PW_1 || id == SKL_DISP_PW_MISC_IO))
778 val |= I915_READ(regs->bios);
779
780 return (val & mask) == mask;
781}
782
783static void assert_can_enable_dc9(struct drm_i915_private *dev_priv)
784{
785 WARN_ONCE((I915_READ(DC_STATE_EN) & DC_STATE_EN_DC9),
786 "DC9 already programmed to be enabled.\n");
787 WARN_ONCE(I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5,
788 "DC5 still not disabled to enable DC9.\n");
789 WARN_ONCE(I915_READ(HSW_PWR_WELL_CTL2) &
790 HSW_PWR_WELL_CTL_REQ(SKL_PW_CTL_IDX_PW_2),
791 "Power well 2 on.\n");
792 WARN_ONCE(intel_irqs_enabled(dev_priv),
793 "Interrupts not disabled yet.\n");
794
795
796
797
798
799
800
801
802}
803
804static void assert_can_disable_dc9(struct drm_i915_private *dev_priv)
805{
806 WARN_ONCE(intel_irqs_enabled(dev_priv),
807 "Interrupts not disabled yet.\n");
808 WARN_ONCE(I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5,
809 "DC5 still not disabled.\n");
810
811
812
813
814
815
816
817
818}
819
820static void gen9_write_dc_state(struct drm_i915_private *dev_priv,
821 u32 state)
822{
823 int rewrites = 0;
824 int rereads = 0;
825 u32 v;
826
827 I915_WRITE(DC_STATE_EN, state);
828
829
830
831
832
833
834 do {
835 v = I915_READ(DC_STATE_EN);
836
837 if (v != state) {
838 I915_WRITE(DC_STATE_EN, state);
839 rewrites++;
840 rereads = 0;
841 } else if (rereads++ > 5) {
842 break;
843 }
844
845 } while (rewrites < 100);
846
847 if (v != state)
848 DRM_ERROR("Writing dc state to 0x%x failed, now 0x%x\n",
849 state, v);
850
851
852 if (rewrites > 1)
853 DRM_DEBUG_KMS("Rewrote dc state to 0x%x %d times\n",
854 state, rewrites);
855}
856
857static u32 gen9_dc_mask(struct drm_i915_private *dev_priv)
858{
859 u32 mask;
860
861 mask = DC_STATE_EN_UPTO_DC5;
862 if (INTEL_GEN(dev_priv) >= 11)
863 mask |= DC_STATE_EN_UPTO_DC6 | DC_STATE_EN_DC9;
864 else if (IS_GEN9_LP(dev_priv))
865 mask |= DC_STATE_EN_DC9;
866 else
867 mask |= DC_STATE_EN_UPTO_DC6;
868
869 return mask;
870}
871
872void gen9_sanitize_dc_state(struct drm_i915_private *dev_priv)
873{
874 u32 val;
875
876 val = I915_READ(DC_STATE_EN) & gen9_dc_mask(dev_priv);
877
878 DRM_DEBUG_KMS("Resetting DC state tracking from %02x to %02x\n",
879 dev_priv->csr.dc_state, val);
880 dev_priv->csr.dc_state = val;
881}
882
/**
 * gen9_set_dc_state - set the target display C (DC) power state
 * @dev_priv: i915 device instance
 * @state: target DC power state
 * - DC_STATE_DISABLE
 * - DC_STATE_EN_UPTO_DC5
 * - DC_STATE_EN_UPTO_DC6
 * - DC_STATE_EN_DC9
 *
 * Signal to the DMC firmware/hardware the target DC power state passed in
 * @state. The requested state is first clamped to the states allowed by
 * dev_priv->csr.allowed_dc_mask, and a mismatch between the software
 * tracking in dev_priv->csr.dc_state and the value read back from
 * DC_STATE_EN is reported as an error.
 */
906static void gen9_set_dc_state(struct drm_i915_private *dev_priv, u32 state)
907{
908 u32 val;
909 u32 mask;
910
911 if (WARN_ON_ONCE(state & ~dev_priv->csr.allowed_dc_mask))
912 state &= dev_priv->csr.allowed_dc_mask;
913
914 val = I915_READ(DC_STATE_EN);
915 mask = gen9_dc_mask(dev_priv);
916 DRM_DEBUG_KMS("Setting DC state from %02x to %02x\n",
917 val & mask, state);
918
919
920 if ((val & mask) != dev_priv->csr.dc_state)
921 DRM_ERROR("DC state mismatch (0x%x -> 0x%x)\n",
922 dev_priv->csr.dc_state, val & mask);
923
924 val &= ~mask;
925 val |= state;
926
927 gen9_write_dc_state(dev_priv, val);
928
929 dev_priv->csr.dc_state = val & mask;
930}
931
932void bxt_enable_dc9(struct drm_i915_private *dev_priv)
933{
934 assert_can_enable_dc9(dev_priv);
935
936 DRM_DEBUG_KMS("Enabling DC9\n");
937
938
939
940
941
942 if (!HAS_PCH_SPLIT(dev_priv))
943 intel_power_sequencer_reset(dev_priv);
944 gen9_set_dc_state(dev_priv, DC_STATE_EN_DC9);
945}
946
947void bxt_disable_dc9(struct drm_i915_private *dev_priv)
948{
949 assert_can_disable_dc9(dev_priv);
950
951 DRM_DEBUG_KMS("Disabling DC9\n");
952
953 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
954
955 intel_pps_unlock_regs_wa(dev_priv);
956}
957
958static void assert_csr_loaded(struct drm_i915_private *dev_priv)
959{
960 WARN_ONCE(!I915_READ(CSR_PROGRAM(0)),
961 "CSR program storage start is NULL\n");
962 WARN_ONCE(!I915_READ(CSR_SSP_BASE), "CSR SSP Base Not fine\n");
963 WARN_ONCE(!I915_READ(CSR_HTP_SKL), "CSR HTP Not fine\n");
964}
965
966static struct i915_power_well *
967lookup_power_well(struct drm_i915_private *dev_priv,
968 enum i915_power_well_id power_well_id)
969{
970 struct i915_power_well *power_well;
971
972 for_each_power_well(dev_priv, power_well)
973 if (power_well->desc->id == power_well_id)
974 return power_well;
975
976
977
978
979
980
981
982
983 WARN(1, "Power well %d not defined for this platform\n", power_well_id);
984 return &dev_priv->power_domains.power_wells[0];
985}
986
987static void assert_can_enable_dc5(struct drm_i915_private *dev_priv)
988{
989 bool pg2_enabled = intel_display_power_well_is_enabled(dev_priv,
990 SKL_DISP_PW_2);
991
992 WARN_ONCE(pg2_enabled, "PG2 not disabled to enable DC5.\n");
993
994 WARN_ONCE((I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5),
995 "DC5 already programmed to be enabled.\n");
996 assert_rpm_wakelock_held(dev_priv);
997
998 assert_csr_loaded(dev_priv);
999}
1000
1001void gen9_enable_dc5(struct drm_i915_private *dev_priv)
1002{
1003 assert_can_enable_dc5(dev_priv);
1004
1005 DRM_DEBUG_KMS("Enabling DC5\n");
1006
1007
1008 if (IS_GEN9_BC(dev_priv))
1009 I915_WRITE(GEN8_CHICKEN_DCPR_1, I915_READ(GEN8_CHICKEN_DCPR_1) |
1010 SKL_SELECT_ALTERNATE_DC_EXIT);
1011
1012 gen9_set_dc_state(dev_priv, DC_STATE_EN_UPTO_DC5);
1013}
1014
1015static void assert_can_enable_dc6(struct drm_i915_private *dev_priv)
1016{
1017 WARN_ONCE(I915_READ(UTIL_PIN_CTL) & UTIL_PIN_ENABLE,
1018 "Backlight is not disabled.\n");
1019 WARN_ONCE((I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC6),
1020 "DC6 already programmed to be enabled.\n");
1021
1022 assert_csr_loaded(dev_priv);
1023}
1024
1025void skl_enable_dc6(struct drm_i915_private *dev_priv)
1026{
1027 assert_can_enable_dc6(dev_priv);
1028
1029 DRM_DEBUG_KMS("Enabling DC6\n");
1030
1031
1032 if (IS_GEN9_BC(dev_priv))
1033 I915_WRITE(GEN8_CHICKEN_DCPR_1, I915_READ(GEN8_CHICKEN_DCPR_1) |
1034 SKL_SELECT_ALTERNATE_DC_EXIT);
1035
1036 gen9_set_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
1037}
1038
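/*
 * Take over any power well request left behind by the BIOS: mirror it into
 * the driver request register and then clear the BIOS request, so that
 * further on/off decisions are purely under driver control.
 */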
1039static void hsw_power_well_sync_hw(struct drm_i915_private *dev_priv,
1040 struct i915_power_well *power_well)
1041{
1042 const struct i915_power_well_regs *regs = power_well->desc->hsw.regs;
1043 int pw_idx = power_well->desc->hsw.idx;
1044 u32 mask = HSW_PWR_WELL_CTL_REQ(pw_idx);
1045 u32 bios_req = I915_READ(regs->bios);
1046
1047
1048 if (bios_req & mask) {
1049 u32 drv_req = I915_READ(regs->driver);
1050
1051 if (!(drv_req & mask))
1052 I915_WRITE(regs->driver, drv_req | mask);
1053 I915_WRITE(regs->bios, bios_req & ~mask);
1054 }
1055}
1056
1057static void bxt_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
1058 struct i915_power_well *power_well)
1059{
1060 bxt_ddi_phy_init(dev_priv, power_well->desc->bxt.phy);
1061}
1062
1063static void bxt_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
1064 struct i915_power_well *power_well)
1065{
1066 bxt_ddi_phy_uninit(dev_priv, power_well->desc->bxt.phy);
1067}
1068
1069static bool bxt_dpio_cmn_power_well_enabled(struct drm_i915_private *dev_priv,
1070 struct i915_power_well *power_well)
1071{
1072 return bxt_ddi_phy_is_enabled(dev_priv, power_well->desc->bxt.phy);
1073}
1074
1075static void bxt_verify_ddi_phy_power_wells(struct drm_i915_private *dev_priv)
1076{
1077 struct i915_power_well *power_well;
1078
1079 power_well = lookup_power_well(dev_priv, BXT_DISP_PW_DPIO_CMN_A);
1080 if (power_well->count > 0)
1081 bxt_ddi_phy_verify_state(dev_priv, power_well->desc->bxt.phy);
1082
1083 power_well = lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
1084 if (power_well->count > 0)
1085 bxt_ddi_phy_verify_state(dev_priv, power_well->desc->bxt.phy);
1086
1087 if (IS_GEMINILAKE(dev_priv)) {
1088 power_well = lookup_power_well(dev_priv,
1089 GLK_DISP_PW_DPIO_CMN_C);
1090 if (power_well->count > 0)
1091 bxt_ddi_phy_verify_state(dev_priv,
1092 power_well->desc->bxt.phy);
1093 }
1094}
1095
1096static bool gen9_dc_off_power_well_enabled(struct drm_i915_private *dev_priv,
1097 struct i915_power_well *power_well)
1098{
1099 return (I915_READ(DC_STATE_EN) & DC_STATE_EN_UPTO_DC5_DC6_MASK) == 0;
1100}
1101
static void gen9_assert_dbuf_enabled(struct drm_i915_private *dev_priv)
{
	u32 tmp = I915_READ(DBUF_CTL);

	WARN((tmp & (DBUF_POWER_STATE | DBUF_POWER_REQUEST)) !=
	     (DBUF_POWER_STATE | DBUF_POWER_REQUEST),
	     "Unexpected DBuf power state (0x%08x)\n", tmp);
}
1110
1111static void gen9_dc_off_power_well_enable(struct drm_i915_private *dev_priv,
1112 struct i915_power_well *power_well)
1113{
1114 struct intel_cdclk_state cdclk_state = {};
1115
1116 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
1117
1118 dev_priv->display.get_cdclk(dev_priv, &cdclk_state);
1119
1120 WARN_ON(intel_cdclk_needs_modeset(&dev_priv->cdclk.hw, &cdclk_state));
1121
1122 gen9_assert_dbuf_enabled(dev_priv);
1123
1124 if (IS_GEN9_LP(dev_priv))
1125 bxt_verify_ddi_phy_power_wells(dev_priv);
1126
1127 if (INTEL_GEN(dev_priv) >= 11)
1128
1129
1130
1131
1132
1133 icl_combo_phys_init(dev_priv);
1134}
1135
1136static void gen9_dc_off_power_well_disable(struct drm_i915_private *dev_priv,
1137 struct i915_power_well *power_well)
1138{
1139 if (!dev_priv->csr.dmc_payload)
1140 return;
1141
1142 if (dev_priv->csr.allowed_dc_mask & DC_STATE_EN_UPTO_DC6)
1143 skl_enable_dc6(dev_priv);
1144 else if (dev_priv->csr.allowed_dc_mask & DC_STATE_EN_UPTO_DC5)
1145 gen9_enable_dc5(dev_priv);
1146}
1147
1148static void i9xx_power_well_sync_hw_noop(struct drm_i915_private *dev_priv,
1149 struct i915_power_well *power_well)
1150{
1151}
1152
1153static void i9xx_always_on_power_well_noop(struct drm_i915_private *dev_priv,
1154 struct i915_power_well *power_well)
1155{
1156}
1157
1158static bool i9xx_always_on_power_well_enabled(struct drm_i915_private *dev_priv,
1159 struct i915_power_well *power_well)
1160{
1161 return true;
1162}
1163
1164static void i830_pipes_power_well_enable(struct drm_i915_private *dev_priv,
1165 struct i915_power_well *power_well)
1166{
1167 if ((I915_READ(PIPECONF(PIPE_A)) & PIPECONF_ENABLE) == 0)
1168 i830_enable_pipe(dev_priv, PIPE_A);
1169 if ((I915_READ(PIPECONF(PIPE_B)) & PIPECONF_ENABLE) == 0)
1170 i830_enable_pipe(dev_priv, PIPE_B);
1171}
1172
1173static void i830_pipes_power_well_disable(struct drm_i915_private *dev_priv,
1174 struct i915_power_well *power_well)
1175{
1176 i830_disable_pipe(dev_priv, PIPE_B);
1177 i830_disable_pipe(dev_priv, PIPE_A);
1178}
1179
1180static bool i830_pipes_power_well_enabled(struct drm_i915_private *dev_priv,
1181 struct i915_power_well *power_well)
1182{
1183 return I915_READ(PIPECONF(PIPE_A)) & PIPECONF_ENABLE &&
1184 I915_READ(PIPECONF(PIPE_B)) & PIPECONF_ENABLE;
1185}
1186
1187static void i830_pipes_power_well_sync_hw(struct drm_i915_private *dev_priv,
1188 struct i915_power_well *power_well)
1189{
1190 if (power_well->count > 0)
1191 i830_pipes_power_well_enable(dev_priv, power_well);
1192 else
1193 i830_pipes_power_well_disable(dev_priv, power_well);
1194}
1195
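/*
 * Ask the punit to change a power well's state: update the well's field in
 * PUNIT_REG_PWRGT_CTRL and then poll PUNIT_REG_PWRGT_STATUS (for up to
 * 100 ms) until the status matches the requested state.
 */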
1196static void vlv_set_power_well(struct drm_i915_private *dev_priv,
1197 struct i915_power_well *power_well, bool enable)
1198{
1199 int pw_idx = power_well->desc->vlv.idx;
1200 u32 mask;
1201 u32 state;
1202 u32 ctrl;
1203
1204 mask = PUNIT_PWRGT_MASK(pw_idx);
1205 state = enable ? PUNIT_PWRGT_PWR_ON(pw_idx) :
1206 PUNIT_PWRGT_PWR_GATE(pw_idx);
1207
1208 mutex_lock(&dev_priv->pcu_lock);
1209
1210#define COND \
1211 ((vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask) == state)
1212
1213 if (COND)
1214 goto out;
1215
1216 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL);
1217 ctrl &= ~mask;
1218 ctrl |= state;
1219 vlv_punit_write(dev_priv, PUNIT_REG_PWRGT_CTRL, ctrl);
1220
1221 if (wait_for(COND, 100))
1222 DRM_ERROR("timeout setting power well state %08x (%08x)\n",
1223 state,
1224 vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL));
1225
1226#undef COND
1227
1228out:
1229 mutex_unlock(&dev_priv->pcu_lock);
1230}
1231
1232static void vlv_power_well_enable(struct drm_i915_private *dev_priv,
1233 struct i915_power_well *power_well)
1234{
1235 vlv_set_power_well(dev_priv, power_well, true);
1236}
1237
1238static void vlv_power_well_disable(struct drm_i915_private *dev_priv,
1239 struct i915_power_well *power_well)
1240{
1241 vlv_set_power_well(dev_priv, power_well, false);
1242}
1243
1244static bool vlv_power_well_enabled(struct drm_i915_private *dev_priv,
1245 struct i915_power_well *power_well)
1246{
1247 int pw_idx = power_well->desc->vlv.idx;
1248 bool enabled = false;
1249 u32 mask;
1250 u32 state;
1251 u32 ctrl;
1252
1253 mask = PUNIT_PWRGT_MASK(pw_idx);
1254 ctrl = PUNIT_PWRGT_PWR_ON(pw_idx);
1255
1256 mutex_lock(&dev_priv->pcu_lock);
1257
1258 state = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask;
1259
1260
1261
1262
1263 WARN_ON(state != PUNIT_PWRGT_PWR_ON(pw_idx) &&
1264 state != PUNIT_PWRGT_PWR_GATE(pw_idx));
1265 if (state == ctrl)
1266 enabled = true;
1267
1268
1269
1270
1271
1272 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL) & mask;
1273 WARN_ON(ctrl != state);
1274
1275 mutex_unlock(&dev_priv->pcu_lock);
1276
1277 return enabled;
1278}
1279
1280static void vlv_init_display_clock_gating(struct drm_i915_private *dev_priv)
1281{
1282 u32 val;
1283
1284
1285
1286
1287
1288
1289
1290 val = I915_READ(DSPCLK_GATE_D);
1291 val &= DPOUNIT_CLOCK_GATE_DISABLE;
1292 val |= VRHUNIT_CLOCK_GATE_DISABLE;
1293 I915_WRITE(DSPCLK_GATE_D, val);
1294
1295
1296
1297
1298 I915_WRITE(MI_ARB_VLV, MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE);
1299 I915_WRITE(CBR1_VLV, 0);
1300
1301 WARN_ON(dev_priv->rawclk_freq == 0);
1302
1303 I915_WRITE(RAWCLK_FREQ_VLV,
1304 DIV_ROUND_CLOSEST(dev_priv->rawclk_freq, 1000));
1305}
1306
1307static void vlv_display_power_well_init(struct drm_i915_private *dev_priv)
1308{
1309 struct intel_encoder *encoder;
1310 enum pipe pipe;
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320 for_each_pipe(dev_priv, pipe) {
1321 u32 val = I915_READ(DPLL(pipe));
1322
1323 val |= DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
1324 if (pipe != PIPE_A)
1325 val |= DPLL_INTEGRATED_CRI_CLK_VLV;
1326
1327 I915_WRITE(DPLL(pipe), val);
1328 }
1329
1330 vlv_init_display_clock_gating(dev_priv);
1331
1332 spin_lock_irq(&dev_priv->irq_lock);
1333 valleyview_enable_display_irqs(dev_priv);
1334 spin_unlock_irq(&dev_priv->irq_lock);
1335
1336
1337
1338
1339
1340 if (dev_priv->power_domains.initializing)
1341 return;
1342
1343 intel_hpd_init(dev_priv);
1344
1345
1346 for_each_intel_encoder(&dev_priv->drm, encoder) {
1347 if (encoder->type == INTEL_OUTPUT_ANALOG)
1348 intel_crt_reset(&encoder->base);
1349 }
1350
1351 i915_redisable_vga_power_on(dev_priv);
1352
1353 intel_pps_unlock_regs_wa(dev_priv);
1354}
1355
1356static void vlv_display_power_well_deinit(struct drm_i915_private *dev_priv)
1357{
1358 spin_lock_irq(&dev_priv->irq_lock);
1359 valleyview_disable_display_irqs(dev_priv);
1360 spin_unlock_irq(&dev_priv->irq_lock);
1361
1362
1363 synchronize_irq(dev_priv->drm.irq);
1364
1365 intel_power_sequencer_reset(dev_priv);
1366
1367
1368 if (!dev_priv->drm.dev->power.is_suspended)
1369 intel_hpd_poll_init(dev_priv);
1370}
1371
1372static void vlv_display_power_well_enable(struct drm_i915_private *dev_priv,
1373 struct i915_power_well *power_well)
1374{
1375 vlv_set_power_well(dev_priv, power_well, true);
1376
1377 vlv_display_power_well_init(dev_priv);
1378}
1379
1380static void vlv_display_power_well_disable(struct drm_i915_private *dev_priv,
1381 struct i915_power_well *power_well)
1382{
1383 vlv_display_power_well_deinit(dev_priv);
1384
1385 vlv_set_power_well(dev_priv, power_well, false);
1386}
1387
1388static void vlv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
1389 struct i915_power_well *power_well)
1390{
1391
1392 udelay(1);
1393
1394 vlv_set_power_well(dev_priv, power_well, true);
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407 I915_WRITE(DPIO_CTL, I915_READ(DPIO_CTL) | DPIO_CMNRST);
1408}
1409
1410static void vlv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
1411 struct i915_power_well *power_well)
1412{
1413 enum pipe pipe;
1414
1415 for_each_pipe(dev_priv, pipe)
1416 assert_pll_disabled(dev_priv, pipe);
1417
1418
1419 I915_WRITE(DPIO_CTL, I915_READ(DPIO_CTL) & ~DPIO_CMNRST);
1420
1421 vlv_set_power_well(dev_priv, power_well, false);
1422}
1423
1424#define POWER_DOMAIN_MASK (GENMASK_ULL(POWER_DOMAIN_NUM - 1, 0))
1425
1426#define BITS_SET(val, bits) (((val) & (bits)) == (bits))
1427
1428static void assert_chv_phy_status(struct drm_i915_private *dev_priv)
1429{
1430 struct i915_power_well *cmn_bc =
1431 lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
1432 struct i915_power_well *cmn_d =
1433 lookup_power_well(dev_priv, CHV_DISP_PW_DPIO_CMN_D);
1434 u32 phy_control = dev_priv->chv_phy_control;
1435 u32 phy_status = 0;
1436 u32 phy_status_mask = 0xffffffff;
1437
1438
1439
1440
1441
1442
1443
1444
1445 if (!dev_priv->chv_phy_assert[DPIO_PHY0])
1446 phy_status_mask &= ~(PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH0) |
1447 PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 0) |
1448 PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 1) |
1449 PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH1) |
1450 PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 0) |
1451 PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 1));
1452
1453 if (!dev_priv->chv_phy_assert[DPIO_PHY1])
1454 phy_status_mask &= ~(PHY_STATUS_CMN_LDO(DPIO_PHY1, DPIO_CH0) |
1455 PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 0) |
1456 PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 1));
1457
1458 if (cmn_bc->desc->ops->is_enabled(dev_priv, cmn_bc)) {
1459 phy_status |= PHY_POWERGOOD(DPIO_PHY0);
1460
1461
1462 if ((phy_control & PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH0)) == 0)
1463 phy_control |= PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH0);
1464
1465 if ((phy_control & PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH1)) == 0)
1466 phy_control |= PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH1);
1467
1468
1469 if (BITS_SET(phy_control,
1470 PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH0) |
1471 PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH1)))
1472 phy_status |= PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH0);
1473
1474
1475
1476
1477
1478
1479 if (BITS_SET(phy_control,
1480 PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY0, DPIO_CH1)) &&
1481 (I915_READ(DPLL(PIPE_B)) & DPLL_VCO_ENABLE) == 0)
1482 phy_status |= PHY_STATUS_CMN_LDO(DPIO_PHY0, DPIO_CH1);
1483
1484 if (BITS_SET(phy_control,
1485 PHY_CH_POWER_DOWN_OVRD(0x3, DPIO_PHY0, DPIO_CH0)))
1486 phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 0);
1487 if (BITS_SET(phy_control,
1488 PHY_CH_POWER_DOWN_OVRD(0xc, DPIO_PHY0, DPIO_CH0)))
1489 phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH0, 1);
1490
1491 if (BITS_SET(phy_control,
1492 PHY_CH_POWER_DOWN_OVRD(0x3, DPIO_PHY0, DPIO_CH1)))
1493 phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 0);
1494 if (BITS_SET(phy_control,
1495 PHY_CH_POWER_DOWN_OVRD(0xc, DPIO_PHY0, DPIO_CH1)))
1496 phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY0, DPIO_CH1, 1);
1497 }
1498
1499 if (cmn_d->desc->ops->is_enabled(dev_priv, cmn_d)) {
1500 phy_status |= PHY_POWERGOOD(DPIO_PHY1);
1501
1502
1503 if ((phy_control & PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY1, DPIO_CH0)) == 0)
1504 phy_control |= PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY1, DPIO_CH0);
1505
1506 if (BITS_SET(phy_control,
1507 PHY_CH_POWER_DOWN_OVRD(0xf, DPIO_PHY1, DPIO_CH0)))
1508 phy_status |= PHY_STATUS_CMN_LDO(DPIO_PHY1, DPIO_CH0);
1509
1510 if (BITS_SET(phy_control,
1511 PHY_CH_POWER_DOWN_OVRD(0x3, DPIO_PHY1, DPIO_CH0)))
1512 phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 0);
1513 if (BITS_SET(phy_control,
1514 PHY_CH_POWER_DOWN_OVRD(0xc, DPIO_PHY1, DPIO_CH0)))
1515 phy_status |= PHY_STATUS_SPLINE_LDO(DPIO_PHY1, DPIO_CH0, 1);
1516 }
1517
1518 phy_status &= phy_status_mask;
1519
1520
1521
1522
1523
1524 if (intel_wait_for_register(dev_priv,
1525 DISPLAY_PHY_STATUS,
1526 phy_status_mask,
1527 phy_status,
1528 10))
1529 DRM_ERROR("Unexpected PHY_STATUS 0x%08x, expected 0x%08x (PHY_CONTROL=0x%08x)\n",
1530 I915_READ(DISPLAY_PHY_STATUS) & phy_status_mask,
1531 phy_status, dev_priv->chv_phy_control);
1532}
1533
1534#undef BITS_SET
1535
1536static void chv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
1537 struct i915_power_well *power_well)
1538{
1539 enum dpio_phy phy;
1540 enum pipe pipe;
1541 u32 tmp;
1542
1543 WARN_ON_ONCE(power_well->desc->id != VLV_DISP_PW_DPIO_CMN_BC &&
1544 power_well->desc->id != CHV_DISP_PW_DPIO_CMN_D);
1545
1546 if (power_well->desc->id == VLV_DISP_PW_DPIO_CMN_BC) {
1547 pipe = PIPE_A;
1548 phy = DPIO_PHY0;
1549 } else {
1550 pipe = PIPE_C;
1551 phy = DPIO_PHY1;
1552 }
1553
1554
1555 udelay(1);
1556 vlv_set_power_well(dev_priv, power_well, true);
1557
1558
	if (intel_wait_for_register(dev_priv,
				    DISPLAY_PHY_STATUS,
				    PHY_POWERGOOD(phy),
				    PHY_POWERGOOD(phy),
				    1))
		DRM_ERROR("Display PHY %d did not power up\n", phy);
1565
1566 mutex_lock(&dev_priv->sb_lock);
1567
1568
1569 tmp = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW28);
1570 tmp |= DPIO_DYNPWRDOWNEN_CH0 | DPIO_CL1POWERDOWNEN |
1571 DPIO_SUS_CLK_CONFIG_GATE_CLKREQ;
1572 vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW28, tmp);
1573
1574 if (power_well->desc->id == VLV_DISP_PW_DPIO_CMN_BC) {
1575 tmp = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW6_CH1);
1576 tmp |= DPIO_DYNPWRDOWNEN_CH1;
1577 vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW6_CH1, tmp);
1578 } else {
1579
1580
1581
1582
1583
1584 tmp = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW30);
1585 tmp |= DPIO_CL2_LDOFUSE_PWRENB;
1586 vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW30, tmp);
1587 }
1588
1589 mutex_unlock(&dev_priv->sb_lock);
1590
1591 dev_priv->chv_phy_control |= PHY_COM_LANE_RESET_DEASSERT(phy);
1592 I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);
1593
1594 DRM_DEBUG_KMS("Enabled DPIO PHY%d (PHY_CONTROL=0x%08x)\n",
1595 phy, dev_priv->chv_phy_control);
1596
1597 assert_chv_phy_status(dev_priv);
1598}
1599
1600static void chv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
1601 struct i915_power_well *power_well)
1602{
1603 enum dpio_phy phy;
1604
1605 WARN_ON_ONCE(power_well->desc->id != VLV_DISP_PW_DPIO_CMN_BC &&
1606 power_well->desc->id != CHV_DISP_PW_DPIO_CMN_D);
1607
1608 if (power_well->desc->id == VLV_DISP_PW_DPIO_CMN_BC) {
1609 phy = DPIO_PHY0;
1610 assert_pll_disabled(dev_priv, PIPE_A);
1611 assert_pll_disabled(dev_priv, PIPE_B);
1612 } else {
1613 phy = DPIO_PHY1;
1614 assert_pll_disabled(dev_priv, PIPE_C);
1615 }
1616
1617 dev_priv->chv_phy_control &= ~PHY_COM_LANE_RESET_DEASSERT(phy);
1618 I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);
1619
1620 vlv_set_power_well(dev_priv, power_well, false);
1621
1622 DRM_DEBUG_KMS("Disabled DPIO PHY%d (PHY_CONTROL=0x%08x)\n",
1623 phy, dev_priv->chv_phy_control);
1624
1625
1626 dev_priv->chv_phy_assert[phy] = true;
1627
1628 assert_chv_phy_status(dev_priv);
1629}
1630
1631static void assert_chv_phy_powergate(struct drm_i915_private *dev_priv, enum dpio_phy phy,
1632 enum dpio_channel ch, bool override, unsigned int mask)
1633{
1634 enum pipe pipe = phy == DPIO_PHY0 ? PIPE_A : PIPE_C;
1635 u32 reg, val, expected, actual;
1636
1637
1638
1639
1640
1641
1642
1643
1644 if (!dev_priv->chv_phy_assert[phy])
1645 return;
1646
1647 if (ch == DPIO_CH0)
1648 reg = _CHV_CMN_DW0_CH0;
1649 else
1650 reg = _CHV_CMN_DW6_CH1;
1651
1652 mutex_lock(&dev_priv->sb_lock);
1653 val = vlv_dpio_read(dev_priv, pipe, reg);
1654 mutex_unlock(&dev_priv->sb_lock);
1655
1656
1657
1658
1659
1660
1661 if (!override || mask == 0xf) {
1662 expected = DPIO_ALLDL_POWERDOWN | DPIO_ANYDL_POWERDOWN;
1663
1664
1665
1666
1667
1668
1669
1670
1671
1672 if (ch == DPIO_CH1 && val == 0)
1673 expected = 0;
1674 } else if (mask != 0x0) {
1675 expected = DPIO_ANYDL_POWERDOWN;
1676 } else {
1677 expected = 0;
1678 }
1679
1680 if (ch == DPIO_CH0)
1681 actual = val >> DPIO_ANYDL_POWERDOWN_SHIFT_CH0;
1682 else
1683 actual = val >> DPIO_ANYDL_POWERDOWN_SHIFT_CH1;
1684 actual &= DPIO_ALLDL_POWERDOWN | DPIO_ANYDL_POWERDOWN;
1685
1686 WARN(actual != expected,
1687 "Unexpected DPIO lane power down: all %d, any %d. Expected: all %d, any %d. (0x%x = 0x%08x)\n",
1688 !!(actual & DPIO_ALLDL_POWERDOWN), !!(actual & DPIO_ANYDL_POWERDOWN),
1689 !!(expected & DPIO_ALLDL_POWERDOWN), !!(expected & DPIO_ANYDL_POWERDOWN),
1690 reg, val);
1691}
1692
1693bool chv_phy_powergate_ch(struct drm_i915_private *dev_priv, enum dpio_phy phy,
1694 enum dpio_channel ch, bool override)
1695{
1696 struct i915_power_domains *power_domains = &dev_priv->power_domains;
1697 bool was_override;
1698
1699 mutex_lock(&power_domains->lock);
1700
1701 was_override = dev_priv->chv_phy_control & PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1702
1703 if (override == was_override)
1704 goto out;
1705
1706 if (override)
1707 dev_priv->chv_phy_control |= PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1708 else
1709 dev_priv->chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1710
1711 I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);
1712
1713 DRM_DEBUG_KMS("Power gating DPIO PHY%d CH%d (DPIO_PHY_CONTROL=0x%08x)\n",
1714 phy, ch, dev_priv->chv_phy_control);
1715
1716 assert_chv_phy_status(dev_priv);
1717
1718out:
1719 mutex_unlock(&power_domains->lock);
1720
1721 return was_override;
1722}
1723
1724void chv_phy_powergate_lanes(struct intel_encoder *encoder,
1725 bool override, unsigned int mask)
1726{
1727 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1728 struct i915_power_domains *power_domains = &dev_priv->power_domains;
1729 enum dpio_phy phy = vlv_dport_to_phy(enc_to_dig_port(&encoder->base));
1730 enum dpio_channel ch = vlv_dport_to_channel(enc_to_dig_port(&encoder->base));
1731
1732 mutex_lock(&power_domains->lock);
1733
1734 dev_priv->chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD(0xf, phy, ch);
1735 dev_priv->chv_phy_control |= PHY_CH_POWER_DOWN_OVRD(mask, phy, ch);
1736
1737 if (override)
1738 dev_priv->chv_phy_control |= PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1739 else
1740 dev_priv->chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1741
1742 I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);
1743
1744 DRM_DEBUG_KMS("Power gating DPIO PHY%d CH%d lanes 0x%x (PHY_CONTROL=0x%08x)\n",
1745 phy, ch, mask, dev_priv->chv_phy_control);
1746
1747 assert_chv_phy_status(dev_priv);
1748
1749 assert_chv_phy_powergate(dev_priv, phy, ch, override, mask);
1750
1751 mutex_unlock(&power_domains->lock);
1752}
1753
1754static bool chv_pipe_power_well_enabled(struct drm_i915_private *dev_priv,
1755 struct i915_power_well *power_well)
1756{
1757 enum pipe pipe = PIPE_A;
1758 bool enabled;
1759 u32 state, ctrl;
1760
1761 mutex_lock(&dev_priv->pcu_lock);
1762
1763 state = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) & DP_SSS_MASK(pipe);
1764
1765
1766
1767
1768 WARN_ON(state != DP_SSS_PWR_ON(pipe) && state != DP_SSS_PWR_GATE(pipe));
1769 enabled = state == DP_SSS_PWR_ON(pipe);
1770
1771
1772
1773
1774
1775 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) & DP_SSC_MASK(pipe);
1776 WARN_ON(ctrl << 16 != state);
1777
1778 mutex_unlock(&dev_priv->pcu_lock);
1779
1780 return enabled;
1781}
1782
1783static void chv_set_pipe_power_well(struct drm_i915_private *dev_priv,
1784 struct i915_power_well *power_well,
1785 bool enable)
1786{
1787 enum pipe pipe = PIPE_A;
1788 u32 state;
1789 u32 ctrl;
1790
1791 state = enable ? DP_SSS_PWR_ON(pipe) : DP_SSS_PWR_GATE(pipe);
1792
1793 mutex_lock(&dev_priv->pcu_lock);
1794
1795#define COND \
1796 ((vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) & DP_SSS_MASK(pipe)) == state)
1797
1798 if (COND)
1799 goto out;
1800
1801 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ);
1802 ctrl &= ~DP_SSC_MASK(pipe);
1803 ctrl |= enable ? DP_SSC_PWR_ON(pipe) : DP_SSC_PWR_GATE(pipe);
1804 vlv_punit_write(dev_priv, PUNIT_REG_DSPFREQ, ctrl);
1805
1806 if (wait_for(COND, 100))
1807 DRM_ERROR("timeout setting power well state %08x (%08x)\n",
1808 state,
1809 vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ));
1810
1811#undef COND
1812
1813out:
1814 mutex_unlock(&dev_priv->pcu_lock);
1815}
1816
1817static void chv_pipe_power_well_enable(struct drm_i915_private *dev_priv,
1818 struct i915_power_well *power_well)
1819{
1820 chv_set_pipe_power_well(dev_priv, power_well, true);
1821
1822 vlv_display_power_well_init(dev_priv);
1823}
1824
1825static void chv_pipe_power_well_disable(struct drm_i915_private *dev_priv,
1826 struct i915_power_well *power_well)
1827{
1828 vlv_display_power_well_deinit(dev_priv);
1829
1830 chv_set_pipe_power_well(dev_priv, power_well, false);
1831}
1832
1833static void
1834__intel_display_power_get_domain(struct drm_i915_private *dev_priv,
1835 enum intel_display_power_domain domain)
1836{
1837 struct i915_power_domains *power_domains = &dev_priv->power_domains;
1838 struct i915_power_well *power_well;
1839
1840 for_each_power_domain_well(dev_priv, power_well, BIT_ULL(domain))
1841 intel_power_well_get(dev_priv, power_well);
1842
1843 power_domains->domain_use_count[domain]++;
1844}
1845
/**
 * intel_display_power_get - grab a power domain reference
 * @dev_priv: i915 device instance
 * @domain: power domain to reference
 *
 * This function grabs a power domain reference for @domain and ensures that
 * the power domain and all its parents are powered up. Therefore users should
 * only grab a reference to the innermost power domain they need.
 *
 * Returns a wakeref cookie which must be passed back to the matching
 * intel_display_power_put() (or released with
 * intel_display_power_put_unchecked()) when the reference is dropped.
 */
1858intel_wakeref_t intel_display_power_get(struct drm_i915_private *dev_priv,
1859 enum intel_display_power_domain domain)
1860{
1861 struct i915_power_domains *power_domains = &dev_priv->power_domains;
1862 intel_wakeref_t wakeref = intel_runtime_pm_get(dev_priv);
1863
1864 mutex_lock(&power_domains->lock);
1865
1866 __intel_display_power_get_domain(dev_priv, domain);
1867
1868 mutex_unlock(&power_domains->lock);
1869
1870 return wakeref;
1871}
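
/*
 * Expected calling pattern (sketch; POWER_DOMAIN_AUX_A is only an example
 * domain):
 *
 *	intel_wakeref_t wakeref;
 *
 *	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_AUX_A);
 *	... access hardware behind that domain ...
 *	intel_display_power_put(dev_priv, POWER_DOMAIN_AUX_A, wakeref);
 */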
1872
/**
 * intel_display_power_get_if_enabled - grab a reference for an enabled display power domain
 * @dev_priv: i915 device instance
 * @domain: power domain to reference
 *
 * This function grabs a power domain reference for @domain, but only if the
 * device is awake and @domain is already enabled. It therefore never powers
 * anything up itself.
 *
 * Returns a wakeref cookie to be passed to intel_display_power_put() on
 * success, or 0 if the reference could not be taken.
 */
1885intel_wakeref_t
1886intel_display_power_get_if_enabled(struct drm_i915_private *dev_priv,
1887 enum intel_display_power_domain domain)
1888{
1889 struct i915_power_domains *power_domains = &dev_priv->power_domains;
1890 intel_wakeref_t wakeref;
1891 bool is_enabled;
1892
1893 wakeref = intel_runtime_pm_get_if_in_use(dev_priv);
1894 if (!wakeref)
1895 return false;
1896
1897 mutex_lock(&power_domains->lock);
1898
1899 if (__intel_display_power_is_enabled(dev_priv, domain)) {
1900 __intel_display_power_get_domain(dev_priv, domain);
1901 is_enabled = true;
1902 } else {
1903 is_enabled = false;
1904 }
1905
1906 mutex_unlock(&power_domains->lock);
1907
1908 if (!is_enabled) {
1909 intel_runtime_pm_put(dev_priv, wakeref);
1910 wakeref = 0;
1911 }
1912
1913 return wakeref;
1914}
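
/*
 * Conditional access pattern built on the helper above (sketch;
 * POWER_DOMAIN_PIPE_A is only an example domain):
 *
 *	wakeref = intel_display_power_get_if_enabled(dev_priv,
 *						     POWER_DOMAIN_PIPE_A);
 *	if (!wakeref)
 *		return;
 *	... read registers that need PIPE_A power ...
 *	intel_display_power_put(dev_priv, POWER_DOMAIN_PIPE_A, wakeref);
 */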
1915
1916static void __intel_display_power_put(struct drm_i915_private *dev_priv,
1917 enum intel_display_power_domain domain)
1918{
1919 struct i915_power_domains *power_domains;
1920 struct i915_power_well *power_well;
1921
1922 power_domains = &dev_priv->power_domains;
1923
1924 mutex_lock(&power_domains->lock);
1925
1926 WARN(!power_domains->domain_use_count[domain],
1927 "Use count on domain %s is already zero\n",
1928 intel_display_power_domain_str(domain));
1929 power_domains->domain_use_count[domain]--;
1930
1931 for_each_power_domain_well_reverse(dev_priv, power_well, BIT_ULL(domain))
1932 intel_power_well_put(dev_priv, power_well);
1933
1934 mutex_unlock(&power_domains->lock);
1935}
1936
/**
 * intel_display_power_put_unchecked - release an unchecked power domain reference
 * @dev_priv: i915 device instance
 * @domain: power domain to put the reference for
 *
 * This function drops the power domain reference obtained by
 * intel_display_power_get() and might power down the corresponding hardware
 * block right away if this is the last reference. It releases the runtime pm
 * reference without wakeref tracking, so intel_display_power_put() should be
 * preferred where the wakeref cookie is available.
 */
1946void intel_display_power_put_unchecked(struct drm_i915_private *dev_priv,
1947 enum intel_display_power_domain domain)
1948{
1949 __intel_display_power_put(dev_priv, domain);
1950 intel_runtime_pm_put_unchecked(dev_priv);
1951}
1952
1953#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
1954void intel_display_power_put(struct drm_i915_private *dev_priv,
1955 enum intel_display_power_domain domain,
1956 intel_wakeref_t wakeref)
1957{
1958 __intel_display_power_put(dev_priv, domain);
1959 intel_runtime_pm_put(dev_priv, wakeref);
1960}
1961#endif
1962
1963#define I830_PIPES_POWER_DOMAINS ( \
1964 BIT_ULL(POWER_DOMAIN_PIPE_A) | \
1965 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
1966 BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) | \
1967 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
1968 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
1969 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
1970 BIT_ULL(POWER_DOMAIN_INIT))
1971
1972#define VLV_DISPLAY_POWER_DOMAINS ( \
1973 BIT_ULL(POWER_DOMAIN_PIPE_A) | \
1974 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
1975 BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) | \
1976 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
1977 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
1978 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
1979 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
1980 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
1981 BIT_ULL(POWER_DOMAIN_PORT_DSI) | \
1982 BIT_ULL(POWER_DOMAIN_PORT_CRT) | \
1983 BIT_ULL(POWER_DOMAIN_VGA) | \
1984 BIT_ULL(POWER_DOMAIN_AUDIO) | \
1985 BIT_ULL(POWER_DOMAIN_AUX_B) | \
1986 BIT_ULL(POWER_DOMAIN_AUX_C) | \
1987 BIT_ULL(POWER_DOMAIN_GMBUS) | \
1988 BIT_ULL(POWER_DOMAIN_INIT))
1989
1990#define VLV_DPIO_CMN_BC_POWER_DOMAINS ( \
1991 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
1992 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
1993 BIT_ULL(POWER_DOMAIN_PORT_CRT) | \
1994 BIT_ULL(POWER_DOMAIN_AUX_B) | \
1995 BIT_ULL(POWER_DOMAIN_AUX_C) | \
1996 BIT_ULL(POWER_DOMAIN_INIT))
1997
1998#define VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS ( \
1999 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2000 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2001 BIT_ULL(POWER_DOMAIN_INIT))
2002
2003#define VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS ( \
2004 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2005 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2006 BIT_ULL(POWER_DOMAIN_INIT))
2007
2008#define VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS ( \
2009 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2010 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2011 BIT_ULL(POWER_DOMAIN_INIT))
2012
2013#define VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS ( \
2014 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2015 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2016 BIT_ULL(POWER_DOMAIN_INIT))
2017
2018#define CHV_DISPLAY_POWER_DOMAINS ( \
2019 BIT_ULL(POWER_DOMAIN_PIPE_A) | \
2020 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2021 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2022 BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) | \
2023 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2024 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2025 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2026 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2027 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2028 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2029 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2030 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) | \
2031 BIT_ULL(POWER_DOMAIN_PORT_DSI) | \
2032 BIT_ULL(POWER_DOMAIN_VGA) | \
2033 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2034 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2035 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2036 BIT_ULL(POWER_DOMAIN_AUX_D) | \
2037 BIT_ULL(POWER_DOMAIN_GMBUS) | \
2038 BIT_ULL(POWER_DOMAIN_INIT))
2039
2040#define CHV_DPIO_CMN_BC_POWER_DOMAINS ( \
2041 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2042 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2043 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2044 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2045 BIT_ULL(POWER_DOMAIN_INIT))
2046
2047#define CHV_DPIO_CMN_D_POWER_DOMAINS ( \
2048 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) | \
2049 BIT_ULL(POWER_DOMAIN_AUX_D) | \
2050 BIT_ULL(POWER_DOMAIN_INIT))
2051
2052#define HSW_DISPLAY_POWER_DOMAINS ( \
2053 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2054 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2055 BIT_ULL(POWER_DOMAIN_PIPE_A_PANEL_FITTER) | \
2056 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2057 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2058 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2059 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2060 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2061 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2062 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2063 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) | \
2064 BIT_ULL(POWER_DOMAIN_PORT_CRT) | \
2065 BIT_ULL(POWER_DOMAIN_VGA) | \
2066 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2067 BIT_ULL(POWER_DOMAIN_INIT))
2068
2069#define BDW_DISPLAY_POWER_DOMAINS ( \
2070 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2071 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2072 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2073 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2074 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2075 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2076 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2077 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2078 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2079 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) | \
2080 BIT_ULL(POWER_DOMAIN_PORT_CRT) | \
2081 BIT_ULL(POWER_DOMAIN_VGA) | \
2082 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2083 BIT_ULL(POWER_DOMAIN_INIT))
2084
2085#define SKL_DISPLAY_POWERWELL_2_POWER_DOMAINS ( \
2086 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2087 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2088 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2089 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2090 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2091 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2092 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2093 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2094 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2095 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) | \
2096 BIT_ULL(POWER_DOMAIN_PORT_DDI_E_LANES) | \
2097 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2098 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2099 BIT_ULL(POWER_DOMAIN_AUX_D) | \
2100 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2101 BIT_ULL(POWER_DOMAIN_VGA) | \
2102 BIT_ULL(POWER_DOMAIN_INIT))
2103#define SKL_DISPLAY_DDI_IO_A_E_POWER_DOMAINS ( \
2104 BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO) | \
2105 BIT_ULL(POWER_DOMAIN_PORT_DDI_E_IO) | \
2106 BIT_ULL(POWER_DOMAIN_INIT))
2107#define SKL_DISPLAY_DDI_IO_B_POWER_DOMAINS ( \
2108 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO) | \
2109 BIT_ULL(POWER_DOMAIN_INIT))
2110#define SKL_DISPLAY_DDI_IO_C_POWER_DOMAINS ( \
2111 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO) | \
2112 BIT_ULL(POWER_DOMAIN_INIT))
2113#define SKL_DISPLAY_DDI_IO_D_POWER_DOMAINS ( \
2114 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO) | \
2115 BIT_ULL(POWER_DOMAIN_INIT))
2116#define SKL_DISPLAY_DC_OFF_POWER_DOMAINS ( \
2117 SKL_DISPLAY_POWERWELL_2_POWER_DOMAINS | \
2118 BIT_ULL(POWER_DOMAIN_GT_IRQ) | \
2119 BIT_ULL(POWER_DOMAIN_MODESET) | \
2120 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2121 BIT_ULL(POWER_DOMAIN_INIT))
2122
2123#define BXT_DISPLAY_POWERWELL_2_POWER_DOMAINS ( \
2124 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2125 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2126 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2127 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2128 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2129 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2130 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2131 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2132 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2133 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2134 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2135 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2136 BIT_ULL(POWER_DOMAIN_VGA) | \
2137 BIT_ULL(POWER_DOMAIN_INIT))
2138#define BXT_DISPLAY_DC_OFF_POWER_DOMAINS ( \
2139 BXT_DISPLAY_POWERWELL_2_POWER_DOMAINS | \
2140 BIT_ULL(POWER_DOMAIN_GT_IRQ) | \
2141 BIT_ULL(POWER_DOMAIN_MODESET) | \
2142 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2143 BIT_ULL(POWER_DOMAIN_GMBUS) | \
2144 BIT_ULL(POWER_DOMAIN_INIT))
2145#define BXT_DPIO_CMN_A_POWER_DOMAINS ( \
2146 BIT_ULL(POWER_DOMAIN_PORT_DDI_A_LANES) | \
2147 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2148 BIT_ULL(POWER_DOMAIN_INIT))
2149#define BXT_DPIO_CMN_BC_POWER_DOMAINS ( \
2150 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2151 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2152 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2153 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2154 BIT_ULL(POWER_DOMAIN_INIT))
2155
2156#define GLK_DISPLAY_POWERWELL_2_POWER_DOMAINS ( \
2157 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2158 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2159 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2160 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2161 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2162 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2163 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2164 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2165 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2166 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2167 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2168 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2169 BIT_ULL(POWER_DOMAIN_VGA) | \
2170 BIT_ULL(POWER_DOMAIN_INIT))
2171#define GLK_DISPLAY_DDI_IO_A_POWER_DOMAINS ( \
2172 BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO))
2173#define GLK_DISPLAY_DDI_IO_B_POWER_DOMAINS ( \
2174 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO))
2175#define GLK_DISPLAY_DDI_IO_C_POWER_DOMAINS ( \
2176 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO))
2177#define GLK_DPIO_CMN_A_POWER_DOMAINS ( \
2178 BIT_ULL(POWER_DOMAIN_PORT_DDI_A_LANES) | \
2179 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2180 BIT_ULL(POWER_DOMAIN_INIT))
2181#define GLK_DPIO_CMN_B_POWER_DOMAINS ( \
2182 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2183 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2184 BIT_ULL(POWER_DOMAIN_INIT))
2185#define GLK_DPIO_CMN_C_POWER_DOMAINS ( \
2186 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2187 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2188 BIT_ULL(POWER_DOMAIN_INIT))
2189#define GLK_DISPLAY_AUX_A_POWER_DOMAINS ( \
2190 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2191 BIT_ULL(POWER_DOMAIN_AUX_IO_A) | \
2192 BIT_ULL(POWER_DOMAIN_INIT))
2193#define GLK_DISPLAY_AUX_B_POWER_DOMAINS ( \
2194 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2195 BIT_ULL(POWER_DOMAIN_INIT))
2196#define GLK_DISPLAY_AUX_C_POWER_DOMAINS ( \
2197 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2198 BIT_ULL(POWER_DOMAIN_INIT))
2199#define GLK_DISPLAY_DC_OFF_POWER_DOMAINS ( \
2200 GLK_DISPLAY_POWERWELL_2_POWER_DOMAINS | \
2201 BIT_ULL(POWER_DOMAIN_GT_IRQ) | \
2202 BIT_ULL(POWER_DOMAIN_MODESET) | \
2203 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2204 BIT_ULL(POWER_DOMAIN_GMBUS) | \
2205 BIT_ULL(POWER_DOMAIN_INIT))
2206
2207#define CNL_DISPLAY_POWERWELL_2_POWER_DOMAINS ( \
2208 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2209 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2210 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2211 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2212 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2213 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2214 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2215 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2216 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2217 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) | \
2218 BIT_ULL(POWER_DOMAIN_PORT_DDI_F_LANES) | \
2219 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2220 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2221 BIT_ULL(POWER_DOMAIN_AUX_D) | \
2222 BIT_ULL(POWER_DOMAIN_AUX_F) | \
2223 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2224 BIT_ULL(POWER_DOMAIN_VGA) | \
2225 BIT_ULL(POWER_DOMAIN_INIT))
2226#define CNL_DISPLAY_DDI_A_IO_POWER_DOMAINS ( \
2227 BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO) | \
2228 BIT_ULL(POWER_DOMAIN_INIT))
2229#define CNL_DISPLAY_DDI_B_IO_POWER_DOMAINS ( \
2230 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO) | \
2231 BIT_ULL(POWER_DOMAIN_INIT))
2232#define CNL_DISPLAY_DDI_C_IO_POWER_DOMAINS ( \
2233 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO) | \
2234 BIT_ULL(POWER_DOMAIN_INIT))
2235#define CNL_DISPLAY_DDI_D_IO_POWER_DOMAINS ( \
2236 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO) | \
2237 BIT_ULL(POWER_DOMAIN_INIT))
2238#define CNL_DISPLAY_AUX_A_POWER_DOMAINS ( \
2239 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2240 BIT_ULL(POWER_DOMAIN_AUX_IO_A) | \
2241 BIT_ULL(POWER_DOMAIN_INIT))
2242#define CNL_DISPLAY_AUX_B_POWER_DOMAINS ( \
2243 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2244 BIT_ULL(POWER_DOMAIN_INIT))
2245#define CNL_DISPLAY_AUX_C_POWER_DOMAINS ( \
2246 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2247 BIT_ULL(POWER_DOMAIN_INIT))
2248#define CNL_DISPLAY_AUX_D_POWER_DOMAINS ( \
2249 BIT_ULL(POWER_DOMAIN_AUX_D) | \
2250 BIT_ULL(POWER_DOMAIN_INIT))
2251#define CNL_DISPLAY_AUX_F_POWER_DOMAINS ( \
2252 BIT_ULL(POWER_DOMAIN_AUX_F) | \
2253 BIT_ULL(POWER_DOMAIN_INIT))
2254#define CNL_DISPLAY_DDI_F_IO_POWER_DOMAINS ( \
2255 BIT_ULL(POWER_DOMAIN_PORT_DDI_F_IO) | \
2256 BIT_ULL(POWER_DOMAIN_INIT))
2257#define CNL_DISPLAY_DC_OFF_POWER_DOMAINS ( \
2258 CNL_DISPLAY_POWERWELL_2_POWER_DOMAINS | \
2259 BIT_ULL(POWER_DOMAIN_GT_IRQ) | \
2260 BIT_ULL(POWER_DOMAIN_MODESET) | \
2261 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2262 BIT_ULL(POWER_DOMAIN_INIT))
2263
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277
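/*
 * ICL power well domains. The wells nest: power well 4 covers pipe C only,
 * power well 3 adds pipe B, the transcoders and the DDI lane/IO and AUX
 * domains, and power well 2 adds the eDP VDSC transcoder on top of that.
 */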
2278#define ICL_PW_4_POWER_DOMAINS ( \
2279 BIT_ULL(POWER_DOMAIN_PIPE_C) | \
2280 BIT_ULL(POWER_DOMAIN_PIPE_C_PANEL_FITTER) | \
2281 BIT_ULL(POWER_DOMAIN_INIT))
2282
2283#define ICL_PW_3_POWER_DOMAINS ( \
2284 ICL_PW_4_POWER_DOMAINS | \
2285 BIT_ULL(POWER_DOMAIN_PIPE_B) | \
2286 BIT_ULL(POWER_DOMAIN_TRANSCODER_A) | \
2287 BIT_ULL(POWER_DOMAIN_TRANSCODER_B) | \
2288 BIT_ULL(POWER_DOMAIN_TRANSCODER_C) | \
2289 BIT_ULL(POWER_DOMAIN_PIPE_B_PANEL_FITTER) | \
2290 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_LANES) | \
2291 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO) | \
2292 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_LANES) | \
2293 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO) | \
2294 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_LANES) | \
2295 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO) | \
2296 BIT_ULL(POWER_DOMAIN_PORT_DDI_E_LANES) | \
2297 BIT_ULL(POWER_DOMAIN_PORT_DDI_E_IO) | \
2298 BIT_ULL(POWER_DOMAIN_PORT_DDI_F_LANES) | \
2299 BIT_ULL(POWER_DOMAIN_PORT_DDI_F_IO) | \
2300 BIT_ULL(POWER_DOMAIN_AUX_B) | \
2301 BIT_ULL(POWER_DOMAIN_AUX_C) | \
2302 BIT_ULL(POWER_DOMAIN_AUX_D) | \
2303 BIT_ULL(POWER_DOMAIN_AUX_E) | \
2304 BIT_ULL(POWER_DOMAIN_AUX_F) | \
2305 BIT_ULL(POWER_DOMAIN_AUX_TBT1) | \
2306 BIT_ULL(POWER_DOMAIN_AUX_TBT2) | \
2307 BIT_ULL(POWER_DOMAIN_AUX_TBT3) | \
2308 BIT_ULL(POWER_DOMAIN_AUX_TBT4) | \
2309 BIT_ULL(POWER_DOMAIN_VGA) | \
2310 BIT_ULL(POWER_DOMAIN_AUDIO) | \
2311 BIT_ULL(POWER_DOMAIN_INIT))
2312
2313
2314
2315
2316#define ICL_PW_2_POWER_DOMAINS ( \
2317 ICL_PW_3_POWER_DOMAINS | \
2318 BIT_ULL(POWER_DOMAIN_TRANSCODER_EDP_VDSC) | \
2319 BIT_ULL(POWER_DOMAIN_INIT))
2320
2321
2322
2323#define ICL_DISPLAY_DC_OFF_POWER_DOMAINS ( \
2324 ICL_PW_2_POWER_DOMAINS | \
2325 BIT_ULL(POWER_DOMAIN_MODESET) | \
2326 BIT_ULL(POWER_DOMAIN_AUX_A) | \
2327 BIT_ULL(POWER_DOMAIN_INIT))
2328
2329#define ICL_DDI_IO_A_POWER_DOMAINS ( \
2330 BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO))
2331#define ICL_DDI_IO_B_POWER_DOMAINS ( \
2332 BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO))
2333#define ICL_DDI_IO_C_POWER_DOMAINS ( \
2334 BIT_ULL(POWER_DOMAIN_PORT_DDI_C_IO))
2335#define ICL_DDI_IO_D_POWER_DOMAINS ( \
2336 BIT_ULL(POWER_DOMAIN_PORT_DDI_D_IO))
2337#define ICL_DDI_IO_E_POWER_DOMAINS ( \
2338 BIT_ULL(POWER_DOMAIN_PORT_DDI_E_IO))
2339#define ICL_DDI_IO_F_POWER_DOMAINS ( \
2340 BIT_ULL(POWER_DOMAIN_PORT_DDI_F_IO))
2341
2342#define ICL_AUX_A_IO_POWER_DOMAINS ( \
2343 BIT_ULL(POWER_DOMAIN_AUX_IO_A) | \
2344 BIT_ULL(POWER_DOMAIN_AUX_A))
2345#define ICL_AUX_B_IO_POWER_DOMAINS ( \
2346 BIT_ULL(POWER_DOMAIN_AUX_B))
2347#define ICL_AUX_C_IO_POWER_DOMAINS ( \
2348 BIT_ULL(POWER_DOMAIN_AUX_C))
2349#define ICL_AUX_D_IO_POWER_DOMAINS ( \
2350 BIT_ULL(POWER_DOMAIN_AUX_D))
2351#define ICL_AUX_E_IO_POWER_DOMAINS ( \
2352 BIT_ULL(POWER_DOMAIN_AUX_E))
2353#define ICL_AUX_F_IO_POWER_DOMAINS ( \
2354 BIT_ULL(POWER_DOMAIN_AUX_F))
2355#define ICL_AUX_TBT1_IO_POWER_DOMAINS ( \
2356 BIT_ULL(POWER_DOMAIN_AUX_TBT1))
2357#define ICL_AUX_TBT2_IO_POWER_DOMAINS ( \
2358 BIT_ULL(POWER_DOMAIN_AUX_TBT2))
2359#define ICL_AUX_TBT3_IO_POWER_DOMAINS ( \
2360 BIT_ULL(POWER_DOMAIN_AUX_TBT3))
2361#define ICL_AUX_TBT4_IO_POWER_DOMAINS ( \
2362 BIT_ULL(POWER_DOMAIN_AUX_TBT4))
2363
2364static const struct i915_power_well_ops i9xx_always_on_power_well_ops = {
2365 .sync_hw = i9xx_power_well_sync_hw_noop,
2366 .enable = i9xx_always_on_power_well_noop,
2367 .disable = i9xx_always_on_power_well_noop,
2368 .is_enabled = i9xx_always_on_power_well_enabled,
2369};
2370
2371static const struct i915_power_well_ops chv_pipe_power_well_ops = {
2372 .sync_hw = i9xx_power_well_sync_hw_noop,
2373 .enable = chv_pipe_power_well_enable,
2374 .disable = chv_pipe_power_well_disable,
2375 .is_enabled = chv_pipe_power_well_enabled,
2376};
2377
2378static const struct i915_power_well_ops chv_dpio_cmn_power_well_ops = {
2379 .sync_hw = i9xx_power_well_sync_hw_noop,
2380 .enable = chv_dpio_cmn_power_well_enable,
2381 .disable = chv_dpio_cmn_power_well_disable,
2382 .is_enabled = vlv_power_well_enabled,
2383};
2384
2385static const struct i915_power_well_desc i9xx_always_on_power_well[] = {
2386 {
2387 .name = "always-on",
2388 .always_on = true,
2389 .domains = POWER_DOMAIN_MASK,
2390 .ops = &i9xx_always_on_power_well_ops,
2391 .id = DISP_PW_ID_NONE,
2392 },
2393};
2394
2395static const struct i915_power_well_ops i830_pipes_power_well_ops = {
2396 .sync_hw = i830_pipes_power_well_sync_hw,
2397 .enable = i830_pipes_power_well_enable,
2398 .disable = i830_pipes_power_well_disable,
2399 .is_enabled = i830_pipes_power_well_enabled,
2400};
2401
2402static const struct i915_power_well_desc i830_power_wells[] = {
2403 {
2404 .name = "always-on",
2405 .always_on = true,
2406 .domains = POWER_DOMAIN_MASK,
2407 .ops = &i9xx_always_on_power_well_ops,
2408 .id = DISP_PW_ID_NONE,
2409 },
2410 {
2411 .name = "pipes",
2412 .domains = I830_PIPES_POWER_DOMAINS,
2413 .ops = &i830_pipes_power_well_ops,
2414 .id = DISP_PW_ID_NONE,
2415 },
2416};
2417
2418static const struct i915_power_well_ops hsw_power_well_ops = {
2419 .sync_hw = hsw_power_well_sync_hw,
2420 .enable = hsw_power_well_enable,
2421 .disable = hsw_power_well_disable,
2422 .is_enabled = hsw_power_well_enabled,
2423};
2424
2425static const struct i915_power_well_ops gen9_dc_off_power_well_ops = {
2426 .sync_hw = i9xx_power_well_sync_hw_noop,
2427 .enable = gen9_dc_off_power_well_enable,
2428 .disable = gen9_dc_off_power_well_disable,
2429 .is_enabled = gen9_dc_off_power_well_enabled,
2430};
2431
2432static const struct i915_power_well_ops bxt_dpio_cmn_power_well_ops = {
2433 .sync_hw = i9xx_power_well_sync_hw_noop,
2434 .enable = bxt_dpio_cmn_power_well_enable,
2435 .disable = bxt_dpio_cmn_power_well_disable,
2436 .is_enabled = bxt_dpio_cmn_power_well_enabled,
2437};
2438
2439static const struct i915_power_well_regs hsw_power_well_regs = {
2440 .bios = HSW_PWR_WELL_CTL1,
2441 .driver = HSW_PWR_WELL_CTL2,
2442 .kvmr = HSW_PWR_WELL_CTL3,
2443 .debug = HSW_PWR_WELL_CTL4,
2444};
2445
2446static const struct i915_power_well_desc hsw_power_wells[] = {
2447 {
2448 .name = "always-on",
2449 .always_on = true,
2450 .domains = POWER_DOMAIN_MASK,
2451 .ops = &i9xx_always_on_power_well_ops,
2452 .id = DISP_PW_ID_NONE,
2453 },
2454 {
2455 .name = "display",
2456 .domains = HSW_DISPLAY_POWER_DOMAINS,
2457 .ops = &hsw_power_well_ops,
2458 .id = HSW_DISP_PW_GLOBAL,
2459 {
2460 .hsw.regs = &hsw_power_well_regs,
2461 .hsw.idx = HSW_PW_CTL_IDX_GLOBAL,
2462 .hsw.has_vga = true,
2463 },
2464 },
2465};
2466
2467static const struct i915_power_well_desc bdw_power_wells[] = {
2468 {
2469 .name = "always-on",
2470 .always_on = true,
2471 .domains = POWER_DOMAIN_MASK,
2472 .ops = &i9xx_always_on_power_well_ops,
2473 .id = DISP_PW_ID_NONE,
2474 },
2475 {
2476 .name = "display",
2477 .domains = BDW_DISPLAY_POWER_DOMAINS,
2478 .ops = &hsw_power_well_ops,
2479 .id = HSW_DISP_PW_GLOBAL,
2480 {
2481 .hsw.regs = &hsw_power_well_regs,
2482 .hsw.idx = HSW_PW_CTL_IDX_GLOBAL,
2483 .hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
2484 .hsw.has_vga = true,
2485 },
2486 },
2487};
2488
2489static const struct i915_power_well_ops vlv_display_power_well_ops = {
2490 .sync_hw = i9xx_power_well_sync_hw_noop,
2491 .enable = vlv_display_power_well_enable,
2492 .disable = vlv_display_power_well_disable,
2493 .is_enabled = vlv_power_well_enabled,
2494};
2495
2496static const struct i915_power_well_ops vlv_dpio_cmn_power_well_ops = {
2497 .sync_hw = i9xx_power_well_sync_hw_noop,
2498 .enable = vlv_dpio_cmn_power_well_enable,
2499 .disable = vlv_dpio_cmn_power_well_disable,
2500 .is_enabled = vlv_power_well_enabled,
2501};
2502
2503static const struct i915_power_well_ops vlv_dpio_power_well_ops = {
2504 .sync_hw = i9xx_power_well_sync_hw_noop,
2505 .enable = vlv_power_well_enable,
2506 .disable = vlv_power_well_disable,
2507 .is_enabled = vlv_power_well_enabled,
2508};
2509
2510static const struct i915_power_well_desc vlv_power_wells[] = {
2511 {
2512 .name = "always-on",
2513 .always_on = true,
2514 .domains = POWER_DOMAIN_MASK,
2515 .ops = &i9xx_always_on_power_well_ops,
2516 .id = DISP_PW_ID_NONE,
2517 },
2518 {
2519 .name = "display",
2520 .domains = VLV_DISPLAY_POWER_DOMAINS,
2521 .ops = &vlv_display_power_well_ops,
2522 .id = VLV_DISP_PW_DISP2D,
2523 {
2524 .vlv.idx = PUNIT_PWGT_IDX_DISP2D,
2525 },
2526 },
2527 {
2528 .name = "dpio-tx-b-01",
2529 .domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
2530 VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
2531 VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
2532 VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
2533 .ops = &vlv_dpio_power_well_ops,
2534 .id = DISP_PW_ID_NONE,
2535 {
2536 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_B_LANES_01,
2537 },
2538 },
2539 {
2540 .name = "dpio-tx-b-23",
2541 .domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
2542 VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
2543 VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
2544 VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
2545 .ops = &vlv_dpio_power_well_ops,
2546 .id = DISP_PW_ID_NONE,
2547 {
2548 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_B_LANES_23,
2549 },
2550 },
2551 {
2552 .name = "dpio-tx-c-01",
2553 .domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
2554 VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
2555 VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
2556 VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
2557 .ops = &vlv_dpio_power_well_ops,
2558 .id = DISP_PW_ID_NONE,
2559 {
2560 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_C_LANES_01,
2561 },
2562 },
2563 {
2564 .name = "dpio-tx-c-23",
2565 .domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
2566 VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
2567 VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
2568 VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
2569 .ops = &vlv_dpio_power_well_ops,
2570 .id = DISP_PW_ID_NONE,
2571 {
2572 .vlv.idx = PUNIT_PWGT_IDX_DPIO_TX_C_LANES_23,
2573 },
2574 },
2575 {
2576 .name = "dpio-common",
2577 .domains = VLV_DPIO_CMN_BC_POWER_DOMAINS,
2578 .ops = &vlv_dpio_cmn_power_well_ops,
2579 .id = VLV_DISP_PW_DPIO_CMN_BC,
2580 {
2581 .vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_BC,
2582 },
2583 },
2584};
2585
2586static const struct i915_power_well_desc chv_power_wells[] = {
2587 {
2588 .name = "always-on",
2589 .always_on = true,
2590 .domains = POWER_DOMAIN_MASK,
2591 .ops = &i9xx_always_on_power_well_ops,
2592 .id = DISP_PW_ID_NONE,
2593 },
2594 {
2595 .name = "display",
2596
2597
2598
2599
2600
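		/*
		 * The pipe A power well doubles as the disp2d well: pipe B
		 * and C have no power wells of their own, so this single
		 * well must be on for any pipe to work.
		 */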
2601 .domains = CHV_DISPLAY_POWER_DOMAINS,
2602 .ops = &chv_pipe_power_well_ops,
2603 .id = DISP_PW_ID_NONE,
2604 },
2605 {
2606 .name = "dpio-common-bc",
2607 .domains = CHV_DPIO_CMN_BC_POWER_DOMAINS,
2608 .ops = &chv_dpio_cmn_power_well_ops,
2609 .id = VLV_DISP_PW_DPIO_CMN_BC,
2610 {
2611 .vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_BC,
2612 },
2613 },
2614 {
2615 .name = "dpio-common-d",
2616 .domains = CHV_DPIO_CMN_D_POWER_DOMAINS,
2617 .ops = &chv_dpio_cmn_power_well_ops,
2618 .id = CHV_DISP_PW_DPIO_CMN_D,
2619 {
2620 .vlv.idx = PUNIT_PWGT_IDX_DPIO_CMN_D,
2621 },
2622 },
2623};
2624
2625bool intel_display_power_well_is_enabled(struct drm_i915_private *dev_priv,
2626 enum i915_power_well_id power_well_id)
2627{
2628 struct i915_power_well *power_well;
2629 bool ret;
2630
2631 power_well = lookup_power_well(dev_priv, power_well_id);
2632 ret = power_well->desc->ops->is_enabled(dev_priv, power_well);
2633
2634 return ret;
2635}
2636
2637static const struct i915_power_well_desc skl_power_wells[] = {
2638 {
2639 .name = "always-on",
2640 .always_on = true,
2641 .domains = POWER_DOMAIN_MASK,
2642 .ops = &i9xx_always_on_power_well_ops,
2643 .id = DISP_PW_ID_NONE,
2644 },
2645 {
2646 .name = "power well 1",
2647
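		/* Handled by the DMC firmware */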
2648 .always_on = true,
2649 .domains = 0,
2650 .ops = &hsw_power_well_ops,
2651 .id = SKL_DISP_PW_1,
2652 {
2653 .hsw.regs = &hsw_power_well_regs,
2654 .hsw.idx = SKL_PW_CTL_IDX_PW_1,
2655 .hsw.has_fuses = true,
2656 },
2657 },
2658 {
2659 .name = "MISC IO power well",
2660
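		/* Handled by the DMC firmware */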
2661 .always_on = true,
2662 .domains = 0,
2663 .ops = &hsw_power_well_ops,
2664 .id = SKL_DISP_PW_MISC_IO,
2665 {
2666 .hsw.regs = &hsw_power_well_regs,
2667 .hsw.idx = SKL_PW_CTL_IDX_MISC_IO,
2668 },
2669 },
2670 {
2671 .name = "DC off",
2672 .domains = SKL_DISPLAY_DC_OFF_POWER_DOMAINS,
2673 .ops = &gen9_dc_off_power_well_ops,
2674 .id = DISP_PW_ID_NONE,
2675 },
2676 {
2677 .name = "power well 2",
2678 .domains = SKL_DISPLAY_POWERWELL_2_POWER_DOMAINS,
2679 .ops = &hsw_power_well_ops,
2680 .id = SKL_DISP_PW_2,
2681 {
2682 .hsw.regs = &hsw_power_well_regs,
2683 .hsw.idx = SKL_PW_CTL_IDX_PW_2,
2684 .hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
2685 .hsw.has_vga = true,
2686 .hsw.has_fuses = true,
2687 },
2688 },
2689 {
2690 .name = "DDI A/E IO power well",
2691 .domains = SKL_DISPLAY_DDI_IO_A_E_POWER_DOMAINS,
2692 .ops = &hsw_power_well_ops,
2693 .id = DISP_PW_ID_NONE,
2694 {
2695 .hsw.regs = &hsw_power_well_regs,
2696 .hsw.idx = SKL_PW_CTL_IDX_DDI_A_E,
2697 },
2698 },
2699 {
2700 .name = "DDI B IO power well",
2701 .domains = SKL_DISPLAY_DDI_IO_B_POWER_DOMAINS,
2702 .ops = &hsw_power_well_ops,
2703 .id = DISP_PW_ID_NONE,
2704 {
2705 .hsw.regs = &hsw_power_well_regs,
2706 .hsw.idx = SKL_PW_CTL_IDX_DDI_B,
2707 },
2708 },
2709 {
2710 .name = "DDI C IO power well",
2711 .domains = SKL_DISPLAY_DDI_IO_C_POWER_DOMAINS,
2712 .ops = &hsw_power_well_ops,
2713 .id = DISP_PW_ID_NONE,
2714 {
2715 .hsw.regs = &hsw_power_well_regs,
2716 .hsw.idx = SKL_PW_CTL_IDX_DDI_C,
2717 },
2718 },
2719 {
2720 .name = "DDI D IO power well",
2721 .domains = SKL_DISPLAY_DDI_IO_D_POWER_DOMAINS,
2722 .ops = &hsw_power_well_ops,
2723 .id = DISP_PW_ID_NONE,
2724 {
2725 .hsw.regs = &hsw_power_well_regs,
2726 .hsw.idx = SKL_PW_CTL_IDX_DDI_D,
2727 },
2728 },
2729};
2730
2731static const struct i915_power_well_desc bxt_power_wells[] = {
2732 {
2733 .name = "always-on",
2734 .always_on = true,
2735 .domains = POWER_DOMAIN_MASK,
2736 .ops = &i9xx_always_on_power_well_ops,
2737 .id = DISP_PW_ID_NONE,
2738 },
2739 {
2740 .name = "power well 1",
2741
2742 .always_on = true,
2743 .domains = 0,
2744 .ops = &hsw_power_well_ops,
2745 .id = SKL_DISP_PW_1,
2746 {
2747 .hsw.regs = &hsw_power_well_regs,
2748 .hsw.idx = SKL_PW_CTL_IDX_PW_1,
2749 .hsw.has_fuses = true,
2750 },
2751 },
2752 {
2753 .name = "DC off",
2754 .domains = BXT_DISPLAY_DC_OFF_POWER_DOMAINS,
2755 .ops = &gen9_dc_off_power_well_ops,
2756 .id = DISP_PW_ID_NONE,
2757 },
2758 {
2759 .name = "power well 2",
2760 .domains = BXT_DISPLAY_POWERWELL_2_POWER_DOMAINS,
2761 .ops = &hsw_power_well_ops,
2762 .id = SKL_DISP_PW_2,
2763 {
2764 .hsw.regs = &hsw_power_well_regs,
2765 .hsw.idx = SKL_PW_CTL_IDX_PW_2,
2766 .hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
2767 .hsw.has_vga = true,
2768 .hsw.has_fuses = true,
2769 },
2770 },
2771 {
2772 .name = "dpio-common-a",
2773 .domains = BXT_DPIO_CMN_A_POWER_DOMAINS,
2774 .ops = &bxt_dpio_cmn_power_well_ops,
2775 .id = BXT_DISP_PW_DPIO_CMN_A,
2776 {
2777 .bxt.phy = DPIO_PHY1,
2778 },
2779 },
2780 {
2781 .name = "dpio-common-bc",
2782 .domains = BXT_DPIO_CMN_BC_POWER_DOMAINS,
2783 .ops = &bxt_dpio_cmn_power_well_ops,
2784 .id = VLV_DISP_PW_DPIO_CMN_BC,
2785 {
2786 .bxt.phy = DPIO_PHY0,
2787 },
2788 },
2789};
2790
2791static const struct i915_power_well_desc glk_power_wells[] = {
2792 {
2793 .name = "always-on",
2794 .always_on = true,
2795 .domains = POWER_DOMAIN_MASK,
2796 .ops = &i9xx_always_on_power_well_ops,
2797 .id = DISP_PW_ID_NONE,
2798 },
2799 {
2800 .name = "power well 1",
2801
2802 .always_on = true,
2803 .domains = 0,
2804 .ops = &hsw_power_well_ops,
2805 .id = SKL_DISP_PW_1,
2806 {
2807 .hsw.regs = &hsw_power_well_regs,
2808 .hsw.idx = SKL_PW_CTL_IDX_PW_1,
2809 .hsw.has_fuses = true,
2810 },
2811 },
2812 {
2813 .name = "DC off",
2814 .domains = GLK_DISPLAY_DC_OFF_POWER_DOMAINS,
2815 .ops = &gen9_dc_off_power_well_ops,
2816 .id = DISP_PW_ID_NONE,
2817 },
2818 {
2819 .name = "power well 2",
2820 .domains = GLK_DISPLAY_POWERWELL_2_POWER_DOMAINS,
2821 .ops = &hsw_power_well_ops,
2822 .id = SKL_DISP_PW_2,
2823 {
2824 .hsw.regs = &hsw_power_well_regs,
2825 .hsw.idx = SKL_PW_CTL_IDX_PW_2,
2826 .hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
2827 .hsw.has_vga = true,
2828 .hsw.has_fuses = true,
2829 },
2830 },
2831 {
2832 .name = "dpio-common-a",
2833 .domains = GLK_DPIO_CMN_A_POWER_DOMAINS,
2834 .ops = &bxt_dpio_cmn_power_well_ops,
2835 .id = BXT_DISP_PW_DPIO_CMN_A,
2836 {
2837 .bxt.phy = DPIO_PHY1,
2838 },
2839 },
2840 {
2841 .name = "dpio-common-b",
2842 .domains = GLK_DPIO_CMN_B_POWER_DOMAINS,
2843 .ops = &bxt_dpio_cmn_power_well_ops,
2844 .id = VLV_DISP_PW_DPIO_CMN_BC,
2845 {
2846 .bxt.phy = DPIO_PHY0,
2847 },
2848 },
2849 {
2850 .name = "dpio-common-c",
2851 .domains = GLK_DPIO_CMN_C_POWER_DOMAINS,
2852 .ops = &bxt_dpio_cmn_power_well_ops,
2853 .id = GLK_DISP_PW_DPIO_CMN_C,
2854 {
2855 .bxt.phy = DPIO_PHY2,
2856 },
2857 },
2858 {
2859 .name = "AUX A",
2860 .domains = GLK_DISPLAY_AUX_A_POWER_DOMAINS,
2861 .ops = &hsw_power_well_ops,
2862 .id = DISP_PW_ID_NONE,
2863 {
2864 .hsw.regs = &hsw_power_well_regs,
2865 .hsw.idx = GLK_PW_CTL_IDX_AUX_A,
2866 },
2867 },
2868 {
2869 .name = "AUX B",
2870 .domains = GLK_DISPLAY_AUX_B_POWER_DOMAINS,
2871 .ops = &hsw_power_well_ops,
2872 .id = DISP_PW_ID_NONE,
2873 {
2874 .hsw.regs = &hsw_power_well_regs,
2875 .hsw.idx = GLK_PW_CTL_IDX_AUX_B,
2876 },
2877 },
2878 {
2879 .name = "AUX C",
2880 .domains = GLK_DISPLAY_AUX_C_POWER_DOMAINS,
2881 .ops = &hsw_power_well_ops,
2882 .id = DISP_PW_ID_NONE,
2883 {
2884 .hsw.regs = &hsw_power_well_regs,
2885 .hsw.idx = GLK_PW_CTL_IDX_AUX_C,
2886 },
2887 },
2888 {
2889 .name = "DDI A IO power well",
2890 .domains = GLK_DISPLAY_DDI_IO_A_POWER_DOMAINS,
2891 .ops = &hsw_power_well_ops,
2892 .id = DISP_PW_ID_NONE,
2893 {
2894 .hsw.regs = &hsw_power_well_regs,
2895 .hsw.idx = GLK_PW_CTL_IDX_DDI_A,
2896 },
2897 },
2898 {
2899 .name = "DDI B IO power well",
2900 .domains = GLK_DISPLAY_DDI_IO_B_POWER_DOMAINS,
2901 .ops = &hsw_power_well_ops,
2902 .id = DISP_PW_ID_NONE,
2903 {
2904 .hsw.regs = &hsw_power_well_regs,
2905 .hsw.idx = SKL_PW_CTL_IDX_DDI_B,
2906 },
2907 },
2908 {
2909 .name = "DDI C IO power well",
2910 .domains = GLK_DISPLAY_DDI_IO_C_POWER_DOMAINS,
2911 .ops = &hsw_power_well_ops,
2912 .id = DISP_PW_ID_NONE,
2913 {
2914 .hsw.regs = &hsw_power_well_regs,
2915 .hsw.idx = SKL_PW_CTL_IDX_DDI_C,
2916 },
2917 },
2918};
2919
2920static const struct i915_power_well_desc cnl_power_wells[] = {
2921 {
2922 .name = "always-on",
2923 .always_on = true,
2924 .domains = POWER_DOMAIN_MASK,
2925 .ops = &i9xx_always_on_power_well_ops,
2926 .id = DISP_PW_ID_NONE,
2927 },
2928 {
2929 .name = "power well 1",
2930
2931 .always_on = true,
2932 .domains = 0,
2933 .ops = &hsw_power_well_ops,
2934 .id = SKL_DISP_PW_1,
2935 {
2936 .hsw.regs = &hsw_power_well_regs,
2937 .hsw.idx = SKL_PW_CTL_IDX_PW_1,
2938 .hsw.has_fuses = true,
2939 },
2940 },
2941 {
2942 .name = "AUX A",
2943 .domains = CNL_DISPLAY_AUX_A_POWER_DOMAINS,
2944 .ops = &hsw_power_well_ops,
2945 .id = DISP_PW_ID_NONE,
2946 {
2947 .hsw.regs = &hsw_power_well_regs,
2948 .hsw.idx = GLK_PW_CTL_IDX_AUX_A,
2949 },
2950 },
2951 {
2952 .name = "AUX B",
2953 .domains = CNL_DISPLAY_AUX_B_POWER_DOMAINS,
2954 .ops = &hsw_power_well_ops,
2955 .id = DISP_PW_ID_NONE,
2956 {
2957 .hsw.regs = &hsw_power_well_regs,
2958 .hsw.idx = GLK_PW_CTL_IDX_AUX_B,
2959 },
2960 },
2961 {
2962 .name = "AUX C",
2963 .domains = CNL_DISPLAY_AUX_C_POWER_DOMAINS,
2964 .ops = &hsw_power_well_ops,
2965 .id = DISP_PW_ID_NONE,
2966 {
2967 .hsw.regs = &hsw_power_well_regs,
2968 .hsw.idx = GLK_PW_CTL_IDX_AUX_C,
2969 },
2970 },
2971 {
2972 .name = "AUX D",
2973 .domains = CNL_DISPLAY_AUX_D_POWER_DOMAINS,
2974 .ops = &hsw_power_well_ops,
2975 .id = DISP_PW_ID_NONE,
2976 {
2977 .hsw.regs = &hsw_power_well_regs,
2978 .hsw.idx = CNL_PW_CTL_IDX_AUX_D,
2979 },
2980 },
2981 {
2982 .name = "DC off",
2983 .domains = CNL_DISPLAY_DC_OFF_POWER_DOMAINS,
2984 .ops = &gen9_dc_off_power_well_ops,
2985 .id = DISP_PW_ID_NONE,
2986 },
2987 {
2988 .name = "power well 2",
2989 .domains = CNL_DISPLAY_POWERWELL_2_POWER_DOMAINS,
2990 .ops = &hsw_power_well_ops,
2991 .id = SKL_DISP_PW_2,
2992 {
2993 .hsw.regs = &hsw_power_well_regs,
2994 .hsw.idx = SKL_PW_CTL_IDX_PW_2,
2995 .hsw.irq_pipe_mask = BIT(PIPE_B) | BIT(PIPE_C),
2996 .hsw.has_vga = true,
2997 .hsw.has_fuses = true,
2998 },
2999 },
3000 {
3001 .name = "DDI A IO power well",
3002 .domains = CNL_DISPLAY_DDI_A_IO_POWER_DOMAINS,
3003 .ops = &hsw_power_well_ops,
3004 .id = DISP_PW_ID_NONE,
3005 {
3006 .hsw.regs = &hsw_power_well_regs,
3007 .hsw.idx = GLK_PW_CTL_IDX_DDI_A,
3008 },
3009 },
3010 {
3011 .name = "DDI B IO power well",
3012 .domains = CNL_DISPLAY_DDI_B_IO_POWER_DOMAINS,
3013 .ops = &hsw_power_well_ops,
3014 .id = DISP_PW_ID_NONE,
3015 {
3016 .hsw.regs = &hsw_power_well_regs,
3017 .hsw.idx = SKL_PW_CTL_IDX_DDI_B,
3018 },
3019 },
3020 {
3021 .name = "DDI C IO power well",
3022 .domains = CNL_DISPLAY_DDI_C_IO_POWER_DOMAINS,
3023 .ops = &hsw_power_well_ops,
3024 .id = DISP_PW_ID_NONE,
3025 {
3026 .hsw.regs = &hsw_power_well_regs,
3027 .hsw.idx = SKL_PW_CTL_IDX_DDI_C,
3028 },
3029 },
3030 {
3031 .name = "DDI D IO power well",
3032 .domains = CNL_DISPLAY_DDI_D_IO_POWER_DOMAINS,
3033 .ops = &hsw_power_well_ops,
3034 .id = DISP_PW_ID_NONE,
3035 {
3036 .hsw.regs = &hsw_power_well_regs,
3037 .hsw.idx = SKL_PW_CTL_IDX_DDI_D,
3038 },
3039 },
3040 {
3041 .name = "DDI F IO power well",
3042 .domains = CNL_DISPLAY_DDI_F_IO_POWER_DOMAINS,
3043 .ops = &hsw_power_well_ops,
3044 .id = DISP_PW_ID_NONE,
3045 {
3046 .hsw.regs = &hsw_power_well_regs,
3047 .hsw.idx = CNL_PW_CTL_IDX_DDI_F,
3048 },
3049 },
3050 {
3051 .name = "AUX F",
3052 .domains = CNL_DISPLAY_AUX_F_POWER_DOMAINS,
3053 .ops = &hsw_power_well_ops,
3054 .id = DISP_PW_ID_NONE,
3055 {
3056 .hsw.regs = &hsw_power_well_regs,
3057 .hsw.idx = CNL_PW_CTL_IDX_AUX_F,
3058 },
3059 },
3060};
3061
3062static const struct i915_power_well_ops icl_combo_phy_aux_power_well_ops = {
3063 .sync_hw = hsw_power_well_sync_hw,
3064 .enable = icl_combo_phy_aux_power_well_enable,
3065 .disable = icl_combo_phy_aux_power_well_disable,
3066 .is_enabled = hsw_power_well_enabled,
3067};
3068
3069static const struct i915_power_well_ops icl_tc_phy_aux_power_well_ops = {
3070 .sync_hw = hsw_power_well_sync_hw,
3071 .enable = icl_tc_phy_aux_power_well_enable,
3072 .disable = hsw_power_well_disable,
3073 .is_enabled = hsw_power_well_enabled,
3074};
3075
3076static const struct i915_power_well_regs icl_aux_power_well_regs = {
3077 .bios = ICL_PWR_WELL_CTL_AUX1,
3078 .driver = ICL_PWR_WELL_CTL_AUX2,
3079 .debug = ICL_PWR_WELL_CTL_AUX4,
3080};
3081
3082static const struct i915_power_well_regs icl_ddi_power_well_regs = {
3083 .bios = ICL_PWR_WELL_CTL_DDI1,
3084 .driver = ICL_PWR_WELL_CTL_DDI2,
3085 .debug = ICL_PWR_WELL_CTL_DDI4,
3086};
3087
3088static const struct i915_power_well_desc icl_power_wells[] = {
3089 {
3090 .name = "always-on",
3091 .always_on = true,
3092 .domains = POWER_DOMAIN_MASK,
3093 .ops = &i9xx_always_on_power_well_ops,
3094 .id = DISP_PW_ID_NONE,
3095 },
3096 {
3097 .name = "power well 1",
3098
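		/* Handled by the DMC firmware */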
3099 .always_on = true,
3100 .domains = 0,
3101 .ops = &hsw_power_well_ops,
3102 .id = SKL_DISP_PW_1,
3103 {
3104 .hsw.regs = &hsw_power_well_regs,
3105 .hsw.idx = ICL_PW_CTL_IDX_PW_1,
3106 .hsw.has_fuses = true,
3107 },
3108 },
3109 {
3110 .name = "DC off",
3111 .domains = ICL_DISPLAY_DC_OFF_POWER_DOMAINS,
3112 .ops = &gen9_dc_off_power_well_ops,
3113 .id = DISP_PW_ID_NONE,
3114 },
3115 {
3116 .name = "power well 2",
3117 .domains = ICL_PW_2_POWER_DOMAINS,
3118 .ops = &hsw_power_well_ops,
3119 .id = SKL_DISP_PW_2,
3120 {
3121 .hsw.regs = &hsw_power_well_regs,
3122 .hsw.idx = ICL_PW_CTL_IDX_PW_2,
3123 .hsw.has_fuses = true,
3124 },
3125 },
3126 {
3127 .name = "power well 3",
3128 .domains = ICL_PW_3_POWER_DOMAINS,
3129 .ops = &hsw_power_well_ops,
3130 .id = DISP_PW_ID_NONE,
3131 {
3132 .hsw.regs = &hsw_power_well_regs,
3133 .hsw.idx = ICL_PW_CTL_IDX_PW_3,
3134 .hsw.irq_pipe_mask = BIT(PIPE_B),
3135 .hsw.has_vga = true,
3136 .hsw.has_fuses = true,
3137 },
3138 },
3139 {
3140 .name = "DDI A IO",
3141 .domains = ICL_DDI_IO_A_POWER_DOMAINS,
3142 .ops = &hsw_power_well_ops,
3143 .id = DISP_PW_ID_NONE,
3144 {
3145 .hsw.regs = &icl_ddi_power_well_regs,
3146 .hsw.idx = ICL_PW_CTL_IDX_DDI_A,
3147 },
3148 },
3149 {
3150 .name = "DDI B IO",
3151 .domains = ICL_DDI_IO_B_POWER_DOMAINS,
3152 .ops = &hsw_power_well_ops,
3153 .id = DISP_PW_ID_NONE,
3154 {
3155 .hsw.regs = &icl_ddi_power_well_regs,
3156 .hsw.idx = ICL_PW_CTL_IDX_DDI_B,
3157 },
3158 },
3159 {
3160 .name = "DDI C IO",
3161 .domains = ICL_DDI_IO_C_POWER_DOMAINS,
3162 .ops = &hsw_power_well_ops,
3163 .id = DISP_PW_ID_NONE,
3164 {
3165 .hsw.regs = &icl_ddi_power_well_regs,
3166 .hsw.idx = ICL_PW_CTL_IDX_DDI_C,
3167 },
3168 },
3169 {
3170 .name = "DDI D IO",
3171 .domains = ICL_DDI_IO_D_POWER_DOMAINS,
3172 .ops = &hsw_power_well_ops,
3173 .id = DISP_PW_ID_NONE,
3174 {
3175 .hsw.regs = &icl_ddi_power_well_regs,
3176 .hsw.idx = ICL_PW_CTL_IDX_DDI_D,
3177 },
3178 },
3179 {
3180 .name = "DDI E IO",
3181 .domains = ICL_DDI_IO_E_POWER_DOMAINS,
3182 .ops = &hsw_power_well_ops,
3183 .id = DISP_PW_ID_NONE,
3184 {
3185 .hsw.regs = &icl_ddi_power_well_regs,
3186 .hsw.idx = ICL_PW_CTL_IDX_DDI_E,
3187 },
3188 },
3189 {
3190 .name = "DDI F IO",
3191 .domains = ICL_DDI_IO_F_POWER_DOMAINS,
3192 .ops = &hsw_power_well_ops,
3193 .id = DISP_PW_ID_NONE,
3194 {
3195 .hsw.regs = &icl_ddi_power_well_regs,
3196 .hsw.idx = ICL_PW_CTL_IDX_DDI_F,
3197 },
3198 },
3199 {
3200 .name = "AUX A",
3201 .domains = ICL_AUX_A_IO_POWER_DOMAINS,
3202 .ops = &icl_combo_phy_aux_power_well_ops,
3203 .id = DISP_PW_ID_NONE,
3204 {
3205 .hsw.regs = &icl_aux_power_well_regs,
3206 .hsw.idx = ICL_PW_CTL_IDX_AUX_A,
3207 },
3208 },
3209 {
3210 .name = "AUX B",
3211 .domains = ICL_AUX_B_IO_POWER_DOMAINS,
3212 .ops = &icl_combo_phy_aux_power_well_ops,
3213 .id = DISP_PW_ID_NONE,
3214 {
3215 .hsw.regs = &icl_aux_power_well_regs,
3216 .hsw.idx = ICL_PW_CTL_IDX_AUX_B,
3217 },
3218 },
3219 {
3220 .name = "AUX C",
3221 .domains = ICL_AUX_C_IO_POWER_DOMAINS,
3222 .ops = &icl_tc_phy_aux_power_well_ops,
3223 .id = DISP_PW_ID_NONE,
3224 {
3225 .hsw.regs = &icl_aux_power_well_regs,
3226 .hsw.idx = ICL_PW_CTL_IDX_AUX_C,
3227 .hsw.is_tc_tbt = false,
3228 },
3229 },
3230 {
3231 .name = "AUX D",
3232 .domains = ICL_AUX_D_IO_POWER_DOMAINS,
3233 .ops = &icl_tc_phy_aux_power_well_ops,
3234 .id = DISP_PW_ID_NONE,
3235 {
3236 .hsw.regs = &icl_aux_power_well_regs,
3237 .hsw.idx = ICL_PW_CTL_IDX_AUX_D,
3238 .hsw.is_tc_tbt = false,
3239 },
3240 },
3241 {
3242 .name = "AUX E",
3243 .domains = ICL_AUX_E_IO_POWER_DOMAINS,
3244 .ops = &icl_tc_phy_aux_power_well_ops,
3245 .id = DISP_PW_ID_NONE,
3246 {
3247 .hsw.regs = &icl_aux_power_well_regs,
3248 .hsw.idx = ICL_PW_CTL_IDX_AUX_E,
3249 .hsw.is_tc_tbt = false,
3250 },
3251 },
3252 {
3253 .name = "AUX F",
3254 .domains = ICL_AUX_F_IO_POWER_DOMAINS,
3255 .ops = &icl_tc_phy_aux_power_well_ops,
3256 .id = DISP_PW_ID_NONE,
3257 {
3258 .hsw.regs = &icl_aux_power_well_regs,
3259 .hsw.idx = ICL_PW_CTL_IDX_AUX_F,
3260 .hsw.is_tc_tbt = false,
3261 },
3262 },
3263 {
3264 .name = "AUX TBT1",
3265 .domains = ICL_AUX_TBT1_IO_POWER_DOMAINS,
3266 .ops = &icl_tc_phy_aux_power_well_ops,
3267 .id = DISP_PW_ID_NONE,
3268 {
3269 .hsw.regs = &icl_aux_power_well_regs,
3270 .hsw.idx = ICL_PW_CTL_IDX_AUX_TBT1,
3271 .hsw.is_tc_tbt = true,
3272 },
3273 },
3274 {
3275 .name = "AUX TBT2",
3276 .domains = ICL_AUX_TBT2_IO_POWER_DOMAINS,
3277 .ops = &icl_tc_phy_aux_power_well_ops,
3278 .id = DISP_PW_ID_NONE,
3279 {
3280 .hsw.regs = &icl_aux_power_well_regs,
3281 .hsw.idx = ICL_PW_CTL_IDX_AUX_TBT2,
3282 .hsw.is_tc_tbt = true,
3283 },
3284 },
3285 {
3286 .name = "AUX TBT3",
3287 .domains = ICL_AUX_TBT3_IO_POWER_DOMAINS,
3288 .ops = &icl_tc_phy_aux_power_well_ops,
3289 .id = DISP_PW_ID_NONE,
3290 {
3291 .hsw.regs = &icl_aux_power_well_regs,
3292 .hsw.idx = ICL_PW_CTL_IDX_AUX_TBT3,
3293 .hsw.is_tc_tbt = true,
3294 },
3295 },
3296 {
3297 .name = "AUX TBT4",
3298 .domains = ICL_AUX_TBT4_IO_POWER_DOMAINS,
3299 .ops = &icl_tc_phy_aux_power_well_ops,
3300 .id = DISP_PW_ID_NONE,
3301 {
3302 .hsw.regs = &icl_aux_power_well_regs,
3303 .hsw.idx = ICL_PW_CTL_IDX_AUX_TBT4,
3304 .hsw.is_tc_tbt = true,
3305 },
3306 },
3307 {
3308 .name = "power well 4",
3309 .domains = ICL_PW_4_POWER_DOMAINS,
3310 .ops = &hsw_power_well_ops,
3311 .id = DISP_PW_ID_NONE,
3312 {
3313 .hsw.regs = &hsw_power_well_regs,
3314 .hsw.idx = ICL_PW_CTL_IDX_PW_4,
3315 .hsw.has_fuses = true,
3316 .hsw.irq_pipe_mask = BIT(PIPE_C),
3317 },
3318 },
3319};
3320
3321static int
3322sanitize_disable_power_well_option(const struct drm_i915_private *dev_priv,
3323 int disable_power_well)
3324{
3325 if (disable_power_well >= 0)
3326 return !!disable_power_well;
3327
3328 return 1;
3329}
3330
3331static u32 get_allowed_dc_mask(const struct drm_i915_private *dev_priv,
3332 int enable_dc)
3333{
3334 u32 mask;
3335 int requested_dc;
3336 int max_dc;
3337
3338 if (INTEL_GEN(dev_priv) >= 11) {
3339 max_dc = 2;
3340
3341
3342
3343
3344
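		/*
		 * DC9 has its own hardware flow that does not depend on the
		 * DMC firmware and is needed for system suspend/resume, so
		 * allow it unconditionally here.
		 */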
3345 mask = DC_STATE_EN_DC9;
3346 } else if (IS_GEN(dev_priv, 10) || IS_GEN9_BC(dev_priv)) {
3347 max_dc = 2;
3348 mask = 0;
3349 } else if (IS_GEN9_LP(dev_priv)) {
3350 max_dc = 1;
3351 mask = DC_STATE_EN_DC9;
3352 } else {
3353 max_dc = 0;
3354 mask = 0;
3355 }
3356
3357 if (!i915_modparams.disable_power_well)
3358 max_dc = 0;
3359
3360 if (enable_dc >= 0 && enable_dc <= max_dc) {
3361 requested_dc = enable_dc;
3362 } else if (enable_dc == -1) {
3363 requested_dc = max_dc;
3364 } else if (enable_dc > max_dc && enable_dc <= 2) {
3365 DRM_DEBUG_KMS("Adjusting requested max DC state (%d->%d)\n",
3366 enable_dc, max_dc);
3367 requested_dc = max_dc;
3368 } else {
3369 DRM_ERROR("Unexpected value for enable_dc (%d)\n", enable_dc);
3370 requested_dc = max_dc;
3371 }
3372
3373 if (requested_dc > 1)
3374 mask |= DC_STATE_EN_UPTO_DC6;
3375 if (requested_dc > 0)
3376 mask |= DC_STATE_EN_UPTO_DC5;
3377
3378 DRM_DEBUG_KMS("Allowed DC state mask %02x\n", mask);
3379
3380 return mask;
3381}
3382
3383static int
3384__set_power_wells(struct i915_power_domains *power_domains,
3385 const struct i915_power_well_desc *power_well_descs,
3386 int power_well_count)
3387{
3388 u64 power_well_ids = 0;
3389 int i;
3390
3391 power_domains->power_well_count = power_well_count;
3392 power_domains->power_wells =
3393 kcalloc(power_well_count,
3394 sizeof(*power_domains->power_wells),
3395 GFP_KERNEL);
3396 if (!power_domains->power_wells)
3397 return -ENOMEM;
3398
3399 for (i = 0; i < power_well_count; i++) {
3400 enum i915_power_well_id id = power_well_descs[i].id;
3401
3402 power_domains->power_wells[i].desc = &power_well_descs[i];
3403
3404 if (id == DISP_PW_ID_NONE)
3405 continue;
3406
3407 WARN_ON(id >= sizeof(power_well_ids) * 8);
3408 WARN_ON(power_well_ids & BIT_ULL(id));
3409 power_well_ids |= BIT_ULL(id);
3410 }
3411
3412 return 0;
3413}
3414
3415#define set_power_wells(power_domains, __power_well_descs) \
3416 __set_power_wells(power_domains, __power_well_descs, \
3417 ARRAY_SIZE(__power_well_descs))
3418
3419
3420
3421
3422
3423
3424
3425
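/**
 * intel_power_domains_init - initializes the power domain structures
 * @dev_priv: i915 device instance
 *
 * Initializes the power domain structures for @dev_priv depending upon the
 * supported platform.
 */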
3426int intel_power_domains_init(struct drm_i915_private *dev_priv)
3427{
3428 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3429 int err;
3430
3431 i915_modparams.disable_power_well =
3432 sanitize_disable_power_well_option(dev_priv,
3433 i915_modparams.disable_power_well);
3434 dev_priv->csr.allowed_dc_mask =
3435 get_allowed_dc_mask(dev_priv, i915_modparams.enable_dc);
3436
3437 BUILD_BUG_ON(POWER_DOMAIN_NUM > 64);
3438
3439 mutex_init(&power_domains->lock);
3440
3441
3442
3443
3444
3445 if (IS_ICELAKE(dev_priv)) {
3446 err = set_power_wells(power_domains, icl_power_wells);
3447 } else if (IS_CANNONLAKE(dev_priv)) {
3448 err = set_power_wells(power_domains, cnl_power_wells);
3449
3450
3451
3452
3453
3454
3455
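		/*
		 * The DDI F IO and AUX F wells are the last two entries in
		 * cnl_power_wells; drop them on SKUs without port F so we
		 * never wait on wells for a port that is not there.
		 */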
3456 if (!IS_CNL_WITH_PORT_F(dev_priv))
3457 power_domains->power_well_count -= 2;
3458 } else if (IS_GEMINILAKE(dev_priv)) {
3459 err = set_power_wells(power_domains, glk_power_wells);
3460 } else if (IS_BROXTON(dev_priv)) {
3461 err = set_power_wells(power_domains, bxt_power_wells);
3462 } else if (IS_GEN9_BC(dev_priv)) {
3463 err = set_power_wells(power_domains, skl_power_wells);
3464 } else if (IS_CHERRYVIEW(dev_priv)) {
3465 err = set_power_wells(power_domains, chv_power_wells);
3466 } else if (IS_BROADWELL(dev_priv)) {
3467 err = set_power_wells(power_domains, bdw_power_wells);
3468 } else if (IS_HASWELL(dev_priv)) {
3469 err = set_power_wells(power_domains, hsw_power_wells);
3470 } else if (IS_VALLEYVIEW(dev_priv)) {
3471 err = set_power_wells(power_domains, vlv_power_wells);
3472 } else if (IS_I830(dev_priv)) {
3473 err = set_power_wells(power_domains, i830_power_wells);
3474 } else {
3475 err = set_power_wells(power_domains, i9xx_always_on_power_well);
3476 }
3477
3478 return err;
3479}
3480
3481
3482
3483
3484
3485
3486
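/**
 * intel_power_domains_cleanup - clean up power domains resources
 * @dev_priv: i915 device instance
 *
 * Release any resources acquired by intel_power_domains_init().
 */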
3487void intel_power_domains_cleanup(struct drm_i915_private *dev_priv)
3488{
3489 kfree(dev_priv->power_domains.power_wells);
3490}
3491
3492static void intel_power_domains_sync_hw(struct drm_i915_private *dev_priv)
3493{
3494 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3495 struct i915_power_well *power_well;
3496
3497 mutex_lock(&power_domains->lock);
3498 for_each_power_well(dev_priv, power_well) {
3499 power_well->desc->ops->sync_hw(dev_priv, power_well);
3500 power_well->hw_enabled =
3501 power_well->desc->ops->is_enabled(dev_priv, power_well);
3502 }
3503 mutex_unlock(&power_domains->lock);
3504}
3505
3506static inline
3507bool intel_dbuf_slice_set(struct drm_i915_private *dev_priv,
3508 i915_reg_t reg, bool enable)
3509{
3510 u32 val, status;
3511
3512 val = I915_READ(reg);
3513 val = enable ? (val | DBUF_POWER_REQUEST) : (val & ~DBUF_POWER_REQUEST);
3514 I915_WRITE(reg, val);
3515 POSTING_READ(reg);
3516 udelay(10);
3517
3518 status = I915_READ(reg) & DBUF_POWER_STATE;
3519 if ((enable && !status) || (!enable && status)) {
3520 DRM_ERROR("DBus power %s timeout!\n",
3521 enable ? "enable" : "disable");
3522 return false;
3523 }
3524 return true;
3525}
3526
3527static void gen9_dbuf_enable(struct drm_i915_private *dev_priv)
3528{
3529 intel_dbuf_slice_set(dev_priv, DBUF_CTL, true);
3530}
3531
3532static void gen9_dbuf_disable(struct drm_i915_private *dev_priv)
3533{
3534 intel_dbuf_slice_set(dev_priv, DBUF_CTL, false);
3535}
3536
3537static u8 intel_dbuf_max_slices(struct drm_i915_private *dev_priv)
3538{
3539 if (INTEL_GEN(dev_priv) < 11)
3540 return 1;
3541 return 2;
3542}
3543
3544void icl_dbuf_slices_update(struct drm_i915_private *dev_priv,
3545 u8 req_slices)
3546{
3547 const u8 hw_enabled_slices = dev_priv->wm.skl_hw.ddb.enabled_slices;
3548 bool ret;
3549
3550 if (req_slices > intel_dbuf_max_slices(dev_priv)) {
3551 DRM_ERROR("Invalid number of dbuf slices requested\n");
3552 return;
3553 }
3554
3555 if (req_slices == hw_enabled_slices || req_slices == 0)
3556 return;
3557
3558 if (req_slices > hw_enabled_slices)
3559 ret = intel_dbuf_slice_set(dev_priv, DBUF_CTL_S2, true);
3560 else
3561 ret = intel_dbuf_slice_set(dev_priv, DBUF_CTL_S2, false);
3562
3563 if (ret)
3564 dev_priv->wm.skl_hw.ddb.enabled_slices = req_slices;
3565}
3566
3567static void icl_dbuf_enable(struct drm_i915_private *dev_priv)
3568{
3569 I915_WRITE(DBUF_CTL_S1, I915_READ(DBUF_CTL_S1) | DBUF_POWER_REQUEST);
3570 I915_WRITE(DBUF_CTL_S2, I915_READ(DBUF_CTL_S2) | DBUF_POWER_REQUEST);
3571 POSTING_READ(DBUF_CTL_S2);
3572
3573 udelay(10);
3574
3575 if (!(I915_READ(DBUF_CTL_S1) & DBUF_POWER_STATE) ||
3576 !(I915_READ(DBUF_CTL_S2) & DBUF_POWER_STATE))
3577 DRM_ERROR("DBuf power enable timeout\n");
3578 else
3579
3580
3581
3582
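		/*
		 * Note: report a single enabled slice for now, even though
		 * both slices were powered up, presumably because the dbuf
		 * allocation code only handles one slice.
		 */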
3583 dev_priv->wm.skl_hw.ddb.enabled_slices = 1;
3584}
3585
3586static void icl_dbuf_disable(struct drm_i915_private *dev_priv)
3587{
3588 I915_WRITE(DBUF_CTL_S1, I915_READ(DBUF_CTL_S1) & ~DBUF_POWER_REQUEST);
3589 I915_WRITE(DBUF_CTL_S2, I915_READ(DBUF_CTL_S2) & ~DBUF_POWER_REQUEST);
3590 POSTING_READ(DBUF_CTL_S2);
3591
3592 udelay(10);
3593
3594 if ((I915_READ(DBUF_CTL_S1) & DBUF_POWER_STATE) ||
3595 (I915_READ(DBUF_CTL_S2) & DBUF_POWER_STATE))
3596 DRM_ERROR("DBuf power disable timeout!\n");
3597 else
3598
3599
3600
3601
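		/*
		 * Note: keep reporting one enabled slice after powering the
		 * slices down, presumably because the dbuf allocation code
		 * does not expect a count of zero.
		 */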
3602 dev_priv->wm.skl_hw.ddb.enabled_slices = 1;
3603}
3604
3605static void icl_mbus_init(struct drm_i915_private *dev_priv)
3606{
3607 u32 val;
3608
3609 val = MBUS_ABOX_BT_CREDIT_POOL1(16) |
3610 MBUS_ABOX_BT_CREDIT_POOL2(16) |
3611 MBUS_ABOX_B_CREDIT(1) |
3612 MBUS_ABOX_BW_CREDIT(1);
3613
3614 I915_WRITE(MBUS_ABOX_CTL, val);
3615}
3616
3617static void intel_pch_reset_handshake(struct drm_i915_private *dev_priv,
3618 bool enable)
3619{
3620 i915_reg_t reg;
3621 u32 reset_bits, val;
3622
3623 if (IS_IVYBRIDGE(dev_priv)) {
3624 reg = GEN7_MSG_CTL;
3625 reset_bits = WAIT_FOR_PCH_FLR_ACK | WAIT_FOR_PCH_RESET_ACK;
3626 } else {
3627 reg = HSW_NDE_RSTWRN_OPT;
3628 reset_bits = RESET_PCH_HANDSHAKE_ENABLE;
3629 }
3630
3631 val = I915_READ(reg);
3632
3633 if (enable)
3634 val |= reset_bits;
3635 else
3636 val &= ~reset_bits;
3637
3638 I915_WRITE(reg, val);
3639}
3640
3641static void skl_display_core_init(struct drm_i915_private *dev_priv,
3642 bool resume)
3643{
3644 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3645 struct i915_power_well *well;
3646
3647 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3648
3649
3650 intel_pch_reset_handshake(dev_priv, !HAS_PCH_NOP(dev_priv));
3651
3652
3653 mutex_lock(&power_domains->lock);
3654
3655 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3656 intel_power_well_enable(dev_priv, well);
3657
3658 well = lookup_power_well(dev_priv, SKL_DISP_PW_MISC_IO);
3659 intel_power_well_enable(dev_priv, well);
3660
3661 mutex_unlock(&power_domains->lock);
3662
3663 skl_init_cdclk(dev_priv);
3664
3665 gen9_dbuf_enable(dev_priv);
3666
3667 if (resume && dev_priv->csr.dmc_payload)
3668 intel_csr_load_program(dev_priv);
3669}
3670
3671static void skl_display_core_uninit(struct drm_i915_private *dev_priv)
3672{
3673 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3674 struct i915_power_well *well;
3675
3676 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3677
3678 gen9_dbuf_disable(dev_priv);
3679
3680 skl_uninit_cdclk(dev_priv);
3681
3682
3683
3684
3685 mutex_lock(&power_domains->lock);
3686
3687
3688
3689
3690
3691
3692
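	/*
	 * Remove only the driver's request on power well 1 and leave the
	 * MISC IO well alone; power well 1 may stay enabled after this if
	 * the DMC firmware still has its own request on it.
	 */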
3693 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3694 intel_power_well_disable(dev_priv, well);
3695
3696 mutex_unlock(&power_domains->lock);
3697
3698 usleep_range(10, 30);
3699}
3700
3701void bxt_display_core_init(struct drm_i915_private *dev_priv,
3702 bool resume)
3703{
3704 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3705 struct i915_power_well *well;
3706
3707 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3708
3709
3710
3711
3712
3713
3714
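	/*
	 * Keep the PCH reset handshake disabled on BXT: there is no PCH on
	 * this SoC to acknowledge the handshake.
	 */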
3715 intel_pch_reset_handshake(dev_priv, false);
3716
3717
3718 mutex_lock(&power_domains->lock);
3719
3720 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3721 intel_power_well_enable(dev_priv, well);
3722
3723 mutex_unlock(&power_domains->lock);
3724
3725 bxt_init_cdclk(dev_priv);
3726
3727 gen9_dbuf_enable(dev_priv);
3728
3729 if (resume && dev_priv->csr.dmc_payload)
3730 intel_csr_load_program(dev_priv);
3731}
3732
3733void bxt_display_core_uninit(struct drm_i915_private *dev_priv)
3734{
3735 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3736 struct i915_power_well *well;
3737
3738 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3739
3740 gen9_dbuf_disable(dev_priv);
3741
3742 bxt_uninit_cdclk(dev_priv);
3743
3744
3745
3746
3747
3748
3749
3750
3751 mutex_lock(&power_domains->lock);
3752
3753 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3754 intel_power_well_disable(dev_priv, well);
3755
3756 mutex_unlock(&power_domains->lock);
3757
3758 usleep_range(10, 30);
3759}
3760
3761static void cnl_display_core_init(struct drm_i915_private *dev_priv, bool resume)
3762{
3763 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3764 struct i915_power_well *well;
3765
3766 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3767
3768
3769 intel_pch_reset_handshake(dev_priv, !HAS_PCH_NOP(dev_priv));
3770
3771
3772 cnl_combo_phys_init(dev_priv);
3773
3774
3775
3776
3777
3778 mutex_lock(&power_domains->lock);
3779 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3780 intel_power_well_enable(dev_priv, well);
3781 mutex_unlock(&power_domains->lock);
3782
3783
3784 cnl_init_cdclk(dev_priv);
3785
3786
3787 gen9_dbuf_enable(dev_priv);
3788
3789 if (resume && dev_priv->csr.dmc_payload)
3790 intel_csr_load_program(dev_priv);
3791}
3792
3793static void cnl_display_core_uninit(struct drm_i915_private *dev_priv)
3794{
3795 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3796 struct i915_power_well *well;
3797
3798 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3799
3800
3801
3802
3803 gen9_dbuf_disable(dev_priv);
3804
3805
3806 cnl_uninit_cdclk(dev_priv);
3807
3808
3809
3810
3811
3812
3813 mutex_lock(&power_domains->lock);
3814 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3815 intel_power_well_disable(dev_priv, well);
3816 mutex_unlock(&power_domains->lock);
3817
3818 usleep_range(10, 30);
3819
3820
3821 cnl_combo_phys_uninit(dev_priv);
3822}
3823
3824void icl_display_core_init(struct drm_i915_private *dev_priv,
3825 bool resume)
3826{
3827 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3828 struct i915_power_well *well;
3829
3830 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3831
3832
3833 intel_pch_reset_handshake(dev_priv, !HAS_PCH_NOP(dev_priv));
3834
3835
3836 icl_combo_phys_init(dev_priv);
3837
3838
3839
3840
3841
3842 mutex_lock(&power_domains->lock);
3843 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3844 intel_power_well_enable(dev_priv, well);
3845 mutex_unlock(&power_domains->lock);
3846
3847
3848 icl_init_cdclk(dev_priv);
3849
3850
3851 icl_dbuf_enable(dev_priv);
3852
3853
3854 icl_mbus_init(dev_priv);
3855
3856 if (resume && dev_priv->csr.dmc_payload)
3857 intel_csr_load_program(dev_priv);
3858}
3859
3860void icl_display_core_uninit(struct drm_i915_private *dev_priv)
3861{
3862 struct i915_power_domains *power_domains = &dev_priv->power_domains;
3863 struct i915_power_well *well;
3864
3865 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
3866
3867
3868
3869
3870 icl_dbuf_disable(dev_priv);
3871
3872
3873 icl_uninit_cdclk(dev_priv);
3874
3875
3876
3877
3878
3879
3880 mutex_lock(&power_domains->lock);
3881 well = lookup_power_well(dev_priv, SKL_DISP_PW_1);
3882 intel_power_well_disable(dev_priv, well);
3883 mutex_unlock(&power_domains->lock);
3884
3885
3886 icl_combo_phys_uninit(dev_priv);
3887}
3888
3889static void chv_phy_control_init(struct drm_i915_private *dev_priv)
3890{
3891 struct i915_power_well *cmn_bc =
3892 lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
3893 struct i915_power_well *cmn_d =
3894 lookup_power_well(dev_priv, CHV_DISP_PW_DPIO_CMN_D);
3895
3896
3897
3898
3899
3900
3901
3902
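	/*
	 * Build the initial DISPLAY_PHY_CONTROL value from the current power
	 * well and lane status; the driver maintains this shadow copy in
	 * dev_priv->chv_phy_control instead of reading the register back.
	 */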
3903 dev_priv->chv_phy_control =
3904 PHY_LDO_SEQ_DELAY(PHY_LDO_DELAY_600NS, DPIO_PHY0) |
3905 PHY_LDO_SEQ_DELAY(PHY_LDO_DELAY_600NS, DPIO_PHY1) |
3906 PHY_CH_POWER_MODE(PHY_CH_DEEP_PSR, DPIO_PHY0, DPIO_CH0) |
3907 PHY_CH_POWER_MODE(PHY_CH_DEEP_PSR, DPIO_PHY0, DPIO_CH1) |
3908 PHY_CH_POWER_MODE(PHY_CH_DEEP_PSR, DPIO_PHY1, DPIO_CH0);
3909
3910
3911
3912
3913
3914
3915
3916
3917 if (cmn_bc->desc->ops->is_enabled(dev_priv, cmn_bc)) {
3918 u32 status = I915_READ(DPLL(PIPE_A));
3919 unsigned int mask;
3920
3921 mask = status & DPLL_PORTB_READY_MASK;
3922 if (mask == 0xf)
3923 mask = 0x0;
3924 else
3925 dev_priv->chv_phy_control |=
3926 PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH0);
3927
3928 dev_priv->chv_phy_control |=
3929 PHY_CH_POWER_DOWN_OVRD(mask, DPIO_PHY0, DPIO_CH0);
3930
3931 mask = (status & DPLL_PORTC_READY_MASK) >> 4;
3932 if (mask == 0xf)
3933 mask = 0x0;
3934 else
3935 dev_priv->chv_phy_control |=
3936 PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY0, DPIO_CH1);
3937
3938 dev_priv->chv_phy_control |=
3939 PHY_CH_POWER_DOWN_OVRD(mask, DPIO_PHY0, DPIO_CH1);
3940
3941 dev_priv->chv_phy_control |= PHY_COM_LANE_RESET_DEASSERT(DPIO_PHY0);
3942
3943 dev_priv->chv_phy_assert[DPIO_PHY0] = false;
3944 } else {
3945 dev_priv->chv_phy_assert[DPIO_PHY0] = true;
3946 }
3947
3948 if (cmn_d->desc->ops->is_enabled(dev_priv, cmn_d)) {
3949 u32 status = I915_READ(DPIO_PHY_STATUS);
3950 unsigned int mask;
3951
3952 mask = status & DPLL_PORTD_READY_MASK;
3953
3954 if (mask == 0xf)
3955 mask = 0x0;
3956 else
3957 dev_priv->chv_phy_control |=
3958 PHY_CH_POWER_DOWN_OVRD_EN(DPIO_PHY1, DPIO_CH0);
3959
3960 dev_priv->chv_phy_control |=
3961 PHY_CH_POWER_DOWN_OVRD(mask, DPIO_PHY1, DPIO_CH0);
3962
3963 dev_priv->chv_phy_control |= PHY_COM_LANE_RESET_DEASSERT(DPIO_PHY1);
3964
3965 dev_priv->chv_phy_assert[DPIO_PHY1] = false;
3966 } else {
3967 dev_priv->chv_phy_assert[DPIO_PHY1] = true;
3968 }
3969
3970 I915_WRITE(DISPLAY_PHY_CONTROL, dev_priv->chv_phy_control);
3971
3972 DRM_DEBUG_KMS("Initial PHY_CONTROL=0x%08x\n",
3973 dev_priv->chv_phy_control);
3974}
3975
3976static void vlv_cmnlane_wa(struct drm_i915_private *dev_priv)
3977{
3978 struct i915_power_well *cmn =
3979 lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
3980 struct i915_power_well *disp2d =
3981 lookup_power_well(dev_priv, VLV_DISP_PW_DISP2D);
3982
3983
3984 if (cmn->desc->ops->is_enabled(dev_priv, cmn) &&
3985 disp2d->desc->ops->is_enabled(dev_priv, disp2d) &&
3986 I915_READ(DPIO_CTL) & DPIO_CMNRST)
3987 return;
3988
3989 DRM_DEBUG_KMS("toggling display PHY side reset\n");
3990
3991
3992 disp2d->desc->ops->enable(dev_priv, disp2d);
3993
3994
3995
3996
3997
3998
3999
4000
4001 cmn->desc->ops->disable(dev_priv, cmn);
4002}
4003
4004static void intel_power_domains_verify_state(struct drm_i915_private *dev_priv);
4005
4006
4007
4008
4009
4010
4011
4012
4013
4014
4015
4016
4017
4018
4019
4020
4021
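/**
 * intel_power_domains_init_hw - initialize hardware power domain state
 * @i915: i915 device instance
 * @resume: called from the resume path or not
 *
 * Initializes the hardware power domain state and grabs a reference on the
 * INIT power domain, keeping all power wells enabled until display HW
 * readout is complete and intel_power_domains_enable() is called.
 */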
4022void intel_power_domains_init_hw(struct drm_i915_private *i915, bool resume)
4023{
4024 struct i915_power_domains *power_domains = &i915->power_domains;
4025
4026 power_domains->initializing = true;
4027
4028 if (IS_ICELAKE(i915)) {
4029 icl_display_core_init(i915, resume);
4030 } else if (IS_CANNONLAKE(i915)) {
4031 cnl_display_core_init(i915, resume);
4032 } else if (IS_GEN9_BC(i915)) {
4033 skl_display_core_init(i915, resume);
4034 } else if (IS_GEN9_LP(i915)) {
4035 bxt_display_core_init(i915, resume);
4036 } else if (IS_CHERRYVIEW(i915)) {
4037 mutex_lock(&power_domains->lock);
4038 chv_phy_control_init(i915);
4039 mutex_unlock(&power_domains->lock);
4040 } else if (IS_VALLEYVIEW(i915)) {
4041 mutex_lock(&power_domains->lock);
4042 vlv_cmnlane_wa(i915);
4043 mutex_unlock(&power_domains->lock);
4044 } else if (IS_IVYBRIDGE(i915) || INTEL_GEN(i915) >= 7) {
4045 intel_pch_reset_handshake(i915, !HAS_PCH_NOP(i915));
4046 }
4047
4048
4049
4050
4051
4052
4053
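	/*
	 * Keep all power wells enabled for any dependent HW access during
	 * initialization and to keep BIOS-enabled display resources powered
	 * until display HW readout is complete. This reference is dropped in
	 * intel_power_domains_enable().
	 */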
4054 power_domains->wakeref =
4055 intel_display_power_get(i915, POWER_DOMAIN_INIT);
4056
4057
4058 if (!i915_modparams.disable_power_well)
4059 intel_display_power_get(i915, POWER_DOMAIN_INIT);
4060 intel_power_domains_sync_hw(i915);
4061
4062 power_domains->initializing = false;
4063}
4064
4065
4066
4067
4068
4069
4070
4071
4072
4073
4074
4075
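/**
 * intel_power_domains_fini_hw - deinitialize hw power domain state
 * @i915: i915 device instance
 *
 * De-initializes the display power domain HW state set up by
 * intel_power_domains_init_hw(), releasing the remaining init-time
 * references.
 */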
4076void intel_power_domains_fini_hw(struct drm_i915_private *i915)
4077{
4078 intel_wakeref_t wakeref __maybe_unused =
4079 fetch_and_zero(&i915->power_domains.wakeref);
4080
4081
4082 if (!i915_modparams.disable_power_well)
4083 intel_display_power_put_unchecked(i915, POWER_DOMAIN_INIT);
4084
4085 intel_power_domains_verify_state(i915);
4086
4087
4088 intel_runtime_pm_put(i915, wakeref);
4089}
4090
4091
4092
4093
4094
4095
4096
4097
4098
4099
4100
4101
4102
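/**
 * intel_power_domains_enable - enable toggling of display power wells
 * @i915: i915 device instance
 *
 * Drop the INIT power domain reference held since
 * intel_power_domains_init_hw() (or intel_power_domains_disable()),
 * allowing unused display power wells to be turned off on demand.
 */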
4103void intel_power_domains_enable(struct drm_i915_private *i915)
4104{
4105 intel_wakeref_t wakeref __maybe_unused =
4106 fetch_and_zero(&i915->power_domains.wakeref);
4107
4108 intel_display_power_put(i915, POWER_DOMAIN_INIT, wakeref);
4109 intel_power_domains_verify_state(i915);
4110}
4111
4112
4113
4114
4115
4116
4117
4118
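/**
 * intel_power_domains_disable - disable toggling of display power wells
 * @i915: i915 device instance
 *
 * Grab an INIT power domain reference, keeping all display power wells
 * enabled until intel_power_domains_enable() is called again.
 */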
4119void intel_power_domains_disable(struct drm_i915_private *i915)
4120{
4121 struct i915_power_domains *power_domains = &i915->power_domains;
4122
4123 WARN_ON(power_domains->wakeref);
4124 power_domains->wakeref =
4125 intel_display_power_get(i915, POWER_DOMAIN_INIT);
4126
4127 intel_power_domains_verify_state(i915);
4128}
4129
4130
4131
4132
4133
4134
4135
4136
4137
4138
4139
4140
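/**
 * intel_power_domains_suspend - suspend power domain state
 * @i915: i915 device instance
 * @suspend_mode: specifies the target suspend state
 *
 * Suspend the display power domain state: drop the INIT reference and,
 * unless the DMC firmware can handle the low power state on its own,
 * deinitialize the display core. Must be paired with
 * intel_power_domains_resume().
 */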
4141void intel_power_domains_suspend(struct drm_i915_private *i915,
4142 enum i915_drm_suspend_mode suspend_mode)
4143{
4144 struct i915_power_domains *power_domains = &i915->power_domains;
4145 intel_wakeref_t wakeref __maybe_unused =
4146 fetch_and_zero(&power_domains->wakeref);
4147
4148 intel_display_power_put(i915, POWER_DOMAIN_INIT, wakeref);
4149
4150
4151
4152
4153
4154
4155
4156
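	/*
	 * In case of suspend-to-idle (aka S0ix) on a DMC platform without DC9
	 * support, don't manually deinit the power domains. The DMC firmware
	 * then stays active and powers down the HW resources as required for
	 * the deeper system power states.
	 */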
4157 if (!(i915->csr.allowed_dc_mask & DC_STATE_EN_DC9) &&
4158 suspend_mode == I915_DRM_SUSPEND_IDLE &&
4159 i915->csr.dmc_payload) {
4160 intel_power_domains_verify_state(i915);
4161 return;
4162 }
4163
4164
4165
4166
4167
4168 if (!i915_modparams.disable_power_well) {
4169 intel_display_power_put_unchecked(i915, POWER_DOMAIN_INIT);
4170 intel_power_domains_verify_state(i915);
4171 }
4172
4173 if (IS_ICELAKE(i915))
4174 icl_display_core_uninit(i915);
4175 else if (IS_CANNONLAKE(i915))
4176 cnl_display_core_uninit(i915);
4177 else if (IS_GEN9_BC(i915))
4178 skl_display_core_uninit(i915);
4179 else if (IS_GEN9_LP(i915))
4180 bxt_display_core_uninit(i915);
4181
4182 power_domains->display_core_suspended = true;
4183}
4184
4185
4186
4187
4188
4189
4190
4191
4192
4193
4194
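/**
 * intel_power_domains_resume - resume power domain state
 * @i915: i915 device instance
 *
 * Resume the display power domain state suspended by
 * intel_power_domains_suspend(): re-init the display core if it was
 * deinitialized, otherwise just re-take the INIT reference.
 */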
4195void intel_power_domains_resume(struct drm_i915_private *i915)
4196{
4197 struct i915_power_domains *power_domains = &i915->power_domains;
4198
4199 if (power_domains->display_core_suspended) {
4200 intel_power_domains_init_hw(i915, true);
4201 power_domains->display_core_suspended = false;
4202 } else {
4203 WARN_ON(power_domains->wakeref);
4204 power_domains->wakeref =
4205 intel_display_power_get(i915, POWER_DOMAIN_INIT);
4206 }
4207
4208 intel_power_domains_verify_state(i915);
4209}
4210
4211#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
4212
4213static void intel_power_domains_dump_info(struct drm_i915_private *i915)
4214{
4215 struct i915_power_domains *power_domains = &i915->power_domains;
4216 struct i915_power_well *power_well;
4217
4218 for_each_power_well(i915, power_well) {
4219 enum intel_display_power_domain domain;
4220
4221 DRM_DEBUG_DRIVER("%-25s %d\n",
4222 power_well->desc->name, power_well->count);
4223
4224 for_each_power_domain(domain, power_well->desc->domains)
4225 DRM_DEBUG_DRIVER(" %-23s %d\n",
4226 intel_display_power_domain_str(domain),
4227 power_domains->domain_use_count[domain]);
4228 }
4229}
4230
4231
4232
4233
4234
4235
4236
4237
4238
4239
4240
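/**
 * intel_power_domains_verify_state - verify the HW/SW state of all power wells
 * @i915: i915 device instance
 *
 * Verify that the reference count of each power well matches its HW enabled
 * state and the total refcount of the domains it belongs to, dumping the
 * per-domain use counts on the first mismatch.
 */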
4241static void intel_power_domains_verify_state(struct drm_i915_private *i915)
4242{
4243 struct i915_power_domains *power_domains = &i915->power_domains;
4244 struct i915_power_well *power_well;
4245 bool dump_domain_info;
4246
4247 mutex_lock(&power_domains->lock);
4248
4249 dump_domain_info = false;
4250 for_each_power_well(i915, power_well) {
4251 enum intel_display_power_domain domain;
4252 int domains_count;
4253 bool enabled;
4254
4255 enabled = power_well->desc->ops->is_enabled(i915, power_well);
4256 if ((power_well->count || power_well->desc->always_on) !=
4257 enabled)
4258 DRM_ERROR("power well %s state mismatch (refcount %d/enabled %d)",
4259 power_well->desc->name,
4260 power_well->count, enabled);

		domains_count = 0;
		for_each_power_domain(domain, power_well->desc->domains)
			domains_count += power_domains->domain_use_count[domain];

		if (power_well->count != domains_count) {
			DRM_ERROR("power well %s refcount/domain refcount mismatch "
				  "(refcount %d/domains refcount %d)\n",
				  power_well->desc->name, power_well->count,
				  domains_count);
			dump_domain_info = true;
		}
	}

	if (dump_domain_info) {
		static bool dumped;

		if (!dumped) {
			intel_power_domains_dump_info(i915);
			dumped = true;
		}
	}

	mutex_unlock(&power_domains->lock);
}

#else

static void intel_power_domains_verify_state(struct drm_i915_private *i915)
{
}

#endif

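/**
 * intel_runtime_pm_get - grab a runtime pm reference
 * @i915: i915 device instance
 *
 * Take a device-level runtime pm reference, resuming the device if it was
 * suspended. Every reference obtained this way must be released with a
 * matching intel_runtime_pm_put().
 *
 * Returns: a wakeref cookie to pass to intel_runtime_pm_put().
 */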
intel_wakeref_t intel_runtime_pm_get(struct drm_i915_private *i915)
{
	struct pci_dev *pdev = i915->drm.pdev;
	struct device *kdev = &pdev->dev;
	int ret;

	ret = pm_runtime_get_sync(kdev);
	WARN_ONCE(ret < 0, "pm_runtime_get_sync() failed: %d\n", ret);

	return track_intel_runtime_pm_wakeref(i915);
}

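/**
 * intel_runtime_pm_get_if_in_use - grab a runtime pm reference if device is in use
 * @i915: i915 device instance
 *
 * Take a device-level runtime pm reference only if the device is already
 * powered up and in use; the device is never resumed here.
 *
 * Returns: a wakeref cookie to pass to intel_runtime_pm_put(), or 0 if no
 * reference was taken.
 */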
intel_wakeref_t intel_runtime_pm_get_if_in_use(struct drm_i915_private *i915)
{
	if (IS_ENABLED(CONFIG_PM)) {
		struct pci_dev *pdev = i915->drm.pdev;
		struct device *kdev = &pdev->dev;

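		/*
		 * pm_runtime_get_if_in_use() bumps the usage count only if
		 * the device is active and already in use; it returns 0 or a
		 * negative error (e.g. when runtime PM is disabled by the
		 * core), in which case report failure without touching the
		 * hardware.
		 */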
		if (pm_runtime_get_if_in_use(kdev) <= 0)
			return 0;
	}

	return track_intel_runtime_pm_wakeref(i915);
}

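/**
 * intel_runtime_pm_get_noresume - grab a runtime pm reference without resuming
 * @i915: i915 device instance
 *
 * Take a device-level runtime pm reference without resuming the device; the
 * caller must already hold a wakeref (asserted below), i.e. the device must
 * be known to be powered up.
 *
 * Returns: a wakeref cookie to pass to intel_runtime_pm_put().
 */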
intel_wakeref_t intel_runtime_pm_get_noresume(struct drm_i915_private *i915)
{
	struct pci_dev *pdev = i915->drm.pdev;
	struct device *kdev = &pdev->dev;

	assert_rpm_wakelock_held(i915);
	pm_runtime_get_noresume(kdev);

	return track_intel_runtime_pm_wakeref(i915);
}

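/**
 * intel_runtime_pm_put_unchecked - release an unchecked runtime pm reference
 * @i915: i915 device instance
 *
 * Drop a device-level runtime pm reference without cancelling a tracked
 * wakeref cookie, and allow the device to autosuspend once the last
 * reference is gone.
 */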
void intel_runtime_pm_put_unchecked(struct drm_i915_private *i915)
{
	struct pci_dev *pdev = i915->drm.pdev;
	struct device *kdev = &pdev->dev;

	untrack_intel_runtime_pm_wakeref(i915);

	pm_runtime_mark_last_busy(kdev);
	pm_runtime_put_autosuspend(kdev);
}

#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
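/*
 * Debug-build variant of the wakeref release: cancel the tracking entry for
 * @wref before dropping the underlying runtime pm reference.
 */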
void intel_runtime_pm_put(struct drm_i915_private *i915, intel_wakeref_t wref)
{
	cancel_intel_runtime_pm_wakeref(i915, wref);
	intel_runtime_pm_put_unchecked(i915);
}
#endif

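/**
 * intel_runtime_pm_enable - enable runtime pm
 * @i915: i915 device instance
 *
 * Enable runtime pm towards the end of driver load: configure autosuspend
 * (or pin a wakeref on platforms without runtime pm support) and drop the
 * reference held across driver load.
 */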
void intel_runtime_pm_enable(struct drm_i915_private *i915)
{
	struct pci_dev *pdev = i915->drm.pdev;
	struct device *kdev = &pdev->dev;

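	/*
	 * Ask the PM core to never skip our system suspend/resume callbacks
	 * (i.e. opt out of the direct-complete optimization), even if the
	 * device is already runtime suspended when system suspend starts.
	 */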
	dev_pm_set_driver_flags(kdev, DPM_FLAG_NEVER_SKIP);

	pm_runtime_set_autosuspend_delay(kdev, 10000);
	pm_runtime_mark_last_busy(kdev);

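	/*
	 * On platforms without runtime pm support take a permanent RPM
	 * reference so the device stays powered up; otherwise enable
	 * autosuspend with the delay configured above.
	 */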
	if (!HAS_RUNTIME_PM(i915)) {
		int ret;

		pm_runtime_dont_use_autosuspend(kdev);
		ret = pm_runtime_get_sync(kdev);
		WARN(ret < 0, "pm_runtime_get_sync() failed: %d\n", ret);
	} else {
		pm_runtime_use_autosuspend(kdev);
	}

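	/*
	 * The PM core holds a reference across driver load; drop it here and
	 * take it back in intel_runtime_pm_disable() when handing runtime pm
	 * control back to the core.
	 */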
	pm_runtime_put_autosuspend(kdev);
}

void intel_runtime_pm_disable(struct drm_i915_private *i915)
{
	struct pci_dev *pdev = i915->drm.pdev;
	struct device *kdev = &pdev->dev;

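	/* Transfer RPM ownership back to the core. */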
	WARN(pm_runtime_get_sync(kdev) < 0,
	     "Failed to pass rpm ownership back to core\n");

	pm_runtime_dont_use_autosuspend(kdev);

	if (!HAS_RUNTIME_PM(i915))
		pm_runtime_put(kdev);
}

void intel_runtime_pm_cleanup(struct drm_i915_private *i915)
{
	struct i915_runtime_pm *rpm = &i915->runtime_pm;
	int count;

	count = atomic_fetch_inc(&rpm->wakeref_count); /* balance the untrack below */
	WARN(count,
	     "i915->runtime_pm.wakeref_count=%d on cleanup\n",
	     count);

	untrack_intel_runtime_pm_wakeref(i915);
}

void intel_runtime_pm_init_early(struct drm_i915_private *i915)
{
	init_intel_runtime_pm_wakeref(i915);
}