#include "intel_display_types.h"
#include "intel_dpio_phy.h"
#include "intel_dpll_mgr.h"

/**
 * DOC: Display PLLs
 *
 * Display PLLs used for driving outputs vary by platform. While some have
 * per-pipe or per-encoder dedicated PLLs, others allow the use of any PLL
 * from a pool. In the latter scenario, it is required that we track the
 * state of the shared PLL in order to prevent clobbering when we get
 * assigned to use a PLL that's already in use. For this purpose, the state
 * of a shared PLL is tracked in &struct intel_shared_dpll_state and the
 * PLL itself in &struct intel_shared_dpll.
 *
 * Each platform provides its set of PLLs, and the hooks used to compute,
 * reserve and release them, through a &struct intel_dpll_mgr, defined below.
 */

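/*
 * Per-platform shared-DPLL manager. Each platform provides its table of
 * available PLLs (@dpll_info) plus hooks to reserve (@get_dplls) and release
 * (@put_dplls) PLLs for a CRTC in an atomic state, to pick the active PLL
 * for a port (@update_active_dpll), to refresh the reference clock
 * bookkeeping (@update_ref_clks) and to dump a PLL's register state
 * (@dump_hw_state).
 */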
struct intel_dpll_mgr {
	const struct dpll_info *dpll_info;

	bool (*get_dplls)(struct intel_atomic_state *state,
			  struct intel_crtc *crtc,
			  struct intel_encoder *encoder);
	void (*put_dplls)(struct intel_atomic_state *state,
			  struct intel_crtc *crtc);
	void (*update_active_dpll)(struct intel_atomic_state *state,
				   struct intel_crtc *crtc,
				   struct intel_encoder *encoder);
	void (*update_ref_clks)(struct drm_i915_private *i915);
	void (*dump_hw_state)(struct drm_i915_private *dev_priv,
			      const struct intel_dpll_hw_state *hw_state);
};
63
static void
intel_atomic_duplicate_dpll_state(struct drm_i915_private *dev_priv,
				  struct intel_shared_dpll_state *shared_dpll)
{
	enum intel_dpll_id i;

	/* Copy shared dpll state */
	for (i = 0; i < dev_priv->dpll.num_shared_dpll; i++) {
		struct intel_shared_dpll *pll = &dev_priv->dpll.shared_dplls[i];

		shared_dpll[i] = pll->state;
	}
}
77
static struct intel_shared_dpll_state *
intel_atomic_get_shared_dpll_state(struct drm_atomic_state *s)
{
	struct intel_atomic_state *state = to_intel_atomic_state(s);

	drm_WARN_ON(s->dev, !drm_modeset_is_locked(&s->dev->mode_config.connection_mutex));

	if (!state->dpll_set) {
		state->dpll_set = true;

		intel_atomic_duplicate_dpll_state(to_i915(s->dev),
						  state->shared_dpll);
	}

	return state->shared_dpll;
}
94
/**
 * intel_get_shared_dpll_by_id - get a DPLL given its id
 * @dev_priv: i915 device instance
 * @id: pll id
 *
 * Returns:
 * A pointer to the DPLL with @id
 */
struct intel_shared_dpll *
intel_get_shared_dpll_by_id(struct drm_i915_private *dev_priv,
			    enum intel_dpll_id id)
{
	return &dev_priv->dpll.shared_dplls[id];
}
109
/**
 * intel_get_shared_dpll_id - get the id of a DPLL
 * @dev_priv: i915 device instance
 * @pll: the DPLL
 *
 * Returns:
 * The id of @pll
 */
enum intel_dpll_id
intel_get_shared_dpll_id(struct drm_i915_private *dev_priv,
			 struct intel_shared_dpll *pll)
{
	long pll_idx = pll - dev_priv->dpll.shared_dplls;

	if (drm_WARN_ON(&dev_priv->drm,
			pll_idx < 0 ||
			pll_idx >= dev_priv->dpll.num_shared_dpll))
		return -1;

	return pll_idx;
}
131
/* For ILK+ */
void assert_shared_dpll(struct drm_i915_private *dev_priv,
			struct intel_shared_dpll *pll,
			bool state)
{
	bool cur_state;
	struct intel_dpll_hw_state hw_state;

	if (drm_WARN(&dev_priv->drm, !pll,
		     "asserting DPLL %s with no DPLL\n", onoff(state)))
		return;

	cur_state = intel_dpll_get_hw_state(dev_priv, pll, &hw_state);
	I915_STATE_WARN(cur_state != state,
			"%s assertion failure (expected %s, current %s)\n",
			pll->info->name, onoff(state), onoff(cur_state));
}
149
static i915_reg_t
intel_combo_pll_enable_reg(struct drm_i915_private *i915,
			   struct intel_shared_dpll *pll)
{
	if (IS_DG1(i915))
		return DG1_DPLL_ENABLE(pll->info->id);
	else if (IS_JSL_EHL(i915) && (pll->info->id == DPLL_ID_EHL_DPLL4))
		return MG_PLL_ENABLE(0);

	return CNL_DPLL_ENABLE(pll->info->id);
}
161
/**
 * intel_prepare_shared_dpll - call a dpll's prepare hook
 * @crtc_state: CRTC, and its state, which has a shared dpll
 *
 * This calls the PLL's prepare hook if it has one and if the PLL is not
 * already enabled. The prepare hook is platform specific.
 */
169void intel_prepare_shared_dpll(const struct intel_crtc_state *crtc_state)
170{
171 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
172 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
173 struct intel_shared_dpll *pll = crtc_state->shared_dpll;
174
175 if (drm_WARN_ON(&dev_priv->drm, pll == NULL))
176 return;
177
178 mutex_lock(&dev_priv->dpll.lock);
179 drm_WARN_ON(&dev_priv->drm, !pll->state.crtc_mask);
180 if (!pll->active_mask) {
181 drm_dbg(&dev_priv->drm, "setting up %s\n", pll->info->name);
182 drm_WARN_ON(&dev_priv->drm, pll->on);
183 assert_shared_dpll_disabled(dev_priv, pll);
184
185 pll->info->funcs->prepare(dev_priv, pll);
186 }
187 mutex_unlock(&dev_priv->dpll.lock);
188}
189
/**
 * intel_enable_shared_dpll - enable a CRTC's shared DPLL
 * @crtc_state: CRTC, and its state, which has a shared DPLL
 *
 * Enable the shared DPLL used by @crtc_state.
 */
196void intel_enable_shared_dpll(const struct intel_crtc_state *crtc_state)
197{
198 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
199 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
200 struct intel_shared_dpll *pll = crtc_state->shared_dpll;
201 unsigned int crtc_mask = drm_crtc_mask(&crtc->base);
202 unsigned int old_mask;
203
204 if (drm_WARN_ON(&dev_priv->drm, pll == NULL))
205 return;
206
207 mutex_lock(&dev_priv->dpll.lock);
208 old_mask = pll->active_mask;
209
210 if (drm_WARN_ON(&dev_priv->drm, !(pll->state.crtc_mask & crtc_mask)) ||
211 drm_WARN_ON(&dev_priv->drm, pll->active_mask & crtc_mask))
212 goto out;
213
214 pll->active_mask |= crtc_mask;
215
216 drm_dbg_kms(&dev_priv->drm,
217 "enable %s (active %x, on? %d) for crtc %d\n",
218 pll->info->name, pll->active_mask, pll->on,
219 crtc->base.base.id);
220
221 if (old_mask) {
222 drm_WARN_ON(&dev_priv->drm, !pll->on);
223 assert_shared_dpll_enabled(dev_priv, pll);
224 goto out;
225 }
226 drm_WARN_ON(&dev_priv->drm, pll->on);
227
228 drm_dbg_kms(&dev_priv->drm, "enabling %s\n", pll->info->name);
229 pll->info->funcs->enable(dev_priv, pll);
230 pll->on = true;
231
232out:
233 mutex_unlock(&dev_priv->dpll.lock);
234}
235
/**
 * intel_disable_shared_dpll - disable a CRTC's shared DPLL
 * @crtc_state: CRTC, and its state, which has a shared DPLL
 *
 * Disable the shared DPLL used by @crtc_state.
 */
242void intel_disable_shared_dpll(const struct intel_crtc_state *crtc_state)
243{
244 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
245 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
246 struct intel_shared_dpll *pll = crtc_state->shared_dpll;
247 unsigned int crtc_mask = drm_crtc_mask(&crtc->base);
248
	/* PCH only available on ILK+ */
250 if (INTEL_GEN(dev_priv) < 5)
251 return;
252
253 if (pll == NULL)
254 return;
255
256 mutex_lock(&dev_priv->dpll.lock);
257 if (drm_WARN_ON(&dev_priv->drm, !(pll->active_mask & crtc_mask)))
258 goto out;
259
260 drm_dbg_kms(&dev_priv->drm,
261 "disable %s (active %x, on? %d) for crtc %d\n",
262 pll->info->name, pll->active_mask, pll->on,
263 crtc->base.base.id);
264
265 assert_shared_dpll_enabled(dev_priv, pll);
266 drm_WARN_ON(&dev_priv->drm, !pll->on);
267
268 pll->active_mask &= ~crtc_mask;
269 if (pll->active_mask)
270 goto out;
271
272 drm_dbg_kms(&dev_priv->drm, "disabling %s\n", pll->info->name);
273 pll->info->funcs->disable(dev_priv, pll);
274 pll->on = false;
275
276out:
277 mutex_unlock(&dev_priv->dpll.lock);
278}
279
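/*
 * Look for a PLL in @dpll_mask that can drive @pll_state: first try to share
 * a PLL that is already referenced in this atomic state with an identical
 * hw_state, otherwise fall back to the first PLL that is currently unused.
 * Returns NULL if neither is available.
 */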
280static struct intel_shared_dpll *
281intel_find_shared_dpll(struct intel_atomic_state *state,
282 const struct intel_crtc *crtc,
283 const struct intel_dpll_hw_state *pll_state,
284 unsigned long dpll_mask)
285{
286 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
287 struct intel_shared_dpll *pll, *unused_pll = NULL;
288 struct intel_shared_dpll_state *shared_dpll;
289 enum intel_dpll_id i;
290
291 shared_dpll = intel_atomic_get_shared_dpll_state(&state->base);
292
293 drm_WARN_ON(&dev_priv->drm, dpll_mask & ~(BIT(I915_NUM_PLLS) - 1));
294
295 for_each_set_bit(i, &dpll_mask, I915_NUM_PLLS) {
296 pll = &dev_priv->dpll.shared_dplls[i];
297
		/* Only want to check enabled timings first */
299 if (shared_dpll[i].crtc_mask == 0) {
300 if (!unused_pll)
301 unused_pll = pll;
302 continue;
303 }
304
305 if (memcmp(pll_state,
306 &shared_dpll[i].hw_state,
307 sizeof(*pll_state)) == 0) {
308 drm_dbg_kms(&dev_priv->drm,
309 "[CRTC:%d:%s] sharing existing %s (crtc mask 0x%08x, active %x)\n",
310 crtc->base.base.id, crtc->base.name,
311 pll->info->name,
312 shared_dpll[i].crtc_mask,
313 pll->active_mask);
314 return pll;
315 }
316 }
317
	/* Ok no matching timings, maybe there's a free one? */
319 if (unused_pll) {
320 drm_dbg_kms(&dev_priv->drm, "[CRTC:%d:%s] allocated %s\n",
321 crtc->base.base.id, crtc->base.name,
322 unused_pll->info->name);
323 return unused_pll;
324 }
325
326 return NULL;
327}
328
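/*
 * Track the use of @pll by @crtc in the atomic state: the first reference
 * records the requested hw_state, and every user sets its pipe's bit in
 * crtc_mask so the PLL is not released while still in use.
 */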
329static void
330intel_reference_shared_dpll(struct intel_atomic_state *state,
331 const struct intel_crtc *crtc,
332 const struct intel_shared_dpll *pll,
333 const struct intel_dpll_hw_state *pll_state)
334{
335 struct drm_i915_private *i915 = to_i915(state->base.dev);
336 struct intel_shared_dpll_state *shared_dpll;
337 const enum intel_dpll_id id = pll->info->id;
338
339 shared_dpll = intel_atomic_get_shared_dpll_state(&state->base);
340
341 if (shared_dpll[id].crtc_mask == 0)
342 shared_dpll[id].hw_state = *pll_state;
343
344 drm_dbg(&i915->drm, "using %s for pipe %c\n", pll->info->name,
345 pipe_name(crtc->pipe));
346
347 shared_dpll[id].crtc_mask |= 1 << crtc->pipe;
348}
349
350static void intel_unreference_shared_dpll(struct intel_atomic_state *state,
351 const struct intel_crtc *crtc,
352 const struct intel_shared_dpll *pll)
353{
354 struct intel_shared_dpll_state *shared_dpll;
355
356 shared_dpll = intel_atomic_get_shared_dpll_state(&state->base);
357 shared_dpll[pll->info->id].crtc_mask &= ~(1 << crtc->pipe);
358}
359
360static void intel_put_dpll(struct intel_atomic_state *state,
361 struct intel_crtc *crtc)
362{
363 const struct intel_crtc_state *old_crtc_state =
364 intel_atomic_get_old_crtc_state(state, crtc);
365 struct intel_crtc_state *new_crtc_state =
366 intel_atomic_get_new_crtc_state(state, crtc);
367
368 new_crtc_state->shared_dpll = NULL;
369
370 if (!old_crtc_state->shared_dpll)
371 return;
372
373 intel_unreference_shared_dpll(state, crtc, old_crtc_state->shared_dpll);
374}
375
/**
 * intel_shared_dpll_swap_state - make atomic DPLL configuration effective
 * @state: atomic state
 *
 * This is the dpll version of drm_atomic_helper_swap_state() since the
 * helper does not handle driver-specific global state.
 *
 * For consistency with atomic helpers this function does a complete swap,
 * i.e. it also puts the current state into @state, even though there is no
 * need for that at this moment.
 */
387void intel_shared_dpll_swap_state(struct intel_atomic_state *state)
388{
389 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
390 struct intel_shared_dpll_state *shared_dpll = state->shared_dpll;
391 enum intel_dpll_id i;
392
393 if (!state->dpll_set)
394 return;
395
396 for (i = 0; i < dev_priv->dpll.num_shared_dpll; i++) {
397 struct intel_shared_dpll *pll =
398 &dev_priv->dpll.shared_dplls[i];
399
400 swap(pll->state, shared_dpll[i]);
401 }
402}
403
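/*
 * PCH (IBX/CPT) DPLLs: classic DPLL_VCO-style PLLs behind the PCH. The
 * hw_state consists of the DPLL control word plus the FP0/FP1 divisors.
 */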
404static bool ibx_pch_dpll_get_hw_state(struct drm_i915_private *dev_priv,
405 struct intel_shared_dpll *pll,
406 struct intel_dpll_hw_state *hw_state)
407{
408 const enum intel_dpll_id id = pll->info->id;
409 intel_wakeref_t wakeref;
410 u32 val;
411
412 wakeref = intel_display_power_get_if_enabled(dev_priv,
413 POWER_DOMAIN_DISPLAY_CORE);
414 if (!wakeref)
415 return false;
416
417 val = intel_de_read(dev_priv, PCH_DPLL(id));
418 hw_state->dpll = val;
419 hw_state->fp0 = intel_de_read(dev_priv, PCH_FP0(id));
420 hw_state->fp1 = intel_de_read(dev_priv, PCH_FP1(id));
421
422 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
423
424 return val & DPLL_VCO_ENABLE;
425}
426
427static void ibx_pch_dpll_prepare(struct drm_i915_private *dev_priv,
428 struct intel_shared_dpll *pll)
429{
430 const enum intel_dpll_id id = pll->info->id;
431
432 intel_de_write(dev_priv, PCH_FP0(id), pll->state.hw_state.fp0);
433 intel_de_write(dev_priv, PCH_FP1(id), pll->state.hw_state.fp1);
434}
435
436static void ibx_assert_pch_refclk_enabled(struct drm_i915_private *dev_priv)
437{
438 u32 val;
439 bool enabled;
440
441 I915_STATE_WARN_ON(!(HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)));
442
443 val = intel_de_read(dev_priv, PCH_DREF_CONTROL);
444 enabled = !!(val & (DREF_SSC_SOURCE_MASK | DREF_NONSPREAD_SOURCE_MASK |
445 DREF_SUPERSPREAD_SOURCE_MASK));
446 I915_STATE_WARN(!enabled, "PCH refclk assertion failure, should be active but is disabled\n");
447}
448
449static void ibx_pch_dpll_enable(struct drm_i915_private *dev_priv,
450 struct intel_shared_dpll *pll)
451{
452 const enum intel_dpll_id id = pll->info->id;
453
	/* PCH refclock must be enabled first */
455 ibx_assert_pch_refclk_enabled(dev_priv);
456
457 intel_de_write(dev_priv, PCH_DPLL(id), pll->state.hw_state.dpll);
458
	/* Wait for the clocks to stabilize. */
460 intel_de_posting_read(dev_priv, PCH_DPLL(id));
461 udelay(150);
462
	/* The pixel multiplier can only be updated once the
	 * DPLL is enabled and the clocks are stable.
	 *
	 * So write it again.
	 */
468 intel_de_write(dev_priv, PCH_DPLL(id), pll->state.hw_state.dpll);
469 intel_de_posting_read(dev_priv, PCH_DPLL(id));
470 udelay(200);
471}
472
473static void ibx_pch_dpll_disable(struct drm_i915_private *dev_priv,
474 struct intel_shared_dpll *pll)
475{
476 const enum intel_dpll_id id = pll->info->id;
477
478 intel_de_write(dev_priv, PCH_DPLL(id), 0);
479 intel_de_posting_read(dev_priv, PCH_DPLL(id));
480 udelay(200);
481}
482
483static bool ibx_get_dpll(struct intel_atomic_state *state,
484 struct intel_crtc *crtc,
485 struct intel_encoder *encoder)
486{
487 struct intel_crtc_state *crtc_state =
488 intel_atomic_get_new_crtc_state(state, crtc);
489 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
490 struct intel_shared_dpll *pll;
491 enum intel_dpll_id i;
492
493 if (HAS_PCH_IBX(dev_priv)) {
		/* Ironlake PCH has a fixed PLL->PCH pipe mapping. */
495 i = (enum intel_dpll_id) crtc->pipe;
496 pll = &dev_priv->dpll.shared_dplls[i];
497
498 drm_dbg_kms(&dev_priv->drm,
499 "[CRTC:%d:%s] using pre-allocated %s\n",
500 crtc->base.base.id, crtc->base.name,
501 pll->info->name);
502 } else {
503 pll = intel_find_shared_dpll(state, crtc,
504 &crtc_state->dpll_hw_state,
505 BIT(DPLL_ID_PCH_PLL_B) |
506 BIT(DPLL_ID_PCH_PLL_A));
507 }
508
509 if (!pll)
510 return false;
511
	/* reference the pll */
513 intel_reference_shared_dpll(state, crtc,
514 pll, &crtc_state->dpll_hw_state);
515
516 crtc_state->shared_dpll = pll;
517
518 return true;
519}
520
521static void ibx_dump_hw_state(struct drm_i915_private *dev_priv,
522 const struct intel_dpll_hw_state *hw_state)
523{
524 drm_dbg_kms(&dev_priv->drm,
525 "dpll_hw_state: dpll: 0x%x, dpll_md: 0x%x, "
526 "fp0: 0x%x, fp1: 0x%x\n",
527 hw_state->dpll,
528 hw_state->dpll_md,
529 hw_state->fp0,
530 hw_state->fp1);
531}
532
533static const struct intel_shared_dpll_funcs ibx_pch_dpll_funcs = {
534 .prepare = ibx_pch_dpll_prepare,
535 .enable = ibx_pch_dpll_enable,
536 .disable = ibx_pch_dpll_disable,
537 .get_hw_state = ibx_pch_dpll_get_hw_state,
538};
539
540static const struct dpll_info pch_plls[] = {
541 { "PCH DPLL A", &ibx_pch_dpll_funcs, DPLL_ID_PCH_PLL_A, 0 },
542 { "PCH DPLL B", &ibx_pch_dpll_funcs, DPLL_ID_PCH_PLL_B, 0 },
543 { },
544};
545
546static const struct intel_dpll_mgr pch_pll_mgr = {
547 .dpll_info = pch_plls,
548 .get_dplls = ibx_get_dpll,
549 .put_dplls = intel_put_dpll,
550 .dump_hw_state = ibx_dump_hw_state,
551};
552
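/*
 * HSW/BDW clocking: HDMI uses one of the two WRPLLs, DP uses the fixed-rate
 * LCPLL taps (810/1350/2700 MHz) and analog/FDI uses the SPLL. Only the
 * WRPLLs and the SPLL need to be enabled/disabled at runtime; the LCPLL
 * links are always-on and have empty enable/disable hooks below.
 */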
553static void hsw_ddi_wrpll_enable(struct drm_i915_private *dev_priv,
554 struct intel_shared_dpll *pll)
555{
556 const enum intel_dpll_id id = pll->info->id;
557
558 intel_de_write(dev_priv, WRPLL_CTL(id), pll->state.hw_state.wrpll);
559 intel_de_posting_read(dev_priv, WRPLL_CTL(id));
560 udelay(20);
561}
562
563static void hsw_ddi_spll_enable(struct drm_i915_private *dev_priv,
564 struct intel_shared_dpll *pll)
565{
566 intel_de_write(dev_priv, SPLL_CTL, pll->state.hw_state.spll);
567 intel_de_posting_read(dev_priv, SPLL_CTL);
568 udelay(20);
569}
570
571static void hsw_ddi_wrpll_disable(struct drm_i915_private *dev_priv,
572 struct intel_shared_dpll *pll)
573{
574 const enum intel_dpll_id id = pll->info->id;
575 u32 val;
576
577 val = intel_de_read(dev_priv, WRPLL_CTL(id));
578 intel_de_write(dev_priv, WRPLL_CTL(id), val & ~WRPLL_PLL_ENABLE);
579 intel_de_posting_read(dev_priv, WRPLL_CTL(id));
580
	/*
	 * Try to set up the PCH reference clock once all DPLLs
	 * that depend on it have been shut down.
	 */
585 if (dev_priv->pch_ssc_use & BIT(id))
586 intel_init_pch_refclk(dev_priv);
587}
588
589static void hsw_ddi_spll_disable(struct drm_i915_private *dev_priv,
590 struct intel_shared_dpll *pll)
591{
592 enum intel_dpll_id id = pll->info->id;
593 u32 val;
594
595 val = intel_de_read(dev_priv, SPLL_CTL);
596 intel_de_write(dev_priv, SPLL_CTL, val & ~SPLL_PLL_ENABLE);
597 intel_de_posting_read(dev_priv, SPLL_CTL);
598
	/*
	 * Try to set up the PCH reference clock once all DPLLs
	 * that depend on it have been shut down.
	 */
603 if (dev_priv->pch_ssc_use & BIT(id))
604 intel_init_pch_refclk(dev_priv);
605}
606
607static bool hsw_ddi_wrpll_get_hw_state(struct drm_i915_private *dev_priv,
608 struct intel_shared_dpll *pll,
609 struct intel_dpll_hw_state *hw_state)
610{
611 const enum intel_dpll_id id = pll->info->id;
612 intel_wakeref_t wakeref;
613 u32 val;
614
615 wakeref = intel_display_power_get_if_enabled(dev_priv,
616 POWER_DOMAIN_DISPLAY_CORE);
617 if (!wakeref)
618 return false;
619
620 val = intel_de_read(dev_priv, WRPLL_CTL(id));
621 hw_state->wrpll = val;
622
623 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
624
625 return val & WRPLL_PLL_ENABLE;
626}
627
628static bool hsw_ddi_spll_get_hw_state(struct drm_i915_private *dev_priv,
629 struct intel_shared_dpll *pll,
630 struct intel_dpll_hw_state *hw_state)
631{
632 intel_wakeref_t wakeref;
633 u32 val;
634
635 wakeref = intel_display_power_get_if_enabled(dev_priv,
636 POWER_DOMAIN_DISPLAY_CORE);
637 if (!wakeref)
638 return false;
639
640 val = intel_de_read(dev_priv, SPLL_CTL);
641 hw_state->spll = val;
642
643 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
644
645 return val & SPLL_PLL_ENABLE;
646}
647
648#define LC_FREQ 2700
649#define LC_FREQ_2K U64_C(LC_FREQ * 2000)
650
651#define P_MIN 2
652#define P_MAX 64
653#define P_INC 2
654
/* Constraints for PLL good behavior */
656#define REF_MIN 48
657#define REF_MAX 400
658#define VCO_MIN 2400
659#define VCO_MAX 4800
660
661struct hsw_wrpll_rnp {
662 unsigned p, n2, r2;
663};
664
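/*
 * PPM error budget used when picking WRPLL dividers for a given pixel clock.
 * The listed common display clocks get a budget of 0 (must be hit exactly),
 * a few awkward clocks get a larger allowance, and everything else defaults
 * to 1000 ppm.
 */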
665static unsigned hsw_wrpll_get_budget_for_freq(int clock)
666{
667 unsigned budget;
668
669 switch (clock) {
670 case 25175000:
671 case 25200000:
672 case 27000000:
673 case 27027000:
674 case 37762500:
675 case 37800000:
676 case 40500000:
677 case 40541000:
678 case 54000000:
679 case 54054000:
680 case 59341000:
681 case 59400000:
682 case 72000000:
683 case 74176000:
684 case 74250000:
685 case 81000000:
686 case 81081000:
687 case 89012000:
688 case 89100000:
689 case 108000000:
690 case 108108000:
691 case 111264000:
692 case 111375000:
693 case 148352000:
694 case 148500000:
695 case 162000000:
696 case 162162000:
697 case 222525000:
698 case 222750000:
699 case 296703000:
700 case 297000000:
701 budget = 0;
702 break;
703 case 233500000:
704 case 245250000:
705 case 247750000:
706 case 253250000:
707 case 298000000:
708 budget = 1500;
709 break;
710 case 169128000:
711 case 169500000:
712 case 179500000:
713 case 202000000:
714 budget = 2000;
715 break;
716 case 256250000:
717 case 262500000:
718 case 270000000:
719 case 272500000:
720 case 273750000:
721 case 280750000:
722 case 281250000:
723 case 286000000:
724 case 291750000:
725 budget = 4000;
726 break;
727 case 267250000:
728 case 268500000:
729 budget = 5000;
730 break;
731 default:
732 budget = 1000;
733 break;
734 }
735
736 return budget;
737}
738
739static void hsw_wrpll_update_rnp(u64 freq2k, unsigned int budget,
740 unsigned int r2, unsigned int n2,
741 unsigned int p,
742 struct hsw_wrpll_rnp *best)
743{
744 u64 a, b, c, d, diff, diff_best;
745
	/* No best (r,n,p) yet */
747 if (best->p == 0) {
748 best->p = p;
749 best->n2 = n2;
750 best->r2 = r2;
751 return;
752 }
753
	/*
	 * Output clock is (LC_FREQ_2K / 2000) * N / (P * R), which compares to
	 * freq2k.
	 *
	 * delta = 1e6 *
	 *	   abs(freq2k - (LC_FREQ_2K * n2/(p * r2))) /
	 *	   freq2k;
	 *
	 * and we would like delta <= budget.
	 *
	 * If the discrepancy is above the PPM-based budget, always prefer to
	 * improve upon the previous solution.  However, if you're within the
	 * budget, try to maximize Ref * VCO, that is N / (P * R^2).
	 */
768 a = freq2k * budget * p * r2;
769 b = freq2k * budget * best->p * best->r2;
770 diff = abs_diff(freq2k * p * r2, LC_FREQ_2K * n2);
771 diff_best = abs_diff(freq2k * best->p * best->r2,
772 LC_FREQ_2K * best->n2);
773 c = 1000000 * diff;
774 d = 1000000 * diff_best;
775
776 if (a < c && b < d) {
		/* Both above the budget: pick the smaller relative deviation. */
778 if (best->p * best->r2 * diff < p * r2 * diff_best) {
779 best->p = p;
780 best->n2 = n2;
781 best->r2 = r2;
782 }
783 } else if (a >= c && b < d) {
		/* Only the candidate is within the budget: take it. */
785 best->p = p;
786 best->n2 = n2;
787 best->r2 = r2;
788 } else if (a >= c && b >= d) {
		/* Both within the budget: pick the higher n2 / (r2 * r2). */
790 if (n2 * best->r2 * best->r2 > best->n2 * r2 * r2) {
791 best->p = p;
792 best->n2 = n2;
793 best->r2 = r2;
794 }
795 }
796
797}
798
static void
hsw_ddi_calculate_wrpll(int clock /* in Hz */,
			unsigned *r2_out, unsigned *n2_out, unsigned *p_out)
802{
803 u64 freq2k;
804 unsigned p, n2, r2;
805 struct hsw_wrpll_rnp best = { 0, 0, 0 };
806 unsigned budget;
807
808 freq2k = clock / 100;
809
810 budget = hsw_wrpll_get_budget_for_freq(clock);
811
	/* Special case handling for 540 pixel clock: bypass WR PLL entirely
	 * and directly pass the LC PLL to it. */
814 if (freq2k == 5400000) {
815 *n2_out = 2;
816 *p_out = 1;
817 *r2_out = 2;
818 return;
819 }
820
	/*
	 * Ref = LC_FREQ / R, where Ref is the actual reference input seen by
	 * the WR PLL.
	 *
	 * We want R so that REF_MIN <= Ref <= REF_MAX.
	 * Injecting R2 = 2 * R gives:
	 *   REF_MAX * r2 > LC_FREQ * 2 and
	 *   REF_MIN * r2 < LC_FREQ * 2
	 *
	 * Which means the desired boundaries for r2 are:
	 *  LC_FREQ * 2 / REF_MAX < r2 < LC_FREQ * 2 / REF_MIN
	 */
834 for (r2 = LC_FREQ * 2 / REF_MAX + 1;
835 r2 <= LC_FREQ * 2 / REF_MIN;
836 r2++) {

		/*
		 * VCO = N * Ref, that is: VCO = N * LC_FREQ / R
		 *
		 * Once again we want VCO_MIN <= VCO <= VCO_MAX.
		 * Injecting R2 = 2 * R and N2 = 2 * N, we get:
		 *   VCO_MAX * r2 > n2 * LC_FREQ and
		 *   VCO_MIN * r2 < n2 * LC_FREQ
		 *
		 * Which means the desired boundaries for n2 are:
		 * VCO_MIN * r2 / LC_FREQ < n2 < VCO_MAX * r2 / LC_FREQ
		 */
849 for (n2 = VCO_MIN * r2 / LC_FREQ + 1;
850 n2 <= VCO_MAX * r2 / LC_FREQ;
851 n2++) {
852
853 for (p = P_MIN; p <= P_MAX; p += P_INC)
854 hsw_wrpll_update_rnp(freq2k, budget,
855 r2, n2, p, &best);
856 }
857 }
858
859 *n2_out = best.n2;
860 *p_out = best.p;
861 *r2_out = best.r2;
862}
863
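/*
 * Build the WRPLL control value for an HDMI port clock (reference taken from
 * the 2700 MHz LCPLL, with the r2/n2/p dividers computed above) and then
 * find a free or matching WRPLL to program it into.
 */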
864static struct intel_shared_dpll *
865hsw_ddi_wrpll_get_dpll(struct intel_atomic_state *state,
866 struct intel_crtc *crtc)
867{
868 struct intel_crtc_state *crtc_state =
869 intel_atomic_get_new_crtc_state(state, crtc);
870 struct intel_shared_dpll *pll;
871 u32 val;
872 unsigned int p, n2, r2;
873
874 hsw_ddi_calculate_wrpll(crtc_state->port_clock * 1000, &r2, &n2, &p);
875
876 val = WRPLL_PLL_ENABLE | WRPLL_REF_LCPLL |
877 WRPLL_DIVIDER_REFERENCE(r2) | WRPLL_DIVIDER_FEEDBACK(n2) |
878 WRPLL_DIVIDER_POST(p);
879
880 crtc_state->dpll_hw_state.wrpll = val;
881
882 pll = intel_find_shared_dpll(state, crtc,
883 &crtc_state->dpll_hw_state,
884 BIT(DPLL_ID_WRPLL2) |
885 BIT(DPLL_ID_WRPLL1));
886
887 if (!pll)
888 return NULL;
889
890 return pll;
891}
892
893static int hsw_ddi_wrpll_get_freq(struct drm_i915_private *dev_priv,
894 const struct intel_shared_dpll *pll,
895 const struct intel_dpll_hw_state *pll_state)
896{
897 int refclk;
898 int n, p, r;
899 u32 wrpll = pll_state->wrpll;
900
901 switch (wrpll & WRPLL_REF_MASK) {
902 case WRPLL_REF_SPECIAL_HSW:
		/* Muxed-SSC for BDW, non-SSC for non-ULT HSW. */
904 if (IS_HASWELL(dev_priv) && !IS_HSW_ULT(dev_priv)) {
905 refclk = dev_priv->dpll.ref_clks.nssc;
906 break;
907 }
908 fallthrough;
909 case WRPLL_REF_PCH_SSC:
		/*
		 * We could calculate spread here, but our checking
		 * code only cares about 5% accuracy, and spread is a max of
		 * 0.5% downspread.
		 */
915 refclk = dev_priv->dpll.ref_clks.ssc;
916 break;
917 case WRPLL_REF_LCPLL:
918 refclk = 2700000;
919 break;
920 default:
921 MISSING_CASE(wrpll);
922 return 0;
923 }
924
925 r = wrpll & WRPLL_DIVIDER_REF_MASK;
926 p = (wrpll & WRPLL_DIVIDER_POST_MASK) >> WRPLL_DIVIDER_POST_SHIFT;
927 n = (wrpll & WRPLL_DIVIDER_FB_MASK) >> WRPLL_DIVIDER_FB_SHIFT;
928
	/* Convert to KHz, p & r have a fixed point portion */
930 return (refclk * n / 10) / (p * r) * 2;
931}
932
933static struct intel_shared_dpll *
934hsw_ddi_lcpll_get_dpll(struct intel_crtc_state *crtc_state)
935{
936 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
937 struct intel_shared_dpll *pll;
938 enum intel_dpll_id pll_id;
939 int clock = crtc_state->port_clock;
940
941 switch (clock / 2) {
942 case 81000:
943 pll_id = DPLL_ID_LCPLL_810;
944 break;
945 case 135000:
946 pll_id = DPLL_ID_LCPLL_1350;
947 break;
948 case 270000:
949 pll_id = DPLL_ID_LCPLL_2700;
950 break;
951 default:
952 drm_dbg_kms(&dev_priv->drm, "Invalid clock for DP: %d\n",
953 clock);
954 return NULL;
955 }
956
957 pll = intel_get_shared_dpll_by_id(dev_priv, pll_id);
958
959 if (!pll)
960 return NULL;
961
962 return pll;
963}
964
965static int hsw_ddi_lcpll_get_freq(struct drm_i915_private *i915,
966 const struct intel_shared_dpll *pll,
967 const struct intel_dpll_hw_state *pll_state)
968{
969 int link_clock = 0;
970
971 switch (pll->info->id) {
972 case DPLL_ID_LCPLL_810:
973 link_clock = 81000;
974 break;
975 case DPLL_ID_LCPLL_1350:
976 link_clock = 135000;
977 break;
978 case DPLL_ID_LCPLL_2700:
979 link_clock = 270000;
980 break;
981 default:
982 drm_WARN(&i915->drm, 1, "bad port clock sel\n");
983 break;
984 }
985
986 return link_clock * 2;
987}
988
989static struct intel_shared_dpll *
990hsw_ddi_spll_get_dpll(struct intel_atomic_state *state,
991 struct intel_crtc *crtc)
992{
993 struct intel_crtc_state *crtc_state =
994 intel_atomic_get_new_crtc_state(state, crtc);
995
996 if (drm_WARN_ON(crtc->base.dev, crtc_state->port_clock / 2 != 135000))
997 return NULL;
998
999 crtc_state->dpll_hw_state.spll = SPLL_PLL_ENABLE | SPLL_FREQ_1350MHz |
1000 SPLL_REF_MUXED_SSC;
1001
1002 return intel_find_shared_dpll(state, crtc, &crtc_state->dpll_hw_state,
1003 BIT(DPLL_ID_SPLL));
1004}
1005
1006static int hsw_ddi_spll_get_freq(struct drm_i915_private *i915,
1007 const struct intel_shared_dpll *pll,
1008 const struct intel_dpll_hw_state *pll_state)
1009{
1010 int link_clock = 0;
1011
1012 switch (pll_state->spll & SPLL_FREQ_MASK) {
1013 case SPLL_FREQ_810MHz:
1014 link_clock = 81000;
1015 break;
1016 case SPLL_FREQ_1350MHz:
1017 link_clock = 135000;
1018 break;
1019 case SPLL_FREQ_2700MHz:
1020 link_clock = 270000;
1021 break;
1022 default:
1023 drm_WARN(&i915->drm, 1, "bad spll freq\n");
1024 break;
1025 }
1026
1027 return link_clock * 2;
1028}
1029
1030static bool hsw_get_dpll(struct intel_atomic_state *state,
1031 struct intel_crtc *crtc,
1032 struct intel_encoder *encoder)
1033{
1034 struct intel_crtc_state *crtc_state =
1035 intel_atomic_get_new_crtc_state(state, crtc);
1036 struct intel_shared_dpll *pll;
1037
1038 memset(&crtc_state->dpll_hw_state, 0,
1039 sizeof(crtc_state->dpll_hw_state));
1040
1041 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
1042 pll = hsw_ddi_wrpll_get_dpll(state, crtc);
1043 else if (intel_crtc_has_dp_encoder(crtc_state))
1044 pll = hsw_ddi_lcpll_get_dpll(crtc_state);
1045 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
1046 pll = hsw_ddi_spll_get_dpll(state, crtc);
1047 else
1048 return false;
1049
1050 if (!pll)
1051 return false;
1052
1053 intel_reference_shared_dpll(state, crtc,
1054 pll, &crtc_state->dpll_hw_state);
1055
1056 crtc_state->shared_dpll = pll;
1057
1058 return true;
1059}
1060
1061static void hsw_update_dpll_ref_clks(struct drm_i915_private *i915)
1062{
1063 i915->dpll.ref_clks.ssc = 135000;
	/* Non-SSC is only used on non-ULT HSW. */
1065 if (intel_de_read(i915, FUSE_STRAP3) & HSW_REF_CLK_SELECT)
1066 i915->dpll.ref_clks.nssc = 24000;
1067 else
1068 i915->dpll.ref_clks.nssc = 135000;
1069}
1070
1071static void hsw_dump_hw_state(struct drm_i915_private *dev_priv,
1072 const struct intel_dpll_hw_state *hw_state)
1073{
1074 drm_dbg_kms(&dev_priv->drm, "dpll_hw_state: wrpll: 0x%x spll: 0x%x\n",
1075 hw_state->wrpll, hw_state->spll);
1076}
1077
1078static const struct intel_shared_dpll_funcs hsw_ddi_wrpll_funcs = {
1079 .enable = hsw_ddi_wrpll_enable,
1080 .disable = hsw_ddi_wrpll_disable,
1081 .get_hw_state = hsw_ddi_wrpll_get_hw_state,
1082 .get_freq = hsw_ddi_wrpll_get_freq,
1083};
1084
1085static const struct intel_shared_dpll_funcs hsw_ddi_spll_funcs = {
1086 .enable = hsw_ddi_spll_enable,
1087 .disable = hsw_ddi_spll_disable,
1088 .get_hw_state = hsw_ddi_spll_get_hw_state,
1089 .get_freq = hsw_ddi_spll_get_freq,
1090};
1091
1092static void hsw_ddi_lcpll_enable(struct drm_i915_private *dev_priv,
1093 struct intel_shared_dpll *pll)
1094{
1095}
1096
1097static void hsw_ddi_lcpll_disable(struct drm_i915_private *dev_priv,
1098 struct intel_shared_dpll *pll)
1099{
1100}
1101
1102static bool hsw_ddi_lcpll_get_hw_state(struct drm_i915_private *dev_priv,
1103 struct intel_shared_dpll *pll,
1104 struct intel_dpll_hw_state *hw_state)
1105{
1106 return true;
1107}
1108
1109static const struct intel_shared_dpll_funcs hsw_ddi_lcpll_funcs = {
1110 .enable = hsw_ddi_lcpll_enable,
1111 .disable = hsw_ddi_lcpll_disable,
1112 .get_hw_state = hsw_ddi_lcpll_get_hw_state,
1113 .get_freq = hsw_ddi_lcpll_get_freq,
1114};
1115
1116static const struct dpll_info hsw_plls[] = {
1117 { "WRPLL 1", &hsw_ddi_wrpll_funcs, DPLL_ID_WRPLL1, 0 },
1118 { "WRPLL 2", &hsw_ddi_wrpll_funcs, DPLL_ID_WRPLL2, 0 },
1119 { "SPLL", &hsw_ddi_spll_funcs, DPLL_ID_SPLL, 0 },
1120 { "LCPLL 810", &hsw_ddi_lcpll_funcs, DPLL_ID_LCPLL_810, INTEL_DPLL_ALWAYS_ON },
1121 { "LCPLL 1350", &hsw_ddi_lcpll_funcs, DPLL_ID_LCPLL_1350, INTEL_DPLL_ALWAYS_ON },
1122 { "LCPLL 2700", &hsw_ddi_lcpll_funcs, DPLL_ID_LCPLL_2700, INTEL_DPLL_ALWAYS_ON },
1123 { },
1124};
1125
1126static const struct intel_dpll_mgr hsw_pll_mgr = {
1127 .dpll_info = hsw_plls,
1128 .get_dplls = hsw_get_dpll,
1129 .put_dplls = intel_put_dpll,
1130 .update_ref_clks = hsw_update_dpll_ref_clks,
1131 .dump_hw_state = hsw_dump_hw_state,
1132};
1133
struct skl_dpll_regs {
	i915_reg_t ctl, cfgcr1, cfgcr2;
};

/* this array is indexed by the *shared* pll id */
static const struct skl_dpll_regs skl_dpll_regs[4] = {
	{
		/* DPLL 0 */
		.ctl = LCPLL1_CTL,
		/* DPLL 0 doesn't support HDMI mode */
	},
	{
		/* DPLL 1 */
		.ctl = LCPLL2_CTL,
		.cfgcr1 = DPLL_CFGCR1(SKL_DPLL1),
		.cfgcr2 = DPLL_CFGCR2(SKL_DPLL1),
	},
	{
		/* DPLL 2 */
		.ctl = WRPLL_CTL(0),
		.cfgcr1 = DPLL_CFGCR1(SKL_DPLL2),
		.cfgcr2 = DPLL_CFGCR2(SKL_DPLL2),
	},
	{
		/* DPLL 3 */
		.ctl = WRPLL_CTL(1),
		.cfgcr1 = DPLL_CFGCR1(SKL_DPLL3),
		.cfgcr2 = DPLL_CFGCR2(SKL_DPLL3),
	},
};
1164
1165static void skl_ddi_pll_write_ctrl1(struct drm_i915_private *dev_priv,
1166 struct intel_shared_dpll *pll)
1167{
1168 const enum intel_dpll_id id = pll->info->id;
1169 u32 val;
1170
1171 val = intel_de_read(dev_priv, DPLL_CTRL1);
1172
1173 val &= ~(DPLL_CTRL1_HDMI_MODE(id) |
1174 DPLL_CTRL1_SSC(id) |
1175 DPLL_CTRL1_LINK_RATE_MASK(id));
1176 val |= pll->state.hw_state.ctrl1 << (id * 6);
1177
1178 intel_de_write(dev_priv, DPLL_CTRL1, val);
1179 intel_de_posting_read(dev_priv, DPLL_CTRL1);
1180}
1181
1182static void skl_ddi_pll_enable(struct drm_i915_private *dev_priv,
1183 struct intel_shared_dpll *pll)
1184{
1185 const struct skl_dpll_regs *regs = skl_dpll_regs;
1186 const enum intel_dpll_id id = pll->info->id;
1187
1188 skl_ddi_pll_write_ctrl1(dev_priv, pll);
1189
1190 intel_de_write(dev_priv, regs[id].cfgcr1, pll->state.hw_state.cfgcr1);
1191 intel_de_write(dev_priv, regs[id].cfgcr2, pll->state.hw_state.cfgcr2);
1192 intel_de_posting_read(dev_priv, regs[id].cfgcr1);
1193 intel_de_posting_read(dev_priv, regs[id].cfgcr2);
1194
	/* the enable bit is always bit 31 */
1196 intel_de_write(dev_priv, regs[id].ctl,
1197 intel_de_read(dev_priv, regs[id].ctl) | LCPLL_PLL_ENABLE);
1198
1199 if (intel_de_wait_for_set(dev_priv, DPLL_STATUS, DPLL_LOCK(id), 5))
1200 drm_err(&dev_priv->drm, "DPLL %d not locked\n", id);
1201}
1202
1203static void skl_ddi_dpll0_enable(struct drm_i915_private *dev_priv,
1204 struct intel_shared_dpll *pll)
1205{
1206 skl_ddi_pll_write_ctrl1(dev_priv, pll);
1207}
1208
1209static void skl_ddi_pll_disable(struct drm_i915_private *dev_priv,
1210 struct intel_shared_dpll *pll)
1211{
1212 const struct skl_dpll_regs *regs = skl_dpll_regs;
1213 const enum intel_dpll_id id = pll->info->id;
1214
	/* the enable bit is always bit 31 */
1216 intel_de_write(dev_priv, regs[id].ctl,
1217 intel_de_read(dev_priv, regs[id].ctl) & ~LCPLL_PLL_ENABLE);
1218 intel_de_posting_read(dev_priv, regs[id].ctl);
1219}
1220
1221static void skl_ddi_dpll0_disable(struct drm_i915_private *dev_priv,
1222 struct intel_shared_dpll *pll)
1223{
1224}
1225
1226static bool skl_ddi_pll_get_hw_state(struct drm_i915_private *dev_priv,
1227 struct intel_shared_dpll *pll,
1228 struct intel_dpll_hw_state *hw_state)
1229{
1230 u32 val;
1231 const struct skl_dpll_regs *regs = skl_dpll_regs;
1232 const enum intel_dpll_id id = pll->info->id;
1233 intel_wakeref_t wakeref;
1234 bool ret;
1235
1236 wakeref = intel_display_power_get_if_enabled(dev_priv,
1237 POWER_DOMAIN_DISPLAY_CORE);
1238 if (!wakeref)
1239 return false;
1240
1241 ret = false;
1242
1243 val = intel_de_read(dev_priv, regs[id].ctl);
1244 if (!(val & LCPLL_PLL_ENABLE))
1245 goto out;
1246
1247 val = intel_de_read(dev_priv, DPLL_CTRL1);
1248 hw_state->ctrl1 = (val >> (id * 6)) & 0x3f;
1249
	/* avoid reading back stale values if HDMI mode is not enabled */
1251 if (val & DPLL_CTRL1_HDMI_MODE(id)) {
1252 hw_state->cfgcr1 = intel_de_read(dev_priv, regs[id].cfgcr1);
1253 hw_state->cfgcr2 = intel_de_read(dev_priv, regs[id].cfgcr2);
1254 }
1255 ret = true;
1256
1257out:
1258 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
1259
1260 return ret;
1261}
1262
1263static bool skl_ddi_dpll0_get_hw_state(struct drm_i915_private *dev_priv,
1264 struct intel_shared_dpll *pll,
1265 struct intel_dpll_hw_state *hw_state)
1266{
1267 const struct skl_dpll_regs *regs = skl_dpll_regs;
1268 const enum intel_dpll_id id = pll->info->id;
1269 intel_wakeref_t wakeref;
1270 u32 val;
1271 bool ret;
1272
1273 wakeref = intel_display_power_get_if_enabled(dev_priv,
1274 POWER_DOMAIN_DISPLAY_CORE);
1275 if (!wakeref)
1276 return false;
1277
1278 ret = false;
1279
	/* DPLL0 is always enabled since it drives CDCLK */
1281 val = intel_de_read(dev_priv, regs[id].ctl);
1282 if (drm_WARN_ON(&dev_priv->drm, !(val & LCPLL_PLL_ENABLE)))
1283 goto out;
1284
1285 val = intel_de_read(dev_priv, DPLL_CTRL1);
1286 hw_state->ctrl1 = (val >> (id * 6)) & 0x3f;
1287
1288 ret = true;
1289
1290out:
1291 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
1292
1293 return ret;
1294}
1295
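/*
 * SKL WRPLL (HDMI) divider search: try every supported post divider against
 * the three DCO central frequencies and keep the combination whose DCO
 * frequency deviates least from the central frequency, within the +1%/-6%
 * limits defined below.
 */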
1296struct skl_wrpll_context {
1297 u64 min_deviation;
1298 u64 central_freq;
1299 u64 dco_freq;
1300 unsigned int p;
1301};
1302
1303static void skl_wrpll_context_init(struct skl_wrpll_context *ctx)
1304{
1305 memset(ctx, 0, sizeof(*ctx));
1306
1307 ctx->min_deviation = U64_MAX;
1308}
1309
/* DCO freq must be within +1%/-6% of the DCO central freq */
1311#define SKL_DCO_MAX_PDEVIATION 100
1312#define SKL_DCO_MAX_NDEVIATION 600
1313
1314static void skl_wrpll_try_divider(struct skl_wrpll_context *ctx,
1315 u64 central_freq,
1316 u64 dco_freq,
1317 unsigned int divider)
1318{
1319 u64 deviation;
1320
1321 deviation = div64_u64(10000 * abs_diff(dco_freq, central_freq),
1322 central_freq);
1323
	/* positive deviation */
1325 if (dco_freq >= central_freq) {
1326 if (deviation < SKL_DCO_MAX_PDEVIATION &&
1327 deviation < ctx->min_deviation) {
1328 ctx->min_deviation = deviation;
1329 ctx->central_freq = central_freq;
1330 ctx->dco_freq = dco_freq;
1331 ctx->p = divider;
1332 }
	/* negative deviation */
1334 } else if (deviation < SKL_DCO_MAX_NDEVIATION &&
1335 deviation < ctx->min_deviation) {
1336 ctx->min_deviation = deviation;
1337 ctx->central_freq = central_freq;
1338 ctx->dco_freq = dco_freq;
1339 ctx->p = divider;
1340 }
1341}
1342
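/*
 * Split the overall post divider p into the three hardware dividers
 * P0 (pdiv), P1 (qdiv) and P2 (kdiv) such that p = p0 * p1 * p2, using only
 * the divider values the hardware supports.
 */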
static void skl_wrpll_get_multipliers(unsigned int p,
				      unsigned int *p0 /* out */,
				      unsigned int *p1 /* out */,
				      unsigned int *p2 /* out */)
{
	/* even dividers */
1349 if (p % 2 == 0) {
1350 unsigned int half = p / 2;
1351
1352 if (half == 1 || half == 2 || half == 3 || half == 5) {
1353 *p0 = 2;
1354 *p1 = 1;
1355 *p2 = half;
1356 } else if (half % 2 == 0) {
1357 *p0 = 2;
1358 *p1 = half / 2;
1359 *p2 = 2;
1360 } else if (half % 3 == 0) {
1361 *p0 = 3;
1362 *p1 = half / 3;
1363 *p2 = 2;
1364 } else if (half % 7 == 0) {
1365 *p0 = 7;
1366 *p1 = half / 7;
1367 *p2 = 2;
1368 }
1369 } else if (p == 3 || p == 9) {
1370 *p0 = 3;
1371 *p1 = 1;
1372 *p2 = p / 3;
1373 } else if (p == 5 || p == 7) {
1374 *p0 = p;
1375 *p1 = 1;
1376 *p2 = 1;
1377 } else if (p == 15) {
1378 *p0 = 3;
1379 *p1 = 1;
1380 *p2 = 5;
1381 } else if (p == 21) {
1382 *p0 = 7;
1383 *p1 = 1;
1384 *p2 = 3;
1385 } else if (p == 35) {
1386 *p0 = 7;
1387 *p1 = 1;
1388 *p2 = 5;
1389 }
1390}
1391
1392struct skl_wrpll_params {
1393 u32 dco_fraction;
1394 u32 dco_integer;
1395 u32 qdiv_ratio;
1396 u32 qdiv_mode;
1397 u32 kdiv;
1398 u32 pdiv;
1399 u32 central_freq;
1400};
1401
1402static void skl_wrpll_params_populate(struct skl_wrpll_params *params,
1403 u64 afe_clock,
1404 int ref_clock,
1405 u64 central_freq,
1406 u32 p0, u32 p1, u32 p2)
1407{
1408 u64 dco_freq;
1409
1410 switch (central_freq) {
1411 case 9600000000ULL:
1412 params->central_freq = 0;
1413 break;
1414 case 9000000000ULL:
1415 params->central_freq = 1;
1416 break;
1417 case 8400000000ULL:
1418 params->central_freq = 3;
1419 }
1420
1421 switch (p0) {
1422 case 1:
1423 params->pdiv = 0;
1424 break;
1425 case 2:
1426 params->pdiv = 1;
1427 break;
1428 case 3:
1429 params->pdiv = 2;
1430 break;
1431 case 7:
1432 params->pdiv = 4;
1433 break;
1434 default:
1435 WARN(1, "Incorrect PDiv\n");
1436 }
1437
1438 switch (p2) {
1439 case 5:
1440 params->kdiv = 0;
1441 break;
1442 case 2:
1443 params->kdiv = 1;
1444 break;
1445 case 3:
1446 params->kdiv = 2;
1447 break;
1448 case 1:
1449 params->kdiv = 3;
1450 break;
1451 default:
1452 WARN(1, "Incorrect KDiv\n");
1453 }
1454
1455 params->qdiv_ratio = p1;
1456 params->qdiv_mode = (params->qdiv_ratio == 1) ? 0 : 1;
1457
1458 dco_freq = p0 * p1 * p2 * afe_clock;
1459
	/*
	 * Intermediate values are in Hz.
	 * Divide by MHz to match bspec
	 */
1464 params->dco_integer = div_u64(dco_freq, ref_clock * KHz(1));
1465 params->dco_fraction =
1466 div_u64((div_u64(dco_freq, ref_clock / KHz(1)) -
1467 params->dco_integer * MHz(1)) * 0x8000, MHz(1));
1468}
1469
static bool
skl_ddi_calculate_wrpll(int clock /* in Hz */,
			int ref_clock,
			struct skl_wrpll_params *wrpll_params)
1474{
1475 u64 afe_clock = clock * 5;
1476 u64 dco_central_freq[3] = { 8400000000ULL,
1477 9000000000ULL,
1478 9600000000ULL };
1479 static const int even_dividers[] = { 4, 6, 8, 10, 12, 14, 16, 18, 20,
1480 24, 28, 30, 32, 36, 40, 42, 44,
1481 48, 52, 54, 56, 60, 64, 66, 68,
1482 70, 72, 76, 78, 80, 84, 88, 90,
1483 92, 96, 98 };
1484 static const int odd_dividers[] = { 3, 5, 7, 9, 15, 21, 35 };
1485 static const struct {
1486 const int *list;
1487 int n_dividers;
1488 } dividers[] = {
1489 { even_dividers, ARRAY_SIZE(even_dividers) },
1490 { odd_dividers, ARRAY_SIZE(odd_dividers) },
1491 };
1492 struct skl_wrpll_context ctx;
1493 unsigned int dco, d, i;
1494 unsigned int p0, p1, p2;
1495
1496 skl_wrpll_context_init(&ctx);
1497
1498 for (d = 0; d < ARRAY_SIZE(dividers); d++) {
1499 for (dco = 0; dco < ARRAY_SIZE(dco_central_freq); dco++) {
1500 for (i = 0; i < dividers[d].n_dividers; i++) {
1501 unsigned int p = dividers[d].list[i];
1502 u64 dco_freq = p * afe_clock;
1503
1504 skl_wrpll_try_divider(&ctx,
1505 dco_central_freq[dco],
1506 dco_freq,
1507 p);
				/*
				 * Skip the remaining dividers if we're sure to
				 * have found the definitive divider, we can't
				 * improve a 0 deviation.
				 */
1513 if (ctx.min_deviation == 0)
1514 goto skip_remaining_dividers;
1515 }
1516 }
1517
1518skip_remaining_dividers:
		/*
		 * If a solution is found with an even divider, prefer
		 * this one.
		 */
1523 if (d == 0 && ctx.p)
1524 break;
1525 }
1526
1527 if (!ctx.p) {
1528 DRM_DEBUG_DRIVER("No valid divider found for %dHz\n", clock);
1529 return false;
1530 }
1531
	/*
	 * gcc incorrectly analyses that these can be used without being
	 * initialized. To be fair, it's hard to guess.
	 */
1536 p0 = p1 = p2 = 0;
1537 skl_wrpll_get_multipliers(ctx.p, &p0, &p1, &p2);
1538 skl_wrpll_params_populate(wrpll_params, afe_clock, ref_clock,
1539 ctx.central_freq, p0, p1, p2);
1540
1541 return true;
1542}
1543
1544static bool skl_ddi_hdmi_pll_dividers(struct intel_crtc_state *crtc_state)
1545{
1546 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
1547 u32 ctrl1, cfgcr1, cfgcr2;
1548 struct skl_wrpll_params wrpll_params = { 0, };
1549
	/*
	 * See comment in intel_dpll_hw_state to understand why we always use 0
	 * as the DPLL id in this function.
	 */
1554 ctrl1 = DPLL_CTRL1_OVERRIDE(0);
1555
1556 ctrl1 |= DPLL_CTRL1_HDMI_MODE(0);
1557
1558 if (!skl_ddi_calculate_wrpll(crtc_state->port_clock * 1000,
1559 i915->dpll.ref_clks.nssc,
1560 &wrpll_params))
1561 return false;
1562
1563 cfgcr1 = DPLL_CFGCR1_FREQ_ENABLE |
1564 DPLL_CFGCR1_DCO_FRACTION(wrpll_params.dco_fraction) |
1565 wrpll_params.dco_integer;
1566
1567 cfgcr2 = DPLL_CFGCR2_QDIV_RATIO(wrpll_params.qdiv_ratio) |
1568 DPLL_CFGCR2_QDIV_MODE(wrpll_params.qdiv_mode) |
1569 DPLL_CFGCR2_KDIV(wrpll_params.kdiv) |
1570 DPLL_CFGCR2_PDIV(wrpll_params.pdiv) |
1571 wrpll_params.central_freq;
1572
1573 memset(&crtc_state->dpll_hw_state, 0,
1574 sizeof(crtc_state->dpll_hw_state));
1575
1576 crtc_state->dpll_hw_state.ctrl1 = ctrl1;
1577 crtc_state->dpll_hw_state.cfgcr1 = cfgcr1;
1578 crtc_state->dpll_hw_state.cfgcr2 = cfgcr2;
1579 return true;
1580}
1581
1582static int skl_ddi_wrpll_get_freq(struct drm_i915_private *i915,
1583 const struct intel_shared_dpll *pll,
1584 const struct intel_dpll_hw_state *pll_state)
1585{
1586 int ref_clock = i915->dpll.ref_clks.nssc;
1587 u32 p0, p1, p2, dco_freq;
1588
1589 p0 = pll_state->cfgcr2 & DPLL_CFGCR2_PDIV_MASK;
1590 p2 = pll_state->cfgcr2 & DPLL_CFGCR2_KDIV_MASK;
1591
1592 if (pll_state->cfgcr2 & DPLL_CFGCR2_QDIV_MODE(1))
1593 p1 = (pll_state->cfgcr2 & DPLL_CFGCR2_QDIV_RATIO_MASK) >> 8;
1594 else
1595 p1 = 1;
1596
1597
1598 switch (p0) {
1599 case DPLL_CFGCR2_PDIV_1:
1600 p0 = 1;
1601 break;
1602 case DPLL_CFGCR2_PDIV_2:
1603 p0 = 2;
1604 break;
1605 case DPLL_CFGCR2_PDIV_3:
1606 p0 = 3;
1607 break;
1608 case DPLL_CFGCR2_PDIV_7_INVALID:
		/*
		 * Incorrect ASUS-Z170M BIOS setting, the HW seems to ignore bit#0,
		 * handling it the same way as PDIV_7.
		 */
1613 drm_dbg_kms(&i915->drm, "Invalid WRPLL PDIV divider value, fixing it.\n");
1614 fallthrough;
1615 case DPLL_CFGCR2_PDIV_7:
1616 p0 = 7;
1617 break;
1618 default:
1619 MISSING_CASE(p0);
1620 return 0;
1621 }
1622
1623 switch (p2) {
1624 case DPLL_CFGCR2_KDIV_5:
1625 p2 = 5;
1626 break;
1627 case DPLL_CFGCR2_KDIV_2:
1628 p2 = 2;
1629 break;
1630 case DPLL_CFGCR2_KDIV_3:
1631 p2 = 3;
1632 break;
1633 case DPLL_CFGCR2_KDIV_1:
1634 p2 = 1;
1635 break;
1636 default:
1637 MISSING_CASE(p2);
1638 return 0;
1639 }
1640
1641 dco_freq = (pll_state->cfgcr1 & DPLL_CFGCR1_DCO_INTEGER_MASK) *
1642 ref_clock;
1643
1644 dco_freq += ((pll_state->cfgcr1 & DPLL_CFGCR1_DCO_FRACTION_MASK) >> 9) *
1645 ref_clock / 0x8000;
1646
1647 if (drm_WARN_ON(&i915->drm, p0 == 0 || p1 == 0 || p2 == 0))
1648 return 0;
1649
1650 return dco_freq / (p0 * p1 * p2 * 5);
1651}
1652
1653static bool
1654skl_ddi_dp_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
1655{
1656 u32 ctrl1;
1657
	/*
	 * See comment in intel_dpll_hw_state to understand why we always use 0
	 * as the DPLL id in this function.
	 */
1662 ctrl1 = DPLL_CTRL1_OVERRIDE(0);
1663 switch (crtc_state->port_clock / 2) {
1664 case 81000:
1665 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, 0);
1666 break;
1667 case 135000:
1668 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, 0);
1669 break;
1670 case 270000:
1671 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, 0);
1672 break;
		/* eDP 1.4 rates */
1674 case 162000:
1675 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, 0);
1676 break;
1677 case 108000:
1678 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, 0);
1679 break;
1680 case 216000:
1681 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, 0);
1682 break;
1683 }
1684
1685 memset(&crtc_state->dpll_hw_state, 0,
1686 sizeof(crtc_state->dpll_hw_state));
1687
1688 crtc_state->dpll_hw_state.ctrl1 = ctrl1;
1689
1690 return true;
1691}
1692
1693static int skl_ddi_lcpll_get_freq(struct drm_i915_private *i915,
1694 const struct intel_shared_dpll *pll,
1695 const struct intel_dpll_hw_state *pll_state)
1696{
1697 int link_clock = 0;
1698
1699 switch ((pll_state->ctrl1 & DPLL_CTRL1_LINK_RATE_MASK(0)) >>
1700 DPLL_CTRL1_LINK_RATE_SHIFT(0)) {
1701 case DPLL_CTRL1_LINK_RATE_810:
1702 link_clock = 81000;
1703 break;
1704 case DPLL_CTRL1_LINK_RATE_1080:
1705 link_clock = 108000;
1706 break;
1707 case DPLL_CTRL1_LINK_RATE_1350:
1708 link_clock = 135000;
1709 break;
1710 case DPLL_CTRL1_LINK_RATE_1620:
1711 link_clock = 162000;
1712 break;
1713 case DPLL_CTRL1_LINK_RATE_2160:
1714 link_clock = 216000;
1715 break;
1716 case DPLL_CTRL1_LINK_RATE_2700:
1717 link_clock = 270000;
1718 break;
1719 default:
1720 drm_WARN(&i915->drm, 1, "Unsupported link rate\n");
1721 break;
1722 }
1723
1724 return link_clock * 2;
1725}
1726
1727static bool skl_get_dpll(struct intel_atomic_state *state,
1728 struct intel_crtc *crtc,
1729 struct intel_encoder *encoder)
1730{
1731 struct intel_crtc_state *crtc_state =
1732 intel_atomic_get_new_crtc_state(state, crtc);
1733 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
1734 struct intel_shared_dpll *pll;
1735 bool bret;
1736
1737 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
1738 bret = skl_ddi_hdmi_pll_dividers(crtc_state);
1739 if (!bret) {
1740 drm_dbg_kms(&i915->drm,
1741 "Could not get HDMI pll dividers.\n");
1742 return false;
1743 }
1744 } else if (intel_crtc_has_dp_encoder(crtc_state)) {
1745 bret = skl_ddi_dp_set_dpll_hw_state(crtc_state);
1746 if (!bret) {
1747 drm_dbg_kms(&i915->drm,
1748 "Could not set DP dpll HW state.\n");
1749 return false;
1750 }
1751 } else {
1752 return false;
1753 }
1754
1755 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
1756 pll = intel_find_shared_dpll(state, crtc,
1757 &crtc_state->dpll_hw_state,
1758 BIT(DPLL_ID_SKL_DPLL0));
1759 else
1760 pll = intel_find_shared_dpll(state, crtc,
1761 &crtc_state->dpll_hw_state,
1762 BIT(DPLL_ID_SKL_DPLL3) |
1763 BIT(DPLL_ID_SKL_DPLL2) |
1764 BIT(DPLL_ID_SKL_DPLL1));
1765 if (!pll)
1766 return false;
1767
1768 intel_reference_shared_dpll(state, crtc,
1769 pll, &crtc_state->dpll_hw_state);
1770
1771 crtc_state->shared_dpll = pll;
1772
1773 return true;
1774}
1775
1776static int skl_ddi_pll_get_freq(struct drm_i915_private *i915,
1777 const struct intel_shared_dpll *pll,
1778 const struct intel_dpll_hw_state *pll_state)
1779{
	/*
	 * ctrl1 register is already shifted for each pll, just use 0 to get
	 * the internal shift for each field
	 */
1784 if (pll_state->ctrl1 & DPLL_CTRL1_HDMI_MODE(0))
1785 return skl_ddi_wrpll_get_freq(i915, pll, pll_state);
1786 else
1787 return skl_ddi_lcpll_get_freq(i915, pll, pll_state);
1788}
1789
1790static void skl_update_dpll_ref_clks(struct drm_i915_private *i915)
1791{
	/* No SSC ref */
1793 i915->dpll.ref_clks.nssc = i915->cdclk.hw.ref;
1794}
1795
1796static void skl_dump_hw_state(struct drm_i915_private *dev_priv,
1797 const struct intel_dpll_hw_state *hw_state)
1798{
1799 drm_dbg_kms(&dev_priv->drm, "dpll_hw_state: "
1800 "ctrl1: 0x%x, cfgcr1: 0x%x, cfgcr2: 0x%x\n",
1801 hw_state->ctrl1,
1802 hw_state->cfgcr1,
1803 hw_state->cfgcr2);
1804}
1805
1806static const struct intel_shared_dpll_funcs skl_ddi_pll_funcs = {
1807 .enable = skl_ddi_pll_enable,
1808 .disable = skl_ddi_pll_disable,
1809 .get_hw_state = skl_ddi_pll_get_hw_state,
1810 .get_freq = skl_ddi_pll_get_freq,
1811};
1812
1813static const struct intel_shared_dpll_funcs skl_ddi_dpll0_funcs = {
1814 .enable = skl_ddi_dpll0_enable,
1815 .disable = skl_ddi_dpll0_disable,
1816 .get_hw_state = skl_ddi_dpll0_get_hw_state,
1817 .get_freq = skl_ddi_pll_get_freq,
1818};
1819
1820static const struct dpll_info skl_plls[] = {
1821 { "DPLL 0", &skl_ddi_dpll0_funcs, DPLL_ID_SKL_DPLL0, INTEL_DPLL_ALWAYS_ON },
1822 { "DPLL 1", &skl_ddi_pll_funcs, DPLL_ID_SKL_DPLL1, 0 },
1823 { "DPLL 2", &skl_ddi_pll_funcs, DPLL_ID_SKL_DPLL2, 0 },
1824 { "DPLL 3", &skl_ddi_pll_funcs, DPLL_ID_SKL_DPLL3, 0 },
1825 { },
1826};
1827
1828static const struct intel_dpll_mgr skl_pll_mgr = {
1829 .dpll_info = skl_plls,
1830 .get_dplls = skl_get_dpll,
1831 .put_dplls = intel_put_dpll,
1832 .update_ref_clks = skl_update_dpll_ref_clks,
1833 .dump_hw_state = skl_dump_hw_state,
1834};
1835
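/*
 * BXT/GLK port PLLs: one PLL per DDI port, programmed through the DPIO PHY
 * registers for the port's phy/channel pair. The hw_state carries the raw
 * EBB/PLL/PCS register fields written below.
 */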
1836static void bxt_ddi_pll_enable(struct drm_i915_private *dev_priv,
1837 struct intel_shared_dpll *pll)
1838{
1839 u32 temp;
1840 enum port port = (enum port)pll->info->id;
1841 enum dpio_phy phy;
1842 enum dpio_channel ch;
1843
1844 bxt_port_to_phy_channel(dev_priv, port, &phy, &ch);
1845
	/* Non-SSC reference */
1847 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1848 temp |= PORT_PLL_REF_SEL;
1849 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
1850
1851 if (IS_GEMINILAKE(dev_priv)) {
1852 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1853 temp |= PORT_PLL_POWER_ENABLE;
1854 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
1855
1856 if (wait_for_us((intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port)) &
1857 PORT_PLL_POWER_STATE), 200))
1858 drm_err(&dev_priv->drm,
1859 "Power state not set for PLL:%d\n", port);
1860 }
1861
	/* Disable 10 bit clock */
1863 temp = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch));
1864 temp &= ~PORT_PLL_10BIT_CLK_ENABLE;
1865 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch), temp);
1866
	/* Write P1 & P2 */
1868 temp = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_0(phy, ch));
1869 temp &= ~(PORT_PLL_P1_MASK | PORT_PLL_P2_MASK);
1870 temp |= pll->state.hw_state.ebb0;
1871 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_0(phy, ch), temp);
1872
	/* Write M2 integer */
1874 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 0));
1875 temp &= ~PORT_PLL_M2_MASK;
1876 temp |= pll->state.hw_state.pll0;
1877 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 0), temp);
1878
	/* Write N */
1880 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 1));
1881 temp &= ~PORT_PLL_N_MASK;
1882 temp |= pll->state.hw_state.pll1;
1883 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 1), temp);
1884
	/* Write M2 fraction */
1886 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 2));
1887 temp &= ~PORT_PLL_M2_FRAC_MASK;
1888 temp |= pll->state.hw_state.pll2;
1889 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 2), temp);
1890
	/* Write M2 fraction enable */
1892 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 3));
1893 temp &= ~PORT_PLL_M2_FRAC_ENABLE;
1894 temp |= pll->state.hw_state.pll3;
1895 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 3), temp);
1896
	/* Write coeff */
1898 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 6));
1899 temp &= ~PORT_PLL_PROP_COEFF_MASK;
1900 temp &= ~PORT_PLL_INT_COEFF_MASK;
1901 temp &= ~PORT_PLL_GAIN_CTL_MASK;
1902 temp |= pll->state.hw_state.pll6;
1903 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 6), temp);
1904
	/* Write calibration val */
1906 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 8));
1907 temp &= ~PORT_PLL_TARGET_CNT_MASK;
1908 temp |= pll->state.hw_state.pll8;
1909 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 8), temp);
1910
1911 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 9));
1912 temp &= ~PORT_PLL_LOCK_THRESHOLD_MASK;
1913 temp |= pll->state.hw_state.pll9;
1914 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 9), temp);
1915
1916 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 10));
1917 temp &= ~PORT_PLL_DCO_AMP_OVR_EN_H;
1918 temp &= ~PORT_PLL_DCO_AMP_MASK;
1919 temp |= pll->state.hw_state.pll10;
1920 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 10), temp);
1921
	/* Recalibrate with new settings */
1923 temp = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch));
1924 temp |= PORT_PLL_RECALIBRATE;
1925 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch), temp);
1926 temp &= ~PORT_PLL_10BIT_CLK_ENABLE;
1927 temp |= pll->state.hw_state.ebb4;
1928 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch), temp);
1929
	/* Enable PLL */
1931 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1932 temp |= PORT_PLL_ENABLE;
1933 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
1934 intel_de_posting_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1935
1936 if (wait_for_us((intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port)) & PORT_PLL_LOCK),
1937 200))
1938 drm_err(&dev_priv->drm, "PLL %d not locked\n", port);
1939
1940 if (IS_GEMINILAKE(dev_priv)) {
1941 temp = intel_de_read(dev_priv, BXT_PORT_TX_DW5_LN0(phy, ch));
1942 temp |= DCC_DELAY_RANGE_2;
1943 intel_de_write(dev_priv, BXT_PORT_TX_DW5_GRP(phy, ch), temp);
1944 }
1945
	/*
	 * While we write to the group register to program all lanes at once we
	 * can read only lane registers and we pick lanes 0/1 for that.
	 */
1950 temp = intel_de_read(dev_priv, BXT_PORT_PCS_DW12_LN01(phy, ch));
1951 temp &= ~LANE_STAGGER_MASK;
1952 temp &= ~LANESTAGGER_STRAP_OVRD;
1953 temp |= pll->state.hw_state.pcsdw12;
1954 intel_de_write(dev_priv, BXT_PORT_PCS_DW12_GRP(phy, ch), temp);
1955}
1956
1957static void bxt_ddi_pll_disable(struct drm_i915_private *dev_priv,
1958 struct intel_shared_dpll *pll)
1959{
1960 enum port port = (enum port)pll->info->id;
1961 u32 temp;
1962
1963 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1964 temp &= ~PORT_PLL_ENABLE;
1965 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
1966 intel_de_posting_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1967
1968 if (IS_GEMINILAKE(dev_priv)) {
1969 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1970 temp &= ~PORT_PLL_POWER_ENABLE;
1971 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
1972
1973 if (wait_for_us(!(intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port)) &
1974 PORT_PLL_POWER_STATE), 200))
1975 drm_err(&dev_priv->drm,
1976 "Power state not reset for PLL:%d\n", port);
1977 }
1978}
1979
1980static bool bxt_ddi_pll_get_hw_state(struct drm_i915_private *dev_priv,
1981 struct intel_shared_dpll *pll,
1982 struct intel_dpll_hw_state *hw_state)
1983{
1984 enum port port = (enum port)pll->info->id;
1985 intel_wakeref_t wakeref;
1986 enum dpio_phy phy;
1987 enum dpio_channel ch;
1988 u32 val;
1989 bool ret;
1990
1991 bxt_port_to_phy_channel(dev_priv, port, &phy, &ch);
1992
1993 wakeref = intel_display_power_get_if_enabled(dev_priv,
1994 POWER_DOMAIN_DISPLAY_CORE);
1995 if (!wakeref)
1996 return false;
1997
1998 ret = false;
1999
2000 val = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
2001 if (!(val & PORT_PLL_ENABLE))
2002 goto out;
2003
2004 hw_state->ebb0 = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_0(phy, ch));
2005 hw_state->ebb0 &= PORT_PLL_P1_MASK | PORT_PLL_P2_MASK;
2006
2007 hw_state->ebb4 = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch));
2008 hw_state->ebb4 &= PORT_PLL_10BIT_CLK_ENABLE;
2009
2010 hw_state->pll0 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 0));
2011 hw_state->pll0 &= PORT_PLL_M2_MASK;
2012
2013 hw_state->pll1 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 1));
2014 hw_state->pll1 &= PORT_PLL_N_MASK;
2015
2016 hw_state->pll2 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 2));
2017 hw_state->pll2 &= PORT_PLL_M2_FRAC_MASK;
2018
2019 hw_state->pll3 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 3));
2020 hw_state->pll3 &= PORT_PLL_M2_FRAC_ENABLE;
2021
2022 hw_state->pll6 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 6));
2023 hw_state->pll6 &= PORT_PLL_PROP_COEFF_MASK |
2024 PORT_PLL_INT_COEFF_MASK |
2025 PORT_PLL_GAIN_CTL_MASK;
2026
2027 hw_state->pll8 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 8));
2028 hw_state->pll8 &= PORT_PLL_TARGET_CNT_MASK;
2029
2030 hw_state->pll9 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 9));
2031 hw_state->pll9 &= PORT_PLL_LOCK_THRESHOLD_MASK;
2032
2033 hw_state->pll10 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 10));
2034 hw_state->pll10 &= PORT_PLL_DCO_AMP_OVR_EN_H |
2035 PORT_PLL_DCO_AMP_MASK;
2036
	/*
	 * While we write to the group register to program all lanes at once we
	 * can read only lane registers. We configure all lanes the same way, so
	 * here just read out lanes 0/1 and output a note if lanes 2/3 differ.
	 */
2042 hw_state->pcsdw12 = intel_de_read(dev_priv,
2043 BXT_PORT_PCS_DW12_LN01(phy, ch));
2044 if (intel_de_read(dev_priv, BXT_PORT_PCS_DW12_LN23(phy, ch)) != hw_state->pcsdw12)
2045 drm_dbg(&dev_priv->drm,
2046 "lane stagger config different for lane 01 (%08x) and 23 (%08x)\n",
2047 hw_state->pcsdw12,
2048 intel_de_read(dev_priv,
2049 BXT_PORT_PCS_DW12_LN23(phy, ch)));
2050 hw_state->pcsdw12 &= LANE_STAGGER_MASK | LANESTAGGER_STRAP_OVRD;
2051
2052 ret = true;
2053
2054out:
2055 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
2056
2057 return ret;
2058}
2059
/* bxt clock parameters */
2061struct bxt_clk_div {
2062 int clock;
2063 u32 p1;
2064 u32 p2;
2065 u32 m2_int;
2066 u32 m2_frac;
2067 bool m2_frac_en;
2068 u32 n;
2069
2070 int vco;
2071};
2072
/* pre-calculated values for DP linkrates */
2074static const struct bxt_clk_div bxt_dp_clk_val[] = {
2075 {162000, 4, 2, 32, 1677722, 1, 1},
2076 {270000, 4, 1, 27, 0, 0, 1},
2077 {540000, 2, 1, 27, 0, 0, 1},
2078 {216000, 3, 2, 32, 1677722, 1, 1},
2079 {243000, 4, 1, 24, 1258291, 1, 1},
2080 {324000, 4, 1, 32, 1677722, 1, 1},
2081 {432000, 3, 1, 32, 1677722, 1, 1}
2082};
2083
2084static bool
2085bxt_ddi_hdmi_pll_dividers(struct intel_crtc_state *crtc_state,
2086 struct bxt_clk_div *clk_div)
2087{
2088 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2089 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2090 struct dpll best_clock;
2091
	/* Calculate HDMI div */
	/*
	 * FIXME: tie the following calculation into
	 * i9xx_crtc_compute_clock
	 */
2097 if (!bxt_find_best_dpll(crtc_state, &best_clock)) {
2098 drm_dbg(&i915->drm, "no PLL dividers found for clock %d pipe %c\n",
2099 crtc_state->port_clock,
2100 pipe_name(crtc->pipe));
2101 return false;
2102 }
2103
2104 clk_div->p1 = best_clock.p1;
2105 clk_div->p2 = best_clock.p2;
2106 drm_WARN_ON(&i915->drm, best_clock.m1 != 2);
2107 clk_div->n = best_clock.n;
2108 clk_div->m2_int = best_clock.m2 >> 22;
2109 clk_div->m2_frac = best_clock.m2 & ((1 << 22) - 1);
2110 clk_div->m2_frac_en = clk_div->m2_frac != 0;
2111
2112 clk_div->vco = best_clock.vco;
2113
2114 return true;
2115}
2116
2117static void bxt_ddi_dp_pll_dividers(struct intel_crtc_state *crtc_state,
2118 struct bxt_clk_div *clk_div)
2119{
2120 int clock = crtc_state->port_clock;
2121 int i;
2122
2123 *clk_div = bxt_dp_clk_val[0];
2124 for (i = 0; i < ARRAY_SIZE(bxt_dp_clk_val); ++i) {
2125 if (bxt_dp_clk_val[i].clock == clock) {
2126 *clk_div = bxt_dp_clk_val[i];
2127 break;
2128 }
2129 }
2130
2131 clk_div->vco = clock * 10 / 2 * clk_div->p1 * clk_div->p2;
2132}
2133
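/*
 * Translate the computed clock dividers into the PLL register fields: the
 * loop filter coefficients and target count are picked from the VCO range,
 * and the lane stagger value from the port clock.
 */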
2134static bool bxt_ddi_set_dpll_hw_state(struct intel_crtc_state *crtc_state,
2135 const struct bxt_clk_div *clk_div)
2136{
2137 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2138 struct intel_dpll_hw_state *dpll_hw_state = &crtc_state->dpll_hw_state;
2139 int clock = crtc_state->port_clock;
2140 int vco = clk_div->vco;
2141 u32 prop_coef, int_coef, gain_ctl, targ_cnt;
2142 u32 lanestagger;
2143
2144 memset(dpll_hw_state, 0, sizeof(*dpll_hw_state));
2145
2146 if (vco >= 6200000 && vco <= 6700000) {
2147 prop_coef = 4;
2148 int_coef = 9;
2149 gain_ctl = 3;
2150 targ_cnt = 8;
2151 } else if ((vco > 5400000 && vco < 6200000) ||
2152 (vco >= 4800000 && vco < 5400000)) {
2153 prop_coef = 5;
2154 int_coef = 11;
2155 gain_ctl = 3;
2156 targ_cnt = 9;
2157 } else if (vco == 5400000) {
2158 prop_coef = 3;
2159 int_coef = 8;
2160 gain_ctl = 1;
2161 targ_cnt = 9;
2162 } else {
2163 drm_err(&i915->drm, "Invalid VCO\n");
2164 return false;
2165 }
2166
2167 if (clock > 270000)
2168 lanestagger = 0x18;
2169 else if (clock > 135000)
2170 lanestagger = 0x0d;
2171 else if (clock > 67000)
2172 lanestagger = 0x07;
2173 else if (clock > 33000)
2174 lanestagger = 0x04;
2175 else
2176 lanestagger = 0x02;
2177
2178 dpll_hw_state->ebb0 = PORT_PLL_P1(clk_div->p1) | PORT_PLL_P2(clk_div->p2);
2179 dpll_hw_state->pll0 = clk_div->m2_int;
2180 dpll_hw_state->pll1 = PORT_PLL_N(clk_div->n);
2181 dpll_hw_state->pll2 = clk_div->m2_frac;
2182
2183 if (clk_div->m2_frac_en)
2184 dpll_hw_state->pll3 = PORT_PLL_M2_FRAC_ENABLE;
2185
2186 dpll_hw_state->pll6 = prop_coef | PORT_PLL_INT_COEFF(int_coef);
2187 dpll_hw_state->pll6 |= PORT_PLL_GAIN_CTL(gain_ctl);
2188
2189 dpll_hw_state->pll8 = targ_cnt;
2190
2191 dpll_hw_state->pll9 = 5 << PORT_PLL_LOCK_THRESHOLD_SHIFT;
2192
2193 dpll_hw_state->pll10 =
2194 PORT_PLL_DCO_AMP(PORT_PLL_DCO_AMP_DEFAULT)
2195 | PORT_PLL_DCO_AMP_OVR_EN_H;
2196
2197 dpll_hw_state->ebb4 = PORT_PLL_10BIT_CLK_ENABLE;
2198
2199 dpll_hw_state->pcsdw12 = LANESTAGGER_STRAP_OVRD | lanestagger;
2200
2201 return true;
2202}
2203
2204static bool
2205bxt_ddi_dp_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
2206{
2207 struct bxt_clk_div clk_div = {};
2208
2209 bxt_ddi_dp_pll_dividers(crtc_state, &clk_div);
2210
2211 return bxt_ddi_set_dpll_hw_state(crtc_state, &clk_div);
2212}
2213
2214static bool
2215bxt_ddi_hdmi_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
2216{
2217 struct bxt_clk_div clk_div = {};
2218
2219 bxt_ddi_hdmi_pll_dividers(crtc_state, &clk_div);
2220
2221 return bxt_ddi_set_dpll_hw_state(crtc_state, &clk_div);
2222}
2223
2224static int bxt_ddi_pll_get_freq(struct drm_i915_private *i915,
2225 const struct intel_shared_dpll *pll,
2226 const struct intel_dpll_hw_state *pll_state)
2227{
2228 struct dpll clock;
2229
2230 clock.m1 = 2;
2231 clock.m2 = (pll_state->pll0 & PORT_PLL_M2_MASK) << 22;
2232 if (pll_state->pll3 & PORT_PLL_M2_FRAC_ENABLE)
2233 clock.m2 |= pll_state->pll2 & PORT_PLL_M2_FRAC_MASK;
2234 clock.n = (pll_state->pll1 & PORT_PLL_N_MASK) >> PORT_PLL_N_SHIFT;
2235 clock.p1 = (pll_state->ebb0 & PORT_PLL_P1_MASK) >> PORT_PLL_P1_SHIFT;
2236 clock.p2 = (pll_state->ebb0 & PORT_PLL_P2_MASK) >> PORT_PLL_P2_SHIFT;
2237
2238 return chv_calc_dpll_params(i915->dpll.ref_clks.nssc, &clock);
2239}
2240
2241static bool bxt_get_dpll(struct intel_atomic_state *state,
2242 struct intel_crtc *crtc,
2243 struct intel_encoder *encoder)
2244{
2245 struct intel_crtc_state *crtc_state =
2246 intel_atomic_get_new_crtc_state(state, crtc);
2247 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2248 struct intel_shared_dpll *pll;
2249 enum intel_dpll_id id;
2250
2251 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) &&
2252 !bxt_ddi_hdmi_set_dpll_hw_state(crtc_state))
2253 return false;
2254
2255 if (intel_crtc_has_dp_encoder(crtc_state) &&
2256 !bxt_ddi_dp_set_dpll_hw_state(crtc_state))
2257 return false;
2258
2259
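	/* BXT has a fixed 1:1 mapping between DDI ports and PLLs. */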
2260 id = (enum intel_dpll_id) encoder->port;
2261 pll = intel_get_shared_dpll_by_id(dev_priv, id);
2262
2263 drm_dbg_kms(&dev_priv->drm, "[CRTC:%d:%s] using pre-allocated %s\n",
2264 crtc->base.base.id, crtc->base.name, pll->info->name);
2265
2266 intel_reference_shared_dpll(state, crtc,
2267 pll, &crtc_state->dpll_hw_state);
2268
2269 crtc_state->shared_dpll = pll;
2270
2271 return true;
2272}
2273
2274static void bxt_update_dpll_ref_clks(struct drm_i915_private *i915)
2275{
2276 i915->dpll.ref_clks.ssc = 100000;
2277 i915->dpll.ref_clks.nssc = 100000;
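	/* DSI non-SSC ref 19.2MHz */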
2278
2279}
2280
2281static void bxt_dump_hw_state(struct drm_i915_private *dev_priv,
2282 const struct intel_dpll_hw_state *hw_state)
2283{
	drm_dbg_kms(&dev_priv->drm, "dpll_hw_state: ebb0: 0x%x, ebb4: 0x%x, "
2285 "pll0: 0x%x, pll1: 0x%x, pll2: 0x%x, pll3: 0x%x, "
2286 "pll6: 0x%x, pll8: 0x%x, pll9: 0x%x, pll10: 0x%x, pcsdw12: 0x%x\n",
2287 hw_state->ebb0,
2288 hw_state->ebb4,
2289 hw_state->pll0,
2290 hw_state->pll1,
2291 hw_state->pll2,
2292 hw_state->pll3,
2293 hw_state->pll6,
2294 hw_state->pll8,
2295 hw_state->pll9,
2296 hw_state->pll10,
2297 hw_state->pcsdw12);
2298}
2299
2300static const struct intel_shared_dpll_funcs bxt_ddi_pll_funcs = {
2301 .enable = bxt_ddi_pll_enable,
2302 .disable = bxt_ddi_pll_disable,
2303 .get_hw_state = bxt_ddi_pll_get_hw_state,
2304 .get_freq = bxt_ddi_pll_get_freq,
2305};
2306
2307static const struct dpll_info bxt_plls[] = {
2308 { "PORT PLL A", &bxt_ddi_pll_funcs, DPLL_ID_SKL_DPLL0, 0 },
2309 { "PORT PLL B", &bxt_ddi_pll_funcs, DPLL_ID_SKL_DPLL1, 0 },
2310 { "PORT PLL C", &bxt_ddi_pll_funcs, DPLL_ID_SKL_DPLL2, 0 },
2311 { },
2312};
2313
2314static const struct intel_dpll_mgr bxt_pll_mgr = {
2315 .dpll_info = bxt_plls,
2316 .get_dplls = bxt_get_dpll,
2317 .put_dplls = intel_put_dpll,
2318 .update_ref_clks = bxt_update_dpll_ref_clks,
2319 .dump_hw_state = bxt_dump_hw_state,
2320};
2321
2322static void cnl_ddi_pll_enable(struct drm_i915_private *dev_priv,
2323 struct intel_shared_dpll *pll)
2324{
2325 const enum intel_dpll_id id = pll->info->id;
2326 u32 val;
2327
2328
2329 val = intel_de_read(dev_priv, CNL_DPLL_ENABLE(id));
2330 val |= PLL_POWER_ENABLE;
2331 intel_de_write(dev_priv, CNL_DPLL_ENABLE(id), val);
2332
2333
2334 if (intel_de_wait_for_set(dev_priv, CNL_DPLL_ENABLE(id),
2335 PLL_POWER_STATE, 5))
2336 drm_err(&dev_priv->drm, "PLL %d Power not enabled\n", id);
2337
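	/* Program DPLL_CFGCR0: SSC enable, DP/HDMI mode and the DP link rate. */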
2342 val = pll->state.hw_state.cfgcr0;
2343 intel_de_write(dev_priv, CNL_DPLL_CFGCR0(id), val);
2344
2345
2346 intel_de_posting_read(dev_priv, CNL_DPLL_CFGCR0(id));
2347
2348
2349
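	/* CFGCR1 only applies in HDMI mode; leave it untouched otherwise. */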
2350 if (pll->state.hw_state.cfgcr0 & DPLL_CFGCR0_HDMI_MODE) {
2351 val = pll->state.hw_state.cfgcr1;
2352 intel_de_write(dev_priv, CNL_DPLL_CFGCR1(id), val);
2353
2354 intel_de_posting_read(dev_priv, CNL_DPLL_CFGCR1(id));
2355 }
2356
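	/*
	 * DVFS sequencing around the frequency change is handled by the cdclk
	 * code, so enable the DPLL and wait for lock.
	 */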
2367 val = intel_de_read(dev_priv, CNL_DPLL_ENABLE(id));
2368 val |= PLL_ENABLE;
2369 intel_de_write(dev_priv, CNL_DPLL_ENABLE(id), val);
2370
2371
2372 if (intel_de_wait_for_set(dev_priv, CNL_DPLL_ENABLE(id), PLL_LOCK, 5))
2373 drm_err(&dev_priv->drm, "PLL %d not locked\n", id);
2388}
2389
2390static void cnl_ddi_pll_disable(struct drm_i915_private *dev_priv,
2391 struct intel_shared_dpll *pll)
2392{
2393 const enum intel_dpll_id id = pll->info->id;
2394 u32 val;
2395
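	/*
	 * DDI clock gating and DVFS sequencing are handled in the DDI/cdclk
	 * code; here we only disable the PLL itself.
	 */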
2411 val = intel_de_read(dev_priv, CNL_DPLL_ENABLE(id));
2412 val &= ~PLL_ENABLE;
2413 intel_de_write(dev_priv, CNL_DPLL_ENABLE(id), val);
2414
2415
2416 if (intel_de_wait_for_clear(dev_priv, CNL_DPLL_ENABLE(id), PLL_LOCK, 5))
2417 drm_err(&dev_priv->drm, "PLL %d locked\n", id);
2418
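	/* With the PLL unlocked, drop PLL power and wait for the power state to clear. */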
2429 val = intel_de_read(dev_priv, CNL_DPLL_ENABLE(id));
2430 val &= ~PLL_POWER_ENABLE;
2431 intel_de_write(dev_priv, CNL_DPLL_ENABLE(id), val);
2432
2433
2434 if (intel_de_wait_for_clear(dev_priv, CNL_DPLL_ENABLE(id),
2435 PLL_POWER_STATE, 5))
2436 drm_err(&dev_priv->drm, "PLL %d Power not disabled\n", id);
2437}
2438
2439static bool cnl_ddi_pll_get_hw_state(struct drm_i915_private *dev_priv,
2440 struct intel_shared_dpll *pll,
2441 struct intel_dpll_hw_state *hw_state)
2442{
2443 const enum intel_dpll_id id = pll->info->id;
2444 intel_wakeref_t wakeref;
2445 u32 val;
2446 bool ret;
2447
2448 wakeref = intel_display_power_get_if_enabled(dev_priv,
2449 POWER_DOMAIN_DISPLAY_CORE);
2450 if (!wakeref)
2451 return false;
2452
2453 ret = false;
2454
2455 val = intel_de_read(dev_priv, CNL_DPLL_ENABLE(id));
2456 if (!(val & PLL_ENABLE))
2457 goto out;
2458
2459 val = intel_de_read(dev_priv, CNL_DPLL_CFGCR0(id));
2460 hw_state->cfgcr0 = val;
2461
2462
2463 if (val & DPLL_CFGCR0_HDMI_MODE) {
2464 hw_state->cfgcr1 = intel_de_read(dev_priv,
2465 CNL_DPLL_CFGCR1(id));
2466 }
2467 ret = true;
2468
2469out:
2470 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
2471
2472 return ret;
2473}
2474
2475static void cnl_wrpll_get_multipliers(int bestdiv, int *pdiv,
2476 int *qdiv, int *kdiv)
2477{
2478
2479 if (bestdiv % 2 == 0) {
2480 if (bestdiv == 2) {
2481 *pdiv = 2;
2482 *qdiv = 1;
2483 *kdiv = 1;
2484 } else if (bestdiv % 4 == 0) {
2485 *pdiv = 2;
2486 *qdiv = bestdiv / 4;
2487 *kdiv = 2;
2488 } else if (bestdiv % 6 == 0) {
2489 *pdiv = 3;
2490 *qdiv = bestdiv / 6;
2491 *kdiv = 2;
2492 } else if (bestdiv % 5 == 0) {
2493 *pdiv = 5;
2494 *qdiv = bestdiv / 10;
2495 *kdiv = 2;
2496 } else if (bestdiv % 14 == 0) {
2497 *pdiv = 7;
2498 *qdiv = bestdiv / 14;
2499 *kdiv = 2;
2500 }
2501 } else {
2502 if (bestdiv == 3 || bestdiv == 5 || bestdiv == 7) {
2503 *pdiv = bestdiv;
2504 *qdiv = 1;
2505 *kdiv = 1;
2506 } else {
2507 *pdiv = bestdiv / 3;
2508 *qdiv = 1;
2509 *kdiv = 3;
2510 }
2511 }
2512}
2513
2514static void cnl_wrpll_params_populate(struct skl_wrpll_params *params,
2515 u32 dco_freq, u32 ref_freq,
2516 int pdiv, int qdiv, int kdiv)
2517{
2518 u32 dco;
2519
2520 switch (kdiv) {
2521 case 1:
2522 params->kdiv = 1;
2523 break;
2524 case 2:
2525 params->kdiv = 2;
2526 break;
2527 case 3:
2528 params->kdiv = 4;
2529 break;
2530 default:
2531 WARN(1, "Incorrect KDiv\n");
2532 }
2533
2534 switch (pdiv) {
2535 case 2:
2536 params->pdiv = 1;
2537 break;
2538 case 3:
2539 params->pdiv = 2;
2540 break;
2541 case 5:
2542 params->pdiv = 4;
2543 break;
2544 case 7:
2545 params->pdiv = 8;
2546 break;
2547 default:
2548 WARN(1, "Incorrect PDiv\n");
2549 }
2550
2551 WARN_ON(kdiv != 2 && qdiv != 1);
2552
2553 params->qdiv_ratio = qdiv;
2554 params->qdiv_mode = (qdiv == 1) ? 0 : 1;
2555
2556 dco = div_u64((u64)dco_freq << 15, ref_freq);
2557
2558 params->dco_integer = dco >> 15;
2559 params->dco_fraction = dco & 0x7fff;
2560}
2561
2562static bool
2563__cnl_ddi_calculate_wrpll(struct intel_crtc_state *crtc_state,
2564 struct skl_wrpll_params *wrpll_params,
2565 int ref_clock)
2566{
2567 u32 afe_clock = crtc_state->port_clock * 5;
2568 u32 dco_min = 7998000;
2569 u32 dco_max = 10000000;
2570 u32 dco_mid = (dco_min + dco_max) / 2;
2571 static const int dividers[] = { 2, 4, 6, 8, 10, 12, 14, 16,
2572 18, 20, 24, 28, 30, 32, 36, 40,
2573 42, 44, 48, 50, 52, 54, 56, 60,
2574 64, 66, 68, 70, 72, 76, 78, 80,
2575 84, 88, 90, 92, 96, 98, 100, 102,
2576 3, 5, 7, 9, 15, 21 };
2577 u32 dco, best_dco = 0, dco_centrality = 0;
2578 u32 best_dco_centrality = U32_MAX;
2579 int d, best_div = 0, pdiv = 0, qdiv = 0, kdiv = 0;
2580
2581 for (d = 0; d < ARRAY_SIZE(dividers); d++) {
2582 dco = afe_clock * dividers[d];
2583
2584 if ((dco <= dco_max) && (dco >= dco_min)) {
2585 dco_centrality = abs(dco - dco_mid);
2586
2587 if (dco_centrality < best_dco_centrality) {
2588 best_dco_centrality = dco_centrality;
2589 best_div = dividers[d];
2590 best_dco = dco;
2591 }
2592 }
2593 }
2594
2595 if (best_div == 0)
2596 return false;
2597
2598 cnl_wrpll_get_multipliers(best_div, &pdiv, &qdiv, &kdiv);
2599 cnl_wrpll_params_populate(wrpll_params, best_dco, ref_clock,
2600 pdiv, qdiv, kdiv);
2601
2602 return true;
2603}
2604
2605static bool
2606cnl_ddi_calculate_wrpll(struct intel_crtc_state *crtc_state,
2607 struct skl_wrpll_params *wrpll_params)
2608{
2609 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2610
2611 return __cnl_ddi_calculate_wrpll(crtc_state, wrpll_params,
2612 i915->dpll.ref_clks.nssc);
2613}
2614
2615static bool cnl_ddi_hdmi_pll_dividers(struct intel_crtc_state *crtc_state)
2616{
2617 u32 cfgcr0, cfgcr1;
2618 struct skl_wrpll_params wrpll_params = { 0, };
2619
2620 cfgcr0 = DPLL_CFGCR0_HDMI_MODE;
2621
2622 if (!cnl_ddi_calculate_wrpll(crtc_state, &wrpll_params))
2623 return false;
2624
2625 cfgcr0 |= DPLL_CFGCR0_DCO_FRACTION(wrpll_params.dco_fraction) |
2626 wrpll_params.dco_integer;
2627
2628 cfgcr1 = DPLL_CFGCR1_QDIV_RATIO(wrpll_params.qdiv_ratio) |
2629 DPLL_CFGCR1_QDIV_MODE(wrpll_params.qdiv_mode) |
2630 DPLL_CFGCR1_KDIV(wrpll_params.kdiv) |
2631 DPLL_CFGCR1_PDIV(wrpll_params.pdiv) |
2632 DPLL_CFGCR1_CENTRAL_FREQ;
2633
2634 memset(&crtc_state->dpll_hw_state, 0,
2635 sizeof(crtc_state->dpll_hw_state));
2636
2637 crtc_state->dpll_hw_state.cfgcr0 = cfgcr0;
2638 crtc_state->dpll_hw_state.cfgcr1 = cfgcr1;
2639 return true;
2640}
2641
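/*
 * Workaround: with a 38.4 MHz reference on EHL/JSL (rev B0+) and TGL, only
 * half of the nominal DCO divider fraction value must be programmed.
 */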
2646static bool
2647ehl_combo_pll_div_frac_wa_needed(struct drm_i915_private *i915)
2648{
2649 return ((IS_PLATFORM(i915, INTEL_ELKHARTLAKE) &&
2650 IS_JSL_EHL_REVID(i915, EHL_REVID_B0, REVID_FOREVER)) ||
2651 IS_TIGERLAKE(i915)) &&
2652 i915->dpll.ref_clks.nssc == 38400;
2653}
2654
2655static int __cnl_ddi_wrpll_get_freq(struct drm_i915_private *dev_priv,
2656 const struct intel_shared_dpll *pll,
2657 const struct intel_dpll_hw_state *pll_state,
2658 int ref_clock)
2659{
2660 u32 dco_fraction;
2661 u32 p0, p1, p2, dco_freq;
2662
2663 p0 = pll_state->cfgcr1 & DPLL_CFGCR1_PDIV_MASK;
2664 p2 = pll_state->cfgcr1 & DPLL_CFGCR1_KDIV_MASK;
2665
2666 if (pll_state->cfgcr1 & DPLL_CFGCR1_QDIV_MODE(1))
2667 p1 = (pll_state->cfgcr1 & DPLL_CFGCR1_QDIV_RATIO_MASK) >>
2668 DPLL_CFGCR1_QDIV_RATIO_SHIFT;
2669 else
2670 p1 = 1;
2671
2672
2673 switch (p0) {
2674 case DPLL_CFGCR1_PDIV_2:
2675 p0 = 2;
2676 break;
2677 case DPLL_CFGCR1_PDIV_3:
2678 p0 = 3;
2679 break;
2680 case DPLL_CFGCR1_PDIV_5:
2681 p0 = 5;
2682 break;
2683 case DPLL_CFGCR1_PDIV_7:
2684 p0 = 7;
2685 break;
2686 }
2687
2688 switch (p2) {
2689 case DPLL_CFGCR1_KDIV_1:
2690 p2 = 1;
2691 break;
2692 case DPLL_CFGCR1_KDIV_2:
2693 p2 = 2;
2694 break;
2695 case DPLL_CFGCR1_KDIV_3:
2696 p2 = 3;
2697 break;
2698 }
2699
2700 dco_freq = (pll_state->cfgcr0 & DPLL_CFGCR0_DCO_INTEGER_MASK) *
2701 ref_clock;
2702
2703 dco_fraction = (pll_state->cfgcr0 & DPLL_CFGCR0_DCO_FRACTION_MASK) >>
2704 DPLL_CFGCR0_DCO_FRACTION_SHIFT;
2705
2706 if (ehl_combo_pll_div_frac_wa_needed(dev_priv))
2707 dco_fraction *= 2;
2708
2709 dco_freq += (dco_fraction * ref_clock) / 0x8000;
2710
2711 if (drm_WARN_ON(&dev_priv->drm, p0 == 0 || p1 == 0 || p2 == 0))
2712 return 0;
2713
2714 return dco_freq / (p0 * p1 * p2 * 5);
2715}
2716
2717static int cnl_ddi_wrpll_get_freq(struct drm_i915_private *i915,
2718 const struct intel_shared_dpll *pll,
2719 const struct intel_dpll_hw_state *pll_state)
2720{
2721 return __cnl_ddi_wrpll_get_freq(i915, pll, pll_state,
2722 i915->dpll.ref_clks.nssc);
2723}
2724
2725static bool
2726cnl_ddi_dp_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
2727{
2728 u32 cfgcr0;
2729
2730 cfgcr0 = DPLL_CFGCR0_SSC_ENABLE;
2731
2732 switch (crtc_state->port_clock / 2) {
2733 case 81000:
2734 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_810;
2735 break;
2736 case 135000:
2737 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_1350;
2738 break;
2739 case 270000:
2740 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_2700;
2741 break;
2742
2743 case 162000:
2744 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_1620;
2745 break;
2746 case 108000:
2747 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_1080;
2748 break;
2749 case 216000:
2750 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_2160;
2751 break;
2752 case 324000:
2753
2754 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_3240;
2755 break;
2756 case 405000:
2757
2758 cfgcr0 |= DPLL_CFGCR0_LINK_RATE_4050;
2759 break;
2760 }
2761
2762 memset(&crtc_state->dpll_hw_state, 0,
2763 sizeof(crtc_state->dpll_hw_state));
2764
2765 crtc_state->dpll_hw_state.cfgcr0 = cfgcr0;
2766
2767 return true;
2768}
2769
2770static int cnl_ddi_lcpll_get_freq(struct drm_i915_private *i915,
2771 const struct intel_shared_dpll *pll,
2772 const struct intel_dpll_hw_state *pll_state)
2773{
2774 int link_clock = 0;
2775
2776 switch (pll_state->cfgcr0 & DPLL_CFGCR0_LINK_RATE_MASK) {
2777 case DPLL_CFGCR0_LINK_RATE_810:
2778 link_clock = 81000;
2779 break;
2780 case DPLL_CFGCR0_LINK_RATE_1080:
2781 link_clock = 108000;
2782 break;
2783 case DPLL_CFGCR0_LINK_RATE_1350:
2784 link_clock = 135000;
2785 break;
2786 case DPLL_CFGCR0_LINK_RATE_1620:
2787 link_clock = 162000;
2788 break;
2789 case DPLL_CFGCR0_LINK_RATE_2160:
2790 link_clock = 216000;
2791 break;
2792 case DPLL_CFGCR0_LINK_RATE_2700:
2793 link_clock = 270000;
2794 break;
2795 case DPLL_CFGCR0_LINK_RATE_3240:
2796 link_clock = 324000;
2797 break;
2798 case DPLL_CFGCR0_LINK_RATE_4050:
2799 link_clock = 405000;
2800 break;
2801 default:
2802 drm_WARN(&i915->drm, 1, "Unsupported link rate\n");
2803 break;
2804 }
2805
2806 return link_clock * 2;
2807}
2808
2809static bool cnl_get_dpll(struct intel_atomic_state *state,
2810 struct intel_crtc *crtc,
2811 struct intel_encoder *encoder)
2812{
2813 struct intel_crtc_state *crtc_state =
2814 intel_atomic_get_new_crtc_state(state, crtc);
2815 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2816 struct intel_shared_dpll *pll;
2817 bool bret;
2818
2819 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
2820 bret = cnl_ddi_hdmi_pll_dividers(crtc_state);
2821 if (!bret) {
2822 drm_dbg_kms(&i915->drm,
2823 "Could not get HDMI pll dividers.\n");
2824 return false;
2825 }
2826 } else if (intel_crtc_has_dp_encoder(crtc_state)) {
2827 bret = cnl_ddi_dp_set_dpll_hw_state(crtc_state);
2828 if (!bret) {
2829 drm_dbg_kms(&i915->drm,
2830 "Could not set DP dpll HW state.\n");
2831 return false;
2832 }
2833 } else {
2834 drm_dbg_kms(&i915->drm,
2835 "Skip DPLL setup for output_types 0x%x\n",
2836 crtc_state->output_types);
2837 return false;
2838 }
2839
2840 pll = intel_find_shared_dpll(state, crtc,
2841 &crtc_state->dpll_hw_state,
2842 BIT(DPLL_ID_SKL_DPLL2) |
2843 BIT(DPLL_ID_SKL_DPLL1) |
2844 BIT(DPLL_ID_SKL_DPLL0));
2845 if (!pll) {
2846 drm_dbg_kms(&i915->drm, "No PLL selected\n");
2847 return false;
2848 }
2849
2850 intel_reference_shared_dpll(state, crtc,
2851 pll, &crtc_state->dpll_hw_state);
2852
2853 crtc_state->shared_dpll = pll;
2854
2855 return true;
2856}
2857
2858static int cnl_ddi_pll_get_freq(struct drm_i915_private *i915,
2859 const struct intel_shared_dpll *pll,
2860 const struct intel_dpll_hw_state *pll_state)
2861{
2862 if (pll_state->cfgcr0 & DPLL_CFGCR0_HDMI_MODE)
2863 return cnl_ddi_wrpll_get_freq(i915, pll, pll_state);
2864 else
2865 return cnl_ddi_lcpll_get_freq(i915, pll, pll_state);
2866}
2867
2868static void cnl_update_dpll_ref_clks(struct drm_i915_private *i915)
2869{
2870
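	/* The non-SSC DPLL reference is the cdclk reference clock. */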
2871 i915->dpll.ref_clks.nssc = i915->cdclk.hw.ref;
2872}
2873
2874static void cnl_dump_hw_state(struct drm_i915_private *dev_priv,
2875 const struct intel_dpll_hw_state *hw_state)
2876{
2877 drm_dbg_kms(&dev_priv->drm, "dpll_hw_state: "
2878 "cfgcr0: 0x%x, cfgcr1: 0x%x\n",
2879 hw_state->cfgcr0,
2880 hw_state->cfgcr1);
2881}
2882
2883static const struct intel_shared_dpll_funcs cnl_ddi_pll_funcs = {
2884 .enable = cnl_ddi_pll_enable,
2885 .disable = cnl_ddi_pll_disable,
2886 .get_hw_state = cnl_ddi_pll_get_hw_state,
2887 .get_freq = cnl_ddi_pll_get_freq,
2888};
2889
2890static const struct dpll_info cnl_plls[] = {
2891 { "DPLL 0", &cnl_ddi_pll_funcs, DPLL_ID_SKL_DPLL0, 0 },
2892 { "DPLL 1", &cnl_ddi_pll_funcs, DPLL_ID_SKL_DPLL1, 0 },
2893 { "DPLL 2", &cnl_ddi_pll_funcs, DPLL_ID_SKL_DPLL2, 0 },
2894 { },
2895};
2896
2897static const struct intel_dpll_mgr cnl_pll_mgr = {
2898 .dpll_info = cnl_plls,
2899 .get_dplls = cnl_get_dpll,
2900 .put_dplls = intel_put_dpll,
2901 .update_ref_clks = cnl_update_dpll_ref_clks,
2902 .dump_hw_state = cnl_dump_hw_state,
2903};
2904
2905struct icl_combo_pll_params {
2906 int clock;
2907 struct skl_wrpll_params wrpll;
2908};
2909
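/*
 * These values are already adjusted: they are the raw bits written to the
 * CFGCR registers, not the logical divider values.
 */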
2914static const struct icl_combo_pll_params icl_dp_combo_pll_24MHz_values[] = {
2915 { 540000,
2916 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2917 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2918 { 270000,
2919 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2920 .pdiv = 0x2 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2921 { 162000,
2922 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2923 .pdiv = 0x4 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2924 { 324000,
2925 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2926 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2927 { 216000,
2928 { .dco_integer = 0x168, .dco_fraction = 0x0000,
2929 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 1, .qdiv_ratio = 2, }, },
2930 { 432000,
2931 { .dco_integer = 0x168, .dco_fraction = 0x0000,
2932 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2933 { 648000,
2934 { .dco_integer = 0x195, .dco_fraction = 0x0000,
2935 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2936 { 810000,
2937 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2938 .pdiv = 0x1 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2939};
2940
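/* Also used for 38.4 MHz values. */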
2943static const struct icl_combo_pll_params icl_dp_combo_pll_19_2MHz_values[] = {
2944 { 540000,
2945 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2946 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2947 { 270000,
2948 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2949 .pdiv = 0x2 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2950 { 162000,
2951 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2952 .pdiv = 0x4 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2953 { 324000,
2954 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2955 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2956 { 216000,
2957 { .dco_integer = 0x1C2, .dco_fraction = 0x0000,
2958 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 1, .qdiv_ratio = 2, }, },
2959 { 432000,
2960 { .dco_integer = 0x1C2, .dco_fraction = 0x0000,
2961 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2962 { 648000,
2963 { .dco_integer = 0x1FA, .dco_fraction = 0x2000,
2964 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2965 { 810000,
2966 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2967 .pdiv = 0x1 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2968};
2969
2970static const struct skl_wrpll_params icl_tbt_pll_24MHz_values = {
2971 .dco_integer = 0x151, .dco_fraction = 0x4000,
2972 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0,
2973};
2974
2975static const struct skl_wrpll_params icl_tbt_pll_19_2MHz_values = {
2976 .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2977 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0,
2978};
2979
2980static const struct skl_wrpll_params tgl_tbt_pll_19_2MHz_values = {
2981 .dco_integer = 0x54, .dco_fraction = 0x3000,
2982
2983 .pdiv = 0, .kdiv = 0, .qdiv_mode = 0, .qdiv_ratio = 0,
2984};
2985
2986static const struct skl_wrpll_params tgl_tbt_pll_24MHz_values = {
2987 .dco_integer = 0x43, .dco_fraction = 0x4000,
	.pdiv = 0, .kdiv = 0, .qdiv_mode = 0, .qdiv_ratio = 0,
2989};
2990
2991static bool icl_calc_dp_combo_pll(struct intel_crtc_state *crtc_state,
2992 struct skl_wrpll_params *pll_params)
2993{
2994 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2995 const struct icl_combo_pll_params *params =
2996 dev_priv->dpll.ref_clks.nssc == 24000 ?
2997 icl_dp_combo_pll_24MHz_values :
2998 icl_dp_combo_pll_19_2MHz_values;
2999 int clock = crtc_state->port_clock;
3000 int i;
3001
3002 for (i = 0; i < ARRAY_SIZE(icl_dp_combo_pll_24MHz_values); i++) {
3003 if (clock == params[i].clock) {
3004 *pll_params = params[i].wrpll;
3005 return true;
3006 }
3007 }
3008
3009 MISSING_CASE(clock);
3010 return false;
3011}
3012
3013static bool icl_calc_tbt_pll(struct intel_crtc_state *crtc_state,
3014 struct skl_wrpll_params *pll_params)
3015{
3016 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
3017
3018 if (INTEL_GEN(dev_priv) >= 12) {
3019 switch (dev_priv->dpll.ref_clks.nssc) {
3020 default:
3021 MISSING_CASE(dev_priv->dpll.ref_clks.nssc);
3022 fallthrough;
3023 case 19200:
3024 case 38400:
3025 *pll_params = tgl_tbt_pll_19_2MHz_values;
3026 break;
3027 case 24000:
3028 *pll_params = tgl_tbt_pll_24MHz_values;
3029 break;
3030 }
3031 } else {
3032 switch (dev_priv->dpll.ref_clks.nssc) {
3033 default:
3034 MISSING_CASE(dev_priv->dpll.ref_clks.nssc);
3035 fallthrough;
3036 case 19200:
3037 case 38400:
3038 *pll_params = icl_tbt_pll_19_2MHz_values;
3039 break;
3040 case 24000:
3041 *pll_params = icl_tbt_pll_24MHz_values;
3042 break;
3043 }
3044 }
3045
3046 return true;
3047}
3048
3049static int icl_ddi_tbt_pll_get_freq(struct drm_i915_private *i915,
3050 const struct intel_shared_dpll *pll,
3051 const struct intel_dpll_hw_state *pll_state)
3052{
3057 drm_WARN_ON(&i915->drm, 1);
3058
3059 return 0;
3060}
3061
3062static int icl_wrpll_ref_clock(struct drm_i915_private *i915)
3063{
3064 int ref_clock = i915->dpll.ref_clks.nssc;
3065
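	/*
	 * For ICL+, if the reference frequency is 38.4 MHz use 19.2 MHz
	 * instead, since the DPLL automatically divides that reference by 2.
	 */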
3070 if (ref_clock == 38400)
3071 ref_clock = 19200;
3072
3073 return ref_clock;
3074}
3075
3076static bool
3077icl_calc_wrpll(struct intel_crtc_state *crtc_state,
3078 struct skl_wrpll_params *wrpll_params)
3079{
3080 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
3081
3082 return __cnl_ddi_calculate_wrpll(crtc_state, wrpll_params,
3083 icl_wrpll_ref_clock(i915));
3084}
3085
3086static int icl_ddi_combo_pll_get_freq(struct drm_i915_private *i915,
3087 const struct intel_shared_dpll *pll,
3088 const struct intel_dpll_hw_state *pll_state)
3089{
3090 return __cnl_ddi_wrpll_get_freq(i915, pll, pll_state,
3091 icl_wrpll_ref_clock(i915));
3092}
3093
3094static void icl_calc_dpll_state(struct drm_i915_private *i915,
3095 const struct skl_wrpll_params *pll_params,
3096 struct intel_dpll_hw_state *pll_state)
3097{
3098 u32 dco_fraction = pll_params->dco_fraction;
3099
3100 memset(pll_state, 0, sizeof(*pll_state));
3101
3102 if (ehl_combo_pll_div_frac_wa_needed(i915))
3103 dco_fraction = DIV_ROUND_CLOSEST(dco_fraction, 2);
3104
3105 pll_state->cfgcr0 = DPLL_CFGCR0_DCO_FRACTION(dco_fraction) |
3106 pll_params->dco_integer;
3107
3108 pll_state->cfgcr1 = DPLL_CFGCR1_QDIV_RATIO(pll_params->qdiv_ratio) |
3109 DPLL_CFGCR1_QDIV_MODE(pll_params->qdiv_mode) |
3110 DPLL_CFGCR1_KDIV(pll_params->kdiv) |
3111 DPLL_CFGCR1_PDIV(pll_params->pdiv);
3112
3113 if (INTEL_GEN(i915) >= 12)
3114 pll_state->cfgcr1 |= TGL_DPLL_CFGCR1_CFSELOVRD_NORMAL_XTAL;
3115 else
3116 pll_state->cfgcr1 |= DPLL_CFGCR1_CENTRAL_FREQ_8400;
3117}
3118
3119static enum tc_port icl_pll_id_to_tc_port(enum intel_dpll_id id)
3120{
3121 return id - DPLL_ID_ICL_MGPLL1;
3122}
3123
3124enum intel_dpll_id icl_tc_port_to_pll_id(enum tc_port tc_port)
3125{
3126 return tc_port + DPLL_ID_ICL_MGPLL1;
3127}
3128
3129static bool icl_mg_pll_find_divisors(int clock_khz, bool is_dp, bool use_ssc,
3130 u32 *target_dco_khz,
3131 struct intel_dpll_hw_state *state,
3132 bool is_dkl)
3133{
3134 u32 dco_min_freq, dco_max_freq;
3135 int div1_vals[] = {7, 5, 3, 2};
3136 unsigned int i;
3137 int div2;
3138
3139 dco_min_freq = is_dp ? 8100000 : use_ssc ? 8000000 : 7992000;
3140 dco_max_freq = is_dp ? 8100000 : 10000000;
3141
3142 for (i = 0; i < ARRAY_SIZE(div1_vals); i++) {
3143 int div1 = div1_vals[i];
3144
3145 for (div2 = 10; div2 > 0; div2--) {
3146 int dco = div1 * div2 * clock_khz * 5;
3147 int a_divratio, tlinedrv, inputsel;
3148 u32 hsdiv;
3149
3150 if (dco < dco_min_freq || dco > dco_max_freq)
3151 continue;
3152
3153 if (div2 >= 2) {
3159 a_divratio = is_dp ? 10 : 5;
3160 tlinedrv = is_dkl ? 1 : 2;
3161 } else {
3162 a_divratio = 5;
3163 tlinedrv = 0;
3164 }
3165 inputsel = is_dp ? 0 : 1;
3166
3167 switch (div1) {
3168 default:
3169 MISSING_CASE(div1);
3170 fallthrough;
3171 case 2:
3172 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_2;
3173 break;
3174 case 3:
3175 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_3;
3176 break;
3177 case 5:
3178 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_5;
3179 break;
3180 case 7:
3181 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_7;
3182 break;
3183 }
3184
3185 *target_dco_khz = dco;
3186
3187 state->mg_refclkin_ctl = MG_REFCLKIN_CTL_OD_2_MUX(1);
3188
3189 state->mg_clktop2_coreclkctl1 =
3190 MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO(a_divratio);
3191
3192 state->mg_clktop2_hsclkctl =
3193 MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL(tlinedrv) |
3194 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL(inputsel) |
3195 hsdiv |
3196 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO(div2);
3197
3198 return true;
3199 }
3200 }
3201
3202 return false;
3203}
3204
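/*
 * The specification for this function uses real numbers, so the math had to
 * be adapted to integer-only calculation, which is why it looks so different.
 */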
3209static bool icl_calc_mg_pll_state(struct intel_crtc_state *crtc_state,
3210 struct intel_dpll_hw_state *pll_state)
3211{
3212 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
3213 int refclk_khz = dev_priv->dpll.ref_clks.nssc;
3214 int clock = crtc_state->port_clock;
3215 u32 dco_khz, m1div, m2div_int, m2div_rem, m2div_frac;
3216 u32 iref_ndiv, iref_trim, iref_pulse_w;
3217 u32 prop_coeff, int_coeff;
3218 u32 tdc_targetcnt, feedfwgain;
3219 u64 ssc_stepsize, ssc_steplen, ssc_steplog;
3220 u64 tmp;
3221 bool use_ssc = false;
3222 bool is_dp = !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI);
3223 bool is_dkl = INTEL_GEN(dev_priv) >= 12;
3224
3225 memset(pll_state, 0, sizeof(*pll_state));
3226
3227 if (!icl_mg_pll_find_divisors(clock, is_dp, use_ssc, &dco_khz,
3228 pll_state, is_dkl)) {
3229 drm_dbg_kms(&dev_priv->drm,
3230 "Failed to find divisors for clock %d\n", clock);
3231 return false;
3232 }
3233
3234 m1div = 2;
3235 m2div_int = dco_khz / (refclk_khz * m1div);
3236 if (m2div_int > 255) {
3237 if (!is_dkl) {
3238 m1div = 4;
3239 m2div_int = dco_khz / (refclk_khz * m1div);
3240 }
3241
3242 if (m2div_int > 255) {
3243 drm_dbg_kms(&dev_priv->drm,
3244 "Failed to find mdiv for clock %d\n",
3245 clock);
3246 return false;
3247 }
3248 }
3249 m2div_rem = dco_khz % (refclk_khz * m1div);
3250
3251 tmp = (u64)m2div_rem * (1 << 22);
3252 do_div(tmp, refclk_khz * m1div);
3253 m2div_frac = tmp;
3254
3255 switch (refclk_khz) {
3256 case 19200:
3257 iref_ndiv = 1;
3258 iref_trim = 28;
3259 iref_pulse_w = 1;
3260 break;
3261 case 24000:
3262 iref_ndiv = 1;
3263 iref_trim = 25;
3264 iref_pulse_w = 2;
3265 break;
3266 case 38400:
3267 iref_ndiv = 2;
3268 iref_trim = 28;
3269 iref_pulse_w = 1;
3270 break;
3271 default:
3272 MISSING_CASE(refclk_khz);
3273 return false;
3274 }
3275
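	/*
	 * tdc_targetcnt = int(2 / (tdc_res * 8 * 50 * 1.1) / refclk_mhz + 0.5)
	 * with tdc_res = 0.000003; the expression below is the same formula
	 * rearranged for integer math (132 / 100000 == tdc_res * 8 * 50 * 1.1,
	 * and the trailing "+ 5) / 10" performs the rounding).
	 */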
3291 tdc_targetcnt = (2 * 1000 * 100000 * 10 / (132 * refclk_khz) + 5) / 10;
3292
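	/*
	 * The feed-forward gain is only needed when SSC or a fractional
	 * feedback divider is in use.
	 */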
3298 feedfwgain = (use_ssc || m2div_rem > 0) ?
3299 m1div * 1000000 * 100 / (dco_khz * 3 / 10) : 0;
3300
3301 if (dco_khz >= 9000000) {
3302 prop_coeff = 5;
3303 int_coeff = 10;
3304 } else {
3305 prop_coeff = 4;
3306 int_coeff = 8;
3307 }
3308
3309 if (use_ssc) {
3310 tmp = mul_u32_u32(dco_khz, 47 * 32);
3311 do_div(tmp, refclk_khz * m1div * 10000);
3312 ssc_stepsize = tmp;
3313
3314 tmp = mul_u32_u32(dco_khz, 1000);
3315 ssc_steplen = DIV_ROUND_UP_ULL(tmp, 32 * 2 * 32);
3316 } else {
3317 ssc_stepsize = 0;
3318 ssc_steplen = 0;
3319 }
3320 ssc_steplog = 4;
3321
3322
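	/*
	 * Fill in the computed state; the register layout differs between the
	 * MG PHY (ICL) and the Dekel PHY (TGL+).
	 */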
3323 if (is_dkl) {
3324 pll_state->mg_pll_div0 = DKL_PLL_DIV0_INTEG_COEFF(int_coeff) |
3325 DKL_PLL_DIV0_PROP_COEFF(prop_coeff) |
3326 DKL_PLL_DIV0_FBPREDIV(m1div) |
3327 DKL_PLL_DIV0_FBDIV_INT(m2div_int);
3328
3329 pll_state->mg_pll_div1 = DKL_PLL_DIV1_IREF_TRIM(iref_trim) |
3330 DKL_PLL_DIV1_TDC_TARGET_CNT(tdc_targetcnt);
3331
3332 pll_state->mg_pll_ssc = DKL_PLL_SSC_IREF_NDIV_RATIO(iref_ndiv) |
3333 DKL_PLL_SSC_STEP_LEN(ssc_steplen) |
3334 DKL_PLL_SSC_STEP_NUM(ssc_steplog) |
3335 (use_ssc ? DKL_PLL_SSC_EN : 0);
3336
3337 pll_state->mg_pll_bias = (m2div_frac ? DKL_PLL_BIAS_FRAC_EN_H : 0) |
3338 DKL_PLL_BIAS_FBDIV_FRAC(m2div_frac);
3339
3340 pll_state->mg_pll_tdc_coldst_bias =
3341 DKL_PLL_TDC_SSC_STEP_SIZE(ssc_stepsize) |
3342 DKL_PLL_TDC_FEED_FWD_GAIN(feedfwgain);
3343
3344 } else {
3345 pll_state->mg_pll_div0 =
3346 (m2div_rem > 0 ? MG_PLL_DIV0_FRACNEN_H : 0) |
3347 MG_PLL_DIV0_FBDIV_FRAC(m2div_frac) |
3348 MG_PLL_DIV0_FBDIV_INT(m2div_int);
3349
3350 pll_state->mg_pll_div1 =
3351 MG_PLL_DIV1_IREF_NDIVRATIO(iref_ndiv) |
3352 MG_PLL_DIV1_DITHER_DIV_2 |
3353 MG_PLL_DIV1_NDIVRATIO(1) |
3354 MG_PLL_DIV1_FBPREDIV(m1div);
3355
3356 pll_state->mg_pll_lf =
3357 MG_PLL_LF_TDCTARGETCNT(tdc_targetcnt) |
3358 MG_PLL_LF_AFCCNTSEL_512 |
3359 MG_PLL_LF_GAINCTRL(1) |
3360 MG_PLL_LF_INT_COEFF(int_coeff) |
3361 MG_PLL_LF_PROP_COEFF(prop_coeff);
3362
3363 pll_state->mg_pll_frac_lock =
3364 MG_PLL_FRAC_LOCK_TRUELOCK_CRIT_32 |
3365 MG_PLL_FRAC_LOCK_EARLYLOCK_CRIT_32 |
3366 MG_PLL_FRAC_LOCK_LOCKTHRESH(10) |
3367 MG_PLL_FRAC_LOCK_DCODITHEREN |
3368 MG_PLL_FRAC_LOCK_FEEDFWRDGAIN(feedfwgain);
3369 if (use_ssc || m2div_rem > 0)
3370 pll_state->mg_pll_frac_lock |=
3371 MG_PLL_FRAC_LOCK_FEEDFWRDCAL_EN;
3372
3373 pll_state->mg_pll_ssc =
3374 (use_ssc ? MG_PLL_SSC_EN : 0) |
3375 MG_PLL_SSC_TYPE(2) |
3376 MG_PLL_SSC_STEPLENGTH(ssc_steplen) |
3377 MG_PLL_SSC_STEPNUM(ssc_steplog) |
3378 MG_PLL_SSC_FLLEN |
3379 MG_PLL_SSC_STEPSIZE(ssc_stepsize);
3380
3381 pll_state->mg_pll_tdc_coldst_bias =
3382 MG_PLL_TDC_COLDST_COLDSTART |
3383 MG_PLL_TDC_COLDST_IREFINT_EN |
3384 MG_PLL_TDC_COLDST_REFBIAS_START_PULSE_W(iref_pulse_w) |
3385 MG_PLL_TDC_TDCOVCCORR_EN |
3386 MG_PLL_TDC_TDCSEL(3);
3387
3388 pll_state->mg_pll_bias =
3389 MG_PLL_BIAS_BIAS_GB_SEL(3) |
3390 MG_PLL_BIAS_INIT_DCOAMP(0x3F) |
3391 MG_PLL_BIAS_BIAS_BONUS(10) |
3392 MG_PLL_BIAS_BIASCAL_EN |
3393 MG_PLL_BIAS_CTRIM(12) |
3394 MG_PLL_BIAS_VREF_RDAC(4) |
3395 MG_PLL_BIAS_IREFTRIM(iref_trim);
3396
3397 if (refclk_khz == 38400) {
3398 pll_state->mg_pll_tdc_coldst_bias_mask =
3399 MG_PLL_TDC_COLDST_COLDSTART;
3400 pll_state->mg_pll_bias_mask = 0;
3401 } else {
3402 pll_state->mg_pll_tdc_coldst_bias_mask = -1U;
3403 pll_state->mg_pll_bias_mask = -1U;
3404 }
3405
3406 pll_state->mg_pll_tdc_coldst_bias &=
3407 pll_state->mg_pll_tdc_coldst_bias_mask;
3408 pll_state->mg_pll_bias &= pll_state->mg_pll_bias_mask;
3409 }
3410
3411 return true;
3412}
3413
3414static int icl_ddi_mg_pll_get_freq(struct drm_i915_private *dev_priv,
3415 const struct intel_shared_dpll *pll,
3416 const struct intel_dpll_hw_state *pll_state)
3417{
3418 u32 m1, m2_int, m2_frac, div1, div2, ref_clock;
3419 u64 tmp;
3420
3421 ref_clock = dev_priv->dpll.ref_clks.nssc;
3422
3423 if (INTEL_GEN(dev_priv) >= 12) {
3424 m1 = pll_state->mg_pll_div0 & DKL_PLL_DIV0_FBPREDIV_MASK;
3425 m1 = m1 >> DKL_PLL_DIV0_FBPREDIV_SHIFT;
3426 m2_int = pll_state->mg_pll_div0 & DKL_PLL_DIV0_FBDIV_INT_MASK;
3427
3428 if (pll_state->mg_pll_bias & DKL_PLL_BIAS_FRAC_EN_H) {
3429 m2_frac = pll_state->mg_pll_bias &
3430 DKL_PLL_BIAS_FBDIV_FRAC_MASK;
3431 m2_frac = m2_frac >> DKL_PLL_BIAS_FBDIV_SHIFT;
3432 } else {
3433 m2_frac = 0;
3434 }
3435 } else {
3436 m1 = pll_state->mg_pll_div1 & MG_PLL_DIV1_FBPREDIV_MASK;
3437 m2_int = pll_state->mg_pll_div0 & MG_PLL_DIV0_FBDIV_INT_MASK;
3438
3439 if (pll_state->mg_pll_div0 & MG_PLL_DIV0_FRACNEN_H) {
3440 m2_frac = pll_state->mg_pll_div0 &
3441 MG_PLL_DIV0_FBDIV_FRAC_MASK;
3442 m2_frac = m2_frac >> MG_PLL_DIV0_FBDIV_FRAC_SHIFT;
3443 } else {
3444 m2_frac = 0;
3445 }
3446 }
3447
3448 switch (pll_state->mg_clktop2_hsclkctl &
3449 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK) {
3450 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_2:
3451 div1 = 2;
3452 break;
3453 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_3:
3454 div1 = 3;
3455 break;
3456 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_5:
3457 div1 = 5;
3458 break;
3459 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_7:
3460 div1 = 7;
3461 break;
3462 default:
3463 MISSING_CASE(pll_state->mg_clktop2_hsclkctl);
3464 return 0;
3465 }
3466
3467 div2 = (pll_state->mg_clktop2_hsclkctl &
3468 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK) >>
3469 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_SHIFT;
3470
3471
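	/* A DSDIV ratio of 0 means no division, i.e. the same as a ratio of 1. */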
3472 if (div2 == 0)
3473 div2 = 1;
3474
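	/*
	 * m2_frac is a 22-bit fixed-point fraction; shift it out only after
	 * multiplying to limit rounding error.
	 */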
3479 tmp = (u64)m1 * m2_int * ref_clock +
3480 (((u64)m1 * m2_frac * ref_clock) >> 22);
3481 tmp = div_u64(tmp, 5 * div1 * div2);
3482
3483 return tmp;
3484}
3485
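/**
 * icl_set_active_port_dpll - select the active port DPLL for a given CRTC
 * @crtc_state: CRTC state
 * @port_dpll_id: the active @port_dpll_id to select
 *
 * Select the given @port_dpll_id instance from the DPLLs reserved for the
 * CRTC.
 */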
3494void icl_set_active_port_dpll(struct intel_crtc_state *crtc_state,
3495 enum icl_port_dpll_id port_dpll_id)
3496{
3497 struct icl_port_dpll *port_dpll =
3498 &crtc_state->icl_port_dplls[port_dpll_id];
3499
3500 crtc_state->shared_dpll = port_dpll->pll;
3501 crtc_state->dpll_hw_state = port_dpll->hw_state;
3502}
3503
3504static void icl_update_active_dpll(struct intel_atomic_state *state,
3505 struct intel_crtc *crtc,
3506 struct intel_encoder *encoder)
3507{
3508 struct intel_crtc_state *crtc_state =
3509 intel_atomic_get_new_crtc_state(state, crtc);
3510 struct intel_digital_port *primary_port;
3511 enum icl_port_dpll_id port_dpll_id = ICL_PORT_DPLL_DEFAULT;
3512
3513 primary_port = encoder->type == INTEL_OUTPUT_DP_MST ?
3514 enc_to_mst(encoder)->primary :
3515 enc_to_dig_port(encoder);
3516
3517 if (primary_port &&
3518 (primary_port->tc_mode == TC_PORT_DP_ALT ||
3519 primary_port->tc_mode == TC_PORT_LEGACY))
3520 port_dpll_id = ICL_PORT_DPLL_MG_PHY;
3521
3522 icl_set_active_port_dpll(crtc_state, port_dpll_id);
3523}
3524
3525static u32 intel_get_hti_plls(struct drm_i915_private *i915)
3526{
3527 if (!(i915->hti_state & HDPORT_ENABLED))
3528 return 0;
3529
3530 return REG_FIELD_GET(HDPORT_DPLL_USED_MASK, i915->hti_state);
3531}
3532
3533static bool icl_get_combo_phy_dpll(struct intel_atomic_state *state,
3534 struct intel_crtc *crtc,
3535 struct intel_encoder *encoder)
3536{
3537 struct intel_crtc_state *crtc_state =
3538 intel_atomic_get_new_crtc_state(state, crtc);
3539 struct skl_wrpll_params pll_params = { };
3540 struct icl_port_dpll *port_dpll =
3541 &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3542 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3543 enum port port = encoder->port;
3544 unsigned long dpll_mask;
3545 int ret;
3546
3547 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) ||
3548 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI))
3549 ret = icl_calc_wrpll(crtc_state, &pll_params);
3550 else
3551 ret = icl_calc_dp_combo_pll(crtc_state, &pll_params);
3552
3553 if (!ret) {
3554 drm_dbg_kms(&dev_priv->drm,
3555 "Could not calculate combo PHY PLL state.\n");
3556
3557 return false;
3558 }
3559
3560 icl_calc_dpll_state(dev_priv, &pll_params, &port_dpll->hw_state);
3561
3562 if (IS_ALDERLAKE_S(dev_priv)) {
3563 dpll_mask =
3564 BIT(DPLL_ID_DG1_DPLL3) |
3565 BIT(DPLL_ID_DG1_DPLL2) |
3566 BIT(DPLL_ID_ICL_DPLL1) |
3567 BIT(DPLL_ID_ICL_DPLL0);
3568 } else if (IS_DG1(dev_priv)) {
3569 if (port == PORT_D || port == PORT_E) {
3570 dpll_mask =
3571 BIT(DPLL_ID_DG1_DPLL2) |
3572 BIT(DPLL_ID_DG1_DPLL3);
3573 } else {
3574 dpll_mask =
3575 BIT(DPLL_ID_DG1_DPLL0) |
3576 BIT(DPLL_ID_DG1_DPLL1);
3577 }
3578 } else if (IS_ROCKETLAKE(dev_priv)) {
3579 dpll_mask =
3580 BIT(DPLL_ID_EHL_DPLL4) |
3581 BIT(DPLL_ID_ICL_DPLL1) |
3582 BIT(DPLL_ID_ICL_DPLL0);
3583 } else if (IS_JSL_EHL(dev_priv) && port != PORT_A) {
3584 dpll_mask =
3585 BIT(DPLL_ID_EHL_DPLL4) |
3586 BIT(DPLL_ID_ICL_DPLL1) |
3587 BIT(DPLL_ID_ICL_DPLL0);
3588 } else {
3589 dpll_mask = BIT(DPLL_ID_ICL_DPLL1) | BIT(DPLL_ID_ICL_DPLL0);
3590 }
3591
3592
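	/* Eliminate DPLLs from consideration if reserved by HTI (HDPORT). */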
3593 dpll_mask &= ~intel_get_hti_plls(dev_priv);
3594
3595 port_dpll->pll = intel_find_shared_dpll(state, crtc,
3596 &port_dpll->hw_state,
3597 dpll_mask);
3598 if (!port_dpll->pll) {
3599 drm_dbg_kms(&dev_priv->drm,
3600 "No combo PHY PLL found for [ENCODER:%d:%s]\n",
3601 encoder->base.base.id, encoder->base.name);
3602 return false;
3603 }
3604
3605 intel_reference_shared_dpll(state, crtc,
3606 port_dpll->pll, &port_dpll->hw_state);
3607
3608 icl_update_active_dpll(state, crtc, encoder);
3609
3610 return true;
3611}
3612
3613static bool icl_get_tc_phy_dplls(struct intel_atomic_state *state,
3614 struct intel_crtc *crtc,
3615 struct intel_encoder *encoder)
3616{
3617 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3618 struct intel_crtc_state *crtc_state =
3619 intel_atomic_get_new_crtc_state(state, crtc);
3620 struct skl_wrpll_params pll_params = { };
3621 struct icl_port_dpll *port_dpll;
3622 enum intel_dpll_id dpll_id;
3623
3624 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3625 if (!icl_calc_tbt_pll(crtc_state, &pll_params)) {
3626 drm_dbg_kms(&dev_priv->drm,
3627 "Could not calculate TBT PLL state.\n");
3628 return false;
3629 }
3630
3631 icl_calc_dpll_state(dev_priv, &pll_params, &port_dpll->hw_state);
3632
3633 port_dpll->pll = intel_find_shared_dpll(state, crtc,
3634 &port_dpll->hw_state,
3635 BIT(DPLL_ID_ICL_TBTPLL));
3636 if (!port_dpll->pll) {
3637 drm_dbg_kms(&dev_priv->drm, "No TBT-ALT PLL found\n");
3638 return false;
3639 }
3640 intel_reference_shared_dpll(state, crtc,
3641 port_dpll->pll, &port_dpll->hw_state);
3642
3643
3644 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_MG_PHY];
3645 if (!icl_calc_mg_pll_state(crtc_state, &port_dpll->hw_state)) {
3646 drm_dbg_kms(&dev_priv->drm,
3647 "Could not calculate MG PHY PLL state.\n");
3648 goto err_unreference_tbt_pll;
3649 }
3650
3651 dpll_id = icl_tc_port_to_pll_id(intel_port_to_tc(dev_priv,
3652 encoder->port));
3653 port_dpll->pll = intel_find_shared_dpll(state, crtc,
3654 &port_dpll->hw_state,
3655 BIT(dpll_id));
3656 if (!port_dpll->pll) {
3657 drm_dbg_kms(&dev_priv->drm, "No MG PHY PLL found\n");
3658 goto err_unreference_tbt_pll;
3659 }
3660 intel_reference_shared_dpll(state, crtc,
3661 port_dpll->pll, &port_dpll->hw_state);
3662
3663 icl_update_active_dpll(state, crtc, encoder);
3664
3665 return true;
3666
3667err_unreference_tbt_pll:
3668 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3669 intel_unreference_shared_dpll(state, crtc, port_dpll->pll);
3670
3671 return false;
3672}
3673
3674static bool icl_get_dplls(struct intel_atomic_state *state,
3675 struct intel_crtc *crtc,
3676 struct intel_encoder *encoder)
3677{
3678 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3679 enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
3680
3681 if (intel_phy_is_combo(dev_priv, phy))
3682 return icl_get_combo_phy_dpll(state, crtc, encoder);
3683 else if (intel_phy_is_tc(dev_priv, phy))
3684 return icl_get_tc_phy_dplls(state, crtc, encoder);
3685
3686 MISSING_CASE(phy);
3687
3688 return false;
3689}
3690
3691static void icl_put_dplls(struct intel_atomic_state *state,
3692 struct intel_crtc *crtc)
3693{
3694 const struct intel_crtc_state *old_crtc_state =
3695 intel_atomic_get_old_crtc_state(state, crtc);
3696 struct intel_crtc_state *new_crtc_state =
3697 intel_atomic_get_new_crtc_state(state, crtc);
3698 enum icl_port_dpll_id id;
3699
3700 new_crtc_state->shared_dpll = NULL;
3701
3702 for (id = ICL_PORT_DPLL_DEFAULT; id < ICL_PORT_DPLL_COUNT; id++) {
3703 const struct icl_port_dpll *old_port_dpll =
3704 &old_crtc_state->icl_port_dplls[id];
3705 struct icl_port_dpll *new_port_dpll =
3706 &new_crtc_state->icl_port_dplls[id];
3707
3708 new_port_dpll->pll = NULL;
3709
3710 if (!old_port_dpll->pll)
3711 continue;
3712
3713 intel_unreference_shared_dpll(state, crtc, old_port_dpll->pll);
3714 }
3715}
3716
3717static bool mg_pll_get_hw_state(struct drm_i915_private *dev_priv,
3718 struct intel_shared_dpll *pll,
3719 struct intel_dpll_hw_state *hw_state)
3720{
3721 const enum intel_dpll_id id = pll->info->id;
3722 enum tc_port tc_port = icl_pll_id_to_tc_port(id);
3723 intel_wakeref_t wakeref;
3724 bool ret = false;
3725 u32 val;
3726
3727 wakeref = intel_display_power_get_if_enabled(dev_priv,
3728 POWER_DOMAIN_DISPLAY_CORE);
3729 if (!wakeref)
3730 return false;
3731
3732 val = intel_de_read(dev_priv, MG_PLL_ENABLE(tc_port));
3733 if (!(val & PLL_ENABLE))
3734 goto out;
3735
3736 hw_state->mg_refclkin_ctl = intel_de_read(dev_priv,
3737 MG_REFCLKIN_CTL(tc_port));
3738 hw_state->mg_refclkin_ctl &= MG_REFCLKIN_CTL_OD_2_MUX_MASK;
3739
3740 hw_state->mg_clktop2_coreclkctl1 =
3741 intel_de_read(dev_priv, MG_CLKTOP2_CORECLKCTL1(tc_port));
3742 hw_state->mg_clktop2_coreclkctl1 &=
3743 MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
3744
3745 hw_state->mg_clktop2_hsclkctl =
3746 intel_de_read(dev_priv, MG_CLKTOP2_HSCLKCTL(tc_port));
3747 hw_state->mg_clktop2_hsclkctl &=
3748 MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
3749 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
3750 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
3751 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK;
3752
3753 hw_state->mg_pll_div0 = intel_de_read(dev_priv, MG_PLL_DIV0(tc_port));
3754 hw_state->mg_pll_div1 = intel_de_read(dev_priv, MG_PLL_DIV1(tc_port));
3755 hw_state->mg_pll_lf = intel_de_read(dev_priv, MG_PLL_LF(tc_port));
3756 hw_state->mg_pll_frac_lock = intel_de_read(dev_priv,
3757 MG_PLL_FRAC_LOCK(tc_port));
3758 hw_state->mg_pll_ssc = intel_de_read(dev_priv, MG_PLL_SSC(tc_port));
3759
3760 hw_state->mg_pll_bias = intel_de_read(dev_priv, MG_PLL_BIAS(tc_port));
3761 hw_state->mg_pll_tdc_coldst_bias =
3762 intel_de_read(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port));
3763
3764 if (dev_priv->dpll.ref_clks.nssc == 38400) {
3765 hw_state->mg_pll_tdc_coldst_bias_mask = MG_PLL_TDC_COLDST_COLDSTART;
3766 hw_state->mg_pll_bias_mask = 0;
3767 } else {
3768 hw_state->mg_pll_tdc_coldst_bias_mask = -1U;
3769 hw_state->mg_pll_bias_mask = -1U;
3770 }
3771
3772 hw_state->mg_pll_tdc_coldst_bias &= hw_state->mg_pll_tdc_coldst_bias_mask;
3773 hw_state->mg_pll_bias &= hw_state->mg_pll_bias_mask;
3774
3775 ret = true;
3776out:
3777 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
3778 return ret;
3779}
3780
3781static bool dkl_pll_get_hw_state(struct drm_i915_private *dev_priv,
3782 struct intel_shared_dpll *pll,
3783 struct intel_dpll_hw_state *hw_state)
3784{
3785 const enum intel_dpll_id id = pll->info->id;
3786 enum tc_port tc_port = icl_pll_id_to_tc_port(id);
3787 intel_wakeref_t wakeref;
3788 bool ret = false;
3789 u32 val;
3790
3791 wakeref = intel_display_power_get_if_enabled(dev_priv,
3792 POWER_DOMAIN_DISPLAY_CORE);
3793 if (!wakeref)
3794 return false;
3795
3796 val = intel_de_read(dev_priv, MG_PLL_ENABLE(tc_port));
3797 if (!(val & PLL_ENABLE))
3798 goto out;
3799
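	/*
	 * All registers read here share the same HIP_INDEX_REG even though
	 * they belong to different building blocks of the Dekel PHY.
	 */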
3804 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
3805 HIP_INDEX_VAL(tc_port, 0x2));
3806
3807 hw_state->mg_refclkin_ctl = intel_de_read(dev_priv,
3808 DKL_REFCLKIN_CTL(tc_port));
3809 hw_state->mg_refclkin_ctl &= MG_REFCLKIN_CTL_OD_2_MUX_MASK;
3810
3811 hw_state->mg_clktop2_hsclkctl =
3812 intel_de_read(dev_priv, DKL_CLKTOP2_HSCLKCTL(tc_port));
3813 hw_state->mg_clktop2_hsclkctl &=
3814 MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
3815 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
3816 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
3817 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK;
3818
3819 hw_state->mg_clktop2_coreclkctl1 =
3820 intel_de_read(dev_priv, DKL_CLKTOP2_CORECLKCTL1(tc_port));
3821 hw_state->mg_clktop2_coreclkctl1 &=
3822 MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
3823
3824 hw_state->mg_pll_div0 = intel_de_read(dev_priv, DKL_PLL_DIV0(tc_port));
3825 hw_state->mg_pll_div0 &= (DKL_PLL_DIV0_INTEG_COEFF_MASK |
3826 DKL_PLL_DIV0_PROP_COEFF_MASK |
3827 DKL_PLL_DIV0_FBPREDIV_MASK |
3828 DKL_PLL_DIV0_FBDIV_INT_MASK);
3829
3830 hw_state->mg_pll_div1 = intel_de_read(dev_priv, DKL_PLL_DIV1(tc_port));
3831 hw_state->mg_pll_div1 &= (DKL_PLL_DIV1_IREF_TRIM_MASK |
3832 DKL_PLL_DIV1_TDC_TARGET_CNT_MASK);
3833
3834 hw_state->mg_pll_ssc = intel_de_read(dev_priv, DKL_PLL_SSC(tc_port));
3835 hw_state->mg_pll_ssc &= (DKL_PLL_SSC_IREF_NDIV_RATIO_MASK |
3836 DKL_PLL_SSC_STEP_LEN_MASK |
3837 DKL_PLL_SSC_STEP_NUM_MASK |
3838 DKL_PLL_SSC_EN);
3839
3840 hw_state->mg_pll_bias = intel_de_read(dev_priv, DKL_PLL_BIAS(tc_port));
3841 hw_state->mg_pll_bias &= (DKL_PLL_BIAS_FRAC_EN_H |
3842 DKL_PLL_BIAS_FBDIV_FRAC_MASK);
3843
3844 hw_state->mg_pll_tdc_coldst_bias =
3845 intel_de_read(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port));
3846 hw_state->mg_pll_tdc_coldst_bias &= (DKL_PLL_TDC_SSC_STEP_SIZE_MASK |
3847 DKL_PLL_TDC_FEED_FWD_GAIN_MASK);
3848
3849 ret = true;
3850out:
3851 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
3852 return ret;
3853}
3854
3855static bool icl_pll_get_hw_state(struct drm_i915_private *dev_priv,
3856 struct intel_shared_dpll *pll,
3857 struct intel_dpll_hw_state *hw_state,
3858 i915_reg_t enable_reg)
3859{
3860 const enum intel_dpll_id id = pll->info->id;
3861 intel_wakeref_t wakeref;
3862 bool ret = false;
3863 u32 val;
3864
3865 wakeref = intel_display_power_get_if_enabled(dev_priv,
3866 POWER_DOMAIN_DISPLAY_CORE);
3867 if (!wakeref)
3868 return false;
3869
3870 val = intel_de_read(dev_priv, enable_reg);
3871 if (!(val & PLL_ENABLE))
3872 goto out;
3873
3874 if (IS_ALDERLAKE_S(dev_priv)) {
3875 hw_state->cfgcr0 = intel_de_read(dev_priv, ADLS_DPLL_CFGCR0(id));
3876 hw_state->cfgcr1 = intel_de_read(dev_priv, ADLS_DPLL_CFGCR1(id));
3877 } else if (IS_DG1(dev_priv)) {
3878 hw_state->cfgcr0 = intel_de_read(dev_priv, DG1_DPLL_CFGCR0(id));
3879 hw_state->cfgcr1 = intel_de_read(dev_priv, DG1_DPLL_CFGCR1(id));
3880 } else if (IS_ROCKETLAKE(dev_priv)) {
3881 hw_state->cfgcr0 = intel_de_read(dev_priv,
3882 RKL_DPLL_CFGCR0(id));
3883 hw_state->cfgcr1 = intel_de_read(dev_priv,
3884 RKL_DPLL_CFGCR1(id));
3885 } else if (INTEL_GEN(dev_priv) >= 12) {
3886 hw_state->cfgcr0 = intel_de_read(dev_priv,
3887 TGL_DPLL_CFGCR0(id));
3888 hw_state->cfgcr1 = intel_de_read(dev_priv,
3889 TGL_DPLL_CFGCR1(id));
3890 } else {
3891 if (IS_JSL_EHL(dev_priv) && id == DPLL_ID_EHL_DPLL4) {
3892 hw_state->cfgcr0 = intel_de_read(dev_priv,
3893 ICL_DPLL_CFGCR0(4));
3894 hw_state->cfgcr1 = intel_de_read(dev_priv,
3895 ICL_DPLL_CFGCR1(4));
3896 } else {
3897 hw_state->cfgcr0 = intel_de_read(dev_priv,
3898 ICL_DPLL_CFGCR0(id));
3899 hw_state->cfgcr1 = intel_de_read(dev_priv,
3900 ICL_DPLL_CFGCR1(id));
3901 }
3902 }
3903
3904 ret = true;
3905out:
3906 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
3907 return ret;
3908}
3909
3910static bool combo_pll_get_hw_state(struct drm_i915_private *dev_priv,
3911 struct intel_shared_dpll *pll,
3912 struct intel_dpll_hw_state *hw_state)
3913{
3914 i915_reg_t enable_reg = intel_combo_pll_enable_reg(dev_priv, pll);
3915
3916 return icl_pll_get_hw_state(dev_priv, pll, hw_state, enable_reg);
3917}
3918
3919static bool tbt_pll_get_hw_state(struct drm_i915_private *dev_priv,
3920 struct intel_shared_dpll *pll,
3921 struct intel_dpll_hw_state *hw_state)
3922{
3923 return icl_pll_get_hw_state(dev_priv, pll, hw_state, TBT_PLL_ENABLE);
3924}
3925
3926static void icl_dpll_write(struct drm_i915_private *dev_priv,
3927 struct intel_shared_dpll *pll)
3928{
3929 struct intel_dpll_hw_state *hw_state = &pll->state.hw_state;
3930 const enum intel_dpll_id id = pll->info->id;
3931 i915_reg_t cfgcr0_reg, cfgcr1_reg;
3932
3933 if (IS_ALDERLAKE_S(dev_priv)) {
3934 cfgcr0_reg = ADLS_DPLL_CFGCR0(id);
3935 cfgcr1_reg = ADLS_DPLL_CFGCR1(id);
3936 } else if (IS_DG1(dev_priv)) {
3937 cfgcr0_reg = DG1_DPLL_CFGCR0(id);
3938 cfgcr1_reg = DG1_DPLL_CFGCR1(id);
3939 } else if (IS_ROCKETLAKE(dev_priv)) {
3940 cfgcr0_reg = RKL_DPLL_CFGCR0(id);
3941 cfgcr1_reg = RKL_DPLL_CFGCR1(id);
3942 } else if (INTEL_GEN(dev_priv) >= 12) {
3943 cfgcr0_reg = TGL_DPLL_CFGCR0(id);
3944 cfgcr1_reg = TGL_DPLL_CFGCR1(id);
3945 } else {
3946 if (IS_JSL_EHL(dev_priv) && id == DPLL_ID_EHL_DPLL4) {
3947 cfgcr0_reg = ICL_DPLL_CFGCR0(4);
3948 cfgcr1_reg = ICL_DPLL_CFGCR1(4);
3949 } else {
3950 cfgcr0_reg = ICL_DPLL_CFGCR0(id);
3951 cfgcr1_reg = ICL_DPLL_CFGCR1(id);
3952 }
3953 }
3954
3955 intel_de_write(dev_priv, cfgcr0_reg, hw_state->cfgcr0);
3956 intel_de_write(dev_priv, cfgcr1_reg, hw_state->cfgcr1);
3957 intel_de_posting_read(dev_priv, cfgcr1_reg);
3958}
3959
3960static void icl_mg_pll_write(struct drm_i915_private *dev_priv,
3961 struct intel_shared_dpll *pll)
3962{
3963 struct intel_dpll_hw_state *hw_state = &pll->state.hw_state;
3964 enum tc_port tc_port = icl_pll_id_to_tc_port(pll->info->id);
3965 u32 val;
3966
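	/*
	 * Some of these registers have reserved fields, so update them with
	 * read-modify-write, using either fixed masks or the masks computed
	 * at calc/readout time (e.g. mg_pll_bias_mask).
	 */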
3973 val = intel_de_read(dev_priv, MG_REFCLKIN_CTL(tc_port));
3974 val &= ~MG_REFCLKIN_CTL_OD_2_MUX_MASK;
3975 val |= hw_state->mg_refclkin_ctl;
3976 intel_de_write(dev_priv, MG_REFCLKIN_CTL(tc_port), val);
3977
3978 val = intel_de_read(dev_priv, MG_CLKTOP2_CORECLKCTL1(tc_port));
3979 val &= ~MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
3980 val |= hw_state->mg_clktop2_coreclkctl1;
3981 intel_de_write(dev_priv, MG_CLKTOP2_CORECLKCTL1(tc_port), val);
3982
3983 val = intel_de_read(dev_priv, MG_CLKTOP2_HSCLKCTL(tc_port));
3984 val &= ~(MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
3985 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
3986 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
3987 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK);
3988 val |= hw_state->mg_clktop2_hsclkctl;
3989 intel_de_write(dev_priv, MG_CLKTOP2_HSCLKCTL(tc_port), val);
3990
3991 intel_de_write(dev_priv, MG_PLL_DIV0(tc_port), hw_state->mg_pll_div0);
3992 intel_de_write(dev_priv, MG_PLL_DIV1(tc_port), hw_state->mg_pll_div1);
3993 intel_de_write(dev_priv, MG_PLL_LF(tc_port), hw_state->mg_pll_lf);
3994 intel_de_write(dev_priv, MG_PLL_FRAC_LOCK(tc_port),
3995 hw_state->mg_pll_frac_lock);
3996 intel_de_write(dev_priv, MG_PLL_SSC(tc_port), hw_state->mg_pll_ssc);
3997
3998 val = intel_de_read(dev_priv, MG_PLL_BIAS(tc_port));
3999 val &= ~hw_state->mg_pll_bias_mask;
4000 val |= hw_state->mg_pll_bias;
4001 intel_de_write(dev_priv, MG_PLL_BIAS(tc_port), val);
4002
4003 val = intel_de_read(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port));
4004 val &= ~hw_state->mg_pll_tdc_coldst_bias_mask;
4005 val |= hw_state->mg_pll_tdc_coldst_bias;
4006 intel_de_write(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port), val);
4007
4008 intel_de_posting_read(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port));
4009}
4010
4011static void dkl_pll_write(struct drm_i915_private *dev_priv,
4012 struct intel_shared_dpll *pll)
4013{
4014 struct intel_dpll_hw_state *hw_state = &pll->state.hw_state;
4015 enum tc_port tc_port = icl_pll_id_to_tc_port(pll->info->id);
4016 u32 val;
4017
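	/*
	 * All registers programmed here share the same HIP_INDEX_REG even
	 * though they belong to different building blocks of the Dekel PHY.
	 */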
4022 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
4023 HIP_INDEX_VAL(tc_port, 0x2));
4024
4025
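	/* All of the following registers are updated read-modify-write. */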
4026 val = intel_de_read(dev_priv, DKL_REFCLKIN_CTL(tc_port));
4027 val &= ~MG_REFCLKIN_CTL_OD_2_MUX_MASK;
4028 val |= hw_state->mg_refclkin_ctl;
4029 intel_de_write(dev_priv, DKL_REFCLKIN_CTL(tc_port), val);
4030
4031 val = intel_de_read(dev_priv, DKL_CLKTOP2_CORECLKCTL1(tc_port));
4032 val &= ~MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
4033 val |= hw_state->mg_clktop2_coreclkctl1;
4034 intel_de_write(dev_priv, DKL_CLKTOP2_CORECLKCTL1(tc_port), val);
4035
4036 val = intel_de_read(dev_priv, DKL_CLKTOP2_HSCLKCTL(tc_port));
4037 val &= ~(MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
4038 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
4039 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
4040 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK);
4041 val |= hw_state->mg_clktop2_hsclkctl;
4042 intel_de_write(dev_priv, DKL_CLKTOP2_HSCLKCTL(tc_port), val);
4043
4044 val = intel_de_read(dev_priv, DKL_PLL_DIV0(tc_port));
4045 val &= ~(DKL_PLL_DIV0_INTEG_COEFF_MASK |
4046 DKL_PLL_DIV0_PROP_COEFF_MASK |
4047 DKL_PLL_DIV0_FBPREDIV_MASK |
4048 DKL_PLL_DIV0_FBDIV_INT_MASK);
4049 val |= hw_state->mg_pll_div0;
4050 intel_de_write(dev_priv, DKL_PLL_DIV0(tc_port), val);
4051
4052 val = intel_de_read(dev_priv, DKL_PLL_DIV1(tc_port));
4053 val &= ~(DKL_PLL_DIV1_IREF_TRIM_MASK |
4054 DKL_PLL_DIV1_TDC_TARGET_CNT_MASK);
4055 val |= hw_state->mg_pll_div1;
4056 intel_de_write(dev_priv, DKL_PLL_DIV1(tc_port), val);
4057
4058 val = intel_de_read(dev_priv, DKL_PLL_SSC(tc_port));
4059 val &= ~(DKL_PLL_SSC_IREF_NDIV_RATIO_MASK |
4060 DKL_PLL_SSC_STEP_LEN_MASK |
4061 DKL_PLL_SSC_STEP_NUM_MASK |
4062 DKL_PLL_SSC_EN);
4063 val |= hw_state->mg_pll_ssc;
4064 intel_de_write(dev_priv, DKL_PLL_SSC(tc_port), val);
4065
4066 val = intel_de_read(dev_priv, DKL_PLL_BIAS(tc_port));
4067 val &= ~(DKL_PLL_BIAS_FRAC_EN_H |
4068 DKL_PLL_BIAS_FBDIV_FRAC_MASK);
4069 val |= hw_state->mg_pll_bias;
4070 intel_de_write(dev_priv, DKL_PLL_BIAS(tc_port), val);
4071
4072 val = intel_de_read(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port));
4073 val &= ~(DKL_PLL_TDC_SSC_STEP_SIZE_MASK |
4074 DKL_PLL_TDC_FEED_FWD_GAIN_MASK);
4075 val |= hw_state->mg_pll_tdc_coldst_bias;
4076 intel_de_write(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port), val);
4077
4078 intel_de_posting_read(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port));
4079}
4080
4081static void icl_pll_power_enable(struct drm_i915_private *dev_priv,
4082 struct intel_shared_dpll *pll,
4083 i915_reg_t enable_reg)
4084{
4085 u32 val;
4086
4087 val = intel_de_read(dev_priv, enable_reg);
4088 val |= PLL_POWER_ENABLE;
4089 intel_de_write(dev_priv, enable_reg, val);
4090
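	/* The power state should assert almost immediately, hence the short timeout. */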
4095 if (intel_de_wait_for_set(dev_priv, enable_reg, PLL_POWER_STATE, 1))
4096 drm_err(&dev_priv->drm, "PLL %d Power not enabled\n",
4097 pll->info->id);
4098}
4099
4100static void icl_pll_enable(struct drm_i915_private *dev_priv,
4101 struct intel_shared_dpll *pll,
4102 i915_reg_t enable_reg)
4103{
4104 u32 val;
4105
4106 val = intel_de_read(dev_priv, enable_reg);
4107 val |= PLL_ENABLE;
4108 intel_de_write(dev_priv, enable_reg, val);
4109
4110
4111 if (intel_de_wait_for_set(dev_priv, enable_reg, PLL_LOCK, 1))
4112 drm_err(&dev_priv->drm, "PLL %d not locked\n", pll->info->id);
4113}
4114
4115static void combo_pll_enable(struct drm_i915_private *dev_priv,
4116 struct intel_shared_dpll *pll)
4117{
4118 i915_reg_t enable_reg = intel_combo_pll_enable_reg(dev_priv, pll);
4119
4120 if (IS_JSL_EHL(dev_priv) &&
4121 pll->info->id == DPLL_ID_EHL_DPLL4) {
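		/*
		 * DC states must stay disabled while this DPLL is enabled, so
		 * hold a DPLL_DC_OFF power domain reference until it is
		 * disabled again.
		 */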
4128 pll->wakeref = intel_display_power_get(dev_priv,
4129 POWER_DOMAIN_DPLL_DC_OFF);
4130 }
4131
4132 icl_pll_power_enable(dev_priv, pll, enable_reg);
4133
4134 icl_dpll_write(dev_priv, pll);
4135
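	/*
	 * The DVFS sequence around the frequency change is handled by the
	 * cdclk code, so there is nothing to do here before enabling the PLL.
	 */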
4142 icl_pll_enable(dev_priv, pll, enable_reg);
4143
4144
4145}
4146
4147static void tbt_pll_enable(struct drm_i915_private *dev_priv,
4148 struct intel_shared_dpll *pll)
4149{
4150 icl_pll_power_enable(dev_priv, pll, TBT_PLL_ENABLE);
4151
4152 icl_dpll_write(dev_priv, pll);
4153
4160 icl_pll_enable(dev_priv, pll, TBT_PLL_ENABLE);
4161
4162
4163}
4164
4165static void mg_pll_enable(struct drm_i915_private *dev_priv,
4166 struct intel_shared_dpll *pll)
4167{
4168 i915_reg_t enable_reg =
4169 MG_PLL_ENABLE(icl_pll_id_to_tc_port(pll->info->id));
4170
4171 icl_pll_power_enable(dev_priv, pll, enable_reg);
4172
4173 if (INTEL_GEN(dev_priv) >= 12)
4174 dkl_pll_write(dev_priv, pll);
4175 else
4176 icl_mg_pll_write(dev_priv, pll);
4177
4184 icl_pll_enable(dev_priv, pll, enable_reg);
4185
4186
4187}

static void icl_pll_disable(struct drm_i915_private *dev_priv,
                            struct intel_shared_dpll *pll,
                            i915_reg_t enable_reg)
{
        u32 val;

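        /* First stop the PLL: clear PLL_ENABLE and wait for the lock to drop. */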
        val = intel_de_read(dev_priv, enable_reg);
        val &= ~PLL_ENABLE;
        intel_de_write(dev_priv, enable_reg, val);

        if (intel_de_wait_for_clear(dev_priv, enable_reg, PLL_LOCK, 1))
                drm_err(&dev_priv->drm, "PLL %d locked\n", pll->info->id);

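        /* Then remove PLL power and wait for the power state to clear. */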
        val = intel_de_read(dev_priv, enable_reg);
        val &= ~PLL_POWER_ENABLE;
        intel_de_write(dev_priv, enable_reg, val);

        if (intel_de_wait_for_clear(dev_priv, enable_reg, PLL_POWER_STATE, 1))
                drm_err(&dev_priv->drm, "PLL %d Power not disabled\n",
                        pll->info->id);
}

static void combo_pll_disable(struct drm_i915_private *dev_priv,
                              struct intel_shared_dpll *pll)
{
        i915_reg_t enable_reg = intel_combo_pll_enable_reg(dev_priv, pll);

        icl_pll_disable(dev_priv, pll, enable_reg);

        if (IS_JSL_EHL(dev_priv) &&
            pll->info->id == DPLL_ID_EHL_DPLL4)
                intel_display_power_put(dev_priv, POWER_DOMAIN_DPLL_DC_OFF,
                                        pll->wakeref);
}

static void tbt_pll_disable(struct drm_i915_private *dev_priv,
                            struct intel_shared_dpll *pll)
{
        icl_pll_disable(dev_priv, pll, TBT_PLL_ENABLE);
}

static void mg_pll_disable(struct drm_i915_private *dev_priv,
                           struct intel_shared_dpll *pll)
{
        i915_reg_t enable_reg =
                MG_PLL_ENABLE(icl_pll_id_to_tc_port(pll->info->id));

        icl_pll_disable(dev_priv, pll, enable_reg);
}

static void icl_update_dpll_ref_clks(struct drm_i915_private *i915)
{
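        /* The non-SSC DPLL reference clock is the CDCLK reference clock. */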
        i915->dpll.ref_clks.nssc = i915->cdclk.hw.ref;
}

static void icl_dump_hw_state(struct drm_i915_private *dev_priv,
                              const struct intel_dpll_hw_state *hw_state)
{
        drm_dbg_kms(&dev_priv->drm,
                    "dpll_hw_state: cfgcr0: 0x%x, cfgcr1: 0x%x, "
                    "mg_refclkin_ctl: 0x%x, mg_clktop2_coreclkctl1: 0x%x, "
                    "mg_clktop2_hsclkctl: 0x%x, mg_pll_div0: 0x%x, "
                    "mg_pll_div1: 0x%x, mg_pll_lf: 0x%x, "
                    "mg_pll_frac_lock: 0x%x, mg_pll_ssc: 0x%x, "
                    "mg_pll_bias: 0x%x, mg_pll_tdc_coldst_bias: 0x%x\n",
                    hw_state->cfgcr0, hw_state->cfgcr1,
                    hw_state->mg_refclkin_ctl,
                    hw_state->mg_clktop2_coreclkctl1,
                    hw_state->mg_clktop2_hsclkctl,
                    hw_state->mg_pll_div0,
                    hw_state->mg_pll_div1,
                    hw_state->mg_pll_lf,
                    hw_state->mg_pll_frac_lock,
                    hw_state->mg_pll_ssc,
                    hw_state->mg_pll_bias,
                    hw_state->mg_pll_tdc_coldst_bias);
}

static const struct intel_shared_dpll_funcs combo_pll_funcs = {
        .enable = combo_pll_enable,
        .disable = combo_pll_disable,
        .get_hw_state = combo_pll_get_hw_state,
        .get_freq = icl_ddi_combo_pll_get_freq,
};

static const struct intel_shared_dpll_funcs tbt_pll_funcs = {
        .enable = tbt_pll_enable,
        .disable = tbt_pll_disable,
        .get_hw_state = tbt_pll_get_hw_state,
        .get_freq = icl_ddi_tbt_pll_get_freq,
};

static const struct intel_shared_dpll_funcs mg_pll_funcs = {
        .enable = mg_pll_enable,
        .disable = mg_pll_disable,
        .get_hw_state = mg_pll_get_hw_state,
        .get_freq = icl_ddi_mg_pll_get_freq,
};

static const struct dpll_info icl_plls[] = {
        { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
        { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
        { "TBT PLL", &tbt_pll_funcs, DPLL_ID_ICL_TBTPLL, 0 },
        { "MG PLL 1", &mg_pll_funcs, DPLL_ID_ICL_MGPLL1, 0 },
        { "MG PLL 2", &mg_pll_funcs, DPLL_ID_ICL_MGPLL2, 0 },
        { "MG PLL 3", &mg_pll_funcs, DPLL_ID_ICL_MGPLL3, 0 },
        { "MG PLL 4", &mg_pll_funcs, DPLL_ID_ICL_MGPLL4, 0 },
        { },
};

static const struct intel_dpll_mgr icl_pll_mgr = {
        .dpll_info = icl_plls,
        .get_dplls = icl_get_dplls,
        .put_dplls = icl_put_dplls,
        .update_active_dpll = icl_update_active_dpll,
        .update_ref_clks = icl_update_dpll_ref_clks,
        .dump_hw_state = icl_dump_hw_state,
};

static const struct dpll_info ehl_plls[] = {
        { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
        { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
        { "DPLL 4", &combo_pll_funcs, DPLL_ID_EHL_DPLL4, 0 },
        { },
};

static const struct intel_dpll_mgr ehl_pll_mgr = {
        .dpll_info = ehl_plls,
        .get_dplls = icl_get_dplls,
        .put_dplls = icl_put_dplls,
        .update_ref_clks = icl_update_dpll_ref_clks,
        .dump_hw_state = icl_dump_hw_state,
};

static const struct intel_shared_dpll_funcs dkl_pll_funcs = {
        .enable = mg_pll_enable,
        .disable = mg_pll_disable,
        .get_hw_state = dkl_pll_get_hw_state,
        .get_freq = icl_ddi_mg_pll_get_freq,
};

static const struct dpll_info tgl_plls[] = {
        { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
        { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
        { "TBT PLL", &tbt_pll_funcs, DPLL_ID_ICL_TBTPLL, 0 },
        { "TC PLL 1", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL1, 0 },
        { "TC PLL 2", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL2, 0 },
        { "TC PLL 3", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL3, 0 },
        { "TC PLL 4", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL4, 0 },
        { "TC PLL 5", &dkl_pll_funcs, DPLL_ID_TGL_MGPLL5, 0 },
        { "TC PLL 6", &dkl_pll_funcs, DPLL_ID_TGL_MGPLL6, 0 },
        { },
};

static const struct intel_dpll_mgr tgl_pll_mgr = {
        .dpll_info = tgl_plls,
        .get_dplls = icl_get_dplls,
        .put_dplls = icl_put_dplls,
        .update_active_dpll = icl_update_active_dpll,
        .update_ref_clks = icl_update_dpll_ref_clks,
        .dump_hw_state = icl_dump_hw_state,
};

static const struct dpll_info rkl_plls[] = {
        { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
        { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
        { "DPLL 4", &combo_pll_funcs, DPLL_ID_EHL_DPLL4, 0 },
        { },
};

static const struct intel_dpll_mgr rkl_pll_mgr = {
        .dpll_info = rkl_plls,
        .get_dplls = icl_get_dplls,
        .put_dplls = icl_put_dplls,
        .update_ref_clks = icl_update_dpll_ref_clks,
        .dump_hw_state = icl_dump_hw_state,
};

static const struct dpll_info dg1_plls[] = {
        { "DPLL 0", &combo_pll_funcs, DPLL_ID_DG1_DPLL0, 0 },
        { "DPLL 1", &combo_pll_funcs, DPLL_ID_DG1_DPLL1, 0 },
        { "DPLL 2", &combo_pll_funcs, DPLL_ID_DG1_DPLL2, 0 },
        { "DPLL 3", &combo_pll_funcs, DPLL_ID_DG1_DPLL3, 0 },
        { },
};

static const struct intel_dpll_mgr dg1_pll_mgr = {
        .dpll_info = dg1_plls,
        .get_dplls = icl_get_dplls,
        .put_dplls = icl_put_dplls,
        .update_ref_clks = icl_update_dpll_ref_clks,
        .dump_hw_state = icl_dump_hw_state,
};

static const struct dpll_info adls_plls[] = {
        { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
        { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
        { "DPLL 2", &combo_pll_funcs, DPLL_ID_DG1_DPLL2, 0 },
        { "DPLL 3", &combo_pll_funcs, DPLL_ID_DG1_DPLL3, 0 },
        { },
};

static const struct intel_dpll_mgr adls_pll_mgr = {
        .dpll_info = adls_plls,
        .get_dplls = icl_get_dplls,
        .put_dplls = icl_put_dplls,
        .update_ref_clks = icl_update_dpll_ref_clks,
        .dump_hw_state = icl_dump_hw_state,
};

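/**
 * intel_shared_dpll_init - Initialize shared DPLLs
 * @dev: drm device
 *
 * Initialize shared DPLLs for @dev.
 */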
void intel_shared_dpll_init(struct drm_device *dev)
{
        struct drm_i915_private *dev_priv = to_i915(dev);
        const struct intel_dpll_mgr *dpll_mgr = NULL;
        const struct dpll_info *dpll_info;
        int i;

        if (IS_ALDERLAKE_S(dev_priv))
                dpll_mgr = &adls_pll_mgr;
        else if (IS_DG1(dev_priv))
                dpll_mgr = &dg1_pll_mgr;
        else if (IS_ROCKETLAKE(dev_priv))
                dpll_mgr = &rkl_pll_mgr;
        else if (INTEL_GEN(dev_priv) >= 12)
                dpll_mgr = &tgl_pll_mgr;
        else if (IS_JSL_EHL(dev_priv))
                dpll_mgr = &ehl_pll_mgr;
        else if (INTEL_GEN(dev_priv) >= 11)
                dpll_mgr = &icl_pll_mgr;
        else if (IS_CANNONLAKE(dev_priv))
                dpll_mgr = &cnl_pll_mgr;
        else if (IS_GEN9_BC(dev_priv))
                dpll_mgr = &skl_pll_mgr;
        else if (IS_GEN9_LP(dev_priv))
                dpll_mgr = &bxt_pll_mgr;
        else if (HAS_DDI(dev_priv))
                dpll_mgr = &hsw_pll_mgr;
        else if (HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv))
                dpll_mgr = &pch_pll_mgr;

        if (!dpll_mgr) {
                dev_priv->dpll.num_shared_dpll = 0;
                return;
        }

        dpll_info = dpll_mgr->dpll_info;

        for (i = 0; dpll_info[i].name; i++) {
                drm_WARN_ON(dev, i != dpll_info[i].id);
                dev_priv->dpll.shared_dplls[i].info = &dpll_info[i];
        }

        dev_priv->dpll.mgr = dpll_mgr;
        dev_priv->dpll.num_shared_dpll = i;
        mutex_init(&dev_priv->dpll.lock);

        BUG_ON(dev_priv->dpll.num_shared_dpll > I915_NUM_PLLS);
}
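/**
 * intel_reserve_shared_dplls - reserve DPLLs for a CRTC and encoder combination
 * @state: atomic state
 * @crtc: CRTC to reserve DPLLs for
 * @encoder: encoder
 *
 * Reserve all the DPLLs required by the given @crtc and @encoder combination
 * in the current atomic commit @state. The reservation should be released
 * again with intel_release_shared_dplls().
 *
 * Returns:
 * True if all required DPLLs were successfully reserved.
 */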
bool intel_reserve_shared_dplls(struct intel_atomic_state *state,
                                struct intel_crtc *crtc,
                                struct intel_encoder *encoder)
{
        struct drm_i915_private *dev_priv = to_i915(state->base.dev);
        const struct intel_dpll_mgr *dpll_mgr = dev_priv->dpll.mgr;

        if (drm_WARN_ON(&dev_priv->drm, !dpll_mgr))
                return false;

        return dpll_mgr->get_dplls(state, crtc, encoder);
}

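/**
 * intel_release_shared_dplls - end use of DPLLs by a CRTC in atomic state
 * @state: atomic state
 * @crtc: CRTC from which the DPLLs are to be released
 *
 * Release the DPLLs reserved for @crtc by a previous call to
 * intel_reserve_shared_dplls().
 */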
void intel_release_shared_dplls(struct intel_atomic_state *state,
                                struct intel_crtc *crtc)
{
        struct drm_i915_private *dev_priv = to_i915(state->base.dev);
        const struct intel_dpll_mgr *dpll_mgr = dev_priv->dpll.mgr;

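        /*
         * Platforms without a DPLL manager never reserve anything via
         * intel_reserve_shared_dplls(), so there is nothing to release here.
         */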
        if (!dpll_mgr)
                return;

        dpll_mgr->put_dplls(state, crtc);
}

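/**
 * intel_update_active_dpll - update the active DPLL for a CRTC/encoder
 * @state: atomic state
 * @crtc: the CRTC for which to update the active DPLL
 * @encoder: encoder determining the type of port DPLL
 *
 * Update the active DPLL for the given @crtc/@encoder in @crtc's atomic state,
 * from the port DPLLs reserved previously by intel_reserve_shared_dplls().
 */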
void intel_update_active_dpll(struct intel_atomic_state *state,
                              struct intel_crtc *crtc,
                              struct intel_encoder *encoder)
{
        struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
        const struct intel_dpll_mgr *dpll_mgr = dev_priv->dpll.mgr;

        if (drm_WARN_ON(&dev_priv->drm, !dpll_mgr))
                return;

        dpll_mgr->update_active_dpll(state, crtc, encoder);
}

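/**
 * intel_dpll_get_freq - calculate the DPLL's output frequency
 * @i915: i915 device
 * @pll: DPLL for which to calculate the output frequency
 * @pll_state: DPLL state from which to calculate the output frequency
 *
 * Return the output frequency corresponding to @pll's passed in @pll_state.
 */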
int intel_dpll_get_freq(struct drm_i915_private *i915,
                        const struct intel_shared_dpll *pll,
                        const struct intel_dpll_hw_state *pll_state)
{
        if (drm_WARN_ON(&i915->drm, !pll->info->funcs->get_freq))
                return 0;

        return pll->info->funcs->get_freq(i915, pll, pll_state);
}

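/**
 * intel_dpll_get_hw_state - readout the DPLL's hardware state
 * @i915: i915 device
 * @pll: DPLL for which to read out the hardware state
 * @hw_state: DPLL's hardware state
 *
 * Read out @pll's hardware state into @hw_state. Returns true if the PLL
 * is currently enabled.
 */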
bool intel_dpll_get_hw_state(struct drm_i915_private *i915,
                             struct intel_shared_dpll *pll,
                             struct intel_dpll_hw_state *hw_state)
{
        return pll->info->funcs->get_hw_state(i915, pll, hw_state);
}

static void readout_dpll_hw_state(struct drm_i915_private *i915,
                                  struct intel_shared_dpll *pll)
{
        struct intel_crtc *crtc;

        pll->on = intel_dpll_get_hw_state(i915, pll, &pll->state.hw_state);

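        /*
         * If DPLL4 on JSL/EHL is already enabled (e.g. by the BIOS), take the
         * DPLL_DC_OFF reference that combo_pll_disable() will drop later.
         */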
        if (IS_JSL_EHL(i915) && pll->on &&
            pll->info->id == DPLL_ID_EHL_DPLL4) {
                pll->wakeref = intel_display_power_get(i915,
                                                       POWER_DOMAIN_DPLL_DC_OFF);
        }

        pll->state.crtc_mask = 0;
        for_each_intel_crtc(&i915->drm, crtc) {
                struct intel_crtc_state *crtc_state =
                        to_intel_crtc_state(crtc->base.state);

                if (crtc_state->hw.active && crtc_state->shared_dpll == pll)
                        pll->state.crtc_mask |= 1 << crtc->pipe;
        }
        pll->active_mask = pll->state.crtc_mask;

        drm_dbg_kms(&i915->drm,
                    "%s hw state readout: crtc_mask 0x%08x, on %i\n",
                    pll->info->name, pll->state.crtc_mask, pll->on);
}

void intel_dpll_readout_hw_state(struct drm_i915_private *i915)
{
        int i;

        if (i915->dpll.mgr && i915->dpll.mgr->update_ref_clks)
                i915->dpll.mgr->update_ref_clks(i915);

        for (i = 0; i < i915->dpll.num_shared_dpll; i++)
                readout_dpll_hw_state(i915, &i915->dpll.shared_dplls[i]);
}

static void sanitize_dpll_state(struct drm_i915_private *i915,
                                struct intel_shared_dpll *pll)
{
        if (!pll->on || pll->active_mask)
                return;

        drm_dbg_kms(&i915->drm,
                    "%s enabled but not in use, disabling\n",
                    pll->info->name);

        pll->info->funcs->disable(i915, pll);
        pll->on = false;
}

void intel_dpll_sanitize_state(struct drm_i915_private *i915)
{
        int i;

        for (i = 0; i < i915->dpll.num_shared_dpll; i++)
                sanitize_dpll_state(i915, &i915->dpll.shared_dplls[i]);
}

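/**
 * intel_dpll_dump_hw_state - write hw_state to dmesg
 * @dev_priv: i915 drm device
 * @hw_state: hw state to be written to the log
 *
 * Write the relevant values in @hw_state to dmesg using drm_dbg_kms.
 */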
void intel_dpll_dump_hw_state(struct drm_i915_private *dev_priv,
                              const struct intel_dpll_hw_state *hw_state)
{
        if (dev_priv->dpll.mgr) {
                dev_priv->dpll.mgr->dump_hw_state(dev_priv, hw_state);
        } else {
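                /* Fallback for platforms that don't use the shared DPLL framework. */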
                drm_dbg_kms(&dev_priv->drm,
                            "dpll_hw_state: dpll: 0x%x, dpll_md: 0x%x, "
                            "fp0: 0x%x, fp1: 0x%x\n",
                            hw_state->dpll,
                            hw_state->dpll_md,
                            hw_state->fp0,
                            hw_state->fp1);
        }
}