1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27#include <linux/i2c.h>
28#include <linux/input.h>
29#include <linux/intel-iommu.h>
30#include <linux/kernel.h>
31#include <linux/module.h>
32#include <linux/dma-resv.h>
33#include <linux/slab.h>
34
35#include <drm/drm_atomic.h>
36#include <drm/drm_atomic_helper.h>
37#include <drm/drm_atomic_uapi.h>
38#include <drm/drm_damage_helper.h>
39#include <drm/drm_dp_helper.h>
40#include <drm/drm_edid.h>
41#include <drm/drm_fourcc.h>
42#include <drm/drm_plane_helper.h>
43#include <drm/drm_probe_helper.h>
44#include <drm/drm_rect.h>
45
46#include "display/intel_crt.h"
47#include "display/intel_ddi.h"
48#include "display/intel_dp.h"
49#include "display/intel_dp_mst.h"
50#include "display/intel_dpll_mgr.h"
51#include "display/intel_dsi.h"
52#include "display/intel_dvo.h"
53#include "display/intel_gmbus.h"
54#include "display/intel_hdmi.h"
55#include "display/intel_lvds.h"
56#include "display/intel_sdvo.h"
57#include "display/intel_tv.h"
58#include "display/intel_vdsc.h"
59
60#include "gt/intel_rps.h"
61
62#include "i915_drv.h"
63#include "i915_trace.h"
64#include "intel_acpi.h"
65#include "intel_atomic.h"
66#include "intel_atomic_plane.h"
67#include "intel_bw.h"
68#include "intel_cdclk.h"
69#include "intel_color.h"
70#include "intel_csr.h"
71#include "intel_display_types.h"
72#include "intel_dp_link_training.h"
73#include "intel_fbc.h"
74#include "intel_fbdev.h"
75#include "intel_fifo_underrun.h"
76#include "intel_frontbuffer.h"
77#include "intel_hdcp.h"
78#include "intel_hotplug.h"
79#include "intel_overlay.h"
80#include "intel_pipe_crc.h"
81#include "intel_pm.h"
82#include "intel_psr.h"
83#include "intel_quirks.h"
84#include "intel_sideband.h"
85#include "intel_sprite.h"
86#include "intel_tc.h"
87#include "intel_vga.h"
88
89
/* Primary plane pixel formats supported on gen2/gen3 hardware. */
static const u32 i8xx_primary_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB8888,
};

/* Primary plane formats for IVB (no XRGB1555, adds 10bpc and BGR orders). */
static const u32 ivb_primary_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_XRGB2101010,
	DRM_FORMAT_XBGR2101010,
};

/* Primary plane formats for gen4+ (adds FP16 over the IVB set). */
static const u32 i965_primary_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_XRGB2101010,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_XBGR16161616F,
};

/* Primary plane formats for VLV/CHV (adds per-pixel alpha variants). */
static const u32 vlv_primary_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_XRGB2101010,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ARGB2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR16161616F,
};

/* Framebuffer modifiers supported by pre-skl primary planes. */
static const u64 i9xx_format_modifiers[] = {
	I915_FORMAT_MOD_X_TILED,
	DRM_FORMAT_MOD_LINEAR,
	DRM_FORMAT_MOD_INVALID
};

/* Cursor planes only accept ARGB8888 ... */
static const u32 intel_cursor_formats[] = {
	DRM_FORMAT_ARGB8888,
};

/* ... and only linear buffers. */
static const u64 cursor_format_modifiers[] = {
	DRM_FORMAT_MOD_LINEAR,
	DRM_FORMAT_MOD_INVALID
};
148
/* Forward declarations for static functions defined later in this file. */
static void i9xx_crtc_clock_get(struct intel_crtc *crtc,
				struct intel_crtc_state *pipe_config);
static void ilk_pch_clock_get(struct intel_crtc *crtc,
			      struct intel_crtc_state *pipe_config);

static int intel_framebuffer_init(struct intel_framebuffer *ifb,
				  struct drm_i915_gem_object *obj,
				  struct drm_mode_fb_cmd2 *mode_cmd);
static void intel_set_transcoder_timings(const struct intel_crtc_state *crtc_state);
static void intel_set_pipe_src_size(const struct intel_crtc_state *crtc_state);
static void intel_cpu_transcoder_set_m_n(const struct intel_crtc_state *crtc_state,
					 const struct intel_link_m_n *m_n,
					 const struct intel_link_m_n *m2_n2);
static void i9xx_set_pipeconf(const struct intel_crtc_state *crtc_state);
static void ilk_set_pipeconf(const struct intel_crtc_state *crtc_state);
static void hsw_set_pipeconf(const struct intel_crtc_state *crtc_state);
static void bdw_set_pipemisc(const struct intel_crtc_state *crtc_state);
static void vlv_prepare_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config);
static void chv_prepare_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config);
static void skl_pfit_enable(const struct intel_crtc_state *crtc_state);
static void ilk_pfit_enable(const struct intel_crtc_state *crtc_state);
static void intel_modeset_setup_hw_state(struct drm_device *dev,
					 struct drm_modeset_acquire_ctx *ctx);
static struct intel_crtc_state *intel_crtc_state_alloc(struct intel_crtc *crtc);
175
/*
 * Valid ranges for the DPLL divider parameters on a given platform.
 * Used by the *_find_best_dpll() search routines and intel_pll_is_valid().
 */
struct intel_limit {
	struct {
		int min, max;
	} dot, vco, n, m, m1, m2, p, p1;	/* inclusive [min, max] ranges */

	struct {
		int dot_limit;		/* dot clock threshold selecting slow vs fast p2 */
		int p2_slow, p2_fast;	/* p2 divider below/above dot_limit */
	} p2;
};
186
187
188int vlv_get_hpll_vco(struct drm_i915_private *dev_priv)
189{
190 int hpll_freq, vco_freq[] = { 800, 1600, 2000, 2400 };
191
192
193 hpll_freq = vlv_cck_read(dev_priv, CCK_FUSE_REG) &
194 CCK_FUSE_HPLL_FREQ_MASK;
195
196 return vco_freq[hpll_freq] * 1000;
197}
198
/*
 * Compute a CCK-derived clock (in kHz) from its divider register.
 * @name is used only for the warning message; @ref_freq is the input
 * (HPLL VCO) frequency in kHz. Warns if the status field indicates a
 * divider change is still in progress.
 */
int vlv_get_cck_clock(struct drm_i915_private *dev_priv,
		      const char *name, u32 reg, int ref_freq)
{
	u32 val;
	int divider;

	val = vlv_cck_read(dev_priv, reg);
	divider = val & CCK_FREQUENCY_VALUES;

	/* status field should mirror the requested divider once settled */
	drm_WARN(&dev_priv->drm, (val & CCK_FREQUENCY_STATUS) !=
		 (divider << CCK_FREQUENCY_STATUS_SHIFT),
		 "%s change in progress\n", name);

	/* clock = 2 * ref / (divider + 1), rounded to nearest */
	return DIV_ROUND_CLOSEST(ref_freq << 1, divider + 1);
}
214
215int vlv_get_cck_clock_hpll(struct drm_i915_private *dev_priv,
216 const char *name, u32 reg)
217{
218 int hpll;
219
220 vlv_cck_get(dev_priv);
221
222 if (dev_priv->hpll_freq == 0)
223 dev_priv->hpll_freq = vlv_get_hpll_vco(dev_priv);
224
225 hpll = vlv_get_cck_clock(dev_priv, name, reg, dev_priv->hpll_freq);
226
227 vlv_cck_put(dev_priv);
228
229 return hpll;
230}
231
/*
 * Cache the CZ clock frequency for VLV/CHV; a no-op on other platforms
 * (only VLV/CHV have the CCK CZ clock control register).
 */
static void intel_update_czclk(struct drm_i915_private *dev_priv)
{
	if (!(IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)))
		return;

	dev_priv->czclk_freq = vlv_get_cck_clock_hpll(dev_priv, "czclk",
						      CCK_CZ_CLOCK_CONTROL);

	drm_dbg(&dev_priv->drm, "CZ clock rate: %d kHz\n",
		dev_priv->czclk_freq);
}
243
244
245static u32 intel_fdi_link_freq(struct drm_i915_private *dev_priv,
246 const struct intel_crtc_state *pipe_config)
247{
248 if (HAS_DDI(dev_priv))
249 return pipe_config->port_clock;
250 else
251 return dev_priv->fdi_pll_freq;
252}
253
/* gen2 DPLL limits, DAC (VGA) output. */
static const struct intel_limit intel_limits_i8xx_dac = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 908000, .max = 1512000 },
	.n = { .min = 2, .max = 16 },
	.m = { .min = 96, .max = 140 },
	.m1 = { .min = 18, .max = 26 },
	.m2 = { .min = 6, .max = 16 },
	.p = { .min = 4, .max = 128 },
	.p1 = { .min = 2, .max = 33 },
	.p2 = { .dot_limit = 165000,
		.p2_slow = 4, .p2_fast = 2 },
};

/* gen2 DPLL limits, DVO output (same as DAC except p2_fast). */
static const struct intel_limit intel_limits_i8xx_dvo = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 908000, .max = 1512000 },
	.n = { .min = 2, .max = 16 },
	.m = { .min = 96, .max = 140 },
	.m1 = { .min = 18, .max = 26 },
	.m2 = { .min = 6, .max = 16 },
	.p = { .min = 4, .max = 128 },
	.p1 = { .min = 2, .max = 33 },
	.p2 = { .dot_limit = 165000,
		.p2_slow = 4, .p2_fast = 4 },
};

/* gen2 DPLL limits, LVDS output. */
static const struct intel_limit intel_limits_i8xx_lvds = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 908000, .max = 1512000 },
	.n = { .min = 2, .max = 16 },
	.m = { .min = 96, .max = 140 },
	.m1 = { .min = 18, .max = 26 },
	.m2 = { .min = 6, .max = 16 },
	.p = { .min = 4, .max = 128 },
	.p1 = { .min = 1, .max = 6 },
	.p2 = { .dot_limit = 165000,
		.p2_slow = 14, .p2_fast = 7 },
};

/* i9xx (gen3/4) DPLL limits, SDVO/DAC outputs. */
static const struct intel_limit intel_limits_i9xx_sdvo = {
	.dot = { .min = 20000, .max = 400000 },
	.vco = { .min = 1400000, .max = 2800000 },
	.n = { .min = 1, .max = 6 },
	.m = { .min = 70, .max = 120 },
	.m1 = { .min = 8, .max = 18 },
	.m2 = { .min = 3, .max = 7 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 200000,
		.p2_slow = 10, .p2_fast = 5 },
};

/* i9xx (gen3/4) DPLL limits, LVDS output. */
static const struct intel_limit intel_limits_i9xx_lvds = {
	.dot = { .min = 20000, .max = 400000 },
	.vco = { .min = 1400000, .max = 2800000 },
	.n = { .min = 1, .max = 6 },
	.m = { .min = 70, .max = 120 },
	.m1 = { .min = 8, .max = 18 },
	.m2 = { .min = 3, .max = 7 },
	.p = { .min = 7, .max = 98 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 112000,
		.p2_slow = 14, .p2_fast = 7 },
};
318
319
/* G4X DPLL limits, SDVO output. */
static const struct intel_limit intel_limits_g4x_sdvo = {
	.dot = { .min = 25000, .max = 270000 },
	.vco = { .min = 1750000, .max = 3500000},
	.n = { .min = 1, .max = 4 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 17, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 10, .max = 30 },
	.p1 = { .min = 1, .max = 3},
	.p2 = { .dot_limit = 270000,
		.p2_slow = 10,
		.p2_fast = 10
	},
};

/* G4X DPLL limits, HDMI/DAC outputs. */
static const struct intel_limit intel_limits_g4x_hdmi = {
	.dot = { .min = 22000, .max = 400000 },
	.vco = { .min = 1750000, .max = 3500000},
	.n = { .min = 1, .max = 4 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 16, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8},
	.p2 = { .dot_limit = 165000,
		.p2_slow = 10, .p2_fast = 5 },
};

/* G4X DPLL limits, single-channel LVDS (dot_limit 0: p2 is fixed). */
static const struct intel_limit intel_limits_g4x_single_channel_lvds = {
	.dot = { .min = 20000, .max = 115000 },
	.vco = { .min = 1750000, .max = 3500000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 17, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 28, .max = 112 },
	.p1 = { .min = 2, .max = 8 },
	.p2 = { .dot_limit = 0,
		.p2_slow = 14, .p2_fast = 14
	},
};

/* G4X DPLL limits, dual-channel LVDS (dot_limit 0: p2 is fixed). */
static const struct intel_limit intel_limits_g4x_dual_channel_lvds = {
	.dot = { .min = 80000, .max = 224000 },
	.vco = { .min = 1750000, .max = 3500000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 17, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 14, .max = 42 },
	.p1 = { .min = 2, .max = 6 },
	.p2 = { .dot_limit = 0,
		.p2_slow = 7, .p2_fast = 7
	},
};

/* Pineview DPLL limits, SDVO/DAC. Note m1 is unused (fixed 0..0 range). */
static const struct intel_limit pnv_limits_sdvo = {
	.dot = { .min = 20000, .max = 400000},
	.vco = { .min = 1700000, .max = 3500000 },
	/* Pineview's Ncounter range differs from the other i9xx parts */
	.n = { .min = 3, .max = 6 },
	.m = { .min = 2, .max = 256 },
	/* Pineview only has one combined m divider (m2) */
	.m1 = { .min = 0, .max = 0 },
	.m2 = { .min = 0, .max = 254 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 200000,
		.p2_slow = 10, .p2_fast = 5 },
};

/* Pineview DPLL limits, LVDS. */
static const struct intel_limit pnv_limits_lvds = {
	.dot = { .min = 20000, .max = 400000 },
	.vco = { .min = 1700000, .max = 3500000 },
	.n = { .min = 3, .max = 6 },
	.m = { .min = 2, .max = 256 },
	.m1 = { .min = 0, .max = 0 },
	.m2 = { .min = 0, .max = 254 },
	.p = { .min = 7, .max = 112 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 112000,
		.p2_slow = 14, .p2_fast = 14 },
};
403
404
405
406
407
408
/* Ironlake PCH DPLL limits, DAC/SDVO/HDMI outputs. */
static const struct intel_limit ilk_limits_dac = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 5 },
	.m = { .min = 79, .max = 127 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 10, .p2_fast = 5 },
};

/* Ironlake PCH DPLL limits, single-channel LVDS. */
static const struct intel_limit ilk_limits_single_lvds = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 79, .max = 118 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 28, .max = 112 },
	.p1 = { .min = 2, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 14, .p2_fast = 14 },
};

/* Ironlake PCH DPLL limits, dual-channel LVDS. */
static const struct intel_limit ilk_limits_dual_lvds = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 79, .max = 127 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 14, .max = 56 },
	.p1 = { .min = 2, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 7, .p2_fast = 7 },
};

/* Ironlake, single-channel LVDS with a 100 MHz refclk (tighter n/m). */
static const struct intel_limit ilk_limits_single_lvds_100m = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 2 },
	.m = { .min = 79, .max = 126 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 28, .max = 112 },
	.p1 = { .min = 2, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 14, .p2_fast = 14 },
};

/* Ironlake, dual-channel LVDS with a 100 MHz refclk. */
static const struct intel_limit ilk_limits_dual_lvds_100m = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 79, .max = 126 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 14, .max = 42 },
	.p1 = { .min = 2, .max = 6 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 7, .p2_fast = 7 },
};
474
static const struct intel_limit intel_limits_vlv = {
	/*
	 * The "fast clock" is 5x the pixel rate on VLV (see the *5 in
	 * vlv_find_best_dpll()), hence the scaled dot limits here.
	 * .m and .p are unused by intel_pll_is_valid() on this platform.
	 */
	.dot = { .min = 25000 * 5, .max = 270000 * 5 },
	.vco = { .min = 4000000, .max = 6000000 },
	.n = { .min = 1, .max = 7 },
	.m1 = { .min = 2, .max = 3 },
	.m2 = { .min = 11, .max = 156 },
	.p1 = { .min = 2, .max = 3 },
	.p2 = { .p2_slow = 2, .p2_fast = 20 },
};

static const struct intel_limit intel_limits_chv = {
	/*
	 * CHV: like VLV, dot limits are in 5x "fast clock" units.
	 * n and m1 are fixed to 1 and 2 (see chv_find_best_dpll());
	 * m2 is stored in 22.2^-22 fixed point, hence the << 22.
	 */
	.dot = { .min = 25000 * 5, .max = 540000 * 5},
	.vco = { .min = 4800000, .max = 6480000 },
	.n = { .min = 1, .max = 1 },
	.m1 = { .min = 2, .max = 2 },
	.m2 = { .min = 24 << 22, .max = 175 << 22 },
	.p1 = { .min = 2, .max = 4 },
	.p2 = { .p2_slow = 1, .p2_fast = 14 },
};

static const struct intel_limit intel_limits_bxt = {
	/* BXT: no dot clock restriction at this level. */
	.dot = { .min = 0, .max = INT_MAX },
	.vco = { .min = 4800000, .max = 6700000 },
	.n = { .min = 1, .max = 1 },
	.m1 = { .min = 2, .max = 2 },
	/* m2 in 22.2^-22 fixed point, like CHV. */
	.m2 = { .min = 2 << 22, .max = 255 << 22 },
	.p1 = { .min = 2, .max = 4 },
	.p2 = { .p2_slow = 1, .p2_fast = 20 },
};
518
519
520static void
521skl_wa_827(struct drm_i915_private *dev_priv, enum pipe pipe, bool enable)
522{
523 if (enable)
524 intel_de_write(dev_priv, CLKGATE_DIS_PSL(pipe),
525 intel_de_read(dev_priv, CLKGATE_DIS_PSL(pipe)) | DUPS1_GATING_DIS | DUPS2_GATING_DIS);
526 else
527 intel_de_write(dev_priv, CLKGATE_DIS_PSL(pipe),
528 intel_de_read(dev_priv, CLKGATE_DIS_PSL(pipe)) & ~(DUPS1_GATING_DIS | DUPS2_GATING_DIS));
529}
530
531
532static void
533icl_wa_scalerclkgating(struct drm_i915_private *dev_priv, enum pipe pipe,
534 bool enable)
535{
536 if (enable)
537 intel_de_write(dev_priv, CLKGATE_DIS_PSL(pipe),
538 intel_de_read(dev_priv, CLKGATE_DIS_PSL(pipe)) | DPFR_GATING_DIS);
539 else
540 intel_de_write(dev_priv, CLKGATE_DIS_PSL(pipe),
541 intel_de_read(dev_priv, CLKGATE_DIS_PSL(pipe)) & ~DPFR_GATING_DIS);
542}
543
544static bool
545needs_modeset(const struct intel_crtc_state *state)
546{
547 return drm_atomic_crtc_needs_modeset(&state->uapi);
548}
549
550static bool
551is_trans_port_sync_slave(const struct intel_crtc_state *crtc_state)
552{
553 return crtc_state->master_transcoder != INVALID_TRANSCODER;
554}
555
556static bool
557is_trans_port_sync_master(const struct intel_crtc_state *crtc_state)
558{
559 return crtc_state->sync_mode_slaves_mask != 0;
560}
561
562bool
563is_trans_port_sync_mode(const struct intel_crtc_state *crtc_state)
564{
565 return is_trans_port_sync_master(crtc_state) ||
566 is_trans_port_sync_slave(crtc_state);
567}
568
569
570
571
572
573
574
575
576
577
578static int pnv_calc_dpll_params(int refclk, struct dpll *clock)
579{
580 clock->m = clock->m2 + 2;
581 clock->p = clock->p1 * clock->p2;
582 if (WARN_ON(clock->n == 0 || clock->p == 0))
583 return 0;
584 clock->vco = DIV_ROUND_CLOSEST(refclk * clock->m, clock->n);
585 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
586
587 return clock->dot;
588}
589
590static u32 i9xx_dpll_compute_m(struct dpll *dpll)
591{
592 return 5 * (dpll->m1 + 2) + (dpll->m2 + 2);
593}
594
595static int i9xx_calc_dpll_params(int refclk, struct dpll *clock)
596{
597 clock->m = i9xx_dpll_compute_m(clock);
598 clock->p = clock->p1 * clock->p2;
599 if (WARN_ON(clock->n + 2 == 0 || clock->p == 0))
600 return 0;
601 clock->vco = DIV_ROUND_CLOSEST(refclk * clock->m, clock->n + 2);
602 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
603
604 return clock->dot;
605}
606
607static int vlv_calc_dpll_params(int refclk, struct dpll *clock)
608{
609 clock->m = clock->m1 * clock->m2;
610 clock->p = clock->p1 * clock->p2;
611 if (WARN_ON(clock->n == 0 || clock->p == 0))
612 return 0;
613 clock->vco = DIV_ROUND_CLOSEST(refclk * clock->m, clock->n);
614 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
615
616 return clock->dot / 5;
617}
618
619int chv_calc_dpll_params(int refclk, struct dpll *clock)
620{
621 clock->m = clock->m1 * clock->m2;
622 clock->p = clock->p1 * clock->p2;
623 if (WARN_ON(clock->n == 0 || clock->p == 0))
624 return 0;
625 clock->vco = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(refclk, clock->m),
626 clock->n << 22);
627 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
628
629 return clock->dot / 5;
630}
631
632
633
634
635
/*
 * Validate a candidate PLL configuration against the platform limits.
 * Which fields are checked depends on the platform: PNV skips the
 * m1 > m2 requirement; VLV/CHV/GEN9_LP additionally skip the m1 > m2
 * check and the combined m/p range checks (those fields are unused
 * in their limit tables).
 */
static bool intel_pll_is_valid(struct drm_i915_private *dev_priv,
			       const struct intel_limit *limit,
			       const struct dpll *clock)
{
	if (clock->n < limit->n.min || limit->n.max < clock->n)
		return false;
	if (clock->p1 < limit->p1.min || limit->p1.max < clock->p1)
		return false;
	if (clock->m2 < limit->m2.min || limit->m2.max < clock->m2)
		return false;
	if (clock->m1 < limit->m1.min || limit->m1.max < clock->m1)
		return false;

	/* m1 must exceed m2 on platforms with two real m dividers */
	if (!IS_PINEVIEW(dev_priv) && !IS_VALLEYVIEW(dev_priv) &&
	    !IS_CHERRYVIEW(dev_priv) && !IS_GEN9_LP(dev_priv))
		if (clock->m1 <= clock->m2)
			return false;

	if (!IS_VALLEYVIEW(dev_priv) && !IS_CHERRYVIEW(dev_priv) &&
	    !IS_GEN9_LP(dev_priv)) {
		if (clock->p < limit->p.min || limit->p.max < clock->p)
			return false;
		if (clock->m < limit->m.min || limit->m.max < clock->m)
			return false;
	}

	if (clock->vco < limit->vco.min || limit->vco.max < clock->vco)
		return false;

	/* dot clock range check applies on all platforms */
	if (clock->dot < limit->dot.min || limit->dot.max < clock->dot)
		return false;

	return true;
}
672
673static int
674i9xx_select_p2_div(const struct intel_limit *limit,
675 const struct intel_crtc_state *crtc_state,
676 int target)
677{
678 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
679
680 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
681
682
683
684
685
686 if (intel_is_dual_link_lvds(dev_priv))
687 return limit->p2.p2_fast;
688 else
689 return limit->p2.p2_slow;
690 } else {
691 if (target < limit->p2.dot_limit)
692 return limit->p2.p2_slow;
693 else
694 return limit->p2.p2_fast;
695 }
696}
697
698
699
700
701
702
703
704
705
706
707
/*
 * Exhaustively search the i9xx divider space for the configuration whose
 * dot clock is closest to @target (kHz). The best result is stored in
 * @best_clock (zeroed first); if @match_clock is given, only candidates
 * with the same post divider p are considered. Returns true if any
 * valid configuration improved on the initial error of @target.
 */
static bool
i9xx_find_best_dpll(const struct intel_limit *limit,
		    struct intel_crtc_state *crtc_state,
		    int target, int refclk, struct dpll *match_clock,
		    struct dpll *best_clock)
{
	struct drm_device *dev = crtc_state->uapi.crtc->dev;
	struct dpll clock;
	int err = target;

	memset(best_clock, 0, sizeof(*best_clock));

	clock.p2 = i9xx_select_p2_div(limit, crtc_state, target);

	for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max;
	     clock.m1++) {
		for (clock.m2 = limit->m2.min;
		     clock.m2 <= limit->m2.max; clock.m2++) {
			/* hardware requires m2 < m1; larger m2 is pointless */
			if (clock.m2 >= clock.m1)
				break;
			for (clock.n = limit->n.min;
			     clock.n <= limit->n.max; clock.n++) {
				for (clock.p1 = limit->p1.min;
					clock.p1 <= limit->p1.max; clock.p1++) {
					int this_err;

					i9xx_calc_dpll_params(refclk, &clock);
					if (!intel_pll_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;
					if (match_clock &&
					    clock.p != match_clock->p)
						continue;

					/* keep the candidate closest to target */
					this_err = abs(clock.dot - target);
					if (this_err < err) {
						*best_clock = clock;
						err = this_err;
					}
				}
			}
		}
	}

	return (err != target);
}
755
756
757
758
759
760
761
762
763
764
765
/*
 * Pineview variant of i9xx_find_best_dpll(): same exhaustive search,
 * but uses pnv_calc_dpll_params() and has no m2 < m1 constraint
 * (PNV effectively has a single m divider). Returns true if any valid
 * configuration improved on the initial error of @target.
 */
static bool
pnv_find_best_dpll(const struct intel_limit *limit,
		   struct intel_crtc_state *crtc_state,
		   int target, int refclk, struct dpll *match_clock,
		   struct dpll *best_clock)
{
	struct drm_device *dev = crtc_state->uapi.crtc->dev;
	struct dpll clock;
	int err = target;

	memset(best_clock, 0, sizeof(*best_clock));

	clock.p2 = i9xx_select_p2_div(limit, crtc_state, target);

	for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max;
	     clock.m1++) {
		for (clock.m2 = limit->m2.min;
		     clock.m2 <= limit->m2.max; clock.m2++) {
			for (clock.n = limit->n.min;
			     clock.n <= limit->n.max; clock.n++) {
				for (clock.p1 = limit->p1.min;
					clock.p1 <= limit->p1.max; clock.p1++) {
					int this_err;

					pnv_calc_dpll_params(refclk, &clock);
					if (!intel_pll_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;
					if (match_clock &&
					    clock.p != match_clock->p)
						continue;

					/* keep the candidate closest to target */
					this_err = abs(clock.dot - target);
					if (this_err < err) {
						*best_clock = clock;
						err = this_err;
					}
				}
			}
		}
	}

	return (err != target);
}
811
812
813
814
815
816
817
818
819
820
821
/*
 * G4X divider search. Unlike the i9xx/pnv variants this accepts any
 * candidate within ~0.59% of @target (err_most), iterates the m/p
 * dividers downwards, and narrows max_n as better candidates are found
 * (preferring smaller n). Returns true if any acceptable configuration
 * was found; @match_clock is unused here.
 */
static bool
g4x_find_best_dpll(const struct intel_limit *limit,
		   struct intel_crtc_state *crtc_state,
		   int target, int refclk, struct dpll *match_clock,
		   struct dpll *best_clock)
{
	struct drm_device *dev = crtc_state->uapi.crtc->dev;
	struct dpll clock;
	int max_n;
	bool found = false;
	/* approximately equals target * 0.00585 */
	int err_most = (target >> 8) + (target >> 9);

	memset(best_clock, 0, sizeof(*best_clock));

	clock.p2 = i9xx_select_p2_div(limit, crtc_state, target);

	max_n = limit->n.max;
	/* based on hardware requirement, prefer smaller n to precision */
	for (clock.n = limit->n.min; clock.n <= max_n; clock.n++) {
		/* based on hardware requirement, prefer larger m1,m2 */
		for (clock.m1 = limit->m1.max;
		     clock.m1 >= limit->m1.min; clock.m1--) {
			for (clock.m2 = limit->m2.max;
			     clock.m2 >= limit->m2.min; clock.m2--) {
				for (clock.p1 = limit->p1.max;
				     clock.p1 >= limit->p1.min; clock.p1--) {
					int this_err;

					i9xx_calc_dpll_params(refclk, &clock);
					if (!intel_pll_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;

					this_err = abs(clock.dot - target);
					if (this_err < err_most) {
						*best_clock = clock;
						err_most = this_err;
						max_n = clock.n;
						found = true;
					}
				}
			}
		}
	}
	return found;
}
870
871
872
873
874
/*
 * Decide whether @calculated_clock beats @best_clock. On CHV only the
 * post divider matters (bigger p wins, error is reported as 0). On VLV
 * the candidate wins if it is within 100 ppm of @target and has a
 * bigger p, or if its error beats the previous best by more than
 * 10 ppm. *error_ppm receives the candidate's error (0 when the
 * "bigger p" fast path is taken).
 */
static bool vlv_PLL_is_optimal(struct drm_device *dev, int target_freq,
			       const struct dpll *calculated_clock,
			       const struct dpll *best_clock,
			       unsigned int best_error_ppm,
			       unsigned int *error_ppm)
{
	/*
	 * For CHV ignore the error and consider only the p value:
	 * prefer a bigger p.
	 */
	if (IS_CHERRYVIEW(to_i915(dev))) {
		*error_ppm = 0;

		return calculated_clock->p > best_clock->p;
	}

	if (drm_WARN_ON_ONCE(dev, !target_freq))
		return false;

	*error_ppm = div_u64(1000000ULL *
			     abs(target_freq - calculated_clock->dot),
			     target_freq);
	/*
	 * Prefer a bigger p if the error is within 100 ppm;
	 * report 0 ppm so later candidates can still displace this one
	 * only via the same fast path.
	 */
	if (*error_ppm < 100 && calculated_clock->p > best_clock->p) {
		*error_ppm = 0;

		return true;
	}

	return *error_ppm + 10 < best_error_ppm;
}
910
911
912
913
914
915
/*
 * VLV divider search. Works in "fast clock" units (5x the dot clock),
 * computes m2 directly from the other dividers rather than iterating it,
 * and uses vlv_PLL_is_optimal() to rank candidates by ppm error.
 * @match_clock is unused. Returns true if any valid configuration
 * was found.
 */
static bool
vlv_find_best_dpll(const struct intel_limit *limit,
		   struct intel_crtc_state *crtc_state,
		   int target, int refclk, struct dpll *match_clock,
		   struct dpll *best_clock)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct dpll clock;
	unsigned int bestppm = 1000000;
	/* min update 19.2 MHz */
	int max_n = min(limit->n.max, refclk / 19200);
	bool found = false;

	target *= 5; /* fast clock */

	memset(best_clock, 0, sizeof(*best_clock));

	/* based on hardware requirement, prefer smaller n to precision */
	for (clock.n = limit->n.min; clock.n <= max_n; clock.n++) {
		for (clock.p1 = limit->p1.max; clock.p1 >= limit->p1.min; clock.p1--) {
			for (clock.p2 = limit->p2.p2_fast; clock.p2 >= limit->p2.p2_slow;
			     clock.p2 -= clock.p2 > 10 ? 2 : 1) {
				clock.p = clock.p1 * clock.p2;
				/* based on hardware requirement, prefer bigger m1,m2 values */
				for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max; clock.m1++) {
					unsigned int ppm;

					/* solve m2 so that vco ~= target * p */
					clock.m2 = DIV_ROUND_CLOSEST(target * clock.p * clock.n,
								     refclk * clock.m1);

					vlv_calc_dpll_params(refclk, &clock);

					if (!intel_pll_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;

					if (!vlv_PLL_is_optimal(dev, target,
								&clock,
								best_clock,
								bestppm, &ppm))
						continue;

					*best_clock = clock;
					bestppm = ppm;
					found = true;
				}
			}
		}
	}

	return found;
}
970
971
972
973
974
975
976static bool
977chv_find_best_dpll(const struct intel_limit *limit,
978 struct intel_crtc_state *crtc_state,
979 int target, int refclk, struct dpll *match_clock,
980 struct dpll *best_clock)
981{
982 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
983 struct drm_device *dev = crtc->base.dev;
984 unsigned int best_error_ppm;
985 struct dpll clock;
986 u64 m2;
987 int found = false;
988
989 memset(best_clock, 0, sizeof(*best_clock));
990 best_error_ppm = 1000000;
991
992
993
994
995
996
997 clock.n = 1, clock.m1 = 2;
998 target *= 5;
999
1000 for (clock.p1 = limit->p1.max; clock.p1 >= limit->p1.min; clock.p1--) {
1001 for (clock.p2 = limit->p2.p2_fast;
1002 clock.p2 >= limit->p2.p2_slow;
1003 clock.p2 -= clock.p2 > 10 ? 2 : 1) {
1004 unsigned int error_ppm;
1005
1006 clock.p = clock.p1 * clock.p2;
1007
1008 m2 = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(target, clock.p * clock.n) << 22,
1009 refclk * clock.m1);
1010
1011 if (m2 > INT_MAX/clock.m1)
1012 continue;
1013
1014 clock.m2 = m2;
1015
1016 chv_calc_dpll_params(refclk, &clock);
1017
1018 if (!intel_pll_is_valid(to_i915(dev), limit, &clock))
1019 continue;
1020
1021 if (!vlv_PLL_is_optimal(dev, target, &clock, best_clock,
1022 best_error_ppm, &error_ppm))
1023 continue;
1024
1025 *best_clock = clock;
1026 best_error_ppm = error_ppm;
1027 found = true;
1028 }
1029 }
1030
1031 return found;
1032}
1033
1034bool bxt_find_best_dpll(struct intel_crtc_state *crtc_state,
1035 struct dpll *best_clock)
1036{
1037 int refclk = 100000;
1038 const struct intel_limit *limit = &intel_limits_bxt;
1039
1040 return chv_find_best_dpll(limit, crtc_state,
1041 crtc_state->port_clock, refclk,
1042 NULL, best_clock);
1043}
1044
/*
 * Sample the pipe's scanline counter twice, 5 ms apart, and report
 * whether it changed (i.e. whether the pipe is actively scanning out).
 */
static bool pipe_scanline_is_moving(struct drm_i915_private *dev_priv,
				    enum pipe pipe)
{
	i915_reg_t reg = PIPEDSL(pipe);
	u32 line1, line2;
	u32 line_mask;

	/* the scanline field is narrower on gen2 */
	if (IS_GEN(dev_priv, 2))
		line_mask = DSL_LINEMASK_GEN2;
	else
		line_mask = DSL_LINEMASK_GEN3;

	line1 = intel_de_read(dev_priv, reg) & line_mask;
	msleep(5);
	line2 = intel_de_read(dev_priv, reg) & line_mask;

	return line1 != line2;
}
1063
/*
 * Wait (up to 100 ms) for the pipe's scanline counter to reach the
 * desired moving/stopped @state, logging an error on timeout.
 */
static void wait_for_pipe_scanline_moving(struct intel_crtc *crtc, bool state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (wait_for(pipe_scanline_is_moving(dev_priv, pipe) == state, 100))
		drm_err(&dev_priv->drm,
			"pipe %c scanline %s wait timed out\n",
			pipe_name(pipe), onoff(state));
}
1075
/* Wait for the pipe's scanline counter to stop moving. */
static void intel_wait_for_pipe_scanline_stopped(struct intel_crtc *crtc)
{
	wait_for_pipe_scanline_moving(crtc, false);
}

/* Wait for the pipe's scanline counter to start moving. */
static void intel_wait_for_pipe_scanline_moving(struct intel_crtc *crtc)
{
	wait_for_pipe_scanline_moving(crtc, true);
}
1085
/*
 * Wait for a disabled pipe to actually shut down. Gen4+ exposes a
 * PIPECONF "active" status bit to poll; older hardware is detected by
 * waiting for the scanline counter to stop.
 */
static void
intel_wait_for_pipe_off(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (INTEL_GEN(dev_priv) >= 4) {
		enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
		i915_reg_t reg = PIPECONF(cpu_transcoder);

		/* Wait for the Pipe State to go off */
		if (intel_de_wait_for_clear(dev_priv, reg,
					    I965_PIPECONF_ACTIVE, 100))
			drm_WARN(&dev_priv->drm, 1,
				 "pipe_off wait timed out\n");
	} else {
		intel_wait_for_pipe_scanline_stopped(crtc);
	}
}
1105
1106
/* Assert that the DPLL for @pipe is enabled/disabled as expected. */
void assert_pll(struct drm_i915_private *dev_priv,
		enum pipe pipe, bool state)
{
	u32 val;
	bool cur_state;

	val = intel_de_read(dev_priv, DPLL(pipe));
	cur_state = !!(val & DPLL_VCO_ENABLE);
	I915_STATE_WARN(cur_state != state,
	     "PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
1119
1120
/*
 * Assert that the DSI PLL is enabled/disabled as expected, reading its
 * state through the CCK sideband (under the CCK lock).
 */
void assert_dsi_pll(struct drm_i915_private *dev_priv, bool state)
{
	u32 val;
	bool cur_state;

	vlv_cck_get(dev_priv);
	val = vlv_cck_read(dev_priv, CCK_REG_DSI_PLL_CONTROL);
	vlv_cck_put(dev_priv);

	cur_state = val & DSI_PLL_VCO_EN;
	I915_STATE_WARN(cur_state != state,
	     "DSI PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
1135
/* Assert that the FDI TX for @pipe is enabled/disabled as expected. */
static void assert_fdi_tx(struct drm_i915_private *dev_priv,
			  enum pipe pipe, bool state)
{
	bool cur_state;

	if (HAS_DDI(dev_priv)) {
		/*
		 * DDI has no dedicated FDI_TX register; the transcoder
		 * function control enable bit serves the same purpose.
		 * NOTE(review): pipe is cast directly to transcoder here -
		 * presumably a 1:1 pipe/transcoder mapping is assumed on
		 * the affected platforms; confirm against callers.
		 */
		enum transcoder cpu_transcoder = (enum transcoder)pipe;
		u32 val = intel_de_read(dev_priv,
					TRANS_DDI_FUNC_CTL(cpu_transcoder));
		cur_state = !!(val & TRANS_DDI_FUNC_ENABLE);
	} else {
		u32 val = intel_de_read(dev_priv, FDI_TX_CTL(pipe));
		cur_state = !!(val & FDI_TX_ENABLE);
	}
	I915_STATE_WARN(cur_state != state,
	     "FDI TX state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_fdi_tx_enabled(d, p) assert_fdi_tx(d, p, true)
#define assert_fdi_tx_disabled(d, p) assert_fdi_tx(d, p, false)
1162
/* Assert that the FDI RX for @pipe is enabled/disabled as expected. */
static void assert_fdi_rx(struct drm_i915_private *dev_priv,
			  enum pipe pipe, bool state)
{
	u32 val;
	bool cur_state;

	val = intel_de_read(dev_priv, FDI_RX_CTL(pipe));
	cur_state = !!(val & FDI_RX_ENABLE);
	I915_STATE_WARN(cur_state != state,
	     "FDI RX state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_fdi_rx_enabled(d, p) assert_fdi_rx(d, p, true)
#define assert_fdi_rx_disabled(d, p) assert_fdi_rx(d, p, false)
1177
/*
 * Assert that the FDI TX PLL for @pipe is enabled. Skipped on ILK
 * (gen5) and on DDI platforms, where this check does not apply.
 */
static void assert_fdi_tx_pll_enabled(struct drm_i915_private *dev_priv,
				      enum pipe pipe)
{
	u32 val;

	/* ILK FDI PLL is always enabled */
	if (IS_GEN(dev_priv, 5))
		return;

	/* On Haswell, DDI ports are responsible for the FDI PLL setup */
	if (HAS_DDI(dev_priv))
		return;

	val = intel_de_read(dev_priv, FDI_TX_CTL(pipe));
	I915_STATE_WARN(!(val & FDI_TX_PLL_ENABLE), "FDI TX PLL assertion failure, should be active but is disabled\n");
}
1194
/* Assert that the FDI RX PLL for @pipe is enabled/disabled as expected. */
void assert_fdi_rx_pll(struct drm_i915_private *dev_priv,
		       enum pipe pipe, bool state)
{
	u32 val;
	bool cur_state;

	val = intel_de_read(dev_priv, FDI_RX_CTL(pipe));
	cur_state = !!(val & FDI_RX_PLL_ENABLE);
	I915_STATE_WARN(cur_state != state,
	     "FDI RX PLL assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
1207
/*
 * Assert that the panel power sequencer registers for the panel driven
 * by @pipe are not locked (i.e. either panel power is off or the unlock
 * key is written). First locates which pipe the panel is attached to by
 * decoding the PPS port select field, then checks the PP control
 * register. Not applicable on DDI platforms (warns and returns).
 */
void assert_panel_unlocked(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	i915_reg_t pp_reg;
	u32 val;
	enum pipe panel_pipe = INVALID_PIPE;
	bool locked = true;

	if (drm_WARN_ON(&dev_priv->drm, HAS_DDI(dev_priv)))
		return;

	if (HAS_PCH_SPLIT(dev_priv)) {
		u32 port_sel;

		pp_reg = PP_CONTROL(0);
		port_sel = intel_de_read(dev_priv, PP_ON_DELAYS(0)) & PANEL_PORT_SELECT_MASK;

		/* map the PPS port selection to the pipe driving that port */
		switch (port_sel) {
		case PANEL_PORT_SELECT_LVDS:
			intel_lvds_port_enabled(dev_priv, PCH_LVDS, &panel_pipe);
			break;
		case PANEL_PORT_SELECT_DPA:
			intel_dp_port_enabled(dev_priv, DP_A, PORT_A, &panel_pipe);
			break;
		case PANEL_PORT_SELECT_DPC:
			intel_dp_port_enabled(dev_priv, PCH_DP_C, PORT_C, &panel_pipe);
			break;
		case PANEL_PORT_SELECT_DPD:
			intel_dp_port_enabled(dev_priv, PCH_DP_D, PORT_D, &panel_pipe);
			break;
		default:
			MISSING_CASE(port_sel);
			break;
		}
	} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		/* VLV/CHV have a per-pipe PPS */
		pp_reg = PP_CONTROL(pipe);
		panel_pipe = pipe;
	} else {
		u32 port_sel;

		pp_reg = PP_CONTROL(0);
		port_sel = intel_de_read(dev_priv, PP_ON_DELAYS(0)) & PANEL_PORT_SELECT_MASK;

		/* pre-PCH platforms only support an LVDS panel here */
		drm_WARN_ON(&dev_priv->drm,
			    port_sel != PANEL_PORT_SELECT_LVDS);
		intel_lvds_port_enabled(dev_priv, LVDS, &panel_pipe);
	}

	val = intel_de_read(dev_priv, pp_reg);
	/* unlocked means power off, or the unlock key is present */
	if (!(val & PANEL_POWER_ON) ||
	    ((val & PANEL_UNLOCK_MASK) == PANEL_UNLOCK_REGS))
		locked = false;

	I915_STATE_WARN(panel_pipe == pipe && locked,
	     "panel assertion failure, pipe %c regs locked\n",
	     pipe_name(pipe));
}
1265
/*
 * Warn if the transcoder/pipe enable state does not match @state.
 * If the transcoder's power domain is off the pipe is treated as
 * disabled without touching the (unpowered) register.
 */
void assert_pipe(struct drm_i915_private *dev_priv,
		 enum transcoder cpu_transcoder, bool state)
{
	bool cur_state;
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;

	/* on i830 the pipe is always expected to be enabled */
	if (IS_I830(dev_priv))
		state = true;

	power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (wakeref) {
		u32 val = intel_de_read(dev_priv, PIPECONF(cpu_transcoder));
		cur_state = !!(val & PIPECONF_ENABLE);

		/* balance the get_if_enabled above */
		intel_display_power_put(dev_priv, power_domain, wakeref);
	} else {
		cur_state = false;
	}

	I915_STATE_WARN(cur_state != state,
			"transcoder %s assertion failure (expected %s, current %s)\n",
			transcoder_name(cpu_transcoder),
			onoff(state), onoff(cur_state));
}
1293
1294static void assert_plane(struct intel_plane *plane, bool state)
1295{
1296 enum pipe pipe;
1297 bool cur_state;
1298
1299 cur_state = plane->get_hw_state(plane, &pipe);
1300
1301 I915_STATE_WARN(cur_state != state,
1302 "%s assertion failure (expected %s, current %s)\n",
1303 plane->base.name, onoff(state), onoff(cur_state));
1304}
1305
1306#define assert_plane_enabled(p) assert_plane(p, true)
1307#define assert_plane_disabled(p) assert_plane(p, false)
1308
1309static void assert_planes_disabled(struct intel_crtc *crtc)
1310{
1311 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1312 struct intel_plane *plane;
1313
1314 for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane)
1315 assert_plane_disabled(plane);
1316}
1317
static void assert_vblank_disabled(struct drm_crtc *crtc)
{
	/*
	 * drm_crtc_vblank_get() succeeding (returning 0) means vblank
	 * processing has not been shut off for this CRTC, which is the
	 * failure case here; drop the reference we just acquired so the
	 * vblank refcount stays balanced.
	 */
	if (I915_STATE_WARN_ON(drm_crtc_vblank_get(crtc) == 0))
		drm_crtc_vblank_put(crtc);
}
1323
1324void assert_pch_transcoder_disabled(struct drm_i915_private *dev_priv,
1325 enum pipe pipe)
1326{
1327 u32 val;
1328 bool enabled;
1329
1330 val = intel_de_read(dev_priv, PCH_TRANSCONF(pipe));
1331 enabled = !!(val & TRANS_ENABLE);
1332 I915_STATE_WARN(enabled,
1333 "transcoder assertion failed, should be off on pipe %c but is still active\n",
1334 pipe_name(pipe));
1335}
1336
1337static void assert_pch_dp_disabled(struct drm_i915_private *dev_priv,
1338 enum pipe pipe, enum port port,
1339 i915_reg_t dp_reg)
1340{
1341 enum pipe port_pipe;
1342 bool state;
1343
1344 state = intel_dp_port_enabled(dev_priv, dp_reg, port, &port_pipe);
1345
1346 I915_STATE_WARN(state && port_pipe == pipe,
1347 "PCH DP %c enabled on transcoder %c, should be disabled\n",
1348 port_name(port), pipe_name(pipe));
1349
1350 I915_STATE_WARN(HAS_PCH_IBX(dev_priv) && !state && port_pipe == PIPE_B,
1351 "IBX PCH DP %c still using transcoder B\n",
1352 port_name(port));
1353}
1354
1355static void assert_pch_hdmi_disabled(struct drm_i915_private *dev_priv,
1356 enum pipe pipe, enum port port,
1357 i915_reg_t hdmi_reg)
1358{
1359 enum pipe port_pipe;
1360 bool state;
1361
1362 state = intel_sdvo_port_enabled(dev_priv, hdmi_reg, &port_pipe);
1363
1364 I915_STATE_WARN(state && port_pipe == pipe,
1365 "PCH HDMI %c enabled on transcoder %c, should be disabled\n",
1366 port_name(port), pipe_name(pipe));
1367
1368 I915_STATE_WARN(HAS_PCH_IBX(dev_priv) && !state && port_pipe == PIPE_B,
1369 "IBX PCH HDMI %c still using transcoder B\n",
1370 port_name(port));
1371}
1372
/*
 * Warn if any PCH port (DP, VGA, LVDS, HDMI/SDVO) is still enabled on
 * @pipe's transcoder. Used before disabling the PCH transcoder.
 */
static void assert_pch_ports_disabled(struct drm_i915_private *dev_priv,
				      enum pipe pipe)
{
	enum pipe port_pipe;

	assert_pch_dp_disabled(dev_priv, pipe, PORT_B, PCH_DP_B);
	assert_pch_dp_disabled(dev_priv, pipe, PORT_C, PCH_DP_C);
	assert_pch_dp_disabled(dev_priv, pipe, PORT_D, PCH_DP_D);

	I915_STATE_WARN(intel_crt_port_enabled(dev_priv, PCH_ADPA, &port_pipe) &&
			port_pipe == pipe,
			"PCH VGA enabled on transcoder %c, should be disabled\n",
			pipe_name(pipe));

	I915_STATE_WARN(intel_lvds_port_enabled(dev_priv, PCH_LVDS, &port_pipe) &&
			port_pipe == pipe,
			"PCH LVDS enabled on transcoder %c, should be disabled\n",
			pipe_name(pipe));

	/* PCH_HDMIB/C/D registers also cover the SDVO case */
	assert_pch_hdmi_disabled(dev_priv, pipe, PORT_B, PCH_HDMIB);
	assert_pch_hdmi_disabled(dev_priv, pipe, PORT_C, PCH_HDMIC);
	assert_pch_hdmi_disabled(dev_priv, pipe, PORT_D, PCH_HDMID);
}
1397
/*
 * Write the precomputed DPLL value for @crtc's pipe and wait for the
 * PLL to report lock. Low-level helper for vlv_enable_pll().
 */
static void _vlv_enable_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	intel_de_write(dev_priv, DPLL(pipe), pipe_config->dpll_hw_state.dpll);
	intel_de_posting_read(dev_priv, DPLL(pipe));
	/* let the write land and the PLL spin up before polling for lock */
	udelay(150);

	if (intel_de_wait_for_set(dev_priv, DPLL(pipe), DPLL_LOCK_VLV, 1))
		drm_err(&dev_priv->drm, "DPLL %d failed to lock\n", pipe);
}
1411
/*
 * Enable the VLV DPLL for @crtc. The pipe must be disabled and the
 * panel power sequencer registers unlocked first. DPLL_MD is always
 * programmed, even if the VCO itself is not being enabled.
 */
static void vlv_enable_pll(struct intel_crtc *crtc,
			   const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	assert_pipe_disabled(dev_priv, pipe_config->cpu_transcoder);

	/* PLL is protected by panel, make sure we can write it */
	assert_panel_unlocked(dev_priv, pipe);

	/* only spin up the VCO when it was requested in the DPLL state */
	if (pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE)
		_vlv_enable_pll(crtc, pipe_config);

	intel_de_write(dev_priv, DPLL_MD(pipe),
		       pipe_config->dpll_hw_state.dpll_md);
	intel_de_posting_read(dev_priv, DPLL_MD(pipe));
}
1430
1431
/*
 * Low-level CHV PLL enable: turn on the DPIO clock buffer for the
 * pipe's channel, program the DPLL, and wait for lock.
 */
static void _chv_enable_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	u32 tmp;

	vlv_dpio_get(dev_priv);

	/* enable the DCLKP clock buffer via sideband before the DPLL */
	tmp = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW14(port));
	tmp |= DPIO_DCLKP_EN;
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW14(port), tmp);

	vlv_dpio_put(dev_priv);

	/* short settle time after flipping DCLKP_EN — TODO confirm against bspec */
	udelay(1);

	intel_de_write(dev_priv, DPLL(pipe), pipe_config->dpll_hw_state.dpll);

	if (intel_de_wait_for_set(dev_priv, DPLL(pipe), DPLL_LOCK_VLV, 1))
		drm_err(&dev_priv->drm, "PLL %d failed to lock\n", pipe);
}
1461
/*
 * Enable the CHV DPLL for @crtc. The pipe must be disabled and the
 * panel power sequencer unlocked. For pipes other than A, DPLL_MD is
 * written through the pipe B register while CBR4 chicken bits steer
 * the write to the intended pipe (hardware workaround — the value is
 * also cached in dev_priv->chv_dpll_md[] since it can't be read back
 * the normal way; NOTE(review): presumably WaPixelRepeatMode, confirm).
 */
static void chv_enable_pll(struct intel_crtc *crtc,
			   const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	assert_pipe_disabled(dev_priv, pipe_config->cpu_transcoder);

	/* PLL is protected by panel, make sure we can write it */
	assert_panel_unlocked(dev_priv, pipe);

	/* only spin up the VCO when it was requested in the DPLL state */
	if (pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE)
		_chv_enable_pll(crtc, pipe_config);

	if (pipe != PIPE_A) {
		/*
		 * Select the target pipe via CBR4, write DPLL_MD through
		 * the pipe B register, then deselect again.
		 */
		intel_de_write(dev_priv, CBR4_VLV, CBR_DPLLBMD_PIPE(pipe));
		intel_de_write(dev_priv, DPLL_MD(PIPE_B),
			       pipe_config->dpll_hw_state.dpll_md);
		intel_de_write(dev_priv, CBR4_VLV, 0);
		dev_priv->chv_dpll_md[pipe] = pipe_config->dpll_hw_state.dpll_md;

		/*
		 * Sanity check: pipe B's DPLL must have VGA mode disabled,
		 * otherwise the chicken-bit redirection isn't trustworthy.
		 */
		drm_WARN_ON(&dev_priv->drm,
			    (intel_de_read(dev_priv, DPLL(PIPE_B)) &
			     DPLL_VGA_MODE_DIS) == 0);
	} else {
		intel_de_write(dev_priv, DPLL_MD(pipe),
			       pipe_config->dpll_hw_state.dpll_md);
		intel_de_posting_read(dev_priv, DPLL_MD(pipe));
	}
}
1502
1503static bool i9xx_has_pps(struct drm_i915_private *dev_priv)
1504{
1505 if (IS_I830(dev_priv))
1506 return false;
1507
1508 return IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv);
1509}
1510
/*
 * Enable the gen2-4 DPLL for @crtc. The write sequence is deliberate:
 * the DPLL is first written with VGA mode still enabled, then with the
 * final value, then (re)written several times with settle delays to
 * make sure the configuration latches.
 */
static void i9xx_enable_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	i915_reg_t reg = DPLL(crtc->pipe);
	u32 dpll = crtc_state->dpll_hw_state.dpll;
	int i;

	assert_pipe_disabled(dev_priv, crtc_state->cpu_transcoder);

	/* PLL is protected by panel, make sure we can write it */
	if (i9xx_has_pps(dev_priv))
		assert_panel_unlocked(dev_priv, crtc->pipe);

	/*
	 * First write with VGA mode still enabled, then the final value.
	 * NOTE(review): presumably required before changing dividers —
	 * confirm against the PRM.
	 */
	intel_de_write(dev_priv, reg, dpll & ~DPLL_VGA_MODE_DIS);
	intel_de_write(dev_priv, reg, dpll);

	/* wait for the clock to stabilize */
	intel_de_posting_read(dev_priv, reg);
	udelay(150);

	if (INTEL_GEN(dev_priv) >= 4) {
		intel_de_write(dev_priv, DPLL_MD(crtc->pipe),
			       crtc_state->dpll_hw_state.dpll_md);
	} else {
		/*
		 * No DPLL_MD before gen4; rewrite the DPLL value instead.
		 */
		intel_de_write(dev_priv, reg, dpll);
	}

	/* rewrite + settle a few times to make sure the value latches */
	for (i = 0; i < 3; i++) {
		intel_de_write(dev_priv, reg, dpll);
		intel_de_posting_read(dev_priv, reg);
		udelay(150);
	}
}
1556
/*
 * Disable the gen2-4 DPLL, leaving only VGA mode disable set.
 * On i830 the DPLL is left alone entirely.
 */
static void i9xx_disable_pll(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/* i830 never turns its DPLL off */
	if (IS_I830(dev_priv))
		return;

	/* Make sure the pipe isn't still relying on us */
	assert_pipe_disabled(dev_priv, crtc_state->cpu_transcoder);

	intel_de_write(dev_priv, DPLL(pipe), DPLL_VGA_MODE_DIS);
	intel_de_posting_read(dev_priv, DPLL(pipe));
}
1573
1574static void vlv_disable_pll(struct drm_i915_private *dev_priv, enum pipe pipe)
1575{
1576 u32 val;
1577
1578
1579 assert_pipe_disabled(dev_priv, (enum transcoder)pipe);
1580
1581 val = DPLL_INTEGRATED_REF_CLK_VLV |
1582 DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
1583 if (pipe != PIPE_A)
1584 val |= DPLL_INTEGRATED_CRI_CLK_VLV;
1585
1586 intel_de_write(dev_priv, DPLL(pipe), val);
1587 intel_de_posting_read(dev_priv, DPLL(pipe));
1588}
1589
/*
 * Disable the CHV DPLL for @pipe: drop back to reference-clock-only
 * mode, then turn off the DPIO clock buffer for the pipe's channel.
 */
static void chv_disable_pll(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	u32 val;

	/* Make sure the pipe isn't still relying on us */
	assert_pipe_disabled(dev_priv, (enum transcoder)pipe);

	val = DPLL_SSC_REF_CLK_CHV |
		DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
	/* pipes B/C keep the integrated CRI clock enabled */
	if (pipe != PIPE_A)
		val |= DPLL_INTEGRATED_CRI_CLK_VLV;

	intel_de_write(dev_priv, DPLL(pipe), val);
	intel_de_posting_read(dev_priv, DPLL(pipe));

	vlv_dpio_get(dev_priv);

	/* Disable the DCLKP clock buffer via sideband */
	val = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW14(port));
	val &= ~DPIO_DCLKP_EN;
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW14(port), val);

	vlv_dpio_put(dev_priv);
}
1615
/*
 * Wait (up to 1 s) for the PHY "port ready" bits of @dig_port to match
 * @expected_mask, warning on timeout. Which register and bit field to
 * poll depends on the port: B/C live in DPLL(0) (port C shifted by 4),
 * port D in DPIO_PHY_STATUS.
 */
void vlv_wait_port_ready(struct drm_i915_private *dev_priv,
			 struct intel_digital_port *dig_port,
			 unsigned int expected_mask)
{
	u32 port_mask;
	i915_reg_t dpll_reg;

	switch (dig_port->base.port) {
	case PORT_B:
		port_mask = DPLL_PORTB_READY_MASK;
		dpll_reg = DPLL(0);
		break;
	case PORT_C:
		port_mask = DPLL_PORTC_READY_MASK;
		dpll_reg = DPLL(0);
		/* port C's ready bits sit 4 bits above port B's */
		expected_mask <<= 4;
		break;
	case PORT_D:
		port_mask = DPLL_PORTD_READY_MASK;
		dpll_reg = DPIO_PHY_STATUS;
		break;
	default:
		BUG();
	}

	if (intel_de_wait_for_register(dev_priv, dpll_reg,
				       port_mask, expected_mask, 1000))
		drm_WARN(&dev_priv->drm, 1,
			 "timed out waiting for [ENCODER:%d:%s] port ready: got 0x%x, expected 0x%x\n",
			 dig_port->base.base.base.id, dig_port->base.base.name,
			 intel_de_read(dev_priv, dpll_reg) & port_mask,
			 expected_mask);
}
1649
/*
 * Enable the ILK/CPT PCH transcoder for @crtc_state's pipe.
 * The shared DPLL and both FDI directions must already be running.
 */
static void ilk_enable_pch_transcoder(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 val, pipeconf_val;

	/* Make sure the PCH DPLL is enabled */
	assert_shared_dpll_enabled(dev_priv, crtc_state->shared_dpll);

	/* FDI must be feeding us bits for PCH ports */
	assert_fdi_tx_enabled(dev_priv, pipe);
	assert_fdi_rx_enabled(dev_priv, pipe);

	if (HAS_PCH_CPT(dev_priv)) {
		reg = TRANS_CHICKEN2(pipe);
		val = intel_de_read(dev_priv, reg);
		/*
		 * Workaround: set the timing override bit before
		 * enabling the PCH transcoder.
		 */
		val |= TRANS_CHICKEN2_TIMING_OVERRIDE;
		/* Configure frame start delay to match the CPU */
		val &= ~TRANS_CHICKEN2_FRAME_START_DELAY_MASK;
		val |= TRANS_CHICKEN2_FRAME_START_DELAY(0);
		intel_de_write(dev_priv, reg, val);
	}

	reg = PCH_TRANSCONF(pipe);
	val = intel_de_read(dev_priv, reg);
	pipeconf_val = intel_de_read(dev_priv, PIPECONF(pipe));

	if (HAS_PCH_IBX(dev_priv)) {
		/* Configure frame start delay to match the CPU */
		val &= ~TRANS_FRAME_START_DELAY_MASK;
		val |= TRANS_FRAME_START_DELAY(0);

		/*
		 * Keep the transcoder BPC consistent with PIPECONF;
		 * HDMI is forced to 8bpc here.
		 */
		val &= ~PIPECONF_BPC_MASK;
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
			val |= PIPECONF_8BPC;
		else
			val |= pipeconf_val & PIPECONF_BPC_MASK;
	}

	/* Propagate the pipe's interlace mode to the transcoder */
	val &= ~TRANS_INTERLACE_MASK;
	if ((pipeconf_val & PIPECONF_INTERLACE_MASK) == PIPECONF_INTERLACED_ILK) {
		/* IBX SDVO uses the legacy interlaced mode */
		if (HAS_PCH_IBX(dev_priv) &&
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO))
			val |= TRANS_LEGACY_INTERLACED_ILK;
		else
			val |= TRANS_INTERLACED;
	} else {
		val |= TRANS_PROGRESSIVE;
	}

	intel_de_write(dev_priv, reg, val | TRANS_ENABLE);
	if (intel_de_wait_for_set(dev_priv, reg, TRANS_STATE_ENABLE, 100))
		drm_err(&dev_priv->drm, "failed to enable transcoder %c\n",
			pipe_name(pipe));
}
1716
/*
 * Enable the single LPT PCH transcoder (always fed from FDI RX on
 * pipe A) for the given CPU transcoder.
 */
static void lpt_enable_pch_transcoder(struct drm_i915_private *dev_priv,
				      enum transcoder cpu_transcoder)
{
	u32 val, pipeconf_val;

	/* FDI must be feeding us bits for PCH ports */
	assert_fdi_tx_enabled(dev_priv, (enum pipe) cpu_transcoder);
	assert_fdi_rx_enabled(dev_priv, PIPE_A);

	val = intel_de_read(dev_priv, TRANS_CHICKEN2(PIPE_A));
	/* Workaround: set the timing override bit before enabling */
	val |= TRANS_CHICKEN2_TIMING_OVERRIDE;
	/* Configure frame start delay to match the CPU */
	val &= ~TRANS_CHICKEN2_FRAME_START_DELAY_MASK;
	val |= TRANS_CHICKEN2_FRAME_START_DELAY(0);
	intel_de_write(dev_priv, TRANS_CHICKEN2(PIPE_A), val);

	val = TRANS_ENABLE;
	pipeconf_val = intel_de_read(dev_priv, PIPECONF(cpu_transcoder));

	/* Propagate the pipe's interlace mode to the transcoder */
	if ((pipeconf_val & PIPECONF_INTERLACE_MASK_HSW) ==
	    PIPECONF_INTERLACED_ILK)
		val |= TRANS_INTERLACED;
	else
		val |= TRANS_PROGRESSIVE;

	intel_de_write(dev_priv, LPT_TRANSCONF, val);
	if (intel_de_wait_for_set(dev_priv, LPT_TRANSCONF,
				  TRANS_STATE_ENABLE, 100))
		drm_err(&dev_priv->drm, "Failed to enable PCH transcoder\n");
}
1748
/*
 * Disable the ILK/CPT PCH transcoder for @pipe. FDI and all PCH ports
 * must already be off.
 */
static void ilk_disable_pch_transcoder(struct drm_i915_private *dev_priv,
				       enum pipe pipe)
{
	i915_reg_t reg;
	u32 val;

	/* FDI relies on the transcoder */
	assert_fdi_tx_disabled(dev_priv, pipe);
	assert_fdi_rx_disabled(dev_priv, pipe);

	/* Ports must be off as well */
	assert_pch_ports_disabled(dev_priv, pipe);

	reg = PCH_TRANSCONF(pipe);
	val = intel_de_read(dev_priv, reg);
	val &= ~TRANS_ENABLE;
	intel_de_write(dev_priv, reg, val);

	/* wait for the PCH transcoder to actually turn off */
	if (intel_de_wait_for_clear(dev_priv, reg, TRANS_STATE_ENABLE, 50))
		drm_err(&dev_priv->drm, "failed to disable transcoder %c\n",
			pipe_name(pipe));

	if (HAS_PCH_CPT(dev_priv)) {
		/* Workaround: clear the timing override chicken bit again */
		reg = TRANS_CHICKEN2(pipe);
		val = intel_de_read(dev_priv, reg);
		val &= ~TRANS_CHICKEN2_TIMING_OVERRIDE;
		intel_de_write(dev_priv, reg, val);
	}
}
1779
/* Disable the single LPT PCH transcoder and clear its chicken bit. */
void lpt_disable_pch_transcoder(struct drm_i915_private *dev_priv)
{
	u32 val;

	val = intel_de_read(dev_priv, LPT_TRANSCONF);
	val &= ~TRANS_ENABLE;
	intel_de_write(dev_priv, LPT_TRANSCONF, val);

	/* wait for the PCH transcoder to actually turn off */
	if (intel_de_wait_for_clear(dev_priv, LPT_TRANSCONF,
				    TRANS_STATE_ENABLE, 50))
		drm_err(&dev_priv->drm, "Failed to disable PCH transcoder\n");

	/* Workaround: clear the timing override chicken bit again */
	val = intel_de_read(dev_priv, TRANS_CHICKEN2(PIPE_A));
	val &= ~TRANS_CHICKEN2_TIMING_OVERRIDE;
	intel_de_write(dev_priv, TRANS_CHICKEN2(PIPE_A), val);
}
1797
1798enum pipe intel_crtc_pch_transcoder(struct intel_crtc *crtc)
1799{
1800 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1801
1802 if (HAS_PCH_LPT(dev_priv))
1803 return PIPE_A;
1804 else
1805 return crtc->pipe;
1806}
1807
/*
 * Maximum value of the hardware frame counter usable for vblank
 * timestamping on this crtc, or 0 when the hardware counter cannot be
 * used and software counting must be done instead.
 */
static u32 intel_crtc_max_vblank_count(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	u32 mode_flags = crtc->mode_flags;

	/*
	 * DSI command mode driven by TE events: the hw frame counter
	 * is not usable, fall back to software counting.
	 */
	if (mode_flags & (I915_MODE_FLAG_DSI_USE_TE0 | I915_MODE_FLAG_DSI_USE_TE1))
		return 0;

	/*
	 * i965gm + TV output: hw frame counter not usable —
	 * NOTE(review): presumably a hardware quirk, confirm.
	 */
	if (IS_I965GM(dev_priv) &&
	    (crtc_state->output_types & BIT(INTEL_OUTPUT_TVOUT)))
		return 0;

	if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
		return 0xffffffff; /* full 32 bit counter */
	else if (INTEL_GEN(dev_priv) >= 3)
		return 0xffffff; /* only 24 bits of frame count */
	else
		return 0; /* no hardware frame counter at all */
}
1838
/*
 * Turn on vblank processing for @crtc_state's crtc. The max vblank
 * count must be set before drm_crtc_vblank_on() so the DRM core knows
 * whether to trust the hardware frame counter.
 */
void intel_crtc_vblank_on(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	assert_vblank_disabled(&crtc->base);
	drm_crtc_set_max_vblank_count(&crtc->base,
				      intel_crtc_max_vblank_count(crtc_state));
	drm_crtc_vblank_on(&crtc->base);
}
1848
/* Turn off vblank processing for the crtc and verify it stayed off. */
void intel_crtc_vblank_off(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	drm_crtc_vblank_off(&crtc->base);
	assert_vblank_disabled(&crtc->base);
}
1856
/*
 * Enable the pipe/transcoder for @new_crtc_state. All planes must be
 * disabled and the relevant PLLs running before the pipe is turned on.
 */
void intel_enable_pipe(const struct intel_crtc_state *new_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 val;

	drm_dbg_kms(&dev_priv->drm, "enabling pipe %c\n", pipe_name(pipe));

	assert_planes_disabled(crtc);

	/*
	 * Sanity-check that the clock feeding this pipe is running:
	 * DSI/DPLL on GMCH platforms, FDI PLLs when a PCH encoder is used.
	 */
	if (HAS_GMCH(dev_priv)) {
		if (intel_crtc_has_type(new_crtc_state, INTEL_OUTPUT_DSI))
			assert_dsi_pll_enabled(dev_priv);
		else
			assert_pll_enabled(dev_priv, pipe);
	} else {
		if (new_crtc_state->has_pch_encoder) {
			/* FDI must run before the pipe can feed the PCH */
			assert_fdi_rx_pll_enabled(dev_priv,
						  intel_crtc_pch_transcoder(crtc));
			assert_fdi_tx_pll_enabled(dev_priv,
						  (enum pipe) cpu_transcoder);
		}
		/* FIXME: assert CPU port conditions for SNB+ */
	}

	trace_intel_pipe_enable(crtc);

	reg = PIPECONF(cpu_transcoder);
	val = intel_de_read(dev_priv, reg);
	if (val & PIPECONF_ENABLE) {
		/* we keep both pipes enabled on 830 */
		drm_WARN_ON(&dev_priv->drm, !IS_I830(dev_priv));
		return;
	}

	intel_de_write(dev_priv, reg, val | PIPECONF_ENABLE);
	intel_de_posting_read(dev_priv, reg);

	/*
	 * When there is no usable hardware frame counter, wait for the
	 * scanline counter to start moving so the pipe is really up
	 * before we return.
	 */
	if (intel_crtc_max_vblank_count(new_crtc_state) == 0)
		intel_wait_for_pipe_scanline_moving(crtc);
}
1914
/*
 * Disable the pipe/transcoder for @old_crtc_state and wait for it to
 * actually turn off. Planes must already be disabled. On i830 the pipe
 * enable bit is left set (both pipes stay on there); double wide mode
 * is always cleared.
 */
void intel_disable_pipe(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 val;

	drm_dbg_kms(&dev_priv->drm, "disabling pipe %c\n", pipe_name(pipe));

	/*
	 * Planes must be disabled before the pipe is shut down.
	 */
	assert_planes_disabled(crtc);

	trace_intel_pipe_disable(crtc);

	reg = PIPECONF(cpu_transcoder);
	val = intel_de_read(dev_priv, reg);
	if ((val & PIPECONF_ENABLE) == 0)
		return;

	/*
	 * Double wide has to be disabled even if the pipe itself
	 * stays enabled (i830 case below).
	 */
	if (old_crtc_state->double_wide)
		val &= ~PIPECONF_DOUBLE_WIDE;

	/* i830 keeps its pipes enabled */
	if (!IS_I830(dev_priv))
		val &= ~PIPECONF_ENABLE;

	intel_de_write(dev_priv, reg, val);
	if ((val & PIPECONF_ENABLE) == 0)
		intel_wait_for_pipe_off(old_crtc_state);
}
1954
/* Tile size in bytes: 2KiB on gen2, 4KiB everywhere else. */
static unsigned int intel_tile_size(const struct drm_i915_private *dev_priv)
{
	if (IS_GEN(dev_priv, 2))
		return 2048;

	return 4096;
}
1959
1960static bool is_ccs_plane(const struct drm_framebuffer *fb, int plane)
1961{
1962 if (!is_ccs_modifier(fb->modifier))
1963 return false;
1964
1965 return plane >= fb->format->num_planes / 2;
1966}
1967
1968static bool is_gen12_ccs_modifier(u64 modifier)
1969{
1970 return modifier == I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS ||
1971 modifier == I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS;
1972
1973}
1974
1975static bool is_gen12_ccs_plane(const struct drm_framebuffer *fb, int plane)
1976{
1977 return is_gen12_ccs_modifier(fb->modifier) && is_ccs_plane(fb, plane);
1978}
1979
1980static bool is_aux_plane(const struct drm_framebuffer *fb, int plane)
1981{
1982 if (is_ccs_modifier(fb->modifier))
1983 return is_ccs_plane(fb, plane);
1984
1985 return plane == 1;
1986}
1987
1988static int main_to_ccs_plane(const struct drm_framebuffer *fb, int main_plane)
1989{
1990 drm_WARN_ON(fb->dev, !is_ccs_modifier(fb->modifier) ||
1991 (main_plane && main_plane >= fb->format->num_planes / 2));
1992
1993 return fb->format->num_planes / 2 + main_plane;
1994}
1995
1996static int ccs_to_main_plane(const struct drm_framebuffer *fb, int ccs_plane)
1997{
1998 drm_WARN_ON(fb->dev, !is_ccs_modifier(fb->modifier) ||
1999 ccs_plane < fb->format->num_planes / 2);
2000
2001 return ccs_plane - fb->format->num_planes / 2;
2002}
2003
2004int intel_main_to_aux_plane(const struct drm_framebuffer *fb, int main_plane)
2005{
2006 struct drm_i915_private *i915 = to_i915(fb->dev);
2007
2008 if (is_ccs_modifier(fb->modifier))
2009 return main_to_ccs_plane(fb, main_plane);
2010 else if (INTEL_GEN(i915) < 11 &&
2011 intel_format_info_is_yuv_semiplanar(fb->format, fb->modifier))
2012 return 1;
2013 else
2014 return 0;
2015}
2016
2017bool
2018intel_format_info_is_yuv_semiplanar(const struct drm_format_info *info,
2019 uint64_t modifier)
2020{
2021 return info->is_yuv &&
2022 info->num_planes == (is_ccs_modifier(modifier) ? 4 : 2);
2023}
2024
2025static bool is_semiplanar_uv_plane(const struct drm_framebuffer *fb,
2026 int color_plane)
2027{
2028 return intel_format_info_is_yuv_semiplanar(fb->format, fb->modifier) &&
2029 color_plane == 1;
2030}
2031
/*
 * Width in bytes of one tile row for @color_plane, derived from the
 * fb's modifier (and for Yf tiling, the pixel size). CCS planes have
 * their own, smaller, tile widths.
 */
static unsigned int
intel_tile_width_bytes(const struct drm_framebuffer *fb, int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);
	unsigned int cpp = fb->format->cpp[color_plane];

	switch (fb->modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		/* linear "tiles" are a full page */
		return intel_tile_size(dev_priv);
	case I915_FORMAT_MOD_X_TILED:
		if (IS_GEN(dev_priv, 2))
			return 128;
		else
			return 512;
	case I915_FORMAT_MOD_Y_TILED_CCS:
		if (is_ccs_plane(fb, color_plane))
			return 128;
		fallthrough;
	case I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS:
	case I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS:
		if (is_ccs_plane(fb, color_plane))
			return 64;
		fallthrough;
	case I915_FORMAT_MOD_Y_TILED:
		/* main plane of Y-tiled (and Y-tiled CCS) surfaces */
		if (IS_GEN(dev_priv, 2) || HAS_128_BYTE_Y_TILING(dev_priv))
			return 128;
		else
			return 512;
	case I915_FORMAT_MOD_Yf_TILED_CCS:
		if (is_ccs_plane(fb, color_plane))
			return 128;
		fallthrough;
	case I915_FORMAT_MOD_Yf_TILED:
		/* Yf tile width depends on the pixel size */
		switch (cpp) {
		case 1:
			return 64;
		case 2:
		case 4:
			return 128;
		case 8:
		case 16:
			return 256;
		default:
			MISSING_CASE(cpp);
			return cpp;
		}
		break;
	default:
		MISSING_CASE(fb->modifier);
		return cpp;
	}
}
2084
2085static unsigned int
2086intel_tile_height(const struct drm_framebuffer *fb, int color_plane)
2087{
2088 if (is_gen12_ccs_plane(fb, color_plane))
2089 return 1;
2090
2091 return intel_tile_size(to_i915(fb->dev)) /
2092 intel_tile_width_bytes(fb, color_plane);
2093}
2094
2095
2096static void intel_tile_dims(const struct drm_framebuffer *fb, int color_plane,
2097 unsigned int *tile_width,
2098 unsigned int *tile_height)
2099{
2100 unsigned int tile_width_bytes = intel_tile_width_bytes(fb, color_plane);
2101 unsigned int cpp = fb->format->cpp[color_plane];
2102
2103 *tile_width = tile_width_bytes / cpp;
2104 *tile_height = intel_tile_height(fb, color_plane);
2105}
2106
2107static unsigned int intel_tile_row_size(const struct drm_framebuffer *fb,
2108 int color_plane)
2109{
2110 unsigned int tile_width, tile_height;
2111
2112 intel_tile_dims(fb, color_plane, &tile_width, &tile_height);
2113
2114 return fb->pitches[color_plane] * tile_height;
2115}
2116
/* Round @height up to a whole number of tile rows for @color_plane. */
unsigned int
intel_fb_align_height(const struct drm_framebuffer *fb,
		      int color_plane, unsigned int height)
{
	return ALIGN(height, intel_tile_height(fb, color_plane));
}
2125
2126unsigned int intel_rotation_info_size(const struct intel_rotation_info *rot_info)
2127{
2128 unsigned int size = 0;
2129 int i;
2130
2131 for (i = 0 ; i < ARRAY_SIZE(rot_info->plane); i++)
2132 size += rot_info->plane[i].width * rot_info->plane[i].height;
2133
2134 return size;
2135}
2136
2137unsigned int intel_remapped_info_size(const struct intel_remapped_info *rem_info)
2138{
2139 unsigned int size = 0;
2140 int i;
2141
2142 for (i = 0 ; i < ARRAY_SIZE(rem_info->plane); i++)
2143 size += rem_info->plane[i].width * rem_info->plane[i].height;
2144
2145 return size;
2146}
2147
2148static void
2149intel_fill_fb_ggtt_view(struct i915_ggtt_view *view,
2150 const struct drm_framebuffer *fb,
2151 unsigned int rotation)
2152{
2153 view->type = I915_GGTT_VIEW_NORMAL;
2154 if (drm_rotation_90_or_270(rotation)) {
2155 view->type = I915_GGTT_VIEW_ROTATED;
2156 view->rotated = to_intel_framebuffer(fb)->rot_info;
2157 }
2158}
2159
/* Required GGTT alignment for cursor surfaces on this platform. */
static unsigned int intel_cursor_alignment(const struct drm_i915_private *dev_priv)
{
	if (IS_I830(dev_priv))
		return 16 * 1024;

	if (IS_I85X(dev_priv))
		return 256;

	if (IS_I845G(dev_priv) || IS_I865G(dev_priv))
		return 32;

	return 4 * 1024;
}
2171
/* Required GGTT alignment for linear scanout surfaces on this platform. */
static unsigned int intel_linear_alignment(const struct drm_i915_private *dev_priv)
{
	if (INTEL_GEN(dev_priv) >= 9)
		return 256 * 1024;

	if (IS_I965G(dev_priv) || IS_I965GM(dev_priv) ||
	    IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		return 128 * 1024;

	if (INTEL_GEN(dev_priv) >= 4)
		return 4 * 1024;

	return 0;
}
2184
/*
 * Required GGTT alignment for @color_plane of @fb, based on the
 * modifier and platform generation.
 */
static unsigned int intel_surf_alignment(const struct drm_framebuffer *fb,
					 int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);

	/* aux/CCS planes use page alignment (pre-gen12 aux, any CCS plane) */
	if ((INTEL_GEN(dev_priv) < 12 && is_aux_plane(fb, color_plane)) ||
	    is_ccs_plane(fb, color_plane))
		return 4096;

	switch (fb->modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		return intel_linear_alignment(dev_priv);
	case I915_FORMAT_MOD_X_TILED:
		if (INTEL_GEN(dev_priv) >= 9)
			return 256 * 1024;
		return 0;
	case I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS:
		/* semiplanar UV planes align to a full tile row */
		if (is_semiplanar_uv_plane(fb, color_plane))
			return intel_tile_row_size(fb, color_plane);
		fallthrough;
	case I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS:
		return 16 * 1024;
	case I915_FORMAT_MOD_Y_TILED_CCS:
	case I915_FORMAT_MOD_Yf_TILED_CCS:
	case I915_FORMAT_MOD_Y_TILED:
		/* gen12 semiplanar UV planes align to a full tile row */
		if (INTEL_GEN(dev_priv) >= 12 &&
		    is_semiplanar_uv_plane(fb, color_plane))
			return intel_tile_row_size(fb, color_plane);
		fallthrough;
	case I915_FORMAT_MOD_Yf_TILED:
		return 1 * 1024 * 1024;
	default:
		MISSING_CASE(fb->modifier);
		return 0;
	}
}
2222
2223static bool intel_plane_uses_fence(const struct intel_plane_state *plane_state)
2224{
2225 struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
2226 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
2227
2228 return INTEL_GEN(dev_priv) < 4 ||
2229 (plane->has_fbc &&
2230 plane_state->view.type == I915_GGTT_VIEW_NORMAL);
2231}
2232
/*
 * Pin @fb's backing object into the GGTT for scanout and, if requested
 * and possible, attach a fence. Returns a referenced vma on success or
 * an ERR_PTR. PLANE_HAS_FENCE is set in @out_flags when a fence was
 * actually attached. The caller releases everything via
 * intel_unpin_fb_vma().
 */
struct i915_vma *
intel_pin_and_fence_fb_obj(struct drm_framebuffer *fb,
			   const struct i915_ggtt_view *view,
			   bool uses_fence,
			   unsigned long *out_flags)
{
	struct drm_device *dev = fb->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
	intel_wakeref_t wakeref;
	struct i915_vma *vma;
	unsigned int pinctl;
	u32 alignment;

	if (drm_WARN_ON(dev, !i915_gem_object_is_framebuffer(obj)))
		return ERR_PTR(-EINVAL);

	alignment = intel_surf_alignment(fb, 0);
	if (drm_WARN_ON(dev, alignment && !is_power_of_2(alignment)))
		return ERR_PTR(-EINVAL);

	/*
	 * VT-d scanout workaround: bump the alignment to at least 256KiB
	 * when the platform needs it.
	 */
	if (intel_scanout_needs_vtd_wa(dev_priv) && alignment < 256 * 1024)
		alignment = 256 * 1024;

	/*
	 * Hold a runtime pm wakeref across the pin so the device stays
	 * awake while we touch the GGTT.
	 */
	wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);

	atomic_inc(&dev_priv->gpu_error.pending_fb_pin);

	/*
	 * On GMCH platforms the framebuffer must be pinned in the
	 * mappable aperture.
	 */
	pinctl = 0;
	if (HAS_GMCH(dev_priv))
		pinctl |= PIN_MAPPABLE;

	vma = i915_gem_object_pin_to_display_plane(obj,
						   alignment, view, pinctl);
	if (IS_ERR(vma))
		goto err;

	if (uses_fence && i915_vma_is_map_and_fenceable(vma)) {
		int ret;

		/*
		 * Try to attach a fence. A failure is fatal only on
		 * gen2/3, where scanout depends on the fence; on gen4+
		 * we simply carry on without one.
		 */
		ret = i915_vma_pin_fence(vma);
		if (ret != 0 && INTEL_GEN(dev_priv) < 4) {
			i915_vma_unpin(vma);
			vma = ERR_PTR(ret);
			goto err;
		}

		if (ret == 0 && vma->fence)
			*out_flags |= PLANE_HAS_FENCE;
	}

	/* success: hand the caller a reference on the vma */
	i915_vma_get(vma);
err:
	atomic_dec(&dev_priv->gpu_error.pending_fb_pin);
	intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
	return vma;
}
2327
2328void intel_unpin_fb_vma(struct i915_vma *vma, unsigned long flags)
2329{
2330 if (flags & PLANE_HAS_FENCE)
2331 i915_vma_unpin_fence(vma);
2332 i915_vma_unpin(vma);
2333 i915_vma_put(vma);
2334}
2335
2336static int intel_fb_pitch(const struct drm_framebuffer *fb, int color_plane,
2337 unsigned int rotation)
2338{
2339 if (drm_rotation_90_or_270(rotation))
2340 return to_intel_framebuffer(fb)->rotated[color_plane].pitch;
2341 else
2342 return fb->pitches[color_plane];
2343}
2344
2345
2346
2347
2348
2349
2350
2351u32 intel_fb_xy_to_linear(int x, int y,
2352 const struct intel_plane_state *state,
2353 int color_plane)
2354{
2355 const struct drm_framebuffer *fb = state->hw.fb;
2356 unsigned int cpp = fb->format->cpp[color_plane];
2357 unsigned int pitch = state->color_plane[color_plane].stride;
2358
2359 return y * pitch + x * cpp;
2360}
2361
2362
2363
2364
2365
2366
2367void intel_add_fb_offsets(int *x, int *y,
2368 const struct intel_plane_state *state,
2369 int color_plane)
2370
2371{
2372 *x += state->color_plane[color_plane].x;
2373 *y += state->color_plane[color_plane].y;
2374}
2375
/*
 * Rebase a tiled surface offset from @old_offset to @new_offset,
 * folding the difference (a whole number of tiles) into the x/y
 * coordinates instead. Both offsets must be tile-size aligned and
 * new_offset <= old_offset. Returns @new_offset.
 */
static u32 intel_adjust_tile_offset(int *x, int *y,
				    unsigned int tile_width,
				    unsigned int tile_height,
				    unsigned int tile_size,
				    unsigned int pitch_tiles,
				    u32 old_offset,
				    u32 new_offset)
{
	unsigned int pitch_pixels = pitch_tiles * tile_width;
	unsigned int tiles;

	WARN_ON(old_offset & (tile_size - 1));
	WARN_ON(new_offset & (tile_size - 1));
	WARN_ON(new_offset > old_offset);

	tiles = (old_offset - new_offset) / tile_size;

	/* distribute the tile delta over full rows and a column remainder */
	*y += tiles / pitch_tiles * tile_height;
	*x += tiles % pitch_tiles * tile_width;

	/* minimize x by folding any excess into y */
	*y += *x / pitch_pixels * tile_height;
	*x %= pitch_pixels;

	return new_offset;
}
2402
2403static bool is_surface_linear(const struct drm_framebuffer *fb, int color_plane)
2404{
2405 return fb->modifier == DRM_FORMAT_MOD_LINEAR ||
2406 is_gen12_ccs_plane(fb, color_plane);
2407}
2408
/*
 * Move the delta between @old_offset and @new_offset into the (x, y)
 * coordinates, so that new_offset + (x, y) addresses the same pixel
 * that old_offset + the original (x, y) did.  @new_offset must not be
 * larger than @old_offset.  Returns @new_offset.
 */
static u32 intel_adjust_aligned_offset(int *x, int *y,
				       const struct drm_framebuffer *fb,
				       int color_plane,
				       unsigned int rotation,
				       unsigned int pitch,
				       u32 old_offset, u32 new_offset)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);
	unsigned int cpp = fb->format->cpp[color_plane];

	drm_WARN_ON(&dev_priv->drm, new_offset > old_offset);

	if (!is_surface_linear(fb, color_plane)) {
		unsigned int tile_size, tile_width, tile_height;
		unsigned int pitch_tiles;

		tile_size = intel_tile_size(dev_priv);
		intel_tile_dims(fb, color_plane, &tile_width, &tile_height);

		/* Express the pitch in tiles for either orientation. */
		if (drm_rotation_90_or_270(rotation)) {
			pitch_tiles = pitch / tile_height;
			swap(tile_width, tile_height);
		} else {
			pitch_tiles = pitch / (tile_width * cpp);
		}

		intel_adjust_tile_offset(x, y, tile_width, tile_height,
					 tile_size, pitch_tiles,
					 old_offset, new_offset);
	} else {
		/* Linear: collapse everything into a byte offset, re-split. */
		old_offset += *y * pitch + *x * cpp;

		*y = (old_offset - new_offset) / pitch;
		*x = ((old_offset - new_offset) - *y * pitch) / cpp;
	}

	return new_offset;
}
2447
2448
2449
2450
2451
2452static u32 intel_plane_adjust_aligned_offset(int *x, int *y,
2453 const struct intel_plane_state *state,
2454 int color_plane,
2455 u32 old_offset, u32 new_offset)
2456{
2457 return intel_adjust_aligned_offset(x, y, state->hw.fb, color_plane,
2458 state->hw.rotation,
2459 state->color_plane[color_plane].stride,
2460 old_offset, new_offset);
2461}
2462
2463
2464
2465
2466
2467
2468
2469
2470
2471
2472
2473
2474
2475
2476
/*
 * Compute the byte offset of pixel (*x, *y) within @color_plane,
 * rounded down to @alignment (0 = no rounding), and rewrite (*x, *y)
 * to be relative to the returned offset.  For tiled surfaces the
 * offset is computed in whole tiles with the remainder folded into
 * (x, y); for linear surfaces it is plain byte arithmetic.
 */
static u32 intel_compute_aligned_offset(struct drm_i915_private *dev_priv,
					int *x, int *y,
					const struct drm_framebuffer *fb,
					int color_plane,
					unsigned int pitch,
					unsigned int rotation,
					u32 alignment)
{
	unsigned int cpp = fb->format->cpp[color_plane];
	u32 offset, offset_aligned;

	if (!is_surface_linear(fb, color_plane)) {
		unsigned int tile_size, tile_width, tile_height;
		unsigned int tile_rows, tiles, pitch_tiles;

		tile_size = intel_tile_size(dev_priv);
		intel_tile_dims(fb, color_plane, &tile_width, &tile_height);

		/* Express the pitch in tiles for either orientation. */
		if (drm_rotation_90_or_270(rotation)) {
			pitch_tiles = pitch / tile_height;
			swap(tile_width, tile_height);
		} else {
			pitch_tiles = pitch / (tile_width * cpp);
		}

		/* Split (x, y) into whole tiles plus in-tile remainder. */
		tile_rows = *y / tile_height;
		*y %= tile_height;

		tiles = *x / tile_width;
		*x %= tile_width;

		offset = (tile_rows * pitch_tiles + tiles) * tile_size;

		offset_aligned = offset;
		if (alignment)
			offset_aligned = rounddown(offset_aligned, alignment);

		/* Move the offset/offset_aligned delta back into (x, y). */
		intel_adjust_tile_offset(x, y, tile_width, tile_height,
					 tile_size, pitch_tiles,
					 offset, offset_aligned);
	} else {
		offset = *y * pitch + *x * cpp;
		offset_aligned = offset;
		if (alignment) {
			offset_aligned = rounddown(offset_aligned, alignment);
			*y = (offset % alignment) / pitch;
			*x = ((offset % alignment) - *y * pitch) / cpp;
		} else {
			*y = *x = 0;
		}
	}

	return offset_aligned;
}
2531
2532static u32 intel_plane_compute_aligned_offset(int *x, int *y,
2533 const struct intel_plane_state *state,
2534 int color_plane)
2535{
2536 struct intel_plane *intel_plane = to_intel_plane(state->uapi.plane);
2537 struct drm_i915_private *dev_priv = to_i915(intel_plane->base.dev);
2538 const struct drm_framebuffer *fb = state->hw.fb;
2539 unsigned int rotation = state->hw.rotation;
2540 int pitch = state->color_plane[color_plane].stride;
2541 u32 alignment;
2542
2543 if (intel_plane->id == PLANE_CURSOR)
2544 alignment = intel_cursor_alignment(dev_priv);
2545 else
2546 alignment = intel_surf_alignment(fb, color_plane);
2547
2548 return intel_compute_aligned_offset(dev_priv, x, y, fb, color_plane,
2549 pitch, rotation, alignment);
2550}
2551
2552
/*
 * Convert fb->offsets[color_plane] into (x, y) pixel coordinates.
 * Validates the offset's alignment (tile-row aligned for TGL+
 * semiplanar UV planes, tile aligned for other tiled fbs, none for
 * linear) and that offset plus one aligned plane's worth of data fits
 * in a u32.  Returns 0 on success, -EINVAL on misalignment, -ERANGE
 * on overflow.
 */
static int intel_fb_offset_to_xy(int *x, int *y,
				 const struct drm_framebuffer *fb,
				 int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);
	unsigned int height;
	u32 alignment;

	if (INTEL_GEN(dev_priv) >= 12 &&
	    is_semiplanar_uv_plane(fb, color_plane))
		alignment = intel_tile_row_size(fb, color_plane);
	else if (fb->modifier != DRM_FORMAT_MOD_LINEAR)
		alignment = intel_tile_size(dev_priv);
	else
		alignment = 0; /* linear has no offset alignment requirement */

	if (alignment != 0 && fb->offsets[color_plane] % alignment) {
		drm_dbg_kms(&dev_priv->drm,
			    "Misaligned offset 0x%08x for color plane %d\n",
			    fb->offsets[color_plane], color_plane);
		return -EINVAL;
	}

	height = drm_framebuffer_plane_height(fb->height, fb, color_plane);
	height = ALIGN(height, intel_tile_height(fb, color_plane));

	/* Catch potential overflows early. */
	if (add_overflows_t(u32, mul_u32_u32(height, fb->pitches[color_plane]),
			    fb->offsets[color_plane])) {
		drm_dbg_kms(&dev_priv->drm,
			    "Bad offset 0x%08x or pitch %d for color plane %d\n",
			    fb->offsets[color_plane], fb->pitches[color_plane],
			    color_plane);
		return -ERANGE;
	}

	*x = 0;
	*y = 0;

	/* Distribute the full byte offset into (x, y) from (0, 0). */
	intel_adjust_aligned_offset(x, y,
				    fb, color_plane, DRM_MODE_ROTATE_0,
				    fb->pitches[color_plane],
				    fb->offsets[color_plane], 0);

	return 0;
}
2599
2600static unsigned int intel_fb_modifier_to_tiling(u64 fb_modifier)
2601{
2602 switch (fb_modifier) {
2603 case I915_FORMAT_MOD_X_TILED:
2604 return I915_TILING_X;
2605 case I915_FORMAT_MOD_Y_TILED:
2606 case I915_FORMAT_MOD_Y_TILED_CCS:
2607 case I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS:
2608 case I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS:
2609 return I915_TILING_Y;
2610 default:
2611 return I915_TILING_NONE;
2612 }
2613}
2614
2615
2616
2617
2618
2619
2620
2621
2622
2623
2624
2625
2626
2627
2628
/*
 * Format info for SKL-style CCS (color control surface) framebuffers:
 * plane 1 carries the CCS data at 1 byte per 8x16-pixel (hsub x vsub)
 * region of the 32bpp main surface.
 */
static const struct drm_format_info skl_ccs_formats[] = {
	{ .format = DRM_FORMAT_XRGB8888, .depth = 24, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, },
	{ .format = DRM_FORMAT_XBGR8888, .depth = 24, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, },
	{ .format = DRM_FORMAT_ARGB8888, .depth = 32, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, .has_alpha = true, },
	{ .format = DRM_FORMAT_ABGR8888, .depth = 32, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, .has_alpha = true, },
};
2639
2640
2641
2642
2643
2644
2645
2646
/*
 * Format info for gen12 CCS framebuffers: the AUX plane(s) are
 * described with explicit block dimensions (char_per_block/block_w/
 * block_h) rather than cpp.  Semiplanar YUV formats (NV12, P01x)
 * carry one AUX plane per main plane, giving 4 planes in total.
 */
static const struct drm_format_info gen12_ccs_formats[] = {
	{ .format = DRM_FORMAT_XRGB8888, .depth = 24, .num_planes = 2,
	  .char_per_block = { 4, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 1, .vsub = 1, },
	{ .format = DRM_FORMAT_XBGR8888, .depth = 24, .num_planes = 2,
	  .char_per_block = { 4, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 1, .vsub = 1, },
	{ .format = DRM_FORMAT_ARGB8888, .depth = 32, .num_planes = 2,
	  .char_per_block = { 4, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 1, .vsub = 1, .has_alpha = true },
	{ .format = DRM_FORMAT_ABGR8888, .depth = 32, .num_planes = 2,
	  .char_per_block = { 4, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 1, .vsub = 1, .has_alpha = true },
	{ .format = DRM_FORMAT_YUYV, .num_planes = 2,
	  .char_per_block = { 2, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 2, .vsub = 1, .is_yuv = true },
	{ .format = DRM_FORMAT_YVYU, .num_planes = 2,
	  .char_per_block = { 2, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 2, .vsub = 1, .is_yuv = true },
	{ .format = DRM_FORMAT_UYVY, .num_planes = 2,
	  .char_per_block = { 2, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 2, .vsub = 1, .is_yuv = true },
	{ .format = DRM_FORMAT_VYUY, .num_planes = 2,
	  .char_per_block = { 2, 1 }, .block_w = { 1, 2 }, .block_h = { 1, 1 },
	  .hsub = 2, .vsub = 1, .is_yuv = true },
	{ .format = DRM_FORMAT_NV12, .num_planes = 4,
	  .char_per_block = { 1, 2, 1, 1 }, .block_w = { 1, 1, 4, 4 }, .block_h = { 1, 1, 1, 1 },
	  .hsub = 2, .vsub = 2, .is_yuv = true },
	{ .format = DRM_FORMAT_P010, .num_planes = 4,
	  .char_per_block = { 2, 4, 1, 1 }, .block_w = { 1, 1, 2, 2 }, .block_h = { 1, 1, 1, 1 },
	  .hsub = 2, .vsub = 2, .is_yuv = true },
	{ .format = DRM_FORMAT_P012, .num_planes = 4,
	  .char_per_block = { 2, 4, 1, 1 }, .block_w = { 1, 1, 2, 2 }, .block_h = { 1, 1, 1, 1 },
	  .hsub = 2, .vsub = 2, .is_yuv = true },
	{ .format = DRM_FORMAT_P016, .num_planes = 4,
	  .char_per_block = { 2, 4, 1, 1 }, .block_w = { 1, 1, 2, 2 }, .block_h = { 1, 1, 1, 1 },
	  .hsub = 2, .vsub = 2, .is_yuv = true },
};
2685
2686static const struct drm_format_info *
2687lookup_format_info(const struct drm_format_info formats[],
2688 int num_formats, u32 format)
2689{
2690 int i;
2691
2692 for (i = 0; i < num_formats; i++) {
2693 if (formats[i].format == format)
2694 return &formats[i];
2695 }
2696
2697 return NULL;
2698}
2699
2700static const struct drm_format_info *
2701intel_get_format_info(const struct drm_mode_fb_cmd2 *cmd)
2702{
2703 switch (cmd->modifier[0]) {
2704 case I915_FORMAT_MOD_Y_TILED_CCS:
2705 case I915_FORMAT_MOD_Yf_TILED_CCS:
2706 return lookup_format_info(skl_ccs_formats,
2707 ARRAY_SIZE(skl_ccs_formats),
2708 cmd->pixel_format);
2709 case I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS:
2710 case I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS:
2711 return lookup_format_info(gen12_ccs_formats,
2712 ARRAY_SIZE(gen12_ccs_formats),
2713 cmd->pixel_format);
2714 default:
2715 return NULL;
2716 }
2717}
2718
2719bool is_ccs_modifier(u64 modifier)
2720{
2721 return modifier == I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS ||
2722 modifier == I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS ||
2723 modifier == I915_FORMAT_MOD_Y_TILED_CCS ||
2724 modifier == I915_FORMAT_MOD_Yf_TILED_CCS;
2725}
2726
2727static int gen12_ccs_aux_stride(struct drm_framebuffer *fb, int ccs_plane)
2728{
2729 return DIV_ROUND_UP(fb->pitches[ccs_to_main_plane(fb, ccs_plane)],
2730 512) * 64;
2731}
2732
/*
 * Maximum fb stride (bytes) any plane can scan out for the given
 * pixel format and modifier, or 0 when no crtc exists.
 */
u32 intel_plane_fb_max_stride(struct drm_i915_private *dev_priv,
			      u32 pixel_format, u64 modifier)
{
	struct intel_crtc *crtc;
	struct intel_plane *plane;

	/*
	 * Queries the first crtc's primary plane only; NOTE(review):
	 * this assumes that plane's stride limit is representative for
	 * the whole device — confirm on platforms with asymmetric
	 * plane capabilities.
	 */
	crtc = intel_get_first_crtc(dev_priv);
	if (!crtc)
		return 0;

	plane = to_intel_plane(crtc->base.primary);

	return plane->max_stride(plane, pixel_format, modifier,
				 DRM_MODE_ROTATE_0);
}
2753
/*
 * Maximum stride accepted when creating a framebuffer.  Non-CCS fbs
 * may exceed the plane hw limit (they can be remapped through the GTT,
 * see intel_plane_can_remap()), so allow a larger fixed limit for
 * them; CCS fbs and pre-gen4 fall back to the plane's own limit.
 */
static
u32 intel_fb_max_stride(struct drm_i915_private *dev_priv,
			u32 pixel_format, u64 modifier)
{
	/* 256KiB on gen7+, 128KiB on gen4+ for remappable fbs. */
	if (!is_ccs_modifier(modifier)) {
		if (INTEL_GEN(dev_priv) >= 7)
			return 256*1024;
		else if (INTEL_GEN(dev_priv) >= 4)
			return 128*1024;
	}

	return intel_plane_fb_max_stride(dev_priv, pixel_format, modifier);
}
2773
/*
 * Required stride alignment (bytes) for @color_plane of @fb.
 */
static u32
intel_fb_stride_alignment(const struct drm_framebuffer *fb, int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);
	u32 tile_width;

	if (is_surface_linear(fb, color_plane)) {
		u32 max_stride = intel_plane_fb_max_stride(dev_priv,
							   fb->format->format,
							   fb->modifier);

		/*
		 * Strides beyond the plane hw limit are only usable via
		 * GTT remapping, which needs page-aligned pitches;
		 * otherwise the legacy 64 byte alignment suffices.
		 */
		if (fb->pitches[color_plane] > max_stride &&
		    !is_ccs_modifier(fb->modifier))
			return intel_tile_size(dev_priv);
		else
			return 64;
	}

	tile_width = intel_tile_width_bytes(fb, color_plane);
	if (is_ccs_modifier(fb->modifier)) {
		/*
		 * Gen9 main surfaces of wide (>3840px) CCS fbs need
		 * quad-tile stride alignment — NOTE(review): presumably
		 * a display workaround; confirm against Bspec.
		 */
		if (IS_GEN(dev_priv, 9) && color_plane == 0 && fb->width > 3840)
			tile_width *= 4;
		/*
		 * Gen12+ CCS strides are rounded up to 4 tiles — TODO
		 * confirm the exact hw requirement this encodes.
		 */
		else if (INTEL_GEN(dev_priv) >= 12)
			tile_width *= 4;
	}
	return tile_width;
}
2818
/*
 * Whether this plane's fb could be remapped through a GGTT view (to
 * work around plane stride limits).
 */
bool intel_plane_can_remap(const struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	int i;

	/* Cursor planes are never remapped. */
	if (plane->id == PLANE_CURSOR)
		return false;

	/*
	 * NOTE(review): pre-gen4 display hw is excluded — presumably its
	 * plane offset handling can't cope with a remapped view; confirm
	 * before extending to older gens.
	 */
	if (INTEL_GEN(dev_priv) < 4)
		return false;

	/*
	 * CCS fbs are never remapped — NOTE(review): the AUX surface
	 * layout appears tied to the main surface GTT layout; the exact
	 * hw constraint isn't visible here.
	 */
	if (is_ccs_modifier(fb->modifier))
		return false;

	/* Linear fbs can only be remapped with page-aligned pitches. */
	if (fb->modifier == DRM_FORMAT_MOD_LINEAR) {
		unsigned int alignment = intel_tile_size(dev_priv) - 1;

		for (i = 0; i < fb->format->num_planes; i++) {
			if (fb->pitches[i] & alignment)
				return false;
		}
	}

	return true;
}
2858
/*
 * Whether the plane must use a remapped GGTT view: only when the fb
 * stride exceeds the plane hw limit and remapping is possible at all.
 */
static bool intel_plane_needs_remap(const struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	unsigned int rotation = plane_state->hw.rotation;
	u32 stride, max_stride;

	/* Invisible planes scan nothing out, so no remapping is needed. */
	if (!plane_state->uapi.visible)
		return false;

	if (!intel_plane_can_remap(plane_state))
		return false;

	/*
	 * Remap only when the stride actually exceeds what the plane
	 * supports for this format/modifier/rotation.
	 */
	stride = intel_fb_pitch(fb, 0, rotation);
	max_stride = plane->max_stride(plane, fb->format->format,
				       fb->modifier, rotation);

	return stride > max_stride;
}
2886
/*
 * Return the horizontal/vertical subsampling factors of @color_plane
 * relative to the plane it is associated with.
 */
static void
intel_fb_plane_get_subsampling(int *hsub, int *vsub,
			       const struct drm_framebuffer *fb,
			       int color_plane)
{
	int main_plane;

	/* The main plane is never subsampled. */
	if (color_plane == 0) {
		*hsub = 1;
		*vsub = 1;

		return;
	}

	/*
	 * Regular extra planes (e.g. chroma) use the format's own
	 * subsampling factors.
	 */
	if (!is_gen12_ccs_plane(fb, color_plane)) {
		*hsub = fb->format->hsub;
		*vsub = fb->format->vsub;

		return;
	}

	/*
	 * Gen12 CCS planes: derive hsub from the block width of the AUX
	 * plane versus the main plane it describes.
	 */
	main_plane = ccs_to_main_plane(fb, color_plane);
	*hsub = drm_format_info_block_width(fb->format, color_plane) /
		drm_format_info_block_width(fb->format, main_plane);

	/*
	 * The AUX plane of the luma surface additionally inherits the
	 * format's horizontal subsampling — NOTE(review): the rationale
	 * isn't visible here; confirm against the gen12 CCS layout docs.
	 */
	if (main_plane == 0)
		*hsub *= fb->format->hsub;

	/* Vertical subsampling is a fixed 32 for gen12 CCS planes. */
	*vsub = 32;
}
/*
 * Verify that the intra-tile (x, y) offset of a CCS plane matches that
 * of its main plane (after scaling by the subsampling factors), so the
 * AUX data lines up with the pixels it describes.  Returns 0 when they
 * match (or when @ccs_plane isn't a CCS plane), -EINVAL on mismatch.
 */
static int
intel_fb_check_ccs_xy(struct drm_framebuffer *fb, int ccs_plane, int x, int y)
{
	struct drm_i915_private *i915 = to_i915(fb->dev);
	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
	int main_plane;
	int hsub, vsub;
	int tile_width, tile_height;
	int ccs_x, ccs_y;
	int main_x, main_y;

	if (!is_ccs_plane(fb, ccs_plane))
		return 0;

	intel_tile_dims(fb, ccs_plane, &tile_width, &tile_height);
	intel_fb_plane_get_subsampling(&hsub, &vsub, fb, ccs_plane);

	/* Scale the CCS tile dimensions up to main surface pixel units. */
	tile_width *= hsub;
	tile_height *= vsub;

	ccs_x = (x * hsub) % tile_width;
	ccs_y = (y * vsub) % tile_height;

	main_plane = ccs_to_main_plane(fb, ccs_plane);
	main_x = intel_fb->normal[main_plane].x % tile_width;
	main_y = intel_fb->normal[main_plane].y % tile_height;

	/* The intra-tile offsets of the main and CCS planes must agree. */
	if (main_x != ccs_x || main_y != ccs_y) {
		drm_dbg_kms(&i915->drm,
			    "Bad CCS x/y (main %d,%d ccs %d,%d) full (main %d,%d ccs %d,%d)\n",
			    main_x, main_y,
			    ccs_x, ccs_y,
			    intel_fb->normal[main_plane].x,
			    intel_fb->normal[main_plane].y,
			    x, y);
		return -EINVAL;
	}

	return 0;
}
2973
2974static void
2975intel_fb_plane_dims(int *w, int *h, struct drm_framebuffer *fb, int color_plane)
2976{
2977 int main_plane = is_ccs_plane(fb, color_plane) ?
2978 ccs_to_main_plane(fb, color_plane) : 0;
2979 int main_hsub, main_vsub;
2980 int hsub, vsub;
2981
2982 intel_fb_plane_get_subsampling(&main_hsub, &main_vsub, fb, main_plane);
2983 intel_fb_plane_get_subsampling(&hsub, &vsub, fb, color_plane);
2984 *w = fb->width / main_hsub / hsub;
2985 *h = fb->height / main_vsub / vsub;
2986}
2987
2988
2989
2990
2991
/*
 * Record the 90/270-degree rotation info for fb plane @plane and
 * compute where the plane's data starts inside the rotated GTT view.
 * Only Y/Yf tiled modifiers participate; for anything else 0 is
 * returned and no rotated view is set up.  Returns the number of
 * tiles this plane occupies in the rotated view.
 */
static u32
setup_fb_rotation(int plane, const struct intel_remapped_plane_info *plane_info,
		  u32 gtt_offset_rotated, int x, int y,
		  unsigned int width, unsigned int height,
		  unsigned int tile_size,
		  unsigned int tile_width, unsigned int tile_height,
		  struct drm_framebuffer *fb)
{
	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
	struct intel_rotation_info *rot_info = &intel_fb->rot_info;
	unsigned int pitch_tiles;
	struct drm_rect r;

	/* Only Y/Yf tiled fbs support 90/270 rotation. */
	if (fb->modifier != I915_FORMAT_MOD_Y_TILED &&
	    fb->modifier != I915_FORMAT_MOD_Yf_TILED)
		return 0;

	if (drm_WARN_ON(fb->dev, plane >= ARRAY_SIZE(rot_info->plane)))
		return 0;

	rot_info->plane[plane] = *plane_info;

	/* In the rotated view, a tile column becomes a row. */
	intel_fb->rotated[plane].pitch = plane_info->height * tile_height;

	/* Rotate the x/y offsets to match the GTT view. */
	drm_rect_init(&r, x, y, width, height);
	drm_rect_rotate(&r,
			plane_info->width * tile_width,
			plane_info->height * tile_height,
			DRM_MODE_ROTATE_270);
	x = r.x1;
	y = r.y1;

	/* Rotate the tile dimensions to match the GTT view. */
	pitch_tiles = intel_fb->rotated[plane].pitch / tile_height;
	swap(tile_width, tile_height);

	/*
	 * Only the x/y offsets are kept, so push all of the gtt offset
	 * into them.
	 */
	intel_adjust_tile_offset(&x, &y,
				 tile_width, tile_height,
				 tile_size, pitch_tiles,
				 gtt_offset_rotated * tile_size, 0);

	/*
	 * First pixel of the framebuffer from the start of the rotated
	 * gtt mapping.
	 */
	intel_fb->rotated[plane].x = x;
	intel_fb->rotated[plane].y = y;

	return plane_info->width * plane_info->height;
}
3048
/*
 * Precompute, for each fb color plane, its normal-view (x, y) start
 * coordinates and (for Y/Yf tiled fbs) the rotated-view layout, while
 * validating offsets/pitches against the backing object size.
 * Returns 0 on success or a negative errno.
 */
static int
intel_fill_fb_info(struct drm_i915_private *dev_priv,
		   struct drm_framebuffer *fb)
{
	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
	u32 gtt_offset_rotated = 0;
	unsigned int max_size = 0;
	int i, num_planes = fb->format->num_planes;
	unsigned int tile_size = intel_tile_size(dev_priv);

	for (i = 0; i < num_planes; i++) {
		unsigned int width, height;
		unsigned int cpp, size;
		u32 offset;
		int x, y;
		int ret;

		cpp = fb->format->cpp[i];
		intel_fb_plane_dims(&width, &height, fb, i);

		ret = intel_fb_offset_to_xy(&x, &y, fb, i);
		if (ret) {
			drm_dbg_kms(&dev_priv->drm,
				    "bad fb plane %d offset: 0x%x\n",
				    i, fb->offsets[i]);
			return ret;
		}

		ret = intel_fb_check_ccs_xy(fb, i, x, y);
		if (ret)
			return ret;

		/*
		 * For tiled objects the first row of pixels must fit
		 * within the declared pitch — NOTE(review): the original
		 * rationale (stripped) presumably concerns fencing not
		 * coping with the fb wrapping past the row end; confirm
		 * via git history.
		 */
		if (i == 0 && i915_gem_object_is_tiled(obj) &&
		    (x + width) * cpp > fb->pitches[i]) {
			drm_dbg_kms(&dev_priv->drm,
				    "bad fb plane %d offset: 0x%x\n",
				    i, fb->offsets[i]);
			return -EINVAL;
		}

		/*
		 * First pixel of the framebuffer from the start of the
		 * normal gtt mapping.
		 */
		intel_fb->normal[i].x = x;
		intel_fb->normal[i].y = y;

		offset = intel_compute_aligned_offset(dev_priv, &x, &y, fb, i,
						      fb->pitches[i],
						      DRM_MODE_ROTATE_0,
						      tile_size);
		offset /= tile_size;

		if (!is_surface_linear(fb, i)) {
			struct intel_remapped_plane_info plane_info;
			unsigned int tile_width, tile_height;

			intel_tile_dims(fb, i, &tile_width, &tile_height);

			plane_info.offset = offset;
			plane_info.stride = DIV_ROUND_UP(fb->pitches[i],
							 tile_width * cpp);
			plane_info.width = DIV_ROUND_UP(x + width, tile_width);
			plane_info.height = DIV_ROUND_UP(y + height,
							 tile_height);

			/* How many tiles this plane needs. */
			size = plane_info.stride * plane_info.height;

			/*
			 * If the plane isn't horizontally tile aligned,
			 * one extra tile is needed.
			 */
			if (x != 0)
				size++;

			gtt_offset_rotated +=
				setup_fb_rotation(i, &plane_info,
						  gtt_offset_rotated,
						  x, y, width, height,
						  tile_size,
						  tile_width, tile_height,
						  fb);
		} else {
			/* Linear plane size, in whole tiles. */
			size = DIV_ROUND_UP((y + height) * fb->pitches[i] +
					    x * cpp, tile_size);
		}

		/* Track the total tile footprint needed in the bo. */
		max_size = max(max_size, offset + size);
	}

	if (mul_u32_u32(max_size, tile_size) > obj->base.size) {
		drm_dbg_kms(&dev_priv->drm,
			    "fb too big for bo (need %llu bytes, have %zu bytes)\n",
			    mul_u32_u32(max_size, tile_size), obj->base.size);
		return -EINVAL;
	}

	return 0;
}
3159
/*
 * Build a remapped (or, for 90/270 rotation, rotated) GGTT view for
 * the plane so the stride seen by the display hw stays within limits,
 * rewriting the plane's color_plane[] stride/x/y and the uapi src
 * rectangle to match the new view.
 */
static void
intel_plane_remap_gtt(struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->uapi.plane->dev);
	struct drm_framebuffer *fb = plane_state->hw.fb;
	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
	struct intel_rotation_info *info = &plane_state->view.rotated;
	unsigned int rotation = plane_state->hw.rotation;
	int i, num_planes = fb->format->num_planes;
	unsigned int tile_size = intel_tile_size(dev_priv);
	unsigned int src_x, src_y;
	unsigned int src_w, src_h;
	u32 gtt_offset = 0;

	memset(&plane_state->view, 0, sizeof(plane_state->view));
	plane_state->view.type = drm_rotation_90_or_270(rotation) ?
		I915_GGTT_VIEW_ROTATED : I915_GGTT_VIEW_REMAPPED;

	src_x = plane_state->uapi.src.x1 >> 16;
	src_y = plane_state->uapi.src.y1 >> 16;
	src_w = drm_rect_width(&plane_state->uapi.src) >> 16;
	src_h = drm_rect_height(&plane_state->uapi.src) >> 16;

	/* CCS fbs must never reach here (see intel_plane_can_remap()). */
	drm_WARN_ON(&dev_priv->drm, is_ccs_modifier(fb->modifier));

	/* Make src coordinates relative to the viewport. */
	drm_rect_translate(&plane_state->uapi.src,
			   -(src_x << 16), -(src_y << 16));

	/* Rotate src coordinates to match the rotated GTT view. */
	if (drm_rotation_90_or_270(rotation))
		drm_rect_rotate(&plane_state->uapi.src,
				src_w << 16, src_h << 16,
				DRM_MODE_ROTATE_270);

	for (i = 0; i < num_planes; i++) {
		unsigned int hsub = i ? fb->format->hsub : 1;
		unsigned int vsub = i ? fb->format->vsub : 1;
		unsigned int cpp = fb->format->cpp[i];
		unsigned int tile_width, tile_height;
		unsigned int width, height;
		unsigned int pitch_tiles;
		unsigned int x, y;
		u32 offset;

		intel_tile_dims(fb, i, &tile_width, &tile_height);

		x = src_x / hsub;
		y = src_y / vsub;
		width = src_w / hsub;
		height = src_h / vsub;

		/*
		 * First pixel of the src viewport from the start of the
		 * normal gtt mapping.
		 */
		x += intel_fb->normal[i].x;
		y += intel_fb->normal[i].y;

		offset = intel_compute_aligned_offset(dev_priv, &x, &y,
						      fb, i, fb->pitches[i],
						      DRM_MODE_ROTATE_0, tile_size);
		offset /= tile_size;

		drm_WARN_ON(&dev_priv->drm, i >= ARRAY_SIZE(info->plane));
		info->plane[i].offset = offset;
		info->plane[i].stride = DIV_ROUND_UP(fb->pitches[i],
						     tile_width * cpp);
		info->plane[i].width = DIV_ROUND_UP(x + width, tile_width);
		info->plane[i].height = DIV_ROUND_UP(y + height, tile_height);

		if (drm_rotation_90_or_270(rotation)) {
			struct drm_rect r;

			/* Rotate the x/y offsets to match the GTT view. */
			drm_rect_init(&r, x, y, width, height);
			drm_rect_rotate(&r,
					info->plane[i].width * tile_width,
					info->plane[i].height * tile_height,
					DRM_MODE_ROTATE_270);
			x = r.x1;
			y = r.y1;

			pitch_tiles = info->plane[i].height;
			plane_state->color_plane[i].stride = pitch_tiles * tile_height;

			/* Rotate the tile dimensions to match the GTT view. */
			swap(tile_width, tile_height);
		} else {
			pitch_tiles = info->plane[i].width;
			plane_state->color_plane[i].stride = pitch_tiles * tile_width * cpp;
		}

		/*
		 * Only the x/y offsets are kept, so push all of the gtt
		 * offset into them.
		 */
		intel_adjust_tile_offset(&x, &y,
					 tile_width, tile_height,
					 tile_size, pitch_tiles,
					 gtt_offset * tile_size, 0);

		gtt_offset += info->plane[i].width * info->plane[i].height;

		/*
		 * First pixel of the src viewport from the start of the
		 * remapped/rotated gtt mapping.
		 */
		plane_state->color_plane[i].offset = 0;
		plane_state->color_plane[i].x = x;
		plane_state->color_plane[i].y = y;
	}
}
3270
/*
 * Compute the plane's GGTT view and the per-color-plane stride/x/y,
 * remapping the fb through a rotated/remapped view when needed.
 * Returns 0 on success or the error from the stride check.
 */
static int
intel_plane_compute_gtt(struct intel_plane_state *plane_state)
{
	const struct intel_framebuffer *fb =
		to_intel_framebuffer(plane_state->hw.fb);
	unsigned int rotation = plane_state->hw.rotation;
	int i, num_planes;

	if (!fb)
		return 0;

	num_planes = fb->base.format->num_planes;

	if (intel_plane_needs_remap(plane_state)) {
		intel_plane_remap_gtt(plane_state);

		/* The remapped view must still satisfy the stride limits. */
		return intel_plane_check_stride(plane_state);
	}

	intel_fill_fb_ggtt_view(&plane_state->view, &fb->base, rotation);

	for (i = 0; i < num_planes; i++) {
		plane_state->color_plane[i].stride = intel_fb_pitch(&fb->base, i, rotation);
		plane_state->color_plane[i].offset = 0;

		/* Use the precomputed rotated or normal fb coordinates. */
		if (drm_rotation_90_or_270(rotation)) {
			plane_state->color_plane[i].x = fb->rotated[i].x;
			plane_state->color_plane[i].y = fb->rotated[i].y;
		} else {
			plane_state->color_plane[i].x = fb->normal[i].x;
			plane_state->color_plane[i].y = fb->normal[i].y;
		}
	}

	/* Rotate src coordinates to match the rotated GTT view. */
	if (drm_rotation_90_or_270(rotation))
		drm_rect_rotate(&plane_state->uapi.src,
				fb->base.width << 16, fb->base.height << 16,
				DRM_MODE_ROTATE_270);

	return intel_plane_check_stride(plane_state);
}
3319
/*
 * Translate a DISPPLANE_* hw pixel format value to the corresponding
 * drm fourcc.  Unrecognized values share the DISPPLANE_BGRX888 case
 * (the default label sits on it) and thus map to XRGB8888.
 */
static int i9xx_format_to_fourcc(int format)
{
	switch (format) {
	case DISPPLANE_8BPP:
		return DRM_FORMAT_C8;
	case DISPPLANE_BGRA555:
		return DRM_FORMAT_ARGB1555;
	case DISPPLANE_BGRX555:
		return DRM_FORMAT_XRGB1555;
	case DISPPLANE_BGRX565:
		return DRM_FORMAT_RGB565;
	default:
	case DISPPLANE_BGRX888:
		return DRM_FORMAT_XRGB8888;
	case DISPPLANE_RGBX888:
		return DRM_FORMAT_XBGR8888;
	case DISPPLANE_BGRA888:
		return DRM_FORMAT_ARGB8888;
	case DISPPLANE_RGBA888:
		return DRM_FORMAT_ABGR8888;
	case DISPPLANE_BGRX101010:
		return DRM_FORMAT_XRGB2101010;
	case DISPPLANE_RGBX101010:
		return DRM_FORMAT_XBGR2101010;
	case DISPPLANE_BGRA101010:
		return DRM_FORMAT_ARGB2101010;
	case DISPPLANE_RGBA101010:
		return DRM_FORMAT_ABGR2101010;
	case DISPPLANE_RGBX161616:
		return DRM_FORMAT_XBGR16161616F;
	}
}
3352
/*
 * Translate a PLANE_CTL_FORMAT_* value plus the rgb_order and alpha
 * control bits to the corresponding drm fourcc.  @rgb_order selects
 * the BGR-channel-order fourccs, @alpha the per-pixel-alpha variants.
 * Unrecognized formats share the PLANE_CTL_FORMAT_XRGB_8888 case.
 */
int skl_format_to_fourcc(int format, bool rgb_order, bool alpha)
{
	switch (format) {
	case PLANE_CTL_FORMAT_RGB_565:
		return DRM_FORMAT_RGB565;
	case PLANE_CTL_FORMAT_NV12:
		return DRM_FORMAT_NV12;
	case PLANE_CTL_FORMAT_XYUV:
		return DRM_FORMAT_XYUV8888;
	case PLANE_CTL_FORMAT_P010:
		return DRM_FORMAT_P010;
	case PLANE_CTL_FORMAT_P012:
		return DRM_FORMAT_P012;
	case PLANE_CTL_FORMAT_P016:
		return DRM_FORMAT_P016;
	case PLANE_CTL_FORMAT_Y210:
		return DRM_FORMAT_Y210;
	case PLANE_CTL_FORMAT_Y212:
		return DRM_FORMAT_Y212;
	case PLANE_CTL_FORMAT_Y216:
		return DRM_FORMAT_Y216;
	case PLANE_CTL_FORMAT_Y410:
		return DRM_FORMAT_XVYU2101010;
	case PLANE_CTL_FORMAT_Y412:
		return DRM_FORMAT_XVYU12_16161616;
	case PLANE_CTL_FORMAT_Y416:
		return DRM_FORMAT_XVYU16161616;
	default:
	case PLANE_CTL_FORMAT_XRGB_8888:
		if (rgb_order) {
			if (alpha)
				return DRM_FORMAT_ABGR8888;
			else
				return DRM_FORMAT_XBGR8888;
		} else {
			if (alpha)
				return DRM_FORMAT_ARGB8888;
			else
				return DRM_FORMAT_XRGB8888;
		}
	case PLANE_CTL_FORMAT_XRGB_2101010:
		if (rgb_order) {
			if (alpha)
				return DRM_FORMAT_ABGR2101010;
			else
				return DRM_FORMAT_XBGR2101010;
		} else {
			if (alpha)
				return DRM_FORMAT_ARGB2101010;
			else
				return DRM_FORMAT_XRGB2101010;
		}
	case PLANE_CTL_FORMAT_XRGB_16161616F:
		if (rgb_order) {
			if (alpha)
				return DRM_FORMAT_ABGR16161616F;
			else
				return DRM_FORMAT_XBGR16161616F;
		} else {
			if (alpha)
				return DRM_FORMAT_ARGB16161616F;
			else
				return DRM_FORMAT_XRGB16161616F;
		}
	}
}
3419
/*
 * Wrap the firmware/BIOS-programmed scanout buffer (preallocated in
 * stolen memory) in a GEM object and pin it into the GGTT at its
 * current address, so the boot framebuffer keeps being scanned out.
 * Returns NULL on any failure; the caller then falls back to reusing
 * another crtc's fb.
 */
static struct i915_vma *
initial_plane_vma(struct drm_i915_private *i915,
		  struct intel_initial_plane_config *plane_config)
{
	struct drm_i915_gem_object *obj;
	struct i915_vma *vma;
	u32 base, size;

	if (plane_config->size == 0)
		return NULL;

	/* Expand the range to minimum GTT alignment boundaries. */
	base = round_down(plane_config->base,
			  I915_GTT_MIN_ALIGNMENT);
	size = round_up(plane_config->base + plane_config->size,
			I915_GTT_MIN_ALIGNMENT);
	size -= base;

	/*
	 * Refuse to take over a BIOS fb that would consume more than
	 * half of usable stolen memory.
	 */
	if (size * 2 > i915->stolen_usable_size)
		return NULL;

	obj = i915_gem_object_create_stolen_for_preallocated(i915, base, size);
	if (IS_ERR(obj))
		return NULL;

	/*
	 * Set the scanout object's cache coherency up front:
	 * write-through where the hw supports it, uncached otherwise.
	 */
	i915_gem_object_set_cache_coherency(obj, HAS_WT(i915) ?
					    I915_CACHE_WT : I915_CACHE_NONE);

	switch (plane_config->tiling) {
	case I915_TILING_NONE:
		break;
	case I915_TILING_X:
	case I915_TILING_Y:
		/* Propagate the BIOS fb's tiling and stride to the object. */
		obj->tiling_and_stride =
			plane_config->fb->base.pitches[0] |
			plane_config->tiling;
		break;
	default:
		MISSING_CASE(plane_config->tiling);
		goto err_obj;
	}

	vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL);
	if (IS_ERR(vma))
		goto err_obj;

	/* Pin at the exact GGTT address the BIOS left the fb at. */
	if (i915_ggtt_pin(vma, NULL, 0, PIN_MAPPABLE | PIN_OFFSET_FIXED | base))
		goto err_obj;

	/* A tiled scanout buffer must be mappable and fenceable. */
	if (i915_gem_object_is_tiled(obj) &&
	    !i915_vma_is_map_and_fenceable(vma))
		goto err_obj;

	return vma;

err_obj:
	i915_gem_object_put(obj);
	return NULL;
}
3488
/*
 * Build a drm framebuffer around the BIOS-programmed plane, if its
 * modifier is one we can take over (linear, X or Y tiled).  On
 * success, stores the pinned vma in @plane_config and returns true.
 */
static bool
intel_alloc_initial_plane_obj(struct intel_crtc *crtc,
			      struct intel_initial_plane_config *plane_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_mode_fb_cmd2 mode_cmd = { 0 };
	struct drm_framebuffer *fb = &plane_config->fb->base;
	struct i915_vma *vma;

	switch (fb->modifier) {
	case DRM_FORMAT_MOD_LINEAR:
	case I915_FORMAT_MOD_X_TILED:
	case I915_FORMAT_MOD_Y_TILED:
		break;
	default:
		drm_dbg(&dev_priv->drm,
			"Unsupported modifier for initial FB: 0x%llx\n",
			fb->modifier);
		return false;
	}

	vma = initial_plane_vma(dev_priv, plane_config);
	if (!vma)
		return false;

	/* Rebuild an fb creation request from the readout state. */
	mode_cmd.pixel_format = fb->format->format;
	mode_cmd.width = fb->width;
	mode_cmd.height = fb->height;
	mode_cmd.pitches[0] = fb->pitches[0];
	mode_cmd.modifier[0] = fb->modifier;
	mode_cmd.flags = DRM_MODE_FB_MODIFIERS;

	if (intel_framebuffer_init(to_intel_framebuffer(fb),
				   vma->obj, &mode_cmd)) {
		drm_dbg_kms(&dev_priv->drm, "intel fb init failed\n");
		goto err_vma;
	}

	plane_config->vma = vma;
	return true;

err_vma:
	i915_vma_put(vma);
	return false;
}
3535
3536static void
3537intel_set_plane_visible(struct intel_crtc_state *crtc_state,
3538 struct intel_plane_state *plane_state,
3539 bool visible)
3540{
3541 struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
3542
3543 plane_state->uapi.visible = visible;
3544
3545 if (visible)
3546 crtc_state->uapi.plane_mask |= drm_plane_mask(&plane->base);
3547 else
3548 crtc_state->uapi.plane_mask &= ~drm_plane_mask(&plane->base);
3549}
3550
3551static void fixup_active_planes(struct intel_crtc_state *crtc_state)
3552{
3553 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
3554 struct drm_plane *plane;
3555
3556
3557
3558
3559
3560
3561 crtc_state->active_planes = 0;
3562
3563 drm_for_each_plane_mask(plane, &dev_priv->drm,
3564 crtc_state->uapi.plane_mask)
3565 crtc_state->active_planes |= BIT(to_intel_plane(plane)->id);
3566}
3567
/*
 * Disable @plane on @crtc outside of the atomic commit machinery and
 * fix up the software state (visibility, active_planes, data rate,
 * min cdclk) to match.
 */
static void intel_plane_disable_noatomic(struct intel_crtc *crtc,
					 struct intel_plane *plane)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_crtc_state *crtc_state =
		to_intel_crtc_state(crtc->base.state);
	struct intel_plane_state *plane_state =
		to_intel_plane_state(plane->base.state);

	drm_dbg_kms(&dev_priv->drm,
		    "Disabling [PLANE:%d:%s] on [CRTC:%d:%s]\n",
		    plane->base.base.id, plane->base.name,
		    crtc->base.base.id, crtc->base.name);

	intel_set_plane_visible(crtc_state, plane_state, false);
	fixup_active_planes(crtc_state);
	crtc_state->data_rate[plane->id] = 0;
	crtc_state->min_cdclk[plane->id] = 0;

	/* IPS is handled together with the primary plane on HSW. */
	if (plane->id == PLANE_PRIMARY)
		hsw_disable_ips(crtc_state);

	/*
	 * On GMCH platforms, leave memory self-refresh before disabling
	 * the plane and wait a vblank for the change to take effect —
	 * NOTE(review): the original rationale comment was stripped;
	 * see intel_set_memory_cxsr() for the watermark implications.
	 */
	if (HAS_GMCH(dev_priv) &&
	    intel_set_memory_cxsr(dev_priv, false))
		intel_wait_for_vblank(dev_priv, crtc->pipe);

	/*
	 * Gen2: disable underrun reporting before the last plane on the
	 * pipe goes away (the check runs after fixup_active_planes()).
	 */
	if (IS_GEN(dev_priv, 2) && !crtc_state->active_planes)
		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, false);

	intel_disable_plane(plane, crtc_state);
}
3612
3613static struct intel_frontbuffer *
3614to_intel_frontbuffer(struct drm_framebuffer *fb)
3615{
3616 return fb ? to_intel_framebuffer(fb)->frontbuffer : NULL;
3617}
3618
/*
 * Try to take over the framebuffer the BIOS left enabled on @intel_crtc's
 * primary plane.  If we cannot wrap it in our own fb/vma, try to share an
 * fb already reconstructed for another crtc (cloned configs); failing
 * that, disable the plane so we don't keep scanning out an fb we don't
 * track.
 */
static void
intel_find_initial_plane_obj(struct intel_crtc *intel_crtc,
			     struct intel_initial_plane_config *plane_config)
{
	struct drm_device *dev = intel_crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_crtc *c;
	struct drm_plane *primary = intel_crtc->base.primary;
	struct drm_plane_state *plane_state = primary->state;
	struct intel_plane *intel_plane = to_intel_plane(primary);
	struct intel_plane_state *intel_state =
		to_intel_plane_state(plane_state);
	struct intel_crtc_state *crtc_state =
		to_intel_crtc_state(intel_crtc->base.state);
	struct drm_framebuffer *fb;
	struct i915_vma *vma;

	/* Nothing readout from hw for this crtc -> nothing to take over. */
	if (!plane_config->fb)
		return;

	if (intel_alloc_initial_plane_obj(intel_crtc, plane_config)) {
		fb = &plane_config->fb->base;
		vma = plane_config->vma;
		goto valid_fb;
	}

	/*
	 * Failed to alloc the obj, check to see if we should share
	 * an fb with another CRTC instead.
	 */
	for_each_crtc(dev, c) {
		struct intel_plane_state *state;

		if (c == &intel_crtc->base)
			continue;

		if (!to_intel_crtc_state(c->state)->uapi.active)
			continue;

		state = to_intel_plane_state(c->primary->state);
		if (!state->vma)
			continue;

		/* Same GGTT address -> the BIOS cloned the fb across crtcs. */
		if (intel_plane_ggtt_offset(state) == plane_config->base) {
			fb = state->hw.fb;
			vma = state->vma;
			goto valid_fb;
		}
	}

	/*
	 * We've failed to reconstruct the BIOS FB.  Current display state
	 * indicates that the primary plane is visible, but has a NULL FB,
	 * which will lead to problems later if we don't fix it up.  The
	 * simplest solution is to just disable the primary plane now and
	 * pretend the BIOS never had it enabled.
	 */
	intel_plane_disable_noatomic(intel_crtc, intel_plane);
	if (crtc_state->bigjoiner) {
		struct intel_crtc *slave =
			crtc_state->bigjoiner_linked_crtc;
		/* The bigjoiner slave's plane must go down with the master's. */
		intel_plane_disable_noatomic(slave, to_intel_plane(slave->base.primary));
	}

	return;

valid_fb:
	intel_state->hw.rotation = plane_config->rotation;
	intel_fill_fb_ggtt_view(&intel_state->view, fb,
				intel_state->hw.rotation);
	intel_state->color_plane[0].stride =
		intel_fb_pitch(fb, 0, intel_state->hw.rotation);

	/* vma is already pinned/bound from readout; take extra refs for us. */
	__i915_vma_pin(vma);
	intel_state->vma = i915_vma_get(vma);
	if (intel_plane_uses_fence(intel_state) && i915_vma_pin_fence(vma) == 0)
		if (vma->fence)
			intel_state->flags |= PLANE_HAS_FENCE;

	/* Full-screen scanout of the whole fb, no panning. */
	plane_state->src_x = 0;
	plane_state->src_y = 0;
	plane_state->src_w = fb->width << 16;
	plane_state->src_h = fb->height << 16;

	plane_state->crtc_x = 0;
	plane_state->crtc_y = 0;
	plane_state->crtc_w = fb->width;
	plane_state->crtc_h = fb->height;

	intel_state->uapi.src = drm_plane_state_src(plane_state);
	intel_state->uapi.dst = drm_plane_state_dest(plane_state);

	/* Keep the BIOS swizzle settings if the fb was tiled by the BIOS. */
	if (plane_config->tiling)
		dev_priv->preserve_bios_swizzle = true;

	plane_state->fb = fb;
	drm_framebuffer_get(fb);

	plane_state->crtc = &intel_crtc->base;
	intel_plane_copy_uapi_to_hw_state(intel_state, intel_state,
					  intel_crtc);

	intel_frontbuffer_flush(to_intel_frontbuffer(fb), ORIGIN_DIRTYFB);

	/* Mark the plane as scanning out of this frontbuffer. */
	atomic_or(to_intel_plane(primary)->frontbuffer_bit,
		  &to_intel_frontbuffer(fb)->bits);
}
3726
3727
/*
 * Walk the CCS aux surface offset backwards (one alignment step at a
 * time) trying to make the aux plane's x/y match the main surface's
 * x/y (@main_x, @main_y).  The CCS aux plane has no independent x/y
 * offset registers, so the coordinates must agree.  Returns true and
 * commits the new aux offset/x/y on success, false if no match could
 * be found.
 */
static bool
skl_check_main_ccs_coordinates(struct intel_plane_state *plane_state,
			       int main_x, int main_y, u32 main_offset,
			       int ccs_plane)
{
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	int aux_x = plane_state->color_plane[ccs_plane].x;
	int aux_y = plane_state->color_plane[ccs_plane].y;
	u32 aux_offset = plane_state->color_plane[ccs_plane].offset;
	u32 alignment = intel_surf_alignment(fb, ccs_plane);
	int hsub;
	int vsub;

	intel_fb_plane_get_subsampling(&hsub, &vsub, fb, ccs_plane);
	/*
	 * Only walk while the aux surface still lies at/after the main
	 * surface and hasn't passed it vertically; stop at offset 0.
	 */
	while (aux_offset >= main_offset && aux_y <= main_y) {
		int x, y;

		if (aux_x == main_x && aux_y == main_y)
			break;

		if (aux_offset == 0)
			break;

		/* Adjust in aux-plane units, preserving the subpixel remainder. */
		x = aux_x / hsub;
		y = aux_y / vsub;
		aux_offset = intel_plane_adjust_aligned_offset(&x, &y,
							       plane_state,
							       ccs_plane,
							       aux_offset,
							       aux_offset -
							       alignment);
		aux_x = x * hsub + aux_x % hsub;
		aux_y = y * vsub + aux_y % vsub;
	}

	if (aux_x != main_x || aux_y != main_y)
		return false;

	plane_state->color_plane[ccs_plane].offset = aux_offset;
	plane_state->color_plane[ccs_plane].x = aux_x;
	plane_state->color_plane[ccs_plane].y = aux_y;

	return true;
}
3772
/*
 * intel_plane_fence_y_offset - return the Y coordinate, within the
 * fenced area, corresponding to the plane's surface offset.
 * @plane_state: plane state to query
 *
 * Converts color_plane[0].offset back into an (x, y) pair relative to
 * offset 0 and returns the y component.
 */
unsigned int
intel_plane_fence_y_offset(const struct intel_plane_state *plane_state)
{
	int x = 0, y = 0;

	intel_plane_adjust_aligned_offset(&x, &y, plane_state, 0,
					  plane_state->color_plane[0].offset, 0);

	return y;
}
3783
3784static int intel_plane_min_width(struct intel_plane *plane,
3785 const struct drm_framebuffer *fb,
3786 int color_plane,
3787 unsigned int rotation)
3788{
3789 if (plane->min_width)
3790 return plane->min_width(fb, color_plane, rotation);
3791 else
3792 return 1;
3793}
3794
3795static int intel_plane_max_width(struct intel_plane *plane,
3796 const struct drm_framebuffer *fb,
3797 int color_plane,
3798 unsigned int rotation)
3799{
3800 if (plane->max_width)
3801 return plane->max_width(fb, color_plane, rotation);
3802 else
3803 return INT_MAX;
3804}
3805
3806static int intel_plane_max_height(struct intel_plane *plane,
3807 const struct drm_framebuffer *fb,
3808 int color_plane,
3809 unsigned int rotation)
3810{
3811 if (plane->max_height)
3812 return plane->max_height(fb, color_plane, rotation);
3813 else
3814 return INT_MAX;
3815}
3816
/*
 * Validate and finalize the main (Y/RGB) surface offset/x/y for a skl+
 * plane, honoring size limits, surface alignment, X-tile stride
 * restrictions and CCS aux coordinate matching.  Must run after the aux
 * surfaces have been set up (their offsets constrain the main offset).
 */
static int skl_check_main_surface(struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	unsigned int rotation = plane_state->hw.rotation;
	int x = plane_state->uapi.src.x1 >> 16;
	int y = plane_state->uapi.src.y1 >> 16;
	int w = drm_rect_width(&plane_state->uapi.src) >> 16;
	int h = drm_rect_height(&plane_state->uapi.src) >> 16;
	int min_width = intel_plane_min_width(plane, fb, 0, rotation);
	int max_width = intel_plane_max_width(plane, fb, 0, rotation);
	int max_height = intel_plane_max_height(plane, fb, 0, rotation);
	int aux_plane = intel_main_to_aux_plane(fb, 0);
	/* Only meaningful when aux_plane != 0; guarded below. */
	u32 aux_offset = plane_state->color_plane[aux_plane].offset;
	u32 alignment, offset;

	if (w > max_width || w < min_width || h > max_height) {
		drm_dbg_kms(&dev_priv->drm,
			    "requested Y/RGB source size %dx%d outside limits (min: %dx1 max: %dx%d)\n",
			    w, h, min_width, max_width, max_height);
		return -EINVAL;
	}

	intel_add_fb_offsets(&x, &y, plane_state, 0);
	offset = intel_plane_compute_aligned_offset(&x, &y, plane_state, 0);
	alignment = intel_surf_alignment(fb, 0);
	if (drm_WARN_ON(&dev_priv->drm, alignment && !is_power_of_2(alignment)))
		return -EINVAL;

	/*
	 * AUX surface offset is specified as the distance from the
	 * main surface offset, and it must be non-negative. Make
	 * sure that is what we will get.
	 */
	if (aux_plane && offset > aux_offset)
		offset = intel_plane_adjust_aligned_offset(&x, &y, plane_state, 0,
							   offset, aux_offset & ~(alignment - 1));

	/*
	 * When using an X-tiled surface, the plane blows up
	 * if the x offset + width exceed the stride, so walk the
	 * offset back until the x offset fits.
	 */
	if (fb->modifier == I915_FORMAT_MOD_X_TILED) {
		int cpp = fb->format->cpp[0];

		while ((x + w) * cpp > plane_state->color_plane[0].stride) {
			if (offset == 0) {
				drm_dbg_kms(&dev_priv->drm,
					    "Unable to find suitable display surface offset due to X-tiling\n");
				return -EINVAL;
			}

			offset = intel_plane_adjust_aligned_offset(&x, &y, plane_state, 0,
								   offset, offset - alignment);
		}
	}

	/*
	 * CCS AUX surface doesn't have its own x/y offset registers, so
	 * the main surface x/y must be made to match the aux plane's.
	 */
	if (is_ccs_modifier(fb->modifier)) {
		while (!skl_check_main_ccs_coordinates(plane_state, x, y,
						       offset, aux_plane)) {
			if (offset == 0)
				break;

			offset = intel_plane_adjust_aligned_offset(&x, &y, plane_state, 0,
								   offset, offset - alignment);
		}

		if (x != plane_state->color_plane[aux_plane].x ||
		    y != plane_state->color_plane[aux_plane].y) {
			drm_dbg_kms(&dev_priv->drm,
				    "Unable to find suitable display surface offset due to CCS\n");
			return -EINVAL;
		}
	}

	plane_state->color_plane[0].offset = offset;
	plane_state->color_plane[0].x = x;
	plane_state->color_plane[0].y = y;

	/*
	 * Put the final coordinates back so that the src
	 * coordinate checks will see the right values.
	 */
	drm_rect_translate_to(&plane_state->uapi.src,
			      x << 16, y << 16);

	return 0;
}
3912
/*
 * Validate and finalize the chroma (CbCr/UV) surface offset/x/y for a
 * planar YUV fb on skl+.  The >> 17 shifts convert 16.16 fixed point to
 * integer AND halve the coordinates for the 2x2 subsampled chroma plane.
 */
static int skl_check_nv12_aux_surface(struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *i915 = to_i915(plane->base.dev);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	unsigned int rotation = plane_state->hw.rotation;
	int uv_plane = 1;
	int max_width = intel_plane_max_width(plane, fb, uv_plane, rotation);
	int max_height = intel_plane_max_height(plane, fb, uv_plane, rotation);
	int x = plane_state->uapi.src.x1 >> 17;
	int y = plane_state->uapi.src.y1 >> 17;
	int w = drm_rect_width(&plane_state->uapi.src) >> 17;
	int h = drm_rect_height(&plane_state->uapi.src) >> 17;
	u32 offset;

	/* FIXME not quite sure how/if these apply to the chroma plane */
	if (w > max_width || h > max_height) {
		drm_dbg_kms(&i915->drm,
			    "CbCr source size %dx%d too big (limit %dx%d)\n",
			    w, h, max_width, max_height);
		return -EINVAL;
	}

	intel_add_fb_offsets(&x, &y, plane_state, uv_plane);
	offset = intel_plane_compute_aligned_offset(&x, &y,
						    plane_state, uv_plane);

	if (is_ccs_modifier(fb->modifier)) {
		int ccs_plane = main_to_ccs_plane(fb, uv_plane);
		u32 aux_offset = plane_state->color_plane[ccs_plane].offset;
		u32 alignment = intel_surf_alignment(fb, uv_plane);

		/* UV offset must not exceed its CCS aux plane's offset. */
		if (offset > aux_offset)
			offset = intel_plane_adjust_aligned_offset(&x, &y,
								   plane_state,
								   uv_plane,
								   offset,
								   aux_offset & ~(alignment - 1));

		/* Walk back until the UV x/y match the CCS plane's x/y. */
		while (!skl_check_main_ccs_coordinates(plane_state, x, y,
						       offset, ccs_plane)) {
			if (offset == 0)
				break;

			offset = intel_plane_adjust_aligned_offset(&x, &y,
								   plane_state,
								   uv_plane,
								   offset, offset - alignment);
		}

		if (x != plane_state->color_plane[ccs_plane].x ||
		    y != plane_state->color_plane[ccs_plane].y) {
			drm_dbg_kms(&i915->drm,
				    "Unable to find suitable display surface offset due to CCS\n");
			return -EINVAL;
		}
	}

	plane_state->color_plane[uv_plane].offset = offset;
	plane_state->color_plane[uv_plane].x = x;
	plane_state->color_plane[uv_plane].y = y;

	return 0;
}
3977
/*
 * Compute the offset/x/y of every CCS (compression control) plane of the
 * fb from the plane's src coordinates, accounting for the combined
 * subsampling of the CCS plane relative to its main plane.  Always
 * succeeds (returns 0); kept int-returning for symmetry with the other
 * surface check helpers.
 */
static int skl_check_ccs_aux_surface(struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	int src_x = plane_state->uapi.src.x1 >> 16;
	int src_y = plane_state->uapi.src.y1 >> 16;
	u32 offset;
	int ccs_plane;

	for (ccs_plane = 0; ccs_plane < fb->format->num_planes; ccs_plane++) {
		int main_hsub, main_vsub;
		int hsub, vsub;
		int x, y;

		/* Only the CCS planes themselves are handled here. */
		if (!is_ccs_plane(fb, ccs_plane))
			continue;

		intel_fb_plane_get_subsampling(&main_hsub, &main_vsub, fb,
					       ccs_to_main_plane(fb, ccs_plane));
		intel_fb_plane_get_subsampling(&hsub, &vsub, fb, ccs_plane);

		/* Total subsampling = CCS-vs-main times main-vs-fb. */
		hsub *= main_hsub;
		vsub *= main_vsub;
		x = src_x / hsub;
		y = src_y / vsub;

		intel_add_fb_offsets(&x, &y, plane_state, ccs_plane);

		offset = intel_plane_compute_aligned_offset(&x, &y,
							    plane_state,
							    ccs_plane);

		plane_state->color_plane[ccs_plane].offset = offset;
		/* Re-attach the sub-tile remainder, scaled to main plane units. */
		plane_state->color_plane[ccs_plane].x = (x * hsub +
							 src_x % hsub) /
							main_hsub;
		plane_state->color_plane[ccs_plane].y = (y * vsub +
							 src_y % vsub) /
							main_vsub;
	}

	return 0;
}
4020
/*
 * skl_check_plane_surface - compute all surface offsets for a skl+ plane
 * @plane_state: plane state to set up
 *
 * Pins/computes the GTT mapping and then derives offset/x/y for every
 * color plane.  Aux surfaces (CCS, then UV) are handled before the main
 * surface because the main surface setup depends on their offsets.
 *
 * Returns 0 on success or a negative error code.
 */
int skl_check_plane_surface(struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	int ret, i;

	ret = intel_plane_compute_gtt(plane_state);
	if (ret)
		return ret;

	/* Invisible planes have nothing to scan out. */
	if (!plane_state->uapi.visible)
		return 0;

	/*
	 * Handle the AUX surface first since the main surface setup
	 * depends on it.
	 */
	if (is_ccs_modifier(fb->modifier)) {
		ret = skl_check_ccs_aux_surface(plane_state);
		if (ret)
			return ret;
	}

	if (intel_format_info_is_yuv_semiplanar(fb->format,
						fb->modifier)) {
		ret = skl_check_nv12_aux_surface(plane_state);
		if (ret)
			return ret;
	}

	/* Clear out the unused color planes so stale values can't leak. */
	for (i = fb->format->num_planes; i < ARRAY_SIZE(plane_state->color_plane); i++) {
		plane_state->color_plane[i].offset = 0;
		plane_state->color_plane[i].x = 0;
		plane_state->color_plane[i].y = 0;
	}

	ret = skl_check_main_surface(plane_state);
	if (ret)
		return ret;

	return 0;
}
4062
4063static void i9xx_plane_ratio(const struct intel_crtc_state *crtc_state,
4064 const struct intel_plane_state *plane_state,
4065 unsigned int *num, unsigned int *den)
4066{
4067 const struct drm_framebuffer *fb = plane_state->hw.fb;
4068 unsigned int cpp = fb->format->cpp[0];
4069
4070
4071
4072
4073
4074
4075
4076
4077 if (cpp == 8) {
4078 *num = 10;
4079 *den = 8;
4080 } else {
4081 *num = 1;
4082 *den = 1;
4083 }
4084}
4085
/*
 * Return the minimum cdclk (in kHz, same unit as crtc_state->pixel_rate)
 * this plane configuration requires on pre-skl hardware.
 */
static int i9xx_plane_min_cdclk(const struct intel_crtc_state *crtc_state,
				const struct intel_plane_state *plane_state)
{
	unsigned int pixel_rate;
	unsigned int num, den;

	/*
	 * Note that crtc_state->pixel_rate accounts for both
	 * horizontal and vertical panel fitter downscaling factors.
	 * Pre-HSW bspec tells us to only consider the horizontal
	 * downscaling factor here. We ignore that and just consider
	 * both for simplicity.
	 */
	pixel_rate = crtc_state->pixel_rate;

	i9xx_plane_ratio(crtc_state, plane_state, &num, &den);

	/* two pixels per clock with double wide pipe */
	if (crtc_state->double_wide)
		den *= 2;

	return DIV_ROUND_UP(pixel_rate * num, den);
}
4109
4110unsigned int
4111i9xx_plane_max_stride(struct intel_plane *plane,
4112 u32 pixel_format, u64 modifier,
4113 unsigned int rotation)
4114{
4115 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
4116
4117 if (!HAS_GMCH(dev_priv)) {
4118 return 32*1024;
4119 } else if (INTEL_GEN(dev_priv) >= 4) {
4120 if (modifier == I915_FORMAT_MOD_X_TILED)
4121 return 16*1024;
4122 else
4123 return 32*1024;
4124 } else if (INTEL_GEN(dev_priv) >= 3) {
4125 if (modifier == I915_FORMAT_MOD_X_TILED)
4126 return 8*1024;
4127 else
4128 return 16*1024;
4129 } else {
4130 if (plane->i9xx_plane == PLANE_C)
4131 return 4*1024;
4132 else
4133 return 8*1024;
4134 }
4135}
4136
4137static u32 i9xx_plane_ctl_crtc(const struct intel_crtc_state *crtc_state)
4138{
4139 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
4140 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
4141 u32 dspcntr = 0;
4142
4143 if (crtc_state->gamma_enable)
4144 dspcntr |= DISPPLANE_GAMMA_ENABLE;
4145
4146 if (crtc_state->csc_enable)
4147 dspcntr |= DISPPLANE_PIPE_CSC_ENABLE;
4148
4149 if (INTEL_GEN(dev_priv) < 5)
4150 dspcntr |= DISPPLANE_SEL_PIPE(crtc->pipe);
4151
4152 return dspcntr;
4153}
4154
/*
 * Compute the plane-state-dependent bits of DSPCNTR: enable bit, pixel
 * format, tiling, rotation/mirroring and trickle feed.  The crtc
 * dependent bits are added separately by i9xx_plane_ctl_crtc().
 * Returns 0 on an unhandled pixel format (after logging MISSING_CASE).
 */
static u32 i9xx_plane_ctl(const struct intel_crtc_state *crtc_state,
			  const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->uapi.plane->dev);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	unsigned int rotation = plane_state->hw.rotation;
	u32 dspcntr;

	dspcntr = DISPLAY_PLANE_ENABLE;

	if (IS_G4X(dev_priv) || IS_GEN(dev_priv, 5) ||
	    IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
		dspcntr |= DISPPLANE_TRICKLE_FEED_DISABLE;

	/* Map the DRM fourcc to the hw pixel format field. */
	switch (fb->format->format) {
	case DRM_FORMAT_C8:
		dspcntr |= DISPPLANE_8BPP;
		break;
	case DRM_FORMAT_XRGB1555:
		dspcntr |= DISPPLANE_BGRX555;
		break;
	case DRM_FORMAT_ARGB1555:
		dspcntr |= DISPPLANE_BGRA555;
		break;
	case DRM_FORMAT_RGB565:
		dspcntr |= DISPPLANE_BGRX565;
		break;
	case DRM_FORMAT_XRGB8888:
		dspcntr |= DISPPLANE_BGRX888;
		break;
	case DRM_FORMAT_XBGR8888:
		dspcntr |= DISPPLANE_RGBX888;
		break;
	case DRM_FORMAT_ARGB8888:
		dspcntr |= DISPPLANE_BGRA888;
		break;
	case DRM_FORMAT_ABGR8888:
		dspcntr |= DISPPLANE_RGBA888;
		break;
	case DRM_FORMAT_XRGB2101010:
		dspcntr |= DISPPLANE_BGRX101010;
		break;
	case DRM_FORMAT_XBGR2101010:
		dspcntr |= DISPPLANE_RGBX101010;
		break;
	case DRM_FORMAT_ARGB2101010:
		dspcntr |= DISPPLANE_BGRA101010;
		break;
	case DRM_FORMAT_ABGR2101010:
		dspcntr |= DISPPLANE_RGBA101010;
		break;
	case DRM_FORMAT_XBGR16161616F:
		dspcntr |= DISPPLANE_RGBX161616;
		break;
	default:
		MISSING_CASE(fb->format->format);
		return 0;
	}

	/* Only X-tiling is supported by this plane, and only on gen4+. */
	if (INTEL_GEN(dev_priv) >= 4 &&
	    fb->modifier == I915_FORMAT_MOD_X_TILED)
		dspcntr |= DISPPLANE_TILED;

	if (rotation & DRM_MODE_ROTATE_180)
		dspcntr |= DISPPLANE_ROTATE_180;

	if (rotation & DRM_MODE_REFLECT_X)
		dspcntr |= DISPPLANE_MIRROR;

	return dspcntr;
}
4227
/*
 * i9xx_check_plane_surface - compute the surface offset/x/y for a
 * pre-skl primary plane.
 * @plane_state: plane state to set up
 *
 * Pins/computes the GTT mapping and fills in color_plane[0].  Returns 0
 * on success or a negative error code.
 */
int i9xx_check_plane_surface(struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->uapi.plane->dev);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	int src_x, src_y, src_w;
	u32 offset;
	int ret;

	ret = intel_plane_compute_gtt(plane_state);
	if (ret)
		return ret;

	if (!plane_state->uapi.visible)
		return 0;

	src_w = drm_rect_width(&plane_state->uapi.src) >> 16;
	src_x = plane_state->uapi.src.x1 >> 16;
	src_y = plane_state->uapi.src.y1 >> 16;

	/* Undocumented hardware limit on i965/g4x/vlv/chv */
	if (HAS_GMCH(dev_priv) && fb->format->cpp[0] == 8 && src_w > 2048)
		return -EINVAL;

	intel_add_fb_offsets(&src_x, &src_y, plane_state, 0);

	/* Pre-gen4 has no surface offset register; everything is linear. */
	if (INTEL_GEN(dev_priv) >= 4)
		offset = intel_plane_compute_aligned_offset(&src_x, &src_y,
							    plane_state, 0);
	else
		offset = 0;

	/*
	 * Put the final coordinates back so that the src
	 * coordinate checks will see the right values.
	 */
	drm_rect_translate_to(&plane_state->uapi.src,
			      src_x << 16, src_y << 16);

	/* HSW/BDW do this automagically in hardware */
	if (!IS_HASWELL(dev_priv) && !IS_BROADWELL(dev_priv)) {
		unsigned int rotation = plane_state->hw.rotation;
		/* Note: these intentionally shadow the outer src_w. */
		int src_w = drm_rect_width(&plane_state->uapi.src) >> 16;
		int src_h = drm_rect_height(&plane_state->uapi.src) >> 16;

		/* Rotation/mirroring scans out from the opposite corner. */
		if (rotation & DRM_MODE_ROTATE_180) {
			src_x += src_w - 1;
			src_y += src_h - 1;
		} else if (rotation & DRM_MODE_REFLECT_X) {
			src_x += src_w - 1;
		}
	}

	plane_state->color_plane[0].offset = offset;
	plane_state->color_plane[0].x = src_x;
	plane_state->color_plane[0].y = src_y;

	return 0;
}
4287
4288static bool i9xx_plane_has_windowing(struct intel_plane *plane)
4289{
4290 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
4291 enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
4292
4293 if (IS_CHERRYVIEW(dev_priv))
4294 return i9xx_plane == PLANE_B;
4295 else if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
4296 return false;
4297 else if (IS_GEN(dev_priv, 4))
4298 return i9xx_plane == PLANE_C;
4299 else
4300 return i9xx_plane == PLANE_B ||
4301 i9xx_plane == PLANE_C;
4302}
4303
/*
 * Atomic check for an i9xx-style primary plane: validates rotation,
 * clipping (no scaling supported), computes the surface layout and
 * caches the DSPCNTR value in plane_state->ctl.
 */
static int
i9xx_plane_check(struct intel_crtc_state *crtc_state,
		 struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	int ret;

	ret = chv_plane_check_rotation(plane_state);
	if (ret)
		return ret;

	ret = intel_atomic_plane_check_clipping(plane_state, crtc_state,
						DRM_PLANE_HELPER_NO_SCALING,
						DRM_PLANE_HELPER_NO_SCALING,
						i9xx_plane_has_windowing(plane));
	if (ret)
		return ret;

	ret = i9xx_check_plane_surface(plane_state);
	if (ret)
		return ret;

	/* Fully clipped planes need no further validation. */
	if (!plane_state->uapi.visible)
		return 0;

	ret = intel_plane_check_src_coordinates(plane_state);
	if (ret)
		return ret;

	plane_state->ctl = i9xx_plane_ctl(crtc_state, plane_state);

	return 0;
}
4337
/*
 * Program the i9xx-style primary plane registers from the precomputed
 * plane state.  Runs under the uncore lock with _fw register accessors;
 * register write order matters (DSPCNTR before DSPSURF/DSPADDR).
 */
static void i9xx_update_plane(struct intel_plane *plane,
			      const struct intel_crtc_state *crtc_state,
			      const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	u32 linear_offset;
	int x = plane_state->color_plane[0].x;
	int y = plane_state->color_plane[0].y;
	int crtc_x = plane_state->uapi.dst.x1;
	int crtc_y = plane_state->uapi.dst.y1;
	int crtc_w = drm_rect_width(&plane_state->uapi.dst);
	int crtc_h = drm_rect_height(&plane_state->uapi.dst);
	unsigned long irqflags;
	u32 dspaddr_offset;
	u32 dspcntr;

	dspcntr = plane_state->ctl | i9xx_plane_ctl_crtc(crtc_state);

	linear_offset = intel_fb_xy_to_linear(x, y, plane_state, 0);

	/* Pre-gen4 has no aligned surface offset; the base address is used. */
	if (INTEL_GEN(dev_priv) >= 4)
		dspaddr_offset = plane_state->color_plane[0].offset;
	else
		dspaddr_offset = linear_offset;

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	intel_de_write_fw(dev_priv, DSPSTRIDE(i9xx_plane),
			  plane_state->color_plane[0].stride);

	if (INTEL_GEN(dev_priv) < 4) {
		/*
		 * PLANE_A doesn't actually have a full window
		 * generator but let's assume we still need to
		 * program whatever is there.
		 */
		intel_de_write_fw(dev_priv, DSPPOS(i9xx_plane),
				  (crtc_y << 16) | crtc_x);
		intel_de_write_fw(dev_priv, DSPSIZE(i9xx_plane),
				  ((crtc_h - 1) << 16) | (crtc_w - 1));
	} else if (IS_CHERRYVIEW(dev_priv) && i9xx_plane == PLANE_B) {
		intel_de_write_fw(dev_priv, PRIMPOS(i9xx_plane),
				  (crtc_y << 16) | crtc_x);
		intel_de_write_fw(dev_priv, PRIMSIZE(i9xx_plane),
				  ((crtc_h - 1) << 16) | (crtc_w - 1));
		intel_de_write_fw(dev_priv, PRIMCNSTALPHA(i9xx_plane), 0);
	}

	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) {
		intel_de_write_fw(dev_priv, DSPOFFSET(i9xx_plane),
				  (y << 16) | x);
	} else if (INTEL_GEN(dev_priv) >= 4) {
		intel_de_write_fw(dev_priv, DSPLINOFF(i9xx_plane),
				  linear_offset);
		intel_de_write_fw(dev_priv, DSPTILEOFF(i9xx_plane),
				  (y << 16) | x);
	}

	/*
	 * The control register self-arms if the plane was previously
	 * disabled. Try to make the plane enable atomic by writing
	 * the control register just before the surface register.
	 */
	intel_de_write_fw(dev_priv, DSPCNTR(i9xx_plane), dspcntr);
	if (INTEL_GEN(dev_priv) >= 4)
		intel_de_write_fw(dev_priv, DSPSURF(i9xx_plane),
				  intel_plane_ggtt_offset(plane_state) + dspaddr_offset);
	else
		intel_de_write_fw(dev_priv, DSPADDR(i9xx_plane),
				  intel_plane_ggtt_offset(plane_state) + dspaddr_offset);

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
4412
/*
 * Disable an i9xx-style primary plane, keeping the crtc-dependent
 * DSPCNTR bits programmed so the pipe bottom color stays correct.
 */
static void i9xx_disable_plane(struct intel_plane *plane,
			       const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	unsigned long irqflags;
	u32 dspcntr;

	/*
	 * DSPCNTR pipe gamma enable on g4x+ and pipe csc
	 * enable on ilk+ affect the pipe bottom color as
	 * well, so we must configure them even if the plane
	 * is disabled.
	 *
	 * On pre-g4x there is no way to gamma correct the
	 * pipe bottom color but we'll keep on doing this
	 * anyway so that the crtc state readout works correctly.
	 */
	dspcntr = i9xx_plane_ctl_crtc(crtc_state);

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	intel_de_write_fw(dev_priv, DSPCNTR(i9xx_plane), dspcntr);
	if (INTEL_GEN(dev_priv) >= 4)
		intel_de_write_fw(dev_priv, DSPSURF(i9xx_plane), 0);
	else
		intel_de_write_fw(dev_priv, DSPADDR(i9xx_plane), 0);

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
4443
/*
 * Read out whether the plane is currently enabled in hardware, and on
 * which pipe (via @pipe).  Returns false if the plane is off or its
 * power domain is down.
 */
static bool i9xx_plane_get_hw_state(struct intel_plane *plane,
				    enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum intel_display_power_domain power_domain;
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	intel_wakeref_t wakeref;
	bool ret;
	u32 val;

	/*
	 * Not 100% correct for planes that can move between pipes,
	 * but that's only the case for gen2-4 which don't have any
	 * display power wells.
	 */
	power_domain = POWER_DOMAIN_PIPE(plane->pipe);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	val = intel_de_read(dev_priv, DSPCNTR(i9xx_plane));

	ret = val & DISPLAY_PLANE_ENABLE;

	/* Pre-ilk the plane itself records which pipe it's attached to. */
	if (INTEL_GEN(dev_priv) >= 5)
		*pipe = plane->pipe;
	else
		*pipe = (val & DISPPLANE_SEL_PIPE_MASK) >>
			DISPPLANE_SEL_PIPE_SHIFT;

	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
4478
4479static void skl_detach_scaler(struct intel_crtc *intel_crtc, int id)
4480{
4481 struct drm_device *dev = intel_crtc->base.dev;
4482 struct drm_i915_private *dev_priv = to_i915(dev);
4483 unsigned long irqflags;
4484
4485 spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);
4486
4487 intel_de_write_fw(dev_priv, SKL_PS_CTRL(intel_crtc->pipe, id), 0);
4488 intel_de_write_fw(dev_priv, SKL_PS_WIN_POS(intel_crtc->pipe, id), 0);
4489 intel_de_write_fw(dev_priv, SKL_PS_WIN_SZ(intel_crtc->pipe, id), 0);
4490
4491 spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
4492}
4493
4494
4495
4496
4497static void skl_detach_scalers(const struct intel_crtc_state *crtc_state)
4498{
4499 struct intel_crtc *intel_crtc = to_intel_crtc(crtc_state->uapi.crtc);
4500 const struct intel_crtc_scaler_state *scaler_state =
4501 &crtc_state->scaler_state;
4502 int i;
4503
4504
4505 for (i = 0; i < intel_crtc->num_scalers; i++) {
4506 if (!scaler_state->scalers[i].in_use)
4507 skl_detach_scaler(intel_crtc, i);
4508 }
4509}
4510
/*
 * Return the divisor converting a byte stride into the units the skl+
 * PLANE_STRIDE register expects for this surface layout.
 */
static unsigned int skl_plane_stride_mult(const struct drm_framebuffer *fb,
					  int color_plane, unsigned int rotation)
{
	/*
	 * The stride is either expressed as a multiple of 64 bytes chunks
	 * for linear buffers or in number of tiles for tiled buffers.
	 */
	if (is_surface_linear(fb, color_plane))
		return 64;
	else if (drm_rotation_90_or_270(rotation))
		return intel_tile_height(fb, color_plane);
	else
		return intel_tile_width_bytes(fb, color_plane);
}
4525
4526u32 skl_plane_stride(const struct intel_plane_state *plane_state,
4527 int color_plane)
4528{
4529 const struct drm_framebuffer *fb = plane_state->hw.fb;
4530 unsigned int rotation = plane_state->hw.rotation;
4531 u32 stride = plane_state->color_plane[color_plane].stride;
4532
4533 if (color_plane >= fb->format->num_planes)
4534 return 0;
4535
4536 return stride / skl_plane_stride_mult(fb, color_plane, rotation);
4537}
4538
/*
 * Map a DRM fourcc to the skl+ PLANE_CTL format (and byte order) bits.
 * Returns 0 for unhandled formats after logging MISSING_CASE.
 */
static u32 skl_plane_ctl_format(u32 pixel_format)
{
	switch (pixel_format) {
	case DRM_FORMAT_C8:
		return PLANE_CTL_FORMAT_INDEXED;
	case DRM_FORMAT_RGB565:
		return PLANE_CTL_FORMAT_RGB_565;
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_ABGR8888:
		return PLANE_CTL_FORMAT_XRGB_8888 | PLANE_CTL_ORDER_RGBX;
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
		return PLANE_CTL_FORMAT_XRGB_8888;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010:
		return PLANE_CTL_FORMAT_XRGB_2101010 | PLANE_CTL_ORDER_RGBX;
	case DRM_FORMAT_XRGB2101010:
	case DRM_FORMAT_ARGB2101010:
		return PLANE_CTL_FORMAT_XRGB_2101010;
	case DRM_FORMAT_XBGR16161616F:
	case DRM_FORMAT_ABGR16161616F:
		return PLANE_CTL_FORMAT_XRGB_16161616F | PLANE_CTL_ORDER_RGBX;
	case DRM_FORMAT_XRGB16161616F:
	case DRM_FORMAT_ARGB16161616F:
		return PLANE_CTL_FORMAT_XRGB_16161616F;
	case DRM_FORMAT_XYUV8888:
		return PLANE_CTL_FORMAT_XYUV;
	case DRM_FORMAT_YUYV:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_YUYV;
	case DRM_FORMAT_YVYU:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_YVYU;
	case DRM_FORMAT_UYVY:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_UYVY;
	case DRM_FORMAT_VYUY:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_VYUY;
	case DRM_FORMAT_NV12:
		return PLANE_CTL_FORMAT_NV12;
	case DRM_FORMAT_P010:
		return PLANE_CTL_FORMAT_P010;
	case DRM_FORMAT_P012:
		return PLANE_CTL_FORMAT_P012;
	case DRM_FORMAT_P016:
		return PLANE_CTL_FORMAT_P016;
	case DRM_FORMAT_Y210:
		return PLANE_CTL_FORMAT_Y210;
	case DRM_FORMAT_Y212:
		return PLANE_CTL_FORMAT_Y212;
	case DRM_FORMAT_Y216:
		return PLANE_CTL_FORMAT_Y216;
	case DRM_FORMAT_XVYU2101010:
		return PLANE_CTL_FORMAT_Y410;
	case DRM_FORMAT_XVYU12_16161616:
		return PLANE_CTL_FORMAT_Y412;
	case DRM_FORMAT_XVYU16161616:
		return PLANE_CTL_FORMAT_Y416;
	default:
		MISSING_CASE(pixel_format);
	}

	return 0;
}
4600
4601static u32 skl_plane_ctl_alpha(const struct intel_plane_state *plane_state)
4602{
4603 if (!plane_state->hw.fb->format->has_alpha)
4604 return PLANE_CTL_ALPHA_DISABLE;
4605
4606 switch (plane_state->hw.pixel_blend_mode) {
4607 case DRM_MODE_BLEND_PIXEL_NONE:
4608 return PLANE_CTL_ALPHA_DISABLE;
4609 case DRM_MODE_BLEND_PREMULTI:
4610 return PLANE_CTL_ALPHA_SW_PREMULTIPLY;
4611 case DRM_MODE_BLEND_COVERAGE:
4612 return PLANE_CTL_ALPHA_HW_PREMULTIPLY;
4613 default:
4614 MISSING_CASE(plane_state->hw.pixel_blend_mode);
4615 return PLANE_CTL_ALPHA_DISABLE;
4616 }
4617}
4618
4619static u32 glk_plane_color_ctl_alpha(const struct intel_plane_state *plane_state)
4620{
4621 if (!plane_state->hw.fb->format->has_alpha)
4622 return PLANE_COLOR_ALPHA_DISABLE;
4623
4624 switch (plane_state->hw.pixel_blend_mode) {
4625 case DRM_MODE_BLEND_PIXEL_NONE:
4626 return PLANE_COLOR_ALPHA_DISABLE;
4627 case DRM_MODE_BLEND_PREMULTI:
4628 return PLANE_COLOR_ALPHA_SW_PREMULTIPLY;
4629 case DRM_MODE_BLEND_COVERAGE:
4630 return PLANE_COLOR_ALPHA_HW_PREMULTIPLY;
4631 default:
4632 MISSING_CASE(plane_state->hw.pixel_blend_mode);
4633 return PLANE_COLOR_ALPHA_DISABLE;
4634 }
4635}
4636
/*
 * Map an fb modifier to the skl+ PLANE_CTL tiling/decompression bits.
 * Linear returns 0; unknown modifiers log MISSING_CASE and return 0.
 */
static u32 skl_plane_ctl_tiling(u64 fb_modifier)
{
	switch (fb_modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		break;
	case I915_FORMAT_MOD_X_TILED:
		return PLANE_CTL_TILED_X;
	case I915_FORMAT_MOD_Y_TILED:
		return PLANE_CTL_TILED_Y;
	case I915_FORMAT_MOD_Y_TILED_CCS:
		return PLANE_CTL_TILED_Y | PLANE_CTL_RENDER_DECOMPRESSION_ENABLE;
	case I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS:
		/* gen12 render compression without fast clear color */
		return PLANE_CTL_TILED_Y |
		       PLANE_CTL_RENDER_DECOMPRESSION_ENABLE |
		       PLANE_CTL_CLEAR_COLOR_DISABLE;
	case I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS:
		return PLANE_CTL_TILED_Y | PLANE_CTL_MEDIA_DECOMPRESSION_ENABLE;
	case I915_FORMAT_MOD_Yf_TILED:
		return PLANE_CTL_TILED_YF;
	case I915_FORMAT_MOD_Yf_TILED_CCS:
		return PLANE_CTL_TILED_YF | PLANE_CTL_RENDER_DECOMPRESSION_ENABLE;
	default:
		MISSING_CASE(fb_modifier);
	}

	return 0;
}
4664
/* Map a DRM rotation value to the skl+ PLANE_CTL rotation bits. */
static u32 skl_plane_ctl_rotate(unsigned int rotate)
{
	switch (rotate) {
	case DRM_MODE_ROTATE_0:
		break;
	/*
	 * DRM_MODE_ROTATE_ is counter clockwise to stay compatible with
	 * Xrandr while i915 HW rotation is clockwise, hence the 90/270
	 * swap below.
	 */
	case DRM_MODE_ROTATE_90:
		return PLANE_CTL_ROTATE_270;
	case DRM_MODE_ROTATE_180:
		return PLANE_CTL_ROTATE_180;
	case DRM_MODE_ROTATE_270:
		return PLANE_CTL_ROTATE_90;
	default:
		MISSING_CASE(rotate);
	}

	return 0;
}
4686
4687static u32 cnl_plane_ctl_flip(unsigned int reflect)
4688{
4689 switch (reflect) {
4690 case 0:
4691 break;
4692 case DRM_MODE_REFLECT_X:
4693 return PLANE_CTL_FLIP_HORIZONTAL;
4694 case DRM_MODE_REFLECT_Y:
4695 default:
4696 MISSING_CASE(reflect);
4697 }
4698
4699 return 0;
4700}
4701
/*
 * Compute the crtc-dependent bits of skl+ PLANE_CTL: async flip, and
 * (pre-glk only) the pipe gamma/csc enables.
 */
u32 skl_plane_ctl_crtc(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	u32 plane_ctl = 0;

	if (crtc_state->uapi.async_flip)
		plane_ctl |= PLANE_CTL_ASYNC_FLIP;

	/* glk+ moved the gamma/csc controls to PLANE_COLOR_CTL. */
	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
		return plane_ctl;

	if (crtc_state->gamma_enable)
		plane_ctl |= PLANE_CTL_PIPE_GAMMA_ENABLE;

	if (crtc_state->csc_enable)
		plane_ctl |= PLANE_CTL_PIPE_CSC_ENABLE;

	return plane_ctl;
}
4721
4722u32 skl_plane_ctl(const struct intel_crtc_state *crtc_state,
4723 const struct intel_plane_state *plane_state)
4724{
4725 struct drm_i915_private *dev_priv =
4726 to_i915(plane_state->uapi.plane->dev);
4727 const struct drm_framebuffer *fb = plane_state->hw.fb;
4728 unsigned int rotation = plane_state->hw.rotation;
4729 const struct drm_intel_sprite_colorkey *key = &plane_state->ckey;
4730 u32 plane_ctl;
4731
4732 plane_ctl = PLANE_CTL_ENABLE;
4733
4734 if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) {
4735 plane_ctl |= skl_plane_ctl_alpha(plane_state);
4736 plane_ctl |= PLANE_CTL_PLANE_GAMMA_DISABLE;
4737
4738 if (plane_state->hw.color_encoding == DRM_COLOR_YCBCR_BT709)
4739 plane_ctl |= PLANE_CTL_YUV_TO_RGB_CSC_FORMAT_BT709;
4740
4741 if (plane_state->hw.color_range == DRM_COLOR_YCBCR_FULL_RANGE)
4742 plane_ctl |= PLANE_CTL_YUV_RANGE_CORRECTION_DISABLE;
4743 }
4744
4745 plane_ctl |= skl_plane_ctl_format(fb->format->format);
4746 plane_ctl |= skl_plane_ctl_tiling(fb->modifier);
4747 plane_ctl |= skl_plane_ctl_rotate(rotation & DRM_MODE_ROTATE_MASK);
4748
4749 if (INTEL_GEN(dev_priv) >= 10)
4750 plane_ctl |= cnl_plane_ctl_flip(rotation &
4751 DRM_MODE_REFLECT_MASK);
4752
4753 if (key->flags & I915_SET_COLORKEY_DESTINATION)
4754 plane_ctl |= PLANE_CTL_KEY_ENABLE_DESTINATION;
4755 else if (key->flags & I915_SET_COLORKEY_SOURCE)
4756 plane_ctl |= PLANE_CTL_KEY_ENABLE_SOURCE;
4757
4758 return plane_ctl;
4759}
4760
4761u32 glk_plane_color_ctl_crtc(const struct intel_crtc_state *crtc_state)
4762{
4763 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
4764 u32 plane_color_ctl = 0;
4765
4766 if (INTEL_GEN(dev_priv) >= 11)
4767 return plane_color_ctl;
4768
4769 if (crtc_state->gamma_enable)
4770 plane_color_ctl |= PLANE_COLOR_PIPE_GAMMA_ENABLE;
4771
4772 if (crtc_state->csc_enable)
4773 plane_color_ctl |= PLANE_COLOR_PIPE_CSC_ENABLE;
4774
4775 return plane_color_ctl;
4776}
4777
4778u32 glk_plane_color_ctl(const struct intel_crtc_state *crtc_state,
4779 const struct intel_plane_state *plane_state)
4780{
4781 struct drm_i915_private *dev_priv =
4782 to_i915(plane_state->uapi.plane->dev);
4783 const struct drm_framebuffer *fb = plane_state->hw.fb;
4784 struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
4785 u32 plane_color_ctl = 0;
4786
4787 plane_color_ctl |= PLANE_COLOR_PLANE_GAMMA_DISABLE;
4788 plane_color_ctl |= glk_plane_color_ctl_alpha(plane_state);
4789
4790 if (fb->format->is_yuv && !icl_is_hdr_plane(dev_priv, plane->id)) {
4791 switch (plane_state->hw.color_encoding) {
4792 case DRM_COLOR_YCBCR_BT709:
4793 plane_color_ctl |= PLANE_COLOR_CSC_MODE_YUV709_TO_RGB709;
4794 break;
4795 case DRM_COLOR_YCBCR_BT2020:
4796 plane_color_ctl |=
4797 PLANE_COLOR_CSC_MODE_YUV2020_TO_RGB2020;
4798 break;
4799 default:
4800 plane_color_ctl |=
4801 PLANE_COLOR_CSC_MODE_YUV601_TO_RGB601;
4802 }
4803 if (plane_state->hw.color_range == DRM_COLOR_YCBCR_FULL_RANGE)
4804 plane_color_ctl |= PLANE_COLOR_YUV_RANGE_CORRECTION_DISABLE;
4805 } else if (fb->format->is_yuv) {
4806 plane_color_ctl |= PLANE_COLOR_INPUT_CSC_ENABLE;
4807 if (plane_state->hw.color_range == DRM_COLOR_YCBCR_FULL_RANGE)
4808 plane_color_ctl |= PLANE_COLOR_YUV_RANGE_CORRECTION_DISABLE;
4809 }
4810
4811 return plane_color_ctl;
4812}
4813
/*
 * Re-apply a previously duplicated atomic state after the hardware has
 * been reset or resumed. Readout of the current HW state happens first;
 * then the duplicated state (if any) is committed with every CRTC forced
 * through a full modeset.
 *
 * Returns 0 on success or a negative error code from the commit.
 */
static int
__intel_display_resume(struct drm_device *dev,
		       struct drm_atomic_state *state,
		       struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	int i, ret;

	intel_modeset_setup_hw_state(dev, ctx);
	intel_vga_redisable(to_i915(dev));

	/* Nothing to restore: HW readout above is all that was needed. */
	if (!state)
		return 0;

	/*
	 * We've duplicated the state, pointers to the old state are
	 * invalid, so force every CRTC in the duplicated state through
	 * a full modeset.
	 */
	for_each_new_crtc_in_state(state, crtc, crtc_state, i) {
		crtc_state->mode_changed = true;
	}

	/*
	 * Skip intermediate watermark programming on non-GMCH platforms;
	 * the watermark registers may hold reset/BIOS leftovers here.
	 */
	if (!HAS_GMCH(to_i915(dev)))
		to_intel_atomic_state(state)->skip_intermediate_wm = true;

	ret = drm_atomic_helper_commit_duplicated_state(state, ctx);

	/* All locks were taken by the caller, so EDEADLK must not happen. */
	drm_WARN_ON(dev, ret == -EDEADLK);
	return ret;
}
4852
4853static bool gpu_reset_clobbers_display(struct drm_i915_private *dev_priv)
4854{
4855 return (INTEL_INFO(dev_priv)->gpu_reset_clobbers_display &&
4856 intel_has_gpu_reset(&dev_priv->gt));
4857}
4858
/*
 * Prepare the display for an impending GPU reset: when the reset will
 * clobber the display (or the force-test param is set), take all modeset
 * locks, stash a duplicate of the current atomic state for later restore,
 * and disable all CRTCs. intel_display_finish_reset() undoes this.
 */
void intel_display_prepare_reset(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct drm_modeset_acquire_ctx *ctx = &dev_priv->reset_ctx;
	struct drm_atomic_state *state;
	int ret;

	if (!HAS_DISPLAY(dev_priv))
		return;

	/* reset doesn't touch the display */
	if (!dev_priv->params.force_reset_modeset_test &&
	    !gpu_reset_clobbers_display(dev_priv))
		return;

	/* We have a modeset vs reset deadlock, defensively unbreak it. */
	set_bit(I915_RESET_MODESET, &dev_priv->gt.reset.flags);
	smp_mb__after_atomic();
	wake_up_bit(&dev_priv->gt.reset.flags, I915_RESET_MODESET);

	if (atomic_read(&dev_priv->gpu_error.pending_fb_pin)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Modeset potentially stuck, unbreaking through wedging\n");
		intel_gt_set_wedged(&dev_priv->gt);
	}

	/*
	 * Need mode_config.mutex so that we don't
	 * trample ongoing ->detect() and whatnot.
	 */
	mutex_lock(&dev->mode_config.mutex);
	drm_modeset_acquire_init(ctx, 0);
	while (1) {
		ret = drm_modeset_lock_all_ctx(dev, ctx);
		if (ret != -EDEADLK)
			break;

		drm_modeset_backoff(ctx);
	}

	/*
	 * Disabling the crtcs gracefully seems nicer. Also the
	 * g33 docs say we should at least disable all the planes.
	 */
	state = drm_atomic_helper_duplicate_state(dev, ctx);
	if (IS_ERR(state)) {
		ret = PTR_ERR(state);
		drm_err(&dev_priv->drm, "Duplicating state failed with %i\n",
			ret);
		return;
	}

	ret = drm_atomic_helper_disable_all(dev, ctx);
	if (ret) {
		drm_err(&dev_priv->drm, "Suspending crtc's failed with %i\n",
			ret);
		drm_atomic_state_put(state);
		return;
	}

	/* Keep the duplicated state for intel_display_finish_reset(). */
	dev_priv->modeset_restore_state = state;
	state->acquire_ctx = ctx;
}
4921
/*
 * Counterpart to intel_display_prepare_reset(): restore the display
 * state stashed before the GPU reset, re-initialize the display HW if
 * the reset clobbered it, and drop the modeset locks taken earlier.
 */
void intel_display_finish_reset(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct drm_modeset_acquire_ctx *ctx = &dev_priv->reset_ctx;
	struct drm_atomic_state *state;
	int ret;

	if (!HAS_DISPLAY(dev_priv))
		return;

	/* reset doesn't touch the display */
	if (!test_bit(I915_RESET_MODESET, &dev_priv->gt.reset.flags))
		return;

	state = fetch_and_zero(&dev_priv->modeset_restore_state);
	if (!state)
		goto unlock;

	/* reset doesn't touch the display */
	if (!gpu_reset_clobbers_display(dev_priv)) {
		/* for testing only restore the display */
		ret = __intel_display_resume(dev, state, ctx);
		if (ret)
			drm_err(&dev_priv->drm,
				"Restoring old state failed with %i\n", ret);
	} else {
		/*
		 * The display has been reset as well,
		 * so need a full re-initialization.
		 */
		intel_pps_unlock_regs_wa(dev_priv);
		intel_modeset_init_hw(dev_priv);
		intel_init_clock_gating(dev_priv);
		intel_hpd_init(dev_priv);

		ret = __intel_display_resume(dev, state, ctx);
		if (ret)
			drm_err(&dev_priv->drm,
				"Restoring old state failed with %i\n", ret);

		intel_hpd_poll_disable(dev_priv);
	}

	drm_atomic_state_put(state);
unlock:
	drm_modeset_drop_locks(ctx);
	drm_modeset_acquire_fini(ctx);
	mutex_unlock(&dev->mode_config.mutex);

	clear_bit_unlock(I915_RESET_MODESET, &dev_priv->gt.reset.flags);
}
4973
4974static void icl_set_pipe_chicken(struct intel_crtc *crtc)
4975{
4976 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
4977 enum pipe pipe = crtc->pipe;
4978 u32 tmp;
4979
4980 tmp = intel_de_read(dev_priv, PIPE_CHICKEN(pipe));
4981
4982
4983
4984
4985
4986
4987 tmp |= PER_PIXEL_ALPHA_BYPASS_EN;
4988
4989
4990
4991
4992
4993
4994 tmp |= PIXEL_ROUNDING_TRUNC_FB_PASSTHRU;
4995 intel_de_write(dev_priv, PIPE_CHICKEN(pipe), tmp);
4996}
4997
/*
 * Switch the FDI TX and RX from a training pattern to the normal link
 * state once training has completed, and enable enhanced framing.
 */
static void intel_fdi_normal_train(struct intel_crtc *crtc)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* enable normal train on the CPU FDI TX */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	if (IS_IVYBRIDGE(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_NONE_IVB;
		temp |= FDI_LINK_TRAIN_NONE_IVB | FDI_TX_ENHANCE_FRAME_ENABLE;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_NONE | FDI_TX_ENHANCE_FRAME_ENABLE;
	}
	intel_de_write(dev_priv, reg, temp);

	/* likewise on the PCH FDI RX (CPT uses its own pattern field) */
	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_NORMAL_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_NONE;
	}
	intel_de_write(dev_priv, reg, temp | FDI_RX_ENHANCE_FRAME_ENABLE);

	/* wait one idle pattern time */
	intel_de_posting_read(dev_priv, reg);
	udelay(1000);

	/* IVB wants error correction enabled */
	if (IS_IVYBRIDGE(dev_priv))
		intel_de_write(dev_priv, reg,
			       intel_de_read(dev_priv, reg) | FDI_FS_ERRC_ENABLE | FDI_FE_ERRC_ENABLE);
}
5038
5039
/* The FDI link training function for Ironlake/Ibexpeak. */
static void ilk_fdi_link_train(struct intel_crtc *crtc,
			       const struct intel_crtc_state *crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp, tries;

	/* FDI needs bits from pipe first */
	assert_pipe_enabled(dev_priv, crtc_state->cpu_transcoder);

	/* Train 1: umask FDI RX Interrupt symbol_lock and bit_lock bit
	   for train result */
	reg = FDI_RX_IMR(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_RX_SYMBOL_LOCK;
	temp &= ~FDI_RX_BIT_LOCK;
	intel_de_write(dev_priv, reg, temp);
	intel_de_read(dev_priv, reg);
	udelay(150);

	/* enable CPU FDI TX and PCH FDI RX with training pattern 1 */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_DP_PORT_WIDTH_MASK;
	temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	intel_de_write(dev_priv, reg, temp | FDI_TX_ENABLE);

	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	intel_de_write(dev_priv, reg, temp | FDI_RX_ENABLE);

	intel_de_posting_read(dev_priv, reg);
	udelay(150);

	/* Ironlake workaround, enable clock pointer after FDI enable */
	intel_de_write(dev_priv, FDI_RX_CHICKEN(pipe),
		       FDI_RX_PHASE_SYNC_POINTER_OVR);
	intel_de_write(dev_priv, FDI_RX_CHICKEN(pipe),
		       FDI_RX_PHASE_SYNC_POINTER_OVR | FDI_RX_PHASE_SYNC_POINTER_EN);

	/* poll for bit lock; write 1 back to clear the sticky status bit */
	reg = FDI_RX_IIR(pipe);
	for (tries = 0; tries < 5; tries++) {
		temp = intel_de_read(dev_priv, reg);
		drm_dbg_kms(&dev_priv->drm, "FDI_RX_IIR 0x%x\n", temp);

		if ((temp & FDI_RX_BIT_LOCK)) {
			drm_dbg_kms(&dev_priv->drm, "FDI train 1 done.\n");
			intel_de_write(dev_priv, reg, temp | FDI_RX_BIT_LOCK);
			break;
		}
	}
	if (tries == 5)
		drm_err(&dev_priv->drm, "FDI train 1 fail!\n");

	/* Train 2: switch both ends to pattern 2 */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_2;
	intel_de_write(dev_priv, reg, temp);

	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_2;
	intel_de_write(dev_priv, reg, temp);

	intel_de_posting_read(dev_priv, reg);
	udelay(150);

	/* poll for symbol lock; again write 1 back to clear the status */
	reg = FDI_RX_IIR(pipe);
	for (tries = 0; tries < 5; tries++) {
		temp = intel_de_read(dev_priv, reg);
		drm_dbg_kms(&dev_priv->drm, "FDI_RX_IIR 0x%x\n", temp);

		if (temp & FDI_RX_SYMBOL_LOCK) {
			intel_de_write(dev_priv, reg,
				       temp | FDI_RX_SYMBOL_LOCK);
			drm_dbg_kms(&dev_priv->drm, "FDI train 2 done.\n");
			break;
		}
	}
	if (tries == 5)
		drm_err(&dev_priv->drm, "FDI train 2 fail!\n");

	drm_dbg_kms(&dev_priv->drm, "FDI train done\n");

}
5134
/*
 * Voltage-swing / pre-emphasis combinations tried in order during
 * SNB/IVB FDI link training (lowest drive strength first).
 */
static const int snb_b_fdi_train_param[] = {
	FDI_LINK_TRAIN_400MV_0DB_SNB_B,
	FDI_LINK_TRAIN_400MV_6DB_SNB_B,
	FDI_LINK_TRAIN_600MV_3_5DB_SNB_B,
	FDI_LINK_TRAIN_800MV_0DB_SNB_B,
};
5141
5142
/* The FDI link training function for SNB. */
static void gen6_fdi_link_train(struct intel_crtc *crtc,
				const struct intel_crtc_state *crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp, i, retry;

	/* Train 1: umask FDI RX Interrupt symbol_lock and bit_lock bit
	   for train result */
	reg = FDI_RX_IMR(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_RX_SYMBOL_LOCK;
	temp &= ~FDI_RX_BIT_LOCK;
	intel_de_write(dev_priv, reg, temp);

	intel_de_posting_read(dev_priv, reg);
	udelay(150);

	/* enable CPU FDI TX and PCH FDI RX */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_DP_PORT_WIDTH_MASK;
	temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
	/* SNB-B */
	temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
	intel_de_write(dev_priv, reg, temp | FDI_TX_ENABLE);

	intel_de_write(dev_priv, FDI_RX_MISC(pipe),
		       FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90);

	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_PATTERN_1;
	}
	intel_de_write(dev_priv, reg, temp | FDI_RX_ENABLE);

	intel_de_posting_read(dev_priv, reg);
	udelay(150);

	/* step up through the vswing/emphasis table until bit lock is found */
	for (i = 0; i < 4; i++) {
		reg = FDI_TX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		temp |= snb_b_fdi_train_param[i];
		intel_de_write(dev_priv, reg, temp);

		intel_de_posting_read(dev_priv, reg);
		udelay(500);

		for (retry = 0; retry < 5; retry++) {
			reg = FDI_RX_IIR(pipe);
			temp = intel_de_read(dev_priv, reg);
			drm_dbg_kms(&dev_priv->drm, "FDI_RX_IIR 0x%x\n", temp);
			if (temp & FDI_RX_BIT_LOCK) {
				/* write 1 to clear the sticky status bit */
				intel_de_write(dev_priv, reg,
					       temp | FDI_RX_BIT_LOCK);
				drm_dbg_kms(&dev_priv->drm,
					    "FDI train 1 done.\n");
				break;
			}
			udelay(50);
		}
		if (retry < 5)
			break;
	}
	if (i == 4)
		drm_err(&dev_priv->drm, "FDI train 1 fail!\n");

	/* Train 2 */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_2;
	if (IS_GEN(dev_priv, 6)) {
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		/* SNB-B */
		temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
	}
	intel_de_write(dev_priv, reg, temp);

	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_2_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_PATTERN_2;
	}
	intel_de_write(dev_priv, reg, temp);

	intel_de_posting_read(dev_priv, reg);
	udelay(150);

	/* same vswing walk, now waiting for symbol lock */
	for (i = 0; i < 4; i++) {
		reg = FDI_TX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		temp |= snb_b_fdi_train_param[i];
		intel_de_write(dev_priv, reg, temp);

		intel_de_posting_read(dev_priv, reg);
		udelay(500);

		for (retry = 0; retry < 5; retry++) {
			reg = FDI_RX_IIR(pipe);
			temp = intel_de_read(dev_priv, reg);
			drm_dbg_kms(&dev_priv->drm, "FDI_RX_IIR 0x%x\n", temp);
			if (temp & FDI_RX_SYMBOL_LOCK) {
				intel_de_write(dev_priv, reg,
					       temp | FDI_RX_SYMBOL_LOCK);
				drm_dbg_kms(&dev_priv->drm,
					    "FDI train 2 done.\n");
				break;
			}
			udelay(50);
		}
		if (retry < 5)
			break;
	}
	if (i == 4)
		drm_err(&dev_priv->drm, "FDI train 2 fail!\n");

	drm_dbg_kms(&dev_priv->drm, "FDI train done.\n");
}
5278
5279
/* Manual link training for Ivy Bridge A0 parts */
static void ivb_manual_fdi_link_train(struct intel_crtc *crtc,
				      const struct intel_crtc_state *crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp, i, j;

	/* Train 1: umask FDI RX Interrupt symbol_lock and bit_lock bit
	   for train result */
	reg = FDI_RX_IMR(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_RX_SYMBOL_LOCK;
	temp &= ~FDI_RX_BIT_LOCK;
	intel_de_write(dev_priv, reg, temp);

	intel_de_posting_read(dev_priv, reg);
	udelay(150);

	drm_dbg_kms(&dev_priv->drm, "FDI_RX_IIR before link train 0x%x\n",
		    intel_de_read(dev_priv, FDI_RX_IIR(pipe)));

	/* Try each vswing and preemphasis setting twice before moving on */
	for (j = 0; j < ARRAY_SIZE(snb_b_fdi_train_param) * 2; j++) {
		/* disable first in case we need to retry */
		reg = FDI_TX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp &= ~(FDI_LINK_TRAIN_AUTO | FDI_LINK_TRAIN_NONE_IVB);
		temp &= ~FDI_TX_ENABLE;
		intel_de_write(dev_priv, reg, temp);

		reg = FDI_RX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp &= ~FDI_LINK_TRAIN_AUTO;
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp &= ~FDI_RX_ENABLE;
		intel_de_write(dev_priv, reg, temp);

		/* enable CPU FDI TX and PCH FDI RX */
		reg = FDI_TX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp &= ~FDI_DP_PORT_WIDTH_MASK;
		temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
		temp |= FDI_LINK_TRAIN_PATTERN_1_IVB;
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		temp |= snb_b_fdi_train_param[j/2];
		temp |= FDI_COMPOSITE_SYNC;
		intel_de_write(dev_priv, reg, temp | FDI_TX_ENABLE);

		intel_de_write(dev_priv, FDI_RX_MISC(pipe),
			       FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90);

		reg = FDI_RX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
		temp |= FDI_COMPOSITE_SYNC;
		intel_de_write(dev_priv, reg, temp | FDI_RX_ENABLE);

		intel_de_posting_read(dev_priv, reg);
		udelay(1); /* should be 0.5us */

		for (i = 0; i < 4; i++) {
			reg = FDI_RX_IIR(pipe);
			temp = intel_de_read(dev_priv, reg);
			drm_dbg_kms(&dev_priv->drm, "FDI_RX_IIR 0x%x\n", temp);

			if (temp & FDI_RX_BIT_LOCK ||
			    (intel_de_read(dev_priv, reg) & FDI_RX_BIT_LOCK)) {
				intel_de_write(dev_priv, reg,
					       temp | FDI_RX_BIT_LOCK);
				drm_dbg_kms(&dev_priv->drm,
					    "FDI train 1 done, level %i.\n",
					    i);
				break;
			}
			udelay(1); /* should be 0.5us */
		}
		if (i == 4) {
			drm_dbg_kms(&dev_priv->drm,
				    "FDI train 1 fail on vswing %d\n", j / 2);
			continue;
		}

		/* Train 2 */
		reg = FDI_TX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp &= ~FDI_LINK_TRAIN_NONE_IVB;
		temp |= FDI_LINK_TRAIN_PATTERN_2_IVB;
		intel_de_write(dev_priv, reg, temp);

		reg = FDI_RX_CTL(pipe);
		temp = intel_de_read(dev_priv, reg);
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_2_CPT;
		intel_de_write(dev_priv, reg, temp);

		intel_de_posting_read(dev_priv, reg);
		udelay(2); /* should be 1.5us */

		for (i = 0; i < 4; i++) {
			reg = FDI_RX_IIR(pipe);
			temp = intel_de_read(dev_priv, reg);
			drm_dbg_kms(&dev_priv->drm, "FDI_RX_IIR 0x%x\n", temp);

			if (temp & FDI_RX_SYMBOL_LOCK ||
			    (intel_de_read(dev_priv, reg) & FDI_RX_SYMBOL_LOCK)) {
				intel_de_write(dev_priv, reg,
					       temp | FDI_RX_SYMBOL_LOCK);
				drm_dbg_kms(&dev_priv->drm,
					    "FDI train 2 done, level %i.\n",
					    i);
				goto train_done;
			}
			udelay(2); /* should be 1.5us */
		}
		if (i == 4)
			drm_dbg_kms(&dev_priv->drm,
				    "FDI train 2 fail on vswing %d\n", j / 2);
	}

train_done:
	drm_dbg_kms(&dev_priv->drm, "FDI train done.\n");
}
5404
/*
 * Enable the FDI RX and TX PLLs and switch the RX to the PCD clock,
 * observing the required warm-up delays between each step.
 */
static void ilk_fdi_pll_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	enum pipe pipe = intel_crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* enable PCH FDI RX PLL, wait warmup plus DMI latency */
	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~(FDI_DP_PORT_WIDTH_MASK | (0x7 << 16));
	temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	/* mirror the pipe's BPC setting into bits 18:16 of the RX ctl */
	temp |= (intel_de_read(dev_priv, PIPECONF(pipe)) & PIPECONF_BPC_MASK) << 11;
	intel_de_write(dev_priv, reg, temp | FDI_RX_PLL_ENABLE);

	intel_de_posting_read(dev_priv, reg);
	udelay(200);

	/* Switch from Rawclk to PCDclk */
	temp = intel_de_read(dev_priv, reg);
	intel_de_write(dev_priv, reg, temp | FDI_PCDCLK);

	intel_de_posting_read(dev_priv, reg);
	udelay(200);

	/* Enable CPU FDI TX PLL, always on for Ironlake */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	if ((temp & FDI_TX_PLL_ENABLE) == 0) {
		intel_de_write(dev_priv, reg, temp | FDI_TX_PLL_ENABLE);

		intel_de_posting_read(dev_priv, reg);
		udelay(100);
	}
}
5441
/*
 * Disable the FDI PLLs in the reverse order of ilk_fdi_pll_enable():
 * back to Rawclk first, then TX PLL, then RX PLL, with settle delays.
 */
static void ilk_fdi_pll_disable(struct intel_crtc *intel_crtc)
{
	struct drm_device *dev = intel_crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = intel_crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* Switch from PCDclk to Rawclk */
	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	intel_de_write(dev_priv, reg, temp & ~FDI_PCDCLK);

	/* Disable CPU FDI TX PLL */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	intel_de_write(dev_priv, reg, temp & ~FDI_TX_PLL_ENABLE);

	intel_de_posting_read(dev_priv, reg);
	udelay(100);

	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	intel_de_write(dev_priv, reg, temp & ~FDI_RX_PLL_ENABLE);

	/* Wait for the clocks to turn off. */
	intel_de_posting_read(dev_priv, reg);
	udelay(100);
}
5471
/*
 * Disable the FDI link on this pipe: turn off TX/RX, undo the Ironlake
 * clock-pointer workaround, and leave both ends parked in training
 * pattern 1.
 */
static void ilk_fdi_disable(struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* disable CPU FDI tx and PCH FDI rx */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	intel_de_write(dev_priv, reg, temp & ~FDI_TX_ENABLE);
	intel_de_posting_read(dev_priv, reg);

	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~(0x7 << 16);
	/* keep bits 18:16 mirroring the pipe's BPC setting */
	temp |= (intel_de_read(dev_priv, PIPECONF(pipe)) & PIPECONF_BPC_MASK) << 11;
	intel_de_write(dev_priv, reg, temp & ~FDI_RX_ENABLE);

	intel_de_posting_read(dev_priv, reg);
	udelay(100);

	/* Ironlake workaround, disable clock pointer after downing FDI */
	if (HAS_PCH_IBX(dev_priv))
		intel_de_write(dev_priv, FDI_RX_CHICKEN(pipe),
			       FDI_RX_PHASE_SYNC_POINTER_OVR);

	/* still set train pattern 1 */
	reg = FDI_TX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	intel_de_write(dev_priv, reg, temp);

	reg = FDI_RX_CTL(pipe);
	temp = intel_de_read(dev_priv, reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_PATTERN_1;
	}
	/* BPC in FDI rx is consistent with that in PIPECONF */
	temp &= ~(0x07 << 16);
	temp |= (intel_de_read(dev_priv, PIPECONF(pipe)) & PIPECONF_BPC_MASK) << 11;
	intel_de_write(dev_priv, reg, temp);

	intel_de_posting_read(dev_priv, reg);
	udelay(100);
}
5523
5524bool intel_has_pending_fb_unpin(struct drm_i915_private *dev_priv)
5525{
5526 struct drm_crtc *crtc;
5527 bool cleanup_done;
5528
5529 drm_for_each_crtc(crtc, &dev_priv->drm) {
5530 struct drm_crtc_commit *commit;
5531 spin_lock(&crtc->commit_lock);
5532 commit = list_first_entry_or_null(&crtc->commit_list,
5533 struct drm_crtc_commit, commit_entry);
5534 cleanup_done = commit ?
5535 try_wait_for_completion(&commit->cleanup_done) : true;
5536 spin_unlock(&crtc->commit_lock);
5537
5538 if (cleanup_done)
5539 continue;
5540
5541 drm_crtc_wait_one_vblank(crtc);
5542
5543 return true;
5544 }
5545
5546 return false;
5547}
5548
5549void lpt_disable_iclkip(struct drm_i915_private *dev_priv)
5550{
5551 u32 temp;
5552
5553 intel_de_write(dev_priv, PIXCLK_GATE, PIXCLK_GATE_GATE);
5554
5555 mutex_lock(&dev_priv->sb_lock);
5556
5557 temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
5558 temp |= SBI_SSCCTL_DISABLE;
5559 intel_sbi_write(dev_priv, SBI_SSCCTL6, temp, SBI_ICLK);
5560
5561 mutex_unlock(&dev_priv->sb_lock);
5562}
5563
5564
/* Program iCLKIP clock to the desired frequency */
static void lpt_program_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	int clock = crtc_state->hw.adjusted_mode.crtc_clock;
	u32 divsel, phaseinc, auxdiv, phasedir = 0;
	u32 temp;

	lpt_disable_iclkip(dev_priv);

	/*
	 * The iCLK virtual clock root frequency is 172.8 MHz.
	 * Split the desired divisor into an integer part (divsel,
	 * offset by 2 in the register encoding) and a fractional
	 * phase increment within a 64-step range. Try auxdiv 0
	 * first, falling back to the /2 aux divider when divsel
	 * would not fit in its 7-bit field.
	 */
	for (auxdiv = 0; auxdiv < 2; auxdiv++) {
		u32 iclk_virtual_root_freq = 172800 * 1000;
		u32 iclk_pi_range = 64;
		u32 desired_divisor;

		desired_divisor = DIV_ROUND_CLOSEST(iclk_virtual_root_freq,
						    clock << auxdiv);
		divsel = (desired_divisor / iclk_pi_range) - 2;
		phaseinc = desired_divisor % iclk_pi_range;

		/*
		 * Near 20MHz is a corner case which is
		 * out of range for the 7-bit divisor
		 */
		if (divsel <= 0x7f)
			break;
	}

	/* This should not happen with any sane values */
	drm_WARN_ON(&dev_priv->drm, SBI_SSCDIVINTPHASE_DIVSEL(divsel) &
		    ~SBI_SSCDIVINTPHASE_DIVSEL_MASK);
	drm_WARN_ON(&dev_priv->drm, SBI_SSCDIVINTPHASE_DIR(phasedir) &
		    ~SBI_SSCDIVINTPHASE_INCVAL_MASK);

	drm_dbg_kms(&dev_priv->drm,
		    "iCLKIP clock: found settings for %dKHz refresh rate: auxdiv=%x, divsel=%x, phasedir=%x, phaseinc=%x\n",
		    clock, auxdiv, divsel, phasedir, phaseinc);

	mutex_lock(&dev_priv->sb_lock);

	/* Program SSCDIVINTPHASE6 */
	temp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	temp &= ~SBI_SSCDIVINTPHASE_DIVSEL_MASK;
	temp |= SBI_SSCDIVINTPHASE_DIVSEL(divsel);
	temp &= ~SBI_SSCDIVINTPHASE_INCVAL_MASK;
	temp |= SBI_SSCDIVINTPHASE_INCVAL(phaseinc);
	temp |= SBI_SSCDIVINTPHASE_DIR(phasedir);
	temp |= SBI_SSCDIVINTPHASE_PROPAGATE;
	intel_sbi_write(dev_priv, SBI_SSCDIVINTPHASE6, temp, SBI_ICLK);

	/* Program SSCAUXDIV */
	temp = intel_sbi_read(dev_priv, SBI_SSCAUXDIV6, SBI_ICLK);
	temp &= ~SBI_SSCAUXDIV_FINALDIV2SEL(1);
	temp |= SBI_SSCAUXDIV_FINALDIV2SEL(auxdiv);
	intel_sbi_write(dev_priv, SBI_SSCAUXDIV6, temp, SBI_ICLK);

	/* Enable modulator and associated divider */
	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	temp &= ~SBI_SSCCTL_DISABLE;
	intel_sbi_write(dev_priv, SBI_SSCCTL6, temp, SBI_ICLK);

	mutex_unlock(&dev_priv->sb_lock);

	/* Wait for initialization time */
	udelay(24);

	intel_de_write(dev_priv, PIXCLK_GATE, PIXCLK_GATE_UNGATE);
}
5639
/*
 * Read back the programmed iCLKIP frequency in kHz — the inverse of
 * lpt_program_iclkip(). Returns 0 when the pixel clock is gated or the
 * SSC modulator is disabled.
 */
int lpt_get_iclkip(struct drm_i915_private *dev_priv)
{
	u32 divsel, phaseinc, auxdiv;
	u32 iclk_virtual_root_freq = 172800 * 1000;
	u32 iclk_pi_range = 64;
	u32 desired_divisor;
	u32 temp;

	if ((intel_de_read(dev_priv, PIXCLK_GATE) & PIXCLK_GATE_UNGATE) == 0)
		return 0;

	mutex_lock(&dev_priv->sb_lock);

	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	if (temp & SBI_SSCCTL_DISABLE) {
		mutex_unlock(&dev_priv->sb_lock);
		return 0;
	}

	temp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	divsel = (temp & SBI_SSCDIVINTPHASE_DIVSEL_MASK) >>
		SBI_SSCDIVINTPHASE_DIVSEL_SHIFT;
	phaseinc = (temp & SBI_SSCDIVINTPHASE_INCVAL_MASK) >>
		SBI_SSCDIVINTPHASE_INCVAL_SHIFT;

	temp = intel_sbi_read(dev_priv, SBI_SSCAUXDIV6, SBI_ICLK);
	auxdiv = (temp & SBI_SSCAUXDIV_FINALDIV2SEL_MASK) >>
		SBI_SSCAUXDIV_FINALDIV2SEL_SHIFT;

	mutex_unlock(&dev_priv->sb_lock);

	/* undo the divsel-offset-by-2 and phase-increment encoding */
	desired_divisor = (divsel + 2) * iclk_pi_range + phaseinc;

	return DIV_ROUND_CLOSEST(iclk_virtual_root_freq,
				 desired_divisor << auxdiv);
}
5676
/*
 * Copy the CPU transcoder's h/v timing registers verbatim to the PCH
 * transcoder so both sides of the FDI link agree on the mode timings.
 */
static void ilk_pch_transcoder_set_timings(const struct intel_crtc_state *crtc_state,
					   enum pipe pch_transcoder)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	intel_de_write(dev_priv, PCH_TRANS_HTOTAL(pch_transcoder),
		       intel_de_read(dev_priv, HTOTAL(cpu_transcoder)));
	intel_de_write(dev_priv, PCH_TRANS_HBLANK(pch_transcoder),
		       intel_de_read(dev_priv, HBLANK(cpu_transcoder)));
	intel_de_write(dev_priv, PCH_TRANS_HSYNC(pch_transcoder),
		       intel_de_read(dev_priv, HSYNC(cpu_transcoder)));

	intel_de_write(dev_priv, PCH_TRANS_VTOTAL(pch_transcoder),
		       intel_de_read(dev_priv, VTOTAL(cpu_transcoder)));
	intel_de_write(dev_priv, PCH_TRANS_VBLANK(pch_transcoder),
		       intel_de_read(dev_priv, VBLANK(cpu_transcoder)));
	intel_de_write(dev_priv, PCH_TRANS_VSYNC(pch_transcoder),
		       intel_de_read(dev_priv, VSYNC(cpu_transcoder)));
	intel_de_write(dev_priv, PCH_TRANS_VSYNCSHIFT(pch_transcoder),
		       intel_de_read(dev_priv, VSYNCSHIFT(cpu_transcoder)));
}
5700
5701static void cpt_set_fdi_bc_bifurcation(struct drm_i915_private *dev_priv, bool enable)
5702{
5703 u32 temp;
5704
5705 temp = intel_de_read(dev_priv, SOUTH_CHICKEN1);
5706 if (!!(temp & FDI_BC_BIFURCATION_SELECT) == enable)
5707 return;
5708
5709 drm_WARN_ON(&dev_priv->drm,
5710 intel_de_read(dev_priv, FDI_RX_CTL(PIPE_B)) &
5711 FDI_RX_ENABLE);
5712 drm_WARN_ON(&dev_priv->drm,
5713 intel_de_read(dev_priv, FDI_RX_CTL(PIPE_C)) &
5714 FDI_RX_ENABLE);
5715
5716 temp &= ~FDI_BC_BIFURCATION_SELECT;
5717 if (enable)
5718 temp |= FDI_BC_BIFURCATION_SELECT;
5719
5720 drm_dbg_kms(&dev_priv->drm, "%sabling fdi C rx\n",
5721 enable ? "en" : "dis");
5722 intel_de_write(dev_priv, SOUTH_CHICKEN1, temp);
5723 intel_de_posting_read(dev_priv, SOUTH_CHICKEN1);
5724}
5725
5726static void ivb_update_fdi_bc_bifurcation(const struct intel_crtc_state *crtc_state)
5727{
5728 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
5729 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
5730
5731 switch (crtc->pipe) {
5732 case PIPE_A:
5733 break;
5734 case PIPE_B:
5735 if (crtc_state->fdi_lanes > 2)
5736 cpt_set_fdi_bc_bifurcation(dev_priv, false);
5737 else
5738 cpt_set_fdi_bc_bifurcation(dev_priv, true);
5739
5740 break;
5741 case PIPE_C:
5742 cpt_set_fdi_bc_bifurcation(dev_priv, true);
5743
5744 break;
5745 default:
5746 BUG();
5747 }
5748}
5749
5750
5751
5752
5753
5754static struct intel_encoder *
5755intel_get_crtc_new_encoder(const struct intel_atomic_state *state,
5756 const struct intel_crtc_state *crtc_state)
5757{
5758 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
5759 const struct drm_connector_state *connector_state;
5760 const struct drm_connector *connector;
5761 struct intel_encoder *encoder = NULL;
5762 int num_encoders = 0;
5763 int i;
5764
5765 for_each_new_connector_in_state(&state->base, connector, connector_state, i) {
5766 if (connector_state->crtc != &crtc->base)
5767 continue;
5768
5769 encoder = to_intel_encoder(connector_state->best_encoder);
5770 num_encoders++;
5771 }
5772
5773 drm_WARN(encoder->base.dev, num_encoders != 1,
5774 "%d encoders for pipe %c\n",
5775 num_encoders, pipe_name(crtc->pipe));
5776
5777 return encoder;
5778}
5779
5780
5781
5782
5783
5784
5785
5786
5787
/*
 * Enable PCH resources required for PCH ports:
 *   - PCH PLLs
 *   - FDI training & RX/TX
 *   - update transcoder timings
 *   - DP transcoding bits
 *   - transcoder
 */
static void ilk_pch_enable(const struct intel_atomic_state *state,
			   const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	u32 temp;

	assert_pch_transcoder_disabled(dev_priv, pipe);

	if (IS_IVYBRIDGE(dev_priv))
		ivb_update_fdi_bc_bifurcation(crtc_state);

	/* Write the TU size bits before fdi link training, so that error
	   detection works. */
	intel_de_write(dev_priv, FDI_RX_TUSIZE1(pipe),
		       intel_de_read(dev_priv, PIPE_DATA_M1(pipe)) & TU_SIZE_MASK);

	/* For PCH output, training FDI link */
	dev_priv->display.fdi_link_train(crtc, crtc_state);

	/* We need to program the right clock selection before writing the pixel
	 * mutliplier into the DPLL. */
	if (HAS_PCH_CPT(dev_priv)) {
		u32 sel;

		temp = intel_de_read(dev_priv, PCH_DPLL_SEL);
		temp |= TRANS_DPLL_ENABLE(pipe);
		sel = TRANS_DPLLB_SEL(pipe);
		if (crtc_state->shared_dpll ==
		    intel_get_shared_dpll_by_id(dev_priv, DPLL_ID_PCH_PLL_B))
			temp |= sel;
		else
			temp &= ~sel;
		intel_de_write(dev_priv, PCH_DPLL_SEL, temp);
	}

	/* XXX: pch pll's can be enabled any time before we enable the PCH
	 * transcoder, and we actually should do this to not upset any PCH
	 * transcoder that already use the clock when we share it.
	 *
	 * Note that enable_shared_dpll tries to do the right thing, but
	 * get_shared_dpll unconditionally resets the pll - we need that to have
	 * the right LVDS enable sequence.
	 */
	intel_enable_shared_dpll(crtc_state);

	/* set transcoder timing, panel must allow it */
	assert_panel_unlocked(dev_priv, pipe);
	ilk_pch_transcoder_set_timings(crtc_state, pipe);

	intel_fdi_normal_train(crtc);

	/* For PCH DP, enable TRANS_DP_CTL */
	if (HAS_PCH_CPT(dev_priv) &&
	    intel_crtc_has_dp_encoder(crtc_state)) {
		const struct drm_display_mode *adjusted_mode =
			&crtc_state->hw.adjusted_mode;
		u32 bpc = (intel_de_read(dev_priv, PIPECONF(pipe)) & PIPECONF_BPC_MASK) >> 5;
		i915_reg_t reg = TRANS_DP_CTL(pipe);
		enum port port;

		temp = intel_de_read(dev_priv, reg);
		temp &= ~(TRANS_DP_PORT_SEL_MASK |
			  TRANS_DP_SYNC_MASK |
			  TRANS_DP_BPC_MASK);
		temp |= TRANS_DP_OUTPUT_ENABLE;
		temp |= bpc << 9; /* same format but at 11:9 */

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			temp |= TRANS_DP_HSYNC_ACTIVE_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			temp |= TRANS_DP_VSYNC_ACTIVE_HIGH;

		port = intel_get_crtc_new_encoder(state, crtc_state)->port;
		drm_WARN_ON(dev, port < PORT_B || port > PORT_D);
		temp |= TRANS_DP_PORT_SEL(port);

		intel_de_write(dev_priv, reg, temp);
	}

	ilk_enable_pch_transcoder(crtc_state);
}
5871
5872void lpt_pch_enable(const struct intel_crtc_state *crtc_state)
5873{
5874 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
5875 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
5876 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
5877
5878 assert_pch_transcoder_disabled(dev_priv, PIPE_A);
5879
5880 lpt_program_iclkip(crtc_state);
5881
5882
5883 ilk_pch_transcoder_set_timings(crtc_state, PIPE_A);
5884
5885 lpt_enable_pch_transcoder(dev_priv, cpu_transcoder);
5886}
5887
5888static void cpt_verify_modeset(struct drm_i915_private *dev_priv,
5889 enum pipe pipe)
5890{
5891 i915_reg_t dslreg = PIPEDSL(pipe);
5892 u32 temp;
5893
5894 temp = intel_de_read(dev_priv, dslreg);
5895 udelay(500);
5896 if (wait_for(intel_de_read(dev_priv, dslreg) != temp, 5)) {
5897 if (wait_for(intel_de_read(dev_priv, dslreg) != temp, 5))
5898 drm_err(&dev_priv->drm,
5899 "mode set failed: pipe %c stuck\n",
5900 pipe_name(pipe));
5901 }
5902}
5903
5904
5905
5906
5907
5908
5909
5910
5911
5912
5913
5914
5915
5916
5917
5918
5919
5920
5921
5922
5923
5924
5925
5926
5927
5928
5929
5930
5931
5932
5933
5934
5935
5936
5937
5938
5939
5940
5941
5942
5943
5944u16 skl_scaler_calc_phase(int sub, int scale, bool chroma_cosited)
5945{
5946 int phase = -0x8000;
5947 u16 trip = 0;
5948
5949 if (chroma_cosited)
5950 phase += (sub - 1) * 0x8000 / sub;
5951
5952 phase += scale / (2 * sub);
5953
5954
5955
5956
5957
5958
5959 WARN_ON(phase < -0x8000 || phase > 0x18000);
5960
5961 if (phase < 0)
5962 phase = 0x10000 + phase;
5963 else
5964 trip = PS_PHASE_TRIP;
5965
5966 return ((phase >> 2) & PS_PHASE_MASK) | trip;
5967}
5968
/* Pipe/plane scaler source and destination size limits, in pixels. */
#define SKL_MIN_SRC_W 8
#define SKL_MAX_SRC_W 4096
#define SKL_MIN_SRC_H 8
#define SKL_MAX_SRC_H 4096
#define SKL_MIN_DST_W 8
#define SKL_MAX_DST_W 4096
#define SKL_MIN_DST_H 8
#define SKL_MAX_DST_H 4096
/* Gen11+ (ICL) scalers accept wider sources and destinations. */
#define ICL_MAX_SRC_W 5120
#define ICL_MAX_SRC_H 4096
#define ICL_MAX_DST_W 5120
#define ICL_MAX_DST_H 4096
/* Larger minimum source size for semi-planar (e.g. NV12) YUV 4:2:0. */
#define SKL_MIN_YUV_420_SRC_W 16
#define SKL_MIN_YUV_420_SRC_H 16
5983
/*
 * skl_update_scaler - stage a scaler claim/release for a crtc or plane.
 * @crtc_state: crtc state owning the scaler_state being updated
 * @force_detach: release the scaler regardless of need (plane invisible,
 *	crtc inactive, ...)
 * @scaler_user: bit index identifying the user in scaler_state->scaler_users
 * @scaler_id: in/out; currently assigned scaler id for this user, or -1
 * @src_w/@src_h/@dst_w/@dst_h: source and destination sizes in pixels
 * @format/@modifier: framebuffer format info (NULL/0 for the crtc user)
 * @need_scaler: caller already determined a scaler is required
 *
 * Only stages the request in crtc_state->scaler_state; actual scaler
 * assignment happens later in the atomic flow.
 *
 * Returns 0 on success, -EINVAL when the request cannot be satisfied.
 */
static int
skl_update_scaler(struct intel_crtc_state *crtc_state, bool force_detach,
		  unsigned int scaler_user, int *scaler_id,
		  int src_w, int src_h, int dst_w, int dst_h,
		  const struct drm_format_info *format,
		  u64 modifier, bool need_scaler)
{
	struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	struct intel_crtc *intel_crtc =
		to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/* Any source/destination size mismatch needs a scaler. */
	if (src_w != dst_w || src_h != dst_h)
		need_scaler = true;

	/*
	 * Scaling together with IF-ID interlace mode is not supported on
	 * gen9+; reject such configurations.
	 */
	if (INTEL_GEN(dev_priv) >= 9 && crtc_state->hw.enable &&
	    need_scaler && adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		drm_dbg_kms(&dev_priv->drm,
			    "Pipe/Plane scaling not supported with IF-ID mode\n");
		return -EINVAL;
	}

	/*
	 * If scaling is no longer required or a detach was forced, stage
	 * freeing the scaler bound to this user: clear the user bit,
	 * mark the scaler unused and reset the id to -1.
	 */
	if (force_detach || !need_scaler) {
		if (*scaler_id >= 0) {
			scaler_state->scaler_users &= ~(1 << scaler_user);
			scaler_state->scalers[*scaler_id].in_use = 0;

			drm_dbg_kms(&dev_priv->drm,
				    "scaler_user index %u.%u: "
				    "Staged freeing scaler id %d scaler_users = 0x%x\n",
				    intel_crtc->pipe, scaler_user, *scaler_id,
				    scaler_state->scaler_users);
			*scaler_id = -1;
		}
		return 0;
	}

	/* Semi-planar YUV sources have a larger minimum size. */
	if (format && intel_format_info_is_yuv_semiplanar(format, modifier) &&
	    (src_h < SKL_MIN_YUV_420_SRC_H || src_w < SKL_MIN_YUV_420_SRC_W)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Planar YUV: src dimensions not met\n");
		return -EINVAL;
	}

	/* Range checks: gen11+ allows larger sizes than gen9/10. */
	if (src_w < SKL_MIN_SRC_W || src_h < SKL_MIN_SRC_H ||
	    dst_w < SKL_MIN_DST_W || dst_h < SKL_MIN_DST_H ||
	    (INTEL_GEN(dev_priv) >= 11 &&
	     (src_w > ICL_MAX_SRC_W || src_h > ICL_MAX_SRC_H ||
	      dst_w > ICL_MAX_DST_W || dst_h > ICL_MAX_DST_H)) ||
	    (INTEL_GEN(dev_priv) < 11 &&
	     (src_w > SKL_MAX_SRC_W || src_h > SKL_MAX_SRC_H ||
	      dst_w > SKL_MAX_DST_W || dst_h > SKL_MAX_DST_H))) {
		drm_dbg_kms(&dev_priv->drm,
			    "scaler_user index %u.%u: src %ux%u dst %ux%u "
			    "size is out of scaler range\n",
			    intel_crtc->pipe, scaler_user, src_w, src_h,
			    dst_w, dst_h);
		return -EINVAL;
	}

	/* Mark this crtc/plane as a scaler user. */
	scaler_state->scaler_users |= (1 << scaler_user);
	drm_dbg_kms(&dev_priv->drm, "scaler_user index %u.%u: "
		    "staged scaling request for %ux%u->%ux%u scaler_users = 0x%x\n",
		    intel_crtc->pipe, scaler_user, src_w, src_h, dst_w, dst_h,
		    scaler_state->scaler_users);

	return 0;
}
6078
6079static int skl_update_scaler_crtc(struct intel_crtc_state *crtc_state)
6080{
6081 const struct drm_display_mode *pipe_mode = &crtc_state->hw.pipe_mode;
6082 int width, height;
6083
6084 if (crtc_state->pch_pfit.enabled) {
6085 width = drm_rect_width(&crtc_state->pch_pfit.dst);
6086 height = drm_rect_height(&crtc_state->pch_pfit.dst);
6087 } else {
6088 width = pipe_mode->crtc_hdisplay;
6089 height = pipe_mode->crtc_vdisplay;
6090 }
6091 return skl_update_scaler(crtc_state, !crtc_state->hw.active,
6092 SKL_CRTC_INDEX,
6093 &crtc_state->scaler_state.scaler_id,
6094 crtc_state->pipe_src_w, crtc_state->pipe_src_h,
6095 width, height, NULL, 0,
6096 crtc_state->pch_pfit.enabled);
6097}
6098
6099
6100
6101
6102
6103
6104
6105
6106
6107
/*
 * skl_update_scaler_plane - stage a scaler request for a plane.
 * @crtc_state: crtc state owning the scaler
 * @plane_state: plane state to update
 *
 * Claims or releases a scaler for the plane via skl_update_scaler(),
 * then validates constraints that only apply when a scaler is actually
 * in use (no color keying, supported pixel format).
 *
 * Returns 0 on success or a negative error code.
 */
static int skl_update_scaler_plane(struct intel_crtc_state *crtc_state,
				   struct intel_plane_state *plane_state)
{
	struct intel_plane *intel_plane =
		to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *dev_priv = to_i915(intel_plane->base.dev);
	struct drm_framebuffer *fb = plane_state->hw.fb;
	int ret;
	/* A plane without an fb or that is invisible releases its scaler. */
	bool force_detach = !fb || !plane_state->uapi.visible;
	bool need_scaler = false;

	/* Non-HDR planes need a scaler to handle planar (semi-planar YUV) formats. */
	if (!icl_is_hdr_plane(dev_priv, intel_plane->id) &&
	    fb && intel_format_info_is_yuv_semiplanar(fb->format, fb->modifier))
		need_scaler = true;

	ret = skl_update_scaler(crtc_state, force_detach,
				drm_plane_index(&intel_plane->base),
				&plane_state->scaler_id,
				drm_rect_width(&plane_state->uapi.src) >> 16,
				drm_rect_height(&plane_state->uapi.src) >> 16,
				drm_rect_width(&plane_state->uapi.dst),
				drm_rect_height(&plane_state->uapi.dst),
				fb ? fb->format : NULL,
				fb ? fb->modifier : 0,
				need_scaler);

	/*
	 * Done if an error occurred or no scaler is in use for this plane.
	 * Note: past this point fb must be non-NULL, otherwise the scaler
	 * would have been released and scaler_id would be < 0.
	 */
	if (ret || plane_state->scaler_id < 0)
		return ret;

	/* Scaling combined with color keying is not supported. */
	if (plane_state->ckey.flags) {
		drm_dbg_kms(&dev_priv->drm,
			    "[PLANE:%d:%s] scaling with color key not allowed",
			    intel_plane->base.base.id,
			    intel_plane->base.name);
		return -EINVAL;
	}

	/* Check the source pixel format is one the scaler supports. */
	switch (fb->format->format) {
	case DRM_FORMAT_RGB565:
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ABGR8888:
	case DRM_FORMAT_ARGB8888:
	case DRM_FORMAT_XRGB2101010:
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ARGB2101010:
	case DRM_FORMAT_ABGR2101010:
	case DRM_FORMAT_YUYV:
	case DRM_FORMAT_YVYU:
	case DRM_FORMAT_UYVY:
	case DRM_FORMAT_VYUY:
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_XYUV8888:
	case DRM_FORMAT_P010:
	case DRM_FORMAT_P012:
	case DRM_FORMAT_P016:
	case DRM_FORMAT_Y210:
	case DRM_FORMAT_Y212:
	case DRM_FORMAT_Y216:
	case DRM_FORMAT_XVYU2101010:
	case DRM_FORMAT_XVYU12_16161616:
	case DRM_FORMAT_XVYU16161616:
		break;
	case DRM_FORMAT_XBGR16161616F:
	case DRM_FORMAT_ABGR16161616F:
	case DRM_FORMAT_XRGB16161616F:
	case DRM_FORMAT_ARGB16161616F:
		/* FP16 scaling is only supported on gen11+. */
		if (INTEL_GEN(dev_priv) >= 11)
			break;
		fallthrough;
	default:
		drm_dbg_kms(&dev_priv->drm,
			    "[PLANE:%d:%s] FB:%d unsupported scaling format 0x%x\n",
			    intel_plane->base.base.id, intel_plane->base.name,
			    fb->base.id, fb->format->format);
		return -EINVAL;
	}

	return 0;
}
6191
6192void skl_scaler_disable(const struct intel_crtc_state *old_crtc_state)
6193{
6194 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
6195 int i;
6196
6197 for (i = 0; i < crtc->num_scalers; i++)
6198 skl_detach_scaler(crtc, i);
6199}
6200
/* Map a linear coefficient index to its tap position within a 7-tap group. */
static int cnl_coef_tap(int i)
{
	/* Equivalent to i % 7 (C truncating division semantics). */
	return i - 7 * (i / 7);
}
6205
6206static u16 cnl_nearest_filter_coef(int t)
6207{
6208 return t == 3 ? 0x0800 : 0x3000;
6209}
6210
6211
6212
6213
6214
6215
6216
6217
6218
6219
6220
6221
6222
6223
6224
6225
6226
6227
6228
6229
6230
6231
6232
6233
6234
6235
6236
6237
6238
6239
6240
6241
6242
6243
6244
6245
6246
6247
6248static void cnl_program_nearest_filter_coefs(struct drm_i915_private *dev_priv,
6249 enum pipe pipe, int id, int set)
6250{
6251 int i;
6252
6253 intel_de_write_fw(dev_priv, CNL_PS_COEF_INDEX_SET(pipe, id, set),
6254 PS_COEE_INDEX_AUTO_INC);
6255
6256 for (i = 0; i < 17 * 7; i += 2) {
6257 u32 tmp;
6258 int t;
6259
6260 t = cnl_coef_tap(i);
6261 tmp = cnl_nearest_filter_coef(t);
6262
6263 t = cnl_coef_tap(i + 1);
6264 tmp |= cnl_nearest_filter_coef(t) << 16;
6265
6266 intel_de_write_fw(dev_priv, CNL_PS_COEF_DATA_SET(pipe, id, set),
6267 tmp);
6268 }
6269
6270 intel_de_write_fw(dev_priv, CNL_PS_COEF_INDEX_SET(pipe, id, set), 0);
6271}
6272
6273inline u32 skl_scaler_get_filter_select(enum drm_scaling_filter filter, int set)
6274{
6275 if (filter == DRM_SCALING_FILTER_NEAREST_NEIGHBOR) {
6276 return (PS_FILTER_PROGRAMMED |
6277 PS_Y_VERT_FILTER_SELECT(set) |
6278 PS_Y_HORZ_FILTER_SELECT(set) |
6279 PS_UV_VERT_FILTER_SELECT(set) |
6280 PS_UV_HORZ_FILTER_SELECT(set));
6281 }
6282
6283 return PS_FILTER_MEDIUM;
6284}
6285
6286void skl_scaler_setup_filter(struct drm_i915_private *dev_priv, enum pipe pipe,
6287 int id, int set, enum drm_scaling_filter filter)
6288{
6289 switch (filter) {
6290 case DRM_SCALING_FILTER_DEFAULT:
6291 break;
6292 case DRM_SCALING_FILTER_NEAREST_NEIGHBOR:
6293 cnl_program_nearest_filter_coefs(dev_priv, pipe, id, set);
6294 break;
6295 default:
6296 MISSING_CASE(filter);
6297 }
6298}
6299
/*
 * Enable panel fitting on SKL+ by programming one of the pipe scalers
 * with the pch_pfit destination window.
 */
static void skl_pfit_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	/* Source window: the full pipe source size, in 16.16 fixed point. */
	struct drm_rect src = {
		.x2 = crtc_state->pipe_src_w << 16,
		.y2 = crtc_state->pipe_src_h << 16,
	};
	const struct drm_rect *dst = &crtc_state->pch_pfit.dst;
	u16 uv_rgb_hphase, uv_rgb_vphase;
	enum pipe pipe = crtc->pipe;
	int width = drm_rect_width(dst);
	int height = drm_rect_height(dst);
	int x = dst->x1;
	int y = dst->y1;
	int hscale, vscale;
	unsigned long irqflags;
	int id;
	u32 ps_ctrl;

	if (!crtc_state->pch_pfit.enabled)
		return;

	/* pfit enabled implies a scaler was reserved during atomic check. */
	if (drm_WARN_ON(&dev_priv->drm,
			crtc_state->scaler_state.scaler_id < 0))
		return;

	hscale = drm_rect_calc_hscale(&src, dst, 0, INT_MAX);
	vscale = drm_rect_calc_vscale(&src, dst, 0, INT_MAX);

	uv_rgb_hphase = skl_scaler_calc_phase(1, hscale, false);
	uv_rgb_vphase = skl_scaler_calc_phase(1, vscale, false);

	id = scaler_state->scaler_id;

	ps_ctrl = skl_scaler_get_filter_select(crtc_state->hw.scaling_filter, 0);
	ps_ctrl |= PS_SCALER_EN | scaler_state->scalers[id].mode;

	/* The _fw register accesses below require holding the uncore lock. */
	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	skl_scaler_setup_filter(dev_priv, pipe, id, 0,
				crtc_state->hw.scaling_filter);

	intel_de_write_fw(dev_priv, SKL_PS_CTRL(pipe, id), ps_ctrl);

	intel_de_write_fw(dev_priv, SKL_PS_VPHASE(pipe, id),
			  PS_Y_PHASE(0) | PS_UV_RGB_PHASE(uv_rgb_vphase));
	intel_de_write_fw(dev_priv, SKL_PS_HPHASE(pipe, id),
			  PS_Y_PHASE(0) | PS_UV_RGB_PHASE(uv_rgb_hphase));
	intel_de_write_fw(dev_priv, SKL_PS_WIN_POS(pipe, id),
			  x << 16 | y);
	intel_de_write_fw(dev_priv, SKL_PS_WIN_SZ(pipe, id),
			  width << 16 | height);

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
6358
6359static void ilk_pfit_enable(const struct intel_crtc_state *crtc_state)
6360{
6361 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
6362 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
6363 const struct drm_rect *dst = &crtc_state->pch_pfit.dst;
6364 enum pipe pipe = crtc->pipe;
6365 int width = drm_rect_width(dst);
6366 int height = drm_rect_height(dst);
6367 int x = dst->x1;
6368 int y = dst->y1;
6369
6370 if (!crtc_state->pch_pfit.enabled)
6371 return;
6372
6373
6374
6375
6376
6377 if (IS_IVYBRIDGE(dev_priv) || IS_HASWELL(dev_priv))
6378 intel_de_write(dev_priv, PF_CTL(pipe), PF_ENABLE |
6379 PF_FILTER_MED_3x3 | PF_PIPE_SEL_IVB(pipe));
6380 else
6381 intel_de_write(dev_priv, PF_CTL(pipe), PF_ENABLE |
6382 PF_FILTER_MED_3x3);
6383 intel_de_write(dev_priv, PF_WIN_POS(pipe), x << 16 | y);
6384 intel_de_write(dev_priv, PF_WIN_SZ(pipe), width << 16 | height);
6385}
6386
/* Enable IPS (Intermediate Pixel Storage) on HSW/BDW. */
void hsw_enable_ips(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);

	if (!crtc_state->ips_enabled)
		return;

	/*
	 * We can only enable IPS after we enable a plane and wait for a
	 * vblank — hence the assertion that a non-cursor plane is active.
	 */
	drm_WARN_ON(dev, !(crtc_state->active_planes & ~BIT(PLANE_CURSOR)));

	if (IS_BROADWELL(dev_priv)) {
		/* BDW controls IPS through the pcode mailbox. */
		drm_WARN_ON(dev, sandybridge_pcode_write(dev_priv, DISPLAY_IPS_CONTROL,
							 IPS_ENABLE | IPS_PCODE_CONTROL));
		/*
		 * NOTE(review): unlike the HSW path below, no readback/wait
		 * on IPS_CTL is done here — presumably the IPS_CTL state is
		 * not reliable after a mailbox enable on BDW.
		 */
	} else {
		intel_de_write(dev_priv, IPS_CTL, IPS_ENABLE);
		/*
		 * The bit only becomes 1 in the next vblank, so this wait here
		 * is essentially intel_wait_for_vblank. If we don't have this
		 * and don't wait for vblanks until the end of crtc_enable, then
		 * the HW state readout code will complain that the expected
		 * IPS_CTL value is not the one we read.
		 */
		if (intel_de_wait_for_set(dev_priv, IPS_CTL, IPS_ENABLE, 50))
			drm_err(&dev_priv->drm,
				"Timed out waiting for IPS enable\n");
	}
}
6423
/* Disable IPS; must be followed by a vblank wait before planes go away. */
void hsw_disable_ips(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);

	if (!crtc_state->ips_enabled)
		return;

	if (IS_BROADWELL(dev_priv)) {
		/* BDW disables IPS through the pcode mailbox. */
		drm_WARN_ON(dev,
			    sandybridge_pcode_write(dev_priv, DISPLAY_IPS_CONTROL, 0));
		/*
		 * Wait for PCODE to finish disabling IPS. The 100ms timeout
		 * here is generous — NOTE(review): the bspec-specified wait
		 * is reportedly ~42ms; confirm against current bspec.
		 */
		if (intel_de_wait_for_clear(dev_priv, IPS_CTL, IPS_ENABLE, 100))
			drm_err(&dev_priv->drm,
				"Timed out waiting for IPS disable\n");
	} else {
		intel_de_write(dev_priv, IPS_CTL, 0);
		intel_de_posting_read(dev_priv, IPS_CTL);
	}

	/* We need to wait for a vblank before we can disable the plane. */
	intel_wait_for_vblank(dev_priv, crtc->pipe);
}
6452
6453static void intel_crtc_dpms_overlay_disable(struct intel_crtc *intel_crtc)
6454{
6455 if (intel_crtc->overlay)
6456 (void) intel_overlay_switch_off(intel_crtc->overlay);
6457
6458
6459
6460
6461}
6462
/*
 * Decide whether IPS must be disabled before this plane update: on a
 * modeset, when the new state no longer wants IPS, or for the HSW split
 * gamma workaround below.
 */
static bool hsw_pre_update_disable_ips(const struct intel_crtc_state *old_crtc_state,
				       const struct intel_crtc_state *new_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (!old_crtc_state->ips_enabled)
		return false;

	if (needs_modeset(new_crtc_state))
		return true;

	/*
	 * Workaround: do not read or write the pipe palette/gamma data while
	 * GAMMA_MODE is configured for split gamma and IPS_CTL has IPS
	 * enabled — so disable IPS before we program the LUT.
	 */
	if (IS_HASWELL(dev_priv) &&
	    (new_crtc_state->uapi.color_mgmt_changed ||
	     new_crtc_state->update_pipe) &&
	    new_crtc_state->gamma_mode == GAMMA_MODE_MODE_SPLIT)
		return true;

	return !new_crtc_state->ips_enabled;
}
6489
/*
 * Decide whether IPS must be (re-)enabled after this plane update —
 * mirror image of hsw_pre_update_disable_ips().
 */
static bool hsw_post_update_enable_ips(const struct intel_crtc_state *old_crtc_state,
				       const struct intel_crtc_state *new_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (!new_crtc_state->ips_enabled)
		return false;

	if (needs_modeset(new_crtc_state))
		return true;

	/*
	 * Workaround: do not read or write the pipe palette/gamma data while
	 * GAMMA_MODE is configured for split gamma and IPS_CTL has IPS
	 * enabled — IPS was disabled before the LUT update, re-enable now.
	 */
	if (IS_HASWELL(dev_priv) &&
	    (new_crtc_state->uapi.color_mgmt_changed ||
	     new_crtc_state->update_pipe) &&
	    new_crtc_state->gamma_mode == GAMMA_MODE_MODE_SPLIT)
		return true;

	/*
	 * We can't read out IPS on broadwell, assume the worst and
	 * forcibly enable IPS on the first fastset.
	 */
	if (new_crtc_state->update_pipe && old_crtc_state->inherited)
		return true;

	return !old_crtc_state->ips_enabled;
}
6523
6524static bool needs_nv12_wa(const struct intel_crtc_state *crtc_state)
6525{
6526 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
6527
6528 if (!crtc_state->nv12_planes)
6529 return false;
6530
6531
6532 if (IS_GEN(dev_priv, 9) && !IS_GEMINILAKE(dev_priv))
6533 return true;
6534
6535 return false;
6536}
6537
6538static bool needs_scalerclk_wa(const struct intel_crtc_state *crtc_state)
6539{
6540 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
6541
6542
6543 if (crtc_state->scaler_state.scaler_users > 0 && IS_GEN(dev_priv, 11))
6544 return true;
6545
6546 return false;
6547}
6548
6549static bool planes_enabling(const struct intel_crtc_state *old_crtc_state,
6550 const struct intel_crtc_state *new_crtc_state)
6551{
6552 return (!old_crtc_state->active_planes || needs_modeset(new_crtc_state)) &&
6553 new_crtc_state->active_planes;
6554}
6555
6556static bool planes_disabling(const struct intel_crtc_state *old_crtc_state,
6557 const struct intel_crtc_state *new_crtc_state)
6558{
6559 return old_crtc_state->active_planes &&
6560 (!new_crtc_state->active_planes || needs_modeset(new_crtc_state));
6561}
6562
/*
 * Post-plane-update work for one crtc: frontbuffer flip notification,
 * optimal watermarks, IPS re-enable, FBC and tearing down workarounds
 * that are no longer needed.
 */
static void intel_post_plane_update(struct intel_atomic_state *state,
				    struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	enum pipe pipe = crtc->pipe;

	intel_frontbuffer_flip(dev_priv, new_crtc_state->fb_bits);

	if (new_crtc_state->update_wm_post && new_crtc_state->hw.active)
		intel_update_watermarks(crtc);

	if (hsw_post_update_enable_ips(old_crtc_state, new_crtc_state))
		hsw_enable_ips(new_crtc_state);

	intel_fbc_post_update(state, crtc);

	/* Tear down the gen9 planar-plane WA once no longer needed. */
	if (needs_nv12_wa(old_crtc_state) &&
	    !needs_nv12_wa(new_crtc_state))
		skl_wa_827(dev_priv, pipe, false);

	/* Tear down the gen11 scaler clock gating WA once no longer needed. */
	if (needs_scalerclk_wa(old_crtc_state) &&
	    !needs_scalerclk_wa(new_crtc_state))
		icl_wa_scalerclkgating(dev_priv, pipe, false);
}
6591
/*
 * Workaround for disabling async flips on gen9/10: clear the async flip
 * bit in each updated plane's PLANE_CTL, re-arm the update by rewriting
 * PLANE_SURF, then wait a vblank for the change to latch (the async
 * enable bit is double buffered and only latched at start of vblank).
 */
static void skl_disable_async_flip_wa(struct intel_atomic_state *state,
				      struct intel_crtc *crtc,
				      const struct intel_crtc_state *new_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_plane *plane;
	struct intel_plane_state *new_plane_state;
	int i;

	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
		u32 update_mask = new_crtc_state->update_planes;
		u32 plane_ctl, surf_addr;
		enum plane_id plane_id;
		unsigned long irqflags;
		enum pipe pipe;

		/* Only planes of this crtc that are part of the update. */
		if (crtc->pipe != plane->pipe ||
		    !(update_mask & BIT(plane->id)))
			continue;

		plane_id = plane->id;
		pipe = plane->pipe;

		/* Read-modify-write under the uncore lock (_fw accessors). */
		spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);
		plane_ctl = intel_de_read_fw(dev_priv, PLANE_CTL(pipe, plane_id));
		surf_addr = intel_de_read_fw(dev_priv, PLANE_SURF(pipe, plane_id));

		plane_ctl &= ~PLANE_CTL_ASYNC_FLIP;

		/* Writing PLANE_SURF arms the plane update. */
		intel_de_write_fw(dev_priv, PLANE_CTL(pipe, plane_id), plane_ctl);
		intel_de_write_fw(dev_priv, PLANE_SURF(pipe, plane_id), surf_addr);
		spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
	}

	/* Wait for the cleared async bit to latch at start of vblank. */
	intel_wait_for_vblank(dev_priv, crtc->pipe);
}
6628
/*
 * Pre-plane-update work for one crtc: IPS disable, FBC pre-update,
 * arming hardware workarounds, self-refresh/LP watermark disables that
 * need a vblank to settle, and pre-vblank watermark programming.
 */
static void intel_pre_plane_update(struct intel_atomic_state *state,
				   struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	enum pipe pipe = crtc->pipe;

	if (hsw_pre_update_disable_ips(old_crtc_state, new_crtc_state))
		hsw_disable_ips(old_crtc_state);

	if (intel_fbc_pre_update(state, crtc))
		intel_wait_for_vblank(dev_priv, pipe);

	/* Arm the gen9 planar-plane WA before planar planes are enabled. */
	if (!needs_nv12_wa(old_crtc_state) &&
	    needs_nv12_wa(new_crtc_state))
		skl_wa_827(dev_priv, pipe, true);

	/* Arm the gen11 scaler clock gating WA before scalers are enabled. */
	if (!needs_scalerclk_wa(old_crtc_state) &&
	    needs_scalerclk_wa(new_crtc_state))
		icl_wa_scalerclkgating(dev_priv, pipe, true);

	/*
	 * Vblank time updates from the shadow to live plane control register
	 * are blocked if the memory self-refresh mode is active at that
	 * moment. So to make sure the plane gets truly disabled, disable
	 * first the self-refresh mode. The self-refresh enable bit in turn
	 * will be checked/applied by the HW only at the next frame start
	 * event which is after the vblank start event, so we need to have a
	 * wait-for-vblank between disabling the plane and the pipe.
	 */
	if (HAS_GMCH(dev_priv) && old_crtc_state->hw.active &&
	    new_crtc_state->disable_cxsr && intel_set_memory_cxsr(dev_priv, false))
		intel_wait_for_vblank(dev_priv, pipe);

	/*
	 * IVB workaround: must disable low power watermarks for at least
	 * one frame before enabling scaling and before enabling the low
	 * power watermarks again after scaling is disabled. The LP WM
	 * disable only takes effect at the next frame start, hence the
	 * vblank wait.
	 */
	if (old_crtc_state->hw.active &&
	    new_crtc_state->disable_lp_wm && ilk_disable_lp_wm(dev_priv))
		intel_wait_for_vblank(dev_priv, pipe);

	/*
	 * If we're doing a modeset we don't need to do any pre-vblank
	 * watermark programming here.
	 */
	if (!needs_modeset(new_crtc_state)) {
		/*
		 * For platforms that support atomic watermarks, program the
		 * 'intermediate' watermarks immediately. On pre-gen9 platforms,
		 * these will be the intermediate values that are safe for both
		 * pre- and post-vblank; when vblank happens, the 'active'
		 * values will be set to the final 'target' values and we'll
		 * do this again to get the optimal watermarks. For gen9+
		 * platforms, the values we program here will be the final
		 * target values which will get automatically latched at
		 * vblank time; no further programming will be needed.
		 *
		 * If a platform hasn't been transitioned to atomic watermarks
		 * yet, we'll continue to update watermarks the old way, if
		 * flags tell us to.
		 */
		if (dev_priv->display.initial_watermarks)
			dev_priv->display.initial_watermarks(state, crtc);
		else if (new_crtc_state->update_wm_pre)
			intel_update_watermarks(crtc);
	}

	/*
	 * Gen2 reports pipe underruns whenever all planes are disabled.
	 * So disable underrun reporting before all the planes get disabled.
	 *
	 * We do this after .initial_watermarks() so that we have a
	 * chance of catching underruns with the intermediate watermarks
	 * vs. the old plane configuration.
	 */
	if (IS_GEN(dev_priv, 2) && planes_disabling(old_crtc_state, new_crtc_state))
		intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);

	/*
	 * WA for platforms where async address update enable bit
	 * is double buffered and only latched at start of vblank.
	 */
	if (old_crtc_state->uapi.async_flip &&
	    !new_crtc_state->uapi.async_flip &&
	    IS_GEN_RANGE(dev_priv, 9, 10))
		skl_disable_async_flip_wa(state, crtc, new_crtc_state);
}
6724
6725static void intel_crtc_disable_planes(struct intel_atomic_state *state,
6726 struct intel_crtc *crtc)
6727{
6728 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
6729 const struct intel_crtc_state *new_crtc_state =
6730 intel_atomic_get_new_crtc_state(state, crtc);
6731 unsigned int update_mask = new_crtc_state->update_planes;
6732 const struct intel_plane_state *old_plane_state;
6733 struct intel_plane *plane;
6734 unsigned fb_bits = 0;
6735 int i;
6736
6737 intel_crtc_dpms_overlay_disable(crtc);
6738
6739 for_each_old_intel_plane_in_state(state, plane, old_plane_state, i) {
6740 if (crtc->pipe != plane->pipe ||
6741 !(update_mask & BIT(plane->id)))
6742 continue;
6743
6744 intel_disable_plane(plane, new_crtc_state);
6745
6746 if (old_plane_state->uapi.visible)
6747 fb_bits |= plane->frontbuffer_bit;
6748 }
6749
6750 intel_frontbuffer_flip(dev_priv, fb_bits);
6751}
6752
6753
6754
6755
6756
6757
6758
6759
6760
6761
6762static struct intel_encoder *
6763intel_connector_primary_encoder(struct intel_connector *connector)
6764{
6765 struct intel_encoder *encoder;
6766
6767 if (connector->mst_port)
6768 return &dp_to_dig_port(connector->mst_port)->base;
6769
6770 encoder = intel_attached_encoder(connector);
6771 drm_WARN_ON(connector->base.dev, !encoder);
6772
6773 return encoder;
6774}
6775
6776static void intel_encoders_update_prepare(struct intel_atomic_state *state)
6777{
6778 struct drm_connector_state *new_conn_state;
6779 struct drm_connector *connector;
6780 int i;
6781
6782 for_each_new_connector_in_state(&state->base, connector, new_conn_state,
6783 i) {
6784 struct intel_connector *intel_connector;
6785 struct intel_encoder *encoder;
6786 struct intel_crtc *crtc;
6787
6788 if (!intel_connector_needs_modeset(state, connector))
6789 continue;
6790
6791 intel_connector = to_intel_connector(connector);
6792 encoder = intel_connector_primary_encoder(intel_connector);
6793 if (!encoder->update_prepare)
6794 continue;
6795
6796 crtc = new_conn_state->crtc ?
6797 to_intel_crtc(new_conn_state->crtc) : NULL;
6798 encoder->update_prepare(state, encoder, crtc);
6799 }
6800}
6801
6802static void intel_encoders_update_complete(struct intel_atomic_state *state)
6803{
6804 struct drm_connector_state *new_conn_state;
6805 struct drm_connector *connector;
6806 int i;
6807
6808 for_each_new_connector_in_state(&state->base, connector, new_conn_state,
6809 i) {
6810 struct intel_connector *intel_connector;
6811 struct intel_encoder *encoder;
6812 struct intel_crtc *crtc;
6813
6814 if (!intel_connector_needs_modeset(state, connector))
6815 continue;
6816
6817 intel_connector = to_intel_connector(connector);
6818 encoder = intel_connector_primary_encoder(intel_connector);
6819 if (!encoder->update_complete)
6820 continue;
6821
6822 crtc = new_conn_state->crtc ?
6823 to_intel_crtc(new_conn_state->crtc) : NULL;
6824 encoder->update_complete(state, encoder, crtc);
6825 }
6826}
6827
6828static void intel_encoders_pre_pll_enable(struct intel_atomic_state *state,
6829 struct intel_crtc *crtc)
6830{
6831 const struct intel_crtc_state *crtc_state =
6832 intel_atomic_get_new_crtc_state(state, crtc);
6833 const struct drm_connector_state *conn_state;
6834 struct drm_connector *conn;
6835 int i;
6836
6837 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6838 struct intel_encoder *encoder =
6839 to_intel_encoder(conn_state->best_encoder);
6840
6841 if (conn_state->crtc != &crtc->base)
6842 continue;
6843
6844 if (encoder->pre_pll_enable)
6845 encoder->pre_pll_enable(state, encoder,
6846 crtc_state, conn_state);
6847 }
6848}
6849
6850static void intel_encoders_pre_enable(struct intel_atomic_state *state,
6851 struct intel_crtc *crtc)
6852{
6853 const struct intel_crtc_state *crtc_state =
6854 intel_atomic_get_new_crtc_state(state, crtc);
6855 const struct drm_connector_state *conn_state;
6856 struct drm_connector *conn;
6857 int i;
6858
6859 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6860 struct intel_encoder *encoder =
6861 to_intel_encoder(conn_state->best_encoder);
6862
6863 if (conn_state->crtc != &crtc->base)
6864 continue;
6865
6866 if (encoder->pre_enable)
6867 encoder->pre_enable(state, encoder,
6868 crtc_state, conn_state);
6869 }
6870}
6871
6872static void intel_encoders_enable(struct intel_atomic_state *state,
6873 struct intel_crtc *crtc)
6874{
6875 const struct intel_crtc_state *crtc_state =
6876 intel_atomic_get_new_crtc_state(state, crtc);
6877 const struct drm_connector_state *conn_state;
6878 struct drm_connector *conn;
6879 int i;
6880
6881 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6882 struct intel_encoder *encoder =
6883 to_intel_encoder(conn_state->best_encoder);
6884
6885 if (conn_state->crtc != &crtc->base)
6886 continue;
6887
6888 if (encoder->enable)
6889 encoder->enable(state, encoder,
6890 crtc_state, conn_state);
6891 intel_opregion_notify_encoder(encoder, true);
6892 }
6893}
6894
6895static void intel_encoders_disable(struct intel_atomic_state *state,
6896 struct intel_crtc *crtc)
6897{
6898 const struct intel_crtc_state *old_crtc_state =
6899 intel_atomic_get_old_crtc_state(state, crtc);
6900 const struct drm_connector_state *old_conn_state;
6901 struct drm_connector *conn;
6902 int i;
6903
6904 for_each_old_connector_in_state(&state->base, conn, old_conn_state, i) {
6905 struct intel_encoder *encoder =
6906 to_intel_encoder(old_conn_state->best_encoder);
6907
6908 if (old_conn_state->crtc != &crtc->base)
6909 continue;
6910
6911 intel_opregion_notify_encoder(encoder, false);
6912 if (encoder->disable)
6913 encoder->disable(state, encoder,
6914 old_crtc_state, old_conn_state);
6915 }
6916}
6917
6918static void intel_encoders_post_disable(struct intel_atomic_state *state,
6919 struct intel_crtc *crtc)
6920{
6921 const struct intel_crtc_state *old_crtc_state =
6922 intel_atomic_get_old_crtc_state(state, crtc);
6923 const struct drm_connector_state *old_conn_state;
6924 struct drm_connector *conn;
6925 int i;
6926
6927 for_each_old_connector_in_state(&state->base, conn, old_conn_state, i) {
6928 struct intel_encoder *encoder =
6929 to_intel_encoder(old_conn_state->best_encoder);
6930
6931 if (old_conn_state->crtc != &crtc->base)
6932 continue;
6933
6934 if (encoder->post_disable)
6935 encoder->post_disable(state, encoder,
6936 old_crtc_state, old_conn_state);
6937 }
6938}
6939
6940static void intel_encoders_post_pll_disable(struct intel_atomic_state *state,
6941 struct intel_crtc *crtc)
6942{
6943 const struct intel_crtc_state *old_crtc_state =
6944 intel_atomic_get_old_crtc_state(state, crtc);
6945 const struct drm_connector_state *old_conn_state;
6946 struct drm_connector *conn;
6947 int i;
6948
6949 for_each_old_connector_in_state(&state->base, conn, old_conn_state, i) {
6950 struct intel_encoder *encoder =
6951 to_intel_encoder(old_conn_state->best_encoder);
6952
6953 if (old_conn_state->crtc != &crtc->base)
6954 continue;
6955
6956 if (encoder->post_pll_disable)
6957 encoder->post_pll_disable(state, encoder,
6958 old_crtc_state, old_conn_state);
6959 }
6960}
6961
6962static void intel_encoders_update_pipe(struct intel_atomic_state *state,
6963 struct intel_crtc *crtc)
6964{
6965 const struct intel_crtc_state *crtc_state =
6966 intel_atomic_get_new_crtc_state(state, crtc);
6967 const struct drm_connector_state *conn_state;
6968 struct drm_connector *conn;
6969 int i;
6970
6971 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6972 struct intel_encoder *encoder =
6973 to_intel_encoder(conn_state->best_encoder);
6974
6975 if (conn_state->crtc != &crtc->base)
6976 continue;
6977
6978 if (encoder->update_pipe)
6979 encoder->update_pipe(state, encoder,
6980 crtc_state, conn_state);
6981 }
6982}
6983
6984static void intel_disable_primary_plane(const struct intel_crtc_state *crtc_state)
6985{
6986 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
6987 struct intel_plane *plane = to_intel_plane(crtc->base.primary);
6988
6989 plane->disable_plane(plane, crtc_state);
6990}
6991
/*
 * Full crtc enable sequence for ILK-class (PCH split) hardware. The
 * ordering of steps follows the hardware modeset sequence and must be
 * preserved.
 */
static void ilk_crtc_enable(struct intel_atomic_state *state,
			    struct intel_crtc *crtc)
{
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (drm_WARN_ON(&dev_priv->drm, crtc->active))
		return;

	/*
	 * Spurious CPU/PCH pipe underruns can happen during the enable
	 * sequence (e.g. around FDI training and PCH port enabling), so
	 * suppress underrun reporting until the pipe is fully up; it is
	 * re-enabled at the end of this function.
	 */
	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, false);

	if (new_crtc_state->has_pch_encoder)
		intel_prepare_shared_dpll(new_crtc_state);

	if (intel_crtc_has_dp_encoder(new_crtc_state))
		intel_dp_set_m_n(new_crtc_state, M1_N1);

	intel_set_transcoder_timings(new_crtc_state);
	intel_set_pipe_src_size(new_crtc_state);

	if (new_crtc_state->has_pch_encoder)
		intel_cpu_transcoder_set_m_n(new_crtc_state,
					     &new_crtc_state->fdi_m_n, NULL);

	ilk_set_pipeconf(new_crtc_state);

	crtc->active = true;

	intel_encoders_pre_enable(state, crtc);

	if (new_crtc_state->has_pch_encoder) {
		/*
		 * Note: FDI PLL enabling _must_ be done before we enable the
		 * cpu pipes, hence this is separate from all the other fdi/pch
		 * enabling.
		 */
		ilk_fdi_pll_enable(new_crtc_state);
	} else {
		assert_fdi_tx_disabled(dev_priv, pipe);
		assert_fdi_rx_disabled(dev_priv, pipe);
	}

	ilk_pfit_enable(new_crtc_state);

	/*
	 * On ILK+ LUT must be loaded before the pipe is running but with
	 * clocks enabled
	 */
	intel_color_load_luts(new_crtc_state);
	intel_color_commit(new_crtc_state);
	/* update DSPCNTR to configure gamma for pipe bottom color */
	intel_disable_primary_plane(new_crtc_state);

	if (dev_priv->display.initial_watermarks)
		dev_priv->display.initial_watermarks(state, crtc);
	intel_enable_pipe(new_crtc_state);

	if (new_crtc_state->has_pch_encoder)
		ilk_pch_enable(state, new_crtc_state);

	intel_crtc_vblank_on(new_crtc_state);

	intel_encoders_enable(state, crtc);

	if (HAS_PCH_CPT(dev_priv))
		cpt_verify_modeset(dev_priv, pipe);

	/*
	 * Must wait for vblank to avoid spurious PCH FIFO underruns.
	 * And a second vblank wait is needed at least on ILK with
	 * some interlaced HDMI modes. Let's do the double wait always
	 * in case of FDI.
	 */
	if (new_crtc_state->has_pch_encoder) {
		intel_wait_for_vblank(dev_priv, pipe);
		intel_wait_for_vblank(dev_priv, pipe);
	}
	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, true);
}
7083
7084
7085static bool hsw_crtc_supports_ips(struct intel_crtc *crtc)
7086{
7087 return HAS_IPS(to_i915(crtc->base.dev)) && crtc->pipe == PIPE_A;
7088}
7089
7090static void glk_pipe_scaler_clock_gating_wa(struct drm_i915_private *dev_priv,
7091 enum pipe pipe, bool apply)
7092{
7093 u32 val = intel_de_read(dev_priv, CLKGATE_DIS_PSL(pipe));
7094 u32 mask = DPF_GATING_DIS | DPF_RAM_GATING_DIS | DPFR_GATING_DIS;
7095
7096 if (apply)
7097 val |= mask;
7098 else
7099 val &= ~mask;
7100
7101 intel_de_write(dev_priv, CLKGATE_DIS_PSL(pipe), val);
7102}
7103
7104static void icl_pipe_mbus_enable(struct intel_crtc *crtc)
7105{
7106 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7107 enum pipe pipe = crtc->pipe;
7108 u32 val;
7109
7110 val = MBUS_DBOX_A_CREDIT(2);
7111
7112 if (INTEL_GEN(dev_priv) >= 12) {
7113 val |= MBUS_DBOX_BW_CREDIT(2);
7114 val |= MBUS_DBOX_B_CREDIT(12);
7115 } else {
7116 val |= MBUS_DBOX_BW_CREDIT(1);
7117 val |= MBUS_DBOX_B_CREDIT(8);
7118 }
7119
7120 intel_de_write(dev_priv, PIPE_MBUS_DBOX_CTL(pipe), val);
7121}
7122
7123static void hsw_set_linetime_wm(const struct intel_crtc_state *crtc_state)
7124{
7125 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
7126 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7127
7128 intel_de_write(dev_priv, WM_LINETIME(crtc->pipe),
7129 HSW_LINETIME(crtc_state->linetime) |
7130 HSW_IPS_LINETIME(crtc_state->ips_linetime));
7131}
7132
7133static void hsw_set_frame_start_delay(const struct intel_crtc_state *crtc_state)
7134{
7135 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
7136 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7137 i915_reg_t reg = CHICKEN_TRANS(crtc_state->cpu_transcoder);
7138 u32 val;
7139
7140 val = intel_de_read(dev_priv, reg);
7141 val &= ~HSW_FRAME_START_DELAY_MASK;
7142 val |= HSW_FRAME_START_DELAY(0);
7143 intel_de_write(dev_priv, reg, val);
7144}
7145
/*
 * Pre-enable handling for bigjoiner configurations: the master pipe owns
 * the encoder/DPLL, the slave pipe piggybacks on the master's setup and
 * only enables its own DSC engine.
 */
static void icl_ddi_bigjoiner_pre_enable(struct intel_atomic_state *state,
					 const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *master = to_intel_crtc(crtc_state->uapi.crtc);
	struct intel_crtc_state *master_crtc_state;
	struct drm_connector_state *conn_state;
	struct drm_connector *conn;
	struct intel_encoder *encoder = NULL;
	int i;

	/* For a slave pipe the relevant master is the linked CRTC. */
	if (crtc_state->bigjoiner_slave)
		master = crtc_state->bigjoiner_linked_crtc;

	master_crtc_state = intel_atomic_get_new_crtc_state(state, master);

	/* Locate the encoder driving the master pipe (first match wins). */
	for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
		if (conn_state->crtc != &master->base)
			continue;

		encoder = to_intel_encoder(conn_state->best_encoder);
		break;
	}

	if (!crtc_state->bigjoiner_slave) {
		/* Master: enable its DSC engine with the real encoder. */
		intel_dsc_enable(encoder, crtc_state);
	} else {
		/*
		 * Slave: bring up the master's PLL and run the encoder
		 * pre-enable hooks on the master first, then enable DSC on
		 * the slave (no encoder of its own, hence NULL).
		 */
		intel_encoders_pre_pll_enable(state, master);
		intel_enable_shared_dpll(master_crtc_state);
		intel_encoders_pre_enable(state, master);

		intel_dsc_enable(NULL, crtc_state);
	}
}
7184
/*
 * Full modeset enable sequence for HSW+ pipes (DDI-based platforms,
 * including gen9+ and bigjoiner configurations). The step ordering follows
 * the hardware enable sequence and must be preserved.
 */
static void hsw_crtc_enable(struct intel_atomic_state *state,
			    struct intel_crtc *crtc)
{
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe, hsw_workaround_pipe;
	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
	bool psl_clkgate_wa;

	if (drm_WARN_ON(&dev_priv->drm, crtc->active))
		return;

	if (!new_crtc_state->bigjoiner) {
		intel_encoders_pre_pll_enable(state, crtc);

		if (new_crtc_state->shared_dpll)
			intel_enable_shared_dpll(new_crtc_state);

		intel_encoders_pre_enable(state, crtc);
	} else {
		/* Bigjoiner: PLL/encoder setup is handled via the master. */
		icl_ddi_bigjoiner_pre_enable(state, new_crtc_state);
	}

	intel_set_pipe_src_size(new_crtc_state);
	if (INTEL_GEN(dev_priv) >= 9 || IS_BROADWELL(dev_priv))
		bdw_set_pipemisc(new_crtc_state);

	/*
	 * Transcoder timings/M-N/frame-start-delay are owned by the master
	 * pipe and are not programmed for DSI transcoders here.
	 */
	if (!new_crtc_state->bigjoiner_slave && !transcoder_is_dsi(cpu_transcoder)) {
		intel_set_transcoder_timings(new_crtc_state);

		if (cpu_transcoder != TRANSCODER_EDP)
			intel_de_write(dev_priv, PIPE_MULT(cpu_transcoder),
				       new_crtc_state->pixel_multiplier - 1);

		if (new_crtc_state->has_pch_encoder)
			intel_cpu_transcoder_set_m_n(new_crtc_state,
						     &new_crtc_state->fdi_m_n, NULL);

		hsw_set_frame_start_delay(new_crtc_state);
	}

	if (!transcoder_is_dsi(cpu_transcoder))
		hsw_set_pipeconf(new_crtc_state);

	crtc->active = true;

	/*
	 * GLK/CNL display workaround: disable scaler clock gating while the
	 * pch pfit (scaler) is being brought up; undone after the first
	 * vblank below.
	 */
	psl_clkgate_wa = (IS_GEMINILAKE(dev_priv) || IS_CANNONLAKE(dev_priv)) &&
		new_crtc_state->pch_pfit.enabled;
	if (psl_clkgate_wa)
		glk_pipe_scaler_clock_gating_wa(dev_priv, pipe, true);

	if (INTEL_GEN(dev_priv) >= 9)
		skl_pfit_enable(new_crtc_state);
	else
		ilk_pfit_enable(new_crtc_state);

	/*
	 * Load LUTs and commit color state before scanout starts, while the
	 * pipe clocks are already running.
	 */
	intel_color_load_luts(new_crtc_state);
	intel_color_commit(new_crtc_state);

	/* Gen9+ leaves plane disabling entirely to the plane code. */
	if (INTEL_GEN(dev_priv) < 9)
		intel_disable_primary_plane(new_crtc_state);

	hsw_set_linetime_wm(new_crtc_state);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_set_pipe_chicken(crtc);

	if (dev_priv->display.initial_watermarks)
		dev_priv->display.initial_watermarks(state, crtc);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_pipe_mbus_enable(crtc);

	/*
	 * The slave pipe is enabled implicitly with the master; only the
	 * tracepoint and vblank bookkeeping happen here.
	 */
	if (new_crtc_state->bigjoiner_slave) {
		trace_intel_pipe_enable(crtc);
		intel_crtc_vblank_on(new_crtc_state);
	}

	intel_encoders_enable(state, crtc);

	if (psl_clkgate_wa) {
		intel_wait_for_vblank(dev_priv, pipe);
		glk_pipe_scaler_clock_gating_wa(dev_priv, pipe, false);
	}

	/*
	 * HSW workaround: wait two vblanks on the workaround pipe after
	 * enabling. NOTE(review): exact rationale lives in the workaround
	 * database - confirm before changing.
	 */
	hsw_workaround_pipe = new_crtc_state->hsw_workaround_pipe;
	if (IS_HASWELL(dev_priv) && hsw_workaround_pipe != INVALID_PIPE) {
		intel_wait_for_vblank(dev_priv, hsw_workaround_pipe);
		intel_wait_for_vblank(dev_priv, hsw_workaround_pipe);
	}
}
7284
7285void ilk_pfit_disable(const struct intel_crtc_state *old_crtc_state)
7286{
7287 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
7288 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7289 enum pipe pipe = crtc->pipe;
7290
7291
7292
7293 if (!old_crtc_state->pch_pfit.enabled)
7294 return;
7295
7296 intel_de_write(dev_priv, PF_CTL(pipe), 0);
7297 intel_de_write(dev_priv, PF_WIN_POS(pipe), 0);
7298 intel_de_write(dev_priv, PF_WIN_SZ(pipe), 0);
7299}
7300
/*
 * Full modeset disable sequence for ILK-style (PCH) pipes; the teardown
 * order mirrors the enable sequence in reverse and must be preserved.
 */
static void ilk_crtc_disable(struct intel_atomic_state *state,
			     struct intel_crtc *crtc)
{
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/*
	 * Suppress CPU/PCH FIFO underrun reporting during teardown;
	 * re-armed at the bottom. NOTE(review): presumably avoids logging
	 * spurious underruns while the pipe/FDI are partially disabled.
	 */
	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, false);

	intel_encoders_disable(state, crtc);

	intel_crtc_vblank_off(old_crtc_state);

	intel_disable_pipe(old_crtc_state);

	ilk_pfit_disable(old_crtc_state);

	if (old_crtc_state->has_pch_encoder)
		ilk_fdi_disable(crtc);

	intel_encoders_post_disable(state, crtc);

	if (old_crtc_state->has_pch_encoder) {
		ilk_disable_pch_transcoder(dev_priv, pipe);

		if (HAS_PCH_CPT(dev_priv)) {
			i915_reg_t reg;
			u32 temp;

			/* Disable TRANS_DP_CTL for this pipe. */
			reg = TRANS_DP_CTL(pipe);
			temp = intel_de_read(dev_priv, reg);
			temp &= ~(TRANS_DP_OUTPUT_ENABLE |
				  TRANS_DP_PORT_SEL_MASK);
			temp |= TRANS_DP_PORT_SEL_NONE;
			intel_de_write(dev_priv, reg, temp);

			/* Clear the DPLL selection for this transcoder. */
			temp = intel_de_read(dev_priv, PCH_DPLL_SEL);
			temp &= ~(TRANS_DPLL_ENABLE(pipe) | TRANS_DPLLB_SEL(pipe));
			intel_de_write(dev_priv, PCH_DPLL_SEL, temp);
		}

		ilk_fdi_pll_disable(crtc);
	}

	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, true);
}
7357
/*
 * Modeset disable for HSW+ pipes. On DDI platforms the bulk of the
 * teardown (pipe, transcoder, PLL) is performed inside the encoder
 * disable/post_disable hooks, so only those hooks are invoked here.
 */
static void hsw_crtc_disable(struct intel_atomic_state *state,
			     struct intel_crtc *crtc)
{
	/*
	 * NOTE(review): the two-step encoder disable ordering
	 * (disable before post_disable) is part of the encoder contract -
	 * do not collapse or reorder.
	 */
	intel_encoders_disable(state, crtc);
	intel_encoders_post_disable(state, crtc);
}
7368
7369static void i9xx_pfit_enable(const struct intel_crtc_state *crtc_state)
7370{
7371 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
7372 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7373
7374 if (!crtc_state->gmch_pfit.control)
7375 return;
7376
7377
7378
7379
7380
7381 drm_WARN_ON(&dev_priv->drm,
7382 intel_de_read(dev_priv, PFIT_CONTROL) & PFIT_ENABLE);
7383 assert_pipe_disabled(dev_priv, crtc_state->cpu_transcoder);
7384
7385 intel_de_write(dev_priv, PFIT_PGM_RATIOS,
7386 crtc_state->gmch_pfit.pgm_ratios);
7387 intel_de_write(dev_priv, PFIT_CONTROL, crtc_state->gmch_pfit.control);
7388
7389
7390
7391 intel_de_write(dev_priv, BCLRPAT(crtc->pipe), 0);
7392}
7393
7394bool intel_phy_is_combo(struct drm_i915_private *dev_priv, enum phy phy)
7395{
7396 if (phy == PHY_NONE)
7397 return false;
7398 else if (IS_DG1(dev_priv) || IS_ROCKETLAKE(dev_priv))
7399 return phy <= PHY_D;
7400 else if (IS_JSL_EHL(dev_priv))
7401 return phy <= PHY_C;
7402 else if (INTEL_GEN(dev_priv) >= 11)
7403 return phy <= PHY_B;
7404 else
7405 return false;
7406}
7407
7408bool intel_phy_is_tc(struct drm_i915_private *dev_priv, enum phy phy)
7409{
7410 if (IS_DG1(dev_priv) || IS_ROCKETLAKE(dev_priv))
7411 return false;
7412 else if (INTEL_GEN(dev_priv) >= 12)
7413 return phy >= PHY_D && phy <= PHY_I;
7414 else if (INTEL_GEN(dev_priv) >= 11 && !IS_JSL_EHL(dev_priv))
7415 return phy >= PHY_C && phy <= PHY_F;
7416 else
7417 return false;
7418}
7419
7420enum phy intel_port_to_phy(struct drm_i915_private *i915, enum port port)
7421{
7422 if ((IS_DG1(i915) || IS_ROCKETLAKE(i915)) && port >= PORT_TC1)
7423 return PHY_C + port - PORT_TC1;
7424 else if (IS_JSL_EHL(i915) && port == PORT_D)
7425 return PHY_A;
7426
7427 return PHY_A + port - PORT_A;
7428}
7429
7430enum tc_port intel_port_to_tc(struct drm_i915_private *dev_priv, enum port port)
7431{
7432 if (!intel_phy_is_tc(dev_priv, intel_port_to_phy(dev_priv, port)))
7433 return TC_PORT_NONE;
7434
7435 if (INTEL_GEN(dev_priv) >= 12)
7436 return TC_PORT_1 + port - PORT_TC1;
7437 else
7438 return TC_PORT_1 + port - PORT_C;
7439}
7440
/*
 * Map a DDI port to the power domain feeding its lanes. Unknown ports are
 * reported via MISSING_CASE and fall back to POWER_DOMAIN_PORT_OTHER.
 */
enum intel_display_power_domain intel_port_to_power_domain(enum port port)
{
	switch (port) {
	case PORT_A:
		return POWER_DOMAIN_PORT_DDI_A_LANES;
	case PORT_B:
		return POWER_DOMAIN_PORT_DDI_B_LANES;
	case PORT_C:
		return POWER_DOMAIN_PORT_DDI_C_LANES;
	case PORT_D:
		return POWER_DOMAIN_PORT_DDI_D_LANES;
	case PORT_E:
		return POWER_DOMAIN_PORT_DDI_E_LANES;
	case PORT_F:
		return POWER_DOMAIN_PORT_DDI_F_LANES;
	case PORT_G:
		return POWER_DOMAIN_PORT_DDI_G_LANES;
	case PORT_H:
		return POWER_DOMAIN_PORT_DDI_H_LANES;
	case PORT_I:
		return POWER_DOMAIN_PORT_DDI_I_LANES;
	default:
		MISSING_CASE(port);
		return POWER_DOMAIN_PORT_OTHER;
	}
}
7467
/*
 * Return the AUX power domain for a digital port. Type-C ports operating
 * in Thunderbolt-alt mode use the dedicated *_TBT domains; everything else
 * uses the legacy AUX domain mapping.
 */
enum intel_display_power_domain
intel_aux_power_domain(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port);

	if (intel_phy_is_tc(dev_priv, phy) &&
	    dig_port->tc_mode == TC_PORT_TBT_ALT) {
		switch (dig_port->aux_ch) {
		case AUX_CH_C:
			return POWER_DOMAIN_AUX_C_TBT;
		case AUX_CH_D:
			return POWER_DOMAIN_AUX_D_TBT;
		case AUX_CH_E:
			return POWER_DOMAIN_AUX_E_TBT;
		case AUX_CH_F:
			return POWER_DOMAIN_AUX_F_TBT;
		case AUX_CH_G:
			return POWER_DOMAIN_AUX_G_TBT;
		case AUX_CH_H:
			return POWER_DOMAIN_AUX_H_TBT;
		case AUX_CH_I:
			return POWER_DOMAIN_AUX_I_TBT;
		default:
			MISSING_CASE(dig_port->aux_ch);
			/* Arbitrary but safe fallback for unknown channels. */
			return POWER_DOMAIN_AUX_C_TBT;
		}
	}

	return intel_legacy_aux_to_power_domain(dig_port->aux_ch);
}
7499
7500
7501
7502
7503
/*
 * Map an AUX channel to its legacy (non-TBT) AUX power domain. Unknown
 * channels are reported via MISSING_CASE and fall back to AUX_A.
 */
enum intel_display_power_domain
intel_legacy_aux_to_power_domain(enum aux_ch aux_ch)
{
	switch (aux_ch) {
	case AUX_CH_A:
		return POWER_DOMAIN_AUX_A;
	case AUX_CH_B:
		return POWER_DOMAIN_AUX_B;
	case AUX_CH_C:
		return POWER_DOMAIN_AUX_C;
	case AUX_CH_D:
		return POWER_DOMAIN_AUX_D;
	case AUX_CH_E:
		return POWER_DOMAIN_AUX_E;
	case AUX_CH_F:
		return POWER_DOMAIN_AUX_F;
	case AUX_CH_G:
		return POWER_DOMAIN_AUX_G;
	case AUX_CH_H:
		return POWER_DOMAIN_AUX_H;
	case AUX_CH_I:
		return POWER_DOMAIN_AUX_I;
	default:
		MISSING_CASE(aux_ch);
		return POWER_DOMAIN_AUX_A;
	}
}
7531
7532static u64 get_crtc_power_domains(struct intel_crtc_state *crtc_state)
7533{
7534 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
7535 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7536 struct drm_encoder *encoder;
7537 enum pipe pipe = crtc->pipe;
7538 u64 mask;
7539 enum transcoder transcoder = crtc_state->cpu_transcoder;
7540
7541 if (!crtc_state->hw.active)
7542 return 0;
7543
7544 mask = BIT_ULL(POWER_DOMAIN_PIPE(pipe));
7545 mask |= BIT_ULL(POWER_DOMAIN_TRANSCODER(transcoder));
7546 if (crtc_state->pch_pfit.enabled ||
7547 crtc_state->pch_pfit.force_thru)
7548 mask |= BIT_ULL(POWER_DOMAIN_PIPE_PANEL_FITTER(pipe));
7549
7550 drm_for_each_encoder_mask(encoder, &dev_priv->drm,
7551 crtc_state->uapi.encoder_mask) {
7552 struct intel_encoder *intel_encoder = to_intel_encoder(encoder);
7553
7554 mask |= BIT_ULL(intel_encoder->power_domain);
7555 }
7556
7557 if (HAS_DDI(dev_priv) && crtc_state->has_audio)
7558 mask |= BIT_ULL(POWER_DOMAIN_AUDIO);
7559
7560 if (crtc_state->shared_dpll)
7561 mask |= BIT_ULL(POWER_DOMAIN_DISPLAY_CORE);
7562
7563 if (crtc_state->dsc.compression_enable)
7564 mask |= BIT_ULL(intel_dsc_power_domain(crtc_state));
7565
7566 return mask;
7567}
7568
7569static u64
7570modeset_get_crtc_power_domains(struct intel_crtc_state *crtc_state)
7571{
7572 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
7573 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7574 enum intel_display_power_domain domain;
7575 u64 domains, new_domains, old_domains;
7576
7577 old_domains = crtc->enabled_power_domains;
7578 crtc->enabled_power_domains = new_domains =
7579 get_crtc_power_domains(crtc_state);
7580
7581 domains = new_domains & ~old_domains;
7582
7583 for_each_power_domain(domain, domains)
7584 intel_display_power_get(dev_priv, domain);
7585
7586 return old_domains & ~new_domains;
7587}
7588
7589static void modeset_put_power_domains(struct drm_i915_private *dev_priv,
7590 u64 domains)
7591{
7592 enum intel_display_power_domain domain;
7593
7594 for_each_power_domain(domain, domains)
7595 intel_display_power_put_unchecked(dev_priv, domain);
7596}
7597
/*
 * Full modeset enable sequence for VLV/CHV pipes. The step ordering
 * follows the hardware enable sequence and must not be rearranged.
 */
static void valleyview_crtc_enable(struct intel_atomic_state *state,
				   struct intel_crtc *crtc)
{
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (drm_WARN_ON(&dev_priv->drm, crtc->active))
		return;

	if (intel_crtc_has_dp_encoder(new_crtc_state))
		intel_dp_set_m_n(new_crtc_state, M1_N1);

	intel_set_transcoder_timings(new_crtc_state);
	intel_set_pipe_src_size(new_crtc_state);

	/* CHV pipe B: force legacy blending and a zeroed canvas color. */
	if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B) {
		intel_de_write(dev_priv, CHV_BLEND(pipe), CHV_BLEND_LEGACY);
		intel_de_write(dev_priv, CHV_CANVAS(pipe), 0);
	}

	i9xx_set_pipeconf(new_crtc_state);

	crtc->active = true;

	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	intel_encoders_pre_pll_enable(state, crtc);

	/* Program and enable the pipe PLL (CHV and VLV have distinct PLLs). */
	if (IS_CHERRYVIEW(dev_priv)) {
		chv_prepare_pll(crtc, new_crtc_state);
		chv_enable_pll(crtc, new_crtc_state);
	} else {
		vlv_prepare_pll(crtc, new_crtc_state);
		vlv_enable_pll(crtc, new_crtc_state);
	}

	intel_encoders_pre_enable(state, crtc);

	i9xx_pfit_enable(new_crtc_state);

	intel_color_load_luts(new_crtc_state);
	intel_color_commit(new_crtc_state);

	/* The primary plane stays off until the first plane commit. */
	intel_disable_primary_plane(new_crtc_state);

	dev_priv->display.initial_watermarks(state, crtc);
	intel_enable_pipe(new_crtc_state);

	intel_crtc_vblank_on(new_crtc_state);

	intel_encoders_enable(state, crtc);
}
7652
7653static void i9xx_set_pll_dividers(const struct intel_crtc_state *crtc_state)
7654{
7655 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
7656 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7657
7658 intel_de_write(dev_priv, FP0(crtc->pipe),
7659 crtc_state->dpll_hw_state.fp0);
7660 intel_de_write(dev_priv, FP1(crtc->pipe),
7661 crtc_state->dpll_hw_state.fp1);
7662}
7663
/*
 * Full modeset enable sequence for gen2-gen4 pipes. The step ordering
 * follows the hardware enable sequence and must not be rearranged.
 */
static void i9xx_crtc_enable(struct intel_atomic_state *state,
			     struct intel_crtc *crtc)
{
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (drm_WARN_ON(&dev_priv->drm, crtc->active))
		return;

	i9xx_set_pll_dividers(new_crtc_state);

	if (intel_crtc_has_dp_encoder(new_crtc_state))
		intel_dp_set_m_n(new_crtc_state, M1_N1);

	intel_set_transcoder_timings(new_crtc_state);
	intel_set_pipe_src_size(new_crtc_state);

	i9xx_set_pipeconf(new_crtc_state);

	crtc->active = true;

	/* Gen2 has no FIFO underrun reporting to arm. */
	if (!IS_GEN(dev_priv, 2))
		intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	intel_encoders_pre_enable(state, crtc);

	i9xx_enable_pll(crtc, new_crtc_state);

	i9xx_pfit_enable(new_crtc_state);

	intel_color_load_luts(new_crtc_state);
	intel_color_commit(new_crtc_state);

	/* The primary plane stays off until the first plane commit. */
	intel_disable_primary_plane(new_crtc_state);

	if (dev_priv->display.initial_watermarks)
		dev_priv->display.initial_watermarks(state, crtc);
	else
		intel_update_watermarks(crtc);
	intel_enable_pipe(new_crtc_state);

	intel_crtc_vblank_on(new_crtc_state);

	intel_encoders_enable(state, crtc);

	/*
	 * Gen2: wait a vblank after enabling. NOTE(review): presumably lets
	 * the pipe settle before subsequent programming - confirm.
	 */
	if (IS_GEN(dev_priv, 2))
		intel_wait_for_vblank(dev_priv, pipe);
}
7715
7716static void i9xx_pfit_disable(const struct intel_crtc_state *old_crtc_state)
7717{
7718 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
7719 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7720
7721 if (!old_crtc_state->gmch_pfit.control)
7722 return;
7723
7724 assert_pipe_disabled(dev_priv, old_crtc_state->cpu_transcoder);
7725
7726 drm_dbg_kms(&dev_priv->drm, "disabling pfit, current: 0x%08x\n",
7727 intel_de_read(dev_priv, PFIT_CONTROL));
7728 intel_de_write(dev_priv, PFIT_CONTROL, 0);
7729}
7730
/*
 * Full modeset disable sequence for gen2-gen4/VLV/CHV pipes; the teardown
 * order mirrors the enable sequence in reverse and must be preserved.
 */
static void i9xx_crtc_disable(struct intel_atomic_state *state,
			      struct intel_crtc *crtc)
{
	struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/*
	 * Gen2: wait for the last plane update to latch before disabling
	 * the pipe. NOTE(review): on gen2 planes are double buffered but
	 * the pipe isn't, per long-standing driver lore - confirm.
	 */
	if (IS_GEN(dev_priv, 2))
		intel_wait_for_vblank(dev_priv, pipe);

	intel_encoders_disable(state, crtc);

	intel_crtc_vblank_off(old_crtc_state);

	intel_disable_pipe(old_crtc_state);

	i9xx_pfit_disable(old_crtc_state);

	intel_encoders_post_disable(state, crtc);

	/* DSI keeps its PLL alive; everything else tears it down here. */
	if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DSI)) {
		if (IS_CHERRYVIEW(dev_priv))
			chv_disable_pll(dev_priv, pipe);
		else if (IS_VALLEYVIEW(dev_priv))
			vlv_disable_pll(dev_priv, pipe);
		else
			i9xx_disable_pll(old_crtc_state);
	}

	intel_encoders_post_pll_disable(state, crtc);

	if (!IS_GEN(dev_priv, 2))
		intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);

	if (!dev_priv->display.initial_watermarks)
		intel_update_watermarks(crtc);

	/* I830 requires pipe A to stay running; re-enable it immediately. */
	if (IS_I830(dev_priv))
		i830_enable_pipe(dev_priv, pipe);
}
7777
/*
 * Forcibly disable a CRTC outside of a regular atomic commit (used during
 * hardware state sanitization). Builds a throwaway atomic state just to
 * drive the platform crtc_disable hook, then scrubs all software state
 * (uapi/hw crtc state, power domains, cdclk/dbuf/bw bookkeeping) so the
 * driver's tracking matches the now-disabled hardware.
 */
static void intel_crtc_disable_noatomic(struct intel_crtc *crtc,
					struct drm_modeset_acquire_ctx *ctx)
{
	struct intel_encoder *encoder;
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_bw_state *bw_state =
		to_intel_bw_state(dev_priv->bw_obj.state);
	struct intel_cdclk_state *cdclk_state =
		to_intel_cdclk_state(dev_priv->cdclk.obj.state);
	struct intel_dbuf_state *dbuf_state =
		to_intel_dbuf_state(dev_priv->dbuf.obj.state);
	struct intel_crtc_state *crtc_state =
		to_intel_crtc_state(crtc->base.state);
	enum intel_display_power_domain domain;
	struct intel_plane *plane;
	struct drm_atomic_state *state;
	struct intel_crtc_state *temp_crtc_state;
	enum pipe pipe = crtc->pipe;
	u64 domains;
	int ret;

	if (!crtc_state->hw.active)
		return;

	/* Turn off every visible plane before the pipe itself. */
	for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) {
		const struct intel_plane_state *plane_state =
			to_intel_plane_state(plane->base.state);

		if (plane_state->uapi.visible)
			intel_plane_disable_noatomic(crtc, plane);
	}

	state = drm_atomic_state_alloc(&dev_priv->drm);
	if (!state) {
		drm_dbg_kms(&dev_priv->drm,
			    "failed to disable [CRTC:%d:%s], out of memory",
			    crtc->base.base.id, crtc->base.name);
		return;
	}

	state->acquire_ctx = ctx;

	/*
	 * Pull the crtc state and affected connectors into the temporary
	 * atomic state so the crtc_disable hook sees a consistent view.
	 * Everything is already locked, so these shouldn't fail.
	 */
	temp_crtc_state = intel_atomic_get_crtc_state(state, crtc);
	ret = drm_atomic_add_affected_connectors(state, &crtc->base);

	drm_WARN_ON(&dev_priv->drm, IS_ERR(temp_crtc_state) || ret);

	dev_priv->display.crtc_disable(to_intel_atomic_state(state), crtc);

	drm_atomic_state_put(state);

	drm_dbg_kms(&dev_priv->drm,
		    "[CRTC:%d:%s] hw state adjusted, was enabled, now disabled\n",
		    crtc->base.base.id, crtc->base.name);

	crtc->active = false;
	crtc->base.enabled = false;

	/* Scrub the uapi state: no mode, inactive, no connectors/encoders. */
	drm_WARN_ON(&dev_priv->drm,
		    drm_atomic_set_mode_for_crtc(&crtc_state->uapi, NULL) < 0);
	crtc_state->uapi.active = false;
	crtc_state->uapi.connector_mask = 0;
	crtc_state->uapi.encoder_mask = 0;
	intel_crtc_free_hw_state(crtc_state);
	memset(&crtc_state->hw, 0, sizeof(crtc_state->hw));

	for_each_encoder_on_crtc(&dev_priv->drm, &crtc->base, encoder)
		encoder->base.crtc = NULL;

	intel_fbc_disable(crtc);
	intel_update_watermarks(crtc);
	intel_disable_shared_dpll(crtc_state);

	/* Release every power domain reference the CRTC was holding. */
	domains = crtc->enabled_power_domains;
	for_each_power_domain(domain, domains)
		intel_display_power_put_unchecked(dev_priv, domain);
	crtc->enabled_power_domains = 0;

	/* Clear this pipe from the global cdclk/dbuf/bw bookkeeping. */
	dev_priv->active_pipes &= ~BIT(pipe);
	cdclk_state->min_cdclk[pipe] = 0;
	cdclk_state->min_voltage_level[pipe] = 0;
	cdclk_state->active_pipes &= ~BIT(pipe);

	dbuf_state->active_pipes &= ~BIT(pipe);

	bw_state->data_rate[pipe] = 0;
	bw_state->num_active_planes[pipe] = 0;
}
7867
7868
7869
7870
7871
7872int intel_display_suspend(struct drm_device *dev)
7873{
7874 struct drm_i915_private *dev_priv = to_i915(dev);
7875 struct drm_atomic_state *state;
7876 int ret;
7877
7878 state = drm_atomic_helper_suspend(dev);
7879 ret = PTR_ERR_OR_ZERO(state);
7880 if (ret)
7881 drm_err(&dev_priv->drm, "Suspending crtc's failed with %i\n",
7882 ret);
7883 else
7884 dev_priv->modeset_restore_state = state;
7885 return ret;
7886}
7887
/*
 * Generic encoder destroy hook: clean up the DRM encoder core state, then
 * free the embedding intel_encoder allocation.
 */
void intel_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_encoder *intel_encoder = to_intel_encoder(encoder);

	drm_encoder_cleanup(encoder);
	kfree(intel_encoder);
}
7895
7896
7897
/*
 * Cross-check a connector's software state against the hardware state
 * reported by its ->get_hw_state() hook, emitting I915_STATE_WARNs for
 * any mismatch (enabled connector without crtc, stale encoder links, etc).
 * @crtc_state may be NULL when the connector has no attached CRTC.
 */
static void intel_connector_verify_state(struct intel_crtc_state *crtc_state,
					 struct drm_connector_state *conn_state)
{
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	struct drm_i915_private *i915 = to_i915(connector->base.dev);

	drm_dbg_kms(&i915->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.base.id, connector->base.name);

	if (connector->get_hw_state(connector)) {
		struct intel_encoder *encoder = intel_attached_encoder(connector);

		I915_STATE_WARN(!crtc_state,
				"connector enabled without attached crtc\n");

		if (!crtc_state)
			return;

		I915_STATE_WARN(!crtc_state->hw.active,
				"connector is active, but attached crtc isn't\n");

		/* MST connectors have no fixed 1:1 encoder to verify. */
		if (!encoder || encoder->type == INTEL_OUTPUT_DP_MST)
			return;

		I915_STATE_WARN(conn_state->best_encoder != &encoder->base,
				"atomic encoder doesn't match attached encoder\n");

		I915_STATE_WARN(conn_state->crtc != encoder->base.crtc,
				"attached encoder crtc differs from connector crtc\n");
	} else {
		I915_STATE_WARN(crtc_state && crtc_state->hw.active,
				"attached crtc is active, but connector isn't\n");
		I915_STATE_WARN(!crtc_state && conn_state->best_encoder,
				"best encoder set without crtc!\n");
	}
}
7934
7935static int pipe_required_fdi_lanes(struct intel_crtc_state *crtc_state)
7936{
7937 if (crtc_state->hw.enable && crtc_state->has_pch_encoder)
7938 return crtc_state->fdi_lanes;
7939
7940 return 0;
7941}
7942
/*
 * Validate the FDI lane count requested for @pipe against the platform
 * limits and against the lanes consumed by the other pipes that share the
 * FDI link. Returns 0 when valid, -EINVAL when the config can't work, or
 * a propagated error (e.g. -EDEADLK) from atomic state acquisition.
 */
static int ilk_check_fdi_lanes(struct drm_device *dev, enum pipe pipe,
			       struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_atomic_state *state = pipe_config->uapi.state;
	struct intel_crtc *other_crtc;
	struct intel_crtc_state *other_crtc_state;

	drm_dbg_kms(&dev_priv->drm,
		    "checking fdi config on pipe %c, lanes %i\n",
		    pipe_name(pipe), pipe_config->fdi_lanes);
	/* 4 lanes is the absolute hardware maximum. */
	if (pipe_config->fdi_lanes > 4) {
		drm_dbg_kms(&dev_priv->drm,
			    "invalid fdi lane config on pipe %c: %i lanes\n",
			    pipe_name(pipe), pipe_config->fdi_lanes);
		return -EINVAL;
	}

	/* HSW/BDW cap every pipe at 2 lanes; no sharing constraints beyond that. */
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) {
		if (pipe_config->fdi_lanes > 2) {
			drm_dbg_kms(&dev_priv->drm,
				    "only 2 lanes on haswell, required: %i lanes\n",
				    pipe_config->fdi_lanes);
			return -EINVAL;
		} else {
			return 0;
		}
	}

	/* With only two pipes there is no lane sharing to arbitrate. */
	if (INTEL_NUM_PIPES(dev_priv) == 2)
		return 0;

	/* Three-pipe platforms: pipes B and C share FDI lanes. */
	switch (pipe) {
	case PIPE_A:
		return 0;
	case PIPE_B:
		if (pipe_config->fdi_lanes <= 2)
			return 0;

		/* >2 lanes on B only works if pipe C uses none. */
		other_crtc = intel_get_crtc_for_pipe(dev_priv, PIPE_C);
		other_crtc_state =
			intel_atomic_get_crtc_state(state, other_crtc);
		if (IS_ERR(other_crtc_state))
			return PTR_ERR(other_crtc_state);

		if (pipe_required_fdi_lanes(other_crtc_state) > 0) {
			drm_dbg_kms(&dev_priv->drm,
				    "invalid shared fdi lane config on pipe %c: %i lanes\n",
				    pipe_name(pipe), pipe_config->fdi_lanes);
			return -EINVAL;
		}
		return 0;
	case PIPE_C:
		/* Pipe C is limited to 2 lanes outright. */
		if (pipe_config->fdi_lanes > 2) {
			drm_dbg_kms(&dev_priv->drm,
				    "only 2 lanes on pipe %c: required %i lanes\n",
				    pipe_name(pipe), pipe_config->fdi_lanes);
			return -EINVAL;
		}

		/* ...and only if pipe B leaves it at least 2 lanes. */
		other_crtc = intel_get_crtc_for_pipe(dev_priv, PIPE_B);
		other_crtc_state =
			intel_atomic_get_crtc_state(state, other_crtc);
		if (IS_ERR(other_crtc_state))
			return PTR_ERR(other_crtc_state);

		if (pipe_required_fdi_lanes(other_crtc_state) > 2) {
			drm_dbg_kms(&dev_priv->drm,
				    "fdi link B uses too many lanes to enable link C\n");
			return -EINVAL;
		}
		return 0;
	default:
		BUG();
	}
}
8020
8021#define RETRY 1
8022static int ilk_fdi_compute_config(struct intel_crtc *intel_crtc,
8023 struct intel_crtc_state *pipe_config)
8024{
8025 struct drm_device *dev = intel_crtc->base.dev;
8026 struct drm_i915_private *i915 = to_i915(dev);
8027 const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
8028 int lane, link_bw, fdi_dotclock, ret;
8029 bool needs_recompute = false;
8030
8031retry:
8032
8033
8034
8035
8036
8037
8038
8039 link_bw = intel_fdi_link_freq(i915, pipe_config);
8040
8041 fdi_dotclock = adjusted_mode->crtc_clock;
8042
8043 lane = ilk_get_lanes_required(fdi_dotclock, link_bw,
8044 pipe_config->pipe_bpp);
8045
8046 pipe_config->fdi_lanes = lane;
8047
8048 intel_link_compute_m_n(pipe_config->pipe_bpp, lane, fdi_dotclock,
8049 link_bw, &pipe_config->fdi_m_n, false, false);
8050
8051 ret = ilk_check_fdi_lanes(dev, intel_crtc->pipe, pipe_config);
8052 if (ret == -EDEADLK)
8053 return ret;
8054
8055 if (ret == -EINVAL && pipe_config->pipe_bpp > 6*3) {
8056 pipe_config->pipe_bpp -= 2*3;
8057 drm_dbg_kms(&i915->drm,
8058 "fdi link bw constraint, reducing pipe bpp to %i\n",
8059 pipe_config->pipe_bpp);
8060 needs_recompute = true;
8061 pipe_config->bw_constrained = true;
8062
8063 goto retry;
8064 }
8065
8066 if (needs_recompute)
8067 return RETRY;
8068
8069 return ret;
8070}
8071
8072bool hsw_crtc_state_ips_capable(const struct intel_crtc_state *crtc_state)
8073{
8074 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
8075 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
8076
8077
8078 if (!hsw_crtc_supports_ips(crtc))
8079 return false;
8080
8081 if (!dev_priv->params.enable_ips)
8082 return false;
8083
8084 if (crtc_state->pipe_bpp > 24)
8085 return false;
8086
8087
8088
8089
8090
8091
8092
8093
8094 if (IS_BROADWELL(dev_priv) &&
8095 crtc_state->pixel_rate > dev_priv->max_cdclk_freq * 95 / 100)
8096 return false;
8097
8098 return true;
8099}
8100
8101static int hsw_compute_ips_config(struct intel_crtc_state *crtc_state)
8102{
8103 struct drm_i915_private *dev_priv =
8104 to_i915(crtc_state->uapi.crtc->dev);
8105 struct intel_atomic_state *state =
8106 to_intel_atomic_state(crtc_state->uapi.state);
8107
8108 crtc_state->ips_enabled = false;
8109
8110 if (!hsw_crtc_state_ips_capable(crtc_state))
8111 return 0;
8112
8113
8114
8115
8116
8117
8118
8119 if (crtc_state->crc_enabled)
8120 return 0;
8121
8122
8123 if (!(crtc_state->active_planes & ~BIT(PLANE_CURSOR)))
8124 return 0;
8125
8126 if (IS_BROADWELL(dev_priv)) {
8127 const struct intel_cdclk_state *cdclk_state;
8128
8129 cdclk_state = intel_atomic_get_cdclk_state(state);
8130 if (IS_ERR(cdclk_state))
8131 return PTR_ERR(cdclk_state);
8132
8133
8134 if (crtc_state->pixel_rate > cdclk_state->logical.cdclk * 95 / 100)
8135 return 0;
8136 }
8137
8138 crtc_state->ips_enabled = true;
8139
8140 return 0;
8141}
8142
8143static bool intel_crtc_supports_double_wide(const struct intel_crtc *crtc)
8144{
8145 const struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
8146
8147
8148 return INTEL_GEN(dev_priv) < 4 &&
8149 (crtc->pipe == PIPE_A || IS_I915G(dev_priv));
8150}
8151
static u32 ilk_pipe_pixel_rate(const struct intel_crtc_state *crtc_state)
{
	u32 pixel_rate = crtc_state->hw.pipe_mode.crtc_clock;
	unsigned int pipe_w, pipe_h, pfit_w, pfit_h;

	/*
	 * Without the panel fitter the dotclock is the pipe's
	 * pixel rate. With pfit downscaling the pipe has to fetch
	 * more pixels than it outputs, so scale the rate up by the
	 * source/destination area ratio below.
	 */
	if (!crtc_state->pch_pfit.enabled)
		return pixel_rate;

	pipe_w = crtc_state->pipe_src_w;
	pipe_h = crtc_state->pipe_src_h;

	pfit_w = drm_rect_width(&crtc_state->pch_pfit.dst);
	pfit_h = drm_rect_height(&crtc_state->pch_pfit.dst);

	/* Clamp so the ratio never drops below 1: upscaling must not lower the rate. */
	if (pipe_w < pfit_w)
		pipe_w = pfit_w;
	if (pipe_h < pfit_h)
		pipe_h = pfit_h;

	/* Guard the division below against a zero-area pfit destination. */
	if (drm_WARN_ON(crtc_state->uapi.crtc->dev,
			!pfit_w || !pfit_h))
		return pixel_rate;

	/* 64-bit intermediate so pixel_rate * area cannot overflow. */
	return div_u64(mul_u32_u32(pixel_rate, pipe_w * pipe_h),
		       pfit_w * pfit_h);
}
8183
8184static void intel_mode_from_crtc_timings(struct drm_display_mode *mode,
8185 const struct drm_display_mode *timings)
8186{
8187 mode->hdisplay = timings->crtc_hdisplay;
8188 mode->htotal = timings->crtc_htotal;
8189 mode->hsync_start = timings->crtc_hsync_start;
8190 mode->hsync_end = timings->crtc_hsync_end;
8191
8192 mode->vdisplay = timings->crtc_vdisplay;
8193 mode->vtotal = timings->crtc_vtotal;
8194 mode->vsync_start = timings->crtc_vsync_start;
8195 mode->vsync_end = timings->crtc_vsync_end;
8196
8197 mode->flags = timings->flags;
8198 mode->type = DRM_MODE_TYPE_DRIVER;
8199
8200 mode->clock = timings->crtc_clock;
8201
8202 drm_mode_set_name(mode);
8203}
8204
8205static void intel_crtc_compute_pixel_rate(struct intel_crtc_state *crtc_state)
8206{
8207 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
8208
8209 if (HAS_GMCH(dev_priv))
8210
8211 crtc_state->pixel_rate =
8212 crtc_state->hw.pipe_mode.crtc_clock;
8213 else
8214 crtc_state->pixel_rate =
8215 ilk_pipe_pixel_rate(crtc_state);
8216}
8217
static void intel_crtc_readout_derived_state(struct intel_crtc_state *crtc_state)
{
	struct drm_display_mode *mode = &crtc_state->hw.mode;
	struct drm_display_mode *pipe_mode = &crtc_state->hw.pipe_mode;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/* Derive the per-pipe timings from the transcoder timings. */
	drm_mode_copy(pipe_mode, adjusted_mode);

	if (crtc_state->bigjoiner) {
		/*
		 * transcoder is programmed to the full mode,
		 * but pipe timings are half of the transcoder mode
		 */
		pipe_mode->crtc_hdisplay /= 2;
		pipe_mode->crtc_hblank_start /= 2;
		pipe_mode->crtc_hblank_end /= 2;
		pipe_mode->crtc_hsync_start /= 2;
		pipe_mode->crtc_hsync_end /= 2;
		pipe_mode->crtc_htotal /= 2;
		pipe_mode->crtc_clock /= 2;
	}

	/* Populate the user-visible fields from the crtc_* timings. */
	intel_mode_from_crtc_timings(pipe_mode, pipe_mode);
	intel_mode_from_crtc_timings(adjusted_mode, adjusted_mode);

	intel_crtc_compute_pixel_rate(crtc_state);

	/*
	 * The user mode advertises the full size: with bigjoiner,
	 * pipe_src_w covers only this pipe's half, so shift it back up.
	 */
	drm_mode_copy(mode, adjusted_mode);
	mode->hdisplay = crtc_state->pipe_src_w << crtc_state->bigjoiner;
	mode->vdisplay = crtc_state->pipe_src_h;
}
8249
static void intel_encoder_get_config(struct intel_encoder *encoder,
				     struct intel_crtc_state *crtc_state)
{
	/* Let the encoder read out its part of the hw state... */
	encoder->get_config(encoder, crtc_state);

	/* ...then derive pipe_mode, pixel rate and the user mode from it. */
	intel_crtc_readout_derived_state(crtc_state);
}
8257
static int intel_crtc_compute_config(struct intel_crtc *crtc,
				     struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct drm_display_mode *pipe_mode = &pipe_config->hw.pipe_mode;
	int clock_limit = dev_priv->max_dotclk_freq;

	drm_mode_copy(pipe_mode, &pipe_config->hw.adjusted_mode);

	/* With bigjoiner each pipe drives only half the horizontal mode. */
	if (pipe_config->bigjoiner) {
		pipe_mode->crtc_clock /= 2;
		pipe_mode->crtc_hdisplay /= 2;
		pipe_mode->crtc_hblank_start /= 2;
		pipe_mode->crtc_hblank_end /= 2;
		pipe_mode->crtc_hsync_start /= 2;
		pipe_mode->crtc_hsync_end /= 2;
		pipe_mode->crtc_htotal /= 2;
		pipe_config->pipe_src_w /= 2;
	}

	intel_mode_from_crtc_timings(pipe_mode, pipe_mode);

	if (INTEL_GEN(dev_priv) < 4) {
		clock_limit = dev_priv->max_cdclk_freq * 9 / 10;

		/*
		 * Enable double wide mode when the dot clock
		 * is > 90% of the (display) core speed.
		 */
		if (intel_crtc_supports_double_wide(crtc) &&
		    pipe_mode->crtc_clock > clock_limit) {
			clock_limit = dev_priv->max_dotclk_freq;
			pipe_config->double_wide = true;
		}
	}

	if (pipe_mode->crtc_clock > clock_limit) {
		drm_dbg_kms(&dev_priv->drm,
			    "requested pixel clock (%d kHz) too high (max: %d kHz, double wide: %s)\n",
			    pipe_mode->crtc_clock, clock_limit,
			    yesno(pipe_config->double_wide));
		return -EINVAL;
	}

	if ((pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
	     pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) &&
	     pipe_config->hw.ctm) {
		/*
		 * There is only one pipe CSC unit per pipe, and we need that
		 * for output conversion from RGB->YCBCR. So if CTM is already
		 * applied we can't support YCBCR420 output.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "YCBCR420 and CTM together are not possible\n");
		return -EINVAL;
	}

	/*
	 * Pipe horizontal size must be even in:
	 * - DVO ganged mode
	 * - LVDS dual channel mode
	 * - Double wide pipe
	 */
	if (pipe_config->pipe_src_w & 1) {
		if (pipe_config->double_wide) {
			drm_dbg_kms(&dev_priv->drm,
				    "Odd pipe source width not supported with double wide pipe\n");
			return -EINVAL;
		}

		if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_LVDS) &&
		    intel_is_dual_link_lvds(dev_priv)) {
			drm_dbg_kms(&dev_priv->drm,
				    "Odd pipe source width not supported with dual link LVDS\n");
			return -EINVAL;
		}
	}

	/*
	 * Cantiga+ cannot handle modes with a hsync front porch of 0.
	 * WaPruneModeWithIncorrectHsyncOffset:ctg,elk,ilk,snb,ivb,vlv,hsw.
	 */
	if ((INTEL_GEN(dev_priv) > 4 || IS_G4X(dev_priv)) &&
	    pipe_mode->crtc_hsync_start == pipe_mode->crtc_hdisplay)
		return -EINVAL;

	intel_crtc_compute_pixel_rate(pipe_config);

	/* FDI link bandwidth imposes further constraints on PCH encoders. */
	if (pipe_config->has_pch_encoder)
		return ilk_fdi_compute_config(crtc, pipe_config);

	return 0;
}
8351
8352static void
8353intel_reduce_m_n_ratio(u32 *num, u32 *den)
8354{
8355 while (*num > DATA_LINK_M_N_MASK ||
8356 *den > DATA_LINK_M_N_MASK) {
8357 *num >>= 1;
8358 *den >>= 1;
8359 }
8360}
8361
8362static void compute_m_n(unsigned int m, unsigned int n,
8363 u32 *ret_m, u32 *ret_n,
8364 bool constant_n)
8365{
8366
8367
8368
8369
8370
8371
8372
8373 if (constant_n)
8374 *ret_n = DP_LINK_CONSTANT_N_VALUE;
8375 else
8376 *ret_n = min_t(unsigned int, roundup_pow_of_two(n), DATA_LINK_N_MAX);
8377
8378 *ret_m = div_u64(mul_u32_u32(m, *ret_n), n);
8379 intel_reduce_m_n_ratio(ret_m, ret_n);
8380}
8381
8382void
8383intel_link_compute_m_n(u16 bits_per_pixel, int nlanes,
8384 int pixel_clock, int link_clock,
8385 struct intel_link_m_n *m_n,
8386 bool constant_n, bool fec_enable)
8387{
8388 u32 data_clock = bits_per_pixel * pixel_clock;
8389
8390 if (fec_enable)
8391 data_clock = intel_dp_mode_to_fec_clock(data_clock);
8392
8393 m_n->tu = 64;
8394 compute_m_n(data_clock,
8395 link_clock * nlanes * 8,
8396 &m_n->gmch_m, &m_n->gmch_n,
8397 constant_n);
8398
8399 compute_m_n(pixel_clock, link_clock,
8400 &m_n->link_m, &m_n->link_n,
8401 constant_n);
8402}
8403
static void intel_panel_sanitize_ssc(struct drm_i915_private *dev_priv)
{
	/*
	 * If the BIOS enabled SSC we can just keep using it to
	 * avoid unnecessary flicker. Whereas if the BIOS isn't
	 * using it, don't assume it will work even if the VBT
	 * indicates as much.
	 */
	if (HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)) {
		bool bios_lvds_use_ssc = intel_de_read(dev_priv,
						       PCH_DREF_CONTROL) &
			DREF_SSC1_ENABLE;

		if (dev_priv->vbt.lvds_use_ssc != bios_lvds_use_ssc) {
			drm_dbg_kms(&dev_priv->drm,
				    "SSC %s by BIOS, overriding VBT which says %s\n",
				    enableddisabled(bios_lvds_use_ssc),
				    enableddisabled(dev_priv->vbt.lvds_use_ssc));
			/* The hardware state wins over the VBT. */
			dev_priv->vbt.lvds_use_ssc = bios_lvds_use_ssc;
		}
	}
}
8426
8427static bool intel_panel_use_ssc(struct drm_i915_private *dev_priv)
8428{
8429 if (dev_priv->params.panel_use_ssc >= 0)
8430 return dev_priv->params.panel_use_ssc != 0;
8431 return dev_priv->vbt.lvds_use_ssc
8432 && !(dev_priv->quirks & QUIRK_LVDS_SSC_DISABLE);
8433}
8434
8435static u32 pnv_dpll_compute_fp(struct dpll *dpll)
8436{
8437 return (1 << dpll->n) << 16 | dpll->m2;
8438}
8439
8440static u32 i9xx_dpll_compute_fp(struct dpll *dpll)
8441{
8442 return dpll->n << 16 | dpll->m1 << 8 | dpll->m2;
8443}
8444
8445static void i9xx_update_pll_dividers(struct intel_crtc *crtc,
8446 struct intel_crtc_state *crtc_state,
8447 struct dpll *reduced_clock)
8448{
8449 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
8450 u32 fp, fp2 = 0;
8451
8452 if (IS_PINEVIEW(dev_priv)) {
8453 fp = pnv_dpll_compute_fp(&crtc_state->dpll);
8454 if (reduced_clock)
8455 fp2 = pnv_dpll_compute_fp(reduced_clock);
8456 } else {
8457 fp = i9xx_dpll_compute_fp(&crtc_state->dpll);
8458 if (reduced_clock)
8459 fp2 = i9xx_dpll_compute_fp(reduced_clock);
8460 }
8461
8462 crtc_state->dpll_hw_state.fp0 = fp;
8463
8464 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
8465 reduced_clock) {
8466 crtc_state->dpll_hw_state.fp1 = fp2;
8467 } else {
8468 crtc_state->dpll_hw_state.fp1 = fp;
8469 }
8470}
8471
static void vlv_pllb_recal_opamp(struct drm_i915_private *dev_priv, enum pipe
				 pipe)
{
	u32 reg_val;

	/*
	 * PLLB opamp always calibrates to max value of 0x3f, force enable it
	 * and set it to a reasonable value instead.
	 */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW9(1));
	reg_val &= 0xffffff00;
	reg_val |= 0x00000030;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW9(1), reg_val);

	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_REF_DW13);
	reg_val &= 0x00ffffff;
	reg_val |= 0x8c000000;
	vlv_dpio_write(dev_priv, pipe, VLV_REF_DW13, reg_val);

	/* Restore the DW9 low byte once the recal has been forced. */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW9(1));
	reg_val &= 0xffffff00;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW9(1), reg_val);

	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_REF_DW13);
	reg_val &= 0x00ffffff;
	reg_val |= 0xb0000000;
	vlv_dpio_write(dev_priv, pipe, VLV_REF_DW13, reg_val);
}
8500
static void intel_pch_transcoder_set_m_n(const struct intel_crtc_state *crtc_state,
					 const struct intel_link_m_n *m_n)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/* Program the data (with TU size) and link M/N for the PCH transcoder. */
	intel_de_write(dev_priv, PCH_TRANS_DATA_M1(pipe),
		       TU_SIZE(m_n->tu) | m_n->gmch_m);
	intel_de_write(dev_priv, PCH_TRANS_DATA_N1(pipe), m_n->gmch_n);
	intel_de_write(dev_priv, PCH_TRANS_LINK_M1(pipe), m_n->link_m);
	intel_de_write(dev_priv, PCH_TRANS_LINK_N1(pipe), m_n->link_n);
}
8514
8515static bool transcoder_has_m2_n2(struct drm_i915_private *dev_priv,
8516 enum transcoder transcoder)
8517{
8518 if (IS_HASWELL(dev_priv))
8519 return transcoder == TRANSCODER_EDP;
8520
8521
8522
8523
8524
8525 return IS_GEN(dev_priv, 7) || IS_CHERRYVIEW(dev_priv);
8526}
8527
static void intel_cpu_transcoder_set_m_n(const struct intel_crtc_state *crtc_state,
					 const struct intel_link_m_n *m_n,
					 const struct intel_link_m_n *m2_n2)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum transcoder transcoder = crtc_state->cpu_transcoder;

	if (INTEL_GEN(dev_priv) >= 5) {
		/* gen5+ has per-transcoder M/N registers */
		intel_de_write(dev_priv, PIPE_DATA_M1(transcoder),
			       TU_SIZE(m_n->tu) | m_n->gmch_m);
		intel_de_write(dev_priv, PIPE_DATA_N1(transcoder),
			       m_n->gmch_n);
		intel_de_write(dev_priv, PIPE_LINK_M1(transcoder),
			       m_n->link_m);
		intel_de_write(dev_priv, PIPE_LINK_N1(transcoder),
			       m_n->link_n);

		/*
		 *  M2_N2 registers are set only if DRRS is supported
		 * (to make sure the registers are not unnecessarily accessed).
		 */
		if (m2_n2 && crtc_state->has_drrs &&
		    transcoder_has_m2_n2(dev_priv, transcoder)) {
			intel_de_write(dev_priv, PIPE_DATA_M2(transcoder),
				       TU_SIZE(m2_n2->tu) | m2_n2->gmch_m);
			intel_de_write(dev_priv, PIPE_DATA_N2(transcoder),
				       m2_n2->gmch_n);
			intel_de_write(dev_priv, PIPE_LINK_M2(transcoder),
				       m2_n2->link_m);
			intel_de_write(dev_priv, PIPE_LINK_N2(transcoder),
				       m2_n2->link_n);
		}
	} else {
		/* gen4 and earlier use per-pipe M/N registers */
		intel_de_write(dev_priv, PIPE_DATA_M_G4X(pipe),
			       TU_SIZE(m_n->tu) | m_n->gmch_m);
		intel_de_write(dev_priv, PIPE_DATA_N_G4X(pipe), m_n->gmch_n);
		intel_de_write(dev_priv, PIPE_LINK_M_G4X(pipe), m_n->link_m);
		intel_de_write(dev_priv, PIPE_LINK_N_G4X(pipe), m_n->link_n);
	}
}
8569
void intel_dp_set_m_n(const struct intel_crtc_state *crtc_state, enum link_m_n_set m_n)
{
	const struct intel_link_m_n *dp_m_n, *dp_m2_n2 = NULL;
	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);

	if (m_n == M1_N1) {
		dp_m_n = &crtc_state->dp_m_n;
		dp_m2_n2 = &crtc_state->dp_m2_n2;
	} else if (m_n == M2_N2) {
		/*
		 * M2_N2 registers are not supported. Hence m2_n2 divider value
		 * needs to be programmed into M1_N1.
		 */
		dp_m_n = &crtc_state->dp_m2_n2;
	} else {
		drm_err(&i915->drm, "Unsupported divider value\n");
		return;
	}

	/* PCH encoders always program the primary M/N values. */
	if (crtc_state->has_pch_encoder)
		intel_pch_transcoder_set_m_n(crtc_state, &crtc_state->dp_m_n);
	else
		intel_cpu_transcoder_set_m_n(crtc_state, dp_m_n, dp_m2_n2);
}
8595
8596static void vlv_compute_dpll(struct intel_crtc *crtc,
8597 struct intel_crtc_state *pipe_config)
8598{
8599 pipe_config->dpll_hw_state.dpll = DPLL_INTEGRATED_REF_CLK_VLV |
8600 DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
8601 if (crtc->pipe != PIPE_A)
8602 pipe_config->dpll_hw_state.dpll |= DPLL_INTEGRATED_CRI_CLK_VLV;
8603
8604
8605 if (!intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DSI))
8606 pipe_config->dpll_hw_state.dpll |= DPLL_VCO_ENABLE |
8607 DPLL_EXT_BUFFER_ENABLE_VLV;
8608
8609 pipe_config->dpll_hw_state.dpll_md =
8610 (pipe_config->pixel_multiplier - 1) << DPLL_MD_UDI_MULTIPLIER_SHIFT;
8611}
8612
8613static void chv_compute_dpll(struct intel_crtc *crtc,
8614 struct intel_crtc_state *pipe_config)
8615{
8616 pipe_config->dpll_hw_state.dpll = DPLL_SSC_REF_CLK_CHV |
8617 DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
8618 if (crtc->pipe != PIPE_A)
8619 pipe_config->dpll_hw_state.dpll |= DPLL_INTEGRATED_CRI_CLK_VLV;
8620
8621
8622 if (!intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DSI))
8623 pipe_config->dpll_hw_state.dpll |= DPLL_VCO_ENABLE;
8624
8625 pipe_config->dpll_hw_state.dpll_md =
8626 (pipe_config->pixel_multiplier - 1) << DPLL_MD_UDI_MULTIPLIER_SHIFT;
8627}
8628
static void vlv_prepare_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	u32 mdiv;
	u32 bestn, bestm1, bestm2, bestp1, bestp2;
	u32 coreclk, reg_val;

	/* Enable Refclk */
	intel_de_write(dev_priv, DPLL(pipe),
		       pipe_config->dpll_hw_state.dpll & ~(DPLL_VCO_ENABLE | DPLL_EXT_BUFFER_ENABLE_VLV));

	/* No need to actually set up the DPLL with DSI */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	vlv_dpio_get(dev_priv);

	bestn = pipe_config->dpll.n;
	bestm1 = pipe_config->dpll.m1;
	bestm2 = pipe_config->dpll.m2;
	bestp1 = pipe_config->dpll.p1;
	bestp2 = pipe_config->dpll.p2;

	/* PLL B needs special handling */
	if (pipe == PIPE_B)
		vlv_pllb_recal_opamp(dev_priv, pipe);

	/* Set up Tx target for periodic Rcomp update */
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW9_BCAST, 0x0100000f);

	/* Disable target IRef on PLL */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW8(pipe));
	reg_val &= 0x00ffffff;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW8(pipe), reg_val);

	/* Disable fast lock */
	vlv_dpio_write(dev_priv, pipe, VLV_CMN_DW0, 0x610);

	/* Set idtafcrecal before PLL is enabled */
	mdiv = ((bestm1 << DPIO_M1DIV_SHIFT) | (bestm2 & DPIO_M2DIV_MASK));
	mdiv |= ((bestp1 << DPIO_P1_SHIFT) | (bestp2 << DPIO_P2_SHIFT));
	mdiv |= ((bestn << DPIO_N_SHIFT));
	mdiv |= (1 << DPIO_K_SHIFT);

	/*
	 * Post divider depends on pixel clock rate, DAC vs. digital (and LVDS,
	 * but we don't support that).
	 * Note: don't use the DAC post divider as it seems unstable.
	 */
	mdiv |= (DPIO_POST_DIV_HDMIDP << DPIO_POST_DIV_SHIFT);
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW3(pipe), mdiv);

	mdiv |= DPIO_ENABLE_CALIBRATION;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW3(pipe), mdiv);

	/* Set HBR and RBR LPF coefficients */
	if (pipe_config->port_clock == 162000 ||
	    intel_crtc_has_type(pipe_config, INTEL_OUTPUT_ANALOG) ||
	    intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI))
		vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW10(pipe),
				 0x009f0003);
	else
		vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW10(pipe),
				 0x00d0000f);

	if (intel_crtc_has_dp_encoder(pipe_config)) {
		/* Use SSC source */
		if (pipe == PIPE_A)
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df40000);
		else
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df70000);
	} else {
		/* HDMI or VGA: use bend source */
		if (pipe == PIPE_A)
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df70000);
		else
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df40000);
	}

	coreclk = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW7(pipe));
	coreclk = (coreclk & 0x0000ff00) | 0x01c00000;
	if (intel_crtc_has_dp_encoder(pipe_config))
		coreclk |= 0x01000000;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW7(pipe), coreclk);

	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW11(pipe), 0x87871000);

	vlv_dpio_put(dev_priv);
}
8727
static void chv_prepare_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	u32 loopfilter, tribuf_calcntr;
	u32 bestn, bestm1, bestm2, bestp1, bestp2, bestm2_frac;
	u32 dpio_val;
	int vco;

	/* Enable Refclk and SSC */
	intel_de_write(dev_priv, DPLL(pipe),
		       pipe_config->dpll_hw_state.dpll & ~DPLL_VCO_ENABLE);

	/* No need to actually set up the DPLL with DSI */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	bestn = pipe_config->dpll.n;
	bestm2_frac = pipe_config->dpll.m2 & 0x3fffff;
	bestm1 = pipe_config->dpll.m1;
	bestm2 = pipe_config->dpll.m2 >> 22;
	bestp1 = pipe_config->dpll.p1;
	bestp2 = pipe_config->dpll.p2;
	vco = pipe_config->dpll.vco;
	dpio_val = 0;
	loopfilter = 0;

	vlv_dpio_get(dev_priv);

	/* p1 and p2 divider */
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW13(port),
			5 << DPIO_CHV_S1_DIV_SHIFT |
			bestp1 << DPIO_CHV_P1_DIV_SHIFT |
			bestp2 << DPIO_CHV_P2_DIV_SHIFT |
			1 << DPIO_CHV_K_DIV_SHIFT);

	/* Feedback post-divider - m2 */
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW0(port), bestm2);

	/* Feedback refclk divider - n and m1 */
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW1(port),
			DPIO_CHV_M1_DIV_BY_2 |
			1 << DPIO_CHV_N_DIV_SHIFT);

	/* M2 fraction division */
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW2(port), bestm2_frac);

	/* M2 fraction division enable */
	dpio_val = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW3(port));
	dpio_val &= ~(DPIO_CHV_FEEDFWD_GAIN_MASK | DPIO_CHV_FRAC_DIV_EN);
	dpio_val |= (2 << DPIO_CHV_FEEDFWD_GAIN_SHIFT);
	if (bestm2_frac)
		dpio_val |= DPIO_CHV_FRAC_DIV_EN;
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW3(port), dpio_val);

	/* Program digital lock detect threshold */
	dpio_val = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW9(port));
	dpio_val &= ~(DPIO_CHV_INT_LOCK_THRESHOLD_MASK |
					DPIO_CHV_INT_LOCK_THRESHOLD_SEL_COARSE);
	dpio_val |= (0x5 << DPIO_CHV_INT_LOCK_THRESHOLD_SHIFT);
	if (!bestm2_frac)
		dpio_val |= DPIO_CHV_INT_LOCK_THRESHOLD_SEL_COARSE;
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW9(port), dpio_val);

	/* Loop filter coefficients depend on the VCO frequency band */
	if (vco == 5400000) {
		loopfilter |= (0x3 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0x8 << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x1 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0x9;
	} else if (vco <= 6200000) {
		loopfilter |= (0x5 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0xB << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x3 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0x9;
	} else if (vco <= 6480000) {
		loopfilter |= (0x4 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0x9 << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x3 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0x8;
	} else {
		/* Not supported. Apply the same limits as in the max case */
		loopfilter |= (0x4 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0x9 << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x3 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0;
	}
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW6(port), loopfilter);

	dpio_val = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW8(port));
	dpio_val &= ~DPIO_CHV_TDC_TARGET_CNT_MASK;
	dpio_val |= (tribuf_calcntr << DPIO_CHV_TDC_TARGET_CNT_SHIFT);
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW8(port), dpio_val);

	/* AFC Recal */
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW14(port),
			vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW14(port)) |
			DPIO_AFC_RECAL);

	vlv_dpio_put(dev_priv);
}
8832
8833
8834
8835
8836
8837
8838
8839
8840
8841
8842
int vlv_force_pll_on(struct drm_i915_private *dev_priv, enum pipe pipe,
		     const struct dpll *dpll)
{
	struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
	struct intel_crtc_state *pipe_config;

	/* Build a throwaway crtc state just to program the PLL. */
	pipe_config = intel_crtc_state_alloc(crtc);
	if (!pipe_config)
		return -ENOMEM;

	pipe_config->cpu_transcoder = (enum transcoder)pipe;
	pipe_config->pixel_multiplier = 1;
	pipe_config->dpll = *dpll;

	/* compute + prepare + enable, with the platform-specific sequence */
	if (IS_CHERRYVIEW(dev_priv)) {
		chv_compute_dpll(crtc, pipe_config);
		chv_prepare_pll(crtc, pipe_config);
		chv_enable_pll(crtc, pipe_config);
	} else {
		vlv_compute_dpll(crtc, pipe_config);
		vlv_prepare_pll(crtc, pipe_config);
		vlv_enable_pll(crtc, pipe_config);
	}

	kfree(pipe_config);

	return 0;
}
8871
8872
8873
8874
8875
8876
8877
8878
8879
8880void vlv_force_pll_off(struct drm_i915_private *dev_priv, enum pipe pipe)
8881{
8882 if (IS_CHERRYVIEW(dev_priv))
8883 chv_disable_pll(dev_priv, pipe);
8884 else
8885 vlv_disable_pll(dev_priv, pipe);
8886}
8887
static void i9xx_compute_dpll(struct intel_crtc *crtc,
			      struct intel_crtc_state *crtc_state,
			      struct dpll *reduced_clock)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dpll;
	struct dpll *clock = &crtc_state->dpll;

	i9xx_update_pll_dividers(crtc, crtc_state, reduced_clock);

	dpll = DPLL_VGA_MODE_DIS;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS))
		dpll |= DPLLB_MODE_LVDS;
	else
		dpll |= DPLLB_MODE_DAC_SERIAL;

	if (IS_I945G(dev_priv) || IS_I945GM(dev_priv) ||
	    IS_G33(dev_priv) || IS_PINEVIEW(dev_priv)) {
		dpll |= (crtc_state->pixel_multiplier - 1)
			<< SDVO_MULTIPLIER_SHIFT_HIRES;
	}

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO) ||
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	if (intel_crtc_has_dp_encoder(crtc_state))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	/* compute bitmask from p1 value */
	if (IS_PINEVIEW(dev_priv))
		dpll |= (1 << (clock->p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT_PINEVIEW;
	else {
		dpll |= (1 << (clock->p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
		if (IS_G4X(dev_priv) && reduced_clock)
			dpll |= (1 << (reduced_clock->p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;
	}
	switch (clock->p2) {
	case 5:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_5;
		break;
	case 7:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_7;
		break;
	case 10:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_10;
		break;
	case 14:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_14;
		break;
	}
	if (INTEL_GEN(dev_priv) >= 4)
		dpll |= (6 << PLL_LOAD_PULSE_PHASE_SHIFT);

	/* Select the reference input: TV clock, SSC for LVDS, or DREF. */
	if (crtc_state->sdvo_tv_clock)
		dpll |= PLL_REF_INPUT_TVCLKINBC;
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
		 intel_panel_use_ssc(dev_priv))
		dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
	else
		dpll |= PLL_REF_INPUT_DREFCLK;

	dpll |= DPLL_VCO_ENABLE;
	crtc_state->dpll_hw_state.dpll = dpll;

	/* gen4+ programs the pixel multiplier via DPLL_MD instead. */
	if (INTEL_GEN(dev_priv) >= 4) {
		u32 dpll_md = (crtc_state->pixel_multiplier - 1)
			<< DPLL_MD_UDI_MULTIPLIER_SHIFT;
		crtc_state->dpll_hw_state.dpll_md = dpll_md;
	}
}
8960
static void i8xx_compute_dpll(struct intel_crtc *crtc,
			      struct intel_crtc_state *crtc_state,
			      struct dpll *reduced_clock)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	u32 dpll;
	struct dpll *clock = &crtc_state->dpll;

	i9xx_update_pll_dividers(crtc, crtc_state, reduced_clock);

	dpll = DPLL_VGA_MODE_DIS;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		dpll |= (1 << (clock->p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
	} else {
		if (clock->p1 == 2)
			dpll |= PLL_P1_DIVIDE_BY_TWO;
		else
			dpll |= (clock->p1 - 2) << DPLL_FPA01_P1_POST_DIV_SHIFT;
		if (clock->p2 == 4)
			dpll |= PLL_P2_DIVIDE_BY_4;
	}

	/*
	 * Bspec:
	 * "[Almador Errata]: For the correct operation of the muxed DVO pins
	 *  (GDEVSELB/I2Cdata, GIRDBY/I2CClk) and (GFRAMEB/DVI_Data,
	 *  GTRDYB/DVI_Clk): Bit 31 (DPLL VCO Enable) and Bit 30 (2X Clock
	 *  Enable) must be set to '1' in both the DPLL A Control Register
	 *  (06014h-06017h) and DPLL B Control Register (06018h-0601Bh)."
	 *
	 * For simplicity we simply keep both bits always enabled in
	 * both DPLLs. The spec says we should disable the DVO 2X clock
	 * when not needed, but this seems to work fine in practice.
	 */
	if (IS_I830(dev_priv) ||
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DVO))
		dpll |= DPLL_DVO_2X_MODE;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
	    intel_panel_use_ssc(dev_priv))
		dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
	else
		dpll |= PLL_REF_INPUT_DREFCLK;

	dpll |= DPLL_VCO_ENABLE;
	crtc_state->dpll_hw_state.dpll = dpll;
}
9010
static void intel_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	u32 crtc_vtotal, crtc_vblank_end;
	int vsyncshift = 0;

	/*
	 * We need to be careful not to change the adjusted mode, for
	 * otherwise the hw state checker will get angry at the mismatch.
	 */
	crtc_vtotal = adjusted_mode->crtc_vtotal;
	crtc_vblank_end = adjusted_mode->crtc_vblank_end;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* the chip adds 2 halflines automatically */
		crtc_vtotal -= 1;
		crtc_vblank_end -= 1;

		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO))
			vsyncshift = (adjusted_mode->crtc_htotal - 1) / 2;
		else
			vsyncshift = adjusted_mode->crtc_hsync_start -
				adjusted_mode->crtc_htotal / 2;
		if (vsyncshift < 0)
			vsyncshift += adjusted_mode->crtc_htotal;
	}

	if (INTEL_GEN(dev_priv) > 3)
		intel_de_write(dev_priv, VSYNCSHIFT(cpu_transcoder),
			       vsyncshift);

	/* Timing registers encode each value minus one, low|high<<16. */
	intel_de_write(dev_priv, HTOTAL(cpu_transcoder),
		       (adjusted_mode->crtc_hdisplay - 1) | ((adjusted_mode->crtc_htotal - 1) << 16));
	intel_de_write(dev_priv, HBLANK(cpu_transcoder),
		       (adjusted_mode->crtc_hblank_start - 1) | ((adjusted_mode->crtc_hblank_end - 1) << 16));
	intel_de_write(dev_priv, HSYNC(cpu_transcoder),
		       (adjusted_mode->crtc_hsync_start - 1) | ((adjusted_mode->crtc_hsync_end - 1) << 16));

	intel_de_write(dev_priv, VTOTAL(cpu_transcoder),
		       (adjusted_mode->crtc_vdisplay - 1) | ((crtc_vtotal - 1) << 16));
	intel_de_write(dev_priv, VBLANK(cpu_transcoder),
		       (adjusted_mode->crtc_vblank_start - 1) | ((crtc_vblank_end - 1) << 16));
	intel_de_write(dev_priv, VSYNC(cpu_transcoder),
		       (adjusted_mode->crtc_vsync_start - 1) | ((adjusted_mode->crtc_vsync_end - 1) << 16));

	/*
	 * Workaround: when the EDP input selection is B, the VTOTAL_B must be
	 * programmed with the VTOTAL_EDP value. Since pipe B and C share the
	 * same VTOTAL_C value as well, (note: VTOTALB and VTOTALC are
	 * documented on the DDI_FUNC_CTL register description, EDP Input
	 * Select bits.)
	 */
	if (IS_HASWELL(dev_priv) && cpu_transcoder == TRANSCODER_EDP &&
	    (pipe == PIPE_B || pipe == PIPE_C))
		intel_de_write(dev_priv, VTOTAL(pipe),
			       intel_de_read(dev_priv, VTOTAL(cpu_transcoder)));

}
9068
9069static void intel_set_pipe_src_size(const struct intel_crtc_state *crtc_state)
9070{
9071 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
9072 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
9073 enum pipe pipe = crtc->pipe;
9074
9075
9076
9077
9078 intel_de_write(dev_priv, PIPESRC(pipe),
9079 ((crtc_state->pipe_src_w - 1) << 16) | (crtc_state->pipe_src_h - 1));
9080}
9081
9082static bool intel_pipe_is_interlaced(const struct intel_crtc_state *crtc_state)
9083{
9084 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
9085 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
9086
9087 if (IS_GEN(dev_priv, 2))
9088 return false;
9089
9090 if (INTEL_GEN(dev_priv) >= 9 ||
9091 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
9092 return intel_de_read(dev_priv, PIPECONF(cpu_transcoder)) & PIPECONF_INTERLACE_MASK_HSW;
9093 else
9094 return intel_de_read(dev_priv, PIPECONF(cpu_transcoder)) & PIPECONF_INTERLACE_MASK;
9095}
9096
static void intel_get_transcoder_timings(struct intel_crtc *crtc,
					 struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	u32 tmp;

	/* Timing registers store each value minus one: low word | high << 16. */
	tmp = intel_de_read(dev_priv, HTOTAL(cpu_transcoder));
	pipe_config->hw.adjusted_mode.crtc_hdisplay = (tmp & 0xffff) + 1;
	pipe_config->hw.adjusted_mode.crtc_htotal = ((tmp >> 16) & 0xffff) + 1;

	/* DSI transcoders are skipped here — presumably they lack these blank registers; TODO confirm */
	if (!transcoder_is_dsi(cpu_transcoder)) {
		tmp = intel_de_read(dev_priv, HBLANK(cpu_transcoder));
		pipe_config->hw.adjusted_mode.crtc_hblank_start =
			(tmp & 0xffff) + 1;
		pipe_config->hw.adjusted_mode.crtc_hblank_end =
			((tmp >> 16) & 0xffff) + 1;
	}
	tmp = intel_de_read(dev_priv, HSYNC(cpu_transcoder));
	pipe_config->hw.adjusted_mode.crtc_hsync_start = (tmp & 0xffff) + 1;
	pipe_config->hw.adjusted_mode.crtc_hsync_end = ((tmp >> 16) & 0xffff) + 1;

	tmp = intel_de_read(dev_priv, VTOTAL(cpu_transcoder));
	pipe_config->hw.adjusted_mode.crtc_vdisplay = (tmp & 0xffff) + 1;
	pipe_config->hw.adjusted_mode.crtc_vtotal = ((tmp >> 16) & 0xffff) + 1;

	if (!transcoder_is_dsi(cpu_transcoder)) {
		tmp = intel_de_read(dev_priv, VBLANK(cpu_transcoder));
		pipe_config->hw.adjusted_mode.crtc_vblank_start =
			(tmp & 0xffff) + 1;
		pipe_config->hw.adjusted_mode.crtc_vblank_end =
			((tmp >> 16) & 0xffff) + 1;
	}
	tmp = intel_de_read(dev_priv, VSYNC(cpu_transcoder));
	pipe_config->hw.adjusted_mode.crtc_vsync_start = (tmp & 0xffff) + 1;
	pipe_config->hw.adjusted_mode.crtc_vsync_end = ((tmp >> 16) & 0xffff) + 1;

	/* Undo the halfline adjustment done when programming interlaced modes. */
	if (intel_pipe_is_interlaced(pipe_config)) {
		pipe_config->hw.adjusted_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		pipe_config->hw.adjusted_mode.crtc_vtotal += 1;
		pipe_config->hw.adjusted_mode.crtc_vblank_end += 1;
	}
}
9141
9142static void intel_get_pipe_src_size(struct intel_crtc *crtc,
9143 struct intel_crtc_state *pipe_config)
9144{
9145 struct drm_device *dev = crtc->base.dev;
9146 struct drm_i915_private *dev_priv = to_i915(dev);
9147 u32 tmp;
9148
9149 tmp = intel_de_read(dev_priv, PIPESRC(crtc->pipe));
9150 pipe_config->pipe_src_h = (tmp & 0xffff) + 1;
9151 pipe_config->pipe_src_w = ((tmp >> 16) & 0xffff) + 1;
9152}
9153
static void i9xx_set_pipeconf(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 pipeconf;

	pipeconf = 0;

	/* we keep both pipes enabled on 830 */
	if (IS_I830(dev_priv))
		pipeconf |= intel_de_read(dev_priv, PIPECONF(crtc->pipe)) & PIPECONF_ENABLE;

	if (crtc_state->double_wide)
		pipeconf |= PIPECONF_DOUBLE_WIDE;

	/* only g4x and later have fancy bpc/dither controls */
	if (IS_G4X(dev_priv) || IS_VALLEYVIEW(dev_priv) ||
	    IS_CHERRYVIEW(dev_priv)) {
		/* Bspec claims that we can't use dithering for 30bpp pipes. */
		if (crtc_state->dither && crtc_state->pipe_bpp != 30)
			pipeconf |= PIPECONF_DITHER_EN |
				    PIPECONF_DITHER_TYPE_SP;

		switch (crtc_state->pipe_bpp) {
		case 18:
			pipeconf |= PIPECONF_6BPC;
			break;
		case 24:
			pipeconf |= PIPECONF_8BPC;
			break;
		case 30:
			pipeconf |= PIPECONF_10BPC;
			break;
		default:
			/* Case prevented by intel_choose_pipe_bpp_dither. */
			BUG();
		}
	}

	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE) {
		/* Pre-gen4 and SDVO only support the field-indication flavour. */
		if (INTEL_GEN(dev_priv) < 4 ||
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO))
			pipeconf |= PIPECONF_INTERLACE_W_FIELD_INDICATION;
		else
			pipeconf |= PIPECONF_INTERLACE_W_SYNC_SHIFT;
	} else {
		pipeconf |= PIPECONF_PROGRESSIVE;
	}

	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
	     crtc_state->limited_color_range)
		pipeconf |= PIPECONF_COLOR_RANGE_SELECT;

	pipeconf |= PIPECONF_GAMMA_MODE(crtc_state->gamma_mode);

	pipeconf |= PIPECONF_FRAME_START_DELAY(0);

	intel_de_write(dev_priv, PIPECONF(crtc->pipe), pipeconf);
	intel_de_posting_read(dev_priv, PIPECONF(crtc->pipe));
}
9214
9215static int i8xx_crtc_compute_clock(struct intel_crtc *crtc,
9216 struct intel_crtc_state *crtc_state)
9217{
9218 struct drm_device *dev = crtc->base.dev;
9219 struct drm_i915_private *dev_priv = to_i915(dev);
9220 const struct intel_limit *limit;
9221 int refclk = 48000;
9222
9223 memset(&crtc_state->dpll_hw_state, 0,
9224 sizeof(crtc_state->dpll_hw_state));
9225
9226 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
9227 if (intel_panel_use_ssc(dev_priv)) {
9228 refclk = dev_priv->vbt.lvds_ssc_freq;
9229 drm_dbg_kms(&dev_priv->drm,
9230 "using SSC reference clock of %d kHz\n",
9231 refclk);
9232 }
9233
9234 limit = &intel_limits_i8xx_lvds;
9235 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DVO)) {
9236 limit = &intel_limits_i8xx_dvo;
9237 } else {
9238 limit = &intel_limits_i8xx_dac;
9239 }
9240
9241 if (!crtc_state->clock_set &&
9242 !i9xx_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
9243 refclk, NULL, &crtc_state->dpll)) {
9244 drm_err(&dev_priv->drm,
9245 "Couldn't find PLL settings for mode!\n");
9246 return -EINVAL;
9247 }
9248
9249 i8xx_compute_dpll(crtc, crtc_state, NULL);
9250
9251 return 0;
9252}
9253
/*
 * Compute DPLL settings for G4x platforms.
 *
 * Selects the PLL limits table based on the attached output type
 * (dual/single-channel LVDS, HDMI/analog, SDVO, or the i9xx SDVO
 * fallback), using the VBT SSC reference for LVDS panels when enabled.
 * Returns 0 on success, -EINVAL when no dividers satisfy the limits.
 */
static int g4x_crtc_compute_clock(struct intel_crtc *crtc,
				  struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct intel_limit *limit;
	int refclk = 96000;

	memset(&crtc_state->dpll_hw_state, 0,
	       sizeof(crtc_state->dpll_hw_state));

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		if (intel_panel_use_ssc(dev_priv)) {
			refclk = dev_priv->vbt.lvds_ssc_freq;
			drm_dbg_kms(&dev_priv->drm,
				    "using SSC reference clock of %d kHz\n",
				    refclk);
		}

		/* Dual-link LVDS needs different divider limits. */
		if (intel_is_dual_link_lvds(dev_priv))
			limit = &intel_limits_g4x_dual_channel_lvds;
		else
			limit = &intel_limits_g4x_single_channel_lvds;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) ||
		   intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) {
		limit = &intel_limits_g4x_hdmi;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO)) {
		limit = &intel_limits_g4x_sdvo;
	} else {
		/* Unknown output type: fall back to the generic SDVO limits. */
		limit = &intel_limits_i9xx_sdvo;
	}

	/* Skip the search if userspace already fixed the clock. */
	if (!crtc_state->clock_set &&
	    !g4x_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
				refclk, NULL, &crtc_state->dpll)) {
		drm_err(&dev_priv->drm,
			"Couldn't find PLL settings for mode!\n");
		return -EINVAL;
	}

	i9xx_compute_dpll(crtc, crtc_state, NULL);

	return 0;
}
9298
9299static int pnv_crtc_compute_clock(struct intel_crtc *crtc,
9300 struct intel_crtc_state *crtc_state)
9301{
9302 struct drm_device *dev = crtc->base.dev;
9303 struct drm_i915_private *dev_priv = to_i915(dev);
9304 const struct intel_limit *limit;
9305 int refclk = 96000;
9306
9307 memset(&crtc_state->dpll_hw_state, 0,
9308 sizeof(crtc_state->dpll_hw_state));
9309
9310 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
9311 if (intel_panel_use_ssc(dev_priv)) {
9312 refclk = dev_priv->vbt.lvds_ssc_freq;
9313 drm_dbg_kms(&dev_priv->drm,
9314 "using SSC reference clock of %d kHz\n",
9315 refclk);
9316 }
9317
9318 limit = &pnv_limits_lvds;
9319 } else {
9320 limit = &pnv_limits_sdvo;
9321 }
9322
9323 if (!crtc_state->clock_set &&
9324 !pnv_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
9325 refclk, NULL, &crtc_state->dpll)) {
9326 drm_err(&dev_priv->drm,
9327 "Couldn't find PLL settings for mode!\n");
9328 return -EINVAL;
9329 }
9330
9331 i9xx_compute_dpll(crtc, crtc_state, NULL);
9332
9333 return 0;
9334}
9335
/*
 * Compute DPLL settings for i9xx platforms.
 *
 * Uses the LVDS limits (with optional VBT SSC reference clock) for
 * panel outputs and the SDVO limits otherwise. Returns 0 on success,
 * -EINVAL when no dividers satisfy the limits.
 */
static int i9xx_crtc_compute_clock(struct intel_crtc *crtc,
				   struct intel_crtc_state *crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	const struct intel_limit *limit;
	int refclk = 96000;

	memset(&crtc_state->dpll_hw_state, 0,
	       sizeof(crtc_state->dpll_hw_state));

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		if (intel_panel_use_ssc(dev_priv)) {
			refclk = dev_priv->vbt.lvds_ssc_freq;
			drm_dbg_kms(&dev_priv->drm,
				    "using SSC reference clock of %d kHz\n",
				    refclk);
		}

		limit = &intel_limits_i9xx_lvds;
	} else {
		limit = &intel_limits_i9xx_sdvo;
	}

	/* Skip the search if userspace already fixed the clock. */
	if (!crtc_state->clock_set &&
	    !i9xx_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
				 refclk, NULL, &crtc_state->dpll)) {
		drm_err(&dev_priv->drm,
			"Couldn't find PLL settings for mode!\n");
		return -EINVAL;
	}

	i9xx_compute_dpll(crtc, crtc_state, NULL);

	return 0;
}
9372
9373static int chv_crtc_compute_clock(struct intel_crtc *crtc,
9374 struct intel_crtc_state *crtc_state)
9375{
9376 int refclk = 100000;
9377 const struct intel_limit *limit = &intel_limits_chv;
9378 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
9379
9380 memset(&crtc_state->dpll_hw_state, 0,
9381 sizeof(crtc_state->dpll_hw_state));
9382
9383 if (!crtc_state->clock_set &&
9384 !chv_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
9385 refclk, NULL, &crtc_state->dpll)) {
9386 drm_err(&i915->drm, "Couldn't find PLL settings for mode!\n");
9387 return -EINVAL;
9388 }
9389
9390 chv_compute_dpll(crtc, crtc_state);
9391
9392 return 0;
9393}
9394
/*
 * Compute DPLL settings for Valleyview.
 *
 * VLV always uses a 100 MHz reference clock and a single limits table.
 * Returns 0 on success, -EINVAL when no dividers satisfy the limits.
 */
static int vlv_crtc_compute_clock(struct intel_crtc *crtc,
				  struct intel_crtc_state *crtc_state)
{
	int refclk = 100000;
	const struct intel_limit *limit = &intel_limits_vlv;
	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);

	memset(&crtc_state->dpll_hw_state, 0,
	       sizeof(crtc_state->dpll_hw_state));

	/* Skip the search if userspace already fixed the clock. */
	if (!crtc_state->clock_set &&
	    !vlv_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
				refclk, NULL, &crtc_state->dpll)) {
		drm_err(&i915->drm, "Couldn't find PLL settings for mode!\n");
		return -EINVAL;
	}

	vlv_compute_dpll(crtc, crtc_state);

	return 0;
}
9416
9417static bool i9xx_has_pfit(struct drm_i915_private *dev_priv)
9418{
9419 if (IS_I830(dev_priv))
9420 return false;
9421
9422 return INTEL_GEN(dev_priv) >= 4 ||
9423 IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv);
9424}
9425
/*
 * Read back the GMCH panel fitter state into the crtc state.
 *
 * Bails out early when the platform has no pfit, the pfit is disabled,
 * or it is attached to a different pipe than ours.
 */
static void i9xx_get_pfit_config(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 tmp;

	if (!i9xx_has_pfit(dev_priv))
		return;

	tmp = intel_de_read(dev_priv, PFIT_CONTROL);
	if (!(tmp & PFIT_ENABLE))
		return;

	/* Check whether the pfit is attached to our pipe. */
	if (INTEL_GEN(dev_priv) < 4) {
		/* Before gen4 the pfit is hardwired to pipe B. */
		if (crtc->pipe != PIPE_B)
			return;
	} else {
		if ((tmp & PFIT_PIPE_MASK) != (crtc->pipe << PFIT_PIPE_SHIFT))
			return;
	}

	crtc_state->gmch_pfit.control = tmp;
	crtc_state->gmch_pfit.pgm_ratios =
		intel_de_read(dev_priv, PFIT_PGM_RATIOS);
}
9452
/*
 * Read back the VLV PLL dividers via DPIO and compute the resulting
 * port clock. Reference clock is the fixed 100 MHz VLV refclk.
 */
static void vlv_crtc_clock_get(struct intel_crtc *crtc,
			       struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	struct dpll clock;
	u32 mdiv;
	int refclk = 100000;

	/* Nothing to read out if the PLL isn't running. */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	vlv_dpio_get(dev_priv);
	mdiv = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW3(pipe));
	vlv_dpio_put(dev_priv);

	/* Unpack the divider fields from PLL_DW3. */
	clock.m1 = (mdiv >> DPIO_M1DIV_SHIFT) & 7;
	clock.m2 = mdiv & DPIO_M2DIV_MASK;
	clock.n = (mdiv >> DPIO_N_SHIFT) & 0xf;
	clock.p1 = (mdiv >> DPIO_P1_SHIFT) & 7;
	clock.p2 = (mdiv >> DPIO_P2_SHIFT) & 0x1f;

	pipe_config->port_clock = vlv_calc_dpll_params(refclk, &clock);
}
9479
/*
 * Read back the primary plane's framebuffer configuration that the
 * BIOS/GOP left programmed, so the boot framebuffer can be inherited.
 *
 * Fills @plane_config (tiling, rotation, base, size) and allocates an
 * intel_framebuffer describing it; silently returns if the plane is
 * disabled or the allocation fails.
 */
static void
i9xx_get_initial_plane_config(struct intel_crtc *crtc,
			      struct intel_initial_plane_config *plane_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_plane *plane = to_intel_plane(crtc->base.primary);
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	enum pipe pipe;
	u32 val, base, offset;
	int fourcc, pixel_format;
	unsigned int aligned_height;
	struct drm_framebuffer *fb;
	struct intel_framebuffer *intel_fb;

	if (!plane->get_hw_state(plane, &pipe))
		return;

	drm_WARN_ON(dev, pipe != crtc->pipe);

	intel_fb = kzalloc(sizeof(*intel_fb), GFP_KERNEL);
	if (!intel_fb) {
		drm_dbg_kms(&dev_priv->drm, "failed to alloc fb\n");
		return;
	}

	fb = &intel_fb->base;

	fb->dev = dev;

	val = intel_de_read(dev_priv, DSPCNTR(i9xx_plane));

	/* Tiling and rotation bits only exist on gen4+. */
	if (INTEL_GEN(dev_priv) >= 4) {
		if (val & DISPPLANE_TILED) {
			plane_config->tiling = I915_TILING_X;
			fb->modifier = I915_FORMAT_MOD_X_TILED;
		}

		if (val & DISPPLANE_ROTATE_180)
			plane_config->rotation = DRM_MODE_ROTATE_180;
	}

	/* CHV pipe B additionally supports horizontal mirroring. */
	if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B &&
	    val & DISPPLANE_MIRROR)
		plane_config->rotation |= DRM_MODE_REFLECT_X;

	pixel_format = val & DISPPLANE_PIXFORMAT_MASK;
	fourcc = i9xx_format_to_fourcc(pixel_format);
	fb->format = drm_format_info(fourcc);

	/* Surface base/offset registers vary by platform generation. */
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) {
		offset = intel_de_read(dev_priv, DSPOFFSET(i9xx_plane));
		base = intel_de_read(dev_priv, DSPSURF(i9xx_plane)) & 0xfffff000;
	} else if (INTEL_GEN(dev_priv) >= 4) {
		if (plane_config->tiling)
			offset = intel_de_read(dev_priv,
					       DSPTILEOFF(i9xx_plane));
		else
			offset = intel_de_read(dev_priv,
					       DSPLINOFF(i9xx_plane));
		base = intel_de_read(dev_priv, DSPSURF(i9xx_plane)) & 0xfffff000;
	} else {
		base = intel_de_read(dev_priv, DSPADDR(i9xx_plane));
	}
	plane_config->base = base;

	/* Derive the fb size from the pipe source size. */
	val = intel_de_read(dev_priv, PIPESRC(pipe));
	fb->width = ((val >> 16) & 0xfff) + 1;
	fb->height = ((val >> 0) & 0xfff) + 1;

	val = intel_de_read(dev_priv, DSPSTRIDE(i9xx_plane));
	fb->pitches[0] = val & 0xffffffc0;

	aligned_height = intel_fb_align_height(fb, 0, fb->height);

	plane_config->size = fb->pitches[0] * aligned_height;

	drm_dbg_kms(&dev_priv->drm,
		    "%s/%s with fb: size=%dx%d@%d, offset=%x, pitch %d, size 0x%x\n",
		    crtc->base.name, plane->base.name, fb->width, fb->height,
		    fb->format->cpp[0] * 8, base, fb->pitches[0],
		    plane_config->size);

	plane_config->fb = intel_fb;
}
9565
/*
 * Read back the CHV PLL dividers via DPIO and compute the resulting
 * port clock. Reference clock is the fixed 100 MHz CHV refclk.
 */
static void chv_crtc_clock_get(struct intel_crtc *crtc,
			       struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	struct dpll clock;
	u32 cmn_dw13, pll_dw0, pll_dw1, pll_dw2, pll_dw3;
	int refclk = 100000;

	/* Nothing to read out if the PLL isn't running. */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	vlv_dpio_get(dev_priv);
	cmn_dw13 = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW13(port));
	pll_dw0 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW0(port));
	pll_dw1 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW1(port));
	pll_dw2 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW2(port));
	pll_dw3 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW3(port));
	vlv_dpio_put(dev_priv);

	clock.m1 = (pll_dw1 & 0x7) == DPIO_CHV_M1_DIV_BY_2 ? 2 : 0;
	/* m2 is a 22.22 fixed-point value: integer part from DW0 ... */
	clock.m2 = (pll_dw0 & 0xff) << 22;
	/* ... plus the fractional part from DW2 when fractional mode is on. */
	if (pll_dw3 & DPIO_CHV_FRAC_DIV_EN)
		clock.m2 |= pll_dw2 & 0x3fffff;
	clock.n = (pll_dw1 >> DPIO_CHV_N_DIV_SHIFT) & 0xf;
	clock.p1 = (cmn_dw13 >> DPIO_CHV_P1_DIV_SHIFT) & 0x7;
	clock.p2 = (cmn_dw13 >> DPIO_CHV_P2_DIV_SHIFT) & 0x1f;

	pipe_config->port_clock = chv_calc_dpll_params(refclk, &clock);
}
9599
9600static enum intel_output_format
9601bdw_get_pipemisc_output_format(struct intel_crtc *crtc)
9602{
9603 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
9604 u32 tmp;
9605
9606 tmp = intel_de_read(dev_priv, PIPEMISC(crtc->pipe));
9607
9608 if (tmp & PIPEMISC_YUV420_ENABLE) {
9609
9610 drm_WARN_ON(&dev_priv->drm,
9611 (tmp & PIPEMISC_YUV420_MODE_FULL_BLEND) == 0);
9612
9613 return INTEL_OUTPUT_FORMAT_YCBCR420;
9614 } else if (tmp & PIPEMISC_OUTPUT_COLORSPACE_YUV) {
9615 return INTEL_OUTPUT_FORMAT_YCBCR444;
9616 } else {
9617 return INTEL_OUTPUT_FORMAT_RGB;
9618 }
9619}
9620
/*
 * Read back the gamma/CSC enable state from the primary plane's
 * DSPCNTR register into the crtc state.
 */
static void i9xx_get_pipe_color_config(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct intel_plane *plane = to_intel_plane(crtc->base.primary);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	u32 tmp;

	tmp = intel_de_read(dev_priv, DSPCNTR(i9xx_plane));

	if (tmp & DISPPLANE_GAMMA_ENABLE)
		crtc_state->gamma_enable = true;

	/* The pipe CSC bit only exists on non-GMCH platforms. */
	if (!HAS_GMCH(dev_priv) &&
	    tmp & DISPPLANE_PIPE_CSC_ENABLE)
		crtc_state->csc_enable = true;
}
9638
/*
 * Read back the full pipe configuration for GMCH platforms.
 *
 * Takes a power-domain reference for the pipe before touching any
 * registers; returns false if the power well is off or the pipe is
 * disabled, true when @pipe_config was filled in.
 */
static bool i9xx_get_pipe_config(struct intel_crtc *crtc,
				 struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	u32 tmp;
	bool ret;

	power_domain = POWER_DOMAIN_PIPE(crtc->pipe);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	/* On GMCH the transcoder is always tied 1:1 to the pipe. */
	pipe_config->cpu_transcoder = (enum transcoder) crtc->pipe;
	pipe_config->shared_dpll = NULL;

	ret = false;

	tmp = intel_de_read(dev_priv, PIPECONF(crtc->pipe));
	if (!(tmp & PIPECONF_ENABLE))
		goto out;

	/* Only g4x+ encodes the pipe bpc in PIPECONF. */
	if (IS_G4X(dev_priv) || IS_VALLEYVIEW(dev_priv) ||
	    IS_CHERRYVIEW(dev_priv)) {
		switch (tmp & PIPECONF_BPC_MASK) {
		case PIPECONF_6BPC:
			pipe_config->pipe_bpp = 18;
			break;
		case PIPECONF_8BPC:
			pipe_config->pipe_bpp = 24;
			break;
		case PIPECONF_10BPC:
			pipe_config->pipe_bpp = 30;
			break;
		default:
			break;
		}
	}

	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
	    (tmp & PIPECONF_COLOR_RANGE_SELECT))
		pipe_config->limited_color_range = true;

	pipe_config->gamma_mode = (tmp & PIPECONF_GAMMA_MODE_MASK_I9XX) >>
		PIPECONF_GAMMA_MODE_SHIFT;

	if (IS_CHERRYVIEW(dev_priv))
		pipe_config->cgm_mode = intel_de_read(dev_priv,
						      CGM_PIPE_MODE(crtc->pipe));

	i9xx_get_pipe_color_config(pipe_config);
	intel_color_get_config(pipe_config);

	if (INTEL_GEN(dev_priv) < 4)
		pipe_config->double_wide = tmp & PIPECONF_DOUBLE_WIDE;

	intel_get_transcoder_timings(crtc, pipe_config);
	intel_get_pipe_src_size(crtc, pipe_config);

	i9xx_get_pfit_config(pipe_config);

	if (INTEL_GEN(dev_priv) >= 4) {
		/* CHV pipes B/C: use the cached DPLL_MD value. */
		if (IS_CHERRYVIEW(dev_priv) && crtc->pipe != PIPE_A)
			tmp = dev_priv->chv_dpll_md[crtc->pipe];
		else
			tmp = intel_de_read(dev_priv, DPLL_MD(crtc->pipe));
		pipe_config->pixel_multiplier =
			((tmp & DPLL_MD_UDI_MULTIPLIER_MASK)
			 >> DPLL_MD_UDI_MULTIPLIER_SHIFT) + 1;
		pipe_config->dpll_hw_state.dpll_md = tmp;
	} else if (IS_I945G(dev_priv) || IS_I945GM(dev_priv) ||
		   IS_G33(dev_priv) || IS_PINEVIEW(dev_priv)) {
		tmp = intel_de_read(dev_priv, DPLL(crtc->pipe));
		pipe_config->pixel_multiplier =
			((tmp & SDVO_MULTIPLIER_MASK)
			 >> SDVO_MULTIPLIER_SHIFT_HIRES) + 1;
	} else {
		/* No pixel multiplier support on the remaining platforms. */
		pipe_config->pixel_multiplier = 1;
	}
	pipe_config->dpll_hw_state.dpll = intel_de_read(dev_priv,
							DPLL(crtc->pipe));
	if (!IS_VALLEYVIEW(dev_priv) && !IS_CHERRYVIEW(dev_priv)) {
		pipe_config->dpll_hw_state.fp0 = intel_de_read(dev_priv,
							       FP0(crtc->pipe));
		pipe_config->dpll_hw_state.fp1 = intel_de_read(dev_priv,
							       FP1(crtc->pipe));
	} else {
		/* Mask out read-only status bits so state checks compare clean. */
		pipe_config->dpll_hw_state.dpll &= ~(DPLL_LOCK_VLV |
						     DPLL_PORTC_READY_MASK |
						     DPLL_PORTB_READY_MASK);
	}

	if (IS_CHERRYVIEW(dev_priv))
		chv_crtc_clock_get(crtc, pipe_config);
	else if (IS_VALLEYVIEW(dev_priv))
		vlv_crtc_clock_get(crtc, pipe_config);
	else
		i9xx_crtc_clock_get(crtc, pipe_config);

	/*
	 * Dotclock follows from the port clock divided by the pixel
	 * multiplier, since the clock_get helpers only recover the
	 * port clock.
	 */
	pipe_config->hw.adjusted_mode.crtc_clock =
		pipe_config->port_clock / pipe_config->pixel_multiplier;

	ret = true;

out:
	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
9760
/*
 * Initialize the PCH reference clock (PCH_DREF_CONTROL) on ILK-class
 * PCHs based on which outputs are present (LVDS, CPU eDP) and whether
 * SSC can/should be used.
 *
 * The desired register value is computed up front in @final; the
 * hardware is then stepped toward it through the documented write +
 * posting-read + udelay sequence. BUG_ON at the end asserts the
 * stepping actually arrived at the computed value.
 */
static void ilk_init_pch_refclk(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	int i;
	u32 val, final;
	bool has_lvds = false;
	bool has_cpu_edp = false;
	bool has_panel = false;
	bool has_ck505 = false;
	bool can_ssc = false;
	bool using_ssc_source = false;

	/* We need to take the global config into account */
	for_each_intel_encoder(&dev_priv->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_LVDS:
			has_panel = true;
			has_lvds = true;
			break;
		case INTEL_OUTPUT_EDP:
			has_panel = true;
			if (encoder->port == PORT_A)
				has_cpu_edp = true;
			break;
		default:
			break;
		}
	}

	if (HAS_PCH_IBX(dev_priv)) {
		/* On IBX an external CK505 clock chip may provide SSC. */
		has_ck505 = dev_priv->vbt.display_clock_mode;
		can_ssc = has_ck505;
	} else {
		has_ck505 = false;
		can_ssc = true;
	}

	/* Check if any enabled DPLL already uses the SSC source. */
	for (i = 0; i < dev_priv->dpll.num_shared_dpll; i++) {
		u32 temp = intel_de_read(dev_priv, PCH_DPLL(i));

		if (!(temp & DPLL_VCO_ENABLE))
			continue;

		if ((temp & PLL_REF_INPUT_MASK) ==
		    PLLB_REF_INPUT_SPREADSPECTRUMIN) {
			using_ssc_source = true;
			break;
		}
	}

	drm_dbg_kms(&dev_priv->drm,
		    "has_panel %d has_lvds %d has_ck505 %d using_ssc_source %d\n",
		    has_panel, has_lvds, has_ck505, using_ssc_source);

	val = intel_de_read(dev_priv, PCH_DREF_CONTROL);

	/* Compute the final register value we want to end up with. */
	final = val;
	final &= ~DREF_NONSPREAD_SOURCE_MASK;
	if (has_ck505)
		final |= DREF_NONSPREAD_CK505_ENABLE;
	else
		final |= DREF_NONSPREAD_SOURCE_ENABLE;

	final &= ~DREF_SSC_SOURCE_MASK;
	final &= ~DREF_CPU_SOURCE_OUTPUT_MASK;
	final &= ~DREF_SSC1_ENABLE;

	if (has_panel) {
		final |= DREF_SSC_SOURCE_ENABLE;

		if (intel_panel_use_ssc(dev_priv) && can_ssc)
			final |= DREF_SSC1_ENABLE;

		if (has_cpu_edp) {
			if (intel_panel_use_ssc(dev_priv) && can_ssc)
				final |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			else
				final |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
		} else
			final |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
	} else if (using_ssc_source) {
		final |= DREF_SSC_SOURCE_ENABLE;
		final |= DREF_SSC1_ENABLE;
	}

	/* Already in the desired state: nothing to do. */
	if (final == val)
		return;

	/* Always enable nonspread source */
	val &= ~DREF_NONSPREAD_SOURCE_MASK;

	if (has_ck505)
		val |= DREF_NONSPREAD_CK505_ENABLE;
	else
		val |= DREF_NONSPREAD_SOURCE_ENABLE;

	if (has_panel) {
		val &= ~DREF_SSC_SOURCE_MASK;
		val |= DREF_SSC_SOURCE_ENABLE;

		/* SSC must be turned on before enabling the CPU output. */
		if (intel_panel_use_ssc(dev_priv) && can_ssc) {
			drm_dbg_kms(&dev_priv->drm, "Using SSC on panel\n");
			val |= DREF_SSC1_ENABLE;
		} else
			val &= ~DREF_SSC1_ENABLE;

		/* Each step requires a posting read plus settle delay. */
		intel_de_write(dev_priv, PCH_DREF_CONTROL, val);
		intel_de_posting_read(dev_priv, PCH_DREF_CONTROL);
		udelay(200);

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Enable CPU source on CPU attached eDP. */
		if (has_cpu_edp) {
			if (intel_panel_use_ssc(dev_priv) && can_ssc) {
				drm_dbg_kms(&dev_priv->drm,
					    "Using SSC on eDP\n");
				val |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			} else
				val |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
		} else
			val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;

		intel_de_write(dev_priv, PCH_DREF_CONTROL, val);
		intel_de_posting_read(dev_priv, PCH_DREF_CONTROL);
		udelay(200);
	} else {
		drm_dbg_kms(&dev_priv->drm, "Disabling CPU source output\n");

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Turn off CPU output */
		val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;

		intel_de_write(dev_priv, PCH_DREF_CONTROL, val);
		intel_de_posting_read(dev_priv, PCH_DREF_CONTROL);
		udelay(200);

		/* Keep SSC running if another DPLL still consumes it. */
		if (!using_ssc_source) {
			drm_dbg_kms(&dev_priv->drm, "Disabling SSC source\n");

			/* Turn off the SSC source */
			val &= ~DREF_SSC_SOURCE_MASK;
			val |= DREF_SSC_SOURCE_DISABLE;

			/* Turn off SSC1 */
			val &= ~DREF_SSC1_ENABLE;

			intel_de_write(dev_priv, PCH_DREF_CONTROL, val);
			intel_de_posting_read(dev_priv, PCH_DREF_CONTROL);
			udelay(200);
		}
	}

	/* The stepped writes must have converged on the computed value. */
	BUG_ON(val != final);
}
9929
/*
 * Pulse the FDI mPHY reset via SOUTH_CHICKEN2: assert the reset bit,
 * wait (up to 100us) for the status to reflect it, then de-assert and
 * wait for the status to clear. Timeouts are logged but not fatal.
 */
static void lpt_reset_fdi_mphy(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = intel_de_read(dev_priv, SOUTH_CHICKEN2);
	tmp |= FDI_MPHY_IOSFSB_RESET_CTL;
	intel_de_write(dev_priv, SOUTH_CHICKEN2, tmp);

	if (wait_for_us(intel_de_read(dev_priv, SOUTH_CHICKEN2) &
			FDI_MPHY_IOSFSB_RESET_STATUS, 100))
		drm_err(&dev_priv->drm, "FDI mPHY reset assert timeout\n");

	tmp = intel_de_read(dev_priv, SOUTH_CHICKEN2);
	tmp &= ~FDI_MPHY_IOSFSB_RESET_CTL;
	intel_de_write(dev_priv, SOUTH_CHICKEN2, tmp);

	if (wait_for_us((intel_de_read(dev_priv, SOUTH_CHICKEN2) &
			 FDI_MPHY_IOSFSB_RESET_STATUS) == 0, 100))
		drm_err(&dev_priv->drm, "FDI mPHY reset de-assert timeout\n");
}
9950
/*
 * Program the FDI mPHY tuning registers through the sideband (SBI)
 * interface after an mPHY reset.
 *
 * The register offsets and values are opaque magic from the platform
 * programming sequence; registers come in 0x20xx/0x21xx pairs,
 * presumably one per FDI lane pair -- do not change individual values
 * without the corresponding workaround/BSpec reference.
 */
static void lpt_program_fdi_mphy(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = intel_sbi_read(dev_priv, 0x8008, SBI_MPHY);
	tmp &= ~(0xFF << 24);
	tmp |= (0x12 << 24);
	intel_sbi_write(dev_priv, 0x8008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2008, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(dev_priv, 0x2008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2108, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(dev_priv, 0x2108, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x206C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(dev_priv, 0x206C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x216C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(dev_priv, 0x216C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2080, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(dev_priv, 0x2080, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2180, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(dev_priv, 0x2180, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x208C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(dev_priv, 0x208C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x218C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(dev_priv, 0x218C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2098, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(dev_priv, 0x2098, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2198, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(dev_priv, 0x2198, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x20C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(dev_priv, 0x20C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x21C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(dev_priv, 0x21C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x20EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(dev_priv, 0x20EC, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x21EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(dev_priv, 0x21EC, tmp, SBI_MPHY);
}
10025
10026
10027
10028
10029
10030
10031
/*
 * Enable CLKOUT_DP on LPT PCHs, optionally with downspread and FDI
 * mPHY programming.
 *
 * FDI requires downspread, and LP PCH has no FDI; invalid combinations
 * are warned about and corrected. The SSC control is sequenced through
 * the sideband interface under sb_lock.
 */
static void lpt_enable_clkout_dp(struct drm_i915_private *dev_priv,
				 bool with_spread, bool with_fdi)
{
	u32 reg, tmp;

	if (drm_WARN(&dev_priv->drm, with_fdi && !with_spread,
		     "FDI requires downspread\n"))
		with_spread = true;
	if (drm_WARN(&dev_priv->drm, HAS_PCH_LPT_LP(dev_priv) &&
		     with_fdi, "LP PCH doesn't have FDI\n"))
		with_fdi = false;

	mutex_lock(&dev_priv->sb_lock);

	/* Un-disable the SSC block but keep the path alternate asserted. */
	tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
	tmp &= ~SBI_SSCCTL_DISABLE;
	tmp |= SBI_SSCCTL_PATHALT;
	intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);

	udelay(24);

	if (with_spread) {
		/* Release the path alternate to let the spread through. */
		tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
		tmp &= ~SBI_SSCCTL_PATHALT;
		intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);

		if (with_fdi) {
			lpt_reset_fdi_mphy(dev_priv);
			lpt_program_fdi_mphy(dev_priv);
		}
	}

	/* LP PCH uses a different sideband register for the buffer enable. */
	reg = HAS_PCH_LPT_LP(dev_priv) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(dev_priv, reg, SBI_ICLK);
	tmp |= SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(dev_priv, reg, tmp, SBI_ICLK);

	mutex_unlock(&dev_priv->sb_lock);
}
10071
10072
/*
 * Disable CLKOUT_DP on LPT PCHs: turn off the clock buffer, then
 * (if the SSC block is running) assert the path alternate first and
 * finally disable the SSC block, all through the sideband interface
 * under sb_lock.
 */
void lpt_disable_clkout_dp(struct drm_i915_private *dev_priv)
{
	u32 reg, tmp;

	mutex_lock(&dev_priv->sb_lock);

	/* LP PCH uses a different sideband register for the buffer enable. */
	reg = HAS_PCH_LPT_LP(dev_priv) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(dev_priv, reg, SBI_ICLK);
	tmp &= ~SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(dev_priv, reg, tmp, SBI_ICLK);

	tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
	if (!(tmp & SBI_SSCCTL_DISABLE)) {
		/* Path alternate must be asserted before disabling SSC. */
		if (!(tmp & SBI_SSCCTL_PATHALT)) {
			tmp |= SBI_SSCCTL_PATHALT;
			intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);
			udelay(32);
		}
		tmp |= SBI_SSCCTL_DISABLE;
		intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);
	}

	mutex_unlock(&dev_priv->sb_lock);
}
10097
/*
 * Map a clock bend amount in "steps" (multiples of 5, from -50 to +50,
 * i.e. -5% to +5% in 0.5% units) to an array index 0..20.
 */
#define BEND_IDX(steps) ((50 + (steps)) / 5)

/*
 * SBI_SSCDIVINTPHASE values for each supported bend step, indexed via
 * BEND_IDX(). Adjacent 5-step entries share a value; only the low 16
 * bits are used (the caller preserves the upper half of the register).
 */
static const u16 sscdivintphase[] = {
	[BEND_IDX( 50)] = 0x3B23,
	[BEND_IDX( 45)] = 0x3B23,
	[BEND_IDX( 40)] = 0x3C23,
	[BEND_IDX( 35)] = 0x3C23,
	[BEND_IDX( 30)] = 0x3D23,
	[BEND_IDX( 25)] = 0x3D23,
	[BEND_IDX( 20)] = 0x3E23,
	[BEND_IDX( 15)] = 0x3E23,
	[BEND_IDX( 10)] = 0x3F23,
	[BEND_IDX(  5)] = 0x3F23,
	[BEND_IDX(  0)] = 0x0025,
	[BEND_IDX( -5)] = 0x0025,
	[BEND_IDX(-10)] = 0x0125,
	[BEND_IDX(-15)] = 0x0125,
	[BEND_IDX(-20)] = 0x0225,
	[BEND_IDX(-25)] = 0x0225,
	[BEND_IDX(-30)] = 0x0325,
	[BEND_IDX(-35)] = 0x0325,
	[BEND_IDX(-40)] = 0x0425,
	[BEND_IDX(-45)] = 0x0425,
	[BEND_IDX(-50)] = 0x0525,
};
10123
10124
10125
10126
10127
10128
10129
/*
 * Bend CLKOUT_DP by @steps (multiples of 5; each step bends the clock
 * by 0.5%, per the sscdivintphase table above). Programs the dither
 * phase and the divider/phase value through the sideband interface.
 */
static void lpt_bend_clkout_dp(struct drm_i915_private *dev_priv, int steps)
{
	u32 tmp;
	int idx = BEND_IDX(steps);

	/* Only multiples of 5 steps are representable. */
	if (drm_WARN_ON(&dev_priv->drm, steps % 5 != 0))
		return;

	if (drm_WARN_ON(&dev_priv->drm, idx >= ARRAY_SIZE(sscdivintphase)))
		return;

	mutex_lock(&dev_priv->sb_lock);

	/* Odd 5-step amounts need the dither phase pattern enabled. */
	if (steps % 10 != 0)
		tmp = 0xAAAAAAAB;
	else
		tmp = 0x00000000;
	intel_sbi_write(dev_priv, SBI_SSCDITHPHASE, tmp, SBI_ICLK);

	/* Preserve the upper half of SSCDIVINTPHASE, replace the lower. */
	tmp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE, SBI_ICLK);
	tmp &= 0xffff0000;
	tmp |= sscdivintphase[idx];
	intel_sbi_write(dev_priv, SBI_SSCDIVINTPHASE, tmp, SBI_ICLK);

	mutex_unlock(&dev_priv->sb_lock);
}
10156
10157#undef BEND_IDX
10158
10159static bool spll_uses_pch_ssc(struct drm_i915_private *dev_priv)
10160{
10161 u32 fuse_strap = intel_de_read(dev_priv, FUSE_STRAP);
10162 u32 ctl = intel_de_read(dev_priv, SPLL_CTL);
10163
10164 if ((ctl & SPLL_PLL_ENABLE) == 0)
10165 return false;
10166
10167 if ((ctl & SPLL_REF_MASK) == SPLL_REF_MUXED_SSC &&
10168 (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
10169 return true;
10170
10171 if (IS_BROADWELL(dev_priv) &&
10172 (ctl & SPLL_REF_MASK) == SPLL_REF_PCH_SSC_BDW)
10173 return true;
10174
10175 return false;
10176}
10177
/*
 * Report whether the enabled WRPLL @id is currently clocked from the
 * PCH SSC reference: either the explicit PCH-SSC selection, or (on
 * BDW / HSW ULT) the muxed-SSC reference with CPU-side SSC fused off.
 */
static bool wrpll_uses_pch_ssc(struct drm_i915_private *dev_priv,
			       enum intel_dpll_id id)
{
	u32 fuse_strap = intel_de_read(dev_priv, FUSE_STRAP);
	u32 ctl = intel_de_read(dev_priv, WRPLL_CTL(id));

	if ((ctl & WRPLL_PLL_ENABLE) == 0)
		return false;

	if ((ctl & WRPLL_REF_MASK) == WRPLL_REF_PCH_SSC)
		return true;

	if ((IS_BROADWELL(dev_priv) || IS_HSW_ULT(dev_priv)) &&
	    (ctl & WRPLL_REF_MASK) == WRPLL_REF_MUXED_SSC_BDW &&
	    (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
		return true;

	return false;
}
10197
/*
 * Initialize the LPT PCH reference clock (CLKOUT_DP).
 *
 * Records in pch_ssc_use which PLLs (SPLL/WRPLL1/WRPLL2) currently
 * depend on the PCH SSC reference; if any do, the clock must be left
 * untouched. Otherwise CLKOUT_DP is reprogrammed: with bend reset and
 * spread+FDI when an analog (FDI) output is present, or disabled
 * entirely when not.
 */
static void lpt_init_pch_refclk(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	bool has_fdi = false;

	/* Only the analog (CRT) output goes through FDI on LPT. */
	for_each_intel_encoder(&dev_priv->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_ANALOG:
			has_fdi = true;
			break;
		default:
			break;
		}
	}

	dev_priv->pch_ssc_use = 0;

	if (spll_uses_pch_ssc(dev_priv)) {
		drm_dbg_kms(&dev_priv->drm, "SPLL using PCH SSC\n");
		dev_priv->pch_ssc_use |= BIT(DPLL_ID_SPLL);
	}

	if (wrpll_uses_pch_ssc(dev_priv, DPLL_ID_WRPLL1)) {
		drm_dbg_kms(&dev_priv->drm, "WRPLL1 using PCH SSC\n");
		dev_priv->pch_ssc_use |= BIT(DPLL_ID_WRPLL1);
	}

	if (wrpll_uses_pch_ssc(dev_priv, DPLL_ID_WRPLL2)) {
		drm_dbg_kms(&dev_priv->drm, "WRPLL2 using PCH SSC\n");
		dev_priv->pch_ssc_use |= BIT(DPLL_ID_WRPLL2);
	}

	/* A PLL is consuming the PCH SSC reference: don't disturb it. */
	if (dev_priv->pch_ssc_use)
		return;

	if (has_fdi) {
		lpt_bend_clkout_dp(dev_priv, 0);
		lpt_enable_clkout_dp(dev_priv, true, true);
	} else {
		lpt_disable_clkout_dp(dev_priv);
	}
}
10255
10256
10257
10258
/*
 * Dispatch PCH reference-clock initialization to the handler matching
 * the detected PCH generation (IBX/CPT vs. LPT). Other PCH types need
 * no refclk setup here.
 */
void intel_init_pch_refclk(struct drm_i915_private *dev_priv)
{
	if (HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)) {
		ilk_init_pch_refclk(dev_priv);
		return;
	}

	if (HAS_PCH_LPT(dev_priv))
		lpt_init_pch_refclk(dev_priv);
}
10266
/*
 * Program PIPECONF for ILK-class platforms from the committed crtc
 * state: bpc, dithering, interlace mode, color range, output
 * colorspace and gamma mode. Finishes with a posting read.
 */
static void ilk_set_pipeconf(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	u32 val;

	val = 0;

	switch (crtc_state->pipe_bpp) {
	case 18:
		val |= PIPECONF_6BPC;
		break;
	case 24:
		val |= PIPECONF_8BPC;
		break;
	case 30:
		val |= PIPECONF_10BPC;
		break;
	case 36:
		val |= PIPECONF_12BPC;
		break;
	default:
		/* Can't happen: pipe_bpp was validated at compute time. */
		BUG();
	}

	if (crtc_state->dither)
		val |= (PIPECONF_DITHER_EN | PIPECONF_DITHER_TYPE_SP);

	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
		val |= PIPECONF_INTERLACED_ILK;
	else
		val |= PIPECONF_PROGRESSIVE;

	/*
	 * Limited color range via PIPECONF only applies to RGB output;
	 * YCbCr conversion handles range elsewhere.
	 */
	drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range &&
		    crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB);

	if (crtc_state->limited_color_range &&
	    !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO))
		val |= PIPECONF_COLOR_RANGE_SELECT;

	if (crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
		val |= PIPECONF_OUTPUT_COLORSPACE_YUV709;

	val |= PIPECONF_GAMMA_MODE(crtc_state->gamma_mode);

	val |= PIPECONF_FRAME_START_DELAY(0);

	intel_de_write(dev_priv, PIPECONF(pipe), val);
	intel_de_posting_read(dev_priv, PIPECONF(pipe));
}
10323
10324static void hsw_set_pipeconf(const struct intel_crtc_state *crtc_state)
10325{
10326 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
10327 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
10328 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
10329 u32 val = 0;
10330
10331 if (IS_HASWELL(dev_priv) && crtc_state->dither)
10332 val |= (PIPECONF_DITHER_EN | PIPECONF_DITHER_TYPE_SP);
10333
10334 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
10335 val |= PIPECONF_INTERLACED_ILK;
10336 else
10337 val |= PIPECONF_PROGRESSIVE;
10338
10339 if (IS_HASWELL(dev_priv) &&
10340 crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
10341 val |= PIPECONF_OUTPUT_COLORSPACE_YUV_HSW;
10342
10343 intel_de_write(dev_priv, PIPECONF(cpu_transcoder), val);
10344 intel_de_posting_read(dev_priv, PIPECONF(cpu_transcoder));
10345}
10346
/* Program the BDW+ PIPEMISC register (dither, colorspace, HDR bits). */
static void bdw_set_pipemisc(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 val = 0;

	switch (crtc_state->pipe_bpp) {
	case 18:
		val |= PIPEMISC_DITHER_6_BPC;
		break;
	case 24:
		val |= PIPEMISC_DITHER_8_BPC;
		break;
	case 30:
		val |= PIPEMISC_DITHER_10_BPC;
		break;
	case 36:
		val |= PIPEMISC_DITHER_12_BPC;
		break;
	default:
		/* Unexpected bpp: warn but keep going with 0 bits set. */
		MISSING_CASE(crtc_state->pipe_bpp);
		break;
	}

	if (crtc_state->dither)
		val |= PIPEMISC_DITHER_ENABLE | PIPEMISC_DITHER_TYPE_SP;

	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
	    crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444)
		val |= PIPEMISC_OUTPUT_COLORSPACE_YUV;

	/* 4:2:0 additionally needs the pipe's YUV420 full-blend mode. */
	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		val |= PIPEMISC_YUV420_ENABLE |
			PIPEMISC_YUV420_MODE_FULL_BLEND;

	/* Gen11+: enable HDR precision mode only while every active plane
	 * (cursor aside) is an HDR-capable plane. */
	if (INTEL_GEN(dev_priv) >= 11 &&
	    (crtc_state->active_planes & ~(icl_hdr_plane_mask() |
					   BIT(PLANE_CURSOR))) == 0)
		val |= PIPEMISC_HDR_MODE_PRECISION;

	if (INTEL_GEN(dev_priv) >= 12)
		val |= PIPEMISC_PIXEL_ROUNDING_TRUNC;

	intel_de_write(dev_priv, PIPEMISC(crtc->pipe), val);
}
10392
/*
 * Read back the pipe bpp (bits per pixel, all components combined) from
 * the PIPEMISC dither field.  Returns 0 on an unrecognized field value.
 */
int bdw_get_pipemisc_bpp(struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 tmp;

	tmp = intel_de_read(dev_priv, PIPEMISC(crtc->pipe));

	switch (tmp & PIPEMISC_DITHER_BPC_MASK) {
	case PIPEMISC_DITHER_6_BPC:
		return 18;
	case PIPEMISC_DITHER_8_BPC:
		return 24;
	case PIPEMISC_DITHER_10_BPC:
		return 30;
	case PIPEMISC_DITHER_12_BPC:
		return 36;
	default:
		MISSING_CASE(tmp);
		return 0;
	}
}
10414
/*
 * Compute the number of FDI lanes needed to carry @target_clock (kHz)
 * at @bpp bits per pixel over a link running at @link_bw (kHz).
 * The 21/20 factor pads the payload by 5%; each lane carries
 * link_bw * 8 bits per second worth of data, and we round up.
 */
int ilk_get_lanes_required(int target_clock, int link_bw, int bpp)
{
	unsigned int per_lane = link_bw * 8;
	unsigned int bps = target_clock * bpp * 21 / 20;

	return (bps + per_lane - 1) / per_lane;
}
10425
/* Whether the DPLL's effective M value is small enough (relative to
 * @factor * N) to require the FP_CB_TUNE coarse bandwidth tuning bit. */
static bool ilk_needs_fb_cb_tune(struct dpll *dpll, int factor)
{
	return i9xx_dpll_compute_m(dpll) < factor * dpll->n;
}
10430
/*
 * Assemble the ILK DPLL, FP0 and FP1 register values from the computed
 * divider state and store them in crtc_state->dpll_hw_state.
 * @reduced_clock, if non-NULL, supplies the dividers used for FP1
 * (downclocked mode); otherwise FP1 mirrors FP0.
 */
static void ilk_compute_dpll(struct intel_crtc *crtc,
			     struct intel_crtc_state *crtc_state,
			     struct dpll *reduced_clock)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dpll, fp, fp2;
	int factor;

	/* CB tuning threshold factor; depends on output type/refclk. */
	factor = 21;
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		if ((intel_panel_use_ssc(dev_priv) &&
		     dev_priv->vbt.lvds_ssc_freq == 100000) ||
		    (HAS_PCH_IBX(dev_priv) &&
		     intel_is_dual_link_lvds(dev_priv)))
			factor = 25;
	} else if (crtc_state->sdvo_tv_clock) {
		factor = 20;
	}

	fp = i9xx_dpll_compute_fp(&crtc_state->dpll);

	if (ilk_needs_fb_cb_tune(&crtc_state->dpll, factor))
		fp |= FP_CB_TUNE;

	if (reduced_clock) {
		fp2 = i9xx_dpll_compute_fp(reduced_clock);

		if (reduced_clock->m < factor * reduced_clock->n)
			fp2 |= FP_CB_TUNE;
	} else {
		fp2 = fp;
	}

	dpll = 0;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS))
		dpll |= DPLLB_MODE_LVDS;
	else
		dpll |= DPLLB_MODE_DAC_SERIAL;

	dpll |= (crtc_state->pixel_multiplier - 1)
		<< PLL_REF_SDVO_HDMI_MULTIPLIER_SHIFT;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO) ||
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	if (intel_crtc_has_dp_encoder(crtc_state))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	/*
	 * NOTE(review): on 3-pipe parts the high-speed bit is also set for
	 * analog (CRT) output — presumably a hardware workaround; the
	 * original rationale lived in a comment that is not visible here.
	 */
	if (INTEL_NUM_PIPES(dev_priv) == 3 &&
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	/* compute bitmask from p1 value */
	dpll |= (1 << (crtc_state->dpll.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
	/* also FPA1 (for the reduced-clock divisor path) */
	dpll |= (1 << (crtc_state->dpll.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;

	switch (crtc_state->dpll.p2) {
	case 5:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_5;
		break;
	case 7:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_7;
		break;
	case 10:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_10;
		break;
	case 14:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_14;
		break;
	}

	/* LVDS panels with SSC enabled take the spread-spectrum refclk. */
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
	    intel_panel_use_ssc(dev_priv))
		dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
	else
		dpll |= PLL_REF_INPUT_DREFCLK;

	dpll |= DPLL_VCO_ENABLE;

	crtc_state->dpll_hw_state.dpll = dpll;
	crtc_state->dpll_hw_state.fp0 = fp;
	crtc_state->dpll_hw_state.fp1 = fp2;
}
10532
/*
 * Compute DPLL settings for an ILK CRTC and reserve a shared PCH DPLL.
 * Returns 0 on success, -EINVAL if no suitable PLL settings or no free
 * shared DPLL could be found.  CRTCs without a PCH encoder need no PLL.
 */
static int ilk_crtc_compute_clock(struct intel_crtc *crtc,
				  struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(crtc_state->uapi.state);
	const struct intel_limit *limit;
	int refclk = 120000;	/* default non-SSC reference, kHz */

	memset(&crtc_state->dpll_hw_state, 0,
	       sizeof(crtc_state->dpll_hw_state));

	/* CPU eDP (no PCH encoder) doesn't use the PCH DPLL at all. */
	if (!crtc_state->has_pch_encoder)
		return 0;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		if (intel_panel_use_ssc(dev_priv)) {
			drm_dbg_kms(&dev_priv->drm,
				    "using SSC reference clock of %d kHz\n",
				    dev_priv->vbt.lvds_ssc_freq);
			refclk = dev_priv->vbt.lvds_ssc_freq;
		}

		/* Divider limits differ for dual/single link and for a
		 * 100 MHz refclk. */
		if (intel_is_dual_link_lvds(dev_priv)) {
			if (refclk == 100000)
				limit = &ilk_limits_dual_lvds_100m;
			else
				limit = &ilk_limits_dual_lvds;
		} else {
			if (refclk == 100000)
				limit = &ilk_limits_single_lvds_100m;
			else
				limit = &ilk_limits_single_lvds;
		}
	} else {
		limit = &ilk_limits_dac;
	}

	if (!crtc_state->clock_set &&
	    !g4x_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
				refclk, NULL, &crtc_state->dpll)) {
		drm_err(&dev_priv->drm,
			"Couldn't find PLL settings for mode!\n");
		return -EINVAL;
	}

	ilk_compute_dpll(crtc, crtc_state, NULL);

	if (!intel_reserve_shared_dplls(state, crtc, NULL)) {
		drm_dbg_kms(&dev_priv->drm,
			    "failed to find PLL for pipe %c\n",
			    pipe_name(crtc->pipe));
		return -EINVAL;
	}

	return 0;
}
10591
/* Read back link M/N and data M/N (plus TU size) from the PCH
 * transcoder registers of this CRTC's pipe. */
static void intel_pch_transcoder_get_m_n(struct intel_crtc *crtc,
					 struct intel_link_m_n *m_n)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;

	m_n->link_m = intel_de_read(dev_priv, PCH_TRANS_LINK_M1(pipe));
	m_n->link_n = intel_de_read(dev_priv, PCH_TRANS_LINK_N1(pipe));
	/* DATA_M1 packs the TU size in the high bits; split it out. */
	m_n->gmch_m = intel_de_read(dev_priv, PCH_TRANS_DATA_M1(pipe))
		& ~TU_SIZE_MASK;
	m_n->gmch_n = intel_de_read(dev_priv, PCH_TRANS_DATA_N1(pipe));
	m_n->tu = ((intel_de_read(dev_priv, PCH_TRANS_DATA_M1(pipe))
		    & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;
}
10607
/*
 * Read back link/data M/N values from the CPU transcoder registers.
 * Gen5+ uses per-transcoder registers (and may expose a second M2/N2
 * set, read into @m2_n2 when non-NULL and supported); older G4X-style
 * hardware uses per-pipe registers and has no M2/N2.
 */
static void intel_cpu_transcoder_get_m_n(struct intel_crtc *crtc,
					 enum transcoder transcoder,
					 struct intel_link_m_n *m_n,
					 struct intel_link_m_n *m2_n2)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (INTEL_GEN(dev_priv) >= 5) {
		m_n->link_m = intel_de_read(dev_priv,
					    PIPE_LINK_M1(transcoder));
		m_n->link_n = intel_de_read(dev_priv,
					    PIPE_LINK_N1(transcoder));
		/* DATA_M1 packs the TU size in the high bits. */
		m_n->gmch_m = intel_de_read(dev_priv,
					    PIPE_DATA_M1(transcoder))
			& ~TU_SIZE_MASK;
		m_n->gmch_n = intel_de_read(dev_priv,
					    PIPE_DATA_N1(transcoder));
		m_n->tu = ((intel_de_read(dev_priv, PIPE_DATA_M1(transcoder))
			    & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;

		if (m2_n2 && transcoder_has_m2_n2(dev_priv, transcoder)) {
			m2_n2->link_m = intel_de_read(dev_priv,
						      PIPE_LINK_M2(transcoder));
			m2_n2->link_n = intel_de_read(dev_priv,
						      PIPE_LINK_N2(transcoder));
			m2_n2->gmch_m = intel_de_read(dev_priv,
						      PIPE_DATA_M2(transcoder))
				& ~TU_SIZE_MASK;
			m2_n2->gmch_n = intel_de_read(dev_priv,
						      PIPE_DATA_N2(transcoder));
			m2_n2->tu = ((intel_de_read(dev_priv, PIPE_DATA_M2(transcoder))
				      & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;
		}
	} else {
		m_n->link_m = intel_de_read(dev_priv, PIPE_LINK_M_G4X(pipe));
		m_n->link_n = intel_de_read(dev_priv, PIPE_LINK_N_G4X(pipe));
		m_n->gmch_m = intel_de_read(dev_priv, PIPE_DATA_M_G4X(pipe))
			& ~TU_SIZE_MASK;
		m_n->gmch_n = intel_de_read(dev_priv, PIPE_DATA_N_G4X(pipe));
		m_n->tu = ((intel_de_read(dev_priv, PIPE_DATA_M_G4X(pipe))
			    & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;
	}
}
10652
10653void intel_dp_get_m_n(struct intel_crtc *crtc,
10654 struct intel_crtc_state *pipe_config)
10655{
10656 if (pipe_config->has_pch_encoder)
10657 intel_pch_transcoder_get_m_n(crtc, &pipe_config->dp_m_n);
10658 else
10659 intel_cpu_transcoder_get_m_n(crtc, pipe_config->cpu_transcoder,
10660 &pipe_config->dp_m_n,
10661 &pipe_config->dp_m2_n2);
10662}
10663
/* Read back the FDI link M/N values from the CPU transcoder
 * (FDI has no M2/N2 set). */
static void ilk_get_fdi_m_n_config(struct intel_crtc *crtc,
				   struct intel_crtc_state *pipe_config)
{
	intel_cpu_transcoder_get_m_n(crtc, pipe_config->cpu_transcoder,
				     &pipe_config->fdi_m_n, NULL);
}
10670
10671static void ilk_get_pfit_pos_size(struct intel_crtc_state *crtc_state,
10672 u32 pos, u32 size)
10673{
10674 drm_rect_init(&crtc_state->pch_pfit.dst,
10675 pos >> 16, pos & 0xffff,
10676 size >> 16, size & 0xffff);
10677}
10678
/*
 * Read back the SKL+ pipe scaler (panel fitter) state: find the first
 * enabled scaler bound to the pipe (not to a plane), record its window,
 * and update the scaler bookkeeping accordingly.
 */
static void skl_get_pfit_config(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_crtc_scaler_state *scaler_state = &crtc_state->scaler_state;
	int id = -1;
	int i;

	/* A pipe scaler is enabled with no plane selected in PS_CTRL. */
	for (i = 0; i < crtc->num_scalers; i++) {
		u32 ctl, pos, size;

		ctl = intel_de_read(dev_priv, SKL_PS_CTRL(crtc->pipe, i));
		if ((ctl & (PS_SCALER_EN | PS_PLANE_SEL_MASK)) != PS_SCALER_EN)
			continue;

		id = i;
		crtc_state->pch_pfit.enabled = true;

		pos = intel_de_read(dev_priv, SKL_PS_WIN_POS(crtc->pipe, i));
		size = intel_de_read(dev_priv, SKL_PS_WIN_SZ(crtc->pipe, i));

		ilk_get_pfit_pos_size(crtc_state, pos, size);

		scaler_state->scalers[i].in_use = true;
		break;
	}

	scaler_state->scaler_id = id;
	if (id >= 0)
		scaler_state->scaler_users |= (1 << SKL_CRTC_INDEX);
	else
		scaler_state->scaler_users &= ~(1 << SKL_CRTC_INDEX);
}
10713
/*
 * Read back the hardware state of the primary plane (as programmed by
 * firmware/BIOS) and build an intel_framebuffer describing it, stored
 * in @plane_config.  On any unsupported configuration the function
 * simply returns, leaving plane_config->fb NULL.
 */
static void
skl_get_initial_plane_config(struct intel_crtc *crtc,
			     struct intel_initial_plane_config *plane_config)
{
	struct intel_crtc_state *crtc_state = to_intel_crtc_state(crtc->base.state);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_plane *plane = to_intel_plane(crtc->base.primary);
	enum plane_id plane_id = plane->id;
	enum pipe pipe;
	u32 val, base, offset, stride_mult, tiling, alpha;
	int fourcc, pixel_format;
	unsigned int aligned_height;
	struct drm_framebuffer *fb;
	struct intel_framebuffer *intel_fb;

	if (!plane->get_hw_state(plane, &pipe))
		return;

	drm_WARN_ON(dev, pipe != crtc->pipe);

	if (crtc_state->bigjoiner) {
		drm_dbg_kms(&dev_priv->drm,
			    "Unsupported bigjoiner configuration for initial FB\n");
		return;
	}

	intel_fb = kzalloc(sizeof(*intel_fb), GFP_KERNEL);
	if (!intel_fb) {
		drm_dbg_kms(&dev_priv->drm, "failed to alloc fb\n");
		return;
	}

	fb = &intel_fb->base;

	fb->dev = dev;

	val = intel_de_read(dev_priv, PLANE_CTL(pipe, plane_id));

	/* The pixel format field layout changed on gen11. */
	if (INTEL_GEN(dev_priv) >= 11)
		pixel_format = val & ICL_PLANE_CTL_FORMAT_MASK;
	else
		pixel_format = val & PLANE_CTL_FORMAT_MASK;

	/* Alpha mode moved from PLANE_CTL to PLANE_COLOR_CTL on GLK/gen10+. */
	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) {
		alpha = intel_de_read(dev_priv,
				      PLANE_COLOR_CTL(pipe, plane_id));
		alpha &= PLANE_COLOR_ALPHA_MASK;
	} else {
		alpha = val & PLANE_CTL_ALPHA_MASK;
	}

	fourcc = skl_format_to_fourcc(pixel_format,
				      val & PLANE_CTL_ORDER_RGBX, alpha);
	fb->format = drm_format_info(fourcc);

	/* Map the hw tiling/compression bits to a DRM format modifier. */
	tiling = val & PLANE_CTL_TILED_MASK;
	switch (tiling) {
	case PLANE_CTL_TILED_LINEAR:
		fb->modifier = DRM_FORMAT_MOD_LINEAR;
		break;
	case PLANE_CTL_TILED_X:
		plane_config->tiling = I915_TILING_X;
		fb->modifier = I915_FORMAT_MOD_X_TILED;
		break;
	case PLANE_CTL_TILED_Y:
		plane_config->tiling = I915_TILING_Y;
		if (val & PLANE_CTL_RENDER_DECOMPRESSION_ENABLE)
			fb->modifier = INTEL_GEN(dev_priv) >= 12 ?
				I915_FORMAT_MOD_Y_TILED_GEN12_RC_CCS :
				I915_FORMAT_MOD_Y_TILED_CCS;
		else if (val & PLANE_CTL_MEDIA_DECOMPRESSION_ENABLE)
			fb->modifier = I915_FORMAT_MOD_Y_TILED_GEN12_MC_CCS;
		else
			fb->modifier = I915_FORMAT_MOD_Y_TILED;
		break;
	case PLANE_CTL_TILED_YF:
		if (val & PLANE_CTL_RENDER_DECOMPRESSION_ENABLE)
			fb->modifier = I915_FORMAT_MOD_Yf_TILED_CCS;
		else
			fb->modifier = I915_FORMAT_MOD_Yf_TILED;
		break;
	default:
		MISSING_CASE(tiling);
		goto error;
	}

	/*
	 * Hw and DRM count rotation in opposite directions, so 90 and
	 * 270 degrees are swapped between the two representations.
	 */
	switch (val & PLANE_CTL_ROTATE_MASK) {
	case PLANE_CTL_ROTATE_0:
		plane_config->rotation = DRM_MODE_ROTATE_0;
		break;
	case PLANE_CTL_ROTATE_90:
		plane_config->rotation = DRM_MODE_ROTATE_270;
		break;
	case PLANE_CTL_ROTATE_180:
		plane_config->rotation = DRM_MODE_ROTATE_180;
		break;
	case PLANE_CTL_ROTATE_270:
		plane_config->rotation = DRM_MODE_ROTATE_90;
		break;
	}

	if (INTEL_GEN(dev_priv) >= 10 &&
	    val & PLANE_CTL_FLIP_HORIZONTAL)
		plane_config->rotation |= DRM_MODE_REFLECT_X;

	/* 90/270 degree initial framebuffers are not supported here. */
	if (drm_rotation_90_or_270(plane_config->rotation))
		goto error;

	base = intel_de_read(dev_priv, PLANE_SURF(pipe, plane_id)) & 0xfffff000;
	plane_config->base = base;

	offset = intel_de_read(dev_priv, PLANE_OFFSET(pipe, plane_id));

	val = intel_de_read(dev_priv, PLANE_SIZE(pipe, plane_id));
	/* PLANE_SIZE stores (height-1)|(width-1) as hi16|lo16. */
	fb->height = ((val >> 16) & 0xffff) + 1;
	fb->width = ((val >> 0) & 0xffff) + 1;

	val = intel_de_read(dev_priv, PLANE_STRIDE(pipe, plane_id));
	stride_mult = skl_plane_stride_mult(fb, 0, DRM_MODE_ROTATE_0);
	fb->pitches[0] = (val & 0x3ff) * stride_mult;

	aligned_height = intel_fb_align_height(fb, 0, fb->height);

	plane_config->size = fb->pitches[0] * aligned_height;

	drm_dbg_kms(&dev_priv->drm,
		    "%s/%s with fb: size=%dx%d@%d, offset=%x, pitch %d, size 0x%x\n",
		    crtc->base.name, plane->base.name, fb->width, fb->height,
		    fb->format->cpp[0] * 8, base, fb->pitches[0],
		    plane_config->size);

	plane_config->fb = intel_fb;
	return;

error:
	kfree(intel_fb);
}
10857
/* Read back the ILK-style panel fitter state for this CRTC's pipe. */
static void ilk_get_pfit_config(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 ctl, pos, size;

	ctl = intel_de_read(dev_priv, PF_CTL(crtc->pipe));
	if ((ctl & PF_ENABLE) == 0)
		return;

	crtc_state->pch_pfit.enabled = true;

	pos = intel_de_read(dev_priv, PF_WIN_POS(crtc->pipe));
	size = intel_de_read(dev_priv, PF_WIN_SZ(crtc->pipe));

	ilk_get_pfit_pos_size(crtc_state, pos, size);

	/*
	 * On IVB (gen7) the panel fitter records which pipe it is bound
	 * to; warn if it doesn't match the pipe we read it from.
	 */
	drm_WARN_ON(&dev_priv->drm, IS_GEN(dev_priv, 7) &&
		    (ctl & PF_PIPE_SEL_MASK_IVB) != PF_PIPE_SEL_IVB(crtc->pipe));
}
10883
10884static bool ilk_get_pipe_config(struct intel_crtc *crtc,
10885 struct intel_crtc_state *pipe_config)
10886{
10887 struct drm_device *dev = crtc->base.dev;
10888 struct drm_i915_private *dev_priv = to_i915(dev);
10889 enum intel_display_power_domain power_domain;
10890 intel_wakeref_t wakeref;
10891 u32 tmp;
10892 bool ret;
10893
10894 power_domain = POWER_DOMAIN_PIPE(crtc->pipe);
10895 wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
10896 if (!wakeref)
10897 return false;
10898
10899 pipe_config->cpu_transcoder = (enum transcoder) crtc->pipe;
10900 pipe_config->shared_dpll = NULL;
10901
10902 ret = false;
10903 tmp = intel_de_read(dev_priv, PIPECONF(crtc->pipe));
10904 if (!(tmp & PIPECONF_ENABLE))
10905 goto out;
10906
10907 switch (tmp & PIPECONF_BPC_MASK) {
10908 case PIPECONF_6BPC:
10909 pipe_config->pipe_bpp = 18;
10910 break;
10911 case PIPECONF_8BPC:
10912 pipe_config->pipe_bpp = 24;
10913 break;
10914 case PIPECONF_10BPC:
10915 pipe_config->pipe_bpp = 30;
10916 break;
10917 case PIPECONF_12BPC:
10918 pipe_config->pipe_bpp = 36;
10919 break;
10920 default:
10921 break;
10922 }
10923
10924 if (tmp & PIPECONF_COLOR_RANGE_SELECT)
10925 pipe_config->limited_color_range = true;
10926
10927 switch (tmp & PIPECONF_OUTPUT_COLORSPACE_MASK) {
10928 case PIPECONF_OUTPUT_COLORSPACE_YUV601:
10929 case PIPECONF_OUTPUT_COLORSPACE_YUV709:
10930 pipe_config->output_format = INTEL_OUTPUT_FORMAT_YCBCR444;
10931 break;
10932 default:
10933 pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
10934 break;
10935 }
10936
10937 pipe_config->gamma_mode = (tmp & PIPECONF_GAMMA_MODE_MASK_ILK) >>
10938 PIPECONF_GAMMA_MODE_SHIFT;
10939
10940 pipe_config->csc_mode = intel_de_read(dev_priv,
10941 PIPE_CSC_MODE(crtc->pipe));
10942
10943 i9xx_get_pipe_color_config(pipe_config);
10944 intel_color_get_config(pipe_config);
10945
10946 if (intel_de_read(dev_priv, PCH_TRANSCONF(crtc->pipe)) & TRANS_ENABLE) {
10947 struct intel_shared_dpll *pll;
10948 enum intel_dpll_id pll_id;
10949 bool pll_active;
10950
10951 pipe_config->has_pch_encoder = true;
10952
10953 tmp = intel_de_read(dev_priv, FDI_RX_CTL(crtc->pipe));
10954 pipe_config->fdi_lanes = ((FDI_DP_PORT_WIDTH_MASK & tmp) >>
10955 FDI_DP_PORT_WIDTH_SHIFT) + 1;
10956
10957 ilk_get_fdi_m_n_config(crtc, pipe_config);
10958
10959 if (HAS_PCH_IBX(dev_priv)) {
10960
10961
10962
10963
10964 pll_id = (enum intel_dpll_id) crtc->pipe;
10965 } else {
10966 tmp = intel_de_read(dev_priv, PCH_DPLL_SEL);
10967 if (tmp & TRANS_DPLLB_SEL(crtc->pipe))
10968 pll_id = DPLL_ID_PCH_PLL_B;
10969 else
10970 pll_id= DPLL_ID_PCH_PLL_A;
10971 }
10972
10973 pipe_config->shared_dpll =
10974 intel_get_shared_dpll_by_id(dev_priv, pll_id);
10975 pll = pipe_config->shared_dpll;
10976
10977 pll_active = intel_dpll_get_hw_state(dev_priv, pll,
10978 &pipe_config->dpll_hw_state);
10979 drm_WARN_ON(dev, !pll_active);
10980
10981 tmp = pipe_config->dpll_hw_state.dpll;
10982 pipe_config->pixel_multiplier =
10983 ((tmp & PLL_REF_SDVO_HDMI_MULTIPLIER_MASK)
10984 >> PLL_REF_SDVO_HDMI_MULTIPLIER_SHIFT) + 1;
10985
10986 ilk_pch_clock_get(crtc, pipe_config);
10987 } else {
10988 pipe_config->pixel_multiplier = 1;
10989 }
10990
10991 intel_get_transcoder_timings(crtc, pipe_config);
10992 intel_get_pipe_src_size(crtc, pipe_config);
10993
10994 ilk_get_pfit_config(pipe_config);
10995
10996 ret = true;
10997
10998out:
10999 intel_display_power_put(dev_priv, power_domain, wakeref);
11000
11001 return ret;
11002}
11003
/*
 * Reserve a shared DPLL for a HSW+ CRTC.  Non-gen11 DSI manages its
 * own PLL and is skipped.  Returns 0 on success, -EINVAL when no
 * suitable shared DPLL could be reserved.
 */
static int hsw_crtc_compute_clock(struct intel_crtc *crtc,
				  struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(crtc_state->uapi.state);

	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) ||
	    INTEL_GEN(dev_priv) >= 11) {
		struct intel_encoder *encoder =
			intel_get_crtc_new_encoder(state, crtc_state);

		if (!intel_reserve_shared_dplls(state, crtc, encoder)) {
			drm_dbg_kms(&dev_priv->drm,
				    "failed to find PLL for pipe %c\n",
				    pipe_name(crtc->pipe));
			return -EINVAL;
		}
	}

	return 0;
}
11026
/* DG1: read back which DPLL drives @port from DPCLKA_CFGCR0 and record
 * it (with its hw state) in the pipe config's port DPLL slot. */
static void dg1_get_ddi_pll(struct drm_i915_private *dev_priv, enum port port,
			    struct intel_crtc_state *pipe_config)
{
	enum icl_port_dpll_id port_dpll_id = ICL_PORT_DPLL_DEFAULT;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	struct icl_port_dpll *port_dpll;
	struct intel_shared_dpll *pll;
	enum intel_dpll_id id;
	bool pll_active;
	u32 clk_sel;

	clk_sel = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)) & DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
	id = DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_DPLL_MAP(clk_sel, phy);

	if (WARN_ON(id > DPLL_ID_DG1_DPLL3))
		return;

	pll = intel_get_shared_dpll_by_id(dev_priv, id);
	port_dpll = &pipe_config->icl_port_dplls[port_dpll_id];

	port_dpll->pll = pll;
	pll_active = intel_dpll_get_hw_state(dev_priv, pll,
					     &port_dpll->hw_state);
	drm_WARN_ON(&dev_priv->drm, !pll_active);

	icl_set_active_port_dpll(pipe_config, port_dpll_id);
}
11054
/*
 * ICL+: read back which DPLL drives @port.  Combo PHYs select a DPLL
 * via DPCLKA_CFGCR0 (field layout differs on RKL); Type-C PHYs use
 * either the per-TC MG PLL or the TBT PLL depending on DDI_CLK_SEL.
 */
static void icl_get_ddi_pll(struct drm_i915_private *dev_priv, enum port port,
			    struct intel_crtc_state *pipe_config)
{
	enum phy phy = intel_port_to_phy(dev_priv, port);
	enum icl_port_dpll_id port_dpll_id;
	struct icl_port_dpll *port_dpll;
	struct intel_shared_dpll *pll;
	enum intel_dpll_id id;
	bool pll_active;
	u32 temp;

	if (intel_phy_is_combo(dev_priv, phy)) {
		u32 mask, shift;

		if (IS_ROCKETLAKE(dev_priv)) {
			mask = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			shift = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy);
		} else {
			mask = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			shift = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy);
		}

		temp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0) & mask;
		id = temp >> shift;
		port_dpll_id = ICL_PORT_DPLL_DEFAULT;
	} else if (intel_phy_is_tc(dev_priv, phy)) {
		u32 clk_sel = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK;

		if (clk_sel == DDI_CLK_SEL_MG) {
			id = icl_tc_port_to_pll_id(intel_port_to_tc(dev_priv,
								    port));
			port_dpll_id = ICL_PORT_DPLL_MG_PHY;
		} else {
			drm_WARN_ON(&dev_priv->drm,
				    clk_sel < DDI_CLK_SEL_TBT_162);
			id = DPLL_ID_ICL_TBTPLL;
			port_dpll_id = ICL_PORT_DPLL_DEFAULT;
		}
	} else {
		drm_WARN(&dev_priv->drm, 1, "Invalid port %x\n", port);
		return;
	}

	pll = intel_get_shared_dpll_by_id(dev_priv, id);
	port_dpll = &pipe_config->icl_port_dplls[port_dpll_id];

	port_dpll->pll = pll;
	pll_active = intel_dpll_get_hw_state(dev_priv, pll,
					     &port_dpll->hw_state);
	drm_WARN_ON(&dev_priv->drm, !pll_active);

	icl_set_active_port_dpll(pipe_config, port_dpll_id);
}
11108
/* CNL: read back which shared DPLL drives @port from DPCLKA_CFGCR0. */
static void cnl_get_ddi_pll(struct drm_i915_private *dev_priv, enum port port,
			    struct intel_crtc_state *pipe_config)
{
	struct intel_shared_dpll *pll;
	enum intel_dpll_id id;
	bool pll_active;
	u32 temp;

	temp = intel_de_read(dev_priv, DPCLKA_CFGCR0) & DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port);
	id = temp >> DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(port);

	if (drm_WARN_ON(&dev_priv->drm, id < SKL_DPLL0 || id > SKL_DPLL2))
		return;

	pll = intel_get_shared_dpll_by_id(dev_priv, id);

	pipe_config->shared_dpll = pll;
	pll_active = intel_dpll_get_hw_state(dev_priv, pll,
					     &pipe_config->dpll_hw_state);
	drm_WARN_ON(&dev_priv->drm, !pll_active);
}
11130
11131static void bxt_get_ddi_pll(struct drm_i915_private *dev_priv,
11132 enum port port,
11133 struct intel_crtc_state *pipe_config)
11134{
11135 struct intel_shared_dpll *pll;
11136 enum intel_dpll_id id;
11137 bool pll_active;
11138
11139 switch (port) {
11140 case PORT_A:
11141 id = DPLL_ID_SKL_DPLL0;
11142 break;
11143 case PORT_B:
11144 id = DPLL_ID_SKL_DPLL1;
11145 break;
11146 case PORT_C:
11147 id = DPLL_ID_SKL_DPLL2;
11148 break;
11149 default:
11150 drm_err(&dev_priv->drm, "Incorrect port type\n");
11151 return;
11152 }
11153
11154 pll = intel_get_shared_dpll_by_id(dev_priv, id);
11155
11156 pipe_config->shared_dpll = pll;
11157 pll_active = intel_dpll_get_hw_state(dev_priv, pll,
11158 &pipe_config->dpll_hw_state);
11159 drm_WARN_ON(&dev_priv->drm, !pll_active);
11160}
11161
/* SKL: read back which shared DPLL drives @port from DPLL_CTRL2
 * (3-bit per-port clock-select fields). */
static void skl_get_ddi_pll(struct drm_i915_private *dev_priv, enum port port,
			    struct intel_crtc_state *pipe_config)
{
	struct intel_shared_dpll *pll;
	enum intel_dpll_id id;
	bool pll_active;
	u32 temp;

	temp = intel_de_read(dev_priv, DPLL_CTRL2) & DPLL_CTRL2_DDI_CLK_SEL_MASK(port);
	id = temp >> (port * 3 + 1);

	if (drm_WARN_ON(&dev_priv->drm, id < SKL_DPLL0 || id > SKL_DPLL3))
		return;

	pll = intel_get_shared_dpll_by_id(dev_priv, id);

	pipe_config->shared_dpll = pll;
	pll_active = intel_dpll_get_hw_state(dev_priv, pll,
					     &pipe_config->dpll_hw_state);
	drm_WARN_ON(&dev_priv->drm, !pll_active);
}
11183
/* HSW/BDW: translate PORT_CLK_SEL into a shared DPLL id and record the
 * PLL (with its hw state) in the pipe config. */
static void hsw_get_ddi_pll(struct drm_i915_private *dev_priv, enum port port,
			    struct intel_crtc_state *pipe_config)
{
	struct intel_shared_dpll *pll;
	enum intel_dpll_id id;
	u32 ddi_pll_sel = intel_de_read(dev_priv, PORT_CLK_SEL(port));
	bool pll_active;

	switch (ddi_pll_sel) {
	case PORT_CLK_SEL_WRPLL1:
		id = DPLL_ID_WRPLL1;
		break;
	case PORT_CLK_SEL_WRPLL2:
		id = DPLL_ID_WRPLL2;
		break;
	case PORT_CLK_SEL_SPLL:
		id = DPLL_ID_SPLL;
		break;
	case PORT_CLK_SEL_LCPLL_810:
		id = DPLL_ID_LCPLL_810;
		break;
	case PORT_CLK_SEL_LCPLL_1350:
		id = DPLL_ID_LCPLL_1350;
		break;
	case PORT_CLK_SEL_LCPLL_2700:
		id = DPLL_ID_LCPLL_2700;
		break;
	default:
		MISSING_CASE(ddi_pll_sel);
		fallthrough;
	case PORT_CLK_SEL_NONE:
		/* Port has no clock selected; nothing to record. */
		return;
	}

	pll = intel_get_shared_dpll_by_id(dev_priv, id);

	pipe_config->shared_dpll = pll;
	pll_active = intel_dpll_get_hw_state(dev_priv, pll,
					     &pipe_config->dpll_hw_state);
	drm_WARN_ON(&dev_priv->drm, !pll_active);
}
11225
/*
 * Determine which CPU transcoder feeds this CRTC (the pipe's own
 * transcoder by default, or the eDP/DSI panel transcoder when one is
 * routed to this pipe), grab its power domain, and return whether the
 * transcoder's pipe is actually enabled.  Acquired wakerefs are stored
 * in @wakerefs and recorded in @power_domain_mask for later release by
 * the caller.
 */
static bool hsw_get_transcoder_state(struct intel_crtc *crtc,
				     struct intel_crtc_state *pipe_config,
				     u64 *power_domain_mask,
				     intel_wakeref_t *wakerefs)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	unsigned long panel_transcoder_mask = BIT(TRANSCODER_EDP);
	unsigned long enabled_panel_transcoders = 0;
	enum transcoder panel_transcoder;
	intel_wakeref_t wf;
	u32 tmp;

	/* Gen11+ adds dedicated DSI transcoders to the panel set. */
	if (INTEL_GEN(dev_priv) >= 11)
		panel_transcoder_mask |=
			BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1);

	/* Default: the transcoder matching the pipe. */
	pipe_config->cpu_transcoder = (enum transcoder) crtc->pipe;

	/* Check whether an enabled panel transcoder is routed to this
	 * pipe instead. */
	for_each_cpu_transcoder_masked(dev_priv, panel_transcoder,
				       panel_transcoder_mask) {
		bool force_thru = false;
		enum pipe trans_pipe;

		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(panel_transcoder));
		if (!(tmp & TRANS_DDI_FUNC_ENABLE))
			continue;

		/* Track all enabled panel transcoders; only the first
		 * one found is decoded further (see WARN below for the
		 * only legitimate multi-enabled combination). */
		enabled_panel_transcoders |= BIT(panel_transcoder);
		if (enabled_panel_transcoders != BIT(panel_transcoder))
			continue;

		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		default:
			drm_WARN(dev, 1,
				 "unknown pipe linked to transcoder %s\n",
				 transcoder_name(panel_transcoder));
			fallthrough;
		case TRANS_DDI_EDP_INPUT_A_ONOFF:
			force_thru = true;
			fallthrough;
		case TRANS_DDI_EDP_INPUT_A_ON:
			trans_pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			trans_pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			trans_pipe = PIPE_C;
			break;
		case TRANS_DDI_EDP_INPUT_D_ONOFF:
			trans_pipe = PIPE_D;
			break;
		}

		if (trans_pipe == crtc->pipe) {
			pipe_config->cpu_transcoder = panel_transcoder;
			pipe_config->pch_pfit.force_thru = force_thru;
		}
	}

	/* If the eDP transcoder is enabled it must be the only enabled
	 * panel transcoder. */
	drm_WARN_ON(dev, (enabled_panel_transcoders & BIT(TRANSCODER_EDP)) &&
		    enabled_panel_transcoders != BIT(TRANSCODER_EDP));

	power_domain = POWER_DOMAIN_TRANSCODER(pipe_config->cpu_transcoder);
	drm_WARN_ON(dev, *power_domain_mask & BIT_ULL(power_domain));

	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wf)
		return false;

	wakerefs[power_domain] = wf;
	*power_domain_mask |= BIT_ULL(power_domain);

	tmp = intel_de_read(dev_priv, PIPECONF(pipe_config->cpu_transcoder));

	return tmp & PIPECONF_ENABLE;
}
11322
/*
 * Check whether a BXT DSI transcoder (port A or C) is driving this
 * CRTC.  On a match, pipe_config->cpu_transcoder is set to the DSI
 * transcoder.  Acquired transcoder power wakerefs are stored in
 * @wakerefs / @power_domain_mask for the caller to release.
 * Returns true iff the CRTC's transcoder ends up being a DSI one.
 */
static bool bxt_get_dsi_transcoder_state(struct intel_crtc *crtc,
					 struct intel_crtc_state *pipe_config,
					 u64 *power_domain_mask,
					 intel_wakeref_t *wakerefs)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	enum transcoder cpu_transcoder;
	intel_wakeref_t wf;
	enum port port;
	u32 tmp;

	for_each_port_masked(port, BIT(PORT_A) | BIT(PORT_C)) {
		if (port == PORT_A)
			cpu_transcoder = TRANSCODER_DSI_A;
		else
			cpu_transcoder = TRANSCODER_DSI_C;

		power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder);
		drm_WARN_ON(dev, *power_domain_mask & BIT_ULL(power_domain));

		wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
		if (!wf)
			continue;

		wakerefs[power_domain] = wf;
		*power_domain_mask |= BIT_ULL(power_domain);

		/* Without an enabled DSI PLL neither port can be active;
		 * no need to look at the second port. */
		if (!bxt_dsi_pll_is_enabled(dev_priv))
			break;

		/* The port itself must be enabled (DPI mode)... */
		tmp = intel_de_read(dev_priv, BXT_MIPI_PORT_CTRL(port));
		if (!(tmp & DPI_ENABLE))
			continue;

		/* ...and routed to this CRTC's pipe. */
		tmp = intel_de_read(dev_priv, MIPI_CTRL(port));
		if ((tmp & BXT_PIPE_SELECT_MASK) != BXT_PIPE_SELECT(crtc->pipe))
			continue;

		pipe_config->cpu_transcoder = cpu_transcoder;
		break;
	}

	return transcoder_is_dsi(pipe_config->cpu_transcoder);
}
11377
/*
 * Determine which DDI port feeds the already-identified cpu transcoder,
 * read out the corresponding (shared) DPLL state, and, on pre-gen9
 * hardware using port E, read out the PCH/FDI configuration.
 */
static void hsw_get_ddi_port_state(struct intel_crtc *crtc,
				   struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	enum port port;
	u32 tmp;

	if (transcoder_is_dsi(cpu_transcoder)) {
		/* DSI transcoders have a fixed port mapping. */
		port = (cpu_transcoder == TRANSCODER_DSI_A) ?
			PORT_A : PORT_B;
	} else {
		/* Otherwise the port is encoded in TRANS_DDI_FUNC_CTL. */
		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(cpu_transcoder));
		if (!(tmp & TRANS_DDI_FUNC_ENABLE))
			return;
		if (INTEL_GEN(dev_priv) >= 12)
			port = TGL_TRANS_DDI_FUNC_CTL_VAL_TO_PORT(tmp);
		else
			port = TRANS_DDI_FUNC_CTL_VAL_TO_PORT(tmp);
	}

	/* Per-platform DPLL readout. */
	if (IS_DG1(dev_priv))
		dg1_get_ddi_pll(dev_priv, port, pipe_config);
	else if (INTEL_GEN(dev_priv) >= 11)
		icl_get_ddi_pll(dev_priv, port, pipe_config);
	else if (IS_CANNONLAKE(dev_priv))
		cnl_get_ddi_pll(dev_priv, port, pipe_config);
	else if (IS_GEN9_LP(dev_priv))
		bxt_get_ddi_pll(dev_priv, port, pipe_config);
	else if (IS_GEN9_BC(dev_priv))
		skl_get_ddi_pll(dev_priv, port, pipe_config);
	else
		hsw_get_ddi_pll(dev_priv, port, pipe_config);

	/*
	 * Pre-gen9, port E with the LPT transcoder enabled means the pipe
	 * is driving a PCH encoder through the FDI link; read out the FDI
	 * lane count and M/N values from pipe A's FDI RX.
	 */
	if (INTEL_GEN(dev_priv) < 9 &&
	    (port == PORT_E) && intel_de_read(dev_priv, LPT_TRANSCONF) & TRANS_ENABLE) {
		pipe_config->has_pch_encoder = true;

		tmp = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
		pipe_config->fdi_lanes = ((FDI_DP_PORT_WIDTH_MASK & tmp) >>
					  FDI_DP_PORT_WIDTH_SHIFT) + 1;

		ilk_get_fdi_m_n_config(crtc, pipe_config);
	}
}
11429
/*
 * Read out the full crtc state from the hardware for HSW+ platforms.
 *
 * Power domains are grabbed with get_if_enabled() as the readout
 * progresses; each acquired wakeref is recorded in wakerefs[] and
 * power_domain_mask so that everything can be released at "out:".
 * Returns true if the pipe is active.
 */
static bool hsw_get_pipe_config(struct intel_crtc *crtc,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	intel_wakeref_t wakerefs[POWER_DOMAIN_NUM], wf;
	enum intel_display_power_domain power_domain;
	u64 power_domain_mask;
	bool active;
	u32 tmp;

	pipe_config->master_transcoder = INVALID_TRANSCODER;

	/* If the pipe power well is off there is nothing to read out. */
	power_domain = POWER_DOMAIN_PIPE(crtc->pipe);
	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wf)
		return false;

	wakerefs[power_domain] = wf;
	power_domain_mask = BIT_ULL(power_domain);

	pipe_config->shared_dpll = NULL;

	active = hsw_get_transcoder_state(crtc, pipe_config,
					  &power_domain_mask, wakerefs);

	/* On BXT/GLK a DSI transcoder may drive the pipe instead. */
	if (IS_GEN9_LP(dev_priv) &&
	    bxt_get_dsi_transcoder_state(crtc, pipe_config,
					 &power_domain_mask, wakerefs)) {
		drm_WARN_ON(&dev_priv->drm, active);
		active = true;
	}

	intel_dsc_get_config(pipe_config);

	if (!active) {
		/* A bigjoiner slave pipe has no transcoder of its own. */
		if (!pipe_config->bigjoiner_slave)
			goto out;

		active = true;
		pipe_config->pixel_multiplier = 1;

		/* Most state can't be read out for a slave; mark it so. */
		pipe_config->quirks |= PIPE_CONFIG_QUIRK_BIGJOINER_SLAVE;
	} else if (!transcoder_is_dsi(pipe_config->cpu_transcoder) ||
	    INTEL_GEN(dev_priv) >= 11) {
		hsw_get_ddi_port_state(crtc, pipe_config);
		intel_get_transcoder_timings(crtc, pipe_config);
	}

	intel_get_pipe_src_size(crtc, pipe_config);

	if (IS_HASWELL(dev_priv)) {
		/* HSW encodes the output colorspace in PIPECONF. */
		u32 tmp = intel_de_read(dev_priv,
					PIPECONF(pipe_config->cpu_transcoder));

		if (tmp & PIPECONF_OUTPUT_COLORSPACE_YUV_HSW)
			pipe_config->output_format = INTEL_OUTPUT_FORMAT_YCBCR444;
		else
			pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	} else {
		pipe_config->output_format =
			bdw_get_pipemisc_output_format(crtc);
	}

	pipe_config->gamma_mode = intel_de_read(dev_priv,
						GAMMA_MODE(crtc->pipe));

	pipe_config->csc_mode = intel_de_read(dev_priv,
					      PIPE_CSC_MODE(crtc->pipe));

	if (INTEL_GEN(dev_priv) >= 9) {
		/* Gen9+: gamma/csc enable bits live in SKL_BOTTOM_COLOR. */
		tmp = intel_de_read(dev_priv, SKL_BOTTOM_COLOR(crtc->pipe));

		if (tmp & SKL_BOTTOM_COLOR_GAMMA_ENABLE)
			pipe_config->gamma_enable = true;

		if (tmp & SKL_BOTTOM_COLOR_CSC_ENABLE)
			pipe_config->csc_enable = true;
	} else {
		i9xx_get_pipe_color_config(pipe_config);
	}

	intel_color_get_config(pipe_config);

	tmp = intel_de_read(dev_priv, WM_LINETIME(crtc->pipe));
	pipe_config->linetime = REG_FIELD_GET(HSW_LINETIME_MASK, tmp);
	if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
		pipe_config->ips_linetime =
			REG_FIELD_GET(HSW_IPS_LINETIME_MASK, tmp);

	/* The panel fitter has its own power domain. */
	power_domain = POWER_DOMAIN_PIPE_PANEL_FITTER(crtc->pipe);
	drm_WARN_ON(&dev_priv->drm, power_domain_mask & BIT_ULL(power_domain));

	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (wf) {
		wakerefs[power_domain] = wf;
		power_domain_mask |= BIT_ULL(power_domain);

		if (INTEL_GEN(dev_priv) >= 9)
			skl_get_pfit_config(pipe_config);
		else
			ilk_get_pfit_config(pipe_config);
	}

	if (hsw_crtc_supports_ips(crtc)) {
		if (IS_HASWELL(dev_priv))
			pipe_config->ips_enabled = intel_de_read(dev_priv,
								 IPS_CTL) & IPS_ENABLE;
		else {
			/*
			 * IPS state isn't read out here for non-HSW;
			 * assume enabled so the first commit brings it to
			 * a defined state. NOTE(review): confirm this is
			 * the intended BDW behavior.
			 */
			pipe_config->ips_enabled = true;
		}
	}

	if (pipe_config->bigjoiner_slave) {
		/* Not readable on a slave pipe; use 0 as a sentinel. */
		pipe_config->pixel_multiplier = 0;
	} else if (pipe_config->cpu_transcoder != TRANSCODER_EDP &&
	    !transcoder_is_dsi(pipe_config->cpu_transcoder)) {
		pipe_config->pixel_multiplier =
			intel_de_read(dev_priv,
				      PIPE_MULT(pipe_config->cpu_transcoder)) + 1;
	} else {
		pipe_config->pixel_multiplier = 1;
	}

out:
	/* Drop every power reference taken during readout. */
	for_each_power_domain(power_domain, power_domain_mask)
		intel_display_power_put(dev_priv,
					power_domain, wakerefs[power_domain]);

	return active;
}
11568
11569static bool intel_crtc_get_pipe_config(struct intel_crtc_state *crtc_state)
11570{
11571 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
11572 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
11573
11574 if (!i915->display.get_pipe_config(crtc, crtc_state))
11575 return false;
11576
11577 crtc_state->hw.active = true;
11578
11579 intel_crtc_readout_derived_state(crtc_state);
11580
11581 return true;
11582}
11583
11584static u32 intel_cursor_base(const struct intel_plane_state *plane_state)
11585{
11586 struct drm_i915_private *dev_priv =
11587 to_i915(plane_state->uapi.plane->dev);
11588 const struct drm_framebuffer *fb = plane_state->hw.fb;
11589 const struct drm_i915_gem_object *obj = intel_fb_obj(fb);
11590 u32 base;
11591
11592 if (INTEL_INFO(dev_priv)->display.cursor_needs_physical)
11593 base = sg_dma_address(obj->mm.pages->sgl);
11594 else
11595 base = intel_plane_ggtt_offset(plane_state);
11596
11597 return base + plane_state->color_plane[0].offset;
11598}
11599
11600static u32 intel_cursor_position(const struct intel_plane_state *plane_state)
11601{
11602 int x = plane_state->uapi.dst.x1;
11603 int y = plane_state->uapi.dst.y1;
11604 u32 pos = 0;
11605
11606 if (x < 0) {
11607 pos |= CURSOR_POS_SIGN << CURSOR_X_SHIFT;
11608 x = -x;
11609 }
11610 pos |= x << CURSOR_X_SHIFT;
11611
11612 if (y < 0) {
11613 pos |= CURSOR_POS_SIGN << CURSOR_Y_SHIFT;
11614 y = -y;
11615 }
11616 pos |= y << CURSOR_Y_SHIFT;
11617
11618 return pos;
11619}
11620
11621static bool intel_cursor_size_ok(const struct intel_plane_state *plane_state)
11622{
11623 const struct drm_mode_config *config =
11624 &plane_state->uapi.plane->dev->mode_config;
11625 int width = drm_rect_width(&plane_state->uapi.dst);
11626 int height = drm_rect_height(&plane_state->uapi.dst);
11627
11628 return width > 0 && width <= config->cursor_width &&
11629 height > 0 && height <= config->cursor_height;
11630}
11631
/*
 * Pin the cursor fb and compute its surface offset and x/y start.
 * Rejects configurations that would require panning within the fb.
 * Returns 0 on success or a negative errno.
 */
static int intel_cursor_check_surface(struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->uapi.plane->dev);
	unsigned int rotation = plane_state->hw.rotation;
	int src_x, src_y;
	u32 offset;
	int ret;

	ret = intel_plane_compute_gtt(plane_state);
	if (ret)
		return ret;

	if (!plane_state->uapi.visible)
		return 0;

	src_x = plane_state->uapi.src.x1 >> 16;
	src_y = plane_state->uapi.src.y1 >> 16;

	intel_add_fb_offsets(&src_x, &src_y, plane_state, 0);
	offset = intel_plane_compute_aligned_offset(&src_x, &src_y,
						    plane_state, 0);

	/* Any residual x/y after offset alignment would mean panning. */
	if (src_x != 0 || src_y != 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "Arbitrary cursor panning not supported\n");
		return -EINVAL;
	}

	/*
	 * Write the (now zero) src origin back so later src coordinate
	 * checks see the values actually programmed.
	 */
	drm_rect_translate_to(&plane_state->uapi.src,
			      src_x << 16, src_y << 16);

	/* GMCH 180-degree rotation: point at the last pixel of the fb. */
	if (HAS_GMCH(dev_priv) && rotation & DRM_MODE_ROTATE_180) {
		const struct drm_framebuffer *fb = plane_state->hw.fb;
		int src_w = drm_rect_width(&plane_state->uapi.src) >> 16;
		int src_h = drm_rect_height(&plane_state->uapi.src) >> 16;

		offset += (src_h * src_w - 1) * fb->format->cpp[0];
	}

	plane_state->color_plane[0].offset = offset;
	plane_state->color_plane[0].x = src_x;
	plane_state->color_plane[0].y = src_y;

	return 0;
}
11683
/*
 * Common cursor plane checks: linear fb only, no scaling, surface
 * offset computation and src coordinate validation.
 * Returns 0 on success or a negative errno.
 */
static int intel_check_cursor(struct intel_crtc_state *crtc_state,
			      struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	struct drm_i915_private *i915 = to_i915(plane_state->uapi.plane->dev);
	const struct drm_rect src = plane_state->uapi.src;
	const struct drm_rect dst = plane_state->uapi.dst;
	int ret;

	if (fb && fb->modifier != DRM_FORMAT_MOD_LINEAR) {
		drm_dbg_kms(&i915->drm, "cursor cannot be tiled\n");
		return -EINVAL;
	}

	ret = intel_atomic_plane_check_clipping(plane_state, crtc_state,
						DRM_PLANE_HELPER_NO_SCALING,
						DRM_PLANE_HELPER_NO_SCALING,
						true);
	if (ret)
		return ret;

	/* The hardware is programmed with the unclipped src/dst rects. */
	plane_state->uapi.src = src;
	plane_state->uapi.dst = dst;

	ret = intel_cursor_check_surface(plane_state);
	if (ret)
		return ret;

	if (!plane_state->uapi.visible)
		return 0;

	ret = intel_plane_check_src_coordinates(plane_state);
	if (ret)
		return ret;

	return 0;
}
11722
11723static unsigned int
11724i845_cursor_max_stride(struct intel_plane *plane,
11725 u32 pixel_format, u64 modifier,
11726 unsigned int rotation)
11727{
11728 return 2048;
11729}
11730
11731static u32 i845_cursor_ctl_crtc(const struct intel_crtc_state *crtc_state)
11732{
11733 u32 cntl = 0;
11734
11735 if (crtc_state->gamma_enable)
11736 cntl |= CURSOR_GAMMA_ENABLE;
11737
11738 return cntl;
11739}
11740
11741static u32 i845_cursor_ctl(const struct intel_crtc_state *crtc_state,
11742 const struct intel_plane_state *plane_state)
11743{
11744 return CURSOR_ENABLE |
11745 CURSOR_FORMAT_ARGB |
11746 CURSOR_STRIDE(plane_state->color_plane[0].stride);
11747}
11748
11749static bool i845_cursor_size_ok(const struct intel_plane_state *plane_state)
11750{
11751 int width = drm_rect_width(&plane_state->uapi.dst);
11752
11753
11754
11755
11756
11757 return intel_cursor_size_ok(plane_state) && IS_ALIGNED(width, 64);
11758}
11759
/*
 * i845/i865-specific cursor checks: size alignment and the limited set
 * of legal strides, then precompute the control register value.
 * Returns 0 on success or a negative errno.
 */
static int i845_check_cursor(struct intel_crtc_state *crtc_state,
			     struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	struct drm_i915_private *i915 = to_i915(plane_state->uapi.plane->dev);
	int ret;

	ret = intel_check_cursor(crtc_state, plane_state);
	if (ret)
		return ret;

	/* No fb means the cursor is being turned off; nothing to check. */
	if (!fb)
		return 0;

	/* Size/alignment limits for this platform. */
	if (!i845_cursor_size_ok(plane_state)) {
		drm_dbg_kms(&i915->drm,
			    "Cursor dimension %dx%d not supported\n",
			    drm_rect_width(&plane_state->uapi.dst),
			    drm_rect_height(&plane_state->uapi.dst));
		return -EINVAL;
	}

	drm_WARN_ON(&i915->drm, plane_state->uapi.visible &&
		    plane_state->color_plane[0].stride != fb->pitches[0]);

	/* Only these power-of-two strides are programmable. */
	switch (fb->pitches[0]) {
	case 256:
	case 512:
	case 1024:
	case 2048:
		break;
	default:
		 drm_dbg_kms(&i915->drm, "Invalid cursor stride (%u)\n",
			     fb->pitches[0]);
		return -EINVAL;
	}

	plane_state->ctl = i845_cursor_ctl(crtc_state, plane_state);

	return 0;
}
11803
/*
 * Program the i845/i865 cursor registers. A NULL/invisible plane_state
 * disables the cursor (all-zero register values).
 */
static void i845_update_cursor(struct intel_plane *plane,
			       const struct intel_crtc_state *crtc_state,
			       const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	u32 cntl = 0, base = 0, pos = 0, size = 0;
	unsigned long irqflags;

	if (plane_state && plane_state->uapi.visible) {
		unsigned int width = drm_rect_width(&plane_state->uapi.dst);
		unsigned int height = drm_rect_height(&plane_state->uapi.dst);

		cntl = plane_state->ctl |
			i845_cursor_ctl_crtc(crtc_state);

		size = (height << 12) | width;

		base = intel_cursor_base(plane_state);
		pos = intel_cursor_position(plane_state);
	}

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	/*
	 * Full reprogram: disable the cursor first (CURCNTR = 0), then
	 * write base/size/pos and finally re-enable via CURCNTR.
	 * NOTE(review): the disable-first sequence suggests base/size can
	 * only be safely changed while the cursor is off - confirm.
	 */
	if (plane->cursor.base != base ||
	    plane->cursor.size != size ||
	    plane->cursor.cntl != cntl) {
		intel_de_write_fw(dev_priv, CURCNTR(PIPE_A), 0);
		intel_de_write_fw(dev_priv, CURBASE(PIPE_A), base);
		intel_de_write_fw(dev_priv, CURSIZE, size);
		intel_de_write_fw(dev_priv, CURPOS(PIPE_A), pos);
		intel_de_write_fw(dev_priv, CURCNTR(PIPE_A), cntl);

		plane->cursor.base = base;
		plane->cursor.size = size;
		plane->cursor.cntl = cntl;
	} else {
		/* Only the position changed; a single write suffices. */
		intel_de_write_fw(dev_priv, CURPOS(PIPE_A), pos);
	}

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
11848
11849static void i845_disable_cursor(struct intel_plane *plane,
11850 const struct intel_crtc_state *crtc_state)
11851{
11852 i845_update_cursor(plane, crtc_state, NULL);
11853}
11854
/*
 * Read out whether the i845 cursor is enabled in hardware; the cursor
 * is hardwired to pipe A on this platform. Returns false if the pipe
 * power well is off.
 */
static bool i845_cursor_get_hw_state(struct intel_plane *plane,
				     enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	bool ret;

	power_domain = POWER_DOMAIN_PIPE(PIPE_A);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	ret = intel_de_read(dev_priv, CURCNTR(PIPE_A)) & CURSOR_ENABLE;

	*pipe = PIPE_A;

	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
11876
11877static unsigned int
11878i9xx_cursor_max_stride(struct intel_plane *plane,
11879 u32 pixel_format, u64 modifier,
11880 unsigned int rotation)
11881{
11882 return plane->base.dev->mode_config.cursor_width * 4;
11883}
11884
11885static u32 i9xx_cursor_ctl_crtc(const struct intel_crtc_state *crtc_state)
11886{
11887 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
11888 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
11889 u32 cntl = 0;
11890
11891 if (INTEL_GEN(dev_priv) >= 11)
11892 return cntl;
11893
11894 if (crtc_state->gamma_enable)
11895 cntl = MCURSOR_GAMMA_ENABLE;
11896
11897 if (crtc_state->csc_enable)
11898 cntl |= MCURSOR_PIPE_CSC_ENABLE;
11899
11900 if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv))
11901 cntl |= MCURSOR_PIPE_SELECT(crtc->pipe);
11902
11903 return cntl;
11904}
11905
/*
 * Plane-dependent bits of the i9xx+ cursor control register: cursor
 * mode (by width), trickle feed and rotation. Returns 0 (cursor off)
 * for an unsupported width.
 */
static u32 i9xx_cursor_ctl(const struct intel_crtc_state *crtc_state,
			   const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->uapi.plane->dev);
	u32 cntl = 0;

	if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
		cntl |= MCURSOR_TRICKLE_FEED_DISABLE;

	/* The hardware only knows three ARGB cursor sizes. */
	switch (drm_rect_width(&plane_state->uapi.dst)) {
	case 64:
		cntl |= MCURSOR_MODE_64_ARGB_AX;
		break;
	case 128:
		cntl |= MCURSOR_MODE_128_ARGB_AX;
		break;
	case 256:
		cntl |= MCURSOR_MODE_256_ARGB_AX;
		break;
	default:
		MISSING_CASE(drm_rect_width(&plane_state->uapi.dst));
		return 0;
	}

	if (plane_state->hw.rotation & DRM_MODE_ROTATE_180)
		cntl |= MCURSOR_ROTATE_180;

	return cntl;
}
11936
/*
 * i9xx+ cursor size check: width must be one of the three hardware
 * cursor modes; the legal heights depend on CUR_FBC support.
 */
static bool i9xx_cursor_size_ok(const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->uapi.plane->dev);
	int width = drm_rect_width(&plane_state->uapi.dst);
	int height = drm_rect_height(&plane_state->uapi.dst);

	if (!intel_cursor_size_ok(plane_state))
		return false;

	/* Only the widths with a matching MCURSOR_MODE_* exist. */
	switch (width) {
	case 256:
	case 128:
	case 64:
		break;
	default:
		return false;
	}

	/*
	 * With CUR_FBC and no rotation the height may be anything from
	 * 8 up to the width; otherwise the cursor must be square.
	 * NOTE(review): the >= 8 floor presumably comes from the
	 * CUR_FBC_CTL programming limits - confirm against Bspec.
	 */
	if (HAS_CUR_FBC(dev_priv) &&
	    plane_state->hw.rotation & DRM_MODE_ROTATE_0) {
		if (height < 8 || height > width)
			return false;
	} else {
		if (height != width)
			return false;
	}

	return true;
}
11974
/*
 * i9xx+-specific cursor checks: supported sizes, exact stride, and the
 * CHV pipe C left-edge restriction; then precompute the control value.
 * Returns 0 on success or a negative errno.
 */
static int i9xx_check_cursor(struct intel_crtc_state *crtc_state,
			     struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	enum pipe pipe = plane->pipe;
	int ret;

	ret = intel_check_cursor(crtc_state, plane_state);
	if (ret)
		return ret;

	/* No fb means the cursor is being turned off; nothing to check. */
	if (!fb)
		return 0;

	/* Platform size limits. */
	if (!i9xx_cursor_size_ok(plane_state)) {
		drm_dbg(&dev_priv->drm,
			"Cursor dimension %dx%d not supported\n",
			drm_rect_width(&plane_state->uapi.dst),
			drm_rect_height(&plane_state->uapi.dst));
		return -EINVAL;
	}

	drm_WARN_ON(&dev_priv->drm, plane_state->uapi.visible &&
		    plane_state->color_plane[0].stride != fb->pitches[0]);

	/* Stride must exactly match width * cpp; no padding allowed. */
	if (fb->pitches[0] !=
	    drm_rect_width(&plane_state->uapi.dst) * fb->format->cpp[0]) {
		drm_dbg_kms(&dev_priv->drm,
			    "Invalid cursor stride (%u) (cursor width %d)\n",
			    fb->pitches[0],
			    drm_rect_width(&plane_state->uapi.dst));
		return -EINVAL;
	}

	/*
	 * CHV pipe C cannot have its cursor straddle the left screen
	 * edge (negative dst.x1); reject such configurations.
	 * NOTE(review): presumably a hardware erratum - confirm.
	 */
	if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_C &&
	    plane_state->uapi.visible && plane_state->uapi.dst.x1 < 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "CHV cursor C not allowed to straddle the left screen edge\n");
		return -EINVAL;
	}

	plane_state->ctl = i9xx_cursor_ctl(crtc_state, plane_state);

	return 0;
}
12034
/*
 * Program the i9xx+ cursor registers. A NULL/invisible plane_state
 * disables the cursor (all-zero register values). The CUR_FBC height
 * compression is used when the cursor is not square.
 */
static void i9xx_update_cursor(struct intel_plane *plane,
			       const struct intel_crtc_state *crtc_state,
			       const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum pipe pipe = plane->pipe;
	u32 cntl = 0, base = 0, pos = 0, fbc_ctl = 0;
	unsigned long irqflags;

	if (plane_state && plane_state->uapi.visible) {
		unsigned width = drm_rect_width(&plane_state->uapi.dst);
		unsigned height = drm_rect_height(&plane_state->uapi.dst);

		cntl = plane_state->ctl |
			i9xx_cursor_ctl_crtc(crtc_state);

		/* Non-square cursor: program the real height via CUR_FBC. */
		if (width != height)
			fbc_ctl = CUR_FBC_CTL_EN | (height - 1);

		base = intel_cursor_base(plane_state);
		pos = intel_cursor_position(plane_state);
	}

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	if (INTEL_GEN(dev_priv) >= 9)
		skl_write_cursor_wm(plane, crtc_state);

	if (!needs_modeset(crtc_state))
		intel_psr2_program_plane_sel_fetch(plane, crtc_state, plane_state, 0);

	/*
	 * Full reprogram when control/fbc/base changed; the CURBASE write
	 * comes last. NOTE(review): the CURBASE write appears to arm the
	 * other cursor registers, hence the ordering - confirm against
	 * Bspec before reordering anything here.
	 */
	if (plane->cursor.base != base ||
	    plane->cursor.size != fbc_ctl ||
	    plane->cursor.cntl != cntl) {
		if (HAS_CUR_FBC(dev_priv))
			intel_de_write_fw(dev_priv, CUR_FBC_CTL(pipe),
					  fbc_ctl);
		intel_de_write_fw(dev_priv, CURCNTR(pipe), cntl);
		intel_de_write_fw(dev_priv, CURPOS(pipe), pos);
		intel_de_write_fw(dev_priv, CURBASE(pipe), base);

		plane->cursor.base = base;
		plane->cursor.size = fbc_ctl;
		plane->cursor.cntl = cntl;
	} else {
		/* Position-only change; still finish with CURBASE. */
		intel_de_write_fw(dev_priv, CURPOS(pipe), pos);
		intel_de_write_fw(dev_priv, CURBASE(pipe), base);
	}

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
12106
12107static void i9xx_disable_cursor(struct intel_plane *plane,
12108 const struct intel_crtc_state *crtc_state)
12109{
12110 i9xx_update_cursor(plane, crtc_state, NULL);
12111}
12112
/*
 * Read out whether this cursor plane is enabled, and which pipe it is
 * attached to. On pre-ILK (except g4x) the pipe comes from the
 * register's pipe-select field; otherwise the plane/pipe mapping is
 * fixed. Returns false if the pipe power well is off.
 */
static bool i9xx_cursor_get_hw_state(struct intel_plane *plane,
				     enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	bool ret;
	u32 val;

	/*
	 * NOTE(review): the power domain used is that of the plane's own
	 * pipe, even though on old platforms the register may report a
	 * different pipe via the select field - confirm this is adequate.
	 */
	power_domain = POWER_DOMAIN_PIPE(plane->pipe);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	val = intel_de_read(dev_priv, CURCNTR(plane->pipe));

	ret = val & MCURSOR_MODE;

	if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
		*pipe = plane->pipe;
	else
		*pipe = (val & MCURSOR_PIPE_SELECT_MASK) >>
			MCURSOR_PIPE_SELECT_SHIFT;

	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
12146
12147
/* 640x480 mode used when forcing a pipe on for output load detection. */
static const struct drm_display_mode load_detect_mode = {
	DRM_MODE("640x480", DRM_MODE_TYPE_DEFAULT, 31500, 640, 664,
		 704, 832, 0, 480, 489, 491, 520, 0, DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
};
12152
12153struct drm_framebuffer *
12154intel_framebuffer_create(struct drm_i915_gem_object *obj,
12155 struct drm_mode_fb_cmd2 *mode_cmd)
12156{
12157 struct intel_framebuffer *intel_fb;
12158 int ret;
12159
12160 intel_fb = kzalloc(sizeof(*intel_fb), GFP_KERNEL);
12161 if (!intel_fb)
12162 return ERR_PTR(-ENOMEM);
12163
12164 ret = intel_framebuffer_init(intel_fb, obj, mode_cmd);
12165 if (ret)
12166 goto err;
12167
12168 return &intel_fb->base;
12169
12170err:
12171 kfree(intel_fb);
12172 return ERR_PTR(ret);
12173}
12174
/*
 * Add every plane on @crtc to @state and detach it (no crtc, no fb),
 * so a subsequent commit disables them all.
 * Returns 0 on success or a negative errno.
 */
static int intel_modeset_disable_planes(struct drm_atomic_state *state,
					struct drm_crtc *crtc)
{
	struct drm_plane *plane;
	struct drm_plane_state *plane_state;
	int ret, i;

	ret = drm_atomic_add_affected_planes(state, crtc);
	if (ret)
		return ret;

	for_each_new_plane_in_state(state, plane, plane_state, i) {
		/* Only touch planes currently assigned to this crtc. */
		if (plane_state->crtc != crtc)
			continue;

		ret = drm_atomic_set_crtc_for_plane(plane_state, NULL);
		if (ret)
			return ret;

		drm_atomic_set_fb_for_plane(plane_state, NULL);
	}

	return 0;
}
12199
/*
 * Force a pipe on so @connector can be probed for a connected load.
 *
 * Picks the connector's current crtc if it has one, otherwise the
 * first idle crtc the encoder can drive, commits a 640x480 mode with
 * all planes disabled, and stores the state needed to undo this in
 * @old (released via intel_release_load_detect_pipe()).
 *
 * Return value is mixed: true (1) on success, false (0) on failure,
 * or -EDEADLK when the caller must back off and retry the locking
 * sequence via @ctx.
 */
int intel_get_load_detect_pipe(struct drm_connector *connector,
			       struct intel_load_detect_pipe *old,
			       struct drm_modeset_acquire_ctx *ctx)
{
	struct intel_crtc *intel_crtc;
	struct intel_encoder *intel_encoder =
		intel_attached_encoder(to_intel_connector(connector));
	struct drm_crtc *possible_crtc;
	struct drm_encoder *encoder = &intel_encoder->base;
	struct drm_crtc *crtc = NULL;
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_mode_config *config = &dev->mode_config;
	struct drm_atomic_state *state = NULL, *restore_state = NULL;
	struct drm_connector_state *connector_state;
	struct intel_crtc_state *crtc_state;
	int ret, i = -1;

	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s], [ENCODER:%d:%s]\n",
		    connector->base.id, connector->name,
		    encoder->base.id, encoder->name);

	old->restore_state = NULL;

	drm_WARN_ON(dev, !drm_modeset_is_locked(&config->connection_mutex));

	/*
	 * Crtc selection:
	 *  - if the connector already has a crtc, reuse it;
	 *  - otherwise take the first crtc the encoder can drive that
	 *    is not currently enabled.
	 */
	if (connector->state->crtc) {
		crtc = connector->state->crtc;

		ret = drm_modeset_lock(&crtc->mutex, ctx);
		if (ret)
			goto fail;

		goto found;
	}

	/* Find an unused crtc among those the encoder can drive. */
	for_each_crtc(dev, possible_crtc) {
		i++;
		if (!(encoder->possible_crtcs & (1 << i)))
			continue;

		ret = drm_modeset_lock(&possible_crtc->mutex, ctx);
		if (ret)
			goto fail;

		if (possible_crtc->state->enable) {
			drm_modeset_unlock(&possible_crtc->mutex);
			continue;
		}

		crtc = possible_crtc;
		break;
	}

	/* No usable crtc at all - give up. */
	if (!crtc) {
		drm_dbg_kms(&dev_priv->drm,
			    "no pipe available for load-detect\n");
		ret = -ENODEV;
		goto fail;
	}

found:
	intel_crtc = to_intel_crtc(crtc);

	state = drm_atomic_state_alloc(dev);
	restore_state = drm_atomic_state_alloc(dev);
	if (!state || !restore_state) {
		ret = -ENOMEM;
		goto fail;
	}

	state->acquire_ctx = ctx;
	restore_state->acquire_ctx = ctx;

	/* Build the load-detect state: connector on crtc, fixed mode. */
	connector_state = drm_atomic_get_connector_state(state, connector);
	if (IS_ERR(connector_state)) {
		ret = PTR_ERR(connector_state);
		goto fail;
	}

	ret = drm_atomic_set_crtc_for_connector(connector_state, crtc);
	if (ret)
		goto fail;

	crtc_state = intel_atomic_get_crtc_state(state, intel_crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto fail;
	}

	crtc_state->uapi.active = true;

	ret = drm_atomic_set_mode_for_crtc(&crtc_state->uapi,
					   &load_detect_mode);
	if (ret)
		goto fail;

	ret = intel_modeset_disable_planes(state, crtc);
	if (ret)
		goto fail;

	/* Snapshot the current state so it can be restored afterwards. */
	ret = PTR_ERR_OR_ZERO(drm_atomic_get_connector_state(restore_state, connector));
	if (!ret)
		ret = PTR_ERR_OR_ZERO(drm_atomic_get_crtc_state(restore_state, crtc));
	if (!ret)
		ret = drm_atomic_add_affected_planes(restore_state, crtc);
	if (ret) {
		drm_dbg_kms(&dev_priv->drm,
			    "Failed to create a copy of old state to restore: %i\n",
			    ret);
		goto fail;
	}

	ret = drm_atomic_commit(state);
	if (ret) {
		drm_dbg_kms(&dev_priv->drm,
			    "failed to set mode on load-detect pipe\n");
		goto fail;
	}

	old->restore_state = restore_state;
	drm_atomic_state_put(state);

	/* Let the pipe run one full frame before sampling the load. */
	intel_wait_for_vblank(dev_priv, intel_crtc->pipe);
	return true;

fail:
	if (state) {
		drm_atomic_state_put(state);
		state = NULL;
	}
	if (restore_state) {
		drm_atomic_state_put(restore_state);
		restore_state = NULL;
	}

	/* -EDEADLK is propagated so the caller can back off and retry. */
	if (ret == -EDEADLK)
		return ret;

	return false;
}
12358
/*
 * Undo intel_get_load_detect_pipe(): commit the saved restore_state
 * (if any) and drop its reference. Safe to call if load detect never
 * committed anything (restore_state == NULL).
 */
void intel_release_load_detect_pipe(struct drm_connector *connector,
				    struct intel_load_detect_pipe *old,
				    struct drm_modeset_acquire_ctx *ctx)
{
	struct intel_encoder *intel_encoder =
		intel_attached_encoder(to_intel_connector(connector));
	struct drm_i915_private *i915 = to_i915(intel_encoder->base.dev);
	struct drm_encoder *encoder = &intel_encoder->base;
	struct drm_atomic_state *state = old->restore_state;
	int ret;

	drm_dbg_kms(&i915->drm, "[CONNECTOR:%d:%s], [ENCODER:%d:%s]\n",
		    connector->base.id, connector->name,
		    encoder->base.id, encoder->name);

	if (!state)
		return;

	ret = drm_atomic_helper_commit_duplicated_state(state, ctx);
	if (ret)
		drm_dbg_kms(&i915->drm,
			    "Couldn't release load detect pipe: %i\n", ret);
	drm_atomic_state_put(state);
}
12383
12384static int i9xx_pll_refclk(struct drm_device *dev,
12385 const struct intel_crtc_state *pipe_config)
12386{
12387 struct drm_i915_private *dev_priv = to_i915(dev);
12388 u32 dpll = pipe_config->dpll_hw_state.dpll;
12389
12390 if ((dpll & PLL_REF_INPUT_MASK) == PLLB_REF_INPUT_SPREADSPECTRUMIN)
12391 return dev_priv->vbt.lvds_ssc_freq;
12392 else if (HAS_PCH_SPLIT(dev_priv))
12393 return 120000;
12394 else if (!IS_GEN(dev_priv, 2))
12395 return 96000;
12396 else
12397 return 48000;
12398}
12399
12400
12401static void i9xx_crtc_clock_get(struct intel_crtc *crtc,
12402 struct intel_crtc_state *pipe_config)
12403{
12404 struct drm_device *dev = crtc->base.dev;
12405 struct drm_i915_private *dev_priv = to_i915(dev);
12406 enum pipe pipe = crtc->pipe;
12407 u32 dpll = pipe_config->dpll_hw_state.dpll;
12408 u32 fp;
12409 struct dpll clock;
12410 int port_clock;
12411 int refclk = i9xx_pll_refclk(dev, pipe_config);
12412
12413 if ((dpll & DISPLAY_RATE_SELECT_FPA1) == 0)
12414 fp = pipe_config->dpll_hw_state.fp0;
12415 else
12416 fp = pipe_config->dpll_hw_state.fp1;
12417
12418 clock.m1 = (fp & FP_M1_DIV_MASK) >> FP_M1_DIV_SHIFT;
12419 if (IS_PINEVIEW(dev_priv)) {
12420 clock.n = ffs((fp & FP_N_PINEVIEW_DIV_MASK) >> FP_N_DIV_SHIFT) - 1;
12421 clock.m2 = (fp & FP_M2_PINEVIEW_DIV_MASK) >> FP_M2_DIV_SHIFT;
12422 } else {
12423 clock.n = (fp & FP_N_DIV_MASK) >> FP_N_DIV_SHIFT;
12424 clock.m2 = (fp & FP_M2_DIV_MASK) >> FP_M2_DIV_SHIFT;
12425 }
12426
12427 if (!IS_GEN(dev_priv, 2)) {
12428 if (IS_PINEVIEW(dev_priv))
12429 clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK_PINEVIEW) >>
12430 DPLL_FPA01_P1_POST_DIV_SHIFT_PINEVIEW);
12431 else
12432 clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK) >>
12433 DPLL_FPA01_P1_POST_DIV_SHIFT);
12434
12435 switch (dpll & DPLL_MODE_MASK) {
12436 case DPLLB_MODE_DAC_SERIAL:
12437 clock.p2 = dpll & DPLL_DAC_SERIAL_P2_CLOCK_DIV_5 ?
12438 5 : 10;
12439 break;
12440 case DPLLB_MODE_LVDS:
12441 clock.p2 = dpll & DPLLB_LVDS_P2_CLOCK_DIV_7 ?
12442 7 : 14;
12443 break;
12444 default:
12445 drm_dbg_kms(&dev_priv->drm,
12446 "Unknown DPLL mode %08x in programmed "
12447 "mode\n", (int)(dpll & DPLL_MODE_MASK));
12448 return;
12449 }
12450
12451 if (IS_PINEVIEW(dev_priv))
12452 port_clock = pnv_calc_dpll_params(refclk, &clock);
12453 else
12454 port_clock = i9xx_calc_dpll_params(refclk, &clock);
12455 } else {
12456 u32 lvds = IS_I830(dev_priv) ? 0 : intel_de_read(dev_priv,
12457 LVDS);
12458 bool is_lvds = (pipe == 1) && (lvds & LVDS_PORT_EN);
12459
12460 if (is_lvds) {
12461 clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK_I830_LVDS) >>
12462 DPLL_FPA01_P1_POST_DIV_SHIFT);
12463
12464 if (lvds & LVDS_CLKB_POWER_UP)
12465 clock.p2 = 7;
12466 else
12467 clock.p2 = 14;
12468 } else {
12469 if (dpll & PLL_P1_DIVIDE_BY_TWO)
12470 clock.p1 = 2;
12471 else {
12472 clock.p1 = ((dpll & DPLL_FPA01_P1_POST_DIV_MASK_I830) >>
12473 DPLL_FPA01_P1_POST_DIV_SHIFT) + 2;
12474 }
12475 if (dpll & PLL_P2_DIVIDE_BY_4)
12476 clock.p2 = 4;
12477 else
12478 clock.p2 = 2;
12479 }
12480
12481 port_clock = i9xx_calc_dpll_params(refclk, &clock);
12482 }
12483
12484
12485
12486
12487
12488
12489 pipe_config->port_clock = port_clock;
12490}
12491
12492int intel_dotclock_calculate(int link_freq,
12493 const struct intel_link_m_n *m_n)
12494{
12495
12496
12497
12498
12499
12500
12501
12502
12503
12504
12505 if (!m_n->link_n)
12506 return 0;
12507
12508 return div_u64(mul_u32_u32(m_n->link_m, link_freq), m_n->link_n);
12509}
12510
12511static void ilk_pch_clock_get(struct intel_crtc *crtc,
12512 struct intel_crtc_state *pipe_config)
12513{
12514 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
12515
12516
12517 i9xx_crtc_clock_get(crtc, pipe_config);
12518
12519
12520
12521
12522
12523
12524 pipe_config->hw.adjusted_mode.crtc_clock =
12525 intel_dotclock_calculate(intel_fdi_link_freq(dev_priv, pipe_config),
12526 &pipe_config->fdi_m_n);
12527}
12528
12529static void intel_crtc_state_reset(struct intel_crtc_state *crtc_state,
12530 struct intel_crtc *crtc)
12531{
12532 memset(crtc_state, 0, sizeof(*crtc_state));
12533
12534 __drm_atomic_helper_crtc_state_reset(&crtc_state->uapi, &crtc->base);
12535
12536 crtc_state->cpu_transcoder = INVALID_TRANSCODER;
12537 crtc_state->master_transcoder = INVALID_TRANSCODER;
12538 crtc_state->hsw_workaround_pipe = INVALID_PIPE;
12539 crtc_state->output_format = INTEL_OUTPUT_FORMAT_INVALID;
12540 crtc_state->scaler_state.scaler_id = -1;
12541 crtc_state->mst_master_transcoder = INVALID_TRANSCODER;
12542}
12543
12544static struct intel_crtc_state *intel_crtc_state_alloc(struct intel_crtc *crtc)
12545{
12546 struct intel_crtc_state *crtc_state;
12547
12548 crtc_state = kmalloc(sizeof(*crtc_state), GFP_KERNEL);
12549
12550 if (crtc_state)
12551 intel_crtc_state_reset(crtc_state, crtc);
12552
12553 return crtc_state;
12554}
12555
12556
12557struct drm_display_mode *
12558intel_encoder_current_mode(struct intel_encoder *encoder)
12559{
12560 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
12561 struct intel_crtc_state *crtc_state;
12562 struct drm_display_mode *mode;
12563 struct intel_crtc *crtc;
12564 enum pipe pipe;
12565
12566 if (!encoder->get_hw_state(encoder, &pipe))
12567 return NULL;
12568
12569 crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
12570
12571 mode = kzalloc(sizeof(*mode), GFP_KERNEL);
12572 if (!mode)
12573 return NULL;
12574
12575 crtc_state = intel_crtc_state_alloc(crtc);
12576 if (!crtc_state) {
12577 kfree(mode);
12578 return NULL;
12579 }
12580
12581 if (!intel_crtc_get_pipe_config(crtc_state)) {
12582 kfree(crtc_state);
12583 kfree(mode);
12584 return NULL;
12585 }
12586
12587 intel_encoder_get_config(encoder, crtc_state);
12588
12589 intel_mode_from_crtc_timings(mode, &crtc_state->hw.adjusted_mode);
12590
12591 kfree(crtc_state);
12592
12593 return mode;
12594}
12595
static void intel_crtc_destroy(struct drm_crtc *crtc)
{
	/*
	 * Unregister the CRTC from the DRM core first, then release the
	 * containing intel_crtc (the drm_crtc is embedded in it).
	 */
	drm_crtc_cleanup(crtc);
	kfree(to_intel_crtc(crtc));
}
12603
12604
12605
12606
12607
12608
12609
12610
12611
12612
12613
12614static bool intel_wm_need_update(const struct intel_plane_state *cur,
12615 struct intel_plane_state *new)
12616{
12617
12618 if (new->uapi.visible != cur->uapi.visible)
12619 return true;
12620
12621 if (!cur->hw.fb || !new->hw.fb)
12622 return false;
12623
12624 if (cur->hw.fb->modifier != new->hw.fb->modifier ||
12625 cur->hw.rotation != new->hw.rotation ||
12626 drm_rect_width(&new->uapi.src) != drm_rect_width(&cur->uapi.src) ||
12627 drm_rect_height(&new->uapi.src) != drm_rect_height(&cur->uapi.src) ||
12628 drm_rect_width(&new->uapi.dst) != drm_rect_width(&cur->uapi.dst) ||
12629 drm_rect_height(&new->uapi.dst) != drm_rect_height(&cur->uapi.dst))
12630 return true;
12631
12632 return false;
12633}
12634
12635static bool needs_scaling(const struct intel_plane_state *state)
12636{
12637 int src_w = drm_rect_width(&state->uapi.src) >> 16;
12638 int src_h = drm_rect_height(&state->uapi.src) >> 16;
12639 int dst_w = drm_rect_width(&state->uapi.dst);
12640 int dst_h = drm_rect_height(&state->uapi.dst);
12641
12642 return (src_w != dst_w || src_h != dst_h);
12643}
12644
/*
 * intel_plane_atomic_calc_changes - compute derived CRTC state for a plane change
 * @old_crtc_state: previous CRTC state
 * @crtc_state: new CRTC state (flags updated in place)
 * @old_plane_state: previous state of the plane
 * @plane_state: new state of the plane
 *
 * Works out which watermark/cxsr/scaler related flags on @crtc_state
 * must be set as a consequence of this plane's visibility transition.
 *
 * Returns 0 on success or a negative error code (e.g. from plane
 * scaler setup).
 */
int intel_plane_atomic_calc_changes(const struct intel_crtc_state *old_crtc_state,
				    struct intel_crtc_state *crtc_state,
				    const struct intel_plane_state *old_plane_state,
				    struct intel_plane_state *plane_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	bool mode_changed = needs_modeset(crtc_state);
	bool was_crtc_enabled = old_crtc_state->hw.active;
	bool is_crtc_enabled = crtc_state->hw.active;
	bool turn_off, turn_on, visible, was_visible;
	int ret;

	/* Gen9+ non-cursor planes may need a pipe scaler (re)assigned. */
	if (INTEL_GEN(dev_priv) >= 9 && plane->id != PLANE_CURSOR) {
		ret = skl_update_scaler_plane(crtc_state, plane_state);
		if (ret)
			return ret;
	}

	was_visible = old_plane_state->uapi.visible;
	visible = plane_state->uapi.visible;

	/* A plane cannot have been visible on an inactive CRTC. */
	if (!was_crtc_enabled && drm_WARN_ON(&dev_priv->drm, was_visible))
		was_visible = false;

	/*
	 * A plane on a CRTC that is being disabled is treated as
	 * invisible, so the logic below sees a clean "turn off"
	 * transition for it.
	 */
	if (!is_crtc_enabled) {
		intel_plane_set_invisible(crtc_state, plane_state);
		visible = false;
	}

	/* Invisible before and after: nothing to do for this plane. */
	if (!was_visible && !visible)
		return 0;

	turn_off = was_visible && (!visible || mode_changed);
	turn_on = visible && (!was_visible || mode_changed);

	drm_dbg_atomic(&dev_priv->drm,
		       "[CRTC:%d:%s] with [PLANE:%d:%s] visible %i -> %i, off %i, on %i, ms %i\n",
		       crtc->base.base.id, crtc->base.name,
		       plane->base.base.id, plane->base.name,
		       was_visible, visible,
		       turn_off, turn_on, mode_changed);

	if (turn_on) {
		/* Pre-ilk (except g4x) needs watermarks updated before the enable. */
		if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv))
			crtc_state->update_wm_pre = true;

		/* cxsr is disabled around non-cursor plane enables. */
		if (plane->id != PLANE_CURSOR)
			crtc_state->disable_cxsr = true;
	} else if (turn_off) {
		/* Pre-ilk (except g4x) needs watermarks updated after the disable. */
		if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv))
			crtc_state->update_wm_post = true;

		/* cxsr is disabled around non-cursor plane disables. */
		if (plane->id != PLANE_CURSOR)
			crtc_state->disable_cxsr = true;
	} else if (intel_wm_need_update(old_plane_state, plane_state)) {
		if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv)) {
			/* Plane changed size/etc.: refresh watermarks on both sides. */
			crtc_state->update_wm_pre = true;
			crtc_state->update_wm_post = true;
		}
	}

	if (visible || was_visible)
		crtc_state->fb_bits |= plane->frontbuffer_bit;

	/*
	 * ilk/snb/ivb: LP watermarks get disabled while a non-cursor
	 * plane is turned on or starts being scaled.
	 * NOTE(review): this looks like the sprite-scaling cxsr/LP-wm
	 * workaround for these platforms — confirm against the
	 * workaround database before changing the condition.
	 */
	if (plane->id != PLANE_CURSOR &&
	    (IS_GEN_RANGE(dev_priv, 5, 6) ||
	     IS_IVYBRIDGE(dev_priv)) &&
	    (turn_on || (!needs_scaling(old_plane_state) &&
			 needs_scaling(plane_state))))
		crtc_state->disable_lp_wm = true;

	return 0;
}
12766
12767static bool encoders_cloneable(const struct intel_encoder *a,
12768 const struct intel_encoder *b)
12769{
12770
12771 return a == b || (a->cloneable & (1 << b->type) &&
12772 b->cloneable & (1 << a->type));
12773}
12774
12775static bool check_single_encoder_cloning(struct intel_atomic_state *state,
12776 struct intel_crtc *crtc,
12777 struct intel_encoder *encoder)
12778{
12779 struct intel_encoder *source_encoder;
12780 struct drm_connector *connector;
12781 struct drm_connector_state *connector_state;
12782 int i;
12783
12784 for_each_new_connector_in_state(&state->base, connector, connector_state, i) {
12785 if (connector_state->crtc != &crtc->base)
12786 continue;
12787
12788 source_encoder =
12789 to_intel_encoder(connector_state->best_encoder);
12790 if (!encoders_cloneable(encoder, source_encoder))
12791 return false;
12792 }
12793
12794 return true;
12795}
12796
12797static int icl_add_linked_planes(struct intel_atomic_state *state)
12798{
12799 struct intel_plane *plane, *linked;
12800 struct intel_plane_state *plane_state, *linked_plane_state;
12801 int i;
12802
12803 for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
12804 linked = plane_state->planar_linked_plane;
12805
12806 if (!linked)
12807 continue;
12808
12809 linked_plane_state = intel_atomic_get_plane_state(state, linked);
12810 if (IS_ERR(linked_plane_state))
12811 return PTR_ERR(linked_plane_state);
12812
12813 drm_WARN_ON(state->base.dev,
12814 linked_plane_state->planar_linked_plane != plane);
12815 drm_WARN_ON(state->base.dev,
12816 linked_plane_state->planar_slave == plane_state->planar_slave);
12817 }
12818
12819 return 0;
12820}
12821
/*
 * icl_check_nv12_planes - (re)assign Y planes for planar YUV planes
 * @crtc_state: new CRTC state
 *
 * On gen11+ each planar (NV12-style) plane needs a second, hidden "Y"
 * plane linked to it. This first severs stale links for planes in the
 * state, then assigns a free Y-capable plane to every plane that still
 * needs one, copying the relevant parameters over.
 *
 * Returns 0 on success, -EINVAL when no free Y plane is available, or
 * an error from adding a plane state.
 */
static int icl_check_nv12_planes(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_atomic_state *state = to_intel_atomic_state(crtc_state->uapi.state);
	struct intel_plane *plane, *linked;
	struct intel_plane_state *plane_state;
	int i;

	/* Plane linking only exists on gen11+. */
	if (INTEL_GEN(dev_priv) < 11)
		return 0;

	/*
	 * Destroy all old plane links on this pipe and mark a slave
	 * plane inactive when its linked plane is no longer visible.
	 */
	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		if (plane->pipe != crtc->pipe || !plane_state->planar_linked_plane)
			continue;

		plane_state->planar_linked_plane = NULL;
		if (plane_state->planar_slave && !plane_state->uapi.visible) {
			crtc_state->active_planes &= ~BIT(plane->id);
			crtc_state->update_planes |= BIT(plane->id);
		}

		plane_state->planar_slave = false;
	}

	/* No planar planes left: nothing to link up. */
	if (!crtc_state->nv12_planes)
		return 0;

	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		struct intel_plane_state *linked_state = NULL;

		if (plane->pipe != crtc->pipe ||
		    !(crtc_state->nv12_planes & BIT(plane->id)))
			continue;

		/* Find a currently unused Y-capable plane on this pipe. */
		for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, linked) {
			if (!icl_is_nv12_y_plane(dev_priv, linked->id))
				continue;

			if (crtc_state->active_planes & BIT(linked->id))
				continue;

			linked_state = intel_atomic_get_plane_state(state, linked);
			if (IS_ERR(linked_state))
				return PTR_ERR(linked_state);

			break;
		}

		if (!linked_state) {
			drm_dbg_kms(&dev_priv->drm,
				    "Need %d free Y planes for planar YUV\n",
				    hweight8(crtc_state->nv12_planes));

			return -EINVAL;
		}

		plane_state->planar_linked_plane = linked;

		linked_state->planar_slave = true;
		linked_state->planar_linked_plane = plane;
		crtc_state->active_planes |= BIT(linked->id);
		crtc_state->update_planes |= BIT(linked->id);
		drm_dbg_kms(&dev_priv->drm, "Using %s as Y plane for %s\n",
			    linked->base.name, plane->base.name);

		/* Copy the master plane's parameters over to the Y (slave) plane. */
		linked_state->ctl = plane_state->ctl | PLANE_CTL_YUV420_Y_PLANE;
		linked_state->color_ctl = plane_state->color_ctl;
		linked_state->view = plane_state->view;
		memcpy(linked_state->color_plane, plane_state->color_plane,
		       sizeof(linked_state->color_plane));

		intel_plane_copy_hw_state(linked_state, plane_state);
		linked_state->uapi.src = plane_state->uapi.src;
		linked_state->uapi.dst = plane_state->uapi.dst;

		if (icl_is_hdr_plane(dev_priv, plane->id)) {
			/* Tell the HDR plane which hw plane carries its Y data. */
			if (linked->id == PLANE_SPRITE5)
				plane_state->cus_ctl |= PLANE_CUS_PLANE_7;
			else if (linked->id == PLANE_SPRITE4)
				plane_state->cus_ctl |= PLANE_CUS_PLANE_6;
			else if (linked->id == PLANE_SPRITE3)
				plane_state->cus_ctl |= PLANE_CUS_PLANE_5_RKL;
			else if (linked->id == PLANE_SPRITE2)
				plane_state->cus_ctl |= PLANE_CUS_PLANE_4_RKL;
			else
				MISSING_CASE(linked->id);
		}
	}

	return 0;
}
12919
12920static bool c8_planes_changed(const struct intel_crtc_state *new_crtc_state)
12921{
12922 struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc);
12923 struct intel_atomic_state *state =
12924 to_intel_atomic_state(new_crtc_state->uapi.state);
12925 const struct intel_crtc_state *old_crtc_state =
12926 intel_atomic_get_old_crtc_state(state, crtc);
12927
12928 return !old_crtc_state->c8_planes != !new_crtc_state->c8_planes;
12929}
12930
12931static u16 hsw_linetime_wm(const struct intel_crtc_state *crtc_state)
12932{
12933 const struct drm_display_mode *pipe_mode =
12934 &crtc_state->hw.pipe_mode;
12935 int linetime_wm;
12936
12937 if (!crtc_state->hw.enable)
12938 return 0;
12939
12940 linetime_wm = DIV_ROUND_CLOSEST(pipe_mode->crtc_htotal * 1000 * 8,
12941 pipe_mode->crtc_clock);
12942
12943 return min(linetime_wm, 0x1ff);
12944}
12945
12946static u16 hsw_ips_linetime_wm(const struct intel_crtc_state *crtc_state,
12947 const struct intel_cdclk_state *cdclk_state)
12948{
12949 const struct drm_display_mode *pipe_mode =
12950 &crtc_state->hw.pipe_mode;
12951 int linetime_wm;
12952
12953 if (!crtc_state->hw.enable)
12954 return 0;
12955
12956 linetime_wm = DIV_ROUND_CLOSEST(pipe_mode->crtc_htotal * 1000 * 8,
12957 cdclk_state->logical.cdclk);
12958
12959 return min(linetime_wm, 0x1ff);
12960}
12961
12962static u16 skl_linetime_wm(const struct intel_crtc_state *crtc_state)
12963{
12964 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
12965 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
12966 const struct drm_display_mode *pipe_mode =
12967 &crtc_state->hw.pipe_mode;
12968 int linetime_wm;
12969
12970 if (!crtc_state->hw.enable)
12971 return 0;
12972
12973 linetime_wm = DIV_ROUND_UP(pipe_mode->crtc_htotal * 1000 * 8,
12974 crtc_state->pixel_rate);
12975
12976
12977 if (IS_GEN9_LP(dev_priv) && dev_priv->ipc_enabled)
12978 linetime_wm /= 2;
12979
12980 return min(linetime_wm, 0x1ff);
12981}
12982
12983static int hsw_compute_linetime_wm(struct intel_atomic_state *state,
12984 struct intel_crtc *crtc)
12985{
12986 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
12987 struct intel_crtc_state *crtc_state =
12988 intel_atomic_get_new_crtc_state(state, crtc);
12989 const struct intel_cdclk_state *cdclk_state;
12990
12991 if (INTEL_GEN(dev_priv) >= 9)
12992 crtc_state->linetime = skl_linetime_wm(crtc_state);
12993 else
12994 crtc_state->linetime = hsw_linetime_wm(crtc_state);
12995
12996 if (!hsw_crtc_supports_ips(crtc))
12997 return 0;
12998
12999 cdclk_state = intel_atomic_get_cdclk_state(state);
13000 if (IS_ERR(cdclk_state))
13001 return PTR_ERR(cdclk_state);
13002
13003 crtc_state->ips_linetime = hsw_ips_linetime_wm(crtc_state,
13004 cdclk_state);
13005
13006 return 0;
13007}
13008
/*
 * intel_crtc_atomic_check - per-CRTC atomic state computation/validation
 * @state: overall atomic state
 * @crtc: CRTC being checked
 *
 * Computes the derived per-CRTC state: clocks, color management,
 * watermarks, pipe/plane scalers, IPS and linetime values.
 *
 * Returns 0 on success or a negative error code.
 */
static int intel_crtc_atomic_check(struct intel_atomic_state *state,
				   struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	bool mode_changed = needs_modeset(crtc_state);
	int ret;

	/* Pre-ilk (except g4x): update watermarks after disabling a pipe. */
	if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv) &&
	    mode_changed && !crtc_state->hw.active)
		crtc_state->update_wm_post = true;

	/*
	 * Compute the new clock/PLL state. Skipped for bigjoiner slaves
	 * (they follow their master) and when a shared DPLL is already
	 * assigned (which would indicate a bug at this point).
	 */
	if (mode_changed && crtc_state->hw.enable &&
	    dev_priv->display.crtc_compute_clock &&
	    !crtc_state->bigjoiner_slave &&
	    !drm_WARN_ON(&dev_priv->drm, crtc_state->shared_dpll)) {
		ret = dev_priv->display.crtc_compute_clock(crtc, crtc_state);
		if (ret)
			return ret;
	}

	/*
	 * A change in C8 plane usage is treated as a color management
	 * change, since C8 depends on the LUT configuration.
	 */
	if (c8_planes_changed(crtc_state))
		crtc_state->uapi.color_mgmt_changed = true;

	if (mode_changed || crtc_state->update_pipe ||
	    crtc_state->uapi.color_mgmt_changed) {
		ret = intel_color_check(crtc_state);
		if (ret)
			return ret;
	}

	/* Validate the target (final) watermarks for the new state. */
	if (dev_priv->display.compute_pipe_wm) {
		ret = dev_priv->display.compute_pipe_wm(crtc_state);
		if (ret) {
			drm_dbg_kms(&dev_priv->drm,
				    "Target pipe watermarks are invalid\n");
			return ret;
		}
	}

	if (dev_priv->display.compute_intermediate_wm) {
		/* Intermediate watermarks make no sense without target ones. */
		if (drm_WARN_ON(&dev_priv->drm,
				!dev_priv->display.compute_pipe_wm))
			return 0;

		/*
		 * Compute watermarks that remain valid during the
		 * transition from the old to the new configuration.
		 */
		ret = dev_priv->display.compute_intermediate_wm(crtc_state);
		if (ret) {
			drm_dbg_kms(&dev_priv->drm,
				    "No valid intermediate pipe watermarks are possible\n");
			return ret;
		}
	}

	/* Gen9+: (re)compute the pipe scaler and assign hw scalers. */
	if (INTEL_GEN(dev_priv) >= 9) {
		if (mode_changed || crtc_state->update_pipe) {
			ret = skl_update_scaler_crtc(crtc_state);
			if (ret)
				return ret;
		}

		ret = intel_atomic_setup_scalers(dev_priv, crtc, crtc_state);
		if (ret)
			return ret;
	}

	if (HAS_IPS(dev_priv)) {
		ret = hsw_compute_ips_config(crtc_state);
		if (ret)
			return ret;
	}

	if (INTEL_GEN(dev_priv) >= 9 ||
	    IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv)) {
		ret = hsw_compute_linetime_wm(state, crtc);
		if (ret)
			return ret;

	}

	/* PSR2 selective fetch is only updated outside full modesets. */
	if (!mode_changed) {
		ret = intel_psr2_sel_fetch_update(state, crtc);
		if (ret)
			return ret;
	}

	return 0;
}
13106
/*
 * intel_modeset_update_connector_atomic_state - sync connector atomic state
 * @dev: drm device
 *
 * Makes each connector's atomic state mirror its current (non-atomic)
 * encoder/crtc linkage, taking or dropping a connector reference so
 * that a connector bound to a CRTC holds a reference on itself.
 */
static void intel_modeset_update_connector_atomic_state(struct drm_device *dev)
{
	struct intel_connector *connector;
	struct drm_connector_list_iter conn_iter;

	drm_connector_list_iter_begin(dev, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		/* Drop the reference held for the previous crtc binding. */
		if (connector->base.state->crtc)
			drm_connector_put(&connector->base);

		if (connector->base.encoder) {
			connector->base.state->best_encoder =
				connector->base.encoder;
			connector->base.state->crtc =
				connector->base.encoder->crtc;

			/* Hold a reference while bound to a crtc. */
			drm_connector_get(&connector->base);
		} else {
			connector->base.state->best_encoder = NULL;
			connector->base.state->crtc = NULL;
		}
	}
	drm_connector_list_iter_end(&conn_iter);
}
13131
13132static int
13133compute_sink_pipe_bpp(const struct drm_connector_state *conn_state,
13134 struct intel_crtc_state *pipe_config)
13135{
13136 struct drm_connector *connector = conn_state->connector;
13137 struct drm_i915_private *i915 = to_i915(pipe_config->uapi.crtc->dev);
13138 const struct drm_display_info *info = &connector->display_info;
13139 int bpp;
13140
13141 switch (conn_state->max_bpc) {
13142 case 6 ... 7:
13143 bpp = 6 * 3;
13144 break;
13145 case 8 ... 9:
13146 bpp = 8 * 3;
13147 break;
13148 case 10 ... 11:
13149 bpp = 10 * 3;
13150 break;
13151 case 12 ... 16:
13152 bpp = 12 * 3;
13153 break;
13154 default:
13155 MISSING_CASE(conn_state->max_bpc);
13156 return -EINVAL;
13157 }
13158
13159 if (bpp < pipe_config->pipe_bpp) {
13160 drm_dbg_kms(&i915->drm,
13161 "[CONNECTOR:%d:%s] Limiting display bpp to %d instead of "
13162 "EDID bpp %d, requested bpp %d, max platform bpp %d\n",
13163 connector->base.id, connector->name,
13164 bpp, 3 * info->bpc,
13165 3 * conn_state->max_requested_bpc,
13166 pipe_config->pipe_bpp);
13167
13168 pipe_config->pipe_bpp = bpp;
13169 }
13170
13171 return 0;
13172}
13173
13174static int
13175compute_baseline_pipe_bpp(struct intel_crtc *crtc,
13176 struct intel_crtc_state *pipe_config)
13177{
13178 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
13179 struct drm_atomic_state *state = pipe_config->uapi.state;
13180 struct drm_connector *connector;
13181 struct drm_connector_state *connector_state;
13182 int bpp, i;
13183
13184 if ((IS_G4X(dev_priv) || IS_VALLEYVIEW(dev_priv) ||
13185 IS_CHERRYVIEW(dev_priv)))
13186 bpp = 10*3;
13187 else if (INTEL_GEN(dev_priv) >= 5)
13188 bpp = 12*3;
13189 else
13190 bpp = 8*3;
13191
13192 pipe_config->pipe_bpp = bpp;
13193
13194
13195 for_each_new_connector_in_state(state, connector, connector_state, i) {
13196 int ret;
13197
13198 if (connector_state->crtc != &crtc->base)
13199 continue;
13200
13201 ret = compute_sink_pipe_bpp(connector_state, pipe_config);
13202 if (ret)
13203 return ret;
13204 }
13205
13206 return 0;
13207}
13208
/* Log the low-level crtc_* timing fields of @mode at KMS debug level. */
static void intel_dump_crtc_timings(struct drm_i915_private *i915,
				    const struct drm_display_mode *mode)
{
	drm_dbg_kms(&i915->drm, "crtc timings: %d %d %d %d %d %d %d %d %d, "
		    "type: 0x%x flags: 0x%x\n",
		    mode->crtc_clock,
		    mode->crtc_hdisplay, mode->crtc_hsync_start,
		    mode->crtc_hsync_end, mode->crtc_htotal,
		    mode->crtc_vdisplay, mode->crtc_vsync_start,
		    mode->crtc_vsync_end, mode->crtc_vtotal,
		    mode->type, mode->flags);
}
13221
/* Log one set of link M/N values (tagged with @id) at KMS debug level. */
static void
intel_dump_m_n_config(const struct intel_crtc_state *pipe_config,
		      const char *id, unsigned int lane_count,
		      const struct intel_link_m_n *m_n)
{
	struct drm_i915_private *i915 = to_i915(pipe_config->uapi.crtc->dev);

	drm_dbg_kms(&i915->drm,
		    "%s: lanes: %i; gmch_m: %u, gmch_n: %u, link_m: %u, link_n: %u, tu: %u\n",
		    id, lane_count,
		    m_n->gmch_m, m_n->gmch_n,
		    m_n->link_m, m_n->link_n, m_n->tu);
}
13235
/* Log an HDMI infoframe; no-op unless KMS debugging is enabled. */
static void
intel_dump_infoframe(struct drm_i915_private *dev_priv,
		     const union hdmi_infoframe *frame)
{
	/* Skip the (relatively expensive) log call when it would be dropped. */
	if (!drm_debug_enabled(DRM_UT_KMS))
		return;

	hdmi_infoframe_log(KERN_DEBUG, dev_priv->drm.dev, frame);
}
13245
/* Log a DP VSC SDP; no-op unless KMS debugging is enabled. */
static void
intel_dump_dp_vsc_sdp(struct drm_i915_private *dev_priv,
		      const struct drm_dp_vsc_sdp *vsc)
{
	/* Skip the log call entirely when it would be dropped anyway. */
	if (!drm_debug_enabled(DRM_UT_KMS))
		return;

	drm_dp_vsc_sdp_log(KERN_DEBUG, dev_priv->drm.dev, vsc);
}
13255
/* Expands INTEL_OUTPUT_FOO into a "FOO" string table entry. */
#define OUTPUT_TYPE(x) [INTEL_OUTPUT_ ## x] = #x

/* Names indexed by intel_output_type; consumed by snprintf_output_types(). */
static const char * const output_type_str[] = {
	OUTPUT_TYPE(UNUSED),
	OUTPUT_TYPE(ANALOG),
	OUTPUT_TYPE(DVO),
	OUTPUT_TYPE(SDVO),
	OUTPUT_TYPE(LVDS),
	OUTPUT_TYPE(TVOUT),
	OUTPUT_TYPE(HDMI),
	OUTPUT_TYPE(DP),
	OUTPUT_TYPE(EDP),
	OUTPUT_TYPE(DSI),
	OUTPUT_TYPE(DDI),
	OUTPUT_TYPE(DP_MST),
};

#undef OUTPUT_TYPE
13274
/*
 * snprintf_output_types - format an output_types bitmask as a name list
 * @buf: destination buffer
 * @len: size of @buf
 * @output_types: bitmask of INTEL_OUTPUT_* bits
 *
 * Writes a comma separated list of output type names into @buf,
 * silently truncating when the buffer runs out.
 */
static void snprintf_output_types(char *buf, size_t len,
				  unsigned int output_types)
{
	char *str = buf;
	int i;

	str[0] = '\0';

	for (i = 0; i < ARRAY_SIZE(output_type_str); i++) {
		int r;

		if ((output_types & BIT(i)) == 0)
			continue;

		/* No leading comma before the first name. */
		r = snprintf(str, len, "%s%s",
			     str != buf ? "," : "", output_type_str[i]);
		/* snprintf returns the would-be length: >= len means truncation. */
		if (r >= len)
			break;
		str += r;
		len -= r;

		/* Clear handled bits so leftovers can be detected below. */
		output_types &= ~BIT(i);
	}

	/* Any remaining bit has no entry in output_type_str. */
	WARN_ON_ONCE(output_types != 0);
}
13301
/* Human-readable names for enum intel_output_format values. */
static const char * const output_format_str[] = {
	[INTEL_OUTPUT_FORMAT_INVALID] = "Invalid",
	[INTEL_OUTPUT_FORMAT_RGB] = "RGB",
	[INTEL_OUTPUT_FORMAT_YCBCR420] = "YCBCR4:2:0",
	[INTEL_OUTPUT_FORMAT_YCBCR444] = "YCBCR4:4:4",
};
13308
13309static const char *output_formats(enum intel_output_format format)
13310{
13311 if (format >= ARRAY_SIZE(output_format_str))
13312 format = INTEL_OUTPUT_FORMAT_INVALID;
13313 return output_format_str[format];
13314}
13315
/* Log one plane's state (fb, format, rotation, src/dst rects) at KMS debug level. */
static void intel_dump_plane_state(const struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *i915 = to_i915(plane->base.dev);
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	struct drm_format_name_buf format_name;

	if (!fb) {
		drm_dbg_kms(&i915->drm,
			    "[PLANE:%d:%s] fb: [NOFB], visible: %s\n",
			    plane->base.base.id, plane->base.name,
			    yesno(plane_state->uapi.visible));
		return;
	}

	drm_dbg_kms(&i915->drm,
		    "[PLANE:%d:%s] fb: [FB:%d] %ux%u format = %s modifier = 0x%llx, visible: %s\n",
		    plane->base.base.id, plane->base.name,
		    fb->base.id, fb->width, fb->height,
		    drm_get_format_name(fb->format->format, &format_name),
		    fb->modifier, yesno(plane_state->uapi.visible));
	drm_dbg_kms(&i915->drm, "\trotation: 0x%x, scaler: %d\n",
		    plane_state->hw.rotation, plane_state->scaler_id);
	/* src is in 16.16 fixed point, dst in integer pixels. */
	if (plane_state->uapi.visible)
		drm_dbg_kms(&i915->drm,
			    "\tsrc: " DRM_RECT_FP_FMT " dst: " DRM_RECT_FMT "\n",
			    DRM_RECT_FP_ARG(&plane_state->uapi.src),
			    DRM_RECT_ARG(&plane_state->uapi.dst));
}
13345
/*
 * intel_dump_pipe_config - dump a CRTC state at KMS debug level
 * @pipe_config: CRTC state to dump
 * @state: atomic state the CRTC belongs to; may be NULL, in which case
 *	the plane states are not dumped
 * @context: free-form string describing why the dump is happening
 */
static void intel_dump_pipe_config(const struct intel_crtc_state *pipe_config,
				   struct intel_atomic_state *state,
				   const char *context)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct intel_plane_state *plane_state;
	struct intel_plane *plane;
	char buf[64];
	int i;

	drm_dbg_kms(&dev_priv->drm, "[CRTC:%d:%s] enable: %s %s\n",
		    crtc->base.base.id, crtc->base.name,
		    yesno(pipe_config->hw.enable), context);

	/* For a disabled CRTC only the plane states are of interest. */
	if (!pipe_config->hw.enable)
		goto dump_planes;

	snprintf_output_types(buf, sizeof(buf), pipe_config->output_types);
	drm_dbg_kms(&dev_priv->drm,
		    "active: %s, output_types: %s (0x%x), output format: %s\n",
		    yesno(pipe_config->hw.active),
		    buf, pipe_config->output_types,
		    output_formats(pipe_config->output_format));

	drm_dbg_kms(&dev_priv->drm,
		    "cpu_transcoder: %s, pipe bpp: %i, dithering: %i\n",
		    transcoder_name(pipe_config->cpu_transcoder),
		    pipe_config->pipe_bpp, pipe_config->dither);

	drm_dbg_kms(&dev_priv->drm, "MST master transcoder: %s\n",
		    transcoder_name(pipe_config->mst_master_transcoder));

	drm_dbg_kms(&dev_priv->drm,
		    "port sync: master transcoder: %s, slave transcoder bitmask = 0x%x\n",
		    transcoder_name(pipe_config->master_transcoder),
		    pipe_config->sync_mode_slaves_mask);

	drm_dbg_kms(&dev_priv->drm, "bigjoiner: %s\n",
		    pipe_config->bigjoiner_slave ? "slave" :
		    pipe_config->bigjoiner ? "master" : "no");

	if (pipe_config->has_pch_encoder)
		intel_dump_m_n_config(pipe_config, "fdi",
				      pipe_config->fdi_lanes,
				      &pipe_config->fdi_m_n);

	if (intel_crtc_has_dp_encoder(pipe_config)) {
		intel_dump_m_n_config(pipe_config, "dp m_n",
				      pipe_config->lane_count, &pipe_config->dp_m_n);
		/* The M2/N2 values are only relevant with DRRS. */
		if (pipe_config->has_drrs)
			intel_dump_m_n_config(pipe_config, "dp m2_n2",
					      pipe_config->lane_count,
					      &pipe_config->dp_m2_n2);
	}

	drm_dbg_kms(&dev_priv->drm,
		    "audio: %i, infoframes: %i, infoframes enabled: 0x%x\n",
		    pipe_config->has_audio, pipe_config->has_infoframe,
		    pipe_config->infoframes.enable);

	/* Dump every infoframe/SDP enabled in the state. */
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GENERAL_CONTROL))
		drm_dbg_kms(&dev_priv->drm, "GCP: 0x%x\n",
			    pipe_config->infoframes.gcp);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_INFOFRAME_TYPE_AVI))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.avi);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_INFOFRAME_TYPE_SPD))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.spd);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_INFOFRAME_TYPE_VENDOR))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.hdmi);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_INFOFRAME_TYPE_DRM))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.drm);
	/*
	 * NOTE(review): the gamut metadata packet dumps the same
	 * infoframes.drm payload as the DRM infoframe above — looks like
	 * the HDR metadata is carried in gamut metadata packets; confirm
	 * this aliasing is intended.
	 */
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.drm);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(DP_SDP_VSC))
		intel_dump_dp_vsc_sdp(dev_priv, &pipe_config->infoframes.vsc);

	/* Requested, adjusted and pipe modes with their raw crtc_ timings. */
	drm_dbg_kms(&dev_priv->drm, "requested mode:\n");
	drm_mode_debug_printmodeline(&pipe_config->hw.mode);
	drm_dbg_kms(&dev_priv->drm, "adjusted mode:\n");
	drm_mode_debug_printmodeline(&pipe_config->hw.adjusted_mode);
	intel_dump_crtc_timings(dev_priv, &pipe_config->hw.adjusted_mode);
	drm_dbg_kms(&dev_priv->drm, "pipe mode:\n");
	drm_mode_debug_printmodeline(&pipe_config->hw.pipe_mode);
	intel_dump_crtc_timings(dev_priv, &pipe_config->hw.pipe_mode);
	drm_dbg_kms(&dev_priv->drm,
		    "port clock: %d, pipe src size: %dx%d, pixel rate %d\n",
		    pipe_config->port_clock,
		    pipe_config->pipe_src_w, pipe_config->pipe_src_h,
		    pipe_config->pixel_rate);

	drm_dbg_kms(&dev_priv->drm, "linetime: %d, ips linetime: %d\n",
		    pipe_config->linetime, pipe_config->ips_linetime);

	if (INTEL_GEN(dev_priv) >= 9)
		drm_dbg_kms(&dev_priv->drm,
			    "num_scalers: %d, scaler_users: 0x%x, scaler_id: %d\n",
			    crtc->num_scalers,
			    pipe_config->scaler_state.scaler_users,
			    pipe_config->scaler_state.scaler_id);

	/* Panel fitter state differs between GMCH and PCH platforms. */
	if (HAS_GMCH(dev_priv))
		drm_dbg_kms(&dev_priv->drm,
			    "gmch pfit: control: 0x%08x, ratios: 0x%08x, lvds border: 0x%08x\n",
			    pipe_config->gmch_pfit.control,
			    pipe_config->gmch_pfit.pgm_ratios,
			    pipe_config->gmch_pfit.lvds_border_bits);
	else
		drm_dbg_kms(&dev_priv->drm,
			    "pch pfit: " DRM_RECT_FMT ", %s, force thru: %s\n",
			    DRM_RECT_ARG(&pipe_config->pch_pfit.dst),
			    enableddisabled(pipe_config->pch_pfit.enabled),
			    yesno(pipe_config->pch_pfit.force_thru));

	drm_dbg_kms(&dev_priv->drm, "ips: %i, double wide: %i\n",
		    pipe_config->ips_enabled, pipe_config->double_wide);

	intel_dpll_dump_hw_state(dev_priv, &pipe_config->dpll_hw_state);

	/* CHV has a CGM unit instead of the CSC block. */
	if (IS_CHERRYVIEW(dev_priv))
		drm_dbg_kms(&dev_priv->drm,
			    "cgm_mode: 0x%x gamma_mode: 0x%x gamma_enable: %d csc_enable: %d\n",
			    pipe_config->cgm_mode, pipe_config->gamma_mode,
			    pipe_config->gamma_enable, pipe_config->csc_enable);
	else
		drm_dbg_kms(&dev_priv->drm,
			    "csc_mode: 0x%x gamma_mode: 0x%x gamma_enable: %d csc_enable: %d\n",
			    pipe_config->csc_mode, pipe_config->gamma_mode,
			    pipe_config->gamma_enable, pipe_config->csc_enable);

	drm_dbg_kms(&dev_priv->drm, "degamma lut: %d entries, gamma lut: %d entries\n",
		    pipe_config->hw.degamma_lut ?
		    drm_color_lut_size(pipe_config->hw.degamma_lut) : 0,
		    pipe_config->hw.gamma_lut ?
		    drm_color_lut_size(pipe_config->hw.gamma_lut) : 0);

dump_planes:
	if (!state)
		return;

	/* Dump every plane of this CRTC that is part of the state. */
	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		if (plane->pipe == crtc->pipe)
			intel_dump_plane_state(plane_state);
	}
}
13498
/*
 * Check that no digital port is claimed by more than one active
 * connector, and that no port carries both a DP MST stream and a
 * non-MST output at the same time.  Returns true when the
 * configuration is conflict free.
 */
static bool check_digital_port_conflicts(struct intel_atomic_state *state)
{
	struct drm_device *dev = state->base.dev;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	unsigned int used_ports = 0;	/* ports claimed by non-MST encoders */
	unsigned int used_mst_ports = 0;	/* ports claimed by MST encoders */
	bool ret = true;

	/*
	 * Walking the connector list and peeking at connector->state
	 * below requires connection_mutex; assert so broken callers are
	 * caught.
	 */
	drm_modeset_lock_assert_held(&dev->mode_config.connection_mutex);

	/*
	 * Look at ALL connectors, not just the ones in @state: a
	 * connector untouched by this commit still occupies its port.
	 * Connectors without a new state in @state fall back to their
	 * current state.
	 */
	drm_connector_list_iter_begin(dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *connector_state;
		struct intel_encoder *encoder;

		connector_state =
			drm_atomic_get_new_connector_state(&state->base,
							   connector);
		if (!connector_state)
			connector_state = connector->state;

		if (!connector_state->best_encoder)
			continue;

		encoder = to_intel_encoder(connector_state->best_encoder);

		/* an encoder in use should always have a crtc assigned */
		drm_WARN_ON(dev, !connector_state->crtc);

		switch (encoder->type) {
		case INTEL_OUTPUT_DDI:
			if (drm_WARN_ON(dev, !HAS_DDI(to_i915(dev))))
				break;
			fallthrough;
		case INTEL_OUTPUT_DP:
		case INTEL_OUTPUT_HDMI:
		case INTEL_OUTPUT_EDP:
			/* the same port must not be claimed twice */
			if (used_ports & BIT(encoder->port))
				ret = false;

			used_ports |= BIT(encoder->port);
			break;
		case INTEL_OUTPUT_DP_MST:
			used_mst_ports |=
				1 << encoder->port;
			break;
		default:
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	/* can't mix MST and SST/HDMI on the same port */
	if (used_ports & used_mst_ports)
		return false;

	return ret;
}
13567
13568static void
13569intel_crtc_copy_uapi_to_hw_state_nomodeset(struct intel_atomic_state *state,
13570 struct intel_crtc_state *crtc_state)
13571{
13572 const struct intel_crtc_state *from_crtc_state = crtc_state;
13573
13574 if (crtc_state->bigjoiner_slave) {
13575 from_crtc_state = intel_atomic_get_new_crtc_state(state,
13576 crtc_state->bigjoiner_linked_crtc);
13577
13578
13579 if (!from_crtc_state)
13580 return;
13581 }
13582
13583 intel_crtc_copy_color_blobs(crtc_state, from_crtc_state);
13584}
13585
13586static void
13587intel_crtc_copy_uapi_to_hw_state(struct intel_atomic_state *state,
13588 struct intel_crtc_state *crtc_state)
13589{
13590 crtc_state->hw.enable = crtc_state->uapi.enable;
13591 crtc_state->hw.active = crtc_state->uapi.active;
13592 crtc_state->hw.mode = crtc_state->uapi.mode;
13593 crtc_state->hw.adjusted_mode = crtc_state->uapi.adjusted_mode;
13594 crtc_state->hw.scaling_filter = crtc_state->uapi.scaling_filter;
13595
13596 intel_crtc_copy_uapi_to_hw_state_nomodeset(state, crtc_state);
13597}
13598
13599static void intel_crtc_copy_hw_to_uapi_state(struct intel_crtc_state *crtc_state)
13600{
13601 if (crtc_state->bigjoiner_slave)
13602 return;
13603
13604 crtc_state->uapi.enable = crtc_state->hw.enable;
13605 crtc_state->uapi.active = crtc_state->hw.active;
13606 drm_WARN_ON(crtc_state->uapi.crtc->dev,
13607 drm_atomic_set_mode_for_crtc(&crtc_state->uapi, &crtc_state->hw.mode) < 0);
13608
13609 crtc_state->uapi.adjusted_mode = crtc_state->hw.adjusted_mode;
13610 crtc_state->uapi.scaling_filter = crtc_state->hw.scaling_filter;
13611
13612
13613 drm_property_replace_blob(&crtc_state->uapi.degamma_lut,
13614 crtc_state->hw.degamma_lut);
13615 drm_property_replace_blob(&crtc_state->uapi.gamma_lut,
13616 crtc_state->hw.gamma_lut);
13617 drm_property_replace_blob(&crtc_state->uapi.ctm,
13618 crtc_state->hw.ctm);
13619}
13620
/*
 * Turn @crtc_state into the state of a bigjoiner slave derived from
 * the master's @from_crtc_state.
 *
 * The master state is duplicated wholesale; the fields that must stay
 * per-pipe (uapi state, scaler state, DPLL selection, CRC enable) are
 * preserved from the slave's own state, the hw state is cleared and
 * rebuilt from the master, and the slave-specific fields are fixed up
 * last.
 *
 * Returns 0 on success or -ENOMEM.
 */
static int
copy_bigjoiner_crtc_state(struct intel_crtc_state *crtc_state,
			  const struct intel_crtc_state *from_crtc_state)
{
	struct intel_crtc_state *saved_state;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	saved_state = kmemdup(from_crtc_state, sizeof(*saved_state), GFP_KERNEL);
	if (!saved_state)
		return -ENOMEM;

	/* keep the slave's own per-pipe bits in the duplicated state */
	saved_state->uapi = crtc_state->uapi;
	saved_state->scaler_state = crtc_state->scaler_state;
	saved_state->shared_dpll = crtc_state->shared_dpll;
	saved_state->dpll_hw_state = crtc_state->dpll_hw_state;
	saved_state->crc_enabled = crtc_state->crc_enabled;

	/* free old blob references before they are overwritten wholesale */
	intel_crtc_free_hw_state(crtc_state);
	memcpy(crtc_state, saved_state, sizeof(*crtc_state));
	kfree(saved_state);

	/* re-init hw state from the master (sizeof is compile-time, safe) */
	memset(&crtc_state->hw, 0, sizeof(saved_state->hw));
	crtc_state->hw.enable = from_crtc_state->hw.enable;
	crtc_state->hw.active = from_crtc_state->hw.active;
	crtc_state->hw.pipe_mode = from_crtc_state->hw.pipe_mode;
	crtc_state->hw.adjusted_mode = from_crtc_state->hw.adjusted_mode;

	/* slave-specific fixups */
	crtc_state->uapi.mode_changed = from_crtc_state->uapi.mode_changed;
	crtc_state->uapi.connectors_changed = from_crtc_state->uapi.connectors_changed;
	crtc_state->uapi.active_changed = from_crtc_state->uapi.active_changed;
	crtc_state->nv12_planes = crtc_state->c8_planes = crtc_state->update_planes = 0;
	crtc_state->bigjoiner_linked_crtc = to_intel_crtc(from_crtc_state->uapi.crtc);
	crtc_state->bigjoiner_slave = true;
	/* the slave's transcoder maps 1:1 to its own pipe */
	crtc_state->cpu_transcoder = (enum transcoder)crtc->pipe;
	crtc_state->has_audio = false;

	return 0;
}
13661
/*
 * Reset @crtc_state to a freshly allocated (cleared) state while
 * preserving the pieces that must survive a full state recompute:
 * the uapi state, scaler state, DPLL selection (including the ICL
 * port-dpll candidates) and CRC enable.  On g4x/vlv/chv the wm state
 * is preserved as well (presumably managed outside the atomic check
 * on those platforms -- NOTE(review): confirm against intel_pm.c).
 *
 * Returns 0 on success or -ENOMEM.
 */
static int
intel_crtc_prepare_cleared_state(struct intel_atomic_state *state,
				 struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_crtc_state *saved_state;

	saved_state = intel_crtc_state_alloc(crtc);
	if (!saved_state)
		return -ENOMEM;

	/* free old blob references before the state is overwritten below */
	intel_crtc_free_hw_state(crtc_state);

	/* carry over the fields that must not be cleared */
	saved_state->uapi = crtc_state->uapi;
	saved_state->scaler_state = crtc_state->scaler_state;
	saved_state->shared_dpll = crtc_state->shared_dpll;
	saved_state->dpll_hw_state = crtc_state->dpll_hw_state;
	memcpy(saved_state->icl_port_dplls, crtc_state->icl_port_dplls,
	       sizeof(saved_state->icl_port_dplls));
	saved_state->crc_enabled = crtc_state->crc_enabled;
	if (IS_G4X(dev_priv) ||
	    IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		saved_state->wm = crtc_state->wm;

	memcpy(crtc_state, saved_state, sizeof(*crtc_state));
	kfree(saved_state);

	/* rebuild the hw state from the preserved uapi state */
	intel_crtc_copy_uapi_to_hw_state(state, crtc_state);

	return 0;
}
13700
/*
 * Compute the full pipe configuration for a modeset: baseline pipe
 * bpp, per-encoder ->compute_config() hooks, and the crtc fixup.
 * If the crtc fixup reports RETRY (bandwidth constrained) the encoder
 * stage is re-run exactly once.
 *
 * Returns 0 on success, -EDEADLK on modeset-lock contention (caller
 * must back off and restart the atomic check), or another negative
 * error code on failure.
 */
static int
intel_modeset_pipe_config(struct intel_atomic_state *state,
			  struct intel_crtc_state *pipe_config)
{
	struct drm_crtc *crtc = pipe_config->uapi.crtc;
	struct drm_i915_private *i915 = to_i915(pipe_config->uapi.crtc->dev);
	struct drm_connector *connector;
	struct drm_connector_state *connector_state;
	int base_bpp, ret, i;
	bool retry = true;	/* allow a single RETRY round trip */

	pipe_config->cpu_transcoder =
		(enum transcoder) to_intel_crtc(crtc)->pipe;

	/*
	 * Sanitize sync polarity flags: if neither positive nor
	 * negative polarity is specified, default to negative for both
	 * hsync and vsync so encoders see a fully specified mode.
	 */
	if (!(pipe_config->hw.adjusted_mode.flags &
	      (DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_NHSYNC)))
		pipe_config->hw.adjusted_mode.flags |= DRM_MODE_FLAG_NHSYNC;

	if (!(pipe_config->hw.adjusted_mode.flags &
	      (DRM_MODE_FLAG_PVSYNC | DRM_MODE_FLAG_NVSYNC)))
		pipe_config->hw.adjusted_mode.flags |= DRM_MODE_FLAG_NVSYNC;

	ret = compute_baseline_pipe_bpp(to_intel_crtc(crtc),
					pipe_config);
	if (ret)
		return ret;

	/* remember the starting bpp for the debug message at the end */
	base_bpp = pipe_config->pipe_bpp;

	/*
	 * Derive the initial pipe source size from the user mode;
	 * drm_mode_get_hv_timing() accounts for stereo/interlace.
	 */
	drm_mode_get_hv_timing(&pipe_config->hw.mode,
			       &pipe_config->pipe_src_w,
			       &pipe_config->pipe_src_h);

	for_each_new_connector_in_state(&state->base, connector, connector_state, i) {
		struct intel_encoder *encoder =
			to_intel_encoder(connector_state->best_encoder);

		if (connector_state->crtc != crtc)
			continue;

		if (!check_single_encoder_cloning(state, to_intel_crtc(crtc), encoder)) {
			drm_dbg_kms(&i915->drm,
				    "rejecting invalid cloning configuration\n");
			return -EINVAL;
		}

		/*
		 * Determine output_types before running the encoders'
		 * compute_config() hooks so each hook can see the full
		 * set of outputs on this pipe.
		 */
		if (encoder->compute_output_type)
			pipe_config->output_types |=
				BIT(encoder->compute_output_type(encoder, pipe_config,
								 connector_state));
		else
			pipe_config->output_types |= BIT(encoder->type);
	}

encoder_retry:
	/* reset clock defaults in case this is a RETRY pass */
	pipe_config->port_clock = 0;
	pipe_config->pixel_multiplier = 1;

	/* fill in default crtc timings; encoders may overwrite them */
	drm_mode_set_crtcinfo(&pipe_config->hw.adjusted_mode,
			      CRTC_STEREO_DOUBLE);

	/*
	 * Let each encoder adjust and validate the mode according to
	 * its limitations and connector properties.
	 */
	for_each_new_connector_in_state(&state->base, connector, connector_state, i) {
		struct intel_encoder *encoder =
			to_intel_encoder(connector_state->best_encoder);

		if (connector_state->crtc != crtc)
			continue;

		ret = encoder->compute_config(encoder, pipe_config,
					      connector_state);
		if (ret < 0) {
			if (ret != -EDEADLK)
				drm_dbg_kms(&i915->drm,
					    "Encoder config failure: %d\n",
					    ret);
			return ret;
		}
	}

	/*
	 * Set a default port clock if no encoder provided one; done
	 * after the hooks since they may adjust the mode.
	 */
	if (!pipe_config->port_clock)
		pipe_config->port_clock = pipe_config->hw.adjusted_mode.crtc_clock
			* pipe_config->pixel_multiplier;

	ret = intel_crtc_compute_config(to_intel_crtc(crtc), pipe_config);
	if (ret == -EDEADLK)
		return ret;
	if (ret < 0) {
		drm_dbg_kms(&i915->drm, "CRTC fixup failed\n");
		return ret;
	}

	if (ret == RETRY) {
		/* only one bandwidth-constrained retry is permitted */
		if (drm_WARN(&i915->drm, !retry,
			     "loop in pipe configuration computation\n"))
			return -EINVAL;

		drm_dbg_kms(&i915->drm, "CRTC bw constrained, retrying\n");
		retry = false;
		goto encoder_retry;
	}

	/*
	 * Enable dithering only for 18bpp (6*3) pipes, unless it was
	 * explicitly force-disabled.
	 */
	pipe_config->dither = (pipe_config->pipe_bpp == 6*3) &&
		!pipe_config->dither_force_disable;
	drm_dbg_kms(&i915->drm,
		    "hw max bpp: %i, pipe bpp: %i, dithering: %i\n",
		    base_bpp, pipe_config->pipe_bpp, pipe_config->dither);

	return 0;
}
13839
13840static int
13841intel_modeset_pipe_config_late(struct intel_crtc_state *crtc_state)
13842{
13843 struct intel_atomic_state *state =
13844 to_intel_atomic_state(crtc_state->uapi.state);
13845 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
13846 struct drm_connector_state *conn_state;
13847 struct drm_connector *connector;
13848 int i;
13849
13850 for_each_new_connector_in_state(&state->base, connector,
13851 conn_state, i) {
13852 struct intel_encoder *encoder =
13853 to_intel_encoder(conn_state->best_encoder);
13854 int ret;
13855
13856 if (conn_state->crtc != &crtc->base ||
13857 !encoder->compute_config_late)
13858 continue;
13859
13860 ret = encoder->compute_config_late(encoder, crtc_state,
13861 conn_state);
13862 if (ret)
13863 return ret;
13864 }
13865
13866 return 0;
13867}
13868
/*
 * Fuzzy clock comparison: clocks match when they are identical, and
 * otherwise when their difference is within roughly 5% of their mean
 * (integer arithmetic, truncating).  Zero never matches a non-zero
 * clock.
 */
bool intel_fuzzy_clock_check(int clock1, int clock2)
{
	int sum, diff;

	if (clock1 == clock2)
		return true;

	if (clock1 == 0 || clock2 == 0)
		return false;

	sum = clock1 + clock2;
	diff = clock1 > clock2 ? clock1 - clock2 : clock2 - clock1;

	return (diff + sum) * 100 / sum < 105;
}
13886
13887static bool
13888intel_compare_m_n(unsigned int m, unsigned int n,
13889 unsigned int m2, unsigned int n2,
13890 bool exact)
13891{
13892 if (m == m2 && n == n2)
13893 return true;
13894
13895 if (exact || !m || !n || !m2 || !n2)
13896 return false;
13897
13898 BUILD_BUG_ON(DATA_LINK_M_N_MASK > INT_MAX);
13899
13900 if (n > n2) {
13901 while (n > n2) {
13902 m2 <<= 1;
13903 n2 <<= 1;
13904 }
13905 } else if (n < n2) {
13906 while (n < n2) {
13907 m <<= 1;
13908 n <<= 1;
13909 }
13910 }
13911
13912 if (n != n2)
13913 return false;
13914
13915 return intel_fuzzy_clock_check(m, m2);
13916}
13917
13918static bool
13919intel_compare_link_m_n(const struct intel_link_m_n *m_n,
13920 const struct intel_link_m_n *m2_n2,
13921 bool exact)
13922{
13923 return m_n->tu == m2_n2->tu &&
13924 intel_compare_m_n(m_n->gmch_m, m_n->gmch_n,
13925 m2_n2->gmch_m, m2_n2->gmch_n, exact) &&
13926 intel_compare_m_n(m_n->link_m, m_n->link_n,
13927 m2_n2->link_m, m2_n2->link_n, exact);
13928}
13929
13930static bool
13931intel_compare_infoframe(const union hdmi_infoframe *a,
13932 const union hdmi_infoframe *b)
13933{
13934 return memcmp(a, b, sizeof(*a)) == 0;
13935}
13936
13937static bool
13938intel_compare_dp_vsc_sdp(const struct drm_dp_vsc_sdp *a,
13939 const struct drm_dp_vsc_sdp *b)
13940{
13941 return memcmp(a, b, sizeof(*a)) == 0;
13942}
13943
13944static void
13945pipe_config_infoframe_mismatch(struct drm_i915_private *dev_priv,
13946 bool fastset, const char *name,
13947 const union hdmi_infoframe *a,
13948 const union hdmi_infoframe *b)
13949{
13950 if (fastset) {
13951 if (!drm_debug_enabled(DRM_UT_KMS))
13952 return;
13953
13954 drm_dbg_kms(&dev_priv->drm,
13955 "fastset mismatch in %s infoframe\n", name);
13956 drm_dbg_kms(&dev_priv->drm, "expected:\n");
13957 hdmi_infoframe_log(KERN_DEBUG, dev_priv->drm.dev, a);
13958 drm_dbg_kms(&dev_priv->drm, "found:\n");
13959 hdmi_infoframe_log(KERN_DEBUG, dev_priv->drm.dev, b);
13960 } else {
13961 drm_err(&dev_priv->drm, "mismatch in %s infoframe\n", name);
13962 drm_err(&dev_priv->drm, "expected:\n");
13963 hdmi_infoframe_log(KERN_ERR, dev_priv->drm.dev, a);
13964 drm_err(&dev_priv->drm, "found:\n");
13965 hdmi_infoframe_log(KERN_ERR, dev_priv->drm.dev, b);
13966 }
13967}
13968
13969static void
13970pipe_config_dp_vsc_sdp_mismatch(struct drm_i915_private *dev_priv,
13971 bool fastset, const char *name,
13972 const struct drm_dp_vsc_sdp *a,
13973 const struct drm_dp_vsc_sdp *b)
13974{
13975 if (fastset) {
13976 if (!drm_debug_enabled(DRM_UT_KMS))
13977 return;
13978
13979 drm_dbg_kms(&dev_priv->drm,
13980 "fastset mismatch in %s dp sdp\n", name);
13981 drm_dbg_kms(&dev_priv->drm, "expected:\n");
13982 drm_dp_vsc_sdp_log(KERN_DEBUG, dev_priv->drm.dev, a);
13983 drm_dbg_kms(&dev_priv->drm, "found:\n");
13984 drm_dp_vsc_sdp_log(KERN_DEBUG, dev_priv->drm.dev, b);
13985 } else {
13986 drm_err(&dev_priv->drm, "mismatch in %s dp sdp\n", name);
13987 drm_err(&dev_priv->drm, "expected:\n");
13988 drm_dp_vsc_sdp_log(KERN_ERR, dev_priv->drm.dev, a);
13989 drm_err(&dev_priv->drm, "found:\n");
13990 drm_dp_vsc_sdp_log(KERN_ERR, dev_priv->drm.dev, b);
13991 }
13992}
13993
/*
 * Log a single mismatching pipe config field @name with a printf-style
 * detail message.  A fastset mismatch is only a KMS debug message; a
 * verification mismatch is a driver error.  The varargs are forwarded
 * through %pV via struct va_format.
 */
static void __printf(4, 5)
pipe_config_mismatch(bool fastset, const struct intel_crtc *crtc,
		     const char *name, const char *format, ...)
{
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	struct va_format vaf;
	va_list args;

	va_start(args, format);
	vaf.fmt = format;
	vaf.va = &args;

	if (fastset)
		drm_dbg_kms(&i915->drm,
			    "[CRTC:%d:%s] fastset mismatch in %s %pV\n",
			    crtc->base.base.id, crtc->base.name, name, &vaf);
	else
		drm_err(&i915->drm, "[CRTC:%d:%s] mismatch in %s %pV\n",
			crtc->base.base.id, crtc->base.name, name, &vaf);

	va_end(args);
}
14016
14017static bool fastboot_enabled(struct drm_i915_private *dev_priv)
14018{
14019 if (dev_priv->params.fastboot != -1)
14020 return dev_priv->params.fastboot;
14021
14022
14023 if (INTEL_GEN(dev_priv) >= 9)
14024 return true;
14025
14026
14027 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
14028 return true;
14029
14030
14031 return false;
14032}
14033
14034static bool
14035intel_pipe_config_compare(const struct intel_crtc_state *current_config,
14036 const struct intel_crtc_state *pipe_config,
14037 bool fastset)
14038{
14039 struct drm_i915_private *dev_priv = to_i915(current_config->uapi.crtc->dev);
14040 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
14041 bool ret = true;
14042 u32 bp_gamma = 0;
14043 bool fixup_inherited = fastset &&
14044 current_config->inherited && !pipe_config->inherited;
14045
14046 if (fixup_inherited && !fastboot_enabled(dev_priv)) {
14047 drm_dbg_kms(&dev_priv->drm,
14048 "initial modeset and fastboot not set\n");
14049 ret = false;
14050 }
14051
14052#define PIPE_CONF_CHECK_X(name) do { \
14053 if (current_config->name != pipe_config->name) { \
14054 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14055 "(expected 0x%08x, found 0x%08x)", \
14056 current_config->name, \
14057 pipe_config->name); \
14058 ret = false; \
14059 } \
14060} while (0)
14061
14062#define PIPE_CONF_CHECK_I(name) do { \
14063 if (current_config->name != pipe_config->name) { \
14064 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14065 "(expected %i, found %i)", \
14066 current_config->name, \
14067 pipe_config->name); \
14068 ret = false; \
14069 } \
14070} while (0)
14071
14072#define PIPE_CONF_CHECK_BOOL(name) do { \
14073 if (current_config->name != pipe_config->name) { \
14074 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14075 "(expected %s, found %s)", \
14076 yesno(current_config->name), \
14077 yesno(pipe_config->name)); \
14078 ret = false; \
14079 } \
14080} while (0)
14081
14082
14083
14084
14085
14086
14087#define PIPE_CONF_CHECK_BOOL_INCOMPLETE(name) do { \
14088 if (!fixup_inherited || (!current_config->name && !pipe_config->name)) { \
14089 PIPE_CONF_CHECK_BOOL(name); \
14090 } else { \
14091 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14092 "unable to verify whether state matches exactly, forcing modeset (expected %s, found %s)", \
14093 yesno(current_config->name), \
14094 yesno(pipe_config->name)); \
14095 ret = false; \
14096 } \
14097} while (0)
14098
14099#define PIPE_CONF_CHECK_P(name) do { \
14100 if (current_config->name != pipe_config->name) { \
14101 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14102 "(expected %p, found %p)", \
14103 current_config->name, \
14104 pipe_config->name); \
14105 ret = false; \
14106 } \
14107} while (0)
14108
14109#define PIPE_CONF_CHECK_M_N(name) do { \
14110 if (!intel_compare_link_m_n(¤t_config->name, \
14111 &pipe_config->name,\
14112 !fastset)) { \
14113 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14114 "(expected tu %i gmch %i/%i link %i/%i, " \
14115 "found tu %i, gmch %i/%i link %i/%i)", \
14116 current_config->name.tu, \
14117 current_config->name.gmch_m, \
14118 current_config->name.gmch_n, \
14119 current_config->name.link_m, \
14120 current_config->name.link_n, \
14121 pipe_config->name.tu, \
14122 pipe_config->name.gmch_m, \
14123 pipe_config->name.gmch_n, \
14124 pipe_config->name.link_m, \
14125 pipe_config->name.link_n); \
14126 ret = false; \
14127 } \
14128} while (0)
14129
14130
14131
14132
14133
14134
14135#define PIPE_CONF_CHECK_M_N_ALT(name, alt_name) do { \
14136 if (!intel_compare_link_m_n(¤t_config->name, \
14137 &pipe_config->name, !fastset) && \
14138 !intel_compare_link_m_n(¤t_config->alt_name, \
14139 &pipe_config->name, !fastset)) { \
14140 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14141 "(expected tu %i gmch %i/%i link %i/%i, " \
14142 "or tu %i gmch %i/%i link %i/%i, " \
14143 "found tu %i, gmch %i/%i link %i/%i)", \
14144 current_config->name.tu, \
14145 current_config->name.gmch_m, \
14146 current_config->name.gmch_n, \
14147 current_config->name.link_m, \
14148 current_config->name.link_n, \
14149 current_config->alt_name.tu, \
14150 current_config->alt_name.gmch_m, \
14151 current_config->alt_name.gmch_n, \
14152 current_config->alt_name.link_m, \
14153 current_config->alt_name.link_n, \
14154 pipe_config->name.tu, \
14155 pipe_config->name.gmch_m, \
14156 pipe_config->name.gmch_n, \
14157 pipe_config->name.link_m, \
14158 pipe_config->name.link_n); \
14159 ret = false; \
14160 } \
14161} while (0)
14162
14163#define PIPE_CONF_CHECK_FLAGS(name, mask) do { \
14164 if ((current_config->name ^ pipe_config->name) & (mask)) { \
14165 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14166 "(%x) (expected %i, found %i)", \
14167 (mask), \
14168 current_config->name & (mask), \
14169 pipe_config->name & (mask)); \
14170 ret = false; \
14171 } \
14172} while (0)
14173
14174#define PIPE_CONF_CHECK_CLOCK_FUZZY(name) do { \
14175 if (!intel_fuzzy_clock_check(current_config->name, pipe_config->name)) { \
14176 pipe_config_mismatch(fastset, crtc, __stringify(name), \
14177 "(expected %i, found %i)", \
14178 current_config->name, \
14179 pipe_config->name); \
14180 ret = false; \
14181 } \
14182} while (0)
14183
14184#define PIPE_CONF_CHECK_INFOFRAME(name) do { \
14185 if (!intel_compare_infoframe(¤t_config->infoframes.name, \
14186 &pipe_config->infoframes.name)) { \
14187 pipe_config_infoframe_mismatch(dev_priv, fastset, __stringify(name), \
14188 ¤t_config->infoframes.name, \
14189 &pipe_config->infoframes.name); \
14190 ret = false; \
14191 } \
14192} while (0)
14193
14194#define PIPE_CONF_CHECK_DP_VSC_SDP(name) do { \
14195 if (!current_config->has_psr && !pipe_config->has_psr && \
14196 !intel_compare_dp_vsc_sdp(¤t_config->infoframes.name, \
14197 &pipe_config->infoframes.name)) { \
14198 pipe_config_dp_vsc_sdp_mismatch(dev_priv, fastset, __stringify(name), \
14199 ¤t_config->infoframes.name, \
14200 &pipe_config->infoframes.name); \
14201 ret = false; \
14202 } \
14203} while (0)
14204
14205#define PIPE_CONF_CHECK_COLOR_LUT(name1, name2, bit_precision) do { \
14206 if (current_config->name1 != pipe_config->name1) { \
14207 pipe_config_mismatch(fastset, crtc, __stringify(name1), \
14208 "(expected %i, found %i, won't compare lut values)", \
14209 current_config->name1, \
14210 pipe_config->name1); \
14211 ret = false;\
14212 } else { \
14213 if (!intel_color_lut_equal(current_config->name2, \
14214 pipe_config->name2, pipe_config->name1, \
14215 bit_precision)) { \
14216 pipe_config_mismatch(fastset, crtc, __stringify(name2), \
14217 "hw_state doesn't match sw_state"); \
14218 ret = false; \
14219 } \
14220 } \
14221} while (0)
14222
14223#define PIPE_CONF_QUIRK(quirk) \
14224 ((current_config->quirks | pipe_config->quirks) & (quirk))
14225
14226 PIPE_CONF_CHECK_I(cpu_transcoder);
14227
14228 PIPE_CONF_CHECK_BOOL(has_pch_encoder);
14229 PIPE_CONF_CHECK_I(fdi_lanes);
14230 PIPE_CONF_CHECK_M_N(fdi_m_n);
14231
14232 PIPE_CONF_CHECK_I(lane_count);
14233 PIPE_CONF_CHECK_X(lane_lat_optim_mask);
14234
14235 if (INTEL_GEN(dev_priv) < 8) {
14236 PIPE_CONF_CHECK_M_N(dp_m_n);
14237
14238 if (current_config->has_drrs)
14239 PIPE_CONF_CHECK_M_N(dp_m2_n2);
14240 } else
14241 PIPE_CONF_CHECK_M_N_ALT(dp_m_n, dp_m2_n2);
14242
14243 PIPE_CONF_CHECK_X(output_types);
14244
14245
14246 if (!PIPE_CONF_QUIRK(PIPE_CONFIG_QUIRK_BIGJOINER_SLAVE)) {
14247 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_hdisplay);
14248 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_htotal);
14249 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_hblank_start);
14250 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_hblank_end);
14251 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_hsync_start);
14252 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_hsync_end);
14253
14254 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_vdisplay);
14255 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_vtotal);
14256 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_vblank_start);
14257 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_vblank_end);
14258 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_vsync_start);
14259 PIPE_CONF_CHECK_I(hw.pipe_mode.crtc_vsync_end);
14260
14261 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_hdisplay);
14262 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_htotal);
14263 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_hblank_start);
14264 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_hblank_end);
14265 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_hsync_start);
14266 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_hsync_end);
14267
14268 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_vdisplay);
14269 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_vtotal);
14270 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_vblank_start);
14271 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_vblank_end);
14272 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_vsync_start);
14273 PIPE_CONF_CHECK_I(hw.adjusted_mode.crtc_vsync_end);
14274
14275 PIPE_CONF_CHECK_I(pixel_multiplier);
14276
14277 PIPE_CONF_CHECK_FLAGS(hw.adjusted_mode.flags,
14278 DRM_MODE_FLAG_INTERLACE);
14279
14280 if (!PIPE_CONF_QUIRK(PIPE_CONFIG_QUIRK_MODE_SYNC_FLAGS)) {
14281 PIPE_CONF_CHECK_FLAGS(hw.adjusted_mode.flags,
14282 DRM_MODE_FLAG_PHSYNC);
14283 PIPE_CONF_CHECK_FLAGS(hw.adjusted_mode.flags,
14284 DRM_MODE_FLAG_NHSYNC);
14285 PIPE_CONF_CHECK_FLAGS(hw.adjusted_mode.flags,
14286 DRM_MODE_FLAG_PVSYNC);
14287 PIPE_CONF_CHECK_FLAGS(hw.adjusted_mode.flags,
14288 DRM_MODE_FLAG_NVSYNC);
14289 }
14290 }
14291
14292 PIPE_CONF_CHECK_I(output_format);
14293 PIPE_CONF_CHECK_BOOL(has_hdmi_sink);
14294 if ((INTEL_GEN(dev_priv) < 8 && !IS_HASWELL(dev_priv)) ||
14295 IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
14296 PIPE_CONF_CHECK_BOOL(limited_color_range);
14297
14298 PIPE_CONF_CHECK_BOOL(hdmi_scrambling);
14299 PIPE_CONF_CHECK_BOOL(hdmi_high_tmds_clock_ratio);
14300 PIPE_CONF_CHECK_BOOL(has_infoframe);
14301
14302 if (!PIPE_CONF_QUIRK(PIPE_CONFIG_QUIRK_BIGJOINER_SLAVE))
14303 PIPE_CONF_CHECK_BOOL(fec_enable);
14304
14305 PIPE_CONF_CHECK_BOOL_INCOMPLETE(has_audio);
14306
14307 PIPE_CONF_CHECK_X(gmch_pfit.control);
14308
14309 if (INTEL_GEN(dev_priv) < 4)
14310 PIPE_CONF_CHECK_X(gmch_pfit.pgm_ratios);
14311 PIPE_CONF_CHECK_X(gmch_pfit.lvds_border_bits);
14312
14313
14314
14315
14316
14317 PIPE_CONF_CHECK_BOOL(pch_pfit.force_thru);
14318
14319 if (!fastset) {
14320 PIPE_CONF_CHECK_I(pipe_src_w);
14321 PIPE_CONF_CHECK_I(pipe_src_h);
14322
14323 PIPE_CONF_CHECK_BOOL(pch_pfit.enabled);
14324 if (current_config->pch_pfit.enabled) {
14325 PIPE_CONF_CHECK_I(pch_pfit.dst.x1);
14326 PIPE_CONF_CHECK_I(pch_pfit.dst.y1);
14327 PIPE_CONF_CHECK_I(pch_pfit.dst.x2);
14328 PIPE_CONF_CHECK_I(pch_pfit.dst.y2);
14329 }
14330
14331 PIPE_CONF_CHECK_I(scaler_state.scaler_id);
14332
14333 if (!PIPE_CONF_QUIRK(PIPE_CONFIG_QUIRK_BIGJOINER_SLAVE))
14334 PIPE_CONF_CHECK_CLOCK_FUZZY(pixel_rate);
14335
14336 PIPE_CONF_CHECK_X(gamma_mode);
14337 if (IS_CHERRYVIEW(dev_priv))
14338 PIPE_CONF_CHECK_X(cgm_mode);
14339 else
14340 PIPE_CONF_CHECK_X(csc_mode);
14341 PIPE_CONF_CHECK_BOOL(gamma_enable);
14342 PIPE_CONF_CHECK_BOOL(csc_enable);
14343
14344 PIPE_CONF_CHECK_I(linetime);
14345 PIPE_CONF_CHECK_I(ips_linetime);
14346
14347 bp_gamma = intel_color_get_gamma_bit_precision(pipe_config);
14348 if (bp_gamma)
14349 PIPE_CONF_CHECK_COLOR_LUT(gamma_mode, hw.gamma_lut, bp_gamma);
14350 }
14351
14352 PIPE_CONF_CHECK_BOOL(double_wide);
14353
14354 PIPE_CONF_CHECK_P(shared_dpll);
14355
14356
14357 if (!PIPE_CONF_QUIRK(PIPE_CONFIG_QUIRK_BIGJOINER_SLAVE)) {
14358 PIPE_CONF_CHECK_X(dpll_hw_state.dpll);
14359 PIPE_CONF_CHECK_X(dpll_hw_state.dpll_md);
14360 PIPE_CONF_CHECK_X(dpll_hw_state.fp0);
14361 PIPE_CONF_CHECK_X(dpll_hw_state.fp1);
14362 PIPE_CONF_CHECK_X(dpll_hw_state.wrpll);
14363 PIPE_CONF_CHECK_X(dpll_hw_state.spll);
14364 PIPE_CONF_CHECK_X(dpll_hw_state.ctrl1);
14365 PIPE_CONF_CHECK_X(dpll_hw_state.cfgcr1);
14366 PIPE_CONF_CHECK_X(dpll_hw_state.cfgcr2);
14367 PIPE_CONF_CHECK_X(dpll_hw_state.cfgcr0);
14368 PIPE_CONF_CHECK_X(dpll_hw_state.ebb0);
14369 PIPE_CONF_CHECK_X(dpll_hw_state.ebb4);
14370 PIPE_CONF_CHECK_X(dpll_hw_state.pll0);
14371 PIPE_CONF_CHECK_X(dpll_hw_state.pll1);
14372 PIPE_CONF_CHECK_X(dpll_hw_state.pll2);
14373 PIPE_CONF_CHECK_X(dpll_hw_state.pll3);
14374 PIPE_CONF_CHECK_X(dpll_hw_state.pll6);
14375 PIPE_CONF_CHECK_X(dpll_hw_state.pll8);
14376 PIPE_CONF_CHECK_X(dpll_hw_state.pll9);
14377 PIPE_CONF_CHECK_X(dpll_hw_state.pll10);
14378 PIPE_CONF_CHECK_X(dpll_hw_state.pcsdw12);
14379 PIPE_CONF_CHECK_X(dpll_hw_state.mg_refclkin_ctl);
14380 PIPE_CONF_CHECK_X(dpll_hw_state.mg_clktop2_coreclkctl1);
14381 PIPE_CONF_CHECK_X(dpll_hw_state.mg_clktop2_hsclkctl);
14382 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_div0);
14383 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_div1);
14384 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_lf);
14385 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_frac_lock);
14386 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_ssc);
14387 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_bias);
14388 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_tdc_coldst_bias);
14389
14390 PIPE_CONF_CHECK_X(dsi_pll.ctrl);
14391 PIPE_CONF_CHECK_X(dsi_pll.div);
14392
14393 if (IS_G4X(dev_priv) || INTEL_GEN(dev_priv) >= 5)
14394 PIPE_CONF_CHECK_I(pipe_bpp);
14395
14396 PIPE_CONF_CHECK_CLOCK_FUZZY(hw.pipe_mode.crtc_clock);
14397 PIPE_CONF_CHECK_CLOCK_FUZZY(hw.adjusted_mode.crtc_clock);
14398 PIPE_CONF_CHECK_CLOCK_FUZZY(port_clock);
14399
14400 PIPE_CONF_CHECK_I(min_voltage_level);
14401 }
14402
14403 PIPE_CONF_CHECK_X(infoframes.enable);
14404 PIPE_CONF_CHECK_X(infoframes.gcp);
14405 PIPE_CONF_CHECK_INFOFRAME(avi);
14406 PIPE_CONF_CHECK_INFOFRAME(spd);
14407 PIPE_CONF_CHECK_INFOFRAME(hdmi);
14408 PIPE_CONF_CHECK_INFOFRAME(drm);
14409 PIPE_CONF_CHECK_DP_VSC_SDP(vsc);
14410
14411 PIPE_CONF_CHECK_X(sync_mode_slaves_mask);
14412 PIPE_CONF_CHECK_I(master_transcoder);
14413 PIPE_CONF_CHECK_BOOL(bigjoiner);
14414 PIPE_CONF_CHECK_BOOL(bigjoiner_slave);
14415 PIPE_CONF_CHECK_P(bigjoiner_linked_crtc);
14416
14417 PIPE_CONF_CHECK_I(dsc.compression_enable);
14418 PIPE_CONF_CHECK_I(dsc.dsc_split);
14419 PIPE_CONF_CHECK_I(dsc.compressed_bpp);
14420
14421 PIPE_CONF_CHECK_I(mst_master_transcoder);
14422
14423#undef PIPE_CONF_CHECK_X
14424#undef PIPE_CONF_CHECK_I
14425#undef PIPE_CONF_CHECK_BOOL
14426#undef PIPE_CONF_CHECK_BOOL_INCOMPLETE
14427#undef PIPE_CONF_CHECK_P
14428#undef PIPE_CONF_CHECK_FLAGS
14429#undef PIPE_CONF_CHECK_CLOCK_FUZZY
14430#undef PIPE_CONF_CHECK_COLOR_LUT
14431#undef PIPE_CONF_QUIRK
14432
14433 return ret;
14434}
14435
14436static void intel_pipe_config_sanity_check(struct drm_i915_private *dev_priv,
14437 const struct intel_crtc_state *pipe_config)
14438{
14439 if (pipe_config->has_pch_encoder) {
14440 int fdi_dotclock = intel_dotclock_calculate(intel_fdi_link_freq(dev_priv, pipe_config),
14441 &pipe_config->fdi_m_n);
14442 int dotclock = pipe_config->hw.adjusted_mode.crtc_clock;
14443
14444
14445
14446
14447
14448 drm_WARN(&dev_priv->drm,
14449 !intel_fuzzy_clock_check(fdi_dotclock, dotclock),
14450 "FDI dotclock and encoder dotclock mismatch, fdi: %i, encoder: %i\n",
14451 fdi_dotclock, dotclock);
14452 }
14453}
14454
/*
 * Cross-check the SKL+ watermark and DDB allocation state tracked in
 * software against what is actually programmed in the hardware for
 * @crtc, logging every mismatch as a driver error.  No-op on pre-gen9
 * platforms and on inactive crtcs.
 */
static void verify_wm_state(struct intel_crtc *crtc,
			    struct intel_crtc_state *new_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	/* hw readout buffers, heap-allocated because the struct is large */
	struct skl_hw_state {
		struct skl_ddb_entry ddb_y[I915_MAX_PLANES];
		struct skl_ddb_entry ddb_uv[I915_MAX_PLANES];
		struct skl_pipe_wm wm;
	} *hw;
	struct skl_pipe_wm *sw_wm;
	struct skl_ddb_entry *hw_ddb_entry, *sw_ddb_entry;
	u8 hw_enabled_slices;
	const enum pipe pipe = crtc->pipe;
	int plane, level, max_level = ilk_wm_max_level(dev_priv);

	if (INTEL_GEN(dev_priv) < 9 || !new_crtc_state->hw.active)
		return;

	hw = kzalloc(sizeof(*hw), GFP_KERNEL);
	if (!hw)
		return;	/* best-effort verification; silently skip on OOM */

	skl_pipe_wm_get_hw_state(crtc, &hw->wm);
	sw_wm = &new_crtc_state->wm.skl.optimal;

	skl_pipe_ddb_get_hw_state(crtc, hw->ddb_y, hw->ddb_uv);

	hw_enabled_slices = intel_enabled_dbuf_slices_mask(dev_priv);

	if (INTEL_GEN(dev_priv) >= 11 &&
	    hw_enabled_slices != dev_priv->dbuf.enabled_slices)
		drm_err(&dev_priv->drm,
			"mismatch in DBUF Slices (expected 0x%x, got 0x%x)\n",
			dev_priv->dbuf.enabled_slices,
			hw_enabled_slices);

	/* per-plane watermarks and DDB allocations */
	for_each_universal_plane(dev_priv, pipe, plane) {
		struct skl_plane_wm *hw_plane_wm, *sw_plane_wm;

		hw_plane_wm = &hw->wm.planes[plane];
		sw_plane_wm = &sw_wm->planes[plane];

		/* watermarks; level 0 may also legally match the SAGV wm */
		for (level = 0; level <= max_level; level++) {
			if (skl_wm_level_equals(&hw_plane_wm->wm[level],
						&sw_plane_wm->wm[level]) ||
			    (level == 0 && skl_wm_level_equals(&hw_plane_wm->wm[level],
							       &sw_plane_wm->sagv_wm0)))
				continue;

			drm_err(&dev_priv->drm,
				"mismatch in WM pipe %c plane %d level %d (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				pipe_name(pipe), plane + 1, level,
				sw_plane_wm->wm[level].plane_en,
				sw_plane_wm->wm[level].plane_res_b,
				sw_plane_wm->wm[level].plane_res_l,
				hw_plane_wm->wm[level].plane_en,
				hw_plane_wm->wm[level].plane_res_b,
				hw_plane_wm->wm[level].plane_res_l);
		}

		/* transition watermark */
		if (!skl_wm_level_equals(&hw_plane_wm->trans_wm,
					 &sw_plane_wm->trans_wm)) {
			drm_err(&dev_priv->drm,
				"mismatch in trans WM pipe %c plane %d (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				pipe_name(pipe), plane + 1,
				sw_plane_wm->trans_wm.plane_en,
				sw_plane_wm->trans_wm.plane_res_b,
				sw_plane_wm->trans_wm.plane_res_l,
				hw_plane_wm->trans_wm.plane_en,
				hw_plane_wm->trans_wm.plane_res_b,
				hw_plane_wm->trans_wm.plane_res_l);
		}

		/* DDB allocation */
		hw_ddb_entry = &hw->ddb_y[plane];
		sw_ddb_entry = &new_crtc_state->wm.skl.plane_ddb_y[plane];

		if (!skl_ddb_entry_equal(hw_ddb_entry, sw_ddb_entry)) {
			drm_err(&dev_priv->drm,
				"mismatch in DDB state pipe %c plane %d (expected (%u,%u), found (%u,%u))\n",
				pipe_name(pipe), plane + 1,
				sw_ddb_entry->start, sw_ddb_entry->end,
				hw_ddb_entry->start, hw_ddb_entry->end);
		}
	}

	/*
	 * Cursor plane: same checks as the universal-plane loop above,
	 * but against the PLANE_CURSOR slot.  NOTE(review): the `if (1)`
	 * guard looks like a remnant of a former condition kept to
	 * preserve the block structure -- confirm against git history.
	 */
	if (1) {
		struct skl_plane_wm *hw_plane_wm, *sw_plane_wm;

		hw_plane_wm = &hw->wm.planes[PLANE_CURSOR];
		sw_plane_wm = &sw_wm->planes[PLANE_CURSOR];

		/* watermarks; level 0 may also legally match the SAGV wm */
		for (level = 0; level <= max_level; level++) {
			if (skl_wm_level_equals(&hw_plane_wm->wm[level],
						&sw_plane_wm->wm[level]) ||
			    (level == 0 && skl_wm_level_equals(&hw_plane_wm->wm[level],
							       &sw_plane_wm->sagv_wm0)))
				continue;

			drm_err(&dev_priv->drm,
				"mismatch in WM pipe %c cursor level %d (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				pipe_name(pipe), level,
				sw_plane_wm->wm[level].plane_en,
				sw_plane_wm->wm[level].plane_res_b,
				sw_plane_wm->wm[level].plane_res_l,
				hw_plane_wm->wm[level].plane_en,
				hw_plane_wm->wm[level].plane_res_b,
				hw_plane_wm->wm[level].plane_res_l);
		}

		/* transition watermark */
		if (!skl_wm_level_equals(&hw_plane_wm->trans_wm,
					 &sw_plane_wm->trans_wm)) {
			drm_err(&dev_priv->drm,
				"mismatch in trans WM pipe %c cursor (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				pipe_name(pipe),
				sw_plane_wm->trans_wm.plane_en,
				sw_plane_wm->trans_wm.plane_res_b,
				sw_plane_wm->trans_wm.plane_res_l,
				hw_plane_wm->trans_wm.plane_en,
				hw_plane_wm->trans_wm.plane_res_b,
				hw_plane_wm->trans_wm.plane_res_l);
		}

		/* DDB allocation */
		hw_ddb_entry = &hw->ddb_y[PLANE_CURSOR];
		sw_ddb_entry = &new_crtc_state->wm.skl.plane_ddb_y[PLANE_CURSOR];

		if (!skl_ddb_entry_equal(hw_ddb_entry, sw_ddb_entry)) {
			drm_err(&dev_priv->drm,
				"mismatch in DDB state pipe %c cursor (expected (%u,%u), found (%u,%u))\n",
				pipe_name(pipe),
				sw_ddb_entry->start, sw_ddb_entry->end,
				hw_ddb_entry->start, hw_ddb_entry->end);
		}
	}

	kfree(hw);
}
14602
14603static void
14604verify_connector_state(struct intel_atomic_state *state,
14605 struct intel_crtc *crtc)
14606{
14607 struct drm_connector *connector;
14608 struct drm_connector_state *new_conn_state;
14609 int i;
14610
14611 for_each_new_connector_in_state(&state->base, connector, new_conn_state, i) {
14612 struct drm_encoder *encoder = connector->encoder;
14613 struct intel_crtc_state *crtc_state = NULL;
14614
14615 if (new_conn_state->crtc != &crtc->base)
14616 continue;
14617
14618 if (crtc)
14619 crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
14620
14621 intel_connector_verify_state(crtc_state, new_conn_state);
14622
14623 I915_STATE_WARN(new_conn_state->best_encoder != encoder,
14624 "connector's atomic encoder doesn't match legacy encoder\n");
14625 }
14626}
14627
/*
 * Cross-check every encoder's state against the connector->encoder links
 * recorded in @state. Only encoders referenced by at least one connector
 * in the state (old or new) are examined.
 */
static void
verify_encoder_state(struct drm_i915_private *dev_priv, struct intel_atomic_state *state)
{
	struct intel_encoder *encoder;
	struct drm_connector *connector;
	struct drm_connector_state *old_conn_state, *new_conn_state;
	int i;

	for_each_intel_encoder(&dev_priv->drm, encoder) {
		/*
		 * found: some connector in the state referenced this encoder
		 *        in its old or new state, i.e. the encoder is affected.
		 * enabled: a new connector state still points at the encoder.
		 */
		bool enabled = false, found = false;
		enum pipe pipe;

		drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s]\n",
			    encoder->base.base.id,
			    encoder->base.name);

		for_each_oldnew_connector_in_state(&state->base, connector, old_conn_state,
						   new_conn_state, i) {
			if (old_conn_state->best_encoder == &encoder->base)
				found = true;

			if (new_conn_state->best_encoder != &encoder->base)
				continue;
			found = enabled = true;

			I915_STATE_WARN(new_conn_state->crtc !=
					encoder->base.crtc,
					"connector's crtc doesn't match encoder crtc\n");
		}

		/* skip encoders untouched by this atomic state */
		if (!found)
			continue;

		I915_STATE_WARN(!!encoder->base.crtc != enabled,
				"encoder's enabled state mismatch "
				"(expected %i, found %i)\n",
				!!encoder->base.crtc, enabled);

		/* a detached encoder must also be disabled in hardware */
		if (!encoder->base.crtc) {
			bool active;

			active = encoder->get_hw_state(encoder, &pipe);
			I915_STATE_WARN(active,
					"encoder detached but still enabled on pipe %c.\n",
					pipe_name(pipe));
		}
	}
}
14676
/*
 * Read the crtc's configuration back from the hardware and compare it
 * against the computed sw state (@new_crtc_state). @old_crtc_state is no
 * longer needed at this point and is recycled as scratch space to hold
 * the hw readout.
 */
static void
verify_crtc_state(struct intel_crtc *crtc,
		  struct intel_crtc_state *old_crtc_state,
		  struct intel_crtc_state *new_crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_encoder *encoder;
	struct intel_crtc_state *pipe_config = old_crtc_state;
	struct drm_atomic_state *state = old_crtc_state->uapi.state;
	struct intel_crtc *master = crtc;

	/* wipe the old state, but preserve its ->uapi.state backpointer */
	__drm_atomic_helper_crtc_destroy_state(&old_crtc_state->uapi);
	intel_crtc_free_hw_state(old_crtc_state);
	intel_crtc_state_reset(old_crtc_state, crtc);
	old_crtc_state->uapi.state = state;

	drm_dbg_kms(&dev_priv->drm, "[CRTC:%d:%s]\n", crtc->base.base.id,
		    crtc->base.name);

	pipe_config->hw.enable = new_crtc_state->hw.enable;

	intel_crtc_get_pipe_config(pipe_config);

	/*
	 * I830 keeps its pipes enabled even when nominally off, so the hw
	 * active state is overridden with the expected sw value there.
	 * NOTE(review): rationale inferred from the override below - the
	 * visible code only shows the IS_I830() special case.
	 */
	if (IS_I830(dev_priv) && pipe_config->hw.active)
		pipe_config->hw.active = new_crtc_state->hw.active;

	I915_STATE_WARN(new_crtc_state->hw.active != pipe_config->hw.active,
			"crtc active state doesn't match with hw state "
			"(expected %i, found %i)\n",
			new_crtc_state->hw.active, pipe_config->hw.active);

	I915_STATE_WARN(crtc->active != new_crtc_state->hw.active,
			"transitional active state does not match atomic hw state "
			"(expected %i, found %i)\n",
			new_crtc_state->hw.active, crtc->active);

	/* on a bigjoiner slave the encoders hang off the master crtc */
	if (new_crtc_state->bigjoiner_slave)
		master = new_crtc_state->bigjoiner_linked_crtc;

	for_each_encoder_on_crtc(dev, &master->base, encoder) {
		enum pipe pipe;
		bool active;

		active = encoder->get_hw_state(encoder, &pipe);
		I915_STATE_WARN(active != new_crtc_state->hw.active,
				"[ENCODER:%i] active %i with crtc active %i\n",
				encoder->base.base.id, active,
				new_crtc_state->hw.active);

		I915_STATE_WARN(active && master->pipe != pipe,
				"Encoder connected to wrong pipe %c\n",
				pipe_name(pipe));

		/* fold the encoder's view of the hw state into the readout */
		if (active)
			intel_encoder_get_config(encoder, pipe_config);
	}

	/* the full config comparison only makes sense for an active crtc */
	if (!new_crtc_state->hw.active)
		return;

	intel_pipe_config_sanity_check(dev_priv, pipe_config);

	if (!intel_pipe_config_compare(new_crtc_state,
				       pipe_config, false)) {
		I915_STATE_WARN(1, "pipe state doesn't match!\n");
		intel_dump_pipe_config(pipe_config, NULL, "[hw state]");
		intel_dump_pipe_config(new_crtc_state, NULL, "[sw state]");
	}
}
14748
14749static void
14750intel_verify_planes(struct intel_atomic_state *state)
14751{
14752 struct intel_plane *plane;
14753 const struct intel_plane_state *plane_state;
14754 int i;
14755
14756 for_each_new_intel_plane_in_state(state, plane,
14757 plane_state, i)
14758 assert_plane(plane, plane_state->planar_slave ||
14759 plane_state->uapi.visible);
14760}
14761
/*
 * Compare one shared DPLL's sw tracking (on, active_mask, crtc_mask and
 * the cached hw state) against the hardware. With @crtc == NULL only the
 * global bookkeeping is checked; otherwise the crtc's membership in the
 * DPLL's masks is verified as well.
 */
static void
verify_single_dpll_state(struct drm_i915_private *dev_priv,
			 struct intel_shared_dpll *pll,
			 struct intel_crtc *crtc,
			 struct intel_crtc_state *new_crtc_state)
{
	struct intel_dpll_hw_state dpll_hw_state;
	unsigned int crtc_mask;
	bool active;

	memset(&dpll_hw_state, 0, sizeof(dpll_hw_state));

	drm_dbg_kms(&dev_priv->drm, "%s\n", pll->info->name);

	active = intel_dpll_get_hw_state(dev_priv, pll, &dpll_hw_state);

	/* always-on PLLs have no meaningful on/off sw tracking to check */
	if (!(pll->info->flags & INTEL_DPLL_ALWAYS_ON)) {
		I915_STATE_WARN(!pll->on && pll->active_mask,
				"pll in active use but not on in sw tracking\n");
		I915_STATE_WARN(pll->on && !pll->active_mask,
				"pll is on but not used by any active crtc\n");
		I915_STATE_WARN(pll->on != active,
				"pll on state mismatch (expected %i, found %i)\n",
				pll->on, active);
	}

	if (!crtc) {
		/* active crtcs must be a subset of the crtcs holding a reference */
		I915_STATE_WARN(pll->active_mask & ~pll->state.crtc_mask,
				"more active pll users than references: %x vs %x\n",
				pll->active_mask, pll->state.crtc_mask);

		return;
	}

	crtc_mask = drm_crtc_mask(&crtc->base);

	if (new_crtc_state->hw.active)
		I915_STATE_WARN(!(pll->active_mask & crtc_mask),
				"pll active mismatch (expected pipe %c in active mask 0x%02x)\n",
				pipe_name(crtc->pipe), pll->active_mask);
	else
		I915_STATE_WARN(pll->active_mask & crtc_mask,
				"pll active mismatch (didn't expect pipe %c in active mask 0x%02x)\n",
				pipe_name(crtc->pipe), pll->active_mask);

	I915_STATE_WARN(!(pll->state.crtc_mask & crtc_mask),
			"pll enabled crtcs mismatch (expected 0x%x in 0x%02x)\n",
			crtc_mask, pll->state.crtc_mask);

	/* the cached hw state must match what is currently programmed */
	I915_STATE_WARN(pll->on && memcmp(&pll->state.hw_state,
					  &dpll_hw_state,
					  sizeof(dpll_hw_state)),
			"pll hw state mismatch\n");
}
14816
14817static void
14818verify_shared_dpll_state(struct intel_crtc *crtc,
14819 struct intel_crtc_state *old_crtc_state,
14820 struct intel_crtc_state *new_crtc_state)
14821{
14822 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
14823
14824 if (new_crtc_state->shared_dpll)
14825 verify_single_dpll_state(dev_priv, new_crtc_state->shared_dpll, crtc, new_crtc_state);
14826
14827 if (old_crtc_state->shared_dpll &&
14828 old_crtc_state->shared_dpll != new_crtc_state->shared_dpll) {
14829 unsigned int crtc_mask = drm_crtc_mask(&crtc->base);
14830 struct intel_shared_dpll *pll = old_crtc_state->shared_dpll;
14831
14832 I915_STATE_WARN(pll->active_mask & crtc_mask,
14833 "pll active mismatch (didn't expect pipe %c in active mask)\n",
14834 pipe_name(crtc->pipe));
14835 I915_STATE_WARN(pll->state.crtc_mask & crtc_mask,
14836 "pll enabled crtcs mismatch (found %x in enabled mask)\n",
14837 pipe_name(crtc->pipe));
14838 }
14839}
14840
14841static void
14842intel_modeset_verify_crtc(struct intel_crtc *crtc,
14843 struct intel_atomic_state *state,
14844 struct intel_crtc_state *old_crtc_state,
14845 struct intel_crtc_state *new_crtc_state)
14846{
14847 if (!needs_modeset(new_crtc_state) && !new_crtc_state->update_pipe)
14848 return;
14849
14850 verify_wm_state(crtc, new_crtc_state);
14851 verify_connector_state(state, crtc);
14852 verify_crtc_state(crtc, old_crtc_state, new_crtc_state);
14853 verify_shared_dpll_state(crtc, old_crtc_state, new_crtc_state);
14854}
14855
14856static void
14857verify_disabled_dpll_state(struct drm_i915_private *dev_priv)
14858{
14859 int i;
14860
14861 for (i = 0; i < dev_priv->dpll.num_shared_dpll; i++)
14862 verify_single_dpll_state(dev_priv,
14863 &dev_priv->dpll.shared_dplls[i],
14864 NULL, NULL);
14865}
14866
/*
 * Verify the parts of the state not tied to a specific enabled crtc:
 * all encoders, the connectors bound to no crtc (hence the NULL crtc),
 * and the global shared DPLL bookkeeping.
 */
static void
intel_modeset_verify_disabled(struct drm_i915_private *dev_priv,
			      struct intel_atomic_state *state)
{
	verify_encoder_state(dev_priv, state);
	verify_connector_state(state, NULL);
	verify_disabled_dpll_state(dev_priv);
}
14875
/*
 * Update the crtc's vblank timestamping constants and scanline counter
 * offset from the newly committed timings.
 */
static void
intel_crtc_update_active_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	drm_calc_timestamping_constants(&crtc->base, adjusted_mode);

	crtc->mode_flags = crtc_state->mode_flags;

	/*
	 * Pick the offset that converts the raw hw scanline counter value
	 * into a frame-relative scanline:
	 *
	 *  - gen2: offset of vtotal-1 (halved for interlaced modes, whose
	 *    crtc_vtotal covers both fields);
	 *  - DDI platforms driving HDMI: offset of 2;
	 *  - everything else: offset of 1.
	 *
	 * NOTE(review): the per-platform values encode hardware counter
	 * quirks (where the counter starts/when it increments) that are
	 * not visible here - confirm against bspec before changing them.
	 */
	if (IS_GEN(dev_priv, 2)) {
		int vtotal;

		vtotal = adjusted_mode->crtc_vtotal;
		if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
			vtotal /= 2;

		crtc->scanline_offset = vtotal - 1;
	} else if (HAS_DDI(dev_priv) &&
		   intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		crtc->scanline_offset = 2;
	} else {
		crtc->scanline_offset = 1;
	}
}
14930
14931static void intel_modeset_clear_plls(struct intel_atomic_state *state)
14932{
14933 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
14934 struct intel_crtc_state *new_crtc_state;
14935 struct intel_crtc *crtc;
14936 int i;
14937
14938 if (!dev_priv->display.crtc_compute_clock)
14939 return;
14940
14941 for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
14942 if (!needs_modeset(new_crtc_state))
14943 continue;
14944
14945 intel_release_shared_dplls(state, crtc);
14946 }
14947}
14948
14949
14950
14951
14952
14953
14954
/*
 * Record in ->hsw_workaround_pipe which pipe a newly enabled crtc must
 * wait on before enabling its planes. Only the bookkeeping happens here;
 * the actual waiting is done elsewhere in the modeset sequence.
 * NOTE(review): the hardware rationale (mode set sequence workaround
 * when going from zero/one pipe to multiple) is inferred from the
 * function name and the caller's IS_HASWELL() gate - confirm via bspec.
 */
static int hsw_mode_set_planes_workaround(struct intel_atomic_state *state)
{
	struct intel_crtc_state *crtc_state;
	struct intel_crtc *crtc;
	struct intel_crtc_state *first_crtc_state = NULL;
	struct intel_crtc_state *other_crtc_state = NULL;
	enum pipe first_pipe = INVALID_PIPE, enabled_pipe = INVALID_PIPE;
	int i;

	/* look at the first two crtcs being enabled during this modeset */
	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
		if (!crtc_state->hw.active ||
		    !needs_modeset(crtc_state))
			continue;

		if (first_crtc_state) {
			other_crtc_state = crtc_state;
			break;
		} else {
			first_crtc_state = crtc_state;
			first_pipe = crtc->pipe;
		}
	}

	/* no crtc being enabled - no workaround needed */
	if (!first_crtc_state)
		return 0;

	/* w/a possibly needed - check how many crtcs are already enabled */
	for_each_intel_crtc(state->base.dev, crtc) {
		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);

		crtc_state->hsw_workaround_pipe = INVALID_PIPE;

		if (!crtc_state->hw.active ||
		    needs_modeset(crtc_state))
			continue;

		/* 2 or more already-enabled crtcs means no need for the w/a */
		if (enabled_pipe != INVALID_PIPE)
			return 0;

		enabled_pipe = crtc->pipe;
	}

	/* prefer waiting on an already-enabled pipe, else on the first new one */
	if (enabled_pipe != INVALID_PIPE)
		first_crtc_state->hsw_workaround_pipe = enabled_pipe;
	else if (other_crtc_state)
		other_crtc_state->hsw_workaround_pipe = first_pipe;

	return 0;
}
15009
15010u8 intel_calc_active_pipes(struct intel_atomic_state *state,
15011 u8 active_pipes)
15012{
15013 const struct intel_crtc_state *crtc_state;
15014 struct intel_crtc *crtc;
15015 int i;
15016
15017 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
15018 if (crtc_state->hw.active)
15019 active_pipes |= BIT(crtc->pipe);
15020 else
15021 active_pipes &= ~BIT(crtc->pipe);
15022 }
15023
15024 return active_pipes;
15025}
15026
15027static int intel_modeset_checks(struct intel_atomic_state *state)
15028{
15029 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
15030
15031 state->modeset = true;
15032
15033 if (IS_HASWELL(dev_priv))
15034 return hsw_mode_set_planes_workaround(state);
15035
15036 return 0;
15037}
15038
15039
15040
15041
15042
15043
15044static int calc_watermark_data(struct intel_atomic_state *state)
15045{
15046 struct drm_device *dev = state->base.dev;
15047 struct drm_i915_private *dev_priv = to_i915(dev);
15048
15049
15050 if (dev_priv->display.compute_global_watermarks)
15051 return dev_priv->display.compute_global_watermarks(state);
15052
15053 return 0;
15054}
15055
15056static void intel_crtc_check_fastset(const struct intel_crtc_state *old_crtc_state,
15057 struct intel_crtc_state *new_crtc_state)
15058{
15059 if (!intel_pipe_config_compare(old_crtc_state, new_crtc_state, true))
15060 return;
15061
15062 new_crtc_state->uapi.mode_changed = false;
15063 new_crtc_state->update_pipe = true;
15064}
15065
/*
 * For a fastset, carry the currently programmed link M/N and DRRS values
 * over into the new state instead of the freshly computed ones.
 * NOTE(review): presumably these fields may legitimately diverge from the
 * computed values at runtime (e.g. DRRS refresh switching) and must not
 * be clobbered without a full modeset - confirm against the DRRS code.
 */
static void intel_crtc_copy_fastset(const struct intel_crtc_state *old_crtc_state,
				    struct intel_crtc_state *new_crtc_state)
{
	new_crtc_state->fdi_m_n = old_crtc_state->fdi_m_n;
	new_crtc_state->dp_m_n = old_crtc_state->dp_m_n;
	new_crtc_state->dp_m2_n2 = old_crtc_state->dp_m2_n2;
	new_crtc_state->has_drrs = old_crtc_state->has_drrs;
}
15082
15083static int intel_crtc_add_planes_to_state(struct intel_atomic_state *state,
15084 struct intel_crtc *crtc,
15085 u8 plane_ids_mask)
15086{
15087 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
15088 struct intel_plane *plane;
15089
15090 for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) {
15091 struct intel_plane_state *plane_state;
15092
15093 if ((plane_ids_mask & BIT(plane->id)) == 0)
15094 continue;
15095
15096 plane_state = intel_atomic_get_plane_state(state, plane);
15097 if (IS_ERR(plane_state))
15098 return PTR_ERR(plane_state);
15099 }
15100
15101 return 0;
15102}
15103
15104static bool active_planes_affects_min_cdclk(struct drm_i915_private *dev_priv)
15105{
15106
15107 return IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv) ||
15108 IS_CHERRYVIEW(dev_priv) || IS_VALLEYVIEW(dev_priv) ||
15109 IS_IVYBRIDGE(dev_priv) || (INTEL_GEN(dev_priv) >= 11);
15110}
15111
15112static int intel_crtc_add_bigjoiner_planes(struct intel_atomic_state *state,
15113 struct intel_crtc *crtc,
15114 struct intel_crtc *other)
15115{
15116 const struct intel_plane_state *plane_state;
15117 struct intel_plane *plane;
15118 u8 plane_ids = 0;
15119 int i;
15120
15121 for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
15122 if (plane->pipe == crtc->pipe)
15123 plane_ids |= BIT(plane->id);
15124 }
15125
15126 return intel_crtc_add_planes_to_state(state, other, plane_ids);
15127}
15128
15129static int intel_bigjoiner_add_affected_planes(struct intel_atomic_state *state)
15130{
15131 const struct intel_crtc_state *crtc_state;
15132 struct intel_crtc *crtc;
15133 int i;
15134
15135 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
15136 int ret;
15137
15138 if (!crtc_state->bigjoiner)
15139 continue;
15140
15141 ret = intel_crtc_add_bigjoiner_planes(state, crtc,
15142 crtc_state->bigjoiner_linked_crtc);
15143 if (ret)
15144 return ret;
15145 }
15146
15147 return 0;
15148}
15149
/*
 * Run the per-plane atomic checks and, on platforms where the active
 * plane count influences the minimum cdclk, pull the additional planes
 * of affected crtcs into the state.
 */
static int intel_atomic_check_planes(struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_plane_state *plane_state;
	struct intel_plane *plane;
	struct intel_crtc *crtc;
	int i, ret;

	ret = icl_add_linked_planes(state);
	if (ret)
		return ret;

	ret = intel_bigjoiner_add_affected_planes(state);
	if (ret)
		return ret;

	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		ret = intel_plane_atomic_check(state, plane);
		if (ret) {
			drm_dbg_atomic(&dev_priv->drm,
				       "[PLANE:%d:%s] atomic driver check failed\n",
				       plane->base.base.id, plane->base.name);
			return ret;
		}
	}

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		u8 old_active_planes, new_active_planes;

		ret = icl_check_nv12_planes(new_crtc_state);
		if (ret)
			return ret;

		/*
		 * The rest only matters when the number of active planes
		 * feeds into the minimum cdclk computation.
		 */
		if (!active_planes_affects_min_cdclk(dev_priv))
			continue;

		/* the cursor is excluded from the comparison */
		old_active_planes = old_crtc_state->active_planes & ~BIT(PLANE_CURSOR);
		new_active_planes = new_crtc_state->active_planes & ~BIT(PLANE_CURSOR);

		/*
		 * When the set of active planes changed, add all of the
		 * crtc's now-active planes to the state so the subsequent
		 * cdclk computation sees the full picture.
		 */
		if (old_active_planes == new_active_planes)
			continue;

		ret = intel_crtc_add_planes_to_state(state, crtc, new_active_planes);
		if (ret)
			return ret;
	}

	return 0;
}
15212
/*
 * Decide whether a full cdclk recomputation is required, setting
 * *need_cdclk_calc accordingly. Inputs: each plane's minimum cdclk, a
 * changed force_min_cdclk, and the bandwidth-derived minimum cdclk.
 */
static int intel_atomic_check_cdclk(struct intel_atomic_state *state,
				    bool *need_cdclk_calc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	const struct intel_cdclk_state *old_cdclk_state;
	const struct intel_cdclk_state *new_cdclk_state;
	struct intel_plane_state *plane_state;
	struct intel_bw_state *new_bw_state;
	struct intel_plane *plane;
	int min_cdclk = 0;
	enum pipe pipe;
	int ret;
	int i;

	/*
	 * Per-plane minimum cdclk; may flag that a global recomputation
	 * is needed via *need_cdclk_calc.
	 */
	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		ret = intel_plane_calc_min_cdclk(state, plane, need_cdclk_calc);
		if (ret)
			return ret;
	}

	old_cdclk_state = intel_atomic_get_old_cdclk_state(state);
	new_cdclk_state = intel_atomic_get_new_cdclk_state(state);

	/* a changed force_min_cdclk always triggers a recomputation */
	if (new_cdclk_state &&
	    old_cdclk_state->force_min_cdclk != new_cdclk_state->force_min_cdclk)
		*need_cdclk_calc = true;

	ret = dev_priv->display.bw_calc_min_cdclk(state);
	if (ret)
		return ret;

	new_bw_state = intel_atomic_get_new_bw_state(state);

	/* nothing more to compare without both cdclk and bw state */
	if (!new_cdclk_state || !new_bw_state)
		return 0;

	for_each_pipe(dev_priv, pipe) {
		min_cdclk = max(new_cdclk_state->min_cdclk[pipe], min_cdclk);

		/*
		 * Recompute when the bandwidth-derived minimum exceeds the
		 * running maximum of the per-pipe minimums seen so far.
		 */
		if (new_bw_state->min_cdclk > min_cdclk)
			*need_cdclk_calc = true;
	}

	return 0;
}
15265
15266static int intel_atomic_check_crtcs(struct intel_atomic_state *state)
15267{
15268 struct intel_crtc_state *crtc_state;
15269 struct intel_crtc *crtc;
15270 int i;
15271
15272 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
15273 struct drm_i915_private *i915 = to_i915(crtc->base.dev);
15274 int ret;
15275
15276 ret = intel_crtc_atomic_check(state, crtc);
15277 if (ret) {
15278 drm_dbg_atomic(&i915->drm,
15279 "[CRTC:%d:%s] atomic driver check failed\n",
15280 crtc->base.base.id, crtc->base.name);
15281 return ret;
15282 }
15283 }
15284
15285 return 0;
15286}
15287
15288static bool intel_cpu_transcoders_need_modeset(struct intel_atomic_state *state,
15289 u8 transcoders)
15290{
15291 const struct intel_crtc_state *new_crtc_state;
15292 struct intel_crtc *crtc;
15293 int i;
15294
15295 for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
15296 if (new_crtc_state->hw.enable &&
15297 transcoders & BIT(new_crtc_state->cpu_transcoder) &&
15298 needs_modeset(new_crtc_state))
15299 return true;
15300 }
15301
15302 return false;
15303}
15304
/*
 * Validate and (re)establish the bigjoiner master/slave link for @crtc.
 * The slave is always the crtc of pipe + 1; it is claimed here and must
 * not be enabled as a normal crtc by userspace. Returns 0 on success,
 * -EINVAL when the slave pipe is unavailable or already claimed, or an
 * error from acquiring the slave crtc state.
 */
static int intel_atomic_check_bigjoiner(struct intel_atomic_state *state,
					struct intel_crtc *crtc,
					struct intel_crtc_state *old_crtc_state,
					struct intel_crtc_state *new_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc_state *slave_crtc_state, *master_crtc_state;
	struct intel_crtc *slave, *master;

	/* slave being enabled: is the master still claiming this crtc? */
	if (old_crtc_state->bigjoiner_slave) {
		slave = crtc;
		master = old_crtc_state->bigjoiner_linked_crtc;
		master_crtc_state = intel_atomic_get_new_crtc_state(state, master);
		if (!master_crtc_state || !needs_modeset(master_crtc_state))
			goto claimed;
	}

	if (!new_crtc_state->bigjoiner)
		return 0;

	/* the slave is always the next pipe; it must exist */
	if (1 + crtc->pipe >= INTEL_NUM_PIPES(dev_priv)) {
		DRM_DEBUG_KMS("[CRTC:%d:%s] Big joiner configuration requires "
			      "CRTC + 1 to be used, doesn't exist\n",
			      crtc->base.base.id, crtc->base.name);
		return -EINVAL;
	}

	slave = new_crtc_state->bigjoiner_linked_crtc =
		intel_get_crtc_for_pipe(dev_priv, crtc->pipe + 1);
	slave_crtc_state = intel_atomic_get_crtc_state(&state->base, slave);
	master = crtc;
	if (IS_ERR(slave_crtc_state))
		return PTR_ERR(slave_crtc_state);

	/* master being enabled: was the slave already configured? */
	if (slave_crtc_state->uapi.enable)
		goto claimed;

	DRM_DEBUG_KMS("[CRTC:%d:%s] Used as slave for big joiner\n",
		      slave->base.base.id, slave->base.name);

	return copy_bigjoiner_crtc_state(slave_crtc_state, new_crtc_state);

claimed:
	DRM_DEBUG_KMS("[CRTC:%d:%s] Slave is enabled as normal CRTC, but "
		      "[CRTC:%d:%s] claiming this CRTC for bigjoiner.\n",
		      slave->base.base.id, slave->base.name,
		      master->base.base.id, master->base.name);
	return -EINVAL;
}
15356
15357static int kill_bigjoiner_slave(struct intel_atomic_state *state,
15358 struct intel_crtc_state *master_crtc_state)
15359{
15360 struct intel_crtc_state *slave_crtc_state =
15361 intel_atomic_get_crtc_state(&state->base,
15362 master_crtc_state->bigjoiner_linked_crtc);
15363
15364 if (IS_ERR(slave_crtc_state))
15365 return PTR_ERR(slave_crtc_state);
15366
15367 slave_crtc_state->bigjoiner = master_crtc_state->bigjoiner = false;
15368 slave_crtc_state->bigjoiner_slave = master_crtc_state->bigjoiner_slave = false;
15369 slave_crtc_state->bigjoiner_linked_crtc = master_crtc_state->bigjoiner_linked_crtc = NULL;
15370 intel_crtc_copy_uapi_to_hw_state(state, slave_crtc_state);
15371 return 0;
15372}
15373
15374
15375
15376
15377
15378
15379
15380
15381
15382
15383
15384
15385
15386
15387
15388
15389
15390
15391
/*
 * Validate an async flip (DRM_MODE_PAGE_FLIP_ASYNC): an async flip may
 * only change the plane surface address, so reject any state that needs
 * a modeset, has an inactive crtc, changes the active plane set, or
 * alters a plane's stride, tiling, format, rotation, geometry, blending
 * or color properties. Linear and CCS framebuffers are rejected too.
 */
static int intel_atomic_check_async(struct intel_atomic_state *state)
{
	struct drm_i915_private *i915 = to_i915(state->base.dev);
	const struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	const struct intel_plane_state *new_plane_state, *old_plane_state;
	struct intel_crtc *crtc;
	struct intel_plane *plane;
	int i;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (needs_modeset(new_crtc_state)) {
			drm_dbg_kms(&i915->drm, "Modeset Required. Async flip not supported\n");
			return -EINVAL;
		}

		if (!new_crtc_state->hw.active) {
			drm_dbg_kms(&i915->drm, "CRTC inactive\n");
			return -EINVAL;
		}
		if (old_crtc_state->active_planes != new_crtc_state->active_planes) {
			drm_dbg_kms(&i915->drm,
				    "Active planes cannot be changed during async flip\n");
			return -EINVAL;
		}
	}

	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
					     new_plane_state, i) {
		/*
		 * TODO: async flip is currently supported for the primary
		 * plane only (the page flip IOCTL path); support for other
		 * planes should be added when async flip is wired into the
		 * atomic IOCTL path.
		 */
		if (plane->id != PLANE_PRIMARY)
			return -EINVAL;

		/*
		 * FIXME: the accepted modifier list is kept generic for all
		 * platforms; may need to be narrowed/extended per platform.
		 */
		switch (new_plane_state->hw.fb->modifier) {
		case I915_FORMAT_MOD_X_TILED:
		case I915_FORMAT_MOD_Y_TILED:
		case I915_FORMAT_MOD_Yf_TILED:
			break;
		default:
			drm_dbg_kms(&i915->drm,
				    "Linear memory/CCS does not support async flips\n");
			return -EINVAL;
		}

		if (old_plane_state->color_plane[0].stride !=
		    new_plane_state->color_plane[0].stride) {
			drm_dbg_kms(&i915->drm, "Stride cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (old_plane_state->hw.fb->modifier !=
		    new_plane_state->hw.fb->modifier) {
			drm_dbg_kms(&i915->drm,
				    "Framebuffer modifiers cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (old_plane_state->hw.fb->format !=
		    new_plane_state->hw.fb->format) {
			drm_dbg_kms(&i915->drm,
				    "Framebuffer format cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (old_plane_state->hw.rotation !=
		    new_plane_state->hw.rotation) {
			drm_dbg_kms(&i915->drm, "Rotation cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (!drm_rect_equals(&old_plane_state->uapi.src, &new_plane_state->uapi.src) ||
		    !drm_rect_equals(&old_plane_state->uapi.dst, &new_plane_state->uapi.dst)) {
			drm_dbg_kms(&i915->drm,
				    "Plane size/co-ordinates cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (old_plane_state->hw.alpha != new_plane_state->hw.alpha) {
			drm_dbg_kms(&i915->drm, "Alpha value cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (old_plane_state->hw.pixel_blend_mode !=
		    new_plane_state->hw.pixel_blend_mode) {
			drm_dbg_kms(&i915->drm,
				    "Pixel blend mode cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (old_plane_state->hw.color_encoding != new_plane_state->hw.color_encoding) {
			drm_dbg_kms(&i915->drm,
				    "Color encoding cannot be changed in async flip\n");
			return -EINVAL;
		}

		if (old_plane_state->hw.color_range != new_plane_state->hw.color_range) {
			drm_dbg_kms(&i915->drm, "Color range cannot be changed in async flip\n");
			return -EINVAL;
		}
	}

	return 0;
}
15506
15507static int intel_bigjoiner_add_affected_crtcs(struct intel_atomic_state *state)
15508{
15509 const struct intel_crtc_state *crtc_state;
15510 struct intel_crtc *crtc;
15511 int i;
15512
15513 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
15514 struct intel_crtc_state *linked_crtc_state;
15515
15516 if (!crtc_state->bigjoiner)
15517 continue;
15518
15519 linked_crtc_state = intel_atomic_get_crtc_state(&state->base,
15520 crtc_state->bigjoiner_linked_crtc);
15521 if (IS_ERR(linked_crtc_state))
15522 return PTR_ERR(linked_crtc_state);
15523 }
15524
15525 return 0;
15526}
15527
15528
15529
15530
15531
15532
/**
 * intel_atomic_check - validate state object
 * @dev: drm device
 * @_state: state to validate
 *
 * Top-level atomic ->atomic_check() hook: computes the full crtc state
 * for every modeset, decides between fastset and full modeset, and runs
 * the global plane/cdclk/bandwidth/watermark checks.
 */
static int intel_atomic_check(struct drm_device *dev,
			      struct drm_atomic_state *_state)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;
	int ret, i;
	bool any_ms = false;

	/* a changed "inherited" flag (BIOS takeover) forces a modeset */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->inherited != old_crtc_state->inherited)
			new_crtc_state->uapi.mode_changed = true;
	}

	ret = drm_atomic_helper_check_modeset(dev, &state->base);
	if (ret)
		goto fail;

	ret = intel_bigjoiner_add_affected_crtcs(state);
	if (ret)
		goto fail;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (!needs_modeset(new_crtc_state)) {
			/* light copy of the uapi state, no full recompute */
			intel_crtc_copy_uapi_to_hw_state_nomodeset(state, new_crtc_state);

			continue;
		}

		/* kill the old bigjoiner link, we may re-establish it below */
		if (old_crtc_state->bigjoiner && !old_crtc_state->bigjoiner_slave) {
			ret = kill_bigjoiner_slave(state, new_crtc_state);
			if (ret)
				goto fail;
		}

		if (!new_crtc_state->uapi.enable) {
			if (!new_crtc_state->bigjoiner_slave) {
				intel_crtc_copy_uapi_to_hw_state(state, new_crtc_state);
				any_ms = true;
			}
			continue;
		}

		ret = intel_crtc_prepare_cleared_state(state, new_crtc_state);
		if (ret)
			goto fail;

		ret = intel_modeset_pipe_config(state, new_crtc_state);
		if (ret)
			goto fail;

		ret = intel_atomic_check_bigjoiner(state, crtc, old_crtc_state,
						   new_crtc_state);
		if (ret)
			goto fail;
	}

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (!needs_modeset(new_crtc_state))
			continue;

		ret = intel_modeset_pipe_config_late(new_crtc_state);
		if (ret)
			goto fail;

		/* maybe the modeset can be downgraded to a fastset */
		intel_crtc_check_fastset(old_crtc_state, new_crtc_state);
	}

	/*
	 * Check whether a fastset is still allowed given other pipes and
	 * transcoders: an MST slave whose master transcoder needs a full
	 * modeset, or a port-synced crtc whose sync partner needs one, is
	 * upgraded back to a full modeset here.
	 */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (!new_crtc_state->hw.enable || needs_modeset(new_crtc_state))
			continue;

		if (intel_dp_mst_is_slave_trans(new_crtc_state)) {
			enum transcoder master = new_crtc_state->mst_master_transcoder;

			if (intel_cpu_transcoders_need_modeset(state, BIT(master))) {
				new_crtc_state->uapi.mode_changed = true;
				new_crtc_state->update_pipe = false;
			}
		}

		if (is_trans_port_sync_mode(new_crtc_state)) {
			u8 trans = new_crtc_state->sync_mode_slaves_mask;

			if (new_crtc_state->master_transcoder != INVALID_TRANSCODER)
				trans |= BIT(new_crtc_state->master_transcoder);

			if (intel_cpu_transcoders_need_modeset(state, trans)) {
				new_crtc_state->uapi.mode_changed = true;
				new_crtc_state->update_pipe = false;
			}
		}
	}

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (needs_modeset(new_crtc_state)) {
			any_ms = true;
			continue;
		}

		if (!new_crtc_state->update_pipe)
			continue;

		/* fastset: keep the currently programmed fastset fields */
		intel_crtc_copy_fastset(old_crtc_state, new_crtc_state);
	}

	if (any_ms && !check_digital_port_conflicts(state)) {
		drm_dbg_kms(&dev_priv->drm,
			    "rejecting conflicting digital port configuration\n");
		ret = -EINVAL;
		goto fail;
	}

	ret = drm_dp_mst_atomic_check(&state->base);
	if (ret)
		goto fail;

	ret = intel_atomic_check_planes(state);
	if (ret)
		goto fail;

	/*
	 * distrust_bios_wm forces a full recomputation that is only acted
	 * upon when state->modeset is set, so force a modeset to keep the
	 * hardware and software watermark/dbuf state in sync.
	 * NOTE(review): rationale inferred from the any_ms usage below -
	 * confirm against the watermark/dbuf code before changing.
	 */
	if (dev_priv->wm.distrust_bios_wm)
		any_ms = true;

	intel_fbc_choose_crtc(dev_priv, state);
	ret = calc_watermark_data(state);
	if (ret)
		goto fail;

	ret = intel_bw_atomic_check(state);
	if (ret)
		goto fail;

	ret = intel_atomic_check_cdclk(state, &any_ms);
	if (ret)
		goto fail;

	if (any_ms) {
		ret = intel_modeset_checks(state);
		if (ret)
			goto fail;

		ret = intel_modeset_calc_cdclk(state);
		if (ret)
			/* NOTE(review): bypasses the "[failed]" dump at fail: */
			return ret;

		intel_modeset_clear_plls(state);
	}

	ret = intel_atomic_check_crtcs(state);
	if (ret)
		goto fail;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.async_flip) {
			ret = intel_atomic_check_async(state);
			if (ret)
				goto fail;
		}

		if (!needs_modeset(new_crtc_state) &&
		    !new_crtc_state->update_pipe)
			continue;

		intel_dump_pipe_config(new_crtc_state, state,
				       needs_modeset(new_crtc_state) ?
				       "[modeset]" : "[fastset]");
	}

	return 0;

 fail:
	if (ret == -EDEADLK)
		return ret;

	/*
	 * FIXME would probably be nice to know which crtc specifically
	 * caused the failure, in which case it should be dumped.
	 */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i)
		intel_dump_pipe_config(new_crtc_state, state, "[failed]");

	return ret;
}
15748
15749static int intel_atomic_prepare_commit(struct intel_atomic_state *state)
15750{
15751 struct intel_crtc_state *crtc_state;
15752 struct intel_crtc *crtc;
15753 int i, ret;
15754
15755 ret = drm_atomic_helper_prepare_planes(state->base.dev, &state->base);
15756 if (ret < 0)
15757 return ret;
15758
15759 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
15760 bool mode_changed = needs_modeset(crtc_state);
15761
15762 if (mode_changed || crtc_state->update_pipe ||
15763 crtc_state->uapi.color_mgmt_changed) {
15764 intel_dsb_prepare(crtc_state);
15765 }
15766 }
15767
15768 return 0;
15769}
15770
15771u32 intel_crtc_get_vblank_counter(struct intel_crtc *crtc)
15772{
15773 struct drm_device *dev = crtc->base.dev;
15774 struct drm_vblank_crtc *vblank = &dev->vblank[drm_crtc_index(&crtc->base)];
15775
15776 if (!vblank->max_vblank_count)
15777 return (u32)drm_crtc_accurate_vblank_count(&crtc->base);
15778
15779 return crtc->base.funcs->get_vblank_counter(&crtc->base);
15780}
15781
15782void intel_crtc_arm_fifo_underrun(struct intel_crtc *crtc,
15783 struct intel_crtc_state *crtc_state)
15784{
15785 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
15786
15787 if (!IS_GEN(dev_priv, 2) || crtc_state->active_planes)
15788 intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, true);
15789
15790 if (crtc_state->has_pch_encoder) {
15791 enum pipe pch_transcoder =
15792 intel_crtc_pch_transcoder(crtc);
15793
15794 intel_set_pch_fifo_underrun_reporting(dev_priv, pch_transcoder, true);
15795 }
15796}
15797
/*
 * Program the subset of pipe state that can be updated without a full
 * modeset ("fastset"): pipe source size, panel fitter/pipe scalers,
 * linetime watermarks and pipe chicken bits.
 */
static void intel_pipe_fastset(const struct intel_crtc_state *old_crtc_state,
			       const struct intel_crtc_state *new_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	intel_set_pipe_src_size(new_crtc_state);

	/* gen9+ uses the pipe scalers for panel fitting; older PCH split
	 * platforms have a dedicated panel fitter that may also need to
	 * be turned off when the old state had it enabled. */
	if (INTEL_GEN(dev_priv) >= 9) {
		skl_detach_scalers(new_crtc_state);

		if (new_crtc_state->pch_pfit.enabled)
			skl_pfit_enable(new_crtc_state);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		if (new_crtc_state->pch_pfit.enabled)
			ilk_pfit_enable(new_crtc_state);
		else if (old_crtc_state->pch_pfit.enabled)
			ilk_pfit_disable(old_crtc_state);
	}

	/* Linetime watermarks exist on HSW/BDW and gen9+. */
	if (INTEL_GEN(dev_priv) >= 9 ||
	    IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
		hsw_set_linetime_wm(new_crtc_state);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_set_pipe_chicken(crtc);
}
15842
/*
 * Commit pipe-level configuration during the plane update window.
 * During a full modeset the pipe config was already programmed when
 * the CRTC was enabled, so only the fastset/color pieces are handled
 * here; watermarks are updated in all cases.
 */
static void commit_pipe_config(struct intel_atomic_state *state,
			       struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	bool modeset = needs_modeset(new_crtc_state);

	if (!modeset) {
		if (new_crtc_state->uapi.color_mgmt_changed ||
		    new_crtc_state->update_pipe)
			intel_color_commit(new_crtc_state);

		if (INTEL_GEN(dev_priv) >= 9)
			skl_detach_scalers(new_crtc_state);

		if (INTEL_GEN(dev_priv) >= 9 || IS_BROADWELL(dev_priv))
			bdw_set_pipemisc(new_crtc_state);

		if (new_crtc_state->update_pipe)
			intel_pipe_fastset(old_crtc_state, new_crtc_state);

		intel_psr2_program_trans_man_trk_ctl(new_crtc_state);
	}

	if (dev_priv->display.atomic_update_watermarks)
		dev_priv->display.atomic_update_watermarks(state, crtc);
}
15877
/*
 * Enable a CRTC that is undergoing a full modeset; fastsets are
 * handled entirely by intel_update_crtc() and return early here.
 */
static void intel_enable_crtc(struct intel_atomic_state *state,
			      struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);

	if (!needs_modeset(new_crtc_state))
		return;

	intel_crtc_update_active_timings(new_crtc_state);

	dev_priv->display.crtc_enable(state, crtc);

	/* Pipe CRC is skipped for bigjoiner slaves -- presumably handled
	 * via the master pipe; TODO confirm. */
	if (new_crtc_state->bigjoiner_slave)
		return;

	intel_crtc_enable_pipe_crc(crtc);
}
15898
/*
 * Commit plane/pipe updates for one CRTC inside a vblank-evaded
 * critical section: pre-plane updates, pipe config, plane programming,
 * and post-update FIFO underrun re-arming.
 */
static void intel_update_crtc(struct intel_atomic_state *state,
			      struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	bool modeset = needs_modeset(new_crtc_state);

	if (!modeset) {
		/* Preloadable LUTs are written before the update window. */
		if (new_crtc_state->preload_luts &&
		    (new_crtc_state->uapi.color_mgmt_changed ||
		     new_crtc_state->update_pipe))
			intel_color_load_luts(new_crtc_state);

		intel_pre_plane_update(state, crtc);

		if (new_crtc_state->update_pipe)
			intel_encoders_update_pipe(state, crtc);
	}

	if (new_crtc_state->update_pipe && !new_crtc_state->enable_fbc)
		intel_fbc_disable(crtc);
	else
		intel_fbc_enable(state, crtc);

	/* Everything between update_start and update_end runs with
	 * vblank evasion to avoid tearing. */
	intel_pipe_update_start(new_crtc_state);

	commit_pipe_config(state, crtc);

	if (INTEL_GEN(dev_priv) >= 9)
		skl_update_planes_on_crtc(state, crtc);
	else
		i9xx_update_planes_on_crtc(state, crtc);

	intel_pipe_update_end(new_crtc_state);

	/* Re-arm FIFO underrun reporting only for fastsets of inherited
	 * (BIOS takeover) states -- modesets arm it elsewhere. */
	if (new_crtc_state->update_pipe && !modeset &&
	    old_crtc_state->inherited)
		intel_crtc_arm_fifo_underrun(crtc, new_crtc_state);
}
15948
/*
 * Fully disable a CRTC based on its old state: planes (including the
 * bigjoiner-linked CRTC's planes), pipe CRC, the pipe itself, FBC and
 * its shared DPLL. Must never be called for a bigjoiner slave state.
 */
static void intel_old_crtc_state_disables(struct intel_atomic_state *state,
					  struct intel_crtc_state *old_crtc_state,
					  struct intel_crtc_state *new_crtc_state,
					  struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);

	drm_WARN_ON(&dev_priv->drm, old_crtc_state->bigjoiner_slave);

	intel_crtc_disable_planes(state, crtc);

	/* The linked slave's planes go down with the master. */
	if (old_crtc_state->bigjoiner) {
		intel_crtc_disable_planes(state,
					  old_crtc_state->bigjoiner_linked_crtc);
		old_crtc_state->bigjoiner_linked_crtc->active = false;
	}

	/* CRC collection must stop before the pipe does. */
	intel_crtc_disable_pipe_crc(crtc);

	dev_priv->display.crtc_disable(state, crtc);
	crtc->active = false;
	intel_fbc_disable(crtc);
	intel_disable_shared_dpll(old_crtc_state);

	/* Non-GMCH platforms program "disabled" watermarks explicitly
	 * when the CRTC will stay off. */
	if (!new_crtc_state->hw.active &&
	    !HAS_GMCH(dev_priv) &&
	    dev_priv->display.initial_watermarks)
		dev_priv->display.initial_watermarks(state, crtc);
}
15988
/*
 * Disable all CRTCs undergoing a modeset. Two passes: transcoder sync
 * and MST slaves must go down before their masters, so they are
 * handled first and tracked in @handled; the second pass disables
 * everything else (skipping bigjoiner slaves, which are torn down via
 * their master).
 */
static void intel_commit_modeset_disables(struct intel_atomic_state *state)
{
	struct intel_crtc_state *new_crtc_state, *old_crtc_state;
	struct intel_crtc *crtc;
	u32 handled = 0;
	int i;

	/* Pass 1: port-sync and MST slave transcoders first. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (!needs_modeset(new_crtc_state) || old_crtc_state->bigjoiner)
			continue;

		if (!old_crtc_state->hw.active)
			continue;

		if (!is_trans_port_sync_slave(old_crtc_state) &&
		    !intel_dp_mst_is_slave_trans(old_crtc_state))
			continue;

		intel_pre_plane_update(state, crtc);
		intel_old_crtc_state_disables(state, old_crtc_state,
					      new_crtc_state, crtc);
		handled |= BIT(crtc->pipe);
	}

	/* Pass 2: everything not already handled above. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (!needs_modeset(new_crtc_state) ||
		    (handled & BIT(crtc->pipe)) ||
		    old_crtc_state->bigjoiner_slave)
			continue;

		intel_pre_plane_update(state, crtc);
		if (old_crtc_state->bigjoiner) {
			struct intel_crtc *slave =
				old_crtc_state->bigjoiner_linked_crtc;

			intel_pre_plane_update(state, slave);
		}

		if (old_crtc_state->hw.active)
			intel_old_crtc_state_disables(state, old_crtc_state,
						      new_crtc_state, crtc);
	}
}
16041
16042static void intel_commit_modeset_enables(struct intel_atomic_state *state)
16043{
16044 struct intel_crtc_state *new_crtc_state;
16045 struct intel_crtc *crtc;
16046 int i;
16047
16048 for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
16049 if (!new_crtc_state->hw.active)
16050 continue;
16051
16052 intel_enable_crtc(state, crtc);
16053 intel_update_crtc(state, crtc);
16054 }
16055}
16056
/*
 * skl+ variant of the modeset-enable step. DDB (display buffer)
 * allocations of active pipes must never overlap at any point in
 * time, so pipes are updated in an order where each pipe's new
 * allocation does not overlap any other pipe's still-current one,
 * waiting a vblank between steps when an allocation shrinks.
 */
static void skl_commit_modeset_enables(struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct skl_ddb_entry entries[I915_MAX_PIPES] = {};
	u8 update_pipes = 0, modeset_pipes = 0;
	int i;

	/* Seed @entries with the current DDB allocation of every pipe
	 * that stays active without a modeset. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		enum pipe pipe = crtc->pipe;

		if (!new_crtc_state->hw.active)
			continue;

		if (!needs_modeset(new_crtc_state)) {
			entries[pipe] = old_crtc_state->wm.skl.ddb;
			update_pipes |= BIT(pipe);
		} else {
			modeset_pipes |= BIT(pipe);
		}
	}

	/*
	 * Iteratively update the fastset pipes: on each sweep, commit
	 * every pipe whose new DDB allocation no longer overlaps any
	 * other tracked allocation. Because allocations never overlap
	 * in a valid state, each sweep makes progress until all are
	 * done.
	 */
	while (update_pipes) {
		for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
						    new_crtc_state, i) {
			enum pipe pipe = crtc->pipe;

			if ((update_pipes & BIT(pipe)) == 0)
				continue;

			if (skl_ddb_allocation_overlaps(&new_crtc_state->wm.skl.ddb,
							entries, I915_MAX_PIPES, pipe))
				continue;

			entries[pipe] = new_crtc_state->wm.skl.ddb;
			update_pipes &= ~BIT(pipe);

			intel_update_crtc(state, crtc);

			/* If this pipe's allocation changed and other
			 * pipes still need updating, wait a vblank so
			 * the hardware has latched the new allocation
			 * before any dependent pipe is touched. */
			if (!skl_ddb_entry_equal(&new_crtc_state->wm.skl.ddb,
						 &old_crtc_state->wm.skl.ddb) &&
			    (update_pipes | modeset_pipes))
				intel_wait_for_vblank(dev_priv, pipe);
		}
	}

	update_pipes = modeset_pipes;

	/*
	 * Enable ordinary modeset pipes first; MST slaves, port-sync
	 * masters and bigjoiner masters are deferred to the second
	 * loop -- presumably they depend on their counterpart pipe
	 * being up first; TODO confirm the exact dependency.
	 */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		enum pipe pipe = crtc->pipe;

		if ((modeset_pipes & BIT(pipe)) == 0)
			continue;

		if (intel_dp_mst_is_slave_trans(new_crtc_state) ||
		    is_trans_port_sync_master(new_crtc_state) ||
		    (new_crtc_state->bigjoiner && !new_crtc_state->bigjoiner_slave))
			continue;

		modeset_pipes &= ~BIT(pipe);

		intel_enable_crtc(state, crtc);
	}

	/* Then enable the deferred (dependent) modeset pipes. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		enum pipe pipe = crtc->pipe;

		if ((modeset_pipes & BIT(pipe)) == 0)
			continue;

		modeset_pipes &= ~BIT(pipe);

		intel_enable_crtc(state, crtc);
	}

	/* Finally run the plane/pipe update for all freshly enabled
	 * pipes, verifying their DDB allocation doesn't overlap. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		enum pipe pipe = crtc->pipe;

		if ((update_pipes & BIT(pipe)) == 0)
			continue;

		drm_WARN_ON(&dev_priv->drm, skl_ddb_allocation_overlaps(&new_crtc_state->wm.skl.ddb,
									entries, I915_MAX_PIPES, pipe));

		entries[pipe] = new_crtc_state->wm.skl.ddb;
		update_pipes &= ~BIT(pipe);

		intel_update_crtc(state, crtc);
	}

	drm_WARN_ON(&dev_priv->drm, modeset_pipes);
	drm_WARN_ON(&dev_priv->drm, update_pipes);
}
16178
16179static void intel_atomic_helper_free_state(struct drm_i915_private *dev_priv)
16180{
16181 struct intel_atomic_state *state, *next;
16182 struct llist_node *freed;
16183
16184 freed = llist_del_all(&dev_priv->atomic_helper.free_list);
16185 llist_for_each_entry_safe(state, next, freed, freed)
16186 drm_atomic_state_put(&state->base);
16187}
16188
16189static void intel_atomic_helper_free_state_worker(struct work_struct *work)
16190{
16191 struct drm_i915_private *dev_priv =
16192 container_of(work, typeof(*dev_priv), atomic_helper.free_work);
16193
16194 intel_atomic_helper_free_state(dev_priv);
16195}
16196
/*
 * Block until the commit's i915_sw_fence signals, or until a GPU
 * reset requiring a modeset is flagged. Waits on both wait queues
 * simultaneously so a reset can interrupt a fence wait.
 */
static void intel_atomic_commit_fence_wait(struct intel_atomic_state *intel_state)
{
	struct wait_queue_entry wait_fence, wait_reset;
	struct drm_i915_private *dev_priv = to_i915(intel_state->base.dev);

	init_wait_entry(&wait_fence, 0);
	init_wait_entry(&wait_reset, 0);
	for (;;) {
		prepare_to_wait(&intel_state->commit_ready.wait,
				&wait_fence, TASK_UNINTERRUPTIBLE);
		prepare_to_wait(bit_waitqueue(&dev_priv->gt.reset.flags,
					      I915_RESET_MODESET),
				&wait_reset, TASK_UNINTERRUPTIBLE);

		/* Re-check both conditions after queueing to avoid the
		 * lost-wakeup race inherent in prepare_to_wait(). */
		if (i915_sw_fence_done(&intel_state->commit_ready) ||
		    test_bit(I915_RESET_MODESET, &dev_priv->gt.reset.flags))
			break;

		schedule();
	}
	finish_wait(&intel_state->commit_ready.wait, &wait_fence);
	finish_wait(bit_waitqueue(&dev_priv->gt.reset.flags,
				  I915_RESET_MODESET),
		    &wait_reset);
}
16223
16224static void intel_cleanup_dsbs(struct intel_atomic_state *state)
16225{
16226 struct intel_crtc_state *old_crtc_state, *new_crtc_state;
16227 struct intel_crtc *crtc;
16228 int i;
16229
16230 for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
16231 new_crtc_state, i)
16232 intel_dsb_cleanup(old_crtc_state);
16233}
16234
/*
 * Deferred cleanup for a completed commit, run from a workqueue:
 * free DSBs, clean up planes, finish the drm commit bookkeeping and
 * drop the commit's state reference, then drain the deferred state
 * free list.
 */
static void intel_atomic_cleanup_work(struct work_struct *work)
{
	struct intel_atomic_state *state =
		container_of(work, struct intel_atomic_state, base.commit_work);
	struct drm_i915_private *i915 = to_i915(state->base.dev);

	intel_cleanup_dsbs(state);
	drm_atomic_helper_cleanup_planes(&i915->drm, &state->base);
	drm_atomic_helper_commit_cleanup_done(&state->base);
	drm_atomic_state_put(&state->base);

	intel_atomic_helper_free_state(i915);
}
16248
/*
 * The hardware-touching half of an atomic commit: waits for fences,
 * disables outgoing CRTCs, enables/updates the new configuration,
 * then performs post-update bookkeeping. Runs either synchronously
 * from intel_atomic_commit() or from a workqueue for nonblocking
 * commits.
 */
static void intel_atomic_commit_tail(struct intel_atomic_state *state)
{
	struct drm_device *dev = state->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc_state *new_crtc_state, *old_crtc_state;
	struct intel_crtc *crtc;
	u64 put_domains[I915_MAX_PIPES] = {};
	intel_wakeref_t wakeref = 0;
	int i;

	intel_atomic_commit_fence_wait(state);

	drm_atomic_helper_wait_for_dependencies(&state->base);

	if (state->modeset)
		wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_MODESET);

	/*
	 * Grab the power domains each modeset/fastset CRTC needs for the
	 * duration of the update; released again further down.
	 * NOTE(review): filled indexed by crtc->pipe here but released by
	 * the iterator index 'i' below -- these should coincide, verify.
	 */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (needs_modeset(new_crtc_state) ||
		    new_crtc_state->update_pipe) {

			put_domains[crtc->pipe] =
				modeset_get_crtc_power_domains(new_crtc_state);
		}
	}

	intel_commit_modeset_disables(state);

	/* Point crtc->config at the new states. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i)
		crtc->config = new_crtc_state;

	if (state->modeset) {
		drm_atomic_helper_update_legacy_modeset_state(dev, &state->base);

		intel_set_cdclk_pre_plane_update(state);

		/* All disables done, hardware should be quiescent. */
		intel_modeset_verify_disabled(dev_priv, state);
	}

	intel_sagv_pre_plane_update(state);

	/* Complete events for CRTCs that are being turned off: there
	 * will be no page flip / vblank for them. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		bool modeset = needs_modeset(new_crtc_state);

		if (modeset && !new_crtc_state->hw.active && new_crtc_state->uapi.event) {
			spin_lock_irq(&dev->event_lock);
			drm_crtc_send_vblank_event(&crtc->base,
						   new_crtc_state->uapi.event);
			spin_unlock_irq(&dev->event_lock);

			new_crtc_state->uapi.event = NULL;
		}
	}

	if (state->modeset)
		intel_encoders_update_prepare(state);

	intel_dbuf_pre_plane_update(state);

	/* Arm flip-done interrupts for async flips before enabling. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->uapi.async_flip)
			skl_enable_flip_done(crtc);
	}

	/* Now enable the clocks, plane, pipe, and connectors that we set up. */
	dev_priv->display.commit_modeset_enables(state);

	if (state->modeset) {
		intel_encoders_update_complete(state);

		intel_set_cdclk_post_plane_update(state);
	}

	/* Wait for flips to land before anything that depends on the
	 * new scanout being active (LUT loading below). */
	drm_atomic_helper_wait_for_flip_done(dev, &state->base);

	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->uapi.async_flip)
			skl_disable_flip_done(crtc);

		/* Non-preloadable LUTs are written after the flip. */
		if (new_crtc_state->hw.active &&
		    !needs_modeset(new_crtc_state) &&
		    !new_crtc_state->preload_luts &&
		    (new_crtc_state->uapi.color_mgmt_changed ||
		     new_crtc_state->update_pipe))
			intel_color_load_luts(new_crtc_state);
	}

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		/* gen2: re-enable CPU FIFO underrun reporting once
		 * planes are (re)enabled; see intel_crtc_arm_fifo_underrun. */
		if (IS_GEN(dev_priv, 2) && planes_enabling(old_crtc_state, new_crtc_state))
			intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, true);

		if (dev_priv->display.optimize_watermarks)
			dev_priv->display.optimize_watermarks(state, crtc);
	}

	intel_dbuf_post_plane_update(state);

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		intel_post_plane_update(state, crtc);

		if (put_domains[i])
			modeset_put_power_domains(dev_priv, put_domains[i]);

		intel_modeset_verify_crtc(crtc, state, old_crtc_state, new_crtc_state);

		/* Hand the DSB to the old state so the deferred cleanup
		 * work (which iterates old states) frees it. */
		old_crtc_state->dsb = fetch_and_zero(&new_crtc_state->dsb);
	}

	/* Underruns don't raise interrupts; poll for any that occurred
	 * during the update. */
	intel_check_cpu_fifo_underruns(dev_priv);
	intel_check_pch_fifo_underruns(dev_priv);

	if (state->modeset)
		intel_verify_planes(state);

	intel_sagv_post_plane_update(state);

	drm_atomic_helper_commit_hw_done(&state->base);

	if (state->modeset) {
		/* Unclaimed-register detection was unreliable while
		 * power wells toggled; re-arm it now. */
		intel_uncore_arm_unclaimed_mmio_detection(&dev_priv->uncore);
		intel_display_power_put(dev_priv, POWER_DOMAIN_MODESET, wakeref);
	}
	intel_runtime_pm_put(&dev_priv->runtime_pm, state->wakeref);

	/* Defer the remaining (potentially sleeping) cleanup to a
	 * high-priority worker so the commit path returns quickly. */
	INIT_WORK(&state->base.commit_work, intel_atomic_cleanup_work);
	queue_work(system_highpri_wq, &state->base.commit_work);
}
16425
16426static void intel_atomic_commit_work(struct work_struct *work)
16427{
16428 struct intel_atomic_state *state =
16429 container_of(work, struct intel_atomic_state, base.commit_work);
16430
16431 intel_atomic_commit_tail(state);
16432}
16433
/*
 * i915_sw_fence notify callback for the commit-ready fence. On
 * FENCE_FREE the state is queued on a free list and released from a
 * worker, since this callback may run in a context where dropping the
 * final reference directly is not allowed.
 */
static int __i915_sw_fence_call
intel_atomic_commit_ready(struct i915_sw_fence *fence,
			  enum i915_sw_fence_notify notify)
{
	struct intel_atomic_state *state =
		container_of(fence, struct intel_atomic_state, commit_ready);

	switch (notify) {
	case FENCE_COMPLETE:
		/* Nothing to do: the wait in intel_atomic_commit_fence_wait()
		 * observes completion via i915_sw_fence_done(). */
		break;
	case FENCE_FREE:
	{
		struct intel_atomic_helper *helper =
			&to_i915(state->base.dev)->atomic_helper;

		if (llist_add(&state->freed, &helper->free_list))
			schedule_work(&helper->free_work);
		break;
	}
	}

	return NOTIFY_DONE;
}
16458
16459static void intel_atomic_track_fbs(struct intel_atomic_state *state)
16460{
16461 struct intel_plane_state *old_plane_state, *new_plane_state;
16462 struct intel_plane *plane;
16463 int i;
16464
16465 for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
16466 new_plane_state, i)
16467 intel_frontbuffer_track(to_intel_frontbuffer(old_plane_state->hw.fb),
16468 to_intel_frontbuffer(new_plane_state->hw.fb),
16469 plane->frontbuffer_bit);
16470}
16471
/*
 * The driver's drm_mode_config_funcs.atomic_commit hook: prepares the
 * commit, swaps in the new state, and either runs the tail directly
 * (blocking) or queues it on the modeset/flip workqueue (nonblocking).
 * Returns 0 on success or a negative error code; on failure all
 * references and the runtime-pm wakeref taken here are released.
 */
static int intel_atomic_commit(struct drm_device *dev,
			       struct drm_atomic_state *_state,
			       bool nonblock)
{
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct drm_i915_private *dev_priv = to_i915(dev);
	int ret = 0;

	/* Held until the tail (or an error path) releases it. */
	state->wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);

	drm_atomic_state_get(&state->base);
	i915_sw_fence_init(&state->commit_ready,
			   intel_atomic_commit_ready);

	/*
	 * Pre-gen9 legacy cursor updates that also need a post-vblank
	 * watermark update cannot take the fast legacy path; demote
	 * them to a regular commit.
	 */
	if (INTEL_GEN(dev_priv) < 9 && state->base.legacy_cursor_update) {
		struct intel_crtc_state *new_crtc_state;
		struct intel_crtc *crtc;
		int i;

		for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i)
			if (new_crtc_state->wm.need_postvbl_update ||
			    new_crtc_state->update_wm_post)
				state->base.legacy_cursor_update = false;
	}

	ret = intel_atomic_prepare_commit(state);
	if (ret) {
		drm_dbg_atomic(&dev_priv->drm,
			       "Preparing state failed with %i\n", ret);
		i915_sw_fence_commit(&state->commit_ready);
		intel_runtime_pm_put(&dev_priv->runtime_pm, state->wakeref);
		return ret;
	}

	ret = drm_atomic_helper_setup_commit(&state->base, nonblock);
	if (!ret)
		ret = drm_atomic_helper_swap_state(&state->base, true);
	if (!ret)
		intel_atomic_swap_global_state(state);

	if (ret) {
		struct intel_crtc_state *new_crtc_state;
		struct intel_crtc *crtc;
		int i;

		i915_sw_fence_commit(&state->commit_ready);

		/* DSBs were prepared above; free them since the commit
		 * tail will never run. */
		for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i)
			intel_dsb_cleanup(new_crtc_state);

		drm_atomic_helper_cleanup_planes(dev, &state->base);
		intel_runtime_pm_put(&dev_priv->runtime_pm, state->wakeref);
		return ret;
	}
	dev_priv->wm.distrust_bios_wm = false;
	intel_shared_dpll_swap_state(state);
	intel_atomic_track_fbs(state);

	/* Extra reference consumed by intel_atomic_cleanup_work(). */
	drm_atomic_state_get(&state->base);
	INIT_WORK(&state->base.commit_work, intel_atomic_commit_work);

	i915_sw_fence_commit(&state->commit_ready);
	if (nonblock && state->modeset) {
		queue_work(dev_priv->modeset_wq, &state->base.commit_work);
	} else if (nonblock) {
		queue_work(dev_priv->flip_wq, &state->base.commit_work);
	} else {
		/* Blocking commits must not overtake queued modesets. */
		if (state->modeset)
			flush_workqueue(dev_priv->modeset_wq);
		intel_atomic_commit_tail(state);
	}

	return 0;
}
16563
/* Wait-queue entry used to boost GPU frequency for a request whose
 * completion gates an upcoming page flip; see add_rps_boost_after_vblank(). */
struct wait_rps_boost {
	struct wait_queue_entry wait;	/* queued on the CRTC's vblank waitqueue */

	struct drm_crtc *crtc;		/* CRTC whose vblank reference we hold */
	struct i915_request *request;	/* request to boost (holds a reference) */
};
16570
/*
 * Vblank waitqueue callback: boost the GPU clocks for the flip's
 * request if it hasn't started executing yet, then drop the request
 * and vblank references and free the entry. Returns 1 to indicate the
 * wait entry has been consumed.
 */
static int do_rps_boost(struct wait_queue_entry *_wait,
			unsigned mode, int sync, void *key)
{
	struct wait_rps_boost *wait = container_of(_wait, typeof(*wait), wait);
	struct i915_request *rq = wait->request;

	/* A request that has already started needs no boost. */
	if (!i915_request_started(rq))
		intel_rps_boost(rq);
	i915_request_put(rq);

	drm_crtc_vblank_put(wait->crtc);

	list_del(&wait->wait.entry);
	kfree(wait);
	return 1;
}
16592
/*
 * If @fence is an i915 request, arrange for a GPU frequency boost at
 * the next vblank of @crtc (gen6+ only). Silently does nothing on
 * foreign fences, pre-gen6, vblank-get failure or allocation failure:
 * the boost is purely opportunistic.
 */
static void add_rps_boost_after_vblank(struct drm_crtc *crtc,
				       struct dma_fence *fence)
{
	struct wait_rps_boost *wait;

	if (!dma_fence_is_i915(fence))
		return;

	if (INTEL_GEN(to_i915(crtc->dev)) < 6)
		return;

	if (drm_crtc_vblank_get(crtc))
		return;

	wait = kmalloc(sizeof(*wait), GFP_KERNEL);
	if (!wait) {
		/* Balance the vblank reference taken above. */
		drm_crtc_vblank_put(crtc);
		return;
	}

	/* References released in do_rps_boost(). */
	wait->request = to_request(dma_fence_get(fence));
	wait->crtc = crtc;

	wait->wait.func = do_rps_boost;
	wait->wait.flags = 0;

	add_wait_queue(drm_crtc_vblank_waitqueue(crtc), &wait->wait);
}
16621
/*
 * Pin (and fence, if needed) the plane's framebuffer for scanout,
 * storing the resulting vma in @plane_state. Cursor planes on
 * platforms requiring physical cursor memory are first attached to a
 * physical allocation. Returns 0 on success or a negative error code.
 */
static int intel_plane_pin_fb(struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	struct drm_framebuffer *fb = plane_state->hw.fb;
	struct i915_vma *vma;

	if (plane->id == PLANE_CURSOR &&
	    INTEL_INFO(dev_priv)->display.cursor_needs_physical) {
		struct drm_i915_gem_object *obj = intel_fb_obj(fb);
		const int align = intel_cursor_alignment(dev_priv);
		int err;

		err = i915_gem_object_attach_phys(obj, align);
		if (err)
			return err;
	}

	vma = intel_pin_and_fence_fb_obj(fb,
					 &plane_state->view,
					 intel_plane_uses_fence(plane_state),
					 &plane_state->flags);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	/* Released again via intel_plane_unpin_fb(). */
	plane_state->vma = vma;

	return 0;
}
16651
16652static void intel_plane_unpin_fb(struct intel_plane_state *old_plane_state)
16653{
16654 struct i915_vma *vma;
16655
16656 vma = fetch_and_zero(&old_plane_state->vma);
16657 if (vma)
16658 intel_unpin_fb_vma(vma, old_plane_state->flags);
16659}
16660
16661static void fb_obj_bump_render_priority(struct drm_i915_gem_object *obj)
16662{
16663 struct i915_sched_attr attr = {
16664 .priority = I915_USER_PRIORITY(I915_PRIORITY_DISPLAY),
16665 };
16666
16667 i915_gem_object_wait_priority(obj, 0, &attr);
16668}
16669
16670
16671
16672
16673
16674
16675
16676
16677
16678
16679
16680
16681
/**
 * intel_prepare_plane_fb - Prepare fb for usage on plane
 * @_plane: drm plane to prepare for
 * @_new_plane_state: the plane state being prepared
 *
 * Prepares a framebuffer for usage on a display plane: pins its
 * backing storage, sets up fence waits on the commit's sw-fence, bumps
 * render priority, flushes the frontbuffer and opportunistically
 * boosts GPU clocks for the fence gating the flip.
 *
 * Returns 0 on success, negative error code on failure.
 */
int
intel_prepare_plane_fb(struct drm_plane *_plane,
		       struct drm_plane_state *_new_plane_state)
{
	struct intel_plane *plane = to_intel_plane(_plane);
	struct intel_plane_state *new_plane_state =
		to_intel_plane_state(_new_plane_state);
	struct intel_atomic_state *state =
		to_intel_atomic_state(new_plane_state->uapi.state);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct intel_plane_state *old_plane_state =
		intel_atomic_get_old_plane_state(state, plane);
	struct drm_i915_gem_object *obj = intel_fb_obj(new_plane_state->hw.fb);
	struct drm_i915_gem_object *old_obj = intel_fb_obj(old_plane_state->hw.fb);
	int ret;

	if (old_obj) {
		const struct intel_crtc_state *crtc_state =
			intel_atomic_get_new_crtc_state(state,
							to_intel_crtc(old_plane_state->hw.crtc));

		/*
		 * For modesets, wait for all rendering on the *old*
		 * object as well before committing -- presumably so the
		 * outgoing fb is idle before the pipe is torn down;
		 * TODO confirm rationale.
		 */
		if (needs_modeset(crtc_state)) {
			ret = i915_sw_fence_await_reservation(&state->commit_ready,
							      old_obj->base.resv, NULL,
							      false, 0,
							      GFP_KERNEL);
			if (ret < 0)
				return ret;
		}
	}

	/* Explicit fencing: wait on the user-supplied fence. */
	if (new_plane_state->uapi.fence) {
		ret = i915_sw_fence_await_dma_fence(&state->commit_ready,
						    new_plane_state->uapi.fence,
						    i915_fence_timeout(dev_priv),
						    GFP_KERNEL);
		if (ret < 0)
			return ret;
	}

	if (!obj)
		return 0;

	ret = i915_gem_object_pin_pages(obj);
	if (ret)
		return ret;

	ret = intel_plane_pin_fb(new_plane_state);

	i915_gem_object_unpin_pages(obj);
	if (ret)
		return ret;

	fb_obj_bump_render_priority(obj);
	i915_gem_object_flush_frontbuffer(obj, ORIGIN_DIRTYFB);

	if (!new_plane_state->uapi.fence) {
		struct dma_fence *fence;

		/* Implicit fencing: wait on the object's reservation. */
		ret = i915_sw_fence_await_reservation(&state->commit_ready,
						      obj->base.resv, NULL,
						      false,
						      i915_fence_timeout(dev_priv),
						      GFP_KERNEL);
		if (ret < 0)
			goto unpin_fb;

		fence = dma_resv_get_excl_rcu(obj->base.resv);
		if (fence) {
			add_rps_boost_after_vblank(new_plane_state->hw.crtc,
						   fence);
			dma_fence_put(fence);
		}
	} else {
		add_rps_boost_after_vblank(new_plane_state->hw.crtc,
					   new_plane_state->uapi.fence);
	}

	/*
	 * Mark the RPS state interactive while display updates are in
	 * flight; cleared again in intel_cleanup_plane_fb().
	 */
	if (!state->rps_interactive) {
		intel_rps_mark_interactive(&dev_priv->gt.rps, true);
		state->rps_interactive = true;
	}

	return 0;

unpin_fb:
	intel_plane_unpin_fb(new_plane_state);

	return ret;
}
16791
16792
16793
16794
16795
16796
16797
16798
/**
 * intel_cleanup_plane_fb - Cleans up an fb after plane use
 * @plane: drm plane to clean up for
 * @_old_plane_state: the state from the previous modeset
 *
 * Undoes intel_prepare_plane_fb(): clears the RPS interactive flag and
 * unpins the old framebuffer's vma. No-op when the old state had no fb.
 */
void
intel_cleanup_plane_fb(struct drm_plane *plane,
		       struct drm_plane_state *_old_plane_state)
{
	struct intel_plane_state *old_plane_state =
		to_intel_plane_state(_old_plane_state);
	struct intel_atomic_state *state =
		to_intel_atomic_state(old_plane_state->uapi.state);
	struct drm_i915_private *dev_priv = to_i915(plane->dev);
	struct drm_i915_gem_object *obj = intel_fb_obj(old_plane_state->hw.fb);

	if (!obj)
		return;

	if (state->rps_interactive) {
		intel_rps_mark_interactive(&dev_priv->gt.rps, false);
		state->rps_interactive = false;
	}

	/* Should only be called after a successful intel_prepare_plane_fb()! */
	intel_plane_unpin_fb(old_plane_state);
}
16821
16822
16823
16824
16825
16826
16827
16828
/**
 * intel_plane_destroy - common plane destruction
 * @plane: plane to destroy
 *
 * Cleans up the drm core state and frees the intel_plane wrapper.
 */
void intel_plane_destroy(struct drm_plane *plane)
{
	struct intel_plane *intel_plane = to_intel_plane(plane);

	drm_plane_cleanup(plane);
	kfree(intel_plane);
}
16834
16835static bool i8xx_plane_format_mod_supported(struct drm_plane *_plane,
16836 u32 format, u64 modifier)
16837{
16838 switch (modifier) {
16839 case DRM_FORMAT_MOD_LINEAR:
16840 case I915_FORMAT_MOD_X_TILED:
16841 break;
16842 default:
16843 return false;
16844 }
16845
16846 switch (format) {
16847 case DRM_FORMAT_C8:
16848 case DRM_FORMAT_RGB565:
16849 case DRM_FORMAT_XRGB1555:
16850 case DRM_FORMAT_XRGB8888:
16851 return modifier == DRM_FORMAT_MOD_LINEAR ||
16852 modifier == I915_FORMAT_MOD_X_TILED;
16853 default:
16854 return false;
16855 }
16856}
16857
16858static bool i965_plane_format_mod_supported(struct drm_plane *_plane,
16859 u32 format, u64 modifier)
16860{
16861 switch (modifier) {
16862 case DRM_FORMAT_MOD_LINEAR:
16863 case I915_FORMAT_MOD_X_TILED:
16864 break;
16865 default:
16866 return false;
16867 }
16868
16869 switch (format) {
16870 case DRM_FORMAT_C8:
16871 case DRM_FORMAT_RGB565:
16872 case DRM_FORMAT_XRGB8888:
16873 case DRM_FORMAT_XBGR8888:
16874 case DRM_FORMAT_ARGB8888:
16875 case DRM_FORMAT_ABGR8888:
16876 case DRM_FORMAT_XRGB2101010:
16877 case DRM_FORMAT_XBGR2101010:
16878 case DRM_FORMAT_ARGB2101010:
16879 case DRM_FORMAT_ABGR2101010:
16880 case DRM_FORMAT_XBGR16161616F:
16881 return modifier == DRM_FORMAT_MOD_LINEAR ||
16882 modifier == I915_FORMAT_MOD_X_TILED;
16883 default:
16884 return false;
16885 }
16886}
16887
16888static bool intel_cursor_format_mod_supported(struct drm_plane *_plane,
16889 u32 format, u64 modifier)
16890{
16891 return modifier == DRM_FORMAT_MOD_LINEAR &&
16892 format == DRM_FORMAT_ARGB8888;
16893}
16894
/* Plane vfuncs for gen4+ (pre-skl) primary planes. */
static const struct drm_plane_funcs i965_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = intel_plane_destroy,
	.atomic_duplicate_state = intel_plane_duplicate_state,
	.atomic_destroy_state = intel_plane_destroy_state,
	.format_mod_supported = i965_plane_format_mod_supported,
};
16903
/* Plane vfuncs for gen2/3 primary planes. */
static const struct drm_plane_funcs i8xx_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = intel_plane_destroy,
	.atomic_duplicate_state = intel_plane_duplicate_state,
	.atomic_destroy_state = intel_plane_destroy_state,
	.format_mod_supported = i8xx_plane_format_mod_supported,
};
16912
/*
 * Legacy cursor ioctl fast path: update the cursor plane directly,
 * without a full atomic commit. Falls back to the generic
 * drm_atomic_helper_update_plane() slow path whenever the update is
 * anything other than a trivial fb/position change of an already
 * active cursor.
 */
static int
intel_legacy_cursor_update(struct drm_plane *_plane,
			   struct drm_crtc *_crtc,
			   struct drm_framebuffer *fb,
			   int crtc_x, int crtc_y,
			   unsigned int crtc_w, unsigned int crtc_h,
			   u32 src_x, u32 src_y,
			   u32 src_w, u32 src_h,
			   struct drm_modeset_acquire_ctx *ctx)
{
	struct intel_plane *plane = to_intel_plane(_plane);
	struct intel_crtc *crtc = to_intel_crtc(_crtc);
	struct intel_plane_state *old_plane_state =
		to_intel_plane_state(plane->base.state);
	struct intel_plane_state *new_plane_state;
	struct intel_crtc_state *crtc_state =
		to_intel_crtc_state(crtc->base.state);
	struct intel_crtc_state *new_crtc_state;
	int ret;

	/*
	 * Take the slow path when the crtc is off, a modeset or fastset is
	 * pending, or a bigjoiner configuration is in use.
	 */
	if (!crtc_state->hw.active || needs_modeset(crtc_state) ||
	    crtc_state->update_pipe || crtc_state->bigjoiner)
		goto slow;

	/*
	 * Don't do an async update if there is an outstanding commit still
	 * modifying this plane - our changes could otherwise be overwritten
	 * by the earlier synchronous update.
	 */
	if (old_plane_state->uapi.commit &&
	    !try_wait_for_completion(&old_plane_state->uapi.commit->hw_done))
		goto slow;

	/*
	 * Only a plain fb and/or position change qualifies for the fast
	 * path; any size change or crtc change goes through the full
	 * atomic machinery.
	 */
	if (old_plane_state->uapi.crtc != &crtc->base ||
	    old_plane_state->uapi.src_w != src_w ||
	    old_plane_state->uapi.src_h != src_h ||
	    old_plane_state->uapi.crtc_w != crtc_w ||
	    old_plane_state->uapi.crtc_h != crtc_h ||
	    !old_plane_state->uapi.fb != !fb)
		goto slow;

	new_plane_state = to_intel_plane_state(intel_plane_duplicate_state(&plane->base));
	if (!new_plane_state)
		return -ENOMEM;

	/* Duplicated only so the check below has a scratch crtc state. */
	new_crtc_state = to_intel_crtc_state(intel_crtc_duplicate_state(&crtc->base));
	if (!new_crtc_state) {
		ret = -ENOMEM;
		goto out_free;
	}

	drm_atomic_set_fb_for_plane(&new_plane_state->uapi, fb);

	new_plane_state->uapi.src_x = src_x;
	new_plane_state->uapi.src_y = src_y;
	new_plane_state->uapi.src_w = src_w;
	new_plane_state->uapi.src_h = src_h;
	new_plane_state->uapi.crtc_x = crtc_x;
	new_plane_state->uapi.crtc_y = crtc_y;
	new_plane_state->uapi.crtc_w = crtc_w;
	new_plane_state->uapi.crtc_h = crtc_h;

	intel_plane_copy_uapi_to_hw_state(new_plane_state, new_plane_state, crtc);

	ret = intel_plane_atomic_check_with_state(crtc_state, new_crtc_state,
						  old_plane_state, new_plane_state);
	if (ret)
		goto out_free;

	ret = intel_plane_pin_fb(new_plane_state);
	if (ret)
		goto out_free;

	intel_frontbuffer_flush(to_intel_frontbuffer(new_plane_state->hw.fb),
				ORIGIN_FLIP);
	intel_frontbuffer_track(to_intel_frontbuffer(old_plane_state->hw.fb),
				to_intel_frontbuffer(new_plane_state->hw.fb),
				plane->frontbuffer_bit);

	/* Swap in the new plane state; the old one is freed at out_free. */
	plane->base.state = &new_plane_state->uapi;

	/*
	 * We cannot swap crtc_state: it may be in use by a concurrent atomic
	 * commit or page flip, and swapping + destroying the old state would
	 * cause a use-after-free there. Only active_planes is copied over
	 * for our internal bookkeeping; either value does the right thing
	 * when updating planes atomically, since a cursor that was part of
	 * an atomic update would have taken the slow path instead.
	 */
	crtc_state->active_planes = new_crtc_state->active_planes;

	if (new_plane_state->uapi.visible)
		intel_update_plane(plane, crtc_state, new_plane_state);
	else
		intel_disable_plane(plane, crtc_state);

	intel_plane_unpin_fb(old_plane_state);

out_free:
	/* The scratch crtc state is never installed, always destroy it. */
	if (new_crtc_state)
		intel_crtc_destroy_state(&crtc->base, &new_crtc_state->uapi);
	/* On failure the new state was never installed; on success the old
	 * state was replaced above - destroy whichever is now unused. */
	if (ret)
		intel_plane_destroy_state(&plane->base, &new_plane_state->uapi);
	else
		intel_plane_destroy_state(&plane->base, &old_plane_state->uapi);
	return ret;

slow:
	return drm_atomic_helper_update_plane(&plane->base, &crtc->base, fb,
					      crtc_x, crtc_y, crtc_w, crtc_h,
					      src_x, src_y, src_w, src_h, ctx);
}
17039
/* Cursor plane vfuncs; .update_plane uses the legacy fast path above. */
static const struct drm_plane_funcs intel_cursor_plane_funcs = {
	.update_plane = intel_legacy_cursor_update,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = intel_plane_destroy,
	.atomic_duplicate_state = intel_plane_duplicate_state,
	.atomic_destroy_state = intel_plane_destroy_state,
	.format_mod_supported = intel_cursor_format_mod_supported,
};
17048
17049static bool i9xx_plane_has_fbc(struct drm_i915_private *dev_priv,
17050 enum i9xx_plane_id i9xx_plane)
17051{
17052 if (!HAS_FBC(dev_priv))
17053 return false;
17054
17055 if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
17056 return i9xx_plane == PLANE_A;
17057 else if (IS_IVYBRIDGE(dev_priv))
17058 return i9xx_plane == PLANE_A || i9xx_plane == PLANE_B ||
17059 i9xx_plane == PLANE_C;
17060 else if (INTEL_GEN(dev_priv) >= 4)
17061 return i9xx_plane == PLANE_A || i9xx_plane == PLANE_B;
17062 else
17063 return i9xx_plane == PLANE_A;
17064}
17065
/*
 * Create and register the primary plane for @pipe on pre-skl hardware
 * (gen9+ is handed off to skl_universal_plane_create()). Picks the
 * plane<->pipe mapping, the supported format list, the plane vfuncs and
 * the per-platform min-cdclk hook, then registers the plane with the
 * drm core along with its rotation and zpos properties.
 *
 * Returns the new plane or an ERR_PTR on failure.
 */
static struct intel_plane *
intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	struct intel_plane *plane;
	const struct drm_plane_funcs *plane_funcs;
	unsigned int supported_rotations;
	const u32 *formats;
	int num_formats;
	int ret, zpos;

	if (INTEL_GEN(dev_priv) >= 9)
		return skl_universal_plane_create(dev_priv, pipe,
						  PLANE_PRIMARY);

	plane = intel_plane_alloc();
	if (IS_ERR(plane))
		return plane;

	plane->pipe = pipe;

	/*
	 * On gen2/3 with FBC and two pipes the plane<->pipe mapping is
	 * swapped (pipe 0 gets plane B and vice versa).
	 * NOTE(review): presumably so the FBC-capable plane ends up on the
	 * panel pipe - confirm against the FBC code.
	 */
	if (HAS_FBC(dev_priv) && INTEL_GEN(dev_priv) < 4 &&
	    INTEL_NUM_PIPES(dev_priv) == 2)
		plane->i9xx_plane = (enum i9xx_plane_id) !pipe;
	else
		plane->i9xx_plane = (enum i9xx_plane_id) pipe;
	plane->id = PLANE_PRIMARY;
	plane->frontbuffer_bit = INTEL_FRONTBUFFER(pipe, plane->id);

	plane->has_fbc = i9xx_plane_has_fbc(dev_priv, plane->i9xx_plane);
	if (plane->has_fbc) {
		struct intel_fbc *fbc = &dev_priv->fbc;

		fbc->possible_framebuffer_bits |= plane->frontbuffer_bit;
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		formats = vlv_primary_formats;
		num_formats = ARRAY_SIZE(vlv_primary_formats);
	} else if (INTEL_GEN(dev_priv) >= 4) {
		/*
		 * IVB gets its own format list (see ivb_primary_formats
		 * near the top of the file); other gen4+ platforms share
		 * the i965 list.
		 */
		if (IS_IVYBRIDGE(dev_priv)) {
			formats = ivb_primary_formats;
			num_formats = ARRAY_SIZE(ivb_primary_formats);
		} else {
			formats = i965_primary_formats;
			num_formats = ARRAY_SIZE(i965_primary_formats);
		}
	} else {
		formats = i8xx_primary_formats;
		num_formats = ARRAY_SIZE(i8xx_primary_formats);
	}

	if (INTEL_GEN(dev_priv) >= 4)
		plane_funcs = &i965_plane_funcs;
	else
		plane_funcs = &i8xx_plane_funcs;

	/* Per-platform minimum cdclk calculation for this plane. */
	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		plane->min_cdclk = vlv_plane_min_cdclk;
	else if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
		plane->min_cdclk = hsw_plane_min_cdclk;
	else if (IS_IVYBRIDGE(dev_priv))
		plane->min_cdclk = ivb_plane_min_cdclk;
	else
		plane->min_cdclk = i9xx_plane_min_cdclk;

	plane->max_stride = i9xx_plane_max_stride;
	plane->update_plane = i9xx_update_plane;
	plane->disable_plane = i9xx_disable_plane;
	plane->get_hw_state = i9xx_plane_get_hw_state;
	plane->check_plane = i9xx_plane_check;

	/*
	 * Name the plane after the pipe on platforms with a fixed
	 * plane<->pipe mapping, after the hw plane otherwise.
	 */
	if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
		ret = drm_universal_plane_init(&dev_priv->drm, &plane->base,
					       0, plane_funcs,
					       formats, num_formats,
					       i9xx_format_modifiers,
					       DRM_PLANE_TYPE_PRIMARY,
					       "primary %c", pipe_name(pipe));
	else
		ret = drm_universal_plane_init(&dev_priv->drm, &plane->base,
					       0, plane_funcs,
					       formats, num_formats,
					       i9xx_format_modifiers,
					       DRM_PLANE_TYPE_PRIMARY,
					       "plane %c",
					       plane_name(plane->i9xx_plane));
	if (ret)
		goto fail;

	if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B) {
		/* CHV pipe B additionally supports horizontal reflection. */
		supported_rotations =
			DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_180 |
			DRM_MODE_REFLECT_X;
	} else if (INTEL_GEN(dev_priv) >= 4) {
		supported_rotations =
			DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_180;
	} else {
		supported_rotations = DRM_MODE_ROTATE_0;
	}

	if (INTEL_GEN(dev_priv) >= 4)
		drm_plane_create_rotation_property(&plane->base,
						   DRM_MODE_ROTATE_0,
						   supported_rotations);

	/* Primary plane is always at the bottom of the zpos stack. */
	zpos = 0;
	drm_plane_create_zpos_immutable_property(&plane->base, zpos);

	drm_plane_helper_add(&plane->base, &intel_plane_helper_funcs);

	return plane;

fail:
	intel_plane_free(plane);

	return ERR_PTR(ret);
}
17199
/*
 * Create and register the cursor plane for @pipe, selecting the i845 or
 * i9xx style cursor hooks as appropriate, and attach its rotation, zpos
 * and (gen12+) damage-clip properties.
 *
 * Returns the new plane or an ERR_PTR on failure.
 */
static struct intel_plane *
intel_cursor_plane_create(struct drm_i915_private *dev_priv,
			  enum pipe pipe)
{
	struct intel_plane *cursor;
	int ret, zpos;

	cursor = intel_plane_alloc();
	if (IS_ERR(cursor))
		return cursor;

	cursor->pipe = pipe;
	cursor->i9xx_plane = (enum i9xx_plane_id) pipe;
	cursor->id = PLANE_CURSOR;
	cursor->frontbuffer_bit = INTEL_FRONTBUFFER(pipe, cursor->id);

	if (IS_I845G(dev_priv) || IS_I865G(dev_priv)) {
		cursor->max_stride = i845_cursor_max_stride;
		cursor->update_plane = i845_update_cursor;
		cursor->disable_plane = i845_disable_cursor;
		cursor->get_hw_state = i845_cursor_get_hw_state;
		cursor->check_plane = i845_check_cursor;
	} else {
		cursor->max_stride = i9xx_cursor_max_stride;
		cursor->update_plane = i9xx_update_cursor;
		cursor->disable_plane = i9xx_disable_cursor;
		cursor->get_hw_state = i9xx_cursor_get_hw_state;
		cursor->check_plane = i9xx_check_cursor;
	}

	/* ~0 marks the cached register values as unknown/invalid. */
	cursor->cursor.base = ~0;
	cursor->cursor.cntl = ~0;

	/* NOTE(review): size cache only used on these platforms - confirm. */
	if (IS_I845G(dev_priv) || IS_I865G(dev_priv) || HAS_CUR_FBC(dev_priv))
		cursor->cursor.size = ~0;

	ret = drm_universal_plane_init(&dev_priv->drm, &cursor->base,
				       0, &intel_cursor_plane_funcs,
				       intel_cursor_formats,
				       ARRAY_SIZE(intel_cursor_formats),
				       cursor_format_modifiers,
				       DRM_PLANE_TYPE_CURSOR,
				       "cursor %c", pipe_name(pipe));
	if (ret)
		goto fail;

	if (INTEL_GEN(dev_priv) >= 4)
		drm_plane_create_rotation_property(&cursor->base,
						   DRM_MODE_ROTATE_0,
						   DRM_MODE_ROTATE_0 |
						   DRM_MODE_ROTATE_180);

	/* Cursor sits on top of the primary and all sprite planes. */
	zpos = RUNTIME_INFO(dev_priv)->num_sprites[pipe] + 1;
	drm_plane_create_zpos_immutable_property(&cursor->base, zpos);

	if (INTEL_GEN(dev_priv) >= 12)
		drm_plane_enable_fb_damage_clips(&cursor->base);

	drm_plane_helper_add(&cursor->base, &intel_plane_helper_funcs);

	return cursor;

fail:
	intel_plane_free(cursor);

	return ERR_PTR(ret);
}
17267
/*
 * Common drm_crtc_funcs shared by every platform variant below; the
 * per-platform structs only add the vblank counter/irq hooks.
 */
#define INTEL_CRTC_FUNCS \
	.gamma_set = drm_atomic_helper_legacy_gamma_set, \
	.set_config = drm_atomic_helper_set_config, \
	.destroy = intel_crtc_destroy, \
	.page_flip = drm_atomic_helper_page_flip, \
	.atomic_duplicate_state = intel_crtc_duplicate_state, \
	.atomic_destroy_state = intel_crtc_destroy_state, \
	.set_crc_source = intel_crtc_set_crc_source, \
	.verify_crc_source = intel_crtc_verify_crc_source, \
	.get_crc_sources = intel_crtc_get_crc_sources
17278
/* BDW+ (non-GMCH): g4x-style hw frame counter, bdw vblank irqs. */
static const struct drm_crtc_funcs bdw_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = g4x_get_vblank_counter,
	.enable_vblank = bdw_enable_vblank,
	.disable_vblank = bdw_disable_vblank,
	.get_vblank_timestamp = intel_crtc_get_vblank_timestamp,
};

/* ILK through HSW: g4x-style hw frame counter, ilk vblank irqs. */
static const struct drm_crtc_funcs ilk_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = g4x_get_vblank_counter,
	.enable_vblank = ilk_enable_vblank,
	.disable_vblank = ilk_disable_vblank,
	.get_vblank_timestamp = intel_crtc_get_vblank_timestamp,
};

/* G4X/VLV/CHV: g4x-style hw frame counter, i965 vblank irqs. */
static const struct drm_crtc_funcs g4x_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = g4x_get_vblank_counter,
	.enable_vblank = i965_enable_vblank,
	.disable_vblank = i965_disable_vblank,
	.get_vblank_timestamp = intel_crtc_get_vblank_timestamp,
};

/* Gen4 (i965): i915-style hw frame counter, i965 vblank irqs. */
static const struct drm_crtc_funcs i965_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = i915_get_vblank_counter,
	.enable_vblank = i965_enable_vblank,
	.disable_vblank = i965_disable_vblank,
	.get_vblank_timestamp = intel_crtc_get_vblank_timestamp,
};

/* I915GM/I945GM: i915 frame counter, i915gm-specific vblank irqs. */
static const struct drm_crtc_funcs i915gm_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = i915_get_vblank_counter,
	.enable_vblank = i915gm_enable_vblank,
	.disable_vblank = i915gm_disable_vblank,
	.get_vblank_timestamp = intel_crtc_get_vblank_timestamp,
};

/* Other gen3: i915 frame counter, i8xx vblank irqs. */
static const struct drm_crtc_funcs i915_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = i915_get_vblank_counter,
	.enable_vblank = i8xx_enable_vblank,
	.disable_vblank = i8xx_disable_vblank,
	.get_vblank_timestamp = intel_crtc_get_vblank_timestamp,
};

/* Gen2: no hw frame counter at all, so no .get_vblank_counter hook. */
static const struct drm_crtc_funcs i8xx_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	/* no hw vblank counter */
	.enable_vblank = i8xx_enable_vblank,
	.disable_vblank = i8xx_disable_vblank,
	.get_vblank_timestamp = intel_crtc_get_vblank_timestamp,
};
17341
17342static struct intel_crtc *intel_crtc_alloc(void)
17343{
17344 struct intel_crtc_state *crtc_state;
17345 struct intel_crtc *crtc;
17346
17347 crtc = kzalloc(sizeof(*crtc), GFP_KERNEL);
17348 if (!crtc)
17349 return ERR_PTR(-ENOMEM);
17350
17351 crtc_state = intel_crtc_state_alloc(crtc);
17352 if (!crtc_state) {
17353 kfree(crtc);
17354 return ERR_PTR(-ENOMEM);
17355 }
17356
17357 crtc->base.state = &crtc_state->uapi;
17358 crtc->config = crtc_state;
17359
17360 return crtc;
17361}
17362
17363static void intel_crtc_free(struct intel_crtc *crtc)
17364{
17365 intel_crtc_destroy_state(&crtc->base, crtc->base.state);
17366 kfree(crtc);
17367}
17368
17369static void intel_plane_possible_crtcs_init(struct drm_i915_private *dev_priv)
17370{
17371 struct intel_plane *plane;
17372
17373 for_each_intel_plane(&dev_priv->drm, plane) {
17374 struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv,
17375 plane->pipe);
17376
17377 plane->base.possible_crtcs = drm_crtc_mask(&crtc->base);
17378 }
17379}
17380
/*
 * Create the crtc for @pipe: allocate it, create its primary, sprite and
 * cursor planes, pick the per-platform vblank funcs, register the crtc
 * with the drm core and set up the pipe/plane -> crtc lookup tables plus
 * the color management, CRC and (gen10+) scaling filter properties.
 *
 * Returns 0 or a negative error code.
 */
static int intel_crtc_init(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	struct intel_plane *primary, *cursor;
	const struct drm_crtc_funcs *funcs;
	struct intel_crtc *crtc;
	int sprite, ret;

	crtc = intel_crtc_alloc();
	if (IS_ERR(crtc))
		return PTR_ERR(crtc);

	crtc->pipe = pipe;
	crtc->num_scalers = RUNTIME_INFO(dev_priv)->num_scalers[pipe];

	primary = intel_primary_plane_create(dev_priv, pipe);
	if (IS_ERR(primary)) {
		ret = PTR_ERR(primary);
		goto fail;
	}
	crtc->plane_ids_mask |= BIT(primary->id);

	for_each_sprite(dev_priv, pipe, sprite) {
		struct intel_plane *plane;

		plane = intel_sprite_plane_create(dev_priv, pipe, sprite);
		if (IS_ERR(plane)) {
			ret = PTR_ERR(plane);
			goto fail;
		}
		crtc->plane_ids_mask |= BIT(plane->id);
	}

	cursor = intel_cursor_plane_create(dev_priv, pipe);
	if (IS_ERR(cursor)) {
		ret = PTR_ERR(cursor);
		goto fail;
	}
	crtc->plane_ids_mask |= BIT(cursor->id);

	/* Pick the vblank counter/irq variant for this platform. */
	if (HAS_GMCH(dev_priv)) {
		if (IS_CHERRYVIEW(dev_priv) ||
		    IS_VALLEYVIEW(dev_priv) || IS_G4X(dev_priv))
			funcs = &g4x_crtc_funcs;
		else if (IS_GEN(dev_priv, 4))
			funcs = &i965_crtc_funcs;
		else if (IS_I945GM(dev_priv) || IS_I915GM(dev_priv))
			funcs = &i915gm_crtc_funcs;
		else if (IS_GEN(dev_priv, 3))
			funcs = &i915_crtc_funcs;
		else
			funcs = &i8xx_crtc_funcs;
	} else {
		if (INTEL_GEN(dev_priv) >= 8)
			funcs = &bdw_crtc_funcs;
		else
			funcs = &ilk_crtc_funcs;
	}

	ret = drm_crtc_init_with_planes(&dev_priv->drm, &crtc->base,
					&primary->base, &cursor->base,
					funcs, "pipe %c", pipe_name(pipe));
	if (ret)
		goto fail;

	/* Each pipe must map to exactly one crtc. */
	BUG_ON(pipe >= ARRAY_SIZE(dev_priv->pipe_to_crtc_mapping) ||
	       dev_priv->pipe_to_crtc_mapping[pipe] != NULL);
	dev_priv->pipe_to_crtc_mapping[pipe] = crtc;

	/* Pre-gen9 additionally keeps a hw-plane -> crtc lookup table. */
	if (INTEL_GEN(dev_priv) < 9) {
		enum i9xx_plane_id i9xx_plane = primary->i9xx_plane;

		BUG_ON(i9xx_plane >= ARRAY_SIZE(dev_priv->plane_to_crtc_mapping) ||
		       dev_priv->plane_to_crtc_mapping[i9xx_plane] != NULL);
		dev_priv->plane_to_crtc_mapping[i9xx_plane] = crtc;
	}

	if (INTEL_GEN(dev_priv) >= 10)
		drm_crtc_create_scaling_filter_property(&crtc->base,
						BIT(DRM_SCALING_FILTER_DEFAULT) |
						BIT(DRM_SCALING_FILTER_NEAREST_NEIGHBOR));

	intel_color_init(crtc);

	intel_crtc_crc_init(crtc);

	/* Other parts of the driver assume crtc index == pipe. */
	drm_WARN_ON(&dev_priv->drm, drm_crtc_index(&crtc->base) != crtc->pipe);

	return 0;

fail:
	intel_crtc_free(crtc);

	return ret;
}
17475
17476int intel_get_pipe_from_crtc_id_ioctl(struct drm_device *dev, void *data,
17477 struct drm_file *file)
17478{
17479 struct drm_i915_get_pipe_from_crtc_id *pipe_from_crtc_id = data;
17480 struct drm_crtc *drmmode_crtc;
17481 struct intel_crtc *crtc;
17482
17483 drmmode_crtc = drm_crtc_find(dev, file, pipe_from_crtc_id->crtc_id);
17484 if (!drmmode_crtc)
17485 return -ENOENT;
17486
17487 crtc = to_intel_crtc(drmmode_crtc);
17488 pipe_from_crtc_id->pipe = crtc->pipe;
17489
17490 return 0;
17491}
17492
17493static u32 intel_encoder_possible_clones(struct intel_encoder *encoder)
17494{
17495 struct drm_device *dev = encoder->base.dev;
17496 struct intel_encoder *source_encoder;
17497 u32 possible_clones = 0;
17498
17499 for_each_intel_encoder(dev, source_encoder) {
17500 if (encoders_cloneable(encoder, source_encoder))
17501 possible_clones |= drm_encoder_mask(&source_encoder->base);
17502 }
17503
17504 return possible_clones;
17505}
17506
17507static u32 intel_encoder_possible_crtcs(struct intel_encoder *encoder)
17508{
17509 struct drm_device *dev = encoder->base.dev;
17510 struct intel_crtc *crtc;
17511 u32 possible_crtcs = 0;
17512
17513 for_each_intel_crtc(dev, crtc) {
17514 if (encoder->pipe_mask & BIT(crtc->pipe))
17515 possible_crtcs |= drm_crtc_mask(&crtc->base);
17516 }
17517
17518 return possible_crtcs;
17519}
17520
17521static bool ilk_has_edp_a(struct drm_i915_private *dev_priv)
17522{
17523 if (!IS_MOBILE(dev_priv))
17524 return false;
17525
17526 if ((intel_de_read(dev_priv, DP_A) & DP_DETECTED) == 0)
17527 return false;
17528
17529 if (IS_GEN(dev_priv, 5) && (intel_de_read(dev_priv, FUSE_STRAP) & ILK_eDP_A_DISABLE))
17530 return false;
17531
17532 return true;
17533}
17534
17535static bool intel_ddi_crt_present(struct drm_i915_private *dev_priv)
17536{
17537 if (INTEL_GEN(dev_priv) >= 9)
17538 return false;
17539
17540 if (IS_HSW_ULT(dev_priv) || IS_BDW_ULT(dev_priv))
17541 return false;
17542
17543 if (HAS_PCH_LPT_H(dev_priv) &&
17544 intel_de_read(dev_priv, SFUSE_STRAP) & SFUSE_STRAP_CRT_DISABLED)
17545 return false;
17546
17547
17548 if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES)
17549 return false;
17550
17551 if (!dev_priv->vbt.int_crt_support)
17552 return false;
17553
17554 return true;
17555}
17556
17557void intel_pps_unlock_regs_wa(struct drm_i915_private *dev_priv)
17558{
17559 int pps_num;
17560 int pps_idx;
17561
17562 if (HAS_DDI(dev_priv))
17563 return;
17564
17565
17566
17567
17568 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
17569 pps_num = 2;
17570 else
17571 pps_num = 1;
17572
17573 for (pps_idx = 0; pps_idx < pps_num; pps_idx++) {
17574 u32 val = intel_de_read(dev_priv, PP_CONTROL(pps_idx));
17575
17576 val = (val & ~PANEL_UNLOCK_MASK) | PANEL_UNLOCK_REGS;
17577 intel_de_write(dev_priv, PP_CONTROL(pps_idx), val);
17578 }
17579}
17580
/*
 * Pick the mmio base for the panel power sequencer registers on this
 * platform, then apply the register-unlock workaround.
 */
static void intel_pps_init(struct drm_i915_private *dev_priv)
{
	if (HAS_PCH_SPLIT(dev_priv) || IS_GEN9_LP(dev_priv))
		dev_priv->pps_mmio_base = PCH_PPS_BASE;
	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		dev_priv->pps_mmio_base = VLV_PPS_BASE;
	else
		dev_priv->pps_mmio_base = PPS_BASE;

	intel_pps_unlock_regs_wa(dev_priv);
}
17592
/*
 * Probe for and register every display output encoder (DDI, DP, HDMI,
 * SDVO, LVDS, CRT, TV, DSI, DVO) appropriate for this platform, then
 * fill in the possible_crtcs/possible_clones masks for each encoder.
 * The platform ladder runs newest-first; within a platform the probe
 * order matters (e.g. LVDS/eDP before the ports they may shadow).
 */
static void intel_setup_outputs(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	bool dpd_is_edp = false;

	intel_pps_init(dev_priv);

	if (!HAS_DISPLAY(dev_priv))
		return;

	if (IS_DG1(dev_priv) || IS_ROCKETLAKE(dev_priv)) {
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_TC1);
		intel_ddi_init(dev_priv, PORT_TC2);
	} else if (INTEL_GEN(dev_priv) >= 12) {
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_TC1);
		intel_ddi_init(dev_priv, PORT_TC2);
		intel_ddi_init(dev_priv, PORT_TC3);
		intel_ddi_init(dev_priv, PORT_TC4);
		intel_ddi_init(dev_priv, PORT_TC5);
		intel_ddi_init(dev_priv, PORT_TC6);
		icl_dsi_init(dev_priv);
	} else if (IS_JSL_EHL(dev_priv)) {
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_C);
		intel_ddi_init(dev_priv, PORT_D);
		icl_dsi_init(dev_priv);
	} else if (IS_GEN(dev_priv, 11)) {
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_C);
		intel_ddi_init(dev_priv, PORT_D);
		intel_ddi_init(dev_priv, PORT_E);
		/*
		 * Port F is only present on some ICL SKUs; additionally
		 * require the VBT to report it before registering.
		 */
		if (IS_ICL_WITH_PORT_F(dev_priv) &&
		    intel_bios_is_port_present(dev_priv, PORT_F))
			intel_ddi_init(dev_priv, PORT_F);

		icl_dsi_init(dev_priv);
	} else if (IS_GEN9_LP(dev_priv)) {
		/* BXT/GLK: fixed set of three DDI ports plus DSI. */
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_C);

		vlv_dsi_init(dev_priv);
	} else if (HAS_DDI(dev_priv)) {
		int found;

		if (intel_ddi_crt_present(dev_priv))
			intel_crt_init(dev_priv);

		/*
		 * DDI A is special: probe via the init-display-detected
		 * strap bit rather than a per-port fuse. On gen9 BC parts
		 * register it regardless of the strap.
		 */
		found = intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_INIT_DISPLAY_DETECTED;

		if (found || IS_GEN9_BC(dev_priv))
			intel_ddi_init(dev_priv, PORT_A);

		/* The remaining DDI ports have fuse-strap detect bits. */
		found = intel_de_read(dev_priv, SFUSE_STRAP);

		if (found & SFUSE_STRAP_DDIB_DETECTED)
			intel_ddi_init(dev_priv, PORT_B);
		if (found & SFUSE_STRAP_DDIC_DETECTED)
			intel_ddi_init(dev_priv, PORT_C);
		if (found & SFUSE_STRAP_DDID_DETECTED)
			intel_ddi_init(dev_priv, PORT_D);
		if (found & SFUSE_STRAP_DDIF_DETECTED)
			intel_ddi_init(dev_priv, PORT_F);

		/* Gen9 BC: port E has no strap bit, rely on the VBT. */
		if (IS_GEN9_BC(dev_priv) &&
		    intel_bios_is_port_present(dev_priv, PORT_E))
			intel_ddi_init(dev_priv, PORT_E);

	} else if (HAS_PCH_SPLIT(dev_priv)) {
		int found;

		/*
		 * LVDS and CRT are registered unconditionally; the port
		 * detect bits below gate everything else.
		 */
		intel_lvds_init(dev_priv);
		intel_crt_init(dev_priv);

		dpd_is_edp = intel_dp_is_port_edp(dev_priv, PORT_D);

		if (ilk_has_edp_a(dev_priv))
			intel_dp_init(dev_priv, DP_A, PORT_A);

		if (intel_de_read(dev_priv, PCH_HDMIB) & SDVO_DETECTED) {
			/* SDVO port B shares the detect bit with HDMI B. */
			found = intel_sdvo_init(dev_priv, PCH_SDVOB, PORT_B);
			if (!found)
				intel_hdmi_init(dev_priv, PCH_HDMIB, PORT_B);
			if (!found && (intel_de_read(dev_priv, PCH_DP_B) & DP_DETECTED))
				intel_dp_init(dev_priv, PCH_DP_B, PORT_B);
		}

		if (intel_de_read(dev_priv, PCH_HDMIC) & SDVO_DETECTED)
			intel_hdmi_init(dev_priv, PCH_HDMIC, PORT_C);

		/* eDP on port D was already handled via dpd_is_edp. */
		if (!dpd_is_edp && intel_de_read(dev_priv, PCH_HDMID) & SDVO_DETECTED)
			intel_hdmi_init(dev_priv, PCH_HDMID, PORT_D);

		if (intel_de_read(dev_priv, PCH_DP_C) & DP_DETECTED)
			intel_dp_init(dev_priv, PCH_DP_C, PORT_C);

		if (intel_de_read(dev_priv, PCH_DP_D) & DP_DETECTED)
			intel_dp_init(dev_priv, PCH_DP_D, PORT_D);
	} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		bool has_edp, has_port;

		if (IS_VALLEYVIEW(dev_priv) && dev_priv->vbt.int_crt_support)
			intel_crt_init(dev_priv);

		/*
		 * eDP is probed before HDMI on the same port: a port that
		 * carries eDP must not also get an HDMI encoder. Register
		 * the port when either the hw detect bit or the VBT says
		 * it's present; intel_dp_init() returning false clears
		 * has_edp so HDMI can still be registered on that port.
		 * NOTE(review): exact strap-vs-VBT trust rationale not
		 * visible here - confirm against the VBT parsing code.
		 */
		has_edp = intel_dp_is_port_edp(dev_priv, PORT_B);
		has_port = intel_bios_is_port_present(dev_priv, PORT_B);
		if (intel_de_read(dev_priv, VLV_DP_B) & DP_DETECTED || has_port)
			has_edp &= intel_dp_init(dev_priv, VLV_DP_B, PORT_B);
		if ((intel_de_read(dev_priv, VLV_HDMIB) & SDVO_DETECTED || has_port) && !has_edp)
			intel_hdmi_init(dev_priv, VLV_HDMIB, PORT_B);

		has_edp = intel_dp_is_port_edp(dev_priv, PORT_C);
		has_port = intel_bios_is_port_present(dev_priv, PORT_C);
		if (intel_de_read(dev_priv, VLV_DP_C) & DP_DETECTED || has_port)
			has_edp &= intel_dp_init(dev_priv, VLV_DP_C, PORT_C);
		if ((intel_de_read(dev_priv, VLV_HDMIC) & SDVO_DETECTED || has_port) && !has_edp)
			intel_hdmi_init(dev_priv, VLV_HDMIC, PORT_C);

		if (IS_CHERRYVIEW(dev_priv)) {
			/* Port D exists only on CHV; eDP not supported there. */
			has_port = intel_bios_is_port_present(dev_priv, PORT_D);
			if (intel_de_read(dev_priv, CHV_DP_D) & DP_DETECTED || has_port)
				intel_dp_init(dev_priv, CHV_DP_D, PORT_D);
			if (intel_de_read(dev_priv, CHV_HDMID) & SDVO_DETECTED || has_port)
				intel_hdmi_init(dev_priv, CHV_HDMID, PORT_D);
		}

		vlv_dsi_init(dev_priv);
	} else if (IS_PINEVIEW(dev_priv)) {
		intel_lvds_init(dev_priv);
		intel_crt_init(dev_priv);
	} else if (IS_GEN_RANGE(dev_priv, 3, 4)) {
		bool found = false;

		if (IS_MOBILE(dev_priv))
			intel_lvds_init(dev_priv);

		intel_crt_init(dev_priv);

		if (intel_de_read(dev_priv, GEN3_SDVOB) & SDVO_DETECTED) {
			drm_dbg_kms(&dev_priv->drm, "probing SDVOB\n");
			found = intel_sdvo_init(dev_priv, GEN3_SDVOB, PORT_B);
			if (!found && IS_G4X(dev_priv)) {
				drm_dbg_kms(&dev_priv->drm,
					    "probing HDMI on SDVOB\n");
				intel_hdmi_init(dev_priv, GEN4_HDMIB, PORT_B);
			}

			if (!found && IS_G4X(dev_priv))
				intel_dp_init(dev_priv, DP_B, PORT_B);
		}

		/*
		 * Before G4X SDVOC doesn't have its own detect bit; it
		 * shares SDVOB's, hence the repeated GEN3_SDVOB read.
		 */
		if (intel_de_read(dev_priv, GEN3_SDVOB) & SDVO_DETECTED) {
			drm_dbg_kms(&dev_priv->drm, "probing SDVOC\n");
			found = intel_sdvo_init(dev_priv, GEN3_SDVOC, PORT_C);
		}

		if (!found && (intel_de_read(dev_priv, GEN3_SDVOC) & SDVO_DETECTED)) {

			if (IS_G4X(dev_priv)) {
				drm_dbg_kms(&dev_priv->drm,
					    "probing HDMI on SDVOC\n");
				intel_hdmi_init(dev_priv, GEN4_HDMIC, PORT_C);
			}
			if (IS_G4X(dev_priv))
				intel_dp_init(dev_priv, DP_C, PORT_C);
		}

		if (IS_G4X(dev_priv) && (intel_de_read(dev_priv, DP_D) & DP_DETECTED))
			intel_dp_init(dev_priv, DP_D, PORT_D);

		if (SUPPORTS_TV(dev_priv))
			intel_tv_init(dev_priv);
	} else if (IS_GEN(dev_priv, 2)) {
		if (IS_I85X(dev_priv))
			intel_lvds_init(dev_priv);

		intel_crt_init(dev_priv);
		intel_dvo_init(dev_priv);
	}

	intel_psr_init(dev_priv);

	/* Now that all encoders exist, compute their crtc/clone masks. */
	for_each_intel_encoder(&dev_priv->drm, encoder) {
		encoder->base.possible_crtcs =
			intel_encoder_possible_crtcs(encoder);
		encoder->base.possible_clones =
			intel_encoder_possible_clones(encoder);
	}

	intel_init_pch_refclk(dev_priv);

	drm_helper_move_panel_connectors_to_head(&dev_priv->drm);
}
17838
17839static void intel_user_framebuffer_destroy(struct drm_framebuffer *fb)
17840{
17841 struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
17842
17843 drm_framebuffer_cleanup(fb);
17844 intel_frontbuffer_put(intel_fb->frontbuffer);
17845
17846 kfree(intel_fb);
17847}
17848
17849static int intel_user_framebuffer_create_handle(struct drm_framebuffer *fb,
17850 struct drm_file *file,
17851 unsigned int *handle)
17852{
17853 struct drm_i915_gem_object *obj = intel_fb_obj(fb);
17854 struct drm_i915_private *i915 = to_i915(obj->base.dev);
17855
17856 if (obj->userptr.mm) {
17857 drm_dbg(&i915->drm,
17858 "attempting to use a userptr for a framebuffer, denied\n");
17859 return -EINVAL;
17860 }
17861
17862 return drm_gem_handle_create(file, &obj->base, handle);
17863}
17864
/*
 * .dirty hook: flush any pending GPU writes to the backing object and
 * signal a frontbuffer flush with DIRTYFB origin. The clip rectangles
 * are ignored; the whole fb is flushed.
 */
static int intel_user_framebuffer_dirty(struct drm_framebuffer *fb,
					struct drm_file *file,
					unsigned flags, unsigned color,
					struct drm_clip_rect *clips,
					unsigned num_clips)
{
	struct drm_i915_gem_object *obj = intel_fb_obj(fb);

	i915_gem_object_flush_if_display(obj);
	intel_frontbuffer_flush(to_intel_frontbuffer(fb), ORIGIN_DIRTYFB);

	return 0;
}
17878
/* Framebuffer vfuncs for user-created framebuffers. */
static const struct drm_framebuffer_funcs intel_fb_funcs = {
	.destroy = intel_user_framebuffer_destroy,
	.create_handle = intel_user_framebuffer_create_handle,
	.dirty = intel_user_framebuffer_dirty,
};
17884
/*
 * intel_framebuffer_init - validate and register a framebuffer
 * @intel_fb: framebuffer to initialize
 * @obj: GEM object backing every plane of the framebuffer
 * @mode_cmd: addfb2 parameters (may be fixed up for legacy addfb)
 *
 * Validates @mode_cmd against the object's tiling state and the
 * hardware's format/modifier/stride limits, fills in the fb, and
 * registers it with drm core.
 *
 * Returns 0 on success or a negative error code; on failure the
 * frontbuffer reference taken here is dropped again.
 */
static int intel_framebuffer_init(struct intel_framebuffer *intel_fb,
				  struct drm_i915_gem_object *obj,
				  struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
	struct drm_framebuffer *fb = &intel_fb->base;
	u32 max_stride;
	unsigned int tiling, stride;
	int ret = -EINVAL;
	int i;

	intel_fb->frontbuffer = intel_frontbuffer_get(obj);
	if (!intel_fb->frontbuffer)
		return -ENOMEM;

	/* Snapshot the object's tiling parameters under the object lock. */
	i915_gem_object_lock(obj, NULL);
	tiling = i915_gem_object_get_tiling(obj);
	stride = i915_gem_object_get_stride(obj);
	i915_gem_object_unlock(obj);

	if (mode_cmd->flags & DRM_MODE_FB_MODIFIERS) {
		/*
		 * An explicit tiling mode set via set_tiling must agree
		 * with the modifier requested by userspace.
		 */
		if (tiling != I915_TILING_NONE &&
		    tiling != intel_fb_modifier_to_tiling(mode_cmd->modifier[0])) {
			drm_dbg_kms(&dev_priv->drm,
				    "tiling_mode doesn't match fb modifier\n");
			goto err;
		}
	} else {
		/* Legacy addfb: derive the modifier from the object tiling. */
		if (tiling == I915_TILING_X) {
			mode_cmd->modifier[0] = I915_FORMAT_MOD_X_TILED;
		} else if (tiling == I915_TILING_Y) {
			drm_dbg_kms(&dev_priv->drm,
				    "No Y tiling for legacy addfb\n");
			goto err;
		}
	}

	/* The format/modifier combo must be scannable by at least one plane. */
	if (!drm_any_plane_has_format(&dev_priv->drm,
				      mode_cmd->pixel_format,
				      mode_cmd->modifier[0])) {
		struct drm_format_name_buf format_name;

		drm_dbg_kms(&dev_priv->drm,
			    "unsupported pixel format %s / modifier 0x%llx\n",
			    drm_get_format_name(mode_cmd->pixel_format,
						&format_name),
			    mode_cmd->modifier[0]);
		goto err;
	}

	/*
	 * On gen2/3 the tiling mode must match the fb modifier exactly
	 * (presumably the display engine scans out via the fence there;
	 * TODO confirm against bspec).
	 */
	if (INTEL_GEN(dev_priv) < 4 &&
	    tiling != intel_fb_modifier_to_tiling(mode_cmd->modifier[0])) {
		drm_dbg_kms(&dev_priv->drm,
			    "tiling_mode must match fb modifier exactly on gen2/3\n");
		goto err;
	}

	max_stride = intel_fb_max_stride(dev_priv, mode_cmd->pixel_format,
					 mode_cmd->modifier[0]);
	if (mode_cmd->pitches[0] > max_stride) {
		drm_dbg_kms(&dev_priv->drm,
			    "%s pitch (%u) must be at most %d\n",
			    mode_cmd->modifier[0] != DRM_FORMAT_MOD_LINEAR ?
			    "tiled" : "linear",
			    mode_cmd->pitches[0], max_stride);
		goto err;
	}

	/*
	 * For a tiled object the fb pitch must equal the stride
	 * programmed into the object's tiling state.
	 */
	if (tiling != I915_TILING_NONE && mode_cmd->pitches[0] != stride) {
		drm_dbg_kms(&dev_priv->drm,
			    "pitch (%d) must match tiling stride (%d)\n",
			    mode_cmd->pitches[0], stride);
		goto err;
	}

	/* Non-zero plane 0 offsets are not supported. */
	if (mode_cmd->offsets[0] != 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "plane 0 offset (0x%08x) must be 0\n",
			    mode_cmd->offsets[0]);
		goto err;
	}

	drm_helper_mode_fill_fb_struct(&dev_priv->drm, fb, mode_cmd);

	for (i = 0; i < fb->format->num_planes; i++) {
		u32 stride_alignment;

		/* All planes must be backed by the same GEM object. */
		if (mode_cmd->handles[i] != mode_cmd->handles[0]) {
			drm_dbg_kms(&dev_priv->drm, "bad plane %d handle\n",
				    i);
			goto err;
		}

		stride_alignment = intel_fb_stride_alignment(fb, i);
		if (fb->pitches[i] & (stride_alignment - 1)) {
			drm_dbg_kms(&dev_priv->drm,
				    "plane %d pitch (%d) must be at least %u byte aligned\n",
				    i, fb->pitches[i], stride_alignment);
			goto err;
		}

		/* Gen12 CCS aux planes have one valid pitch only. */
		if (is_gen12_ccs_plane(fb, i)) {
			int ccs_aux_stride = gen12_ccs_aux_stride(fb, i);

			if (fb->pitches[i] != ccs_aux_stride) {
				drm_dbg_kms(&dev_priv->drm,
					    "ccs aux plane %d pitch (%d) must be %d\n",
					    i,
					    fb->pitches[i], ccs_aux_stride);
				goto err;
			}
		}

		fb->obj[i] = &obj->base;
	}

	ret = intel_fill_fb_info(dev_priv, fb);
	if (ret)
		goto err;

	ret = drm_framebuffer_init(&dev_priv->drm, fb, &intel_fb_funcs);
	if (ret) {
		drm_err(&dev_priv->drm, "framebuffer init failed %d\n", ret);
		goto err;
	}

	return 0;

err:
	intel_frontbuffer_put(intel_fb->frontbuffer);
	return ret;
}
18030
18031static struct drm_framebuffer *
18032intel_user_framebuffer_create(struct drm_device *dev,
18033 struct drm_file *filp,
18034 const struct drm_mode_fb_cmd2 *user_mode_cmd)
18035{
18036 struct drm_framebuffer *fb;
18037 struct drm_i915_gem_object *obj;
18038 struct drm_mode_fb_cmd2 mode_cmd = *user_mode_cmd;
18039
18040 obj = i915_gem_object_lookup(filp, mode_cmd.handles[0]);
18041 if (!obj)
18042 return ERR_PTR(-ENOENT);
18043
18044 fb = intel_framebuffer_create(obj, &mode_cmd);
18045 i915_gem_object_put(obj);
18046
18047 return fb;
18048}
18049
/*
 * Global .mode_valid hook: reject modes the display hardware can never
 * handle, independent of connector/encoder. Per-generation pipe timing
 * limits are enforced here; plane size limits are handled separately in
 * intel_mode_valid_max_plane_size().
 *
 * NOTE(review): DBLSCAN is deliberately not rejected here — presumably
 * left to per-connector/encoder validation; confirm against the
 * connector mode_valid hooks.
 */
static enum drm_mode_status
intel_mode_valid(struct drm_device *dev,
		 const struct drm_display_mode *mode)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	int hdisplay_max, htotal_max;
	int vdisplay_max, vtotal_max;

	if (mode->vscan > 1)
		return MODE_NO_VSCAN;

	if (mode->flags & DRM_MODE_FLAG_HSKEW)
		return MODE_H_ILLEGAL;

	if (mode->flags & (DRM_MODE_FLAG_CSYNC |
			   DRM_MODE_FLAG_NCSYNC |
			   DRM_MODE_FLAG_PCSYNC))
		return MODE_HSYNC;

	if (mode->flags & (DRM_MODE_FLAG_BCAST |
			   DRM_MODE_FLAG_PIXMUX |
			   DRM_MODE_FLAG_CLKDIV2))
		return MODE_BAD;

	/* Per-generation maximum active/total timings. */
	if (INTEL_GEN(dev_priv) >= 11) {
		hdisplay_max = 16384;
		vdisplay_max = 8192;
		htotal_max = 16384;
		vtotal_max = 8192;
	} else if (INTEL_GEN(dev_priv) >= 9 ||
		   IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv)) {
		hdisplay_max = 8192;
		vdisplay_max = 4096;
		htotal_max = 8192;
		vtotal_max = 8192;
	} else if (INTEL_GEN(dev_priv) >= 3) {
		hdisplay_max = 4096;
		vdisplay_max = 4096;
		htotal_max = 8192;
		vtotal_max = 8192;
	} else {
		hdisplay_max = 2048;
		vdisplay_max = 2048;
		htotal_max = 4096;
		vtotal_max = 4096;
	}

	if (mode->hdisplay > hdisplay_max ||
	    mode->hsync_start > htotal_max ||
	    mode->hsync_end > htotal_max ||
	    mode->htotal > htotal_max)
		return MODE_H_ILLEGAL;

	if (mode->vdisplay > vdisplay_max ||
	    mode->vsync_start > vtotal_max ||
	    mode->vsync_end > vtotal_max ||
	    mode->vtotal > vtotal_max)
		return MODE_V_ILLEGAL;

	/* Minimum active size and blanking requirements. */
	if (INTEL_GEN(dev_priv) >= 5) {
		if (mode->hdisplay < 64 ||
		    mode->htotal - mode->hdisplay < 32)
			return MODE_H_ILLEGAL;

		if (mode->vtotal - mode->vdisplay < 5)
			return MODE_V_ILLEGAL;
	} else {
		if (mode->htotal - mode->hdisplay < 32)
			return MODE_H_ILLEGAL;

		if (mode->vtotal - mode->vdisplay < 3)
			return MODE_V_ILLEGAL;
	}

	return MODE_OK;
}
18140
18141enum drm_mode_status
18142intel_mode_valid_max_plane_size(struct drm_i915_private *dev_priv,
18143 const struct drm_display_mode *mode,
18144 bool bigjoiner)
18145{
18146 int plane_width_max, plane_height_max;
18147
18148
18149
18150
18151
18152 if (INTEL_GEN(dev_priv) < 9)
18153 return MODE_OK;
18154
18155
18156
18157
18158
18159
18160 if (INTEL_GEN(dev_priv) >= 11) {
18161 plane_width_max = 5120 << bigjoiner;
18162 plane_height_max = 4320;
18163 } else {
18164 plane_width_max = 5120;
18165 plane_height_max = 4096;
18166 }
18167
18168 if (mode->hdisplay > plane_width_max)
18169 return MODE_H_ILLEGAL;
18170
18171 if (mode->vdisplay > plane_height_max)
18172 return MODE_V_ILLEGAL;
18173
18174 return MODE_OK;
18175}
18176
/*
 * Top-level mode config vfuncs: framebuffer creation, format info,
 * mode validation and the whole atomic check/commit/state machinery
 * are routed to the intel implementations.
 */
static const struct drm_mode_config_funcs intel_mode_funcs = {
	.fb_create = intel_user_framebuffer_create,
	.get_format_info = intel_get_format_info,
	.output_poll_changed = intel_fbdev_output_poll_changed,
	.mode_valid = intel_mode_valid,
	.atomic_check = intel_atomic_check,
	.atomic_commit = intel_atomic_commit,
	.atomic_state_alloc = intel_atomic_state_alloc,
	.atomic_state_clear = intel_atomic_state_clear,
	.atomic_state_free = intel_atomic_state_free,
};
18188
18189
18190
18191
18192
/*
 * intel_init_display_hooks - set up the per-platform display vfuncs
 * @dev_priv: i915 device
 *
 * Populates dev_priv->display with the pipe config readout, initial
 * plane config, clock computation and crtc enable/disable hooks for
 * the running platform, plus FDI link training and modeset commit
 * hooks where applicable. Branch order matters: the checks go from
 * newest to oldest hardware.
 */
void intel_init_display_hooks(struct drm_i915_private *dev_priv)
{
	intel_init_cdclk_hooks(dev_priv);

	if (INTEL_GEN(dev_priv) >= 9) {
		dev_priv->display.get_pipe_config = hsw_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			skl_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = hsw_crtc_compute_clock;
		dev_priv->display.crtc_enable = hsw_crtc_enable;
		dev_priv->display.crtc_disable = hsw_crtc_disable;
	} else if (HAS_DDI(dev_priv)) {
		dev_priv->display.get_pipe_config = hsw_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock =
			hsw_crtc_compute_clock;
		dev_priv->display.crtc_enable = hsw_crtc_enable;
		dev_priv->display.crtc_disable = hsw_crtc_disable;
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		dev_priv->display.get_pipe_config = ilk_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock =
			ilk_crtc_compute_clock;
		dev_priv->display.crtc_enable = ilk_crtc_enable;
		dev_priv->display.crtc_disable = ilk_crtc_disable;
	} else if (IS_CHERRYVIEW(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = chv_crtc_compute_clock;
		dev_priv->display.crtc_enable = valleyview_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = vlv_crtc_compute_clock;
		dev_priv->display.crtc_enable = valleyview_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (IS_G4X(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = g4x_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (IS_PINEVIEW(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = pnv_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (!IS_GEN(dev_priv, 2)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = i9xx_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = i8xx_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	}

	if (IS_GEN(dev_priv, 5)) {
		dev_priv->display.fdi_link_train = ilk_fdi_link_train;
	} else if (IS_GEN(dev_priv, 6)) {
		dev_priv->display.fdi_link_train = gen6_fdi_link_train;
	} else if (IS_IVYBRIDGE(dev_priv)) {
		/* IVB uses the manual link training path. */
		dev_priv->display.fdi_link_train = ivb_manual_fdi_link_train;
	}

	if (INTEL_GEN(dev_priv) >= 9)
		dev_priv->display.commit_modeset_enables = skl_commit_modeset_enables;
	else
		dev_priv->display.commit_modeset_enables = intel_commit_modeset_enables;

}
18279
/*
 * intel_modeset_init_hw - seed global display state from the hardware
 * @i915: i915 device
 *
 * Reads the current cdclk configuration and dbuf slice state out of
 * the hardware and copies them into the corresponding global atomic
 * state objects, so later atomic commits start from the real hw state.
 */
void intel_modeset_init_hw(struct drm_i915_private *i915)
{
	struct intel_cdclk_state *cdclk_state =
		to_intel_cdclk_state(i915->cdclk.obj.state);
	struct intel_dbuf_state *dbuf_state =
		to_intel_dbuf_state(i915->dbuf.obj.state);

	intel_update_cdclk(i915);
	intel_dump_cdclk_config(&i915->cdclk.hw, "Current CDCLK");
	/* Start with logical == actual == what hw is running right now. */
	cdclk_state->logical = cdclk_state->actual = i915->cdclk.hw;

	dbuf_state->enabled_slices = i915->dbuf.enabled_slices;
}
18293
18294static int sanitize_watermarks_add_affected(struct drm_atomic_state *state)
18295{
18296 struct drm_plane *plane;
18297 struct intel_crtc *crtc;
18298
18299 for_each_intel_crtc(state->dev, crtc) {
18300 struct intel_crtc_state *crtc_state;
18301
18302 crtc_state = intel_atomic_get_crtc_state(state, crtc);
18303 if (IS_ERR(crtc_state))
18304 return PTR_ERR(crtc_state);
18305
18306 if (crtc_state->hw.active) {
18307
18308
18309
18310
18311 crtc_state->inherited = true;
18312 }
18313 }
18314
18315 drm_for_each_plane(plane, state->dev) {
18316 struct drm_plane_state *plane_state;
18317
18318 plane_state = drm_atomic_get_plane_state(state, plane);
18319 if (IS_ERR(plane_state))
18320 return PTR_ERR(plane_state);
18321 }
18322
18323 return 0;
18324}
18325
18326
18327
18328
18329
18330
18331
18332
18333
18334
18335
/*
 * sanitize_watermarks - recompute and program watermarks at boot
 * @dev_priv: i915 device
 *
 * Builds an atomic state covering all crtcs/planes, runs the full
 * atomic check over the state inherited from BIOS, and immediately
 * programs the resulting optimal watermarks. Uses the standard
 * modeset-lock backoff/retry dance on -EDEADLK.
 */
static void sanitize_watermarks(struct drm_i915_private *dev_priv)
{
	struct drm_atomic_state *state;
	struct intel_atomic_state *intel_state;
	struct intel_crtc *crtc;
	struct intel_crtc_state *crtc_state;
	struct drm_modeset_acquire_ctx ctx;
	int ret;
	int i;

	/* Only relevant for platforms with a two-step watermark programming. */
	if (!dev_priv->display.optimize_watermarks)
		return;

	state = drm_atomic_state_alloc(&dev_priv->drm);
	if (drm_WARN_ON(&dev_priv->drm, !state))
		return;

	intel_state = to_intel_atomic_state(state);

	drm_modeset_acquire_init(&ctx, 0);

retry:
	state->acquire_ctx = &ctx;

	/*
	 * Skip intermediate watermark computation on non-GMCH platforms;
	 * only the final (optimal) watermarks are wanted here.
	 */
	if (!HAS_GMCH(dev_priv))
		intel_state->skip_intermediate_wm = true;

	ret = sanitize_watermarks_add_affected(state);
	if (ret)
		goto fail;

	ret = intel_atomic_check(&dev_priv->drm, state);
	if (ret)
		goto fail;

	/* Program the computed optimal watermarks directly. */
	for_each_new_intel_crtc_in_state(intel_state, crtc, crtc_state, i) {
		crtc_state->wm.need_postvbl_update = true;
		dev_priv->display.optimize_watermarks(intel_state, crtc);

		to_intel_crtc_state(crtc->base.state)->wm = crtc_state->wm;
	}

fail:
	if (ret == -EDEADLK) {
		drm_atomic_state_clear(state);
		drm_modeset_backoff(&ctx);
		goto retry;
	}

	/*
	 * Failure here is not expected to be fatal; the inherited
	 * watermarks simply stay in place, so just warn.
	 */
	drm_WARN(&dev_priv->drm, ret,
		 "Could not determine valid watermarks for inherited state\n");

	drm_atomic_state_put(state);

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
}
18411
18412static void intel_update_fdi_pll_freq(struct drm_i915_private *dev_priv)
18413{
18414 if (IS_GEN(dev_priv, 5)) {
18415 u32 fdi_pll_clk =
18416 intel_de_read(dev_priv, FDI_PLL_BIOS_0) & FDI_PLL_FB_CLOCK_MASK;
18417
18418 dev_priv->fdi_pll_freq = (fdi_pll_clk + 2) * 10000;
18419 } else if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv)) {
18420 dev_priv->fdi_pll_freq = 270000;
18421 } else {
18422 return;
18423 }
18424
18425 drm_dbg(&dev_priv->drm, "FDI PLL freq=%d\n", dev_priv->fdi_pll_freq);
18426}
18427
/*
 * intel_initial_commit - commit the state inherited from BIOS
 * @dev: drm device
 *
 * Performs the first atomic commit over the hw state read out at boot,
 * pulling in planes (and, where an encoder's fastset check fails, the
 * connectors) of every active crtc. Retries with the standard modeset
 * lock backoff on -EDEADLK. Returns 0 or a negative error code.
 */
static int intel_initial_commit(struct drm_device *dev)
{
	struct drm_atomic_state *state = NULL;
	struct drm_modeset_acquire_ctx ctx;
	struct intel_crtc *crtc;
	int ret = 0;

	state = drm_atomic_state_alloc(dev);
	if (!state)
		return -ENOMEM;

	drm_modeset_acquire_init(&ctx, 0);

retry:
	state->acquire_ctx = &ctx;

	for_each_intel_crtc(dev, crtc) {
		struct intel_crtc_state *crtc_state =
			intel_atomic_get_crtc_state(state, crtc);

		if (IS_ERR(crtc_state)) {
			ret = PTR_ERR(crtc_state);
			goto out;
		}

		if (crtc_state->hw.active) {
			struct intel_encoder *encoder;

			/*
			 * Keep the inherited flag: this state came from hw
			 * readout, so the checker must not demand a full
			 * recomputation for it.
			 */
			crtc_state->inherited = true;

			ret = drm_atomic_add_affected_planes(state, &crtc->base);
			if (ret)
				goto out;

			/*
			 * Force the color management state through the
			 * commit; the LUTs were not read out, so they
			 * must be (re)programmed here.
			 */
			crtc_state->uapi.color_mgmt_changed = true;

			for_each_intel_encoder_mask(dev, encoder,
						    crtc_state->uapi.encoder_mask) {
				/*
				 * If the encoder says the inherited state
				 * can't be kept as-is, pull in its
				 * connectors so the commit can fix it up.
				 */
				if (encoder->initial_fastset_check &&
				    !encoder->initial_fastset_check(encoder, crtc_state)) {
					ret = drm_atomic_add_affected_connectors(state,
										 &crtc->base);
					if (ret)
						goto out;
				}
			}
		}
	}

	ret = drm_atomic_commit(state);

out:
	if (ret == -EDEADLK) {
		drm_atomic_state_clear(state);
		drm_modeset_backoff(&ctx);
		goto retry;
	}

	drm_atomic_state_put(state);

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);

	return ret;
}
18506
/*
 * intel_mode_config_init - initialize the drm mode config for i915
 * @i915: i915 device
 *
 * Sets up drm_mode_config with i915's vfuncs and the per-generation
 * framebuffer and cursor size limits.
 */
static void intel_mode_config_init(struct drm_i915_private *i915)
{
	struct drm_mode_config *mode_config = &i915->drm.mode_config;

	drm_mode_config_init(&i915->drm);
	INIT_LIST_HEAD(&i915->global_obj_list);

	mode_config->min_width = 0;
	mode_config->min_height = 0;

	mode_config->preferred_depth = 24;
	mode_config->prefer_shadow = 1;

	mode_config->allow_fb_modifiers = true;

	mode_config->funcs = &intel_mode_funcs;

	if (INTEL_GEN(i915) >= 9)
		mode_config->async_page_flip = true;

	/* Maximum framebuffer dimensions by hardware generation. */
	if (INTEL_GEN(i915) >= 7) {
		mode_config->max_width = 16384;
		mode_config->max_height = 16384;
	} else if (INTEL_GEN(i915) >= 4) {
		mode_config->max_width = 8192;
		mode_config->max_height = 8192;
	} else if (IS_GEN(i915, 3)) {
		mode_config->max_width = 4096;
		mode_config->max_height = 4096;
	} else {
		mode_config->max_width = 2048;
		mode_config->max_height = 2048;
	}

	/* Cursor size limits vary a lot on the old platforms. */
	if (IS_I845G(i915) || IS_I865G(i915)) {
		mode_config->cursor_width = IS_I845G(i915) ? 64 : 512;
		mode_config->cursor_height = 1023;
	} else if (IS_I830(i915) || IS_I85X(i915) ||
		   IS_I915G(i915) || IS_I915GM(i915)) {
		mode_config->cursor_width = 64;
		mode_config->cursor_height = 64;
	} else {
		mode_config->cursor_width = 256;
		mode_config->cursor_height = 256;
	}
}
18557
/*
 * Tear down in reverse order of intel_mode_config_init(): the global
 * atomic objects first, then the drm mode config itself.
 */
static void intel_mode_config_cleanup(struct drm_i915_private *i915)
{
	intel_atomic_global_obj_cleanup(i915);
	drm_mode_config_cleanup(&i915->drm);
}
18563
18564static void plane_config_fini(struct intel_initial_plane_config *plane_config)
18565{
18566 if (plane_config->fb) {
18567 struct drm_framebuffer *fb = &plane_config->fb->base;
18568
18569
18570 if (drm_framebuffer_read_refcount(fb))
18571 drm_framebuffer_put(fb);
18572 else
18573 kfree(fb);
18574 }
18575
18576 if (plane_config->vma)
18577 i915_vma_put(plane_config->vma);
18578}
18579
18580
/*
 * intel_modeset_init_noirq - display init that must run before IRQs
 * @i915: i915 device
 *
 * First stage of display initialization: vblank, VBT, VGA, power
 * domains, CSR firmware, workqueues, mode config and the global
 * atomic objects (cdclk/dbuf/bw). Returns 0 or a negative error;
 * on failure everything set up so far is torn down again via the
 * goto-cleanup chain.
 */
int intel_modeset_init_noirq(struct drm_i915_private *i915)
{
	int ret;

	if (i915_inject_probe_failure(i915))
		return -ENODEV;

	if (HAS_DISPLAY(i915)) {
		ret = drm_vblank_init(&i915->drm,
				      INTEL_NUM_PIPES(i915));
		if (ret)
			return ret;
	}

	intel_bios_init(i915);

	ret = intel_vga_register(i915);
	if (ret)
		goto cleanup_bios;

	/* Power domains must come up before the rest of the display hw. */
	intel_power_domains_init_hw(i915, false);

	intel_csr_ucode_init(i915);

	i915->modeset_wq = alloc_ordered_workqueue("i915_modeset", 0);
	i915->flip_wq = alloc_workqueue("i915_flip", WQ_HIGHPRI |
					WQ_UNBOUND, WQ_UNBOUND_MAX_ACTIVE);

	intel_mode_config_init(i915);

	ret = intel_cdclk_init(i915);
	if (ret)
		goto cleanup_vga_client_pw_domain_csr;

	ret = intel_dbuf_init(i915);
	if (ret)
		goto cleanup_vga_client_pw_domain_csr;

	ret = intel_bw_init(i915);
	if (ret)
		goto cleanup_vga_client_pw_domain_csr;

	init_llist_head(&i915->atomic_helper.free_list);
	INIT_WORK(&i915->atomic_helper.free_work,
		  intel_atomic_helper_free_state_worker);

	intel_init_quirks(i915);

	intel_fbc_init(i915);

	return 0;

cleanup_vga_client_pw_domain_csr:
	intel_csr_ucode_fini(i915);
	intel_power_domains_driver_remove(i915);
	intel_vga_unregister(i915);
cleanup_bios:
	intel_bios_driver_remove(i915);

	return ret;
}
18643
18644
/*
 * intel_modeset_init_nogem - display init that runs before GEM setup
 * @i915: i915 device
 *
 * Second stage of display initialization: PM/watermark setup, crtc
 * creation, PLLs, output/connector probing, and the takeover of the
 * display state left behind by the BIOS (hw state readout, initial
 * plane configs, watermark sanitization). Returns 0 or a negative
 * error code.
 */
int intel_modeset_init_nogem(struct drm_i915_private *i915)
{
	struct drm_device *dev = &i915->drm;
	enum pipe pipe;
	struct intel_crtc *crtc;
	int ret;

	intel_init_pm(i915);

	intel_panel_sanitize_ssc(i915);

	intel_gmbus_setup(i915);

	drm_dbg_kms(&i915->drm, "%d display pipe%s available.\n",
		    INTEL_NUM_PIPES(i915),
		    INTEL_NUM_PIPES(i915) > 1 ? "s" : "");

	if (HAS_DISPLAY(i915)) {
		for_each_pipe(i915, pipe) {
			ret = intel_crtc_init(i915, pipe);
			if (ret) {
				intel_mode_config_cleanup(i915);
				return ret;
			}
		}
	}

	intel_plane_possible_crtcs_init(i915);
	intel_shared_dpll_init(dev);
	intel_update_fdi_pll_freq(i915);

	intel_update_czclk(i915);
	intel_modeset_init_hw(i915);

	intel_hdcp_component_init(i915);

	if (i915->max_cdclk_freq == 0)
		intel_update_max_cdclk(i915);

	/* Snapshot the HDPORT state where the platform has HTI. */
	if (INTEL_INFO(i915)->display.has_hti)
		i915->hti_state = intel_de_read(i915, HDPORT_STATE);

	/* Just disable it once at startup. */
	intel_vga_disable(i915);
	intel_setup_outputs(i915);

	drm_modeset_lock_all(dev);
	intel_modeset_setup_hw_state(dev, dev->mode_config.acquire_ctx);
	drm_modeset_unlock_all(dev);

	for_each_intel_crtc(dev, crtc) {
		struct intel_initial_plane_config plane_config = {};

		if (!to_intel_crtc_state(crtc->base.state)->uapi.active)
			continue;

		/*
		 * Read out the plane configuration the BIOS programmed
		 * on this crtc...
		 */
		i915->display.get_initial_plane_config(crtc, &plane_config);

		/*
		 * ...and try to take over the framebuffer it was
		 * scanning out of, so boot stays glitch-free.
		 */
		intel_find_initial_plane_obj(crtc, &plane_config);

		plane_config_fini(&plane_config);
	}

	/*
	 * Recompute watermarks for the inherited state; GMCH platforms
	 * don't need (or support) this two-step programming.
	 */
	if (!HAS_GMCH(i915))
		sanitize_watermarks(i915);

	return 0;
}
18733
18734
/*
 * intel_modeset_init - final stage of display initialization
 * @i915: i915 device
 *
 * Runs after GEM is up: commits the inherited state, sets up the
 * overlay and fbdev, enables hotplug handling and IPC. Returns 0 or
 * a negative error code (only fbdev init failure is fatal here).
 */
int intel_modeset_init(struct drm_i915_private *i915)
{
	int ret;

	if (!HAS_DISPLAY(i915))
		return 0;

	/*
	 * The initial commit fixing up the inherited state is
	 * best-effort: log and continue on failure.
	 */
	ret = intel_initial_commit(&i915->drm);
	if (ret)
		drm_dbg_kms(&i915->drm, "Initial modeset failed, %d\n", ret);

	intel_overlay_setup(i915);

	ret = intel_fbdev_init(&i915->drm);
	if (ret)
		return ret;

	/* Only enable hotplug handling once the fbdev is fully set up. */
	intel_hpd_init(i915);
	intel_hpd_poll_disable(i915);

	intel_init_ipc(i915);

	return 0;
}
18766
/*
 * i830_enable_pipe - force-enable a pipe on i830 with a fixed 640x480 mode
 * @dev_priv: i915 device
 * @pipe: pipe to enable
 *
 * Quirk handler: programs the DPLL and pipe timings for a standard
 * 640x480@60 VGA mode and enables the pipe. The exact register write
 * sequence below (double DPLL write, stabilization delays, triple
 * rewrite) is deliberate — do not reorder.
 */
void i830_enable_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv, pipe);

	/* Dividers for ~25.175 MHz pixel clock from the 48 MHz refclk. */
	struct dpll clock = {
		.m1 = 18,
		.m2 = 7,
		.p1 = 13,
		.p2 = 4,
		.n = 2,
	};
	u32 dpll, fp;
	int i;

	drm_WARN_ON(&dev_priv->drm,
		    i9xx_calc_dpll_params(48000, &clock) != 25154);

	drm_dbg_kms(&dev_priv->drm,
		    "enabling pipe %c due to force quirk (vco=%d dot=%d)\n",
		    pipe_name(pipe), clock.vco, clock.dot);

	fp = i9xx_dpll_compute_fp(&clock);
	dpll = DPLL_DVO_2X_MODE |
		DPLL_VGA_MODE_DIS |
		((clock.p1 - 2) << DPLL_FPA01_P1_POST_DIV_SHIFT) |
		PLL_P2_DIVIDE_BY_4 |
		PLL_REF_INPUT_DREFCLK |
		DPLL_VCO_ENABLE;

	intel_de_write(dev_priv, FP0(pipe), fp);
	intel_de_write(dev_priv, FP1(pipe), fp);

	/* 640x480@60 timings. */
	intel_de_write(dev_priv, HTOTAL(pipe), (640 - 1) | ((800 - 1) << 16));
	intel_de_write(dev_priv, HBLANK(pipe), (640 - 1) | ((800 - 1) << 16));
	intel_de_write(dev_priv, HSYNC(pipe), (656 - 1) | ((752 - 1) << 16));
	intel_de_write(dev_priv, VTOTAL(pipe), (480 - 1) | ((525 - 1) << 16));
	intel_de_write(dev_priv, VBLANK(pipe), (480 - 1) | ((525 - 1) << 16));
	intel_de_write(dev_priv, VSYNC(pipe), (490 - 1) | ((492 - 1) << 16));
	intel_de_write(dev_priv, PIPESRC(pipe), ((640 - 1) << 16) | (480 - 1));

	/*
	 * Write the DPLL first with VGA mode still enabled, then with it
	 * disabled — the hw apparently needs this two-step sequence
	 * (NOTE(review): confirm against bspec).
	 */
	intel_de_write(dev_priv, DPLL(pipe), dpll & ~DPLL_VGA_MODE_DIS);
	intel_de_write(dev_priv, DPLL(pipe), dpll);

	/* Wait for the clocks to stabilize. */
	intel_de_posting_read(dev_priv, DPLL(pipe));
	udelay(150);

	/*
	 * Rewrite the DPLL value once more after the delay
	 * (NOTE(review): presumably required by the hw; confirm).
	 */
	intel_de_write(dev_priv, DPLL(pipe), dpll);

	/* Three more writes, each followed by a settle delay. */
	for (i = 0; i < 3 ; i++) {
		intel_de_write(dev_priv, DPLL(pipe), dpll);
		intel_de_posting_read(dev_priv, DPLL(pipe));
		udelay(150);
	}

	intel_de_write(dev_priv, PIPECONF(pipe),
		       PIPECONF_ENABLE | PIPECONF_PROGRESSIVE);
	intel_de_posting_read(dev_priv, PIPECONF(pipe));

	intel_wait_for_pipe_scanline_moving(crtc);
}
18839
/*
 * i830_disable_pipe - force-disable a pipe on i830
 * @dev_priv: i915 device
 * @pipe: pipe to disable
 *
 * Counterpart of i830_enable_pipe(): disables the pipe and puts the
 * DPLL back into VGA mode. Warns if any plane or cursor is still
 * enabled, since those must be off before the pipe goes down.
 */
void i830_disable_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv, pipe);

	drm_dbg_kms(&dev_priv->drm, "disabling pipe %c due to force quirk\n",
		    pipe_name(pipe));

	/* All planes and cursors must already be disabled. */
	drm_WARN_ON(&dev_priv->drm,
		    intel_de_read(dev_priv, DSPCNTR(PLANE_A)) &
		    DISPLAY_PLANE_ENABLE);
	drm_WARN_ON(&dev_priv->drm,
		    intel_de_read(dev_priv, DSPCNTR(PLANE_B)) &
		    DISPLAY_PLANE_ENABLE);
	drm_WARN_ON(&dev_priv->drm,
		    intel_de_read(dev_priv, DSPCNTR(PLANE_C)) &
		    DISPLAY_PLANE_ENABLE);
	drm_WARN_ON(&dev_priv->drm,
		    intel_de_read(dev_priv, CURCNTR(PIPE_A)) & MCURSOR_MODE);
	drm_WARN_ON(&dev_priv->drm,
		    intel_de_read(dev_priv, CURCNTR(PIPE_B)) & MCURSOR_MODE);

	intel_de_write(dev_priv, PIPECONF(pipe), 0);
	intel_de_posting_read(dev_priv, PIPECONF(pipe));

	/* Only touch the DPLL once the pipe has actually stopped. */
	intel_wait_for_pipe_scanline_stopped(crtc);

	intel_de_write(dev_priv, DPLL(pipe), DPLL_VGA_MODE_DIS);
	intel_de_posting_read(dev_priv, DPLL(pipe));
}
18869
18870static void
18871intel_sanitize_plane_mapping(struct drm_i915_private *dev_priv)
18872{
18873 struct intel_crtc *crtc;
18874
18875 if (INTEL_GEN(dev_priv) >= 4)
18876 return;
18877
18878 for_each_intel_crtc(&dev_priv->drm, crtc) {
18879 struct intel_plane *plane =
18880 to_intel_plane(crtc->base.primary);
18881 struct intel_crtc *plane_crtc;
18882 enum pipe pipe;
18883
18884 if (!plane->get_hw_state(plane, &pipe))
18885 continue;
18886
18887 if (pipe == crtc->pipe)
18888 continue;
18889
18890 drm_dbg_kms(&dev_priv->drm,
18891 "[PLANE:%d:%s] attached to the wrong pipe, disabling plane\n",
18892 plane->base.base.id, plane->base.name);
18893
18894 plane_crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
18895 intel_plane_disable_noatomic(plane_crtc, plane);
18896 }
18897}
18898
18899static bool intel_crtc_has_encoders(struct intel_crtc *crtc)
18900{
18901 struct drm_device *dev = crtc->base.dev;
18902 struct intel_encoder *encoder;
18903
18904 for_each_encoder_on_crtc(dev, &crtc->base, encoder)
18905 return true;
18906
18907 return false;
18908}
18909
18910static struct intel_connector *intel_encoder_find_connector(struct intel_encoder *encoder)
18911{
18912 struct drm_device *dev = encoder->base.dev;
18913 struct intel_connector *connector;
18914
18915 for_each_connector_on_encoder(dev, &encoder->base, connector)
18916 return connector;
18917
18918 return NULL;
18919}
18920
18921static bool has_pch_trancoder(struct drm_i915_private *dev_priv,
18922 enum pipe pch_transcoder)
18923{
18924 return HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv) ||
18925 (HAS_PCH_LPT_H(dev_priv) && pch_transcoder == PIPE_A);
18926}
18927
/*
 * Reset the frame start delay to 0 in all the registers the BIOS may
 * have touched: the CPU transcoder (chicken reg on HSW+/gen9+, PIPECONF
 * otherwise) and, for PCH encoders, the PCH transcoder as well.
 */
static void intel_sanitize_frame_start_delay(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (INTEL_GEN(dev_priv) >= 9 ||
	    IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv)) {
		i915_reg_t reg = CHICKEN_TRANS(cpu_transcoder);
		u32 val;

		/* DSI transcoders have no CHICKEN_TRANS register. */
		if (transcoder_is_dsi(cpu_transcoder))
			return;

		val = intel_de_read(dev_priv, reg);
		val &= ~HSW_FRAME_START_DELAY_MASK;
		val |= HSW_FRAME_START_DELAY(0);
		intel_de_write(dev_priv, reg, val);
	} else {
		i915_reg_t reg = PIPECONF(cpu_transcoder);
		u32 val;

		val = intel_de_read(dev_priv, reg);
		val &= ~PIPECONF_FRAME_START_DELAY_MASK;
		val |= PIPECONF_FRAME_START_DELAY(0);
		intel_de_write(dev_priv, reg, val);
	}

	if (!crtc_state->has_pch_encoder)
		return;

	/* Same exercise for the PCH transcoder. */
	if (HAS_PCH_IBX(dev_priv)) {
		i915_reg_t reg = PCH_TRANSCONF(crtc->pipe);
		u32 val;

		val = intel_de_read(dev_priv, reg);
		val &= ~TRANS_FRAME_START_DELAY_MASK;
		val |= TRANS_FRAME_START_DELAY(0);
		intel_de_write(dev_priv, reg, val);
	} else {
		enum pipe pch_transcoder = intel_crtc_pch_transcoder(crtc);
		i915_reg_t reg = TRANS_CHICKEN2(pch_transcoder);
		u32 val;

		val = intel_de_read(dev_priv, reg);
		val &= ~TRANS_CHICKEN2_FRAME_START_DELAY_MASK;
		val |= TRANS_CHICKEN2_FRAME_START_DELAY(0);
		intel_de_write(dev_priv, reg, val);
	}
}
18978
/*
 * intel_sanitize_crtc - clean up BIOS-left state on one crtc
 * @crtc: crtc to sanitize
 * @ctx: modeset acquire context
 *
 * Normalizes frame start delay, disables non-primary planes the BIOS
 * left on, resets the bottom color, disables active crtcs without any
 * encoder, and marks FIFO underrun reporting disabled for bookkeeping.
 */
static void intel_sanitize_crtc(struct intel_crtc *crtc,
				struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc_state *crtc_state = to_intel_crtc_state(crtc->base.state);

	if (crtc_state->hw.active) {
		struct intel_plane *plane;

		/* Clear any frame start delay the BIOS programmed. */
		intel_sanitize_frame_start_delay(crtc_state);

		/* Disable everything but the primary plane. */
		for_each_intel_plane_on_crtc(dev, crtc, plane) {
			const struct intel_plane_state *plane_state =
				to_intel_plane_state(plane->base.state);

			if (plane_state->uapi.visible &&
			    plane->base.type != DRM_PLANE_TYPE_PRIMARY)
				intel_plane_disable_noatomic(crtc, plane);
		}

		/*
		 * Put the bottom color into a known state (gamma+csc
		 * enabled), matching what the driver programs itself.
		 */
		if (INTEL_GEN(dev_priv) >= 9)
			intel_de_write(dev_priv, SKL_BOTTOM_COLOR(crtc->pipe),
				       SKL_BOTTOM_COLOR_GAMMA_ENABLE | SKL_BOTTOM_COLOR_CSC_ENABLE);
	}

	/*
	 * An active crtc without any encoder is useless; shut it down.
	 * Bigjoiner slaves are driven through their master and are
	 * expected to have no encoder of their own.
	 */
	if (crtc_state->hw.active && !intel_crtc_has_encoders(crtc) &&
	    !crtc_state->bigjoiner_slave)
		intel_crtc_disable_noatomic(crtc, ctx);

	if (crtc_state->hw.active || HAS_GMCH(dev_priv)) {
		/*
		 * Underrun reporting starts out disabled; record that in
		 * the bookkeeping so later re-enabling works correctly.
		 */
		crtc->cpu_fifo_underrun_disabled = true;

		/*
		 * Ditto for the PCH side, but only where this pipe
		 * actually has a PCH transcoder.
		 */
		if (has_pch_trancoder(dev_priv, crtc->pipe))
			crtc->pch_fifo_underrun_disabled = true;
	}
}
19045
19046static bool has_bogus_dpll_config(const struct intel_crtc_state *crtc_state)
19047{
19048 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
19049
19050
19051
19052
19053
19054
19055
19056
19057
19058
19059
19060 return IS_GEN(dev_priv, 6) &&
19061 crtc_state->hw.active &&
19062 crtc_state->shared_dpll &&
19063 crtc_state->port_clock == 0;
19064}
19065
/*
 * intel_sanitize_encoder - clean up BIOS-left state on one encoder
 * @encoder: encoder to sanitize
 *
 * If the encoder has an active connector but no usable active crtc
 * (either none at all, or a bogus gen6 DPLL config), manually run the
 * encoder's disable hooks and detach it. Also notifies opregion of the
 * resulting state and fixes up the PLL mapping on gen11+.
 */
static void intel_sanitize_encoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_connector *connector;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
	struct intel_crtc_state *crtc_state = crtc ?
		to_intel_crtc_state(crtc->base.state) : NULL;

	/* The encoder is usable only if its crtc exists and is active. */
	bool has_active_crtc = crtc_state &&
		crtc_state->hw.active;

	if (crtc_state && has_bogus_dpll_config(crtc_state)) {
		drm_dbg_kms(&dev_priv->drm,
			    "BIOS has misprogrammed the hardware. Disabling pipe %c\n",
			    pipe_name(crtc->pipe));
		has_active_crtc = false;
	}

	connector = intel_encoder_find_connector(encoder);
	if (connector && !has_active_crtc) {
		drm_dbg_kms(&dev_priv->drm,
			    "[ENCODER:%d:%s] has active connectors but no active pipe!\n",
			    encoder->base.base.id,
			    encoder->base.name);

		/*
		 * Manually disable the encoder; only possible when we
		 * have a crtc state to pass to the disable hooks.
		 */
		if (crtc_state) {
			struct drm_encoder *best_encoder;

			drm_dbg_kms(&dev_priv->drm,
				    "[ENCODER:%d:%s] manually disabled\n",
				    encoder->base.base.id,
				    encoder->base.name);

			/*
			 * Temporarily point best_encoder at this encoder
			 * so the disable hooks see a consistent
			 * connector state; restored below.
			 */
			best_encoder = connector->base.state->best_encoder;
			connector->base.state->best_encoder = &encoder->base;

			/* NULL atomic state: this is a non-atomic disable. */
			if (encoder->disable)
				encoder->disable(NULL, encoder, crtc_state,
						 connector->base.state);
			if (encoder->post_disable)
				encoder->post_disable(NULL, encoder, crtc_state,
						      connector->base.state);

			connector->base.state->best_encoder = best_encoder;
		}
		encoder->base.crtc = NULL;

		/*
		 * Detach the connector from the (now disabled) encoder
		 * and mark it off.
		 */
		connector->base.dpms = DRM_MODE_DPMS_OFF;
		connector->base.encoder = NULL;
	}

	/* Tell opregion whether this encoder ended up active. */
	intel_opregion_notify_encoder(encoder, connector && has_active_crtc);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_sanitize_encoder_pll_mapping(encoder);
}
19136
19137
/*
 * Read out the hardware state of every plane (which pipe it is on and
 * whether it is enabled), record it in the software plane/crtc state,
 * then fix up each crtc's active-planes bookkeeping.
 */
static void readout_plane_state(struct drm_i915_private *dev_priv)
{
	struct intel_plane *plane;
	struct intel_crtc *crtc;

	for_each_intel_plane(&dev_priv->drm, plane) {
		struct intel_plane_state *plane_state =
			to_intel_plane_state(plane->base.state);
		struct intel_crtc_state *crtc_state;
		enum pipe pipe = PIPE_A;
		bool visible;

		/* get_hw_state() also reports which pipe the plane is on */
		visible = plane->get_hw_state(plane, &pipe);

		crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
		crtc_state = to_intel_crtc_state(crtc->base.state);

		intel_set_plane_visible(crtc_state, plane_state, visible);

		drm_dbg_kms(&dev_priv->drm,
			    "[PLANE:%d:%s] hw state readout: %s, pipe %c\n",
			    plane->base.base.id, plane->base.name,
			    enableddisabled(visible), pipe_name(pipe));
	}

	/* Second pass: reconcile each crtc's active_planes with the
	 * visibility just read out above. */
	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		fixup_active_planes(crtc_state);
	}
}
19170
19171static void intel_modeset_readout_hw_state(struct drm_device *dev)
19172{
19173 struct drm_i915_private *dev_priv = to_i915(dev);
19174 struct intel_cdclk_state *cdclk_state =
19175 to_intel_cdclk_state(dev_priv->cdclk.obj.state);
19176 struct intel_dbuf_state *dbuf_state =
19177 to_intel_dbuf_state(dev_priv->dbuf.obj.state);
19178 enum pipe pipe;
19179 struct intel_crtc *crtc;
19180 struct intel_encoder *encoder;
19181 struct intel_connector *connector;
19182 struct drm_connector_list_iter conn_iter;
19183 u8 active_pipes = 0;
19184
19185 for_each_intel_crtc(dev, crtc) {
19186 struct intel_crtc_state *crtc_state =
19187 to_intel_crtc_state(crtc->base.state);
19188
19189 __drm_atomic_helper_crtc_destroy_state(&crtc_state->uapi);
19190 intel_crtc_free_hw_state(crtc_state);
19191 intel_crtc_state_reset(crtc_state, crtc);
19192
19193 intel_crtc_get_pipe_config(crtc_state);
19194
19195 crtc_state->hw.enable = crtc_state->hw.active;
19196
19197 crtc->base.enabled = crtc_state->hw.enable;
19198 crtc->active = crtc_state->hw.active;
19199
19200 if (crtc_state->hw.active)
19201 active_pipes |= BIT(crtc->pipe);
19202
19203 drm_dbg_kms(&dev_priv->drm,
19204 "[CRTC:%d:%s] hw state readout: %s\n",
19205 crtc->base.base.id, crtc->base.name,
19206 enableddisabled(crtc_state->hw.active));
19207 }
19208
19209 dev_priv->active_pipes = cdclk_state->active_pipes =
19210 dbuf_state->active_pipes = active_pipes;
19211
19212 readout_plane_state(dev_priv);
19213
19214 intel_dpll_readout_hw_state(dev_priv);
19215
19216 for_each_intel_encoder(dev, encoder) {
19217 pipe = 0;
19218
19219 if (encoder->get_hw_state(encoder, &pipe)) {
19220 struct intel_crtc_state *crtc_state;
19221
19222 crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
19223 crtc_state = to_intel_crtc_state(crtc->base.state);
19224
19225 encoder->base.crtc = &crtc->base;
19226 intel_encoder_get_config(encoder, crtc_state);
19227 if (encoder->sync_state)
19228 encoder->sync_state(encoder, crtc_state);
19229
19230
19231 if (crtc_state->bigjoiner) {
19232
19233 WARN_ON(crtc_state->bigjoiner_slave);
19234
19235 crtc = crtc_state->bigjoiner_linked_crtc;
19236 crtc_state = to_intel_crtc_state(crtc->base.state);
19237 intel_encoder_get_config(encoder, crtc_state);
19238 }
19239 } else {
19240 encoder->base.crtc = NULL;
19241 }
19242
19243 drm_dbg_kms(&dev_priv->drm,
19244 "[ENCODER:%d:%s] hw state readout: %s, pipe %c\n",
19245 encoder->base.base.id, encoder->base.name,
19246 enableddisabled(encoder->base.crtc),
19247 pipe_name(pipe));
19248 }
19249
19250 drm_connector_list_iter_begin(dev, &conn_iter);
19251 for_each_intel_connector_iter(connector, &conn_iter) {
19252 if (connector->get_hw_state(connector)) {
19253 struct intel_crtc_state *crtc_state;
19254 struct intel_crtc *crtc;
19255
19256 connector->base.dpms = DRM_MODE_DPMS_ON;
19257
19258 encoder = intel_attached_encoder(connector);
19259 connector->base.encoder = &encoder->base;
19260
19261 crtc = to_intel_crtc(encoder->base.crtc);
19262 crtc_state = crtc ? to_intel_crtc_state(crtc->base.state) : NULL;
19263
19264 if (crtc_state && crtc_state->hw.active) {
19265
19266
19267
19268
19269
19270 crtc_state->uapi.connector_mask |=
19271 drm_connector_mask(&connector->base);
19272 crtc_state->uapi.encoder_mask |=
19273 drm_encoder_mask(&encoder->base);
19274 }
19275 } else {
19276 connector->base.dpms = DRM_MODE_DPMS_OFF;
19277 connector->base.encoder = NULL;
19278 }
19279 drm_dbg_kms(&dev_priv->drm,
19280 "[CONNECTOR:%d:%s] hw state readout: %s\n",
19281 connector->base.base.id, connector->base.name,
19282 enableddisabled(connector->base.encoder));
19283 }
19284 drm_connector_list_iter_end(&conn_iter);
19285
19286 for_each_intel_crtc(dev, crtc) {
19287 struct intel_bw_state *bw_state =
19288 to_intel_bw_state(dev_priv->bw_obj.state);
19289 struct intel_crtc_state *crtc_state =
19290 to_intel_crtc_state(crtc->base.state);
19291 struct intel_plane *plane;
19292 int min_cdclk = 0;
19293
19294 if (crtc_state->bigjoiner_slave)
19295 continue;
19296
19297 if (crtc_state->hw.active) {
19298
19299
19300
19301
19302
19303
19304
19305
19306
19307 crtc_state->inherited = true;
19308
19309 intel_crtc_update_active_timings(crtc_state);
19310
19311 intel_crtc_copy_hw_to_uapi_state(crtc_state);
19312 }
19313
19314 for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) {
19315 const struct intel_plane_state *plane_state =
19316 to_intel_plane_state(plane->base.state);
19317
19318
19319
19320
19321
19322 if (plane_state->uapi.visible)
19323 crtc_state->data_rate[plane->id] =
19324 4 * crtc_state->pixel_rate;
19325
19326
19327
19328
19329 if (plane_state->uapi.visible && plane->min_cdclk) {
19330 if (crtc_state->double_wide ||
19331 INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
19332 crtc_state->min_cdclk[plane->id] =
19333 DIV_ROUND_UP(crtc_state->pixel_rate, 2);
19334 else
19335 crtc_state->min_cdclk[plane->id] =
19336 crtc_state->pixel_rate;
19337 }
19338 drm_dbg_kms(&dev_priv->drm,
19339 "[PLANE:%d:%s] min_cdclk %d kHz\n",
19340 plane->base.base.id, plane->base.name,
19341 crtc_state->min_cdclk[plane->id]);
19342 }
19343
19344 if (crtc_state->hw.active) {
19345 min_cdclk = intel_crtc_compute_min_cdclk(crtc_state);
19346 if (drm_WARN_ON(dev, min_cdclk < 0))
19347 min_cdclk = 0;
19348 }
19349
19350 cdclk_state->min_cdclk[crtc->pipe] = min_cdclk;
19351 cdclk_state->min_voltage_level[crtc->pipe] =
19352 crtc_state->min_voltage_level;
19353
19354 intel_bw_crtc_update(bw_state, crtc_state);
19355
19356 intel_pipe_config_sanity_check(dev_priv, crtc_state);
19357
19358
19359 if (crtc_state->bigjoiner && crtc_state->hw.active) {
19360 struct intel_crtc *slave = crtc_state->bigjoiner_linked_crtc;
19361 struct intel_crtc_state *slave_crtc_state =
19362 to_intel_crtc_state(slave->base.state);
19363
19364 copy_bigjoiner_crtc_state(slave_crtc_state, crtc_state);
19365 slave->base.mode = crtc->base.mode;
19366
19367 cdclk_state->min_cdclk[slave->pipe] = min_cdclk;
19368 cdclk_state->min_voltage_level[slave->pipe] =
19369 crtc_state->min_voltage_level;
19370
19371 for_each_intel_plane_on_crtc(&dev_priv->drm, slave, plane) {
19372 const struct intel_plane_state *plane_state =
19373 to_intel_plane_state(plane->base.state);
19374
19375
19376
19377
19378
19379 if (plane_state->uapi.visible)
19380 crtc_state->data_rate[plane->id] =
19381 4 * crtc_state->pixel_rate;
19382 else
19383 crtc_state->data_rate[plane->id] = 0;
19384 }
19385
19386 intel_bw_crtc_update(bw_state, slave_crtc_state);
19387 drm_calc_timestamping_constants(&slave->base,
19388 &slave_crtc_state->hw.adjusted_mode);
19389 }
19390 }
19391}
19392
19393static void
19394get_encoder_power_domains(struct drm_i915_private *dev_priv)
19395{
19396 struct intel_encoder *encoder;
19397
19398 for_each_intel_encoder(&dev_priv->drm, encoder) {
19399 struct intel_crtc_state *crtc_state;
19400
19401 if (!encoder->get_power_domains)
19402 continue;
19403
19404
19405
19406
19407
19408 if (!encoder->base.crtc)
19409 continue;
19410
19411 crtc_state = to_intel_crtc_state(encoder->base.crtc->state);
19412 encoder->get_power_domains(encoder, crtc_state);
19413 }
19414}
19415
/*
 * Apply early display workarounds that must be in place before any
 * other display hardware access (clock gating and arbiter chicken bits).
 */
static void intel_early_display_was(struct drm_i915_private *dev_priv)
{
	/*
	 * Disable DARBF clock gating on glk and gen10-12.
	 * NOTE(review): presumably a documented display workaround
	 * (WaDisableDARBFClkGating) — confirm against the workaround DB.
	 */
	if (IS_GEN_RANGE(dev_priv, 10, 12) || IS_GEMINILAKE(dev_priv))
		intel_de_write(dev_priv, GEN9_CLKGATE_DIS_0,
			       intel_de_read(dev_priv, GEN9_CLKGATE_DIS_0) | DARBF_GATING_DIS);

	if (IS_HASWELL(dev_priv)) {
		/*
		 * Force the display arbiter to idle the planes on HSW.
		 * NOTE(review): looks like WaRsPkgCStateDisplayPMReq —
		 * verify; the system reportedly hangs without it.
		 */
		intel_de_write(dev_priv, CHICKEN_PAR1_1,
			       intel_de_read(dev_priv, CHICKEN_PAR1_1) | FORCE_ARB_IDLE_PLANES);
	}

	if (IS_KABYLAKE(dev_priv) || IS_COFFEELAKE(dev_priv) || IS_COMETLAKE(dev_priv)) {
		/* Display arbiter fill spare bits for kbl/cfl/cml. */
		intel_de_rmw(dev_priv, CHICKEN_PAR1_1,
			     KBL_ARB_FILL_SPARE_22, KBL_ARB_FILL_SPARE_22);
		intel_de_rmw(dev_priv, CHICKEN_MISC_2,
			     KBL_ARB_FILL_SPARE_13 | KBL_ARB_FILL_SPARE_14,
			     KBL_ARB_FILL_SPARE_14);
	}
}
19444
19445static void ibx_sanitize_pch_hdmi_port(struct drm_i915_private *dev_priv,
19446 enum port port, i915_reg_t hdmi_reg)
19447{
19448 u32 val = intel_de_read(dev_priv, hdmi_reg);
19449
19450 if (val & SDVO_ENABLE ||
19451 (val & SDVO_PIPE_SEL_MASK) == SDVO_PIPE_SEL(PIPE_A))
19452 return;
19453
19454 drm_dbg_kms(&dev_priv->drm,
19455 "Sanitizing transcoder select for HDMI %c\n",
19456 port_name(port));
19457
19458 val &= ~SDVO_PIPE_SEL_MASK;
19459 val |= SDVO_PIPE_SEL(PIPE_A);
19460
19461 intel_de_write(dev_priv, hdmi_reg, val);
19462}
19463
19464static void ibx_sanitize_pch_dp_port(struct drm_i915_private *dev_priv,
19465 enum port port, i915_reg_t dp_reg)
19466{
19467 u32 val = intel_de_read(dev_priv, dp_reg);
19468
19469 if (val & DP_PORT_EN ||
19470 (val & DP_PIPE_SEL_MASK) == DP_PIPE_SEL(PIPE_A))
19471 return;
19472
19473 drm_dbg_kms(&dev_priv->drm,
19474 "Sanitizing transcoder select for DP %c\n",
19475 port_name(port));
19476
19477 val &= ~DP_PIPE_SEL_MASK;
19478 val |= DP_PIPE_SEL(PIPE_A);
19479
19480 intel_de_write(dev_priv, dp_reg, val);
19481}
19482
/*
 * Sanitize the transcoder select bits on all disabled IBX PCH ports,
 * pointing them back at pipe A. The BIOS may leave another pipe
 * selected even on ports it never enabled, which would confuse later
 * state readout/checks.
 */
static void ibx_sanitize_pch_ports(struct drm_i915_private *dev_priv)
{
	/*
	 * DP ports first; PCH SDVO/HDMI B shares the register with SDVOB,
	 * so the HDMI writes are kept in a separate pass below.
	 * NOTE(review): the DP-before-HDMI ordering is preserved as found —
	 * confirm whether it is required by the hardware.
	 */
	ibx_sanitize_pch_dp_port(dev_priv, PORT_B, PCH_DP_B);
	ibx_sanitize_pch_dp_port(dev_priv, PORT_C, PCH_DP_C);
	ibx_sanitize_pch_dp_port(dev_priv, PORT_D, PCH_DP_D);

	/* HDMI ports (PCH_HDMIB is multiplexed with the SDVO register). */
	ibx_sanitize_pch_hdmi_port(dev_priv, PORT_B, PCH_HDMIB);
	ibx_sanitize_pch_hdmi_port(dev_priv, PORT_C, PCH_HDMIC);
	ibx_sanitize_pch_hdmi_port(dev_priv, PORT_D, PCH_HDMID);
}
19505
19506
19507
19508
/*
 * Read out the current display hardware state and sanitize it so the
 * software state matches reality. Called at driver load and on resume.
 * The ordering of the steps below is deliberate: readout first, then
 * per-object sanitization, then derived state (watermarks, power domains).
 */
static void
intel_modeset_setup_hw_state(struct drm_device *dev,
			     struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_encoder *encoder;
	struct intel_crtc *crtc;
	intel_wakeref_t wakeref;

	/* Keep everything powered while we poke at registers. */
	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_INIT);

	intel_early_display_was(dev_priv);
	intel_modeset_readout_hw_state(dev);

	/*
	 * Sanitize Type-C ports before anything else touches them;
	 * MST ports are handled through their primary encoder.
	 */
	for_each_intel_encoder(dev, encoder) {
		enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

		/* Skip DP-MST stream encoders; only real TC PHYs need this. */
		if (encoder->type != INTEL_OUTPUT_DP_MST &&
		    intel_phy_is_tc(dev_priv, phy))
			intel_tc_port_sanitize(enc_to_dig_port(encoder));
	}

	get_encoder_power_domains(dev_priv);

	if (HAS_PCH_IBX(dev_priv))
		ibx_sanitize_pch_ports(dev_priv);

	/*
	 * Re-enable vblank interrupts for active pipes before the
	 * sanitize step, which may need working vblanks.
	 */
	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		drm_crtc_vblank_reset(&crtc->base);

		if (crtc_state->hw.active)
			intel_crtc_vblank_on(crtc_state);
	}

	intel_sanitize_plane_mapping(dev_priv);

	/* Encoders before crtcs: encoder sanitize may disable a pipe. */
	for_each_intel_encoder(dev, encoder)
		intel_sanitize_encoder(encoder);

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		intel_sanitize_crtc(crtc, ctx);
		intel_dump_pipe_config(crtc_state, NULL, "[setup_hw_state]");
	}

	intel_modeset_update_connector_atomic_state(dev);

	intel_dpll_sanitize_state(dev_priv);

	/* Watermark state readout/sanitize, per platform family. */
	if (IS_G4X(dev_priv)) {
		g4x_wm_get_hw_state(dev_priv);
		g4x_wm_sanitize(dev_priv);
	} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		vlv_wm_get_hw_state(dev_priv);
		vlv_wm_sanitize(dev_priv);
	} else if (INTEL_GEN(dev_priv) >= 9) {
		skl_wm_get_hw_state(dev_priv);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		ilk_wm_get_hw_state(dev_priv);
	}

	/*
	 * After sanitizing, no crtc should still hold extra power
	 * domain references; WARN and drop any that remain.
	 */
	for_each_intel_crtc(dev, crtc) {
		struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);
		u64 put_domains;

		put_domains = modeset_get_crtc_power_domains(crtc_state);
		if (drm_WARN_ON(dev, put_domains))
			modeset_put_power_domains(dev_priv, put_domains);
	}

	intel_display_power_put(dev_priv, POWER_DOMAIN_INIT, wakeref);
}
19595
/*
 * Restore the display state saved at suspend time. Takes all modeset
 * locks (retrying on deadlock) and replays the saved atomic state, or
 * logs an error if the restore fails.
 */
void intel_display_resume(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_atomic_state *state = dev_priv->modeset_restore_state;
	struct drm_modeset_acquire_ctx ctx;
	int ret;

	/* Consume the saved state; it is restored at most once. */
	dev_priv->modeset_restore_state = NULL;
	if (state)
		state->acquire_ctx = &ctx;

	drm_modeset_acquire_init(&ctx, 0);

	/* Standard w/w locking dance: back off and retry on -EDEADLK. */
	while (1) {
		ret = drm_modeset_lock_all_ctx(dev, &ctx);
		if (ret != -EDEADLK)
			break;

		drm_modeset_backoff(&ctx);
	}

	if (!ret)
		ret = __intel_display_resume(dev, state, &ctx);

	intel_enable_ipc(dev_priv);
	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);

	if (ret)
		drm_err(&dev_priv->drm,
			"Restoring old state failed with %i\n", ret);
	if (state)
		drm_atomic_state_put(state);
}
19630
19631static void intel_hpd_poll_fini(struct drm_i915_private *i915)
19632{
19633 struct intel_connector *connector;
19634 struct drm_connector_list_iter conn_iter;
19635
19636
19637 drm_connector_list_iter_begin(&i915->drm, &conn_iter);
19638 for_each_intel_connector_iter(connector, &conn_iter) {
19639 if (connector->modeset_retry_work.func)
19640 cancel_work_sync(&connector->modeset_retry_work);
19641 if (connector->hdcp.shim) {
19642 cancel_delayed_work_sync(&connector->hdcp.check_work);
19643 cancel_work_sync(&connector->hdcp.prop_work);
19644 }
19645 }
19646 drm_connector_list_iter_end(&conn_iter);
19647}
19648
19649
/* part #1: call before irq uninstall — flush pending async work. */
void intel_modeset_driver_remove(struct drm_i915_private *i915)
{
	flush_workqueue(i915->flip_wq);
	flush_workqueue(i915->modeset_wq);

	/* All freeing must be complete; the free list should be drained. */
	flush_work(&i915->atomic_helper.free_work);
	drm_WARN_ON(&i915->drm, !llist_empty(&i915->atomic_helper.free_list));
}
19658
19659
/*
 * part #2: call after irq uninstall — tear down display state that must
 * not race with interrupts. The ordering below is load-bearing; do not
 * reorder without checking each dependency.
 */
void intel_modeset_driver_remove_noirq(struct drm_i915_private *i915)
{
	/*
	 * Due to the hpd irq storm handling the hotplug work can re-arm the
	 * poll handlers. Hence disable polling after hpd handling is shut
	 * down.
	 */
	intel_hpd_poll_fini(i915);

	/*
	 * Suspend the MST topology before tearing down consumers of it.
	 * NOTE(review): presumably must precede fbdev teardown since MST
	 * connectors can back the fbdev console — confirm.
	 */
	intel_dp_mst_suspend(i915);

	/* poll work can call into fbdev, hence clean that up afterwards */
	intel_fbdev_fini(i915);

	intel_unregister_dsm_handler();

	intel_fbc_global_disable(i915);

	/* flush any delayed tasks or pending work */
	flush_scheduled_work();

	intel_hdcp_component_fini(i915);

	intel_mode_config_cleanup(i915);

	intel_overlay_cleanup(i915);

	intel_gmbus_teardown(i915);

	/* Workqueues were flushed in part #1; now destroy them. */
	destroy_workqueue(i915->flip_wq);
	destroy_workqueue(i915->modeset_wq);

	intel_fbc_cleanup_cfb(i915);
}
19698
19699
/* part #3: call after GEM init, during unload — firmware/power/VGA/BIOS. */
void intel_modeset_driver_remove_nogem(struct drm_i915_private *i915)
{
	intel_csr_ucode_fini(i915);

	intel_power_domains_driver_remove(i915);

	intel_vga_unregister(i915);

	intel_bios_driver_remove(i915);
}
19710
19711#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)
19712
/*
 * Snapshot of key display registers, captured on GPU error for
 * inclusion in the error state dump.
 */
struct intel_display_error_state {

	u32 power_well_driver;		/* HSW_PWR_WELL_CTL2 (hsw/bdw only) */

	struct intel_cursor_error_state {
		u32 control;		/* CURCNTR */
		u32 position;		/* CURPOS */
		u32 base;		/* CURBASE */
		u32 size;		/* NOTE(review): never filled in by capture — confirm intent */
	} cursor[I915_MAX_PIPES];

	struct intel_pipe_error_state {
		bool power_domain_on;	/* pipe power domain was enabled at capture */
		u32 source;		/* PIPESRC */
		u32 stat;		/* PIPESTAT (GMCH platforms only) */
	} pipe[I915_MAX_PIPES];

	struct intel_plane_error_state {
		u32 control;		/* DSPCNTR */
		u32 stride;		/* DSPSTRIDE */
		u32 size;		/* DSPSIZE (gen <= 3) */
		u32 pos;		/* DSPPOS (gen <= 3) */
		u32 addr;		/* DSPADDR (gen <= 7, !hsw) */
		u32 surface;		/* DSPSURF (gen >= 4) */
		u32 tile_offset;	/* DSPTILEOFF (gen >= 4) */
	} plane[I915_MAX_PIPES];

	struct intel_transcoder_error_state {
		bool available;		/* transcoder exists on this platform */
		bool power_domain_on;	/* transcoder power domain was enabled */
		enum transcoder cpu_transcoder;

		u32 conf;		/* PIPECONF */

		/* transcoder timing registers */
		u32 htotal;
		u32 hblank;
		u32 hsync;
		u32 vtotal;
		u32 vblank;
		u32 vsync;
	} transcoder[5];		/* A, B, C, D, EDP — see capture */
};
19755
/*
 * Capture a snapshot of the display registers for the error state dump.
 * Skips registers whose power domain is off (reading them would be
 * invalid or could hang). Returns NULL on no-display platforms or if
 * the atomic allocation fails; caller owns the returned buffer.
 */
struct intel_display_error_state *
intel_display_capture_error_state(struct drm_i915_private *dev_priv)
{
	struct intel_display_error_state *error;
	int transcoders[] = {
		TRANSCODER_A,
		TRANSCODER_B,
		TRANSCODER_C,
		TRANSCODER_D,
		TRANSCODER_EDP,
	};
	int i;

	/* the table above must stay in sync with error->transcoder[] */
	BUILD_BUG_ON(ARRAY_SIZE(transcoders) != ARRAY_SIZE(error->transcoder));

	if (!HAS_DISPLAY(dev_priv))
		return NULL;

	/* GFP_ATOMIC: may be called from the error-capture path */
	error = kzalloc(sizeof(*error), GFP_ATOMIC);
	if (error == NULL)
		return NULL;

	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
		error->power_well_driver = intel_de_read(dev_priv,
							 HSW_PWR_WELL_CTL2);

	for_each_pipe(dev_priv, i) {
		error->pipe[i].power_domain_on =
			__intel_display_power_is_enabled(dev_priv,
							 POWER_DOMAIN_PIPE(i));
		/* don't touch registers of a powered-down pipe */
		if (!error->pipe[i].power_domain_on)
			continue;

		error->cursor[i].control = intel_de_read(dev_priv, CURCNTR(i));
		error->cursor[i].position = intel_de_read(dev_priv, CURPOS(i));
		error->cursor[i].base = intel_de_read(dev_priv, CURBASE(i));

		error->plane[i].control = intel_de_read(dev_priv, DSPCNTR(i));
		error->plane[i].stride = intel_de_read(dev_priv, DSPSTRIDE(i));
		if (INTEL_GEN(dev_priv) <= 3) {
			error->plane[i].size = intel_de_read(dev_priv,
							     DSPSIZE(i));
			error->plane[i].pos = intel_de_read(dev_priv,
							    DSPPOS(i));
		}
		if (INTEL_GEN(dev_priv) <= 7 && !IS_HASWELL(dev_priv))
			error->plane[i].addr = intel_de_read(dev_priv,
							     DSPADDR(i));
		if (INTEL_GEN(dev_priv) >= 4) {
			error->plane[i].surface = intel_de_read(dev_priv,
								DSPSURF(i));
			error->plane[i].tile_offset = intel_de_read(dev_priv,
								    DSPTILEOFF(i));
		}

		error->pipe[i].source = intel_de_read(dev_priv, PIPESRC(i));

		if (HAS_GMCH(dev_priv))
			error->pipe[i].stat = intel_de_read(dev_priv,
							    PIPESTAT(i));
	}

	for (i = 0; i < ARRAY_SIZE(error->transcoder); i++) {
		enum transcoder cpu_transcoder = transcoders[i];

		if (!HAS_TRANSCODER(dev_priv, cpu_transcoder))
			continue;

		error->transcoder[i].available = true;
		error->transcoder[i].power_domain_on =
			__intel_display_power_is_enabled(dev_priv,
				POWER_DOMAIN_TRANSCODER(cpu_transcoder));
		/* don't touch registers of a powered-down transcoder */
		if (!error->transcoder[i].power_domain_on)
			continue;

		error->transcoder[i].cpu_transcoder = cpu_transcoder;

		error->transcoder[i].conf = intel_de_read(dev_priv,
							  PIPECONF(cpu_transcoder));
		error->transcoder[i].htotal = intel_de_read(dev_priv,
							    HTOTAL(cpu_transcoder));
		error->transcoder[i].hblank = intel_de_read(dev_priv,
							    HBLANK(cpu_transcoder));
		error->transcoder[i].hsync = intel_de_read(dev_priv,
							   HSYNC(cpu_transcoder));
		error->transcoder[i].vtotal = intel_de_read(dev_priv,
							    VTOTAL(cpu_transcoder));
		error->transcoder[i].vblank = intel_de_read(dev_priv,
							    VBLANK(cpu_transcoder));
		error->transcoder[i].vsync = intel_de_read(dev_priv,
							   VSYNC(cpu_transcoder));
	}

	return error;
}
19851
/* shorthand for printing into the error state buffer */
#define err_printf(e, ...) i915_error_printf(e, __VA_ARGS__)

/*
 * Format a previously captured intel_display_error_state into the error
 * state buffer. Safe to call with error == NULL (no-op). Only fields
 * actually captured for this platform/gen are printed.
 */
void
intel_display_print_error_state(struct drm_i915_error_state_buf *m,
				struct intel_display_error_state *error)
{
	struct drm_i915_private *dev_priv = m->i915;
	int i;

	if (!error)
		return;

	err_printf(m, "Num Pipes: %d\n", INTEL_NUM_PIPES(dev_priv));
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
		err_printf(m, "PWR_WELL_CTL2: %08x\n",
			   error->power_well_driver);
	for_each_pipe(dev_priv, i) {
		err_printf(m, "Pipe [%d]:\n", i);
		err_printf(m, "  Power: %s\n",
			   onoff(error->pipe[i].power_domain_on));
		err_printf(m, "  SRC: %08x\n", error->pipe[i].source);
		err_printf(m, "  STAT: %08x\n", error->pipe[i].stat);

		err_printf(m, "Plane [%d]:\n", i);
		err_printf(m, "  CNTR: %08x\n", error->plane[i].control);
		err_printf(m, "  STRIDE: %08x\n", error->plane[i].stride);
		/* gate per-gen fields exactly as the capture side does */
		if (INTEL_GEN(dev_priv) <= 3) {
			err_printf(m, "  SIZE: %08x\n", error->plane[i].size);
			err_printf(m, "  POS: %08x\n", error->plane[i].pos);
		}
		if (INTEL_GEN(dev_priv) <= 7 && !IS_HASWELL(dev_priv))
			err_printf(m, "  ADDR: %08x\n", error->plane[i].addr);
		if (INTEL_GEN(dev_priv) >= 4) {
			err_printf(m, "  SURF: %08x\n", error->plane[i].surface);
			err_printf(m, "  TILEOFF: %08x\n", error->plane[i].tile_offset);
		}

		err_printf(m, "Cursor [%d]:\n", i);
		err_printf(m, "  CNTR: %08x\n", error->cursor[i].control);
		err_printf(m, "  POS: %08x\n", error->cursor[i].position);
		err_printf(m, "  BASE: %08x\n", error->cursor[i].base);
	}

	for (i = 0; i < ARRAY_SIZE(error->transcoder); i++) {
		/* skip transcoders that don't exist on this platform */
		if (!error->transcoder[i].available)
			continue;

		err_printf(m, "CPU transcoder: %s\n",
			   transcoder_name(error->transcoder[i].cpu_transcoder));
		err_printf(m, "  Power: %s\n",
			   onoff(error->transcoder[i].power_domain_on));
		err_printf(m, "  CONF: %08x\n", error->transcoder[i].conf);
		err_printf(m, "  HTOTAL: %08x\n", error->transcoder[i].htotal);
		err_printf(m, "  HBLANK: %08x\n", error->transcoder[i].hblank);
		err_printf(m, "  HSYNC: %08x\n", error->transcoder[i].hsync);
		err_printf(m, "  VTOTAL: %08x\n", error->transcoder[i].vtotal);
		err_printf(m, "  VBLANK: %08x\n", error->transcoder[i].vblank);
		err_printf(m, "  VSYNC: %08x\n", error->transcoder[i].vsync);
	}
}
19912
19913#endif
19914