#include <linux/cpufreq.h>
#include <linux/module.h>
#include <linux/input.h>
#include <linux/i2c.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/vgaarb.h>
#include <drm/drm_edid.h>
#include "drmP.h"
#include "intel_drv.h"
#include "i915_drm.h"
#include "i915_drv.h"
#include "i915_trace.h"
#include "drm_dp_helper.h"
#include "drm_crtc_helper.h"
#include <linux/dma_remapping.h>

#define HAS_eDP (intel_pipe_has_type(crtc, INTEL_OUTPUT_EDP))

bool intel_pipe_has_type(struct drm_crtc *crtc, int type);
static void intel_update_watermarks(struct drm_device *dev);
static void intel_increase_pllclock(struct drm_crtc *crtc);
static void intel_crtc_update_cursor(struct drm_crtc *crtc, bool on);
typedef struct {
	/* given values */
	int n;
	int m1, m2;
	int p1, p2;
	/* derived values */
	int dot;
	int vco;
	int m;
	int p;
} intel_clock_t;

typedef struct {
	int min, max;
} intel_range_t;

typedef struct {
	int dot_limit;
	int p2_slow, p2_fast;
} intel_p2_t;

#define INTEL_P2_NUM 2
typedef struct intel_limit intel_limit_t;
struct intel_limit {
	intel_range_t dot, vco, n, m, m1, m2, p, p1;
	intel_p2_t p2;
	bool (*find_pll)(const intel_limit_t *, struct drm_crtc *,
			 int, int, intel_clock_t *);
};
80
81
82#define IRONLAKE_FDI_FREQ 2700000
83
84static bool
85intel_find_best_PLL(const intel_limit_t *limit, struct drm_crtc *crtc,
86 int target, int refclk, intel_clock_t *best_clock);
87static bool
88intel_g4x_find_best_PLL(const intel_limit_t *limit, struct drm_crtc *crtc,
89 int target, int refclk, intel_clock_t *best_clock);
90
91static bool
92intel_find_pll_g4x_dp(const intel_limit_t *, struct drm_crtc *crtc,
93 int target, int refclk, intel_clock_t *best_clock);
94static bool
95intel_find_pll_ironlake_dp(const intel_limit_t *, struct drm_crtc *crtc,
96 int target, int refclk, intel_clock_t *best_clock);
97
98static inline u32
99intel_fdi_link_freq(struct drm_device *dev)
100{
101 if (IS_GEN5(dev)) {
102 struct drm_i915_private *dev_priv = dev->dev_private;
103 return (I915_READ(FDI_PLL_BIOS_0) & FDI_PLL_FB_CLOCK_MASK) + 2;
104 } else
105 return 27;
106}
107
108static const intel_limit_t intel_limits_i8xx_dvo = {
109 .dot = { .min = 25000, .max = 350000 },
110 .vco = { .min = 930000, .max = 1400000 },
111 .n = { .min = 3, .max = 16 },
112 .m = { .min = 96, .max = 140 },
113 .m1 = { .min = 18, .max = 26 },
114 .m2 = { .min = 6, .max = 16 },
115 .p = { .min = 4, .max = 128 },
116 .p1 = { .min = 2, .max = 33 },
117 .p2 = { .dot_limit = 165000,
118 .p2_slow = 4, .p2_fast = 2 },
119 .find_pll = intel_find_best_PLL,
120};
121
122static const intel_limit_t intel_limits_i8xx_lvds = {
123 .dot = { .min = 25000, .max = 350000 },
124 .vco = { .min = 930000, .max = 1400000 },
125 .n = { .min = 3, .max = 16 },
126 .m = { .min = 96, .max = 140 },
127 .m1 = { .min = 18, .max = 26 },
128 .m2 = { .min = 6, .max = 16 },
129 .p = { .min = 4, .max = 128 },
130 .p1 = { .min = 1, .max = 6 },
131 .p2 = { .dot_limit = 165000,
132 .p2_slow = 14, .p2_fast = 7 },
133 .find_pll = intel_find_best_PLL,
134};
135
136static const intel_limit_t intel_limits_i9xx_sdvo = {
137 .dot = { .min = 20000, .max = 400000 },
138 .vco = { .min = 1400000, .max = 2800000 },
139 .n = { .min = 1, .max = 6 },
140 .m = { .min = 70, .max = 120 },
141 .m1 = { .min = 10, .max = 22 },
142 .m2 = { .min = 5, .max = 9 },
143 .p = { .min = 5, .max = 80 },
144 .p1 = { .min = 1, .max = 8 },
145 .p2 = { .dot_limit = 200000,
146 .p2_slow = 10, .p2_fast = 5 },
147 .find_pll = intel_find_best_PLL,
148};
149
150static const intel_limit_t intel_limits_i9xx_lvds = {
151 .dot = { .min = 20000, .max = 400000 },
152 .vco = { .min = 1400000, .max = 2800000 },
153 .n = { .min = 1, .max = 6 },
154 .m = { .min = 70, .max = 120 },
155 .m1 = { .min = 10, .max = 22 },
156 .m2 = { .min = 5, .max = 9 },
157 .p = { .min = 7, .max = 98 },
158 .p1 = { .min = 1, .max = 8 },
159 .p2 = { .dot_limit = 112000,
160 .p2_slow = 14, .p2_fast = 7 },
161 .find_pll = intel_find_best_PLL,
162};
163
164
165static const intel_limit_t intel_limits_g4x_sdvo = {
166 .dot = { .min = 25000, .max = 270000 },
167 .vco = { .min = 1750000, .max = 3500000},
168 .n = { .min = 1, .max = 4 },
169 .m = { .min = 104, .max = 138 },
170 .m1 = { .min = 17, .max = 23 },
171 .m2 = { .min = 5, .max = 11 },
172 .p = { .min = 10, .max = 30 },
173 .p1 = { .min = 1, .max = 3},
174 .p2 = { .dot_limit = 270000,
175 .p2_slow = 10,
176 .p2_fast = 10
177 },
178 .find_pll = intel_g4x_find_best_PLL,
179};
180
181static const intel_limit_t intel_limits_g4x_hdmi = {
182 .dot = { .min = 22000, .max = 400000 },
183 .vco = { .min = 1750000, .max = 3500000},
184 .n = { .min = 1, .max = 4 },
185 .m = { .min = 104, .max = 138 },
186 .m1 = { .min = 16, .max = 23 },
187 .m2 = { .min = 5, .max = 11 },
188 .p = { .min = 5, .max = 80 },
189 .p1 = { .min = 1, .max = 8},
190 .p2 = { .dot_limit = 165000,
191 .p2_slow = 10, .p2_fast = 5 },
192 .find_pll = intel_g4x_find_best_PLL,
193};
194
195static const intel_limit_t intel_limits_g4x_single_channel_lvds = {
196 .dot = { .min = 20000, .max = 115000 },
197 .vco = { .min = 1750000, .max = 3500000 },
198 .n = { .min = 1, .max = 3 },
199 .m = { .min = 104, .max = 138 },
200 .m1 = { .min = 17, .max = 23 },
201 .m2 = { .min = 5, .max = 11 },
202 .p = { .min = 28, .max = 112 },
203 .p1 = { .min = 2, .max = 8 },
204 .p2 = { .dot_limit = 0,
205 .p2_slow = 14, .p2_fast = 14
206 },
207 .find_pll = intel_g4x_find_best_PLL,
208};
209
210static const intel_limit_t intel_limits_g4x_dual_channel_lvds = {
211 .dot = { .min = 80000, .max = 224000 },
212 .vco = { .min = 1750000, .max = 3500000 },
213 .n = { .min = 1, .max = 3 },
214 .m = { .min = 104, .max = 138 },
215 .m1 = { .min = 17, .max = 23 },
216 .m2 = { .min = 5, .max = 11 },
217 .p = { .min = 14, .max = 42 },
218 .p1 = { .min = 2, .max = 6 },
219 .p2 = { .dot_limit = 0,
220 .p2_slow = 7, .p2_fast = 7
221 },
222 .find_pll = intel_g4x_find_best_PLL,
223};
224
225static const intel_limit_t intel_limits_g4x_display_port = {
226 .dot = { .min = 161670, .max = 227000 },
227 .vco = { .min = 1750000, .max = 3500000},
228 .n = { .min = 1, .max = 2 },
229 .m = { .min = 97, .max = 108 },
230 .m1 = { .min = 0x10, .max = 0x12 },
231 .m2 = { .min = 0x05, .max = 0x06 },
232 .p = { .min = 10, .max = 20 },
233 .p1 = { .min = 1, .max = 2},
234 .p2 = { .dot_limit = 0,
235 .p2_slow = 10, .p2_fast = 10 },
236 .find_pll = intel_find_pll_g4x_dp,
237};
238
239static const intel_limit_t intel_limits_pineview_sdvo = {
240 .dot = { .min = 20000, .max = 400000},
241 .vco = { .min = 1700000, .max = 3500000 },
242
243 .n = { .min = 3, .max = 6 },
244 .m = { .min = 2, .max = 256 },
245
246 .m1 = { .min = 0, .max = 0 },
247 .m2 = { .min = 0, .max = 254 },
248 .p = { .min = 5, .max = 80 },
249 .p1 = { .min = 1, .max = 8 },
250 .p2 = { .dot_limit = 200000,
251 .p2_slow = 10, .p2_fast = 5 },
252 .find_pll = intel_find_best_PLL,
253};
254
255static const intel_limit_t intel_limits_pineview_lvds = {
256 .dot = { .min = 20000, .max = 400000 },
257 .vco = { .min = 1700000, .max = 3500000 },
258 .n = { .min = 3, .max = 6 },
259 .m = { .min = 2, .max = 256 },
260 .m1 = { .min = 0, .max = 0 },
261 .m2 = { .min = 0, .max = 254 },
262 .p = { .min = 7, .max = 112 },
263 .p1 = { .min = 1, .max = 8 },
264 .p2 = { .dot_limit = 112000,
265 .p2_slow = 14, .p2_fast = 14 },
266 .find_pll = intel_find_best_PLL,
267};
268
269
270
271
272
273
274static const intel_limit_t intel_limits_ironlake_dac = {
275 .dot = { .min = 25000, .max = 350000 },
276 .vco = { .min = 1760000, .max = 3510000 },
277 .n = { .min = 1, .max = 5 },
278 .m = { .min = 79, .max = 127 },
279 .m1 = { .min = 12, .max = 22 },
280 .m2 = { .min = 5, .max = 9 },
281 .p = { .min = 5, .max = 80 },
282 .p1 = { .min = 1, .max = 8 },
283 .p2 = { .dot_limit = 225000,
284 .p2_slow = 10, .p2_fast = 5 },
285 .find_pll = intel_g4x_find_best_PLL,
286};
287
288static const intel_limit_t intel_limits_ironlake_single_lvds = {
289 .dot = { .min = 25000, .max = 350000 },
290 .vco = { .min = 1760000, .max = 3510000 },
291 .n = { .min = 1, .max = 3 },
292 .m = { .min = 79, .max = 118 },
293 .m1 = { .min = 12, .max = 22 },
294 .m2 = { .min = 5, .max = 9 },
295 .p = { .min = 28, .max = 112 },
296 .p1 = { .min = 2, .max = 8 },
297 .p2 = { .dot_limit = 225000,
298 .p2_slow = 14, .p2_fast = 14 },
299 .find_pll = intel_g4x_find_best_PLL,
300};
301
302static const intel_limit_t intel_limits_ironlake_dual_lvds = {
303 .dot = { .min = 25000, .max = 350000 },
304 .vco = { .min = 1760000, .max = 3510000 },
305 .n = { .min = 1, .max = 3 },
306 .m = { .min = 79, .max = 127 },
307 .m1 = { .min = 12, .max = 22 },
308 .m2 = { .min = 5, .max = 9 },
309 .p = { .min = 14, .max = 56 },
310 .p1 = { .min = 2, .max = 8 },
311 .p2 = { .dot_limit = 225000,
312 .p2_slow = 7, .p2_fast = 7 },
313 .find_pll = intel_g4x_find_best_PLL,
314};
315
316
317static const intel_limit_t intel_limits_ironlake_single_lvds_100m = {
318 .dot = { .min = 25000, .max = 350000 },
319 .vco = { .min = 1760000, .max = 3510000 },
320 .n = { .min = 1, .max = 2 },
321 .m = { .min = 79, .max = 126 },
322 .m1 = { .min = 12, .max = 22 },
323 .m2 = { .min = 5, .max = 9 },
324 .p = { .min = 28, .max = 112 },
325 .p1 = { .min = 2, .max = 8 },
326 .p2 = { .dot_limit = 225000,
327 .p2_slow = 14, .p2_fast = 14 },
328 .find_pll = intel_g4x_find_best_PLL,
329};
330
331static const intel_limit_t intel_limits_ironlake_dual_lvds_100m = {
332 .dot = { .min = 25000, .max = 350000 },
333 .vco = { .min = 1760000, .max = 3510000 },
334 .n = { .min = 1, .max = 3 },
335 .m = { .min = 79, .max = 126 },
336 .m1 = { .min = 12, .max = 22 },
337 .m2 = { .min = 5, .max = 9 },
338 .p = { .min = 14, .max = 42 },
339 .p1 = { .min = 2, .max = 6 },
340 .p2 = { .dot_limit = 225000,
341 .p2_slow = 7, .p2_fast = 7 },
342 .find_pll = intel_g4x_find_best_PLL,
343};
344
345static const intel_limit_t intel_limits_ironlake_display_port = {
346 .dot = { .min = 25000, .max = 350000 },
347 .vco = { .min = 1760000, .max = 3510000},
348 .n = { .min = 1, .max = 2 },
349 .m = { .min = 81, .max = 90 },
350 .m1 = { .min = 12, .max = 22 },
351 .m2 = { .min = 5, .max = 9 },
352 .p = { .min = 10, .max = 20 },
353 .p1 = { .min = 1, .max = 2},
354 .p2 = { .dot_limit = 0,
355 .p2_slow = 10, .p2_fast = 10 },
356 .find_pll = intel_find_pll_ironlake_dp,
357};
358
359static const intel_limit_t *intel_ironlake_limit(struct drm_crtc *crtc,
360 int refclk)
361{
362 struct drm_device *dev = crtc->dev;
363 struct drm_i915_private *dev_priv = dev->dev_private;
364 const intel_limit_t *limit;
365
366 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)) {
367 if ((I915_READ(PCH_LVDS) & LVDS_CLKB_POWER_MASK) ==
368 LVDS_CLKB_POWER_UP) {
369
370 if (refclk == 100000)
371 limit = &intel_limits_ironlake_dual_lvds_100m;
372 else
373 limit = &intel_limits_ironlake_dual_lvds;
374 } else {
375 if (refclk == 100000)
376 limit = &intel_limits_ironlake_single_lvds_100m;
377 else
378 limit = &intel_limits_ironlake_single_lvds;
379 }
380 } else if (intel_pipe_has_type(crtc, INTEL_OUTPUT_DISPLAYPORT) ||
381 HAS_eDP)
382 limit = &intel_limits_ironlake_display_port;
383 else
384 limit = &intel_limits_ironlake_dac;
385
386 return limit;
387}
388
389static const intel_limit_t *intel_g4x_limit(struct drm_crtc *crtc)
390{
391 struct drm_device *dev = crtc->dev;
392 struct drm_i915_private *dev_priv = dev->dev_private;
393 const intel_limit_t *limit;
394
395 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)) {
396 if ((I915_READ(LVDS) & LVDS_CLKB_POWER_MASK) ==
397 LVDS_CLKB_POWER_UP)
398
399 limit = &intel_limits_g4x_dual_channel_lvds;
400 else
401
402 limit = &intel_limits_g4x_single_channel_lvds;
403 } else if (intel_pipe_has_type(crtc, INTEL_OUTPUT_HDMI) ||
404 intel_pipe_has_type(crtc, INTEL_OUTPUT_ANALOG)) {
405 limit = &intel_limits_g4x_hdmi;
406 } else if (intel_pipe_has_type(crtc, INTEL_OUTPUT_SDVO)) {
407 limit = &intel_limits_g4x_sdvo;
408 } else if (intel_pipe_has_type(crtc, INTEL_OUTPUT_DISPLAYPORT)) {
409 limit = &intel_limits_g4x_display_port;
410 } else
411 limit = &intel_limits_i9xx_sdvo;
412
413 return limit;
414}
415
416static const intel_limit_t *intel_limit(struct drm_crtc *crtc, int refclk)
417{
418 struct drm_device *dev = crtc->dev;
419 const intel_limit_t *limit;
420
421 if (HAS_PCH_SPLIT(dev))
422 limit = intel_ironlake_limit(crtc, refclk);
423 else if (IS_G4X(dev)) {
424 limit = intel_g4x_limit(crtc);
425 } else if (IS_PINEVIEW(dev)) {
426 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS))
427 limit = &intel_limits_pineview_lvds;
428 else
429 limit = &intel_limits_pineview_sdvo;
430 } else if (!IS_GEN2(dev)) {
431 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS))
432 limit = &intel_limits_i9xx_lvds;
433 else
434 limit = &intel_limits_i9xx_sdvo;
435 } else {
436 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS))
437 limit = &intel_limits_i8xx_lvds;
438 else
439 limit = &intel_limits_i8xx_dvo;
440 }
441 return limit;
442}
443
444
445static void pineview_clock(int refclk, intel_clock_t *clock)
446{
447 clock->m = clock->m2 + 2;
448 clock->p = clock->p1 * clock->p2;
449 clock->vco = refclk * clock->m / clock->n;
450 clock->dot = clock->vco / clock->p;
451}
452
453static void intel_clock(struct drm_device *dev, int refclk, intel_clock_t *clock)
454{
455 if (IS_PINEVIEW(dev)) {
456 pineview_clock(refclk, clock);
457 return;
458 }
459 clock->m = 5 * (clock->m1 + 2) + (clock->m2 + 2);
460 clock->p = clock->p1 * clock->p2;
461 clock->vco = refclk * clock->m / (clock->n + 2);
462 clock->dot = clock->vco / clock->p;
463}
464
465
466
467
468bool intel_pipe_has_type(struct drm_crtc *crtc, int type)
469{
470 struct drm_device *dev = crtc->dev;
471 struct drm_mode_config *mode_config = &dev->mode_config;
472 struct intel_encoder *encoder;
473
474 list_for_each_entry(encoder, &mode_config->encoder_list, base.head)
475 if (encoder->base.crtc == crtc && encoder->type == type)
476 return true;
477
478 return false;
479}
480
481#define INTELPllInvalid(s) do { return false; } while (0)
482
483
484
485
486
487static bool intel_PLL_is_valid(struct drm_device *dev,
488 const intel_limit_t *limit,
489 const intel_clock_t *clock)
490{
491 if (clock->p1 < limit->p1.min || limit->p1.max < clock->p1)
492 INTELPllInvalid("p1 out of range\n");
493 if (clock->p < limit->p.min || limit->p.max < clock->p)
494 INTELPllInvalid("p out of range\n");
495 if (clock->m2 < limit->m2.min || limit->m2.max < clock->m2)
496 INTELPllInvalid("m2 out of range\n");
497 if (clock->m1 < limit->m1.min || limit->m1.max < clock->m1)
498 INTELPllInvalid("m1 out of range\n");
499 if (clock->m1 <= clock->m2 && !IS_PINEVIEW(dev))
500 INTELPllInvalid("m1 <= m2\n");
501 if (clock->m < limit->m.min || limit->m.max < clock->m)
502 INTELPllInvalid("m out of range\n");
503 if (clock->n < limit->n.min || limit->n.max < clock->n)
504 INTELPllInvalid("n out of range\n");
505 if (clock->vco < limit->vco.min || limit->vco.max < clock->vco)
506 INTELPllInvalid("vco out of range\n");
507
508
509
510 if (clock->dot < limit->dot.min || limit->dot.max < clock->dot)
511 INTELPllInvalid("dot out of range\n");
512
513 return true;
514}
515
516static bool
517intel_find_best_PLL(const intel_limit_t *limit, struct drm_crtc *crtc,
518 int target, int refclk, intel_clock_t *best_clock)
519
520{
521 struct drm_device *dev = crtc->dev;
522 struct drm_i915_private *dev_priv = dev->dev_private;
523 intel_clock_t clock;
524 int err = target;
525
526 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS) &&
527 (I915_READ(LVDS)) != 0) {
528
529
530
531
532
533
534 if ((I915_READ(LVDS) & LVDS_CLKB_POWER_MASK) ==
535 LVDS_CLKB_POWER_UP)
536 clock.p2 = limit->p2.p2_fast;
537 else
538 clock.p2 = limit->p2.p2_slow;
539 } else {
540 if (target < limit->p2.dot_limit)
541 clock.p2 = limit->p2.p2_slow;
542 else
543 clock.p2 = limit->p2.p2_fast;
544 }
545
546 memset(best_clock, 0, sizeof(*best_clock));
547
548 for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max;
549 clock.m1++) {
550 for (clock.m2 = limit->m2.min;
551 clock.m2 <= limit->m2.max; clock.m2++) {
552
553 if (clock.m2 >= clock.m1 && !IS_PINEVIEW(dev))
554 break;
555 for (clock.n = limit->n.min;
556 clock.n <= limit->n.max; clock.n++) {
557 for (clock.p1 = limit->p1.min;
558 clock.p1 <= limit->p1.max; clock.p1++) {
559 int this_err;
560
561 intel_clock(dev, refclk, &clock);
562 if (!intel_PLL_is_valid(dev, limit,
563 &clock))
564 continue;
565
566 this_err = abs(clock.dot - target);
567 if (this_err < err) {
568 *best_clock = clock;
569 err = this_err;
570 }
571 }
572 }
573 }
574 }
575
576 return (err != target);
577}
578
579static bool
580intel_g4x_find_best_PLL(const intel_limit_t *limit, struct drm_crtc *crtc,
581 int target, int refclk, intel_clock_t *best_clock)
582{
583 struct drm_device *dev = crtc->dev;
584 struct drm_i915_private *dev_priv = dev->dev_private;
585 intel_clock_t clock;
586 int max_n;
587 bool found;
588
589 int err_most = (target >> 8) + (target >> 9);
590 found = false;
591
592 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)) {
593 int lvds_reg;
594
595 if (HAS_PCH_SPLIT(dev))
596 lvds_reg = PCH_LVDS;
597 else
598 lvds_reg = LVDS;
599 if ((I915_READ(lvds_reg) & LVDS_CLKB_POWER_MASK) ==
600 LVDS_CLKB_POWER_UP)
601 clock.p2 = limit->p2.p2_fast;
602 else
603 clock.p2 = limit->p2.p2_slow;
604 } else {
605 if (target < limit->p2.dot_limit)
606 clock.p2 = limit->p2.p2_slow;
607 else
608 clock.p2 = limit->p2.p2_fast;
609 }
610
611 memset(best_clock, 0, sizeof(*best_clock));
612 max_n = limit->n.max;
613
614 for (clock.n = limit->n.min; clock.n <= max_n; clock.n++) {
615
616 for (clock.m1 = limit->m1.max;
617 clock.m1 >= limit->m1.min; clock.m1--) {
618 for (clock.m2 = limit->m2.max;
619 clock.m2 >= limit->m2.min; clock.m2--) {
620 for (clock.p1 = limit->p1.max;
621 clock.p1 >= limit->p1.min; clock.p1--) {
622 int this_err;
623
624 intel_clock(dev, refclk, &clock);
625 if (!intel_PLL_is_valid(dev, limit,
626 &clock))
627 continue;
628
629 this_err = abs(clock.dot - target);
630 if (this_err < err_most) {
631 *best_clock = clock;
632 err_most = this_err;
633 max_n = clock.n;
634 found = true;
635 }
636 }
637 }
638 }
639 }
640 return found;
641}
642
643static bool
644intel_find_pll_ironlake_dp(const intel_limit_t *limit, struct drm_crtc *crtc,
645 int target, int refclk, intel_clock_t *best_clock)
646{
647 struct drm_device *dev = crtc->dev;
648 intel_clock_t clock;
649
650 if (target < 200000) {
651 clock.n = 1;
652 clock.p1 = 2;
653 clock.p2 = 10;
654 clock.m1 = 12;
655 clock.m2 = 9;
656 } else {
657 clock.n = 2;
658 clock.p1 = 1;
659 clock.p2 = 10;
660 clock.m1 = 14;
661 clock.m2 = 8;
662 }
663 intel_clock(dev, refclk, &clock);
664 memcpy(best_clock, &clock, sizeof(intel_clock_t));
665 return true;
666}
667
668
669static bool
670intel_find_pll_g4x_dp(const intel_limit_t *limit, struct drm_crtc *crtc,
671 int target, int refclk, intel_clock_t *best_clock)
672{
673 intel_clock_t clock;
674 if (target < 200000) {
675 clock.p1 = 2;
676 clock.p2 = 10;
677 clock.n = 2;
678 clock.m1 = 23;
679 clock.m2 = 8;
680 } else {
681 clock.p1 = 1;
682 clock.p2 = 10;
683 clock.n = 1;
684 clock.m1 = 14;
685 clock.m2 = 2;
686 }
687 clock.m = 5 * (clock.m1 + 2) + (clock.m2 + 2);
688 clock.p = (clock.p1 * clock.p2);
689 clock.dot = 96000 * clock.m / (clock.n + 2) / clock.p;
690 clock.vco = 0;
691 memcpy(best_clock, &clock, sizeof(intel_clock_t));
692 return true;
693}
694
695
696
697
698
699
700
701
702
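/**
 * intel_wait_for_vblank - wait for vblank on a given pipe
 * @dev: drm device
 * @pipe: pipe to wait for
 *
 * Wait for vblank to occur on a given pipe.  Needed for various bits of
 * mode setting code.
 */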
703void intel_wait_for_vblank(struct drm_device *dev, int pipe)
704{
705 struct drm_i915_private *dev_priv = dev->dev_private;
706 int pipestat_reg = PIPESTAT(pipe);
707
708
709
710
711
712
713
714
715
716
717
718
719
720
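	/* Clear the existing vblank status bit (the PIPESTAT status bits
	 * are sticky and cleared by writing them back), then wait for the
	 * hardware to set it again, which marks the start of the next
	 * vblank interval.
	 */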
721 I915_WRITE(pipestat_reg,
722 I915_READ(pipestat_reg) | PIPE_VBLANK_INTERRUPT_STATUS);
723
724
725 if (wait_for(I915_READ(pipestat_reg) &
726 PIPE_VBLANK_INTERRUPT_STATUS,
727 50))
728 DRM_DEBUG_KMS("vblank wait timed out\n");
729}
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
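/*
 * intel_wait_for_pipe_off - wait for pipe to turn off
 * @dev: drm device
 * @pipe: pipe to wait for
 *
 * After disabling a pipe, we can't wait for vblank in the usual way,
 * spinning on the vblank interrupt status bit, since we won't actually
 * see an interrupt when the pipe is disabled.
 *
 * On Gen4 and above:
 *   wait for the pipe register state bit to turn off
 *
 * Otherwise:
 *   wait for the display line value to settle (it usually
 *   ends up stopping at the start of the next frame).
 */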
748void intel_wait_for_pipe_off(struct drm_device *dev, int pipe)
749{
750 struct drm_i915_private *dev_priv = dev->dev_private;
751
752 if (INTEL_INFO(dev)->gen >= 4) {
753 int reg = PIPECONF(pipe);
754
755
756 if (wait_for((I915_READ(reg) & I965_PIPECONF_ACTIVE) == 0,
757 100))
758 DRM_DEBUG_KMS("pipe_off wait timed out\n");
759 } else {
760 u32 last_line;
761 int reg = PIPEDSL(pipe);
762 unsigned long timeout = jiffies + msecs_to_jiffies(100);
763
764
765 do {
766 last_line = I915_READ(reg) & DSL_LINEMASK;
767 mdelay(5);
768 } while (((I915_READ(reg) & DSL_LINEMASK) != last_line) &&
769 time_after(timeout, jiffies));
770 if (time_after(jiffies, timeout))
771 DRM_DEBUG_KMS("pipe_off wait timed out\n");
772 }
773}
774
775static const char *state_string(bool enabled)
776{
777 return enabled ? "on" : "off";
778}
779
780
781static void assert_pll(struct drm_i915_private *dev_priv,
782 enum pipe pipe, bool state)
783{
784 int reg;
785 u32 val;
786 bool cur_state;
787
788 reg = DPLL(pipe);
789 val = I915_READ(reg);
790 cur_state = !!(val & DPLL_VCO_ENABLE);
791 WARN(cur_state != state,
792 "PLL state assertion failure (expected %s, current %s)\n",
793 state_string(state), state_string(cur_state));
794}
795#define assert_pll_enabled(d, p) assert_pll(d, p, true)
796#define assert_pll_disabled(d, p) assert_pll(d, p, false)
797
798
799static void assert_pch_pll(struct drm_i915_private *dev_priv,
800 enum pipe pipe, bool state)
801{
802 int reg;
803 u32 val;
804 bool cur_state;
805
806 if (HAS_PCH_CPT(dev_priv->dev)) {
807 u32 pch_dpll;
808
809 pch_dpll = I915_READ(PCH_DPLL_SEL);
810
811
812 WARN(!((pch_dpll >> (4 * pipe)) & 8),
813 "transcoder %d PLL not enabled\n", pipe);
814
815
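		/* Convert the transcoder pipe number to a PLL pipe number */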
816 pipe = (pch_dpll >> (4 * pipe)) & 1;
817 }
818
819 reg = PCH_DPLL(pipe);
820 val = I915_READ(reg);
821 cur_state = !!(val & DPLL_VCO_ENABLE);
822 WARN(cur_state != state,
823 "PCH PLL state assertion failure (expected %s, current %s)\n",
824 state_string(state), state_string(cur_state));
825}
826#define assert_pch_pll_enabled(d, p) assert_pch_pll(d, p, true)
827#define assert_pch_pll_disabled(d, p) assert_pch_pll(d, p, false)
828
829static void assert_fdi_tx(struct drm_i915_private *dev_priv,
830 enum pipe pipe, bool state)
831{
832 int reg;
833 u32 val;
834 bool cur_state;
835
836 reg = FDI_TX_CTL(pipe);
837 val = I915_READ(reg);
838 cur_state = !!(val & FDI_TX_ENABLE);
839 WARN(cur_state != state,
840 "FDI TX state assertion failure (expected %s, current %s)\n",
841 state_string(state), state_string(cur_state));
842}
843#define assert_fdi_tx_enabled(d, p) assert_fdi_tx(d, p, true)
844#define assert_fdi_tx_disabled(d, p) assert_fdi_tx(d, p, false)
845
846static void assert_fdi_rx(struct drm_i915_private *dev_priv,
847 enum pipe pipe, bool state)
848{
849 int reg;
850 u32 val;
851 bool cur_state;
852
853 reg = FDI_RX_CTL(pipe);
854 val = I915_READ(reg);
855 cur_state = !!(val & FDI_RX_ENABLE);
856 WARN(cur_state != state,
857 "FDI RX state assertion failure (expected %s, current %s)\n",
858 state_string(state), state_string(cur_state));
859}
860#define assert_fdi_rx_enabled(d, p) assert_fdi_rx(d, p, true)
861#define assert_fdi_rx_disabled(d, p) assert_fdi_rx(d, p, false)
862
863static void assert_fdi_tx_pll_enabled(struct drm_i915_private *dev_priv,
864 enum pipe pipe)
865{
866 int reg;
867 u32 val;
868
869
870 if (dev_priv->info->gen == 5)
871 return;
872
873 reg = FDI_TX_CTL(pipe);
874 val = I915_READ(reg);
875 WARN(!(val & FDI_TX_PLL_ENABLE), "FDI TX PLL assertion failure, should be active but is disabled\n");
876}
877
878static void assert_fdi_rx_pll_enabled(struct drm_i915_private *dev_priv,
879 enum pipe pipe)
880{
881 int reg;
882 u32 val;
883
884 reg = FDI_RX_CTL(pipe);
885 val = I915_READ(reg);
886 WARN(!(val & FDI_RX_PLL_ENABLE), "FDI RX PLL assertion failure, should be active but is disabled\n");
887}
888
889static void assert_panel_unlocked(struct drm_i915_private *dev_priv,
890 enum pipe pipe)
891{
892 int pp_reg, lvds_reg;
893 u32 val;
894 enum pipe panel_pipe = PIPE_A;
895 bool locked = true;
896
897 if (HAS_PCH_SPLIT(dev_priv->dev)) {
898 pp_reg = PCH_PP_CONTROL;
899 lvds_reg = PCH_LVDS;
900 } else {
901 pp_reg = PP_CONTROL;
902 lvds_reg = LVDS;
903 }
904
905 val = I915_READ(pp_reg);
906 if (!(val & PANEL_POWER_ON) ||
907 ((val & PANEL_UNLOCK_REGS) == PANEL_UNLOCK_REGS))
908 locked = false;
909
910 if (I915_READ(lvds_reg) & LVDS_PIPEB_SELECT)
911 panel_pipe = PIPE_B;
912
913 WARN(panel_pipe == pipe && locked,
914 "panel assertion failure, pipe %c regs locked\n",
915 pipe_name(pipe));
916}
917
918static void assert_pipe(struct drm_i915_private *dev_priv,
919 enum pipe pipe, bool state)
920{
921 int reg;
922 u32 val;
923 bool cur_state;
924
925 reg = PIPECONF(pipe);
926 val = I915_READ(reg);
927 cur_state = !!(val & PIPECONF_ENABLE);
928 WARN(cur_state != state,
929 "pipe %c assertion failure (expected %s, current %s)\n",
930 pipe_name(pipe), state_string(state), state_string(cur_state));
931}
932#define assert_pipe_enabled(d, p) assert_pipe(d, p, true)
933#define assert_pipe_disabled(d, p) assert_pipe(d, p, false)
934
935static void assert_plane_enabled(struct drm_i915_private *dev_priv,
936 enum plane plane)
937{
938 int reg;
939 u32 val;
940
941 reg = DSPCNTR(plane);
942 val = I915_READ(reg);
943 WARN(!(val & DISPLAY_PLANE_ENABLE),
944 "plane %c assertion failure, should be active but is disabled\n",
945 plane_name(plane));
946}
947
948static void assert_planes_disabled(struct drm_i915_private *dev_priv,
949 enum pipe pipe)
950{
951 int reg, i;
952 u32 val;
953 int cur_pipe;
954
955
956 if (HAS_PCH_SPLIT(dev_priv->dev))
957 return;
958
959
960 for (i = 0; i < 2; i++) {
961 reg = DSPCNTR(i);
962 val = I915_READ(reg);
963 cur_pipe = (val & DISPPLANE_SEL_PIPE_MASK) >>
964 DISPPLANE_SEL_PIPE_SHIFT;
965 WARN((val & DISPLAY_PLANE_ENABLE) && pipe == cur_pipe,
966 "plane %c assertion failure, should be off on pipe %c but is still active\n",
967 plane_name(i), pipe_name(pipe));
968 }
969}
970
971static void assert_pch_refclk_enabled(struct drm_i915_private *dev_priv)
972{
973 u32 val;
974 bool enabled;
975
976 val = I915_READ(PCH_DREF_CONTROL);
977 enabled = !!(val & (DREF_SSC_SOURCE_MASK | DREF_NONSPREAD_SOURCE_MASK |
978 DREF_SUPERSPREAD_SOURCE_MASK));
979 WARN(!enabled, "PCH refclk assertion failure, should be active but is disabled\n");
980}
981
982static void assert_transcoder_disabled(struct drm_i915_private *dev_priv,
983 enum pipe pipe)
984{
985 int reg;
986 u32 val;
987 bool enabled;
988
989 reg = TRANSCONF(pipe);
990 val = I915_READ(reg);
991 enabled = !!(val & TRANS_ENABLE);
992 WARN(enabled,
993 "transcoder assertion failed, should be off on pipe %c but is still active\n",
994 pipe_name(pipe));
995}
996
997static bool dp_pipe_enabled(struct drm_i915_private *dev_priv,
998 enum pipe pipe, u32 port_sel, u32 val)
999{
1000 if ((val & DP_PORT_EN) == 0)
1001 return false;
1002
1003 if (HAS_PCH_CPT(dev_priv->dev)) {
1004 u32 trans_dp_ctl_reg = TRANS_DP_CTL(pipe);
1005 u32 trans_dp_ctl = I915_READ(trans_dp_ctl_reg);
1006 if ((trans_dp_ctl & TRANS_DP_PORT_SEL_MASK) != port_sel)
1007 return false;
1008 } else {
1009 if ((val & DP_PIPE_MASK) != (pipe << 30))
1010 return false;
1011 }
1012 return true;
1013}
1014
1015static bool hdmi_pipe_enabled(struct drm_i915_private *dev_priv,
1016 enum pipe pipe, u32 val)
1017{
1018 if ((val & PORT_ENABLE) == 0)
1019 return false;
1020
1021 if (HAS_PCH_CPT(dev_priv->dev)) {
1022 if ((val & PORT_TRANS_SEL_MASK) != PORT_TRANS_SEL_CPT(pipe))
1023 return false;
1024 } else {
1025 if ((val & TRANSCODER_MASK) != TRANSCODER(pipe))
1026 return false;
1027 }
1028 return true;
1029}
1030
1031static bool lvds_pipe_enabled(struct drm_i915_private *dev_priv,
1032 enum pipe pipe, u32 val)
1033{
1034 if ((val & LVDS_PORT_EN) == 0)
1035 return false;
1036
1037 if (HAS_PCH_CPT(dev_priv->dev)) {
1038 if ((val & PORT_TRANS_SEL_MASK) != PORT_TRANS_SEL_CPT(pipe))
1039 return false;
1040 } else {
1041 if ((val & LVDS_PIPE_MASK) != LVDS_PIPE(pipe))
1042 return false;
1043 }
1044 return true;
1045}
1046
1047static bool adpa_pipe_enabled(struct drm_i915_private *dev_priv,
1048 enum pipe pipe, u32 val)
1049{
1050 if ((val & ADPA_DAC_ENABLE) == 0)
1051 return false;
1052 if (HAS_PCH_CPT(dev_priv->dev)) {
1053 if ((val & PORT_TRANS_SEL_MASK) != PORT_TRANS_SEL_CPT(pipe))
1054 return false;
1055 } else {
1056 if ((val & ADPA_PIPE_SELECT_MASK) != ADPA_PIPE_SELECT(pipe))
1057 return false;
1058 }
1059 return true;
1060}
1061
1062static void assert_pch_dp_disabled(struct drm_i915_private *dev_priv,
1063 enum pipe pipe, int reg, u32 port_sel)
1064{
1065 u32 val = I915_READ(reg);
1066 WARN(dp_pipe_enabled(dev_priv, pipe, port_sel, val),
1067 "PCH DP (0x%08x) enabled on transcoder %c, should be disabled\n",
1068 reg, pipe_name(pipe));
1069}
1070
static void assert_pch_hdmi_disabled(struct drm_i915_private *dev_priv,
				     enum pipe pipe, int reg)
{
	u32 val = I915_READ(reg);
	WARN(hdmi_pipe_enabled(dev_priv, pipe, val),
	     "PCH HDMI (0x%08x) enabled on transcoder %c, should be disabled\n",
	     reg, pipe_name(pipe));
}
1079
1080static void assert_pch_ports_disabled(struct drm_i915_private *dev_priv,
1081 enum pipe pipe)
1082{
1083 int reg;
1084 u32 val;
1085
1086 assert_pch_dp_disabled(dev_priv, pipe, PCH_DP_B, TRANS_DP_PORT_SEL_B);
1087 assert_pch_dp_disabled(dev_priv, pipe, PCH_DP_C, TRANS_DP_PORT_SEL_C);
1088 assert_pch_dp_disabled(dev_priv, pipe, PCH_DP_D, TRANS_DP_PORT_SEL_D);
1089
	reg = PCH_ADPA;
	val = I915_READ(reg);
	WARN(adpa_pipe_enabled(dev_priv, pipe, val),
	     "PCH VGA enabled on transcoder %c, should be disabled\n",
	     pipe_name(pipe));

	reg = PCH_LVDS;
	val = I915_READ(reg);
	WARN(lvds_pipe_enabled(dev_priv, pipe, val),
	     "PCH LVDS enabled on transcoder %c, should be disabled\n",
	     pipe_name(pipe));
1101
1102 assert_pch_hdmi_disabled(dev_priv, pipe, HDMIB);
1103 assert_pch_hdmi_disabled(dev_priv, pipe, HDMIC);
1104 assert_pch_hdmi_disabled(dev_priv, pipe, HDMID);
1105}
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
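/**
 * intel_enable_pll - enable a PLL
 * @dev_priv: i915 private structure
 * @pipe: pipe PLL to enable
 *
 * Enable @pipe's PLL so we can start pumping pixels from a plane.  Check to
 * make sure the PLL reg is writable first though, since the panel write
 * protect mechanism may be enabled.
 *
 * Note!  This is for pre-ILK only.
 */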
1118static void intel_enable_pll(struct drm_i915_private *dev_priv, enum pipe pipe)
1119{
1120 int reg;
1121 u32 val;
1122
1123
1124 BUG_ON(dev_priv->info->gen >= 5);
1125
1126
1127 if (IS_MOBILE(dev_priv->dev) && !IS_I830(dev_priv->dev))
1128 assert_panel_unlocked(dev_priv, pipe);
1129
1130 reg = DPLL(pipe);
1131 val = I915_READ(reg);
1132 val |= DPLL_VCO_ENABLE;
1133
1134
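	/* We do this three times for luck */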
1135 I915_WRITE(reg, val);
1136 POSTING_READ(reg);
1137 udelay(150);
1138 I915_WRITE(reg, val);
1139 POSTING_READ(reg);
1140 udelay(150);
1141 I915_WRITE(reg, val);
1142 POSTING_READ(reg);
1143 udelay(150);
1144}
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
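/**
 * intel_disable_pll - disable a PLL
 * @dev_priv: i915 private structure
 * @pipe: pipe PLL to disable
 *
 * Disable the PLL for @pipe, making sure the pipe is off first.
 *
 * Note!  This is for pre-ILK only.
 */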
1155static void intel_disable_pll(struct drm_i915_private *dev_priv, enum pipe pipe)
1156{
1157 int reg;
1158 u32 val;
1159
1160
1161 if (pipe == PIPE_A && (dev_priv->quirks & QUIRK_PIPEA_FORCE))
1162 return;
1163
1164
1165 assert_pipe_disabled(dev_priv, pipe);
1166
1167 reg = DPLL(pipe);
1168 val = I915_READ(reg);
1169 val &= ~DPLL_VCO_ENABLE;
1170 I915_WRITE(reg, val);
1171 POSTING_READ(reg);
1172}
1173
1174
1175
1176
1177
1178
1179
1180
1181
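/**
 * intel_enable_pch_pll - enable PCH PLL
 * @dev_priv: i915 private structure
 * @pipe: pipe PLL to enable
 *
 * The PCH PLL needs to be enabled before the PCH transcoder, since it
 * drives the transcoder clock.
 */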
1182static void intel_enable_pch_pll(struct drm_i915_private *dev_priv,
1183 enum pipe pipe)
1184{
1185 int reg;
1186 u32 val;
1187
1188 if (pipe > 1)
1189 return;
1190
1191
1192 BUG_ON(dev_priv->info->gen < 5);
1193
1194
1195 assert_pch_refclk_enabled(dev_priv);
1196
1197 reg = PCH_DPLL(pipe);
1198 val = I915_READ(reg);
1199 val |= DPLL_VCO_ENABLE;
1200 I915_WRITE(reg, val);
1201 POSTING_READ(reg);
1202 udelay(200);
1203}
1204
1205static void intel_disable_pch_pll(struct drm_i915_private *dev_priv,
1206 enum pipe pipe)
1207{
1208 int reg;
1209 u32 val;
1210
1211 if (pipe > 1)
1212 return;
1213
1214
1215 BUG_ON(dev_priv->info->gen < 5);
1216
1217
1218 assert_transcoder_disabled(dev_priv, pipe);
1219
1220 reg = PCH_DPLL(pipe);
1221 val = I915_READ(reg);
1222 val &= ~DPLL_VCO_ENABLE;
1223 I915_WRITE(reg, val);
1224 POSTING_READ(reg);
1225 udelay(200);
1226}
1227
1228static void intel_enable_transcoder(struct drm_i915_private *dev_priv,
1229 enum pipe pipe)
1230{
1231 int reg;
1232 u32 val;
1233
1234
1235 BUG_ON(dev_priv->info->gen < 5);
1236
1237
1238 assert_pch_pll_enabled(dev_priv, pipe);
1239
1240
1241 assert_fdi_tx_enabled(dev_priv, pipe);
1242 assert_fdi_rx_enabled(dev_priv, pipe);
1243
1244 reg = TRANSCONF(pipe);
1245 val = I915_READ(reg);
1246
1247 if (HAS_PCH_IBX(dev_priv->dev)) {
1248
1249
1250
1251
1252 val &= ~PIPE_BPC_MASK;
1253 val |= I915_READ(PIPECONF(pipe)) & PIPE_BPC_MASK;
1254 }
1255 I915_WRITE(reg, val | TRANS_ENABLE);
1256 if (wait_for(I915_READ(reg) & TRANS_STATE_ENABLE, 100))
1257 DRM_ERROR("failed to enable transcoder %d\n", pipe);
1258}
1259
1260static void intel_disable_transcoder(struct drm_i915_private *dev_priv,
1261 enum pipe pipe)
1262{
1263 int reg;
1264 u32 val;
1265
1266
1267 assert_fdi_tx_disabled(dev_priv, pipe);
1268 assert_fdi_rx_disabled(dev_priv, pipe);
1269
1270
1271 assert_pch_ports_disabled(dev_priv, pipe);
1272
1273 reg = TRANSCONF(pipe);
1274 val = I915_READ(reg);
1275 val &= ~TRANS_ENABLE;
1276 I915_WRITE(reg, val);
1277
1278 if (wait_for((I915_READ(reg) & TRANS_STATE_ENABLE) == 0, 50))
1279 DRM_ERROR("failed to disable transcoder %d\n", pipe);
1280}
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
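/**
 * intel_enable_pipe - enable a pipe, asserting requirements
 * @dev_priv: i915 private structure
 * @pipe: pipe to enable
 * @pch_port: on ILK+, whether this pipe is driving a PCH port
 *
 * Enable @pipe, making sure that various hardware specific requirements
 * are met, if applicable, e.g. PLL enabled, LVDS pairs enabled, etc.
 *
 * Will wait until the pipe is actually running (i.e. first vblank) before
 * returning.
 */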
1296static void intel_enable_pipe(struct drm_i915_private *dev_priv, enum pipe pipe,
1297 bool pch_port)
1298{
1299 int reg;
1300 u32 val;
1301
1302
1303
1304
1305
1306
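	/*
	 * A pipe without a PLL won't actually be able to drive bits from
	 * a plane.  On ILK+ the pipe PLLs are integrated, so we don't
	 * need the check.
	 */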
1307 if (!HAS_PCH_SPLIT(dev_priv->dev))
1308 assert_pll_enabled(dev_priv, pipe);
1309 else {
1310 if (pch_port) {
1311
1312 assert_fdi_rx_pll_enabled(dev_priv, pipe);
1313 assert_fdi_tx_pll_enabled(dev_priv, pipe);
1314 }
1315
1316 }
1317
1318 reg = PIPECONF(pipe);
1319 val = I915_READ(reg);
1320 if (val & PIPECONF_ENABLE)
1321 return;
1322
1323 I915_WRITE(reg, val | PIPECONF_ENABLE);
1324 intel_wait_for_vblank(dev_priv->dev, pipe);
1325}
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
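/**
 * intel_disable_pipe - disable a pipe, asserting requirements
 * @dev_priv: i915 private structure
 * @pipe: pipe to disable
 *
 * Disable @pipe, making sure that various hardware specific requirements
 * are met, if applicable, e.g. plane disabled, panel fitter off, etc.
 *
 * Will wait until the pipe has shut down before returning.
 */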
1339static void intel_disable_pipe(struct drm_i915_private *dev_priv,
1340 enum pipe pipe)
1341{
1342 int reg;
1343 u32 val;
1344
1345
1346
1347
1348
1349 assert_planes_disabled(dev_priv, pipe);
1350
1351
1352 if (pipe == PIPE_A && (dev_priv->quirks & QUIRK_PIPEA_FORCE))
1353 return;
1354
1355 reg = PIPECONF(pipe);
1356 val = I915_READ(reg);
1357 if ((val & PIPECONF_ENABLE) == 0)
1358 return;
1359
1360 I915_WRITE(reg, val & ~PIPECONF_ENABLE);
1361 intel_wait_for_pipe_off(dev_priv->dev, pipe);
1362}
1363
1364
1365
1366
1367
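/*
 * Plane regs are double buffered, going from enabled->disabled needs a
 * trigger in order to latch.  The display address reg provides this.
 */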
1368static void intel_flush_display_plane(struct drm_i915_private *dev_priv,
1369 enum plane plane)
1370{
1371 I915_WRITE(DSPADDR(plane), I915_READ(DSPADDR(plane)));
1372 I915_WRITE(DSPSURF(plane), I915_READ(DSPSURF(plane)));
1373}
1374
1375
1376
1377
1378
1379
1380
1381
1382
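/**
 * intel_enable_plane - enable a display plane on a given pipe
 * @dev_priv: i915 private structure
 * @plane: plane to enable
 * @pipe: pipe being fed
 *
 * Enable @plane on @pipe, making sure that @pipe is running first.
 */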
1383static void intel_enable_plane(struct drm_i915_private *dev_priv,
1384 enum plane plane, enum pipe pipe)
1385{
1386 int reg;
1387 u32 val;
1388
1389
1390 assert_pipe_enabled(dev_priv, pipe);
1391
1392 reg = DSPCNTR(plane);
1393 val = I915_READ(reg);
1394 if (val & DISPLAY_PLANE_ENABLE)
1395 return;
1396
1397 I915_WRITE(reg, val | DISPLAY_PLANE_ENABLE);
1398 intel_flush_display_plane(dev_priv, plane);
1399 intel_wait_for_vblank(dev_priv->dev, pipe);
1400}
1401
1402
1403
1404
1405
1406
1407
1408
1409
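/**
 * intel_disable_plane - disable a display plane
 * @dev_priv: i915 private structure
 * @plane: plane to disable
 * @pipe: pipe consuming the data
 *
 * Disable @plane; should be an independent operation.
 */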
1410static void intel_disable_plane(struct drm_i915_private *dev_priv,
1411 enum plane plane, enum pipe pipe)
1412{
1413 int reg;
1414 u32 val;
1415
1416 reg = DSPCNTR(plane);
1417 val = I915_READ(reg);
1418 if ((val & DISPLAY_PLANE_ENABLE) == 0)
1419 return;
1420
1421 I915_WRITE(reg, val & ~DISPLAY_PLANE_ENABLE);
1422 intel_flush_display_plane(dev_priv, plane);
1423 intel_wait_for_vblank(dev_priv->dev, pipe);
1424}
1425
1426static void disable_pch_dp(struct drm_i915_private *dev_priv,
1427 enum pipe pipe, int reg, u32 port_sel)
1428{
1429 u32 val = I915_READ(reg);
1430 if (dp_pipe_enabled(dev_priv, pipe, port_sel, val)) {
1431 DRM_DEBUG_KMS("Disabling pch dp %x on pipe %d\n", reg, pipe);
1432 I915_WRITE(reg, val & ~DP_PORT_EN);
1433 }
1434}
1435
1436static void disable_pch_hdmi(struct drm_i915_private *dev_priv,
1437 enum pipe pipe, int reg)
1438{
	u32 val = I915_READ(reg);
	if (hdmi_pipe_enabled(dev_priv, pipe, val)) {
		DRM_DEBUG_KMS("Disabling pch HDMI %x on pipe %d\n",
			      reg, pipe);
		I915_WRITE(reg, val & ~PORT_ENABLE);
	}
1445}
1446
1447
1448static void intel_disable_pch_ports(struct drm_i915_private *dev_priv,
1449 enum pipe pipe)
1450{
1451 u32 reg, val;
1452
1453 val = I915_READ(PCH_PP_CONTROL);
1454 I915_WRITE(PCH_PP_CONTROL, val | PANEL_UNLOCK_REGS);
1455
1456 disable_pch_dp(dev_priv, pipe, PCH_DP_B, TRANS_DP_PORT_SEL_B);
1457 disable_pch_dp(dev_priv, pipe, PCH_DP_C, TRANS_DP_PORT_SEL_C);
1458 disable_pch_dp(dev_priv, pipe, PCH_DP_D, TRANS_DP_PORT_SEL_D);
1459
	reg = PCH_ADPA;
	val = I915_READ(reg);
	if (adpa_pipe_enabled(dev_priv, pipe, val))
		I915_WRITE(reg, val & ~ADPA_DAC_ENABLE);

	reg = PCH_LVDS;
	val = I915_READ(reg);
	if (lvds_pipe_enabled(dev_priv, pipe, val)) {
		DRM_DEBUG_KMS("disable lvds on pipe %d val 0x%08x\n", pipe, val);
		I915_WRITE(reg, val & ~LVDS_PORT_EN);
		POSTING_READ(reg);
		udelay(100);
	}
1473
1474 disable_pch_hdmi(dev_priv, pipe, HDMIB);
1475 disable_pch_hdmi(dev_priv, pipe, HDMIC);
1476 disable_pch_hdmi(dev_priv, pipe, HDMID);
1477}
1478
1479static void i8xx_disable_fbc(struct drm_device *dev)
1480{
1481 struct drm_i915_private *dev_priv = dev->dev_private;
1482 u32 fbc_ctl;
1483
1484
1485 fbc_ctl = I915_READ(FBC_CONTROL);
1486 if ((fbc_ctl & FBC_CTL_EN) == 0)
1487 return;
1488
1489 fbc_ctl &= ~FBC_CTL_EN;
1490 I915_WRITE(FBC_CONTROL, fbc_ctl);
1491
1492
1493 if (wait_for((I915_READ(FBC_STATUS) & FBC_STAT_COMPRESSING) == 0, 10)) {
1494 DRM_DEBUG_KMS("FBC idle timed out\n");
1495 return;
1496 }
1497
1498 DRM_DEBUG_KMS("disabled FBC\n");
1499}
1500
1501static void i8xx_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
1502{
1503 struct drm_device *dev = crtc->dev;
1504 struct drm_i915_private *dev_priv = dev->dev_private;
1505 struct drm_framebuffer *fb = crtc->fb;
1506 struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
1507 struct drm_i915_gem_object *obj = intel_fb->obj;
1508 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
1509 int cfb_pitch;
1510 int plane, i;
1511 u32 fbc_ctl, fbc_ctl2;
1512
1513 cfb_pitch = dev_priv->cfb_size / FBC_LL_SIZE;
1514 if (fb->pitch < cfb_pitch)
1515 cfb_pitch = fb->pitch;
1516
1517
1518 cfb_pitch = (cfb_pitch / 64) - 1;
1519 plane = intel_crtc->plane == 0 ? FBC_CTL_PLANEA : FBC_CTL_PLANEB;
1520
1521
1522 for (i = 0; i < (FBC_LL_SIZE / 32) + 1; i++)
1523 I915_WRITE(FBC_TAG + (i * 4), 0);
1524
1525
1526 fbc_ctl2 = FBC_CTL_FENCE_DBL | FBC_CTL_IDLE_IMM | FBC_CTL_CPU_FENCE;
1527 fbc_ctl2 |= plane;
1528 I915_WRITE(FBC_CONTROL2, fbc_ctl2);
1529 I915_WRITE(FBC_FENCE_OFF, crtc->y);
1530
1531
1532 fbc_ctl = FBC_CTL_EN | FBC_CTL_PERIODIC;
1533 if (IS_I945GM(dev))
1534 fbc_ctl |= FBC_CTL_C3_IDLE;
1535 fbc_ctl |= (cfb_pitch & 0xff) << FBC_CTL_STRIDE_SHIFT;
1536 fbc_ctl |= (interval & 0x2fff) << FBC_CTL_INTERVAL_SHIFT;
1537 fbc_ctl |= obj->fence_reg;
1538 I915_WRITE(FBC_CONTROL, fbc_ctl);
1539
1540 DRM_DEBUG_KMS("enabled FBC, pitch %d, yoff %d, plane %d, ",
1541 cfb_pitch, crtc->y, intel_crtc->plane);
1542}
1543
1544static bool i8xx_fbc_enabled(struct drm_device *dev)
1545{
1546 struct drm_i915_private *dev_priv = dev->dev_private;
1547
1548 return I915_READ(FBC_CONTROL) & FBC_CTL_EN;
1549}
1550
1551static void g4x_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
1552{
1553 struct drm_device *dev = crtc->dev;
1554 struct drm_i915_private *dev_priv = dev->dev_private;
1555 struct drm_framebuffer *fb = crtc->fb;
1556 struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
1557 struct drm_i915_gem_object *obj = intel_fb->obj;
1558 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
1559 int plane = intel_crtc->plane == 0 ? DPFC_CTL_PLANEA : DPFC_CTL_PLANEB;
1560 unsigned long stall_watermark = 200;
1561 u32 dpfc_ctl;
1562
1563 dpfc_ctl = plane | DPFC_SR_EN | DPFC_CTL_LIMIT_1X;
1564 dpfc_ctl |= DPFC_CTL_FENCE_EN | obj->fence_reg;
1565 I915_WRITE(DPFC_CHICKEN, DPFC_HT_MODIFY);
1566
1567 I915_WRITE(DPFC_RECOMP_CTL, DPFC_RECOMP_STALL_EN |
1568 (stall_watermark << DPFC_RECOMP_STALL_WM_SHIFT) |
1569 (interval << DPFC_RECOMP_TIMER_COUNT_SHIFT));
1570 I915_WRITE(DPFC_FENCE_YOFF, crtc->y);
1571
1572
1573 I915_WRITE(DPFC_CONTROL, I915_READ(DPFC_CONTROL) | DPFC_CTL_EN);
1574
1575 DRM_DEBUG_KMS("enabled fbc on plane %d\n", intel_crtc->plane);
1576}
1577
1578static void g4x_disable_fbc(struct drm_device *dev)
1579{
1580 struct drm_i915_private *dev_priv = dev->dev_private;
1581 u32 dpfc_ctl;
1582
1583
1584 dpfc_ctl = I915_READ(DPFC_CONTROL);
1585 if (dpfc_ctl & DPFC_CTL_EN) {
1586 dpfc_ctl &= ~DPFC_CTL_EN;
1587 I915_WRITE(DPFC_CONTROL, dpfc_ctl);
1588
1589 DRM_DEBUG_KMS("disabled FBC\n");
1590 }
1591}
1592
1593static bool g4x_fbc_enabled(struct drm_device *dev)
1594{
1595 struct drm_i915_private *dev_priv = dev->dev_private;
1596
1597 return I915_READ(DPFC_CONTROL) & DPFC_CTL_EN;
1598}
1599
1600static void sandybridge_blit_fbc_update(struct drm_device *dev)
1601{
1602 struct drm_i915_private *dev_priv = dev->dev_private;
1603 u32 blt_ecoskpd;
1604
1605
1606 gen6_gt_force_wake_get(dev_priv);
1607 blt_ecoskpd = I915_READ(GEN6_BLITTER_ECOSKPD);
1608 blt_ecoskpd |= GEN6_BLITTER_FBC_NOTIFY <<
1609 GEN6_BLITTER_LOCK_SHIFT;
1610 I915_WRITE(GEN6_BLITTER_ECOSKPD, blt_ecoskpd);
1611 blt_ecoskpd |= GEN6_BLITTER_FBC_NOTIFY;
1612 I915_WRITE(GEN6_BLITTER_ECOSKPD, blt_ecoskpd);
1613 blt_ecoskpd &= ~(GEN6_BLITTER_FBC_NOTIFY <<
1614 GEN6_BLITTER_LOCK_SHIFT);
1615 I915_WRITE(GEN6_BLITTER_ECOSKPD, blt_ecoskpd);
1616 POSTING_READ(GEN6_BLITTER_ECOSKPD);
1617 gen6_gt_force_wake_put(dev_priv);
1618}
1619
1620static void ironlake_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
1621{
1622 struct drm_device *dev = crtc->dev;
1623 struct drm_i915_private *dev_priv = dev->dev_private;
1624 struct drm_framebuffer *fb = crtc->fb;
1625 struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
1626 struct drm_i915_gem_object *obj = intel_fb->obj;
1627 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
1628 int plane = intel_crtc->plane == 0 ? DPFC_CTL_PLANEA : DPFC_CTL_PLANEB;
1629 unsigned long stall_watermark = 200;
1630 u32 dpfc_ctl;
1631
1632 dpfc_ctl = I915_READ(ILK_DPFC_CONTROL);
1633 dpfc_ctl &= DPFC_RESERVED;
1634 dpfc_ctl |= (plane | DPFC_CTL_LIMIT_1X);
1635
1636 dpfc_ctl |= DPFC_CTL_PERSISTENT_MODE;
1637 dpfc_ctl |= (DPFC_CTL_FENCE_EN | obj->fence_reg);
1638 I915_WRITE(ILK_DPFC_CHICKEN, DPFC_HT_MODIFY);
1639
1640 I915_WRITE(ILK_DPFC_RECOMP_CTL, DPFC_RECOMP_STALL_EN |
1641 (stall_watermark << DPFC_RECOMP_STALL_WM_SHIFT) |
1642 (interval << DPFC_RECOMP_TIMER_COUNT_SHIFT));
1643 I915_WRITE(ILK_DPFC_FENCE_YOFF, crtc->y);
1644 I915_WRITE(ILK_FBC_RT_BASE, obj->gtt_offset | ILK_FBC_RT_VALID);
1645
1646 I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl | DPFC_CTL_EN);
1647
1648 if (IS_GEN6(dev)) {
1649 I915_WRITE(SNB_DPFC_CTL_SA,
1650 SNB_CPU_FENCE_ENABLE | obj->fence_reg);
1651 I915_WRITE(DPFC_CPU_FENCE_OFFSET, crtc->y);
1652 sandybridge_blit_fbc_update(dev);
1653 }
1654
1655 DRM_DEBUG_KMS("enabled fbc on plane %d\n", intel_crtc->plane);
1656}
1657
1658static void ironlake_disable_fbc(struct drm_device *dev)
1659{
1660 struct drm_i915_private *dev_priv = dev->dev_private;
1661 u32 dpfc_ctl;
1662
1663
1664 dpfc_ctl = I915_READ(ILK_DPFC_CONTROL);
1665 if (dpfc_ctl & DPFC_CTL_EN) {
1666 dpfc_ctl &= ~DPFC_CTL_EN;
1667 I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl);
1668
1669 DRM_DEBUG_KMS("disabled FBC\n");
1670 }
1671}
1672
1673static bool ironlake_fbc_enabled(struct drm_device *dev)
1674{
1675 struct drm_i915_private *dev_priv = dev->dev_private;
1676
1677 return I915_READ(ILK_DPFC_CONTROL) & DPFC_CTL_EN;
1678}
1679
1680bool intel_fbc_enabled(struct drm_device *dev)
1681{
1682 struct drm_i915_private *dev_priv = dev->dev_private;
1683
1684 if (!dev_priv->display.fbc_enabled)
1685 return false;
1686
1687 return dev_priv->display.fbc_enabled(dev);
1688}
1689
1690static void intel_fbc_work_fn(struct work_struct *__work)
1691{
1692 struct intel_fbc_work *work =
1693 container_of(to_delayed_work(__work),
1694 struct intel_fbc_work, work);
1695 struct drm_device *dev = work->crtc->dev;
1696 struct drm_i915_private *dev_priv = dev->dev_private;
1697
1698 mutex_lock(&dev->struct_mutex);
1699 if (work == dev_priv->fbc_work) {
1700
1701
1702
1703 if (work->crtc->fb == work->fb) {
1704 dev_priv->display.enable_fbc(work->crtc,
1705 work->interval);
1706
1707 dev_priv->cfb_plane = to_intel_crtc(work->crtc)->plane;
1708 dev_priv->cfb_fb = work->crtc->fb->base.id;
1709 dev_priv->cfb_y = work->crtc->y;
1710 }
1711
1712 dev_priv->fbc_work = NULL;
1713 }
1714 mutex_unlock(&dev->struct_mutex);
1715
1716 kfree(work);
1717}
1718
1719static void intel_cancel_fbc_work(struct drm_i915_private *dev_priv)
1720{
1721 if (dev_priv->fbc_work == NULL)
1722 return;
1723
1724 DRM_DEBUG_KMS("cancelling pending FBC enable\n");
1725
1726
1727
1728
1729
1730 if (cancel_delayed_work(&dev_priv->fbc_work->work))
1731
1732 kfree(dev_priv->fbc_work);
1733
1734
1735
1736
1737
1738
1739 dev_priv->fbc_work = NULL;
1740}
1741
1742static void intel_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
1743{
1744 struct intel_fbc_work *work;
1745 struct drm_device *dev = crtc->dev;
1746 struct drm_i915_private *dev_priv = dev->dev_private;
1747
1748 if (!dev_priv->display.enable_fbc)
1749 return;
1750
1751 intel_cancel_fbc_work(dev_priv);
1752
1753 work = kzalloc(sizeof *work, GFP_KERNEL);
1754 if (work == NULL) {
1755 dev_priv->display.enable_fbc(crtc, interval);
1756 return;
1757 }
1758
1759 work->crtc = crtc;
1760 work->fb = crtc->fb;
1761 work->interval = interval;
1762 INIT_DELAYED_WORK(&work->work, intel_fbc_work_fn);
1763
1764 dev_priv->fbc_work = work;
1765
1766 DRM_DEBUG_KMS("scheduling delayed FBC enable\n");
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
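	/* Delay the actual enabling to let pageflipping cease and the
	 * display to settle before starting the compression.  Note that
	 * this delay also serves a second purpose: it lets a vblank pass
	 * after disabling the current FBC config before we attempt to
	 * modify the control registers.
	 */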
1779 schedule_delayed_work(&work->work, msecs_to_jiffies(50));
1780}
1781
1782void intel_disable_fbc(struct drm_device *dev)
1783{
1784 struct drm_i915_private *dev_priv = dev->dev_private;
1785
1786 intel_cancel_fbc_work(dev_priv);
1787
1788 if (!dev_priv->display.disable_fbc)
1789 return;
1790
1791 dev_priv->display.disable_fbc(dev);
1792 dev_priv->cfb_plane = -1;
1793}
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
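/**
 * intel_update_fbc - enable/disable FBC as needed
 * @dev: the drm_device
 *
 * Set up the framebuffer compression hardware at mode set time.  We
 * enable it if possible:
 *   - plane A only (on pre-965)
 *   - no pixel multiply/line duplication
 *   - no alpha buffer discard
 *   - no dual wide
 *   - framebuffer <= 2048 in width, 1536 in height
 *
 * We can't assume that any compression will take place (worst case),
 * so the compressed buffer has to be the same size as the uncompressed
 * one.  It also must reside (along with the line length buffer) in
 * stolen memory.
 *
 * We need to enable/disable FBC on a global basis.
 */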
1814static void intel_update_fbc(struct drm_device *dev)
1815{
1816 struct drm_i915_private *dev_priv = dev->dev_private;
1817 struct drm_crtc *crtc = NULL, *tmp_crtc;
1818 struct intel_crtc *intel_crtc;
1819 struct drm_framebuffer *fb;
1820 struct intel_framebuffer *intel_fb;
1821 struct drm_i915_gem_object *obj;
1822 int enable_fbc;
1823
1824 DRM_DEBUG_KMS("\n");
1825
1826 if (!i915_powersave)
1827 return;
1828
1829 if (!I915_HAS_FBC(dev))
1830 return;
1831
1832
1833
1834
1835
1836
1837
1838
1839
1840
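	/*
	 * If FBC is already on, we just have to verify that we can
	 * keep it that way.  It needs to be disabled when:
	 *   - more than one pipe is active
	 *   - changing FBC params (stride, fence, mode)
	 *   - new fb is too large to fit in the compressed buffer
	 *   - going to an unsupported config (interlace, pixel multiply, etc.)
	 */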
1841 list_for_each_entry(tmp_crtc, &dev->mode_config.crtc_list, head) {
1842 if (tmp_crtc->enabled && tmp_crtc->fb) {
1843 if (crtc) {
1844 DRM_DEBUG_KMS("more than one pipe active, disabling compression\n");
1845 dev_priv->no_fbc_reason = FBC_MULTIPLE_PIPES;
1846 goto out_disable;
1847 }
1848 crtc = tmp_crtc;
1849 }
1850 }
1851
1852 if (!crtc || crtc->fb == NULL) {
1853 DRM_DEBUG_KMS("no output, disabling\n");
1854 dev_priv->no_fbc_reason = FBC_NO_OUTPUT;
1855 goto out_disable;
1856 }
1857
1858 intel_crtc = to_intel_crtc(crtc);
1859 fb = crtc->fb;
1860 intel_fb = to_intel_framebuffer(fb);
1861 obj = intel_fb->obj;
1862
1863 enable_fbc = i915_enable_fbc;
1864 if (enable_fbc < 0) {
1865 DRM_DEBUG_KMS("fbc set to per-chip default\n");
1866 enable_fbc = 1;
1867 if (INTEL_INFO(dev)->gen <= 5)
1868 enable_fbc = 0;
1869 }
1870 if (!enable_fbc) {
1871 DRM_DEBUG_KMS("fbc disabled per module param\n");
1872 dev_priv->no_fbc_reason = FBC_MODULE_PARAM;
1873 goto out_disable;
1874 }
1875 if (intel_fb->obj->base.size > dev_priv->cfb_size) {
1876 DRM_DEBUG_KMS("framebuffer too large, disabling "
1877 "compression\n");
1878 dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL;
1879 goto out_disable;
1880 }
1881 if ((crtc->mode.flags & DRM_MODE_FLAG_INTERLACE) ||
1882 (crtc->mode.flags & DRM_MODE_FLAG_DBLSCAN)) {
1883 DRM_DEBUG_KMS("mode incompatible with compression, "
1884 "disabling\n");
1885 dev_priv->no_fbc_reason = FBC_UNSUPPORTED_MODE;
1886 goto out_disable;
1887 }
1888 if ((crtc->mode.hdisplay > 2048) ||
1889 (crtc->mode.vdisplay > 1536)) {
1890 DRM_DEBUG_KMS("mode too large for compression, disabling\n");
1891 dev_priv->no_fbc_reason = FBC_MODE_TOO_LARGE;
1892 goto out_disable;
1893 }
1894 if ((IS_I915GM(dev) || IS_I945GM(dev)) && intel_crtc->plane != 0) {
1895 DRM_DEBUG_KMS("plane not 0, disabling compression\n");
1896 dev_priv->no_fbc_reason = FBC_BAD_PLANE;
1897 goto out_disable;
1898 }
1899
1900
1901
1902
1903 if (obj->tiling_mode != I915_TILING_X ||
1904 obj->fence_reg == I915_FENCE_REG_NONE) {
1905 DRM_DEBUG_KMS("framebuffer not tiled or fenced, disabling compression\n");
1906 dev_priv->no_fbc_reason = FBC_NOT_TILED;
1907 goto out_disable;
1908 }
1909
1910
1911 if (in_dbg_master())
1912 goto out_disable;
1913
1914
1915
1916
1917
1918
1919 if (dev_priv->cfb_plane == intel_crtc->plane &&
1920 dev_priv->cfb_fb == fb->base.id &&
1921 dev_priv->cfb_y == crtc->y)
1922 return;
1923
1924 if (intel_fbc_enabled(dev)) {
1925
1926
1927
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940
1941
1942
1943
1944
1945
1946
1947
1948 DRM_DEBUG_KMS("disabling active FBC for update\n");
1949 intel_disable_fbc(dev);
1950 }
1951
1952 intel_enable_fbc(crtc, 500);
1953 return;
1954
1955out_disable:
1956
1957 if (intel_fbc_enabled(dev)) {
1958 DRM_DEBUG_KMS("unsupported config, disabling FBC\n");
1959 intel_disable_fbc(dev);
1960 }
1961}
1962
1963int
1964intel_pin_and_fence_fb_obj(struct drm_device *dev,
1965 struct drm_i915_gem_object *obj,
1966 struct intel_ring_buffer *pipelined)
1967{
1968 struct drm_i915_private *dev_priv = dev->dev_private;
1969 u32 alignment;
1970 int ret;
1971
1972 switch (obj->tiling_mode) {
1973 case I915_TILING_NONE:
1974 if (IS_BROADWATER(dev) || IS_CRESTLINE(dev))
1975 alignment = 128 * 1024;
1976 else if (INTEL_INFO(dev)->gen >= 4)
1977 alignment = 4 * 1024;
1978 else
1979 alignment = 64 * 1024;
1980 break;
1981 case I915_TILING_X:
1982
1983 alignment = 0;
1984 break;
1985 case I915_TILING_Y:
1986
1987 DRM_ERROR("Y tiled not allowed for scan out buffers\n");
1988 return -EINVAL;
1989 default:
1990 BUG();
1991 }
1992
1993 dev_priv->mm.interruptible = false;
1994 ret = i915_gem_object_pin_to_display_plane(obj, alignment, pipelined);
1995 if (ret)
1996 goto err_interruptible;
1997
1998
1999
2000
2001
2002
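	/* Install a fence for tiled scan-out.  Pre-i965 always needs a
	 * fence, whereas 965+ only requires a fence if using
	 * framebuffer compression.  For simplicity, we always install
	 * a fence as the cost is not that onerous.
	 */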
2003 if (obj->tiling_mode != I915_TILING_NONE) {
2004 ret = i915_gem_object_get_fence(obj, pipelined);
2005 if (ret)
2006 goto err_unpin;
2007 }
2008
2009 dev_priv->mm.interruptible = true;
2010 return 0;
2011
2012err_unpin:
2013 i915_gem_object_unpin(obj);
2014err_interruptible:
2015 dev_priv->mm.interruptible = true;
2016 return ret;
2017}
2018
2019static int i9xx_update_plane(struct drm_crtc *crtc, struct drm_framebuffer *fb,
2020 int x, int y)
2021{
2022 struct drm_device *dev = crtc->dev;
2023 struct drm_i915_private *dev_priv = dev->dev_private;
2024 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2025 struct intel_framebuffer *intel_fb;
2026 struct drm_i915_gem_object *obj;
2027 int plane = intel_crtc->plane;
2028 unsigned long Start, Offset;
2029 u32 dspcntr;
2030 u32 reg;
2031
2032 switch (plane) {
2033 case 0:
2034 case 1:
2035 break;
2036 default:
2037 DRM_ERROR("Can't update plane %d in SAREA\n", plane);
2038 return -EINVAL;
2039 }
2040
2041 intel_fb = to_intel_framebuffer(fb);
2042 obj = intel_fb->obj;
2043
2044 reg = DSPCNTR(plane);
2045 dspcntr = I915_READ(reg);
2046
2047 dspcntr &= ~DISPPLANE_PIXFORMAT_MASK;
2048 switch (fb->bits_per_pixel) {
2049 case 8:
2050 dspcntr |= DISPPLANE_8BPP;
2051 break;
2052 case 16:
2053 if (fb->depth == 15)
2054 dspcntr |= DISPPLANE_15_16BPP;
2055 else
2056 dspcntr |= DISPPLANE_16BPP;
2057 break;
2058 case 24:
2059 case 32:
2060 dspcntr |= DISPPLANE_32BPP_NO_ALPHA;
2061 break;
2062 default:
2063 DRM_ERROR("Unknown color depth %d\n", fb->bits_per_pixel);
2064 return -EINVAL;
2065 }
2066 if (INTEL_INFO(dev)->gen >= 4) {
2067 if (obj->tiling_mode != I915_TILING_NONE)
2068 dspcntr |= DISPPLANE_TILED;
2069 else
2070 dspcntr &= ~DISPPLANE_TILED;
2071 }
2072
2073 I915_WRITE(reg, dspcntr);
2074
2075 Start = obj->gtt_offset;
2076 Offset = y * fb->pitch + x * (fb->bits_per_pixel / 8);
2077
2078 DRM_DEBUG_KMS("Writing base %08lX %08lX %d %d %d\n",
2079 Start, Offset, x, y, fb->pitch);
2080 I915_WRITE(DSPSTRIDE(plane), fb->pitch);
2081 if (INTEL_INFO(dev)->gen >= 4) {
2082 I915_WRITE(DSPSURF(plane), Start);
2083 I915_WRITE(DSPTILEOFF(plane), (y << 16) | x);
2084 I915_WRITE(DSPADDR(plane), Offset);
2085 } else
2086 I915_WRITE(DSPADDR(plane), Start + Offset);
2087 POSTING_READ(reg);
2088
2089 return 0;
2090}
2091
2092static int ironlake_update_plane(struct drm_crtc *crtc,
2093 struct drm_framebuffer *fb, int x, int y)
2094{
2095 struct drm_device *dev = crtc->dev;
2096 struct drm_i915_private *dev_priv = dev->dev_private;
2097 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2098 struct intel_framebuffer *intel_fb;
2099 struct drm_i915_gem_object *obj;
2100 int plane = intel_crtc->plane;
2101 unsigned long Start, Offset;
2102 u32 dspcntr;
2103 u32 reg;
2104
2105 switch (plane) {
2106 case 0:
2107 case 1:
2108 case 2:
2109 break;
2110 default:
2111 DRM_ERROR("Can't update plane %d in SAREA\n", plane);
2112 return -EINVAL;
2113 }
2114
2115 intel_fb = to_intel_framebuffer(fb);
2116 obj = intel_fb->obj;
2117
2118 reg = DSPCNTR(plane);
2119 dspcntr = I915_READ(reg);
2120
2121 dspcntr &= ~DISPPLANE_PIXFORMAT_MASK;
2122 switch (fb->bits_per_pixel) {
2123 case 8:
2124 dspcntr |= DISPPLANE_8BPP;
2125 break;
2126 case 16:
2127 if (fb->depth != 16)
2128 return -EINVAL;
2129
2130 dspcntr |= DISPPLANE_16BPP;
2131 break;
2132 case 24:
2133 case 32:
2134 if (fb->depth == 24)
2135 dspcntr |= DISPPLANE_32BPP_NO_ALPHA;
2136 else if (fb->depth == 30)
2137 dspcntr |= DISPPLANE_32BPP_30BIT_NO_ALPHA;
2138 else
2139 return -EINVAL;
2140 break;
2141 default:
2142 DRM_ERROR("Unknown color depth %d\n", fb->bits_per_pixel);
2143 return -EINVAL;
2144 }
2145
2146 if (obj->tiling_mode != I915_TILING_NONE)
2147 dspcntr |= DISPPLANE_TILED;
2148 else
2149 dspcntr &= ~DISPPLANE_TILED;
2150
2151
2152 dspcntr |= DISPPLANE_TRICKLE_FEED_DISABLE;
2153
2154 I915_WRITE(reg, dspcntr);
2155
2156 Start = obj->gtt_offset;
2157 Offset = y * fb->pitch + x * (fb->bits_per_pixel / 8);
2158
2159 DRM_DEBUG_KMS("Writing base %08lX %08lX %d %d %d\n",
2160 Start, Offset, x, y, fb->pitch);
2161 I915_WRITE(DSPSTRIDE(plane), fb->pitch);
2162 I915_WRITE(DSPSURF(plane), Start);
2163 I915_WRITE(DSPTILEOFF(plane), (y << 16) | x);
2164 I915_WRITE(DSPADDR(plane), Offset);
2165 POSTING_READ(reg);
2166
2167 return 0;
2168}
2169
2170
2171static int
2172intel_pipe_set_base_atomic(struct drm_crtc *crtc, struct drm_framebuffer *fb,
2173 int x, int y, enum mode_set_atomic state)
2174{
2175 struct drm_device *dev = crtc->dev;
2176 struct drm_i915_private *dev_priv = dev->dev_private;
2177 int ret;
2178
2179 ret = dev_priv->display.update_plane(crtc, fb, x, y);
2180 if (ret)
2181 return ret;
2182
2183 intel_update_fbc(dev);
2184 intel_increase_pllclock(crtc);
2185
2186 return 0;
2187}
2188
2189static int
2190intel_pipe_set_base(struct drm_crtc *crtc, int x, int y,
2191 struct drm_framebuffer *old_fb)
2192{
2193 struct drm_device *dev = crtc->dev;
2194 struct drm_i915_master_private *master_priv;
2195 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2196 int ret;
2197
2198
2199 if (!crtc->fb) {
2200 DRM_ERROR("No FB bound\n");
2201 return 0;
2202 }
2203
2204 switch (intel_crtc->plane) {
2205 case 0:
2206 case 1:
2207 break;
2208 case 2:
2209 if (IS_IVYBRIDGE(dev))
2210 break;
2211
2212 default:
2213 DRM_ERROR("no plane for crtc\n");
2214 return -EINVAL;
2215 }
2216
2217 mutex_lock(&dev->struct_mutex);
2218 ret = intel_pin_and_fence_fb_obj(dev,
2219 to_intel_framebuffer(crtc->fb)->obj,
2220 NULL);
2221 if (ret != 0) {
2222 mutex_unlock(&dev->struct_mutex);
2223 DRM_ERROR("pin & fence failed\n");
2224 return ret;
2225 }
2226
2227 if (old_fb) {
2228 struct drm_i915_private *dev_priv = dev->dev_private;
2229 struct drm_i915_gem_object *obj = to_intel_framebuffer(old_fb)->obj;
2230
2231 wait_event(dev_priv->pending_flip_queue,
2232 atomic_read(&dev_priv->mm.wedged) ||
2233 atomic_read(&obj->pending_flip) == 0);

		/*
		 * Finish any outstanding GPU activity on the old framebuffer
		 * before it is unpinned below.  A failure here is not fatal
		 * to the mode set, so the return value is deliberately
		 * ignored.
		 */
		ret = i915_gem_object_finish_gpu(obj);
		(void) ret;
2245 }
2246
2247 ret = intel_pipe_set_base_atomic(crtc, crtc->fb, x, y,
2248 LEAVE_ATOMIC_MODE_SET);
2249 if (ret) {
2250 i915_gem_object_unpin(to_intel_framebuffer(crtc->fb)->obj);
2251 mutex_unlock(&dev->struct_mutex);
2252 DRM_ERROR("failed to update base address\n");
2253 return ret;
2254 }
2255
2256 if (old_fb) {
2257 intel_wait_for_vblank(dev, intel_crtc->pipe);
2258 i915_gem_object_unpin(to_intel_framebuffer(old_fb)->obj);
2259 }
2260
2261 mutex_unlock(&dev->struct_mutex);
2262
2263 if (!dev->primary->master)
2264 return 0;
2265
2266 master_priv = dev->primary->master->driver_priv;
2267 if (!master_priv->sarea_priv)
2268 return 0;
2269
2270 if (intel_crtc->pipe) {
2271 master_priv->sarea_priv->pipeB_x = x;
2272 master_priv->sarea_priv->pipeB_y = y;
2273 } else {
2274 master_priv->sarea_priv->pipeA_x = x;
2275 master_priv->sarea_priv->pipeA_y = y;
2276 }
2277
2278 return 0;
2279}
2280
2281static void ironlake_set_pll_edp(struct drm_crtc *crtc, int clock)
2282{
2283 struct drm_device *dev = crtc->dev;
2284 struct drm_i915_private *dev_priv = dev->dev_private;
2285 u32 dpa_ctl;
2286
2287 DRM_DEBUG_KMS("eDP PLL enable for clock %d\n", clock);
2288 dpa_ctl = I915_READ(DP_A);
2289 dpa_ctl &= ~DP_PLL_FREQ_MASK;
2290
2291 if (clock < 200000) {
2292 u32 temp;
2293 dpa_ctl |= DP_PLL_FREQ_160MHZ;

		/*
		 * When running the eDP link at the lower (160MHz) clock,
		 * the additional PLL divider registers below are programmed
		 * as well.
		 */
2300 temp = I915_READ(0x4600c);
2301 temp &= 0xffff0000;
2302 I915_WRITE(0x4600c, temp | 0x8124);
2303
2304 temp = I915_READ(0x46010);
2305 I915_WRITE(0x46010, temp | 1);
2306
2307 temp = I915_READ(0x46034);
2308 I915_WRITE(0x46034, temp | (1 << 24));
2309 } else {
2310 dpa_ctl |= DP_PLL_FREQ_270MHZ;
2311 }
2312 I915_WRITE(DP_A, dpa_ctl);
2313
2314 POSTING_READ(DP_A);
2315 udelay(500);
2316}
2317
2318static void intel_fdi_normal_train(struct drm_crtc *crtc)
2319{
2320 struct drm_device *dev = crtc->dev;
2321 struct drm_i915_private *dev_priv = dev->dev_private;
2322 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2323 int pipe = intel_crtc->pipe;
2324 u32 reg, temp;
2325
2326
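	/* Switch the CPU FDI transmitter out of training into normal mode. */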
2327 reg = FDI_TX_CTL(pipe);
2328 temp = I915_READ(reg);
2329 if (IS_IVYBRIDGE(dev)) {
2330 temp &= ~FDI_LINK_TRAIN_NONE_IVB;
2331 temp |= FDI_LINK_TRAIN_NONE_IVB | FDI_TX_ENHANCE_FRAME_ENABLE;
2332 } else {
2333 temp &= ~FDI_LINK_TRAIN_NONE;
2334 temp |= FDI_LINK_TRAIN_NONE | FDI_TX_ENHANCE_FRAME_ENABLE;
2335 }
2336 I915_WRITE(reg, temp);
2337
2338 reg = FDI_RX_CTL(pipe);
2339 temp = I915_READ(reg);
2340 if (HAS_PCH_CPT(dev)) {
2341 temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
2342 temp |= FDI_LINK_TRAIN_NORMAL_CPT;
2343 } else {
2344 temp &= ~FDI_LINK_TRAIN_NONE;
2345 temp |= FDI_LINK_TRAIN_NONE;
2346 }
2347 I915_WRITE(reg, temp | FDI_RX_ENHANCE_FRAME_ENABLE);
2348
2349
2350 POSTING_READ(reg);
2351 udelay(1000);
2352
2353
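	/* Ivybridge needs the FDI FS/FE error correction bits enabled. */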
2354 if (IS_IVYBRIDGE(dev))
2355 I915_WRITE(reg, I915_READ(reg) | FDI_FS_ERRC_ENABLE |
2356 FDI_FE_ERRC_ENABLE);
2357}
2358
2359static void cpt_phase_pointer_enable(struct drm_device *dev, int pipe)
2360{
2361 struct drm_i915_private *dev_priv = dev->dev_private;
2362 u32 flags = I915_READ(SOUTH_CHICKEN1);
2363
2364 flags |= FDI_PHASE_SYNC_OVR(pipe);
2365 I915_WRITE(SOUTH_CHICKEN1, flags);
2366 flags |= FDI_PHASE_SYNC_EN(pipe);
2367 I915_WRITE(SOUTH_CHICKEN1, flags);
2368 POSTING_READ(SOUTH_CHICKEN1);
2369}
2370
2371
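/* FDI link training for Ironlake/Ibex Peak. */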
2372static void ironlake_fdi_link_train(struct drm_crtc *crtc)
2373{
2374 struct drm_device *dev = crtc->dev;
2375 struct drm_i915_private *dev_priv = dev->dev_private;
2376 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2377 int pipe = intel_crtc->pipe;
2378 int plane = intel_crtc->plane;
2379 u32 reg, temp, tries;
2380
2381
2382 assert_pipe_enabled(dev_priv, pipe);
2383 assert_plane_enabled(dev_priv, plane);
2384
2385
2386
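	/* Unmask the FDI RX bit-lock and symbol-lock interrupts so the training result can be polled below. */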
2387 reg = FDI_RX_IMR(pipe);
2388 temp = I915_READ(reg);
2389 temp &= ~FDI_RX_SYMBOL_LOCK;
2390 temp &= ~FDI_RX_BIT_LOCK;
2391 I915_WRITE(reg, temp);
2392 I915_READ(reg);
2393 udelay(150);
2394
2395
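	/* Enable the CPU FDI transmitter and PCH FDI receiver with training pattern 1. */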
2396 reg = FDI_TX_CTL(pipe);
2397 temp = I915_READ(reg);
2398 temp &= ~(7 << 19);
2399 temp |= (intel_crtc->fdi_lanes - 1) << 19;
2400 temp &= ~FDI_LINK_TRAIN_NONE;
2401 temp |= FDI_LINK_TRAIN_PATTERN_1;
2402 I915_WRITE(reg, temp | FDI_TX_ENABLE);
2403
2404 reg = FDI_RX_CTL(pipe);
2405 temp = I915_READ(reg);
2406 temp &= ~FDI_LINK_TRAIN_NONE;
2407 temp |= FDI_LINK_TRAIN_PATTERN_1;
2408 I915_WRITE(reg, temp | FDI_RX_ENABLE);
2409
2410 POSTING_READ(reg);
2411 udelay(150);
2412
2413
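	/* Ironlake workaround: enable the FDI RX phase sync pointer after enabling FDI. */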
2414 if (HAS_PCH_IBX(dev)) {
2415 I915_WRITE(FDI_RX_CHICKEN(pipe), FDI_RX_PHASE_SYNC_POINTER_OVR);
2416 I915_WRITE(FDI_RX_CHICKEN(pipe), FDI_RX_PHASE_SYNC_POINTER_OVR |
2417 FDI_RX_PHASE_SYNC_POINTER_EN);
2418 }
2419
2420 reg = FDI_RX_IIR(pipe);
2421 for (tries = 0; tries < 5; tries++) {
2422 temp = I915_READ(reg);
2423 DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
2424
2425 if ((temp & FDI_RX_BIT_LOCK)) {
2426 DRM_DEBUG_KMS("FDI train 1 done.\n");
2427 I915_WRITE(reg, temp | FDI_RX_BIT_LOCK);
2428 break;
2429 }
2430 }
2431 if (tries == 5)
2432 DRM_ERROR("FDI train 1 fail!\n");
2433
2434
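	/* Train 2 */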
2435 reg = FDI_TX_CTL(pipe);
2436 temp = I915_READ(reg);
2437 temp &= ~FDI_LINK_TRAIN_NONE;
2438 temp |= FDI_LINK_TRAIN_PATTERN_2;
2439 I915_WRITE(reg, temp);
2440
2441 reg = FDI_RX_CTL(pipe);
2442 temp = I915_READ(reg);
2443 temp &= ~FDI_LINK_TRAIN_NONE;
2444 temp |= FDI_LINK_TRAIN_PATTERN_2;
2445 I915_WRITE(reg, temp);
2446
2447 POSTING_READ(reg);
2448 udelay(150);
2449
2450 reg = FDI_RX_IIR(pipe);
2451 for (tries = 0; tries < 5; tries++) {
2452 temp = I915_READ(reg);
2453 DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
2454
2455 if (temp & FDI_RX_SYMBOL_LOCK) {
2456 I915_WRITE(reg, temp | FDI_RX_SYMBOL_LOCK);
2457 DRM_DEBUG_KMS("FDI train 2 done.\n");
2458 break;
2459 }
2460 }
2461 if (tries == 5)
2462 DRM_ERROR("FDI train 2 fail!\n");
2463
2464 DRM_DEBUG_KMS("FDI train done\n");
2465
2466}
2467
2468static const int snb_b_fdi_train_param[] = {
2469 FDI_LINK_TRAIN_400MV_0DB_SNB_B,
2470 FDI_LINK_TRAIN_400MV_6DB_SNB_B,
2471 FDI_LINK_TRAIN_600MV_3_5DB_SNB_B,
2472 FDI_LINK_TRAIN_800MV_0DB_SNB_B,
2473};
2474
2475
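/* FDI link training for Sandybridge (Cougar Point PCH). */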
2476static void gen6_fdi_link_train(struct drm_crtc *crtc)
2477{
2478 struct drm_device *dev = crtc->dev;
2479 struct drm_i915_private *dev_priv = dev->dev_private;
2480 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2481 int pipe = intel_crtc->pipe;
2482 u32 reg, temp, i;
2483
2484
2485
2486 reg = FDI_RX_IMR(pipe);
2487 temp = I915_READ(reg);
2488 temp &= ~FDI_RX_SYMBOL_LOCK;
2489 temp &= ~FDI_RX_BIT_LOCK;
2490 I915_WRITE(reg, temp);
2491
2492 POSTING_READ(reg);
2493 udelay(150);
2494
2495
2496 reg = FDI_TX_CTL(pipe);
2497 temp = I915_READ(reg);
2498 temp &= ~(7 << 19);
2499 temp |= (intel_crtc->fdi_lanes - 1) << 19;
2500 temp &= ~FDI_LINK_TRAIN_NONE;
2501 temp |= FDI_LINK_TRAIN_PATTERN_1;
2502 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2503
2504 temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
2505 I915_WRITE(reg, temp | FDI_TX_ENABLE);
2506
2507 reg = FDI_RX_CTL(pipe);
2508 temp = I915_READ(reg);
2509 if (HAS_PCH_CPT(dev)) {
2510 temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
2511 temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
2512 } else {
2513 temp &= ~FDI_LINK_TRAIN_NONE;
2514 temp |= FDI_LINK_TRAIN_PATTERN_1;
2515 }
2516 I915_WRITE(reg, temp | FDI_RX_ENABLE);
2517
2518 POSTING_READ(reg);
2519 udelay(150);
2520
2521 if (HAS_PCH_CPT(dev))
2522 cpt_phase_pointer_enable(dev, pipe);
2523
2524 for (i = 0; i < 4; i++) {
2525 reg = FDI_TX_CTL(pipe);
2526 temp = I915_READ(reg);
2527 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2528 temp |= snb_b_fdi_train_param[i];
2529 I915_WRITE(reg, temp);
2530
2531 POSTING_READ(reg);
2532 udelay(500);
2533
2534 reg = FDI_RX_IIR(pipe);
2535 temp = I915_READ(reg);
2536 DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
2537
2538 if (temp & FDI_RX_BIT_LOCK) {
2539 I915_WRITE(reg, temp | FDI_RX_BIT_LOCK);
2540 DRM_DEBUG_KMS("FDI train 1 done.\n");
2541 break;
2542 }
2543 }
2544 if (i == 4)
2545 DRM_ERROR("FDI train 1 fail!\n");
2546
2547
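	/* Train 2 */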
2548 reg = FDI_TX_CTL(pipe);
2549 temp = I915_READ(reg);
2550 temp &= ~FDI_LINK_TRAIN_NONE;
2551 temp |= FDI_LINK_TRAIN_PATTERN_2;
2552 if (IS_GEN6(dev)) {
2553 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2554
2555 temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
2556 }
2557 I915_WRITE(reg, temp);
2558
2559 reg = FDI_RX_CTL(pipe);
2560 temp = I915_READ(reg);
2561 if (HAS_PCH_CPT(dev)) {
2562 temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
2563 temp |= FDI_LINK_TRAIN_PATTERN_2_CPT;
2564 } else {
2565 temp &= ~FDI_LINK_TRAIN_NONE;
2566 temp |= FDI_LINK_TRAIN_PATTERN_2;
2567 }
2568 I915_WRITE(reg, temp);
2569
2570 POSTING_READ(reg);
2571 udelay(150);
2572
2573 for (i = 0; i < 4; i++) {
2574 reg = FDI_TX_CTL(pipe);
2575 temp = I915_READ(reg);
2576 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2577 temp |= snb_b_fdi_train_param[i];
2578 I915_WRITE(reg, temp);
2579
2580 POSTING_READ(reg);
2581 udelay(500);
2582
2583 reg = FDI_RX_IIR(pipe);
2584 temp = I915_READ(reg);
2585 DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
2586
2587 if (temp & FDI_RX_SYMBOL_LOCK) {
2588 I915_WRITE(reg, temp | FDI_RX_SYMBOL_LOCK);
2589 DRM_DEBUG_KMS("FDI train 2 done.\n");
2590 break;
2591 }
2592 }
2593 if (i == 4)
2594 DRM_ERROR("FDI train 2 fail!\n");
2595
2596 DRM_DEBUG_KMS("FDI train done.\n");
2597}
2598
2599
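/* Manual FDI link training for Ivybridge. */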
2600static void ivb_manual_fdi_link_train(struct drm_crtc *crtc)
2601{
2602 struct drm_device *dev = crtc->dev;
2603 struct drm_i915_private *dev_priv = dev->dev_private;
2604 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2605 int pipe = intel_crtc->pipe;
2606 u32 reg, temp, i;
2607
2608
2609
2610 reg = FDI_RX_IMR(pipe);
2611 temp = I915_READ(reg);
2612 temp &= ~FDI_RX_SYMBOL_LOCK;
2613 temp &= ~FDI_RX_BIT_LOCK;
2614 I915_WRITE(reg, temp);
2615
2616 POSTING_READ(reg);
2617 udelay(150);
2618
2619
2620 reg = FDI_TX_CTL(pipe);
2621 temp = I915_READ(reg);
2622 temp &= ~(7 << 19);
2623 temp |= (intel_crtc->fdi_lanes - 1) << 19;
2624 temp &= ~(FDI_LINK_TRAIN_AUTO | FDI_LINK_TRAIN_NONE_IVB);
2625 temp |= FDI_LINK_TRAIN_PATTERN_1_IVB;
2626 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2627 temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
2628 temp |= FDI_COMPOSITE_SYNC;
2629 I915_WRITE(reg, temp | FDI_TX_ENABLE);
2630
2631 reg = FDI_RX_CTL(pipe);
2632 temp = I915_READ(reg);
2633 temp &= ~FDI_LINK_TRAIN_AUTO;
2634 temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
2635 temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
2636 temp |= FDI_COMPOSITE_SYNC;
2637 I915_WRITE(reg, temp | FDI_RX_ENABLE);
2638
2639 POSTING_READ(reg);
2640 udelay(150);
2641
2642 if (HAS_PCH_CPT(dev))
2643 cpt_phase_pointer_enable(dev, pipe);
2644
2645 for (i = 0; i < 4; i++) {
2646 reg = FDI_TX_CTL(pipe);
2647 temp = I915_READ(reg);
2648 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2649 temp |= snb_b_fdi_train_param[i];
2650 I915_WRITE(reg, temp);
2651
2652 POSTING_READ(reg);
2653 udelay(500);
2654
2655 reg = FDI_RX_IIR(pipe);
2656 temp = I915_READ(reg);
2657 DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
2658
2659 if (temp & FDI_RX_BIT_LOCK ||
2660 (I915_READ(reg) & FDI_RX_BIT_LOCK)) {
2661 I915_WRITE(reg, temp | FDI_RX_BIT_LOCK);
2662 DRM_DEBUG_KMS("FDI train 1 done.\n");
2663 break;
2664 }
2665 }
2666 if (i == 4)
2667 DRM_ERROR("FDI train 1 fail!\n");
2668
2669
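	/* Train 2 */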
2670 reg = FDI_TX_CTL(pipe);
2671 temp = I915_READ(reg);
2672 temp &= ~FDI_LINK_TRAIN_NONE_IVB;
2673 temp |= FDI_LINK_TRAIN_PATTERN_2_IVB;
2674 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2675 temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
2676 I915_WRITE(reg, temp);
2677
2678 reg = FDI_RX_CTL(pipe);
2679 temp = I915_READ(reg);
2680 temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
2681 temp |= FDI_LINK_TRAIN_PATTERN_2_CPT;
2682 I915_WRITE(reg, temp);
2683
2684 POSTING_READ(reg);
2685 udelay(150);
2686
2687 for (i = 0; i < 4; i++) {
2688 reg = FDI_TX_CTL(pipe);
2689 temp = I915_READ(reg);
2690 temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
2691 temp |= snb_b_fdi_train_param[i];
2692 I915_WRITE(reg, temp);
2693
2694 POSTING_READ(reg);
2695 udelay(500);
2696
2697 reg = FDI_RX_IIR(pipe);
2698 temp = I915_READ(reg);
2699 DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
2700
2701 if (temp & FDI_RX_SYMBOL_LOCK) {
2702 I915_WRITE(reg, temp | FDI_RX_SYMBOL_LOCK);
2703 DRM_DEBUG_KMS("FDI train 2 done.\n");
2704 break;
2705 }
2706 }
2707 if (i == 4)
2708 DRM_ERROR("FDI train 2 fail!\n");
2709
2710 DRM_DEBUG_KMS("FDI train done.\n");
2711}
2712
2713static void ironlake_fdi_pll_enable(struct drm_crtc *crtc)
2714{
2715 struct drm_device *dev = crtc->dev;
2716 struct drm_i915_private *dev_priv = dev->dev_private;
2717 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2718 int pipe = intel_crtc->pipe;
2719 u32 reg, temp;
2720
2721
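	/* Write the TU size bits (from the pipe M1 value) before enabling the FDI receiver. */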
2722 I915_WRITE(FDI_RX_TUSIZE1(pipe),
2723 I915_READ(PIPE_DATA_M1(pipe)) & TU_SIZE_MASK);
2724
2725
2726 reg = FDI_RX_CTL(pipe);
2727 temp = I915_READ(reg);
2728 temp &= ~((0x7 << 19) | (0x7 << 16));
2729 temp |= (intel_crtc->fdi_lanes - 1) << 19;
2730 temp |= (I915_READ(PIPECONF(pipe)) & PIPE_BPC_MASK) << 11;
2731 I915_WRITE(reg, temp | FDI_RX_PLL_ENABLE);
2732
2733 POSTING_READ(reg);
2734 udelay(200);
2735
2736
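	/* Switch from Rawclk to PCDclk. */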
2737 temp = I915_READ(reg);
2738 I915_WRITE(reg, temp | FDI_PCDCLK);
2739
2740 POSTING_READ(reg);
2741 udelay(200);
2742
2743
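	/* Enable the CPU FDI TX PLL if it is not already on. */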
2744 reg = FDI_TX_CTL(pipe);
2745 temp = I915_READ(reg);
2746 if ((temp & FDI_TX_PLL_ENABLE) == 0) {
2747 I915_WRITE(reg, temp | FDI_TX_PLL_ENABLE);
2748
2749 POSTING_READ(reg);
2750 udelay(100);
2751 }
2752}
2753
2754static void cpt_phase_pointer_disable(struct drm_device *dev, int pipe)
2755{
2756 struct drm_i915_private *dev_priv = dev->dev_private;
2757 u32 flags = I915_READ(SOUTH_CHICKEN1);
2758
2759 flags &= ~(FDI_PHASE_SYNC_EN(pipe));
2760 I915_WRITE(SOUTH_CHICKEN1, flags);
2761 flags &= ~(FDI_PHASE_SYNC_OVR(pipe));
2762 I915_WRITE(SOUTH_CHICKEN1, flags);
2763 POSTING_READ(SOUTH_CHICKEN1);
2764}
2765static void ironlake_fdi_disable(struct drm_crtc *crtc)
2766{
2767 struct drm_device *dev = crtc->dev;
2768 struct drm_i915_private *dev_priv = dev->dev_private;
2769 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2770 int pipe = intel_crtc->pipe;
2771 u32 reg, temp;
2772
2773
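	/* Disable the CPU FDI transmitter and PCH FDI receiver. */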
2774 reg = FDI_TX_CTL(pipe);
2775 temp = I915_READ(reg);
2776 I915_WRITE(reg, temp & ~FDI_TX_ENABLE);
2777 POSTING_READ(reg);
2778
2779 reg = FDI_RX_CTL(pipe);
2780 temp = I915_READ(reg);
2781 temp &= ~(0x7 << 16);
2782 temp |= (I915_READ(PIPECONF(pipe)) & PIPE_BPC_MASK) << 11;
2783 I915_WRITE(reg, temp & ~FDI_RX_ENABLE);
2784
2785 POSTING_READ(reg);
2786 udelay(100);
2787
2788
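	/* Ironlake workaround: disable the FDI RX phase sync pointer after taking FDI down. */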
2789 if (HAS_PCH_IBX(dev)) {
2790 I915_WRITE(FDI_RX_CHICKEN(pipe), FDI_RX_PHASE_SYNC_POINTER_OVR);
		I915_WRITE(FDI_RX_CHICKEN(pipe),
			   I915_READ(FDI_RX_CHICKEN(pipe)) &
			   ~FDI_RX_PHASE_SYNC_POINTER_EN);
2794 } else if (HAS_PCH_CPT(dev)) {
2795 cpt_phase_pointer_disable(dev, pipe);
2796 }
2797
2798
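	/* Leave training pattern 1 selected while the link is down. */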
2799 reg = FDI_TX_CTL(pipe);
2800 temp = I915_READ(reg);
2801 temp &= ~FDI_LINK_TRAIN_NONE;
2802 temp |= FDI_LINK_TRAIN_PATTERN_1;
2803 I915_WRITE(reg, temp);
2804
2805 reg = FDI_RX_CTL(pipe);
2806 temp = I915_READ(reg);
2807 if (HAS_PCH_CPT(dev)) {
2808 temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
2809 temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
2810 } else {
2811 temp &= ~FDI_LINK_TRAIN_NONE;
2812 temp |= FDI_LINK_TRAIN_PATTERN_1;
2813 }
2814
2815 temp &= ~(0x07 << 16);
2816 temp |= (I915_READ(PIPECONF(pipe)) & PIPE_BPC_MASK) << 11;
2817 I915_WRITE(reg, temp);
2818
2819 POSTING_READ(reg);
2820 udelay(100);
2821}
2822
/*
 * When a pipe is disabled, any scanline wait pending in the render ring has
 * to be cleared, otherwise the ring would keep waiting for a scanline that
 * will never arrive.
 */
2827static void intel_clear_scanline_wait(struct drm_device *dev)
2828{
2829 struct drm_i915_private *dev_priv = dev->dev_private;
2830 struct intel_ring_buffer *ring;
2831 u32 tmp;
2832
2833 if (IS_GEN2(dev))
		/* nothing to do for gen2 */
2835 return;
2836
2837 ring = LP_RING(dev_priv);
2838 tmp = I915_READ_CTL(ring);
2839 if (tmp & RING_WAIT)
2840 I915_WRITE_CTL(ring, tmp);
2841}
2842
2843static void intel_crtc_wait_for_pending_flips(struct drm_crtc *crtc)
2844{
2845 struct drm_i915_gem_object *obj;
2846 struct drm_i915_private *dev_priv;
2847
2848 if (crtc->fb == NULL)
2849 return;
2850
2851 obj = to_intel_framebuffer(crtc->fb)->obj;
2852 dev_priv = crtc->dev->dev_private;
2853 wait_event(dev_priv->pending_flip_queue,
2854 atomic_read(&obj->pending_flip) == 0);
2855}
2856
2857static bool intel_crtc_driving_pch(struct drm_crtc *crtc)
2858{
2859 struct drm_device *dev = crtc->dev;
2860 struct drm_mode_config *mode_config = &dev->mode_config;
2861 struct intel_encoder *encoder;
2862
	/*
	 * A CPU eDP output (DP A) does not go through the PCH; the PCH
	 * transcoder and FDI are only needed when every encoder on this
	 * crtc is a true PCH port.
	 */
2867 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) {
2868 if (encoder->base.crtc != crtc)
2869 continue;
2870
2871 switch (encoder->type) {
2872 case INTEL_OUTPUT_EDP:
2873 if (!intel_encoder_is_pch_edp(&encoder->base))
2874 return false;
2875 continue;
2876 }
2877 }
2878
2879 return true;
2880}
2881
/*
 * Enable the PCH resources needed to drive a PCH port from this pipe:
 * FDI training, the PCH PLL, the transcoder timings, the DP transcoder
 * configuration (on CPT) and finally the transcoder itself.
 */
2890static void ironlake_pch_enable(struct drm_crtc *crtc)
2891{
2892 struct drm_device *dev = crtc->dev;
2893 struct drm_i915_private *dev_priv = dev->dev_private;
2894 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2895 int pipe = intel_crtc->pipe;
2896 u32 reg, temp, transc_sel;
2897
2898
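	/* For PCH output, train the FDI link first. */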
2899 dev_priv->display.fdi_link_train(crtc);
2900
2901 intel_enable_pch_pll(dev_priv, pipe);
2902
2903 if (HAS_PCH_CPT(dev)) {
2904 transc_sel = intel_crtc->use_pll_a ? TRANSC_DPLLA_SEL :
2905 TRANSC_DPLLB_SEL;
2906
2907
2908 temp = I915_READ(PCH_DPLL_SEL);
2909 if (pipe == 0) {
2910 temp &= ~(TRANSA_DPLLB_SEL);
2911 temp |= (TRANSA_DPLL_ENABLE | TRANSA_DPLLA_SEL);
2912 } else if (pipe == 1) {
2913 temp &= ~(TRANSB_DPLLB_SEL);
2914 temp |= (TRANSB_DPLL_ENABLE | TRANSB_DPLLB_SEL);
2915 } else if (pipe == 2) {
2916 temp &= ~(TRANSC_DPLLB_SEL);
2917 temp |= (TRANSC_DPLL_ENABLE | transc_sel);
2918 }
2919 I915_WRITE(PCH_DPLL_SEL, temp);
2920 }
2921
2922
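	/* Copy the CPU pipe timings into the transcoder; the panel must be unlocked. */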
2923 assert_panel_unlocked(dev_priv, pipe);
2924 I915_WRITE(TRANS_HTOTAL(pipe), I915_READ(HTOTAL(pipe)));
2925 I915_WRITE(TRANS_HBLANK(pipe), I915_READ(HBLANK(pipe)));
2926 I915_WRITE(TRANS_HSYNC(pipe), I915_READ(HSYNC(pipe)));
2927
2928 I915_WRITE(TRANS_VTOTAL(pipe), I915_READ(VTOTAL(pipe)));
2929 I915_WRITE(TRANS_VBLANK(pipe), I915_READ(VBLANK(pipe)));
2930 I915_WRITE(TRANS_VSYNC(pipe), I915_READ(VSYNC(pipe)));
2931
2932 intel_fdi_normal_train(crtc);
2933
2934
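	/* For PCH DP outputs on CPT, program the DP transcoder control. */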
2935 if (HAS_PCH_CPT(dev) &&
2936 (intel_pipe_has_type(crtc, INTEL_OUTPUT_DISPLAYPORT) ||
2937 intel_pipe_has_type(crtc, INTEL_OUTPUT_EDP))) {
2938 u32 bpc = (I915_READ(PIPECONF(pipe)) & PIPE_BPC_MASK) >> 5;
2939 reg = TRANS_DP_CTL(pipe);
2940 temp = I915_READ(reg);
2941 temp &= ~(TRANS_DP_PORT_SEL_MASK |
2942 TRANS_DP_SYNC_MASK |
2943 TRANS_DP_BPC_MASK);
2944 temp |= (TRANS_DP_OUTPUT_ENABLE |
2945 TRANS_DP_ENH_FRAMING);
2946 temp |= bpc << 9;
2947
2948 if (crtc->mode.flags & DRM_MODE_FLAG_PHSYNC)
2949 temp |= TRANS_DP_HSYNC_ACTIVE_HIGH;
2950 if (crtc->mode.flags & DRM_MODE_FLAG_PVSYNC)
2951 temp |= TRANS_DP_VSYNC_ACTIVE_HIGH;
2952
2953 switch (intel_trans_dp_port_sel(crtc)) {
2954 case PCH_DP_B:
2955 temp |= TRANS_DP_PORT_SEL_B;
2956 break;
2957 case PCH_DP_C:
2958 temp |= TRANS_DP_PORT_SEL_C;
2959 break;
2960 case PCH_DP_D:
2961 temp |= TRANS_DP_PORT_SEL_D;
2962 break;
2963 default:
2964 DRM_DEBUG_KMS("Wrong PCH DP port return. Guess port B\n");
2965 temp |= TRANS_DP_PORT_SEL_B;
2966 break;
2967 }
2968
2969 I915_WRITE(reg, temp);
2970 }
2971
2972 intel_enable_transcoder(dev_priv, pipe);
2973}
2974
2975void intel_cpt_verify_modeset(struct drm_device *dev, int pipe)
2976{
2977 struct drm_i915_private *dev_priv = dev->dev_private;
2978 int dslreg = PIPEDSL(pipe), tc2reg = TRANS_CHICKEN2(pipe);
2979 u32 temp;
2980
2981 temp = I915_READ(dslreg);
2982 udelay(500);
2983 if (wait_for(I915_READ(dslreg) != temp, 5)) {
2984
2985 I915_WRITE(tc2reg, TRANS_AUTOTRAIN_GEN_STALL_DIS);
2986 udelay(250);
2987 I915_WRITE(tc2reg, 0);
2988 if (wait_for(I915_READ(dslreg) != temp, 5))
2989 DRM_ERROR("mode set failed: pipe %d stuck\n", pipe);
2990 }
2991}
2992
2993static void ironlake_crtc_enable(struct drm_crtc *crtc)
2994{
2995 struct drm_device *dev = crtc->dev;
2996 struct drm_i915_private *dev_priv = dev->dev_private;
2997 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
2998 int pipe = intel_crtc->pipe;
2999 int plane = intel_crtc->plane;
3000 u32 temp;
3001 bool is_pch_port;
3002
3003 if (intel_crtc->active)
3004 return;
3005
3006 intel_crtc->active = true;
3007 intel_update_watermarks(dev);
3008
3009 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)) {
3010 temp = I915_READ(PCH_LVDS);
3011 if ((temp & LVDS_PORT_EN) == 0)
3012 I915_WRITE(PCH_LVDS, temp | LVDS_PORT_EN);
3013 }
3014
3015 is_pch_port = intel_crtc_driving_pch(crtc);
3016
3017 if (is_pch_port)
3018 ironlake_fdi_pll_enable(crtc);
3019 else
3020 ironlake_fdi_disable(crtc);
3021
3022
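	/* Enable the panel fitter if a fitted mode was set up for LVDS or eDP. */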
3023 if (dev_priv->pch_pf_size &&
3024 (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS) || HAS_eDP)) {
3025
3026
3027
3028
3029 I915_WRITE(PF_CTL(pipe), PF_ENABLE | PF_FILTER_MED_3x3);
3030 I915_WRITE(PF_WIN_POS(pipe), dev_priv->pch_pf_pos);
3031 I915_WRITE(PF_WIN_SZ(pipe), dev_priv->pch_pf_size);
3032 }
3033
	/*
	 * On Ironlake+ the LUT must be loaded before the pipe is running,
	 * but with the clocks already enabled.
	 */
3038 intel_crtc_load_lut(crtc);
3039
3040 intel_enable_pipe(dev_priv, pipe, is_pch_port);
3041 intel_enable_plane(dev_priv, plane, pipe);
3042
3043 if (is_pch_port)
3044 ironlake_pch_enable(crtc);
3045
3046 mutex_lock(&dev->struct_mutex);
3047 intel_update_fbc(dev);
3048 mutex_unlock(&dev->struct_mutex);
3049
3050 intel_crtc_update_cursor(crtc, true);
3051}
3052
3053static void ironlake_crtc_disable(struct drm_crtc *crtc)
3054{
3055 struct drm_device *dev = crtc->dev;
3056 struct drm_i915_private *dev_priv = dev->dev_private;
3057 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3058 int pipe = intel_crtc->pipe;
3059 int plane = intel_crtc->plane;
3060 u32 reg, temp;
3061
3062 if (!intel_crtc->active)
3063 return;
3064
3065 intel_crtc_wait_for_pending_flips(crtc);
3066 drm_vblank_off(dev, pipe);
3067 intel_crtc_update_cursor(crtc, false);
3068
3069 intel_disable_plane(dev_priv, plane, pipe);
3070
3071 if (dev_priv->cfb_plane == plane)
3072 intel_disable_fbc(dev);
3073
3074 intel_disable_pipe(dev_priv, pipe);
3075
3076
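	/* Disable the panel fitter. */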
3077 I915_WRITE(PF_CTL(pipe), 0);
3078 I915_WRITE(PF_WIN_SZ(pipe), 0);
3079
3080 ironlake_fdi_disable(crtc);
3081
	/* Disable the PCH ports on this pipe before the transcoder that feeds them. */
3087 intel_disable_pch_ports(dev_priv, pipe);
3088
3089 intel_disable_transcoder(dev_priv, pipe);
3090
3091 if (HAS_PCH_CPT(dev)) {
3092
3093 reg = TRANS_DP_CTL(pipe);
3094 temp = I915_READ(reg);
3095 temp &= ~(TRANS_DP_OUTPUT_ENABLE | TRANS_DP_PORT_SEL_MASK);
3096 temp |= TRANS_DP_PORT_SEL_NONE;
3097 I915_WRITE(reg, temp);
3098
3099
3100 temp = I915_READ(PCH_DPLL_SEL);
3101 switch (pipe) {
3102 case 0:
3103 temp &= ~(TRANSA_DPLL_ENABLE | TRANSA_DPLLB_SEL);
3104 break;
3105 case 1:
3106 temp &= ~(TRANSB_DPLL_ENABLE | TRANSB_DPLLB_SEL);
3107 break;
3108 case 2:
3109
3110 temp &= ~(TRANSC_DPLL_ENABLE | TRANSC_DPLLB_SEL);
3111 break;
3112 default:
3113 BUG();
3114 }
3115 I915_WRITE(PCH_DPLL_SEL, temp);
3116 }
3117
3118
3119 if (!intel_crtc->no_pll)
3120 intel_disable_pch_pll(dev_priv, pipe);
3121
3122
3123 reg = FDI_RX_CTL(pipe);
3124 temp = I915_READ(reg);
3125 I915_WRITE(reg, temp & ~FDI_PCDCLK);
3126
3127
3128 reg = FDI_TX_CTL(pipe);
3129 temp = I915_READ(reg);
3130 I915_WRITE(reg, temp & ~FDI_TX_PLL_ENABLE);
3131
3132 POSTING_READ(reg);
3133 udelay(100);
3134
3135 reg = FDI_RX_CTL(pipe);
3136 temp = I915_READ(reg);
3137 I915_WRITE(reg, temp & ~FDI_RX_PLL_ENABLE);
3138
3139
3140 POSTING_READ(reg);
3141 udelay(100);
3142
3143 intel_crtc->active = false;
3144 intel_update_watermarks(dev);
3145
3146 mutex_lock(&dev->struct_mutex);
3147 intel_update_fbc(dev);
3148 intel_clear_scanline_wait(dev);
3149 mutex_unlock(&dev->struct_mutex);
3150}
3151
3152static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
3153{
3154 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3155 int pipe = intel_crtc->pipe;
3156 int plane = intel_crtc->plane;
3157
	/*
	 * The hardware only really distinguishes on and off, so standby and
	 * suspend are treated as fully on.
	 */
3161 switch (mode) {
3162 case DRM_MODE_DPMS_ON:
3163 case DRM_MODE_DPMS_STANDBY:
3164 case DRM_MODE_DPMS_SUSPEND:
3165 DRM_DEBUG_KMS("crtc %d/%d dpms on\n", pipe, plane);
3166 ironlake_crtc_enable(crtc);
3167 break;
3168
3169 case DRM_MODE_DPMS_OFF:
3170 DRM_DEBUG_KMS("crtc %d/%d dpms off\n", pipe, plane);
3171 ironlake_crtc_disable(crtc);
3172 break;
3173 }
3174}
3175
3176static void intel_crtc_dpms_overlay(struct intel_crtc *intel_crtc, bool enable)
3177{
3178 if (!enable && intel_crtc->overlay) {
3179 struct drm_device *dev = intel_crtc->base.dev;
3180 struct drm_i915_private *dev_priv = dev->dev_private;
3181
3182 mutex_lock(&dev->struct_mutex);
3183 dev_priv->mm.interruptible = false;
3184 (void) intel_overlay_switch_off(intel_crtc->overlay);
3185 dev_priv->mm.interruptible = true;
3186 mutex_unlock(&dev->struct_mutex);
3187 }
3188
	/*
	 * There is nothing to do for the enable case here; only the disable
	 * path needs to switch the overlay off.
	 */
3192}
3193
3194static void i9xx_crtc_enable(struct drm_crtc *crtc)
3195{
3196 struct drm_device *dev = crtc->dev;
3197 struct drm_i915_private *dev_priv = dev->dev_private;
3198 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3199 int pipe = intel_crtc->pipe;
3200 int plane = intel_crtc->plane;
3201
3202 if (intel_crtc->active)
3203 return;
3204
3205 intel_crtc->active = true;
3206 intel_update_watermarks(dev);
3207
3208 intel_enable_pll(dev_priv, pipe);
3209 intel_enable_pipe(dev_priv, pipe, false);
3210 intel_enable_plane(dev_priv, plane, pipe);
3211
3212 intel_crtc_load_lut(crtc);
3213 intel_update_fbc(dev);
3214
3215
3216 intel_crtc_dpms_overlay(intel_crtc, true);
3217 intel_crtc_update_cursor(crtc, true);
3218}
3219
3220static void i9xx_crtc_disable(struct drm_crtc *crtc)
3221{
3222 struct drm_device *dev = crtc->dev;
3223 struct drm_i915_private *dev_priv = dev->dev_private;
3224 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3225 int pipe = intel_crtc->pipe;
3226 int plane = intel_crtc->plane;
3227
3228 if (!intel_crtc->active)
3229 return;
3230
3231
3232 intel_crtc_wait_for_pending_flips(crtc);
3233 drm_vblank_off(dev, pipe);
3234 intel_crtc_dpms_overlay(intel_crtc, false);
3235 intel_crtc_update_cursor(crtc, false);
3236
3237 if (dev_priv->cfb_plane == plane)
3238 intel_disable_fbc(dev);
3239
3240 intel_disable_plane(dev_priv, plane, pipe);
3241 intel_disable_pipe(dev_priv, pipe);
3242 intel_disable_pll(dev_priv, pipe);
3243
3244 intel_crtc->active = false;
3245 intel_update_fbc(dev);
3246 intel_update_watermarks(dev);
3247 intel_clear_scanline_wait(dev);
3248}
3249
3250static void i9xx_crtc_dpms(struct drm_crtc *crtc, int mode)
3251{
	/* Standby and suspend are treated as fully on. */
3255 switch (mode) {
3256 case DRM_MODE_DPMS_ON:
3257 case DRM_MODE_DPMS_STANDBY:
3258 case DRM_MODE_DPMS_SUSPEND:
3259 i9xx_crtc_enable(crtc);
3260 break;
3261 case DRM_MODE_DPMS_OFF:
3262 i9xx_crtc_disable(crtc);
3263 break;
3264 }
3265}
3266
/*
 * Set the power management mode of the pipe and plane, and mirror the result
 * into the legacy SAREA for DRI1 clients.
 */
3270static void intel_crtc_dpms(struct drm_crtc *crtc, int mode)
3271{
3272 struct drm_device *dev = crtc->dev;
3273 struct drm_i915_private *dev_priv = dev->dev_private;
3274 struct drm_i915_master_private *master_priv;
3275 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3276 int pipe = intel_crtc->pipe;
3277 bool enabled;
3278
3279 if (intel_crtc->dpms_mode == mode)
3280 return;
3281
3282 intel_crtc->dpms_mode = mode;
3283
3284 dev_priv->display.dpms(crtc, mode);
3285
3286 if (!dev->primary->master)
3287 return;
3288
3289 master_priv = dev->primary->master->driver_priv;
3290 if (!master_priv->sarea_priv)
3291 return;
3292
3293 enabled = crtc->enabled && mode != DRM_MODE_DPMS_OFF;
3294
3295 switch (pipe) {
3296 case 0:
3297 master_priv->sarea_priv->pipeA_w = enabled ? crtc->mode.hdisplay : 0;
3298 master_priv->sarea_priv->pipeA_h = enabled ? crtc->mode.vdisplay : 0;
3299 break;
3300 case 1:
3301 master_priv->sarea_priv->pipeB_w = enabled ? crtc->mode.hdisplay : 0;
3302 master_priv->sarea_priv->pipeB_h = enabled ? crtc->mode.vdisplay : 0;
3303 break;
3304 default:
3305 DRM_ERROR("Can't update pipe %c in SAREA\n", pipe_name(pipe));
3306 break;
3307 }
3308}
3309
3310static void intel_crtc_disable(struct drm_crtc *crtc)
3311{
3312 struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private;
3313 struct drm_device *dev = crtc->dev;
3314
3315 crtc_funcs->dpms(crtc, DRM_MODE_DPMS_OFF);
3316
3317 if (crtc->fb) {
3318 mutex_lock(&dev->struct_mutex);
3319 i915_gem_object_unpin(to_intel_framebuffer(crtc->fb)->obj);
3320 mutex_unlock(&dev->struct_mutex);
3321 }
3322}
3323
/*
 * Prepare for a mode set: the CRTC helper expects the pipe to be fully shut
 * down before the new configuration is programmed, so prepare disables the
 * CRTC and commit re-enables it afterwards.
 */
3332static void i9xx_crtc_prepare(struct drm_crtc *crtc)
3333{
3334 i9xx_crtc_disable(crtc);
3335}
3336
3337static void i9xx_crtc_commit(struct drm_crtc *crtc)
3338{
3339 i9xx_crtc_enable(crtc);
3340}
3341
3342static void ironlake_crtc_prepare(struct drm_crtc *crtc)
3343{
3344 ironlake_crtc_disable(crtc);
3345}
3346
3347static void ironlake_crtc_commit(struct drm_crtc *crtc)
3348{
3349 ironlake_crtc_enable(crtc);
3350}
3351
3352void intel_encoder_prepare(struct drm_encoder *encoder)
3353{
3354 struct drm_encoder_helper_funcs *encoder_funcs = encoder->helper_private;
3355
3356 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_OFF);
3357}
3358
3359void intel_encoder_commit(struct drm_encoder *encoder)
3360{
3361 struct drm_encoder_helper_funcs *encoder_funcs = encoder->helper_private;
3362 struct drm_device *dev = encoder->dev;
3363 struct intel_encoder *intel_encoder = to_intel_encoder(encoder);
3364 struct intel_crtc *intel_crtc = to_intel_crtc(intel_encoder->base.crtc);
3365
3366
3367 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_ON);
3368
3369 if (HAS_PCH_CPT(dev))
3370 intel_cpt_verify_modeset(dev, intel_crtc->pipe);
3371}
3372
3373void intel_encoder_destroy(struct drm_encoder *encoder)
3374{
3375 struct intel_encoder *intel_encoder = to_intel_encoder(encoder);
3376
3377 drm_encoder_cleanup(encoder);
3378 kfree(intel_encoder);
3379}
3380
3381static bool intel_crtc_mode_fixup(struct drm_crtc *crtc,
3382 struct drm_display_mode *mode,
3383 struct drm_display_mode *adjusted_mode)
3384{
3385 struct drm_device *dev = crtc->dev;
3386
3387 if (HAS_PCH_SPLIT(dev)) {
3388
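		/* The FDI link clock is fixed at 2.7GHz, which limits the pixel clock. */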
3389 if (mode->clock * 3 > IRONLAKE_FDI_FREQ * 4)
3390 return false;
3391 }
3392
3393
3394
3395
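	/* Fill in the CRTC timing info if an encoder has not already done so. */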
3396 if (adjusted_mode->crtc_htotal == 0)
3397 drm_mode_set_crtcinfo(adjusted_mode, 0);
3398
3399 return true;
3400}
3401
3402static int i945_get_display_clock_speed(struct drm_device *dev)
3403{
3404 return 400000;
3405}
3406
3407static int i915_get_display_clock_speed(struct drm_device *dev)
3408{
3409 return 333000;
3410}
3411
3412static int i9xx_misc_get_display_clock_speed(struct drm_device *dev)
3413{
3414 return 200000;
3415}
3416
3417static int i915gm_get_display_clock_speed(struct drm_device *dev)
3418{
3419 u16 gcfgc = 0;
3420
3421 pci_read_config_word(dev->pdev, GCFGC, &gcfgc);
3422
3423 if (gcfgc & GC_LOW_FREQUENCY_ENABLE)
3424 return 133000;
3425 else {
3426 switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
3427 case GC_DISPLAY_CLOCK_333_MHZ:
3428 return 333000;
3429 default:
3430 case GC_DISPLAY_CLOCK_190_200_MHZ:
3431 return 190000;
3432 }
3433 }
3434}
3435
3436static int i865_get_display_clock_speed(struct drm_device *dev)
3437{
3438 return 266000;
3439}
3440
3441static int i855_get_display_clock_speed(struct drm_device *dev)
3442{
3443 u16 hpllcc = 0;

	/*
	 * hpllcc is deliberately left at zero rather than read back from the
	 * hardware; the clock control field is assumed to be in its default
	 * state.
	 */
3447 switch (hpllcc & GC_CLOCK_CONTROL_MASK) {
3448 case GC_CLOCK_133_200:
3449 case GC_CLOCK_100_200:
3450 return 200000;
3451 case GC_CLOCK_166_250:
3452 return 250000;
3453 case GC_CLOCK_100_133:
3454 return 133000;
3455 }
3456
3457
3458 return 0;
3459}
3460
3461static int i830_get_display_clock_speed(struct drm_device *dev)
3462{
3463 return 133000;
3464}
3465
3466struct fdi_m_n {
3467 u32 tu;
3468 u32 gmch_m;
3469 u32 gmch_n;
3470 u32 link_m;
3471 u32 link_n;
3472};
3473
3474static void
3475fdi_reduce_ratio(u32 *num, u32 *den)
3476{
3477 while (*num > 0xffffff || *den > 0xffffff) {
3478 *num >>= 1;
3479 *den >>= 1;
3480 }
3481}
3482
3483static void
3484ironlake_compute_m_n(int bits_per_pixel, int nlanes, int pixel_clock,
3485 int link_clock, struct fdi_m_n *m_n)
3486{
3487 m_n->tu = 64;
3488
3489
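	/* Data M/N = (pixel clock * bpp) : (link clock * lanes * 8). */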
3490 m_n->gmch_m = bits_per_pixel * pixel_clock;
3491 m_n->gmch_n = link_clock * nlanes * 8;
3492 fdi_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
3493
3494 m_n->link_m = pixel_clock;
3495 m_n->link_n = link_clock;
3496 fdi_reduce_ratio(&m_n->link_m, &m_n->link_n);
3497}
3498
3499
3500struct intel_watermark_params {
3501 unsigned long fifo_size;
3502 unsigned long max_wm;
3503 unsigned long default_wm;
3504 unsigned long guard_size;
3505 unsigned long cacheline_size;
3506};
3507
3508
3509static const struct intel_watermark_params pineview_display_wm = {
3510 PINEVIEW_DISPLAY_FIFO,
3511 PINEVIEW_MAX_WM,
3512 PINEVIEW_DFT_WM,
3513 PINEVIEW_GUARD_WM,
3514 PINEVIEW_FIFO_LINE_SIZE
3515};
3516static const struct intel_watermark_params pineview_display_hplloff_wm = {
3517 PINEVIEW_DISPLAY_FIFO,
3518 PINEVIEW_MAX_WM,
3519 PINEVIEW_DFT_HPLLOFF_WM,
3520 PINEVIEW_GUARD_WM,
3521 PINEVIEW_FIFO_LINE_SIZE
3522};
3523static const struct intel_watermark_params pineview_cursor_wm = {
3524 PINEVIEW_CURSOR_FIFO,
3525 PINEVIEW_CURSOR_MAX_WM,
3526 PINEVIEW_CURSOR_DFT_WM,
3527 PINEVIEW_CURSOR_GUARD_WM,
3528 PINEVIEW_FIFO_LINE_SIZE,
3529};
3530static const struct intel_watermark_params pineview_cursor_hplloff_wm = {
3531 PINEVIEW_CURSOR_FIFO,
3532 PINEVIEW_CURSOR_MAX_WM,
3533 PINEVIEW_CURSOR_DFT_WM,
3534 PINEVIEW_CURSOR_GUARD_WM,
3535 PINEVIEW_FIFO_LINE_SIZE
3536};
3537static const struct intel_watermark_params g4x_wm_info = {
3538 G4X_FIFO_SIZE,
3539 G4X_MAX_WM,
3540 G4X_MAX_WM,
3541 2,
3542 G4X_FIFO_LINE_SIZE,
3543};
3544static const struct intel_watermark_params g4x_cursor_wm_info = {
3545 I965_CURSOR_FIFO,
3546 I965_CURSOR_MAX_WM,
3547 I965_CURSOR_DFT_WM,
3548 2,
3549 G4X_FIFO_LINE_SIZE,
3550};
3551static const struct intel_watermark_params i965_cursor_wm_info = {
3552 I965_CURSOR_FIFO,
3553 I965_CURSOR_MAX_WM,
3554 I965_CURSOR_DFT_WM,
3555 2,
3556 I915_FIFO_LINE_SIZE,
3557};
3558static const struct intel_watermark_params i945_wm_info = {
3559 I945_FIFO_SIZE,
3560 I915_MAX_WM,
3561 1,
3562 2,
3563 I915_FIFO_LINE_SIZE
3564};
3565static const struct intel_watermark_params i915_wm_info = {
3566 I915_FIFO_SIZE,
3567 I915_MAX_WM,
3568 1,
3569 2,
3570 I915_FIFO_LINE_SIZE
3571};
3572static const struct intel_watermark_params i855_wm_info = {
3573 I855GM_FIFO_SIZE,
3574 I915_MAX_WM,
3575 1,
3576 2,
3577 I830_FIFO_LINE_SIZE
3578};
3579static const struct intel_watermark_params i830_wm_info = {
3580 I830_FIFO_SIZE,
3581 I915_MAX_WM,
3582 1,
3583 2,
3584 I830_FIFO_LINE_SIZE
3585};
3586
3587static const struct intel_watermark_params ironlake_display_wm_info = {
3588 ILK_DISPLAY_FIFO,
3589 ILK_DISPLAY_MAXWM,
3590 ILK_DISPLAY_DFTWM,
3591 2,
3592 ILK_FIFO_LINE_SIZE
3593};
3594static const struct intel_watermark_params ironlake_cursor_wm_info = {
3595 ILK_CURSOR_FIFO,
3596 ILK_CURSOR_MAXWM,
3597 ILK_CURSOR_DFTWM,
3598 2,
3599 ILK_FIFO_LINE_SIZE
3600};
3601static const struct intel_watermark_params ironlake_display_srwm_info = {
3602 ILK_DISPLAY_SR_FIFO,
3603 ILK_DISPLAY_MAX_SRWM,
3604 ILK_DISPLAY_DFT_SRWM,
3605 2,
3606 ILK_FIFO_LINE_SIZE
3607};
3608static const struct intel_watermark_params ironlake_cursor_srwm_info = {
3609 ILK_CURSOR_SR_FIFO,
3610 ILK_CURSOR_MAX_SRWM,
3611 ILK_CURSOR_DFT_SRWM,
3612 2,
3613 ILK_FIFO_LINE_SIZE
3614};
3615
3616static const struct intel_watermark_params sandybridge_display_wm_info = {
3617 SNB_DISPLAY_FIFO,
3618 SNB_DISPLAY_MAXWM,
3619 SNB_DISPLAY_DFTWM,
3620 2,
3621 SNB_FIFO_LINE_SIZE
3622};
3623static const struct intel_watermark_params sandybridge_cursor_wm_info = {
3624 SNB_CURSOR_FIFO,
3625 SNB_CURSOR_MAXWM,
3626 SNB_CURSOR_DFTWM,
3627 2,
3628 SNB_FIFO_LINE_SIZE
3629};
3630static const struct intel_watermark_params sandybridge_display_srwm_info = {
3631 SNB_DISPLAY_SR_FIFO,
3632 SNB_DISPLAY_MAX_SRWM,
3633 SNB_DISPLAY_DFT_SRWM,
3634 2,
3635 SNB_FIFO_LINE_SIZE
3636};
3637static const struct intel_watermark_params sandybridge_cursor_srwm_info = {
3638 SNB_CURSOR_SR_FIFO,
3639 SNB_CURSOR_MAX_SRWM,
3640 SNB_CURSOR_DFT_SRWM,
3641 2,
3642 SNB_FIFO_LINE_SIZE
3643};
3644
/**
 * intel_calculate_wm - calculate a FIFO watermark level
 * @clock_in_khz: pixel clock of the active mode
 * @wm: watermark parameters for the platform (FIFO/cacheline sizes, limits)
 * @fifo_size: size of the FIFO available to this plane
 * @pixel_size: bytes per pixel of the framebuffer
 * @latency_ns: memory latency to hide, in nanoseconds
 *
 * The watermark is the FIFO fill level at which the display engine starts
 * fetching from memory again.  The FIFO drains at a rate given by the pixel
 * clock and pixel size, so the number of entries consumed during the memory
 * latency window (plus a guard band) is subtracted from the FIFO size.  The
 * result is clamped to the platform maximum, and the default watermark is
 * used if the computation underflows.
 */
3664static unsigned long intel_calculate_wm(unsigned long clock_in_khz,
3665 const struct intel_watermark_params *wm,
3666 int fifo_size,
3667 int pixel_size,
3668 unsigned long latency_ns)
3669{
3670 long entries_required, wm_size;
3671

	/*
	 * Entries drained from the FIFO during the latency window:
	 * (pixel clock in kHz / 1000) * bytes per pixel * latency in ns,
	 * divided by 1000 and rounded up to whole cachelines below.
	 */
3678 entries_required = ((clock_in_khz / 1000) * pixel_size * latency_ns) /
3679 1000;
3680 entries_required = DIV_ROUND_UP(entries_required, wm->cacheline_size);
3681
3682 DRM_DEBUG_KMS("FIFO entries required for mode: %ld\n", entries_required);
3683
3684 wm_size = fifo_size - (entries_required + wm->guard_size);
3685
3686 DRM_DEBUG_KMS("FIFO watermark level: %ld\n", wm_size);
3687
3688
3689 if (wm_size > (long)wm->max_wm)
3690 wm_size = wm->max_wm;
3691 if (wm_size <= 0)
3692 wm_size = wm->default_wm;
3693 return wm_size;
3694}
3695
3696struct cxsr_latency {
3697 int is_desktop;
3698 int is_ddr3;
3699 unsigned long fsb_freq;
3700 unsigned long mem_freq;
3701 unsigned long display_sr;
3702 unsigned long display_hpll_disable;
3703 unsigned long cursor_sr;
3704 unsigned long cursor_hpll_disable;
3705};
3706
3707static const struct cxsr_latency cxsr_latency_table[] = {
3708 {1, 0, 800, 400, 3382, 33382, 3983, 33983},
3709 {1, 0, 800, 667, 3354, 33354, 3807, 33807},
3710 {1, 0, 800, 800, 3347, 33347, 3763, 33763},
3711 {1, 1, 800, 667, 6420, 36420, 6873, 36873},
3712 {1, 1, 800, 800, 5902, 35902, 6318, 36318},
3713
3714 {1, 0, 667, 400, 3400, 33400, 4021, 34021},
3715 {1, 0, 667, 667, 3372, 33372, 3845, 33845},
3716 {1, 0, 667, 800, 3386, 33386, 3822, 33822},
3717 {1, 1, 667, 667, 6438, 36438, 6911, 36911},
3718 {1, 1, 667, 800, 5941, 35941, 6377, 36377},
3719
3720 {1, 0, 400, 400, 3472, 33472, 4173, 34173},
3721 {1, 0, 400, 667, 3443, 33443, 3996, 33996},
3722 {1, 0, 400, 800, 3430, 33430, 3946, 33946},
3723 {1, 1, 400, 667, 6509, 36509, 7062, 37062},
3724 {1, 1, 400, 800, 5985, 35985, 6501, 36501},
3725
3726 {0, 0, 800, 400, 3438, 33438, 4065, 34065},
3727 {0, 0, 800, 667, 3410, 33410, 3889, 33889},
3728 {0, 0, 800, 800, 3403, 33403, 3845, 33845},
3729 {0, 1, 800, 667, 6476, 36476, 6955, 36955},
3730 {0, 1, 800, 800, 5958, 35958, 6400, 36400},
3731
3732 {0, 0, 667, 400, 3456, 33456, 4103, 34106},
3733 {0, 0, 667, 667, 3428, 33428, 3927, 33927},
3734 {0, 0, 667, 800, 3443, 33443, 3905, 33905},
3735 {0, 1, 667, 667, 6494, 36494, 6993, 36993},
3736 {0, 1, 667, 800, 5998, 35998, 6460, 36460},
3737
3738 {0, 0, 400, 400, 3528, 33528, 4255, 34255},
3739 {0, 0, 400, 667, 3500, 33500, 4079, 34079},
3740 {0, 0, 400, 800, 3487, 33487, 4029, 34029},
3741 {0, 1, 400, 667, 6566, 36566, 7145, 37145},
3742 {0, 1, 400, 800, 6042, 36042, 6584, 36584},
3743};
3744
3745static const struct cxsr_latency *intel_get_cxsr_latency(int is_desktop,
3746 int is_ddr3,
3747 int fsb,
3748 int mem)
3749{
3750 const struct cxsr_latency *latency;
3751 int i;
3752
3753 if (fsb == 0 || mem == 0)
3754 return NULL;
3755
3756 for (i = 0; i < ARRAY_SIZE(cxsr_latency_table); i++) {
3757 latency = &cxsr_latency_table[i];
3758 if (is_desktop == latency->is_desktop &&
3759 is_ddr3 == latency->is_ddr3 &&
3760 fsb == latency->fsb_freq && mem == latency->mem_freq)
3761 return latency;
3762 }
3763
3764 DRM_DEBUG_KMS("Unknown FSB/MEM found, disable CxSR\n");
3765
3766 return NULL;
3767}
3768
3769static void pineview_disable_cxsr(struct drm_device *dev)
3770{
3771 struct drm_i915_private *dev_priv = dev->dev_private;
3772
3773
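	/* Deactivate CxSR (self-refresh). */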
3774 I915_WRITE(DSPFW3, I915_READ(DSPFW3) & ~PINEVIEW_SELF_REFRESH_EN);
3775}
3776
/*
 * FIFO fetch latency depends on the memory configuration (speed and number
 * of channels), the chipset and the current memory controller state, so a
 * deliberately pessimistic value (in nanoseconds) is assumed here.
 */
3791static const int latency_ns = 5000;
3792
3793static int i9xx_get_fifo_size(struct drm_device *dev, int plane)
3794{
3795 struct drm_i915_private *dev_priv = dev->dev_private;
3796 uint32_t dsparb = I915_READ(DSPARB);
3797 int size;
3798
3799 size = dsparb & 0x7f;
3800 if (plane)
3801 size = ((dsparb >> DSPARB_CSTART_SHIFT) & 0x7f) - size;
3802
3803 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb,
3804 plane ? "B" : "A", size);
3805
3806 return size;
3807}
3808
3809static int i85x_get_fifo_size(struct drm_device *dev, int plane)
3810{
3811 struct drm_i915_private *dev_priv = dev->dev_private;
3812 uint32_t dsparb = I915_READ(DSPARB);
3813 int size;
3814
3815 size = dsparb & 0x1ff;
3816 if (plane)
3817 size = ((dsparb >> DSPARB_BEND_SHIFT) & 0x1ff) - size;
3818 size >>= 1;
3819
3820 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb,
3821 plane ? "B" : "A", size);
3822
3823 return size;
3824}
3825
3826static int i845_get_fifo_size(struct drm_device *dev, int plane)
3827{
3828 struct drm_i915_private *dev_priv = dev->dev_private;
3829 uint32_t dsparb = I915_READ(DSPARB);
3830 int size;
3831
3832 size = dsparb & 0x7f;
3833 size >>= 2;
3834
3835 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb,
3836 plane ? "B" : "A",
3837 size);
3838
3839 return size;
3840}
3841
3842static int i830_get_fifo_size(struct drm_device *dev, int plane)
3843{
3844 struct drm_i915_private *dev_priv = dev->dev_private;
3845 uint32_t dsparb = I915_READ(DSPARB);
3846 int size;
3847
3848 size = dsparb & 0x7f;
3849 size >>= 1;
3850
3851 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb,
3852 plane ? "B" : "A", size);
3853
3854 return size;
3855}
3856
3857static struct drm_crtc *single_enabled_crtc(struct drm_device *dev)
3858{
3859 struct drm_crtc *crtc, *enabled = NULL;
3860
3861 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
3862 if (crtc->enabled && crtc->fb) {
3863 if (enabled)
3864 return NULL;
3865 enabled = crtc;
3866 }
3867 }
3868
3869 return enabled;
3870}
3871
3872static void pineview_update_wm(struct drm_device *dev)
3873{
3874 struct drm_i915_private *dev_priv = dev->dev_private;
3875 struct drm_crtc *crtc;
3876 const struct cxsr_latency *latency;
3877 u32 reg;
3878 unsigned long wm;
3879
3880 latency = intel_get_cxsr_latency(IS_PINEVIEW_G(dev), dev_priv->is_ddr3,
3881 dev_priv->fsb_freq, dev_priv->mem_freq);
3882 if (!latency) {
3883 DRM_DEBUG_KMS("Unknown FSB/MEM found, disable CxSR\n");
3884 pineview_disable_cxsr(dev);
3885 return;
3886 }
3887
3888 crtc = single_enabled_crtc(dev);
3889 if (crtc) {
3890 int clock = crtc->mode.clock;
3891 int pixel_size = crtc->fb->bits_per_pixel / 8;
3892
3893
3894 wm = intel_calculate_wm(clock, &pineview_display_wm,
3895 pineview_display_wm.fifo_size,
3896 pixel_size, latency->display_sr);
3897 reg = I915_READ(DSPFW1);
3898 reg &= ~DSPFW_SR_MASK;
3899 reg |= wm << DSPFW_SR_SHIFT;
3900 I915_WRITE(DSPFW1, reg);
3901 DRM_DEBUG_KMS("DSPFW1 register is %x\n", reg);
3902
3903
3904 wm = intel_calculate_wm(clock, &pineview_cursor_wm,
3905 pineview_display_wm.fifo_size,
3906 pixel_size, latency->cursor_sr);
3907 reg = I915_READ(DSPFW3);
3908 reg &= ~DSPFW_CURSOR_SR_MASK;
3909 reg |= (wm & 0x3f) << DSPFW_CURSOR_SR_SHIFT;
3910 I915_WRITE(DSPFW3, reg);
3911
3912
3913 wm = intel_calculate_wm(clock, &pineview_display_hplloff_wm,
3914 pineview_display_hplloff_wm.fifo_size,
3915 pixel_size, latency->display_hpll_disable);
3916 reg = I915_READ(DSPFW3);
3917 reg &= ~DSPFW_HPLL_SR_MASK;
3918 reg |= wm & DSPFW_HPLL_SR_MASK;
3919 I915_WRITE(DSPFW3, reg);
3920
3921
3922 wm = intel_calculate_wm(clock, &pineview_cursor_hplloff_wm,
3923 pineview_display_hplloff_wm.fifo_size,
3924 pixel_size, latency->cursor_hpll_disable);
3925 reg = I915_READ(DSPFW3);
3926 reg &= ~DSPFW_HPLL_CURSOR_MASK;
3927 reg |= (wm & 0x3f) << DSPFW_HPLL_CURSOR_SHIFT;
3928 I915_WRITE(DSPFW3, reg);
3929 DRM_DEBUG_KMS("DSPFW3 register is %x\n", reg);
3930
3931
3932 I915_WRITE(DSPFW3,
3933 I915_READ(DSPFW3) | PINEVIEW_SELF_REFRESH_EN);
3934 DRM_DEBUG_KMS("Self-refresh is enabled\n");
3935 } else {
3936 pineview_disable_cxsr(dev);
3937 DRM_DEBUG_KMS("Self-refresh is disabled\n");
3938 }
3939}
3940
3941static bool g4x_compute_wm0(struct drm_device *dev,
3942 int plane,
3943 const struct intel_watermark_params *display,
3944 int display_latency_ns,
3945 const struct intel_watermark_params *cursor,
3946 int cursor_latency_ns,
3947 int *plane_wm,
3948 int *cursor_wm)
3949{
3950 struct drm_crtc *crtc;
3951 int htotal, hdisplay, clock, pixel_size;
3952 int line_time_us, line_count;
3953 int entries, tlb_miss;
3954
3955 crtc = intel_get_crtc_for_plane(dev, plane);
3956 if (crtc->fb == NULL || !crtc->enabled) {
3957 *cursor_wm = cursor->guard_size;
3958 *plane_wm = display->guard_size;
3959 return false;
3960 }
3961
3962 htotal = crtc->mode.htotal;
3963 hdisplay = crtc->mode.hdisplay;
3964 clock = crtc->mode.clock;
3965 pixel_size = crtc->fb->bits_per_pixel / 8;
3966
3967
3968 entries = ((clock * pixel_size / 1000) * display_latency_ns) / 1000;
3969 tlb_miss = display->fifo_size*display->cacheline_size - hdisplay * 8;
3970 if (tlb_miss > 0)
3971 entries += tlb_miss;
3972 entries = DIV_ROUND_UP(entries, display->cacheline_size);
3973 *plane_wm = entries + display->guard_size;
3974 if (*plane_wm > (int)display->max_wm)
3975 *plane_wm = display->max_wm;
3976
3977
3978 line_time_us = ((htotal * 1000) / clock);
3979 line_count = (cursor_latency_ns / line_time_us + 1000) / 1000;
3980 entries = line_count * 64 * pixel_size;
3981 tlb_miss = cursor->fifo_size*cursor->cacheline_size - hdisplay * 8;
3982 if (tlb_miss > 0)
3983 entries += tlb_miss;
3984 entries = DIV_ROUND_UP(entries, cursor->cacheline_size);
3985 *cursor_wm = entries + cursor->guard_size;
3986 if (*cursor_wm > (int)cursor->max_wm)
3987 *cursor_wm = (int)cursor->max_wm;
3988
3989 return true;
3990}

/*
 * Check whether the computed self-refresh watermarks fit within the g4x
 * hardware limits; self-refresh stays disabled if they do not.
 */
3999static bool g4x_check_srwm(struct drm_device *dev,
4000 int display_wm, int cursor_wm,
4001 const struct intel_watermark_params *display,
4002 const struct intel_watermark_params *cursor)
4003{
4004 DRM_DEBUG_KMS("SR watermark: display plane %d, cursor %d\n",
4005 display_wm, cursor_wm);
4006
4007 if (display_wm > display->max_wm) {
4008 DRM_DEBUG_KMS("display watermark is too large(%d/%ld), disabling\n",
4009 display_wm, display->max_wm);
4010 return false;
4011 }
4012
4013 if (cursor_wm > cursor->max_wm) {
4014 DRM_DEBUG_KMS("cursor watermark is too large(%d/%ld), disabling\n",
4015 cursor_wm, cursor->max_wm);
4016 return false;
4017 }
4018
4019 if (!(display_wm || cursor_wm)) {
4020 DRM_DEBUG_KMS("SR latency is 0, disabling\n");
4021 return false;
4022 }
4023
4024 return true;
4025}
4026
4027static bool g4x_compute_srwm(struct drm_device *dev,
4028 int plane,
4029 int latency_ns,
4030 const struct intel_watermark_params *display,
4031 const struct intel_watermark_params *cursor,
4032 int *display_wm, int *cursor_wm)
4033{
4034 struct drm_crtc *crtc;
4035 int hdisplay, htotal, pixel_size, clock;
4036 unsigned long line_time_us;
4037 int line_count, line_size;
4038 int small, large;
4039 int entries;
4040
4041 if (!latency_ns) {
4042 *display_wm = *cursor_wm = 0;
4043 return false;
4044 }
4045
4046 crtc = intel_get_crtc_for_plane(dev, plane);
4047 hdisplay = crtc->mode.hdisplay;
4048 htotal = crtc->mode.htotal;
4049 clock = crtc->mode.clock;
4050 pixel_size = crtc->fb->bits_per_pixel / 8;
4051
4052 line_time_us = (htotal * 1000) / clock;
4053 line_count = (latency_ns / line_time_us + 1000) / 1000;
4054 line_size = hdisplay * pixel_size;
4055
4056
4057 small = ((clock * pixel_size / 1000) * latency_ns) / 1000;
4058 large = line_count * line_size;
4059
4060 entries = DIV_ROUND_UP(min(small, large), display->cacheline_size);
4061 *display_wm = entries + display->guard_size;
4062
4063
4064 entries = line_count * pixel_size * 64;
4065 entries = DIV_ROUND_UP(entries, cursor->cacheline_size);
4066 *cursor_wm = entries + cursor->guard_size;
4067
4068 return g4x_check_srwm(dev,
4069 *display_wm, *cursor_wm,
4070 display, cursor);
4071}
4072
4073#define single_plane_enabled(mask) is_power_of_2(mask)
4074
4075static void g4x_update_wm(struct drm_device *dev)
4076{
4077 static const int sr_latency_ns = 12000;
4078 struct drm_i915_private *dev_priv = dev->dev_private;
4079 int planea_wm, planeb_wm, cursora_wm, cursorb_wm;
4080 int plane_sr, cursor_sr;
4081 unsigned int enabled = 0;
4082
4083 if (g4x_compute_wm0(dev, 0,
4084 &g4x_wm_info, latency_ns,
4085 &g4x_cursor_wm_info, latency_ns,
4086 &planea_wm, &cursora_wm))
4087 enabled |= 1;
4088
4089 if (g4x_compute_wm0(dev, 1,
4090 &g4x_wm_info, latency_ns,
4091 &g4x_cursor_wm_info, latency_ns,
4092 &planeb_wm, &cursorb_wm))
4093 enabled |= 2;
4094
4095 plane_sr = cursor_sr = 0;
4096 if (single_plane_enabled(enabled) &&
4097 g4x_compute_srwm(dev, ffs(enabled) - 1,
4098 sr_latency_ns,
4099 &g4x_wm_info,
4100 &g4x_cursor_wm_info,
4101 &plane_sr, &cursor_sr))
4102 I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN);
4103 else
4104 I915_WRITE(FW_BLC_SELF,
4105 I915_READ(FW_BLC_SELF) & ~FW_BLC_SELF_EN);
4106
4107 DRM_DEBUG_KMS("Setting FIFO watermarks - A: plane=%d, cursor=%d, B: plane=%d, cursor=%d, SR: plane=%d, cursor=%d\n",
4108 planea_wm, cursora_wm,
4109 planeb_wm, cursorb_wm,
4110 plane_sr, cursor_sr);
4111
4112 I915_WRITE(DSPFW1,
4113 (plane_sr << DSPFW_SR_SHIFT) |
4114 (cursorb_wm << DSPFW_CURSORB_SHIFT) |
4115 (planeb_wm << DSPFW_PLANEB_SHIFT) |
4116 planea_wm);
4117 I915_WRITE(DSPFW2,
4118 (I915_READ(DSPFW2) & DSPFW_CURSORA_MASK) |
4119 (cursora_wm << DSPFW_CURSORA_SHIFT));
4120
4121 I915_WRITE(DSPFW3,
4122 (I915_READ(DSPFW3) & ~DSPFW_HPLL_SR_EN) |
4123 (cursor_sr << DSPFW_CURSOR_SR_SHIFT));
4124}
4125
4126static void i965_update_wm(struct drm_device *dev)
4127{
4128 struct drm_i915_private *dev_priv = dev->dev_private;
4129 struct drm_crtc *crtc;
4130 int srwm = 1;
4131 int cursor_sr = 16;
4132
4133
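	/* Self-refresh is only enabled when exactly one CRTC is active. */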
4134 crtc = single_enabled_crtc(dev);
4135 if (crtc) {
4136
4137 static const int sr_latency_ns = 12000;
4138 int clock = crtc->mode.clock;
4139 int htotal = crtc->mode.htotal;
4140 int hdisplay = crtc->mode.hdisplay;
4141 int pixel_size = crtc->fb->bits_per_pixel / 8;
4142 unsigned long line_time_us;
4143 int entries;
4144
4145 line_time_us = ((htotal * 1000) / clock);
4146
4147
4148 entries = (((sr_latency_ns / line_time_us) + 1000) / 1000) *
4149 pixel_size * hdisplay;
4150 entries = DIV_ROUND_UP(entries, I915_FIFO_LINE_SIZE);
4151 srwm = I965_FIFO_SIZE - entries;
4152 if (srwm < 0)
4153 srwm = 1;
4154 srwm &= 0x1ff;
4155 DRM_DEBUG_KMS("self-refresh entries: %d, wm: %d\n",
4156 entries, srwm);
4157
4158 entries = (((sr_latency_ns / line_time_us) + 1000) / 1000) *
4159 pixel_size * 64;
4160 entries = DIV_ROUND_UP(entries,
4161 i965_cursor_wm_info.cacheline_size);
4162 cursor_sr = i965_cursor_wm_info.fifo_size -
4163 (entries + i965_cursor_wm_info.guard_size);
4164
4165 if (cursor_sr > i965_cursor_wm_info.max_wm)
4166 cursor_sr = i965_cursor_wm_info.max_wm;
4167
4168 DRM_DEBUG_KMS("self-refresh watermark: display plane %d "
4169 "cursor %d\n", srwm, cursor_sr);
4170
4171 if (IS_CRESTLINE(dev))
4172 I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN);
4173 } else {
4174
4175 if (IS_CRESTLINE(dev))
4176 I915_WRITE(FW_BLC_SELF, I915_READ(FW_BLC_SELF)
4177 & ~FW_BLC_SELF_EN);
4178 }
4179
4180 DRM_DEBUG_KMS("Setting FIFO watermarks - A: 8, B: 8, C: 8, SR %d\n",
4181 srwm);
4182
4183
4184 I915_WRITE(DSPFW1, (srwm << DSPFW_SR_SHIFT) |
4185 (8 << 16) | (8 << 8) | (8 << 0));
4186 I915_WRITE(DSPFW2, (8 << 8) | (8 << 0));
4187
4188 I915_WRITE(DSPFW3, (cursor_sr << DSPFW_CURSOR_SR_SHIFT));
4189}
4190
4191static void i9xx_update_wm(struct drm_device *dev)
4192{
4193 struct drm_i915_private *dev_priv = dev->dev_private;
4194 const struct intel_watermark_params *wm_info;
4195 uint32_t fwater_lo;
4196 uint32_t fwater_hi;
4197 int cwm, srwm = 1;
4198 int fifo_size;
4199 int planea_wm, planeb_wm;
4200 struct drm_crtc *crtc, *enabled = NULL;
4201
4202 if (IS_I945GM(dev))
4203 wm_info = &i945_wm_info;
4204 else if (!IS_GEN2(dev))
4205 wm_info = &i915_wm_info;
4206 else
4207 wm_info = &i855_wm_info;
4208
4209 fifo_size = dev_priv->display.get_fifo_size(dev, 0);
4210 crtc = intel_get_crtc_for_plane(dev, 0);
4211 if (crtc->enabled && crtc->fb) {
4212 planea_wm = intel_calculate_wm(crtc->mode.clock,
4213 wm_info, fifo_size,
4214 crtc->fb->bits_per_pixel / 8,
4215 latency_ns);
4216 enabled = crtc;
4217 } else
4218 planea_wm = fifo_size - wm_info->guard_size;
4219
4220 fifo_size = dev_priv->display.get_fifo_size(dev, 1);
4221 crtc = intel_get_crtc_for_plane(dev, 1);
4222 if (crtc->enabled && crtc->fb) {
4223 planeb_wm = intel_calculate_wm(crtc->mode.clock,
4224 wm_info, fifo_size,
4225 crtc->fb->bits_per_pixel / 8,
4226 latency_ns);
4227 if (enabled == NULL)
4228 enabled = crtc;
4229 else
4230 enabled = NULL;
4231 } else
4232 planeb_wm = fifo_size - wm_info->guard_size;
4233
4234 DRM_DEBUG_KMS("FIFO watermarks - A: %d, B: %d\n", planea_wm, planeb_wm);
4235
	/* The overlay plane gets an aggressive default since video jitter is particularly bad. */
	cwm = 2;

	/* Play safe and disable self-refresh before the new watermarks are written. */
4242 if (IS_I945G(dev) || IS_I945GM(dev))
4243 I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN_MASK | 0);
4244 else if (IS_I915GM(dev))
4245 I915_WRITE(INSTPM, I915_READ(INSTPM) & ~INSTPM_SELF_EN);
4246
4247
4248 if (HAS_FW_BLC(dev) && enabled) {
4249
4250 static const int sr_latency_ns = 6000;
4251 int clock = enabled->mode.clock;
4252 int htotal = enabled->mode.htotal;
4253 int hdisplay = enabled->mode.hdisplay;
4254 int pixel_size = enabled->fb->bits_per_pixel / 8;
4255 unsigned long line_time_us;
4256 int entries;
4257
4258 line_time_us = (htotal * 1000) / clock;
4259
4260
4261 entries = (((sr_latency_ns / line_time_us) + 1000) / 1000) *
4262 pixel_size * hdisplay;
4263 entries = DIV_ROUND_UP(entries, wm_info->cacheline_size);
4264 DRM_DEBUG_KMS("self-refresh entries: %d\n", entries);
4265 srwm = wm_info->fifo_size - entries;
4266 if (srwm < 0)
4267 srwm = 1;
4268
4269 if (IS_I945G(dev) || IS_I945GM(dev))
4270 I915_WRITE(FW_BLC_SELF,
4271 FW_BLC_SELF_FIFO_MASK | (srwm & 0xff));
4272 else if (IS_I915GM(dev))
4273 I915_WRITE(FW_BLC_SELF, srwm & 0x3f);
4274 }
4275
4276 DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d, B: %d, C: %d, SR %d\n",
4277 planea_wm, planeb_wm, cwm, srwm);
4278
4279 fwater_lo = ((planeb_wm & 0x3f) << 16) | (planea_wm & 0x3f);
4280 fwater_hi = (cwm & 0x1f);
4281
4282
4283 fwater_lo = fwater_lo | (1 << 24) | (1 << 8);
4284 fwater_hi = fwater_hi | (1 << 8);
4285
4286 I915_WRITE(FW_BLC, fwater_lo);
4287 I915_WRITE(FW_BLC2, fwater_hi);
4288
4289 if (HAS_FW_BLC(dev)) {
4290 if (enabled) {
4291 if (IS_I945G(dev) || IS_I945GM(dev))
4292 I915_WRITE(FW_BLC_SELF,
4293 FW_BLC_SELF_EN_MASK | FW_BLC_SELF_EN);
4294 else if (IS_I915GM(dev))
4295 I915_WRITE(INSTPM, I915_READ(INSTPM) | INSTPM_SELF_EN);
4296 DRM_DEBUG_KMS("memory self refresh enabled\n");
4297 } else
4298 DRM_DEBUG_KMS("memory self refresh disabled\n");
4299 }
4300}
4301
4302static void i830_update_wm(struct drm_device *dev)
4303{
4304 struct drm_i915_private *dev_priv = dev->dev_private;
4305 struct drm_crtc *crtc;
4306 uint32_t fwater_lo;
4307 int planea_wm;
4308
4309 crtc = single_enabled_crtc(dev);
4310 if (crtc == NULL)
4311 return;
4312
4313 planea_wm = intel_calculate_wm(crtc->mode.clock, &i830_wm_info,
4314 dev_priv->display.get_fifo_size(dev, 0),
4315 crtc->fb->bits_per_pixel / 8,
4316 latency_ns);
4317 fwater_lo = I915_READ(FW_BLC) & ~0xfff;
4318 fwater_lo |= (3<<8) | planea_wm;
4319
4320 DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d\n", planea_wm);
4321
4322 I915_WRITE(FW_BLC, fwater_lo);
4323}
4324
4325#define ILK_LP0_PLANE_LATENCY 700
4326#define ILK_LP0_CURSOR_LATENCY 1300
4327
/*
 * Check whether a self-refresh watermark level fits within the FBC, display
 * and cursor limits; the level is left disabled if it does not.
 */
4335static bool ironlake_check_srwm(struct drm_device *dev, int level,
4336 int fbc_wm, int display_wm, int cursor_wm,
4337 const struct intel_watermark_params *display,
4338 const struct intel_watermark_params *cursor)
4339{
4340 struct drm_i915_private *dev_priv = dev->dev_private;
4341
4342 DRM_DEBUG_KMS("watermark %d: display plane %d, fbc lines %d,"
4343 " cursor %d\n", level, display_wm, fbc_wm, cursor_wm);
4344
4345 if (fbc_wm > SNB_FBC_MAX_SRWM) {
4346 DRM_DEBUG_KMS("fbc watermark(%d) is too large(%d), disabling wm%d+\n",
4347 fbc_wm, SNB_FBC_MAX_SRWM, level);
4348
4349
4350 I915_WRITE(DISP_ARB_CTL,
4351 I915_READ(DISP_ARB_CTL) | DISP_FBC_WM_DIS);
4352 return false;
4353 }
4354
4355 if (display_wm > display->max_wm) {
4356 DRM_DEBUG_KMS("display watermark(%d) is too large(%d), disabling wm%d+\n",
4357 display_wm, SNB_DISPLAY_MAX_SRWM, level);
4358 return false;
4359 }
4360
4361 if (cursor_wm > cursor->max_wm) {
4362 DRM_DEBUG_KMS("cursor watermark(%d) is too large(%d), disabling wm%d+\n",
4363 cursor_wm, SNB_CURSOR_MAX_SRWM, level);
4364 return false;
4365 }
4366
4367 if (!(fbc_wm || display_wm || cursor_wm)) {
4368 DRM_DEBUG_KMS("latency %d is 0, disabling wm%d+\n", level, level);
4369 return false;
4370 }
4371
4372 return true;
4373}
4374
/*
 * Compute the display, FBC and cursor watermarks for one self-refresh
 * (WM1+) level.
 */
4378static bool ironlake_compute_srwm(struct drm_device *dev, int level, int plane,
4379 int latency_ns,
4380 const struct intel_watermark_params *display,
4381 const struct intel_watermark_params *cursor,
4382 int *fbc_wm, int *display_wm, int *cursor_wm)
4383{
4384 struct drm_crtc *crtc;
4385 unsigned long line_time_us;
4386 int hdisplay, htotal, pixel_size, clock;
4387 int line_count, line_size;
4388 int small, large;
4389 int entries;
4390
4391 if (!latency_ns) {
4392 *fbc_wm = *display_wm = *cursor_wm = 0;
4393 return false;
4394 }
4395
4396 crtc = intel_get_crtc_for_plane(dev, plane);
4397 hdisplay = crtc->mode.hdisplay;
4398 htotal = crtc->mode.htotal;
4399 clock = crtc->mode.clock;
4400 pixel_size = crtc->fb->bits_per_pixel / 8;
4401
4402 line_time_us = (htotal * 1000) / clock;
4403 line_count = (latency_ns / line_time_us + 1000) / 1000;
	line_size = hdisplay * pixel_size;

	/*
	 * Display self-refresh watermark: take the smaller of the byte-rate
	 * ("small" buffer) and whole-line ("large" buffer) estimates of the
	 * data consumed during the latency window.
	 */
4407 small = ((clock * pixel_size / 1000) * latency_ns) / 1000;
4408 large = line_count * line_size;
4409
4410 entries = DIV_ROUND_UP(min(small, large), display->cacheline_size);
	*display_wm = entries + display->guard_size;

	/*
	 * FBC watermark: roughly the number of scanout lines covered by the
	 * display watermark, (display_wm * 64 bytes) / line_size, plus a
	 * small guard band.
	 */
	*fbc_wm = DIV_ROUND_UP(*display_wm * 64, line_size) + 2;

	/* Self-refresh watermark for the 64 pixel wide hardware cursor */
4420 entries = line_count * pixel_size * 64;
4421 entries = DIV_ROUND_UP(entries, cursor->cacheline_size);
4422 *cursor_wm = entries + cursor->guard_size;
4423
4424 return ironlake_check_srwm(dev, level,
4425 *fbc_wm, *display_wm, *cursor_wm,
4426 display, cursor);
4427}
4428
4429static void ironlake_update_wm(struct drm_device *dev)
4430{
4431 struct drm_i915_private *dev_priv = dev->dev_private;
4432 int fbc_wm, plane_wm, cursor_wm;
4433 unsigned int enabled;
4434
4435 enabled = 0;
4436 if (g4x_compute_wm0(dev, 0,
4437 &ironlake_display_wm_info,
4438 ILK_LP0_PLANE_LATENCY,
4439 &ironlake_cursor_wm_info,
4440 ILK_LP0_CURSOR_LATENCY,
4441 &plane_wm, &cursor_wm)) {
4442 I915_WRITE(WM0_PIPEA_ILK,
4443 (plane_wm << WM0_PIPE_PLANE_SHIFT) | cursor_wm);
		DRM_DEBUG_KMS("FIFO watermarks For pipe A -"
			      " plane %d, cursor: %d\n",
4446 plane_wm, cursor_wm);
4447 enabled |= 1;
4448 }
4449
4450 if (g4x_compute_wm0(dev, 1,
4451 &ironlake_display_wm_info,
4452 ILK_LP0_PLANE_LATENCY,
4453 &ironlake_cursor_wm_info,
4454 ILK_LP0_CURSOR_LATENCY,
4455 &plane_wm, &cursor_wm)) {
4456 I915_WRITE(WM0_PIPEB_ILK,
4457 (plane_wm << WM0_PIPE_PLANE_SHIFT) | cursor_wm);
4458 DRM_DEBUG_KMS("FIFO watermarks For pipe B -"
4459 " plane %d, cursor: %d\n",
4460 plane_wm, cursor_wm);
4461 enabled |= 2;
4462 }
4463
4464
4465
4466
4467
4468 I915_WRITE(WM3_LP_ILK, 0);
4469 I915_WRITE(WM2_LP_ILK, 0);
4470 I915_WRITE(WM1_LP_ILK, 0);
4471
4472 if (!single_plane_enabled(enabled))
4473 return;
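	/* Convert the single-bit enable mask into a plane index */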
4474 enabled = ffs(enabled) - 1;
4475
4476
4477 if (!ironlake_compute_srwm(dev, 1, enabled,
4478 ILK_READ_WM1_LATENCY() * 500,
4479 &ironlake_display_srwm_info,
4480 &ironlake_cursor_srwm_info,
4481 &fbc_wm, &plane_wm, &cursor_wm))
4482 return;
4483
4484 I915_WRITE(WM1_LP_ILK,
4485 WM1_LP_SR_EN |
4486 (ILK_READ_WM1_LATENCY() << WM1_LP_LATENCY_SHIFT) |
4487 (fbc_wm << WM1_LP_FBC_SHIFT) |
4488 (plane_wm << WM1_LP_SR_SHIFT) |
4489 cursor_wm);
4490
4491
4492 if (!ironlake_compute_srwm(dev, 2, enabled,
4493 ILK_READ_WM2_LATENCY() * 500,
4494 &ironlake_display_srwm_info,
4495 &ironlake_cursor_srwm_info,
4496 &fbc_wm, &plane_wm, &cursor_wm))
4497 return;
4498
4499 I915_WRITE(WM2_LP_ILK,
4500 WM2_LP_EN |
4501 (ILK_READ_WM2_LATENCY() << WM1_LP_LATENCY_SHIFT) |
4502 (fbc_wm << WM1_LP_FBC_SHIFT) |
4503 (plane_wm << WM1_LP_SR_SHIFT) |
		   cursor_wm);

	/*
	 * WM3 is left disabled here; only the WM1 and WM2 self-refresh
	 * levels are programmed on Ironlake.
	 */
}
4511
4512static void sandybridge_update_wm(struct drm_device *dev)
4513{
4514 struct drm_i915_private *dev_priv = dev->dev_private;
4515 int latency = SNB_READ_WM0_LATENCY() * 100;
4516 int fbc_wm, plane_wm, cursor_wm;
4517 unsigned int enabled;
4518
4519 enabled = 0;
4520 if (g4x_compute_wm0(dev, 0,
4521 &sandybridge_display_wm_info, latency,
4522 &sandybridge_cursor_wm_info, latency,
4523 &plane_wm, &cursor_wm)) {
4524 I915_WRITE(WM0_PIPEA_ILK,
4525 (plane_wm << WM0_PIPE_PLANE_SHIFT) | cursor_wm);
		DRM_DEBUG_KMS("FIFO watermarks For pipe A -"
			      " plane %d, cursor: %d\n",
4528 plane_wm, cursor_wm);
4529 enabled |= 1;
4530 }
4531
4532 if (g4x_compute_wm0(dev, 1,
4533 &sandybridge_display_wm_info, latency,
4534 &sandybridge_cursor_wm_info, latency,
4535 &plane_wm, &cursor_wm)) {
4536 I915_WRITE(WM0_PIPEB_ILK,
4537 (plane_wm << WM0_PIPE_PLANE_SHIFT) | cursor_wm);
4538 DRM_DEBUG_KMS("FIFO watermarks For pipe B -"
4539 " plane %d, cursor: %d\n",
4540 plane_wm, cursor_wm);
4541 enabled |= 2;
4542 }
4543
4544
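	/* Ivybridge exposes a third pipe; program its WM0 like pipes A and B */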
4545 if (IS_IVYBRIDGE(dev) &&
4546 g4x_compute_wm0(dev, 2,
4547 &sandybridge_display_wm_info, latency,
4548 &sandybridge_cursor_wm_info, latency,
4549 &plane_wm, &cursor_wm)) {
4550 I915_WRITE(WM0_PIPEC_IVB,
4551 (plane_wm << WM0_PIPE_PLANE_SHIFT) | cursor_wm);
4552 DRM_DEBUG_KMS("FIFO watermarks For pipe C -"
4553 " plane %d, cursor: %d\n",
4554 plane_wm, cursor_wm);
		enabled |= 1 << 2;
4556 }
4557
4558
4559
4560
4561
4562
4563
4564
4565
4566
4567
4568 I915_WRITE(WM3_LP_ILK, 0);
4569 I915_WRITE(WM2_LP_ILK, 0);
4570 I915_WRITE(WM1_LP_ILK, 0);
4571
4572 if (!single_plane_enabled(enabled))
4573 return;
4574 enabled = ffs(enabled) - 1;
4575
4576
4577 if (!ironlake_compute_srwm(dev, 1, enabled,
4578 SNB_READ_WM1_LATENCY() * 500,
4579 &sandybridge_display_srwm_info,
4580 &sandybridge_cursor_srwm_info,
4581 &fbc_wm, &plane_wm, &cursor_wm))
4582 return;
4583
4584 I915_WRITE(WM1_LP_ILK,
4585 WM1_LP_SR_EN |
4586 (SNB_READ_WM1_LATENCY() << WM1_LP_LATENCY_SHIFT) |
4587 (fbc_wm << WM1_LP_FBC_SHIFT) |
4588 (plane_wm << WM1_LP_SR_SHIFT) |
4589 cursor_wm);
4590
4591
4592 if (!ironlake_compute_srwm(dev, 2, enabled,
4593 SNB_READ_WM2_LATENCY() * 500,
4594 &sandybridge_display_srwm_info,
4595 &sandybridge_cursor_srwm_info,
4596 &fbc_wm, &plane_wm, &cursor_wm))
4597 return;
4598
4599 I915_WRITE(WM2_LP_ILK,
4600 WM2_LP_EN |
4601 (SNB_READ_WM2_LATENCY() << WM1_LP_LATENCY_SHIFT) |
4602 (fbc_wm << WM1_LP_FBC_SHIFT) |
4603 (plane_wm << WM1_LP_SR_SHIFT) |
4604 cursor_wm);
4605
4606
4607 if (!ironlake_compute_srwm(dev, 3, enabled,
4608 SNB_READ_WM3_LATENCY() * 500,
4609 &sandybridge_display_srwm_info,
4610 &sandybridge_cursor_srwm_info,
4611 &fbc_wm, &plane_wm, &cursor_wm))
4612 return;
4613
4614 I915_WRITE(WM3_LP_ILK,
4615 WM3_LP_EN |
4616 (SNB_READ_WM3_LATENCY() << WM1_LP_LATENCY_SHIFT) |
4617 (fbc_wm << WM1_LP_FBC_SHIFT) |
4618 (plane_wm << WM1_LP_SR_SHIFT) |
4619 cursor_wm);
4620}
4621
4622
4623
4624
4625
4626
4627
4628
4629
4630
4631
4632
4633
4634
4635
4636
4637
4638
4639
4640
4641
4642
4643
4644
4645
4646
4647
4648
4649
4650
4651
4652
4653
4654static void intel_update_watermarks(struct drm_device *dev)
4655{
4656 struct drm_i915_private *dev_priv = dev->dev_private;
4657
4658 if (dev_priv->display.update_wm)
4659 dev_priv->display.update_wm(dev);
4660}
4661
4662static inline bool intel_panel_use_ssc(struct drm_i915_private *dev_priv)
4663{
4664 if (i915_panel_use_ssc >= 0)
4665 return i915_panel_use_ssc != 0;
4666 return dev_priv->lvds_use_ssc
4667 && !(dev_priv->quirks & QUIRK_LVDS_SSC_DISABLE);
4668}
4669
4670
4671
4672
4673
4674
4675
4676
4677
4678
4679
4680
4681
4682
4683
4684
4685
4686
4687
4688
4689
4690
4691static bool intel_choose_pipe_bpp_dither(struct drm_crtc *crtc,
4692 unsigned int *pipe_bpp,
4693 struct drm_display_mode *mode)
4694{
4695 struct drm_device *dev = crtc->dev;
4696 struct drm_i915_private *dev_priv = dev->dev_private;
4697 struct drm_encoder *encoder;
4698 struct drm_connector *connector;
4699 unsigned int display_bpc = UINT_MAX, bpc;
4700
4701
4702 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4703 struct intel_encoder *intel_encoder = to_intel_encoder(encoder);
4704
4705 if (encoder->crtc != crtc)
4706 continue;
4707
4708 if (intel_encoder->type == INTEL_OUTPUT_LVDS) {
4709 unsigned int lvds_bpc;
4710
4711 if ((I915_READ(PCH_LVDS) & LVDS_A3_POWER_MASK) ==
4712 LVDS_A3_POWER_UP)
4713 lvds_bpc = 8;
4714 else
4715 lvds_bpc = 6;
4716
4717 if (lvds_bpc < display_bpc) {
4718 DRM_DEBUG_KMS("clamping display bpc (was %d) to LVDS (%d)\n", display_bpc, lvds_bpc);
4719 display_bpc = lvds_bpc;
4720 }
4721 continue;
4722 }
4723
4724 if (intel_encoder->type == INTEL_OUTPUT_EDP) {
4725
4726 unsigned int edp_bpc = dev_priv->edp.bpp / 3;
4727
4728 if (edp_bpc < display_bpc) {
4729 DRM_DEBUG_KMS("clamping display bpc (was %d) to eDP (%d)\n", display_bpc, edp_bpc);
4730 display_bpc = edp_bpc;
4731 }
4732 continue;
4733 }
4734
4735
4736 list_for_each_entry(connector, &dev->mode_config.connector_list,
4737 head) {
4738 if (connector->encoder != encoder)
4739 continue;
4740
4741
4742 if (connector->display_info.bpc &&
4743 connector->display_info.bpc < display_bpc) {
4744 DRM_DEBUG_KMS("clamping display bpc (was %d) to EDID reported max of %d\n", display_bpc, connector->display_info.bpc);
4745 display_bpc = connector->display_info.bpc;
4746 }
4747 }
4748
4749
4750
4751
4752
4753 if (intel_encoder->type == INTEL_OUTPUT_HDMI) {
4754 if (display_bpc > 8 && display_bpc < 12) {
4755 DRM_DEBUG_KMS("forcing bpc to 12 for HDMI\n");
4756 display_bpc = 12;
4757 } else {
4758 DRM_DEBUG_KMS("forcing bpc to 8 for HDMI\n");
4759 display_bpc = 8;
4760 }
4761 }
4762 }
4763
4764 if (mode->private_flags & INTEL_MODE_DP_FORCE_6BPC) {
4765 DRM_DEBUG_KMS("Dithering DP to 6bpc\n");
4766 display_bpc = 6;
4767 }
4768
4769
4770
4771
4772
4773
4774
4775
4776 switch (crtc->fb->depth) {
4777 case 8:
4778 bpc = 8;
4779 break;
4780 case 15:
4781 case 16:
4782 bpc = 6;
4783 break;
4784 case 24:
4785 bpc = 8;
4786 break;
4787 case 30:
4788 bpc = 10;
4789 break;
4790 case 48:
4791 bpc = 12;
4792 break;
4793 default:
4794 DRM_DEBUG("unsupported depth, assuming 24 bits\n");
4795 bpc = min((unsigned int)8, display_bpc);
4796 break;
4797 }
4798
4799 display_bpc = min(display_bpc, bpc);
4800
4801 DRM_DEBUG_KMS("setting pipe bpc to %d (max display bpc %d)\n",
4802 bpc, display_bpc);
4803
4804 *pipe_bpp = display_bpc * 3;
4805
4806 return display_bpc != bpc;
4807}
4808
4809static int i9xx_crtc_mode_set(struct drm_crtc *crtc,
4810 struct drm_display_mode *mode,
4811 struct drm_display_mode *adjusted_mode,
4812 int x, int y,
4813 struct drm_framebuffer *old_fb)
4814{
4815 struct drm_device *dev = crtc->dev;
4816 struct drm_i915_private *dev_priv = dev->dev_private;
4817 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
4818 int pipe = intel_crtc->pipe;
4819 int plane = intel_crtc->plane;
4820 int refclk, num_connectors = 0;
4821 intel_clock_t clock, reduced_clock;
4822 u32 dpll, fp = 0, fp2 = 0, dspcntr, pipeconf;
4823 bool ok, has_reduced_clock = false, is_sdvo = false, is_dvo = false;
4824 bool is_crt = false, is_lvds = false, is_tv = false, is_dp = false;
4825 struct drm_mode_config *mode_config = &dev->mode_config;
4826 struct intel_encoder *encoder;
4827 const intel_limit_t *limit;
4828 int ret;
4829 u32 temp;
4830 u32 lvds_sync = 0;
4831
4832 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) {
4833 if (encoder->base.crtc != crtc)
4834 continue;
4835
4836 switch (encoder->type) {
4837 case INTEL_OUTPUT_LVDS:
4838 is_lvds = true;
4839 break;
4840 case INTEL_OUTPUT_SDVO:
4841 case INTEL_OUTPUT_HDMI:
4842 is_sdvo = true;
4843 if (encoder->needs_tv_clock)
4844 is_tv = true;
4845 break;
4846 case INTEL_OUTPUT_DVO:
4847 is_dvo = true;
4848 break;
4849 case INTEL_OUTPUT_TVOUT:
4850 is_tv = true;
4851 break;
4852 case INTEL_OUTPUT_ANALOG:
4853 is_crt = true;
4854 break;
4855 case INTEL_OUTPUT_DISPLAYPORT:
4856 is_dp = true;
4857 break;
4858 }
4859
4860 num_connectors++;
4861 }
4862
4863 if (is_lvds && intel_panel_use_ssc(dev_priv) && num_connectors < 2) {
4864 refclk = dev_priv->lvds_ssc_freq * 1000;
4865 DRM_DEBUG_KMS("using SSC reference clock of %d MHz\n",
4866 refclk / 1000);
4867 } else if (!IS_GEN2(dev)) {
4868 refclk = 96000;
4869 } else {
4870 refclk = 48000;
4871 }
4872
4873
4874
4875
4876
4877
4878 limit = intel_limit(crtc, refclk);
4879 ok = limit->find_pll(limit, crtc, adjusted_mode->clock, refclk, &clock);
4880 if (!ok) {
4881 DRM_ERROR("Couldn't find PLL settings for mode!\n");
4882 return -EINVAL;
4883 }
4884
4885
4886 intel_crtc_update_cursor(crtc, true);
4887
4888 if (is_lvds && dev_priv->lvds_downclock_avail) {
4889 has_reduced_clock = limit->find_pll(limit, crtc,
4890 dev_priv->lvds_downclock,
4891 refclk,
4892 &reduced_clock);
		if (has_reduced_clock && (clock.p != reduced_clock.p)) {
			/*
			 * The FP1-based downclock can only change the M/N
			 * divisors, so the reduced clock must share the same
			 * P divider as the normal clock.
			 */
			DRM_DEBUG_KMS("Different P is found for "
				      "LVDS clock/downclock\n");
			has_reduced_clock = false;
		}
4904 }
4905
4906
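	/* SDVO TV outputs use fixed PLL divisors chosen by pixel clock range */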
4907 if (is_sdvo && is_tv) {
4908 if (adjusted_mode->clock >= 100000
4909 && adjusted_mode->clock < 140500) {
4910 clock.p1 = 2;
4911 clock.p2 = 10;
4912 clock.n = 3;
4913 clock.m1 = 16;
4914 clock.m2 = 8;
4915 } else if (adjusted_mode->clock >= 140500
4916 && adjusted_mode->clock <= 200000) {
4917 clock.p1 = 1;
4918 clock.p2 = 10;
4919 clock.n = 6;
4920 clock.m1 = 12;
4921 clock.m2 = 8;
4922 }
4923 }
4924
4925 if (IS_PINEVIEW(dev)) {
4926 fp = (1 << clock.n) << 16 | clock.m1 << 8 | clock.m2;
4927 if (has_reduced_clock)
4928 fp2 = (1 << reduced_clock.n) << 16 |
4929 reduced_clock.m1 << 8 | reduced_clock.m2;
4930 } else {
4931 fp = clock.n << 16 | clock.m1 << 8 | clock.m2;
4932 if (has_reduced_clock)
4933 fp2 = reduced_clock.n << 16 | reduced_clock.m1 << 8 |
4934 reduced_clock.m2;
4935 }
4936
4937 dpll = DPLL_VGA_MODE_DIS;
4938
4939 if (!IS_GEN2(dev)) {
4940 if (is_lvds)
4941 dpll |= DPLLB_MODE_LVDS;
4942 else
4943 dpll |= DPLLB_MODE_DAC_SERIAL;
4944 if (is_sdvo) {
4945 int pixel_multiplier = intel_mode_get_pixel_multiplier(adjusted_mode);
4946 if (pixel_multiplier > 1) {
4947 if (IS_I945G(dev) || IS_I945GM(dev) || IS_G33(dev))
4948 dpll |= (pixel_multiplier - 1) << SDVO_MULTIPLIER_SHIFT_HIRES;
4949 }
4950 dpll |= DPLL_DVO_HIGH_SPEED;
4951 }
4952 if (is_dp)
4953 dpll |= DPLL_DVO_HIGH_SPEED;
4954
4955
4956 if (IS_PINEVIEW(dev))
4957 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT_PINEVIEW;
4958 else {
4959 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
4960 if (IS_G4X(dev) && has_reduced_clock)
4961 dpll |= (1 << (reduced_clock.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;
4962 }
4963 switch (clock.p2) {
4964 case 5:
4965 dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_5;
4966 break;
4967 case 7:
4968 dpll |= DPLLB_LVDS_P2_CLOCK_DIV_7;
4969 break;
4970 case 10:
4971 dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_10;
4972 break;
4973 case 14:
4974 dpll |= DPLLB_LVDS_P2_CLOCK_DIV_14;
4975 break;
4976 }
4977 if (INTEL_INFO(dev)->gen >= 4)
4978 dpll |= (6 << PLL_LOAD_PULSE_PHASE_SHIFT);
4979 } else {
4980 if (is_lvds) {
4981 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
4982 } else {
4983 if (clock.p1 == 2)
4984 dpll |= PLL_P1_DIVIDE_BY_TWO;
4985 else
4986 dpll |= (clock.p1 - 2) << DPLL_FPA01_P1_POST_DIV_SHIFT;
4987 if (clock.p2 == 4)
4988 dpll |= PLL_P2_DIVIDE_BY_4;
4989 }
4990 }
4991
4992 if (is_sdvo && is_tv)
4993 dpll |= PLL_REF_INPUT_TVCLKINBC;
4994 else if (is_tv)
4995
4996
4997 dpll |= 3;
4998 else if (is_lvds && intel_panel_use_ssc(dev_priv) && num_connectors < 2)
4999 dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
5000 else
5001 dpll |= PLL_REF_INPUT_DREFCLK;
5002
5003
5004 pipeconf = I915_READ(PIPECONF(pipe));
5005
5006
5007 dspcntr = DISPPLANE_GAMMA_ENABLE;
5008
5009
5010
5011 if (pipe == 0)
5012 dspcntr &= ~DISPPLANE_SEL_PIPE_MASK;
5013 else
5014 dspcntr |= DISPPLANE_SEL_PIPE_B;
5015
5016 if (pipe == 0 && INTEL_INFO(dev)->gen < 4) {
5017
5018
5019
5020
5021
5022
5023 if (mode->clock >
5024 dev_priv->display.get_display_clock_speed(dev) * 9 / 10)
5025 pipeconf |= PIPECONF_DOUBLE_WIDE;
5026 else
5027 pipeconf &= ~PIPECONF_DOUBLE_WIDE;
5028 }
5029
5030
5031 pipeconf &= ~(PIPECONF_BPP_MASK | PIPECONF_DITHER_EN);
5032 if (is_dp) {
5033 if (mode->private_flags & INTEL_MODE_DP_FORCE_6BPC) {
5034 pipeconf |= PIPECONF_BPP_6 |
5035 PIPECONF_DITHER_EN |
5036 PIPECONF_DITHER_TYPE_SP;
5037 }
5038 }
5039
5040 dpll |= DPLL_VCO_ENABLE;
5041
5042 DRM_DEBUG_KMS("Mode for pipe %c:\n", pipe == 0 ? 'A' : 'B');
5043 drm_mode_debug_printmodeline(mode);
5044
5045 I915_WRITE(FP0(pipe), fp);
5046 I915_WRITE(DPLL(pipe), dpll & ~DPLL_VCO_ENABLE);
5047
5048 POSTING_READ(DPLL(pipe));
5049 udelay(150);
5050
5051
5052
5053
5054
5055 if (is_lvds) {
5056 temp = I915_READ(LVDS);
5057 temp |= LVDS_PORT_EN | LVDS_A0A2_CLKA_POWER_UP;
5058 if (pipe == 1) {
5059 temp |= LVDS_PIPEB_SELECT;
5060 } else {
5061 temp &= ~LVDS_PIPEB_SELECT;
5062 }
5063
5064 temp |= dev_priv->lvds_border_bits;
5065
5066
5067
5068 if (clock.p2 == 7)
5069 temp |= LVDS_B0B3_POWER_UP | LVDS_CLKB_POWER_UP;
5070 else
5071 temp &= ~(LVDS_B0B3_POWER_UP | LVDS_CLKB_POWER_UP);
5072
5073
5074
5075
5076
5077
5078 if (INTEL_INFO(dev)->gen >= 4) {
5079 if (dev_priv->lvds_dither)
5080 temp |= LVDS_ENABLE_DITHER;
5081 else
5082 temp &= ~LVDS_ENABLE_DITHER;
5083 }
5084 if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC)
5085 lvds_sync |= LVDS_HSYNC_POLARITY;
5086 if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC)
5087 lvds_sync |= LVDS_VSYNC_POLARITY;
5088 if ((temp & (LVDS_HSYNC_POLARITY | LVDS_VSYNC_POLARITY))
5089 != lvds_sync) {
5090 char flags[2] = "-+";
5091 DRM_INFO("Changing LVDS panel from "
5092 "(%chsync, %cvsync) to (%chsync, %cvsync)\n",
5093 flags[!(temp & LVDS_HSYNC_POLARITY)],
5094 flags[!(temp & LVDS_VSYNC_POLARITY)],
5095 flags[!(lvds_sync & LVDS_HSYNC_POLARITY)],
5096 flags[!(lvds_sync & LVDS_VSYNC_POLARITY)]);
5097 temp &= ~(LVDS_HSYNC_POLARITY | LVDS_VSYNC_POLARITY);
5098 temp |= lvds_sync;
5099 }
5100 I915_WRITE(LVDS, temp);
5101 }
5102
5103 if (is_dp) {
5104 intel_dp_set_m_n(crtc, mode, adjusted_mode);
5105 }
5106
5107 I915_WRITE(DPLL(pipe), dpll);
5108
5109
5110 POSTING_READ(DPLL(pipe));
5111 udelay(150);
5112
5113 if (INTEL_INFO(dev)->gen >= 4) {
5114 temp = 0;
5115 if (is_sdvo) {
5116 temp = intel_mode_get_pixel_multiplier(adjusted_mode);
5117 if (temp > 1)
5118 temp = (temp - 1) << DPLL_MD_UDI_MULTIPLIER_SHIFT;
5119 else
5120 temp = 0;
5121 }
5122 I915_WRITE(DPLL_MD(pipe), temp);
	} else {
		/*
		 * The pixel multiplier can only be updated once the DPLL is
		 * enabled and the clocks are stable, so write the DPLL value
		 * a second time here.
		 */
		I915_WRITE(DPLL(pipe), dpll);
5130 }
5131
5132 intel_crtc->lowfreq_avail = false;
5133 if (is_lvds && has_reduced_clock && i915_powersave) {
5134 I915_WRITE(FP1(pipe), fp2);
5135 intel_crtc->lowfreq_avail = true;
5136 if (HAS_PIPE_CXSR(dev)) {
5137 DRM_DEBUG_KMS("enabling CxSR downclocking\n");
5138 pipeconf |= PIPECONF_CXSR_DOWNCLOCK;
5139 }
5140 } else {
5141 I915_WRITE(FP1(pipe), fp);
5142 if (HAS_PIPE_CXSR(dev)) {
5143 DRM_DEBUG_KMS("disabling CxSR downclocking\n");
5144 pipeconf &= ~PIPECONF_CXSR_DOWNCLOCK;
5145 }
5146 }
5147
5148 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
5149 pipeconf |= PIPECONF_INTERLACE_W_FIELD_INDICATION;
5150
5151 adjusted_mode->crtc_vdisplay -= 1;
5152 adjusted_mode->crtc_vtotal -= 1;
5153 adjusted_mode->crtc_vblank_start -= 1;
5154 adjusted_mode->crtc_vblank_end -= 1;
5155 adjusted_mode->crtc_vsync_end -= 1;
5156 adjusted_mode->crtc_vsync_start -= 1;
5157 } else
5158 pipeconf &= ~PIPECONF_INTERLACE_W_FIELD_INDICATION;
5159
5160 I915_WRITE(HTOTAL(pipe),
5161 (adjusted_mode->crtc_hdisplay - 1) |
5162 ((adjusted_mode->crtc_htotal - 1) << 16));
5163 I915_WRITE(HBLANK(pipe),
5164 (adjusted_mode->crtc_hblank_start - 1) |
5165 ((adjusted_mode->crtc_hblank_end - 1) << 16));
5166 I915_WRITE(HSYNC(pipe),
5167 (adjusted_mode->crtc_hsync_start - 1) |
5168 ((adjusted_mode->crtc_hsync_end - 1) << 16));
5169
5170 I915_WRITE(VTOTAL(pipe),
5171 (adjusted_mode->crtc_vdisplay - 1) |
5172 ((adjusted_mode->crtc_vtotal - 1) << 16));
5173 I915_WRITE(VBLANK(pipe),
5174 (adjusted_mode->crtc_vblank_start - 1) |
5175 ((adjusted_mode->crtc_vblank_end - 1) << 16));
5176 I915_WRITE(VSYNC(pipe),
5177 (adjusted_mode->crtc_vsync_start - 1) |
5178 ((adjusted_mode->crtc_vsync_end - 1) << 16));
5179
5180
5181
5182
5183 I915_WRITE(DSPSIZE(plane),
5184 ((mode->vdisplay - 1) << 16) |
5185 (mode->hdisplay - 1));
5186 I915_WRITE(DSPPOS(plane), 0);
5187 I915_WRITE(PIPESRC(pipe),
5188 ((mode->hdisplay - 1) << 16) | (mode->vdisplay - 1));
5189
5190 I915_WRITE(PIPECONF(pipe), pipeconf);
5191 POSTING_READ(PIPECONF(pipe));
5192 intel_enable_pipe(dev_priv, pipe, false);
5193
5194 intel_wait_for_vblank(dev, pipe);
5195
5196 I915_WRITE(DSPCNTR(plane), dspcntr);
5197 POSTING_READ(DSPCNTR(plane));
5198 intel_enable_plane(dev_priv, plane, pipe);
5199
5200 ret = intel_pipe_set_base(crtc, x, y, old_fb);
5201
5202 intel_update_watermarks(dev);
5203
5204 return ret;
5205}
5206
5207
5208
5209
5210void ironlake_init_pch_refclk(struct drm_device *dev)
5211{
5212 struct drm_i915_private *dev_priv = dev->dev_private;
5213 struct drm_mode_config *mode_config = &dev->mode_config;
5214 struct intel_encoder *encoder;
5215 u32 temp;
5216 bool has_lvds = false;
5217 bool has_cpu_edp = false;
5218 bool has_pch_edp = false;
5219 bool has_panel = false;
5220 bool has_ck505 = false;
5221 bool can_ssc = false;
5222
5223
5224 list_for_each_entry(encoder, &mode_config->encoder_list,
5225 base.head) {
5226 switch (encoder->type) {
5227 case INTEL_OUTPUT_LVDS:
5228 has_panel = true;
5229 has_lvds = true;
5230 break;
5231 case INTEL_OUTPUT_EDP:
5232 has_panel = true;
5233 if (intel_encoder_is_pch_edp(&encoder->base))
5234 has_pch_edp = true;
5235 else
5236 has_cpu_edp = true;
5237 break;
5238 }
5239 }
5240
5241 if (HAS_PCH_IBX(dev)) {
5242 has_ck505 = dev_priv->display_clock_mode;
5243 can_ssc = has_ck505;
5244 } else {
5245 has_ck505 = false;
5246 can_ssc = true;
5247 }
5248
5249 DRM_DEBUG_KMS("has_panel %d has_lvds %d has_pch_edp %d has_cpu_edp %d has_ck505 %d\n",
5250 has_panel, has_lvds, has_pch_edp, has_cpu_edp,
5251 has_ck505);
5252
5253
5254
5255
5256
5257
5258 temp = I915_READ(PCH_DREF_CONTROL);
5259
5260 temp &= ~DREF_NONSPREAD_SOURCE_MASK;
5261
5262 if (has_ck505)
5263 temp |= DREF_NONSPREAD_CK505_ENABLE;
5264 else
5265 temp |= DREF_NONSPREAD_SOURCE_ENABLE;
5266
5267 if (has_panel) {
5268 temp &= ~DREF_SSC_SOURCE_MASK;
5269 temp |= DREF_SSC_SOURCE_ENABLE;
5270
5271
5272 if (intel_panel_use_ssc(dev_priv) && can_ssc) {
5273 DRM_DEBUG_KMS("Using SSC on panel\n");
5274 temp |= DREF_SSC1_ENABLE;
5275 }
5276
5277
5278 I915_WRITE(PCH_DREF_CONTROL, temp);
5279 POSTING_READ(PCH_DREF_CONTROL);
5280 udelay(200);
5281
5282 temp &= ~DREF_CPU_SOURCE_OUTPUT_MASK;
5283
5284
5285 if (has_cpu_edp) {
5286 if (intel_panel_use_ssc(dev_priv) && can_ssc) {
5287 DRM_DEBUG_KMS("Using SSC on eDP\n");
5288 temp |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
5289 }
5290 else
5291 temp |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
5292 } else
5293 temp |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
5294
5295 I915_WRITE(PCH_DREF_CONTROL, temp);
5296 POSTING_READ(PCH_DREF_CONTROL);
5297 udelay(200);
5298 } else {
5299 DRM_DEBUG_KMS("Disabling SSC entirely\n");
5300
5301 temp &= ~DREF_CPU_SOURCE_OUTPUT_MASK;
5302
5303
5304 temp |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
5305
5306 I915_WRITE(PCH_DREF_CONTROL, temp);
5307 POSTING_READ(PCH_DREF_CONTROL);
5308 udelay(200);
5309
5310
5311 temp &= ~DREF_SSC_SOURCE_MASK;
		temp |= DREF_SSC_SOURCE_DISABLE;

		/* Turn off SSC1 as well */
		temp &= ~DREF_SSC1_ENABLE;
5316
5317 I915_WRITE(PCH_DREF_CONTROL, temp);
5318 POSTING_READ(PCH_DREF_CONTROL);
5319 udelay(200);
5320 }
5321}
5322
5323static int ironlake_get_refclk(struct drm_crtc *crtc)
5324{
5325 struct drm_device *dev = crtc->dev;
5326 struct drm_i915_private *dev_priv = dev->dev_private;
5327 struct intel_encoder *encoder;
5328 struct drm_mode_config *mode_config = &dev->mode_config;
5329 struct intel_encoder *edp_encoder = NULL;
5330 int num_connectors = 0;
5331 bool is_lvds = false;
5332
5333 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) {
5334 if (encoder->base.crtc != crtc)
5335 continue;
5336
5337 switch (encoder->type) {
5338 case INTEL_OUTPUT_LVDS:
5339 is_lvds = true;
5340 break;
5341 case INTEL_OUTPUT_EDP:
5342 edp_encoder = encoder;
5343 break;
5344 }
5345 num_connectors++;
5346 }
5347
5348 if (is_lvds && intel_panel_use_ssc(dev_priv) && num_connectors < 2) {
5349 DRM_DEBUG_KMS("using SSC reference clock of %d MHz\n",
5350 dev_priv->lvds_ssc_freq);
5351 return dev_priv->lvds_ssc_freq * 1000;
5352 }
5353
5354 return 120000;
5355}
5356
5357static int ironlake_crtc_mode_set(struct drm_crtc *crtc,
5358 struct drm_display_mode *mode,
5359 struct drm_display_mode *adjusted_mode,
5360 int x, int y,
5361 struct drm_framebuffer *old_fb)
5362{
5363 struct drm_device *dev = crtc->dev;
5364 struct drm_i915_private *dev_priv = dev->dev_private;
5365 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
5366 int pipe = intel_crtc->pipe;
5367 int plane = intel_crtc->plane;
5368 int refclk, num_connectors = 0;
5369 intel_clock_t clock, reduced_clock;
5370 u32 dpll, fp = 0, fp2 = 0, dspcntr, pipeconf;
5371 bool ok, has_reduced_clock = false, is_sdvo = false;
5372 bool is_crt = false, is_lvds = false, is_tv = false, is_dp = false;
5373 struct intel_encoder *has_edp_encoder = NULL;
5374 struct drm_mode_config *mode_config = &dev->mode_config;
5375 struct intel_encoder *encoder;
5376 const intel_limit_t *limit;
5377 int ret;
5378 struct fdi_m_n m_n = {0};
5379 u32 temp;
5380 u32 lvds_sync = 0;
5381 int target_clock, pixel_multiplier, lane, link_bw, factor;
5382 unsigned int pipe_bpp;
5383 bool dither;
5384
5385 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) {
5386 if (encoder->base.crtc != crtc)
5387 continue;
5388
5389 switch (encoder->type) {
5390 case INTEL_OUTPUT_LVDS:
5391 is_lvds = true;
5392 break;
5393 case INTEL_OUTPUT_SDVO:
5394 case INTEL_OUTPUT_HDMI:
5395 is_sdvo = true;
5396 if (encoder->needs_tv_clock)
5397 is_tv = true;
5398 break;
5399 case INTEL_OUTPUT_TVOUT:
5400 is_tv = true;
5401 break;
5402 case INTEL_OUTPUT_ANALOG:
5403 is_crt = true;
5404 break;
5405 case INTEL_OUTPUT_DISPLAYPORT:
5406 is_dp = true;
5407 break;
5408 case INTEL_OUTPUT_EDP:
5409 has_edp_encoder = encoder;
5410 break;
5411 }
5412
5413 num_connectors++;
5414 }
5415
	refclk = ironlake_get_refclk(crtc);

	/*
	 * Pick PLL divisors for the requested pixel clock with the chosen
	 * reference clock; fail the mode set if no valid combination exists.
	 */
5423 limit = intel_limit(crtc, refclk);
5424 ok = limit->find_pll(limit, crtc, adjusted_mode->clock, refclk, &clock);
5425 if (!ok) {
5426 DRM_ERROR("Couldn't find PLL settings for mode!\n");
5427 return -EINVAL;
5428 }
5429
5430
5431 intel_crtc_update_cursor(crtc, true);
5432
5433 if (is_lvds && dev_priv->lvds_downclock_avail) {
5434 has_reduced_clock = limit->find_pll(limit, crtc,
5435 dev_priv->lvds_downclock,
5436 refclk,
5437 &reduced_clock);
		if (has_reduced_clock && (clock.p != reduced_clock.p)) {
			/*
			 * The FP1-based downclock can only change the M/N
			 * divisors, so the reduced clock must share the same
			 * P divider as the normal clock.
			 */
			DRM_DEBUG_KMS("Different P is found for "
				      "LVDS clock/downclock\n");
			has_reduced_clock = false;
		}
5449 }
5450
5451
5452 if (is_sdvo && is_tv) {
5453 if (adjusted_mode->clock >= 100000
5454 && adjusted_mode->clock < 140500) {
5455 clock.p1 = 2;
5456 clock.p2 = 10;
5457 clock.n = 3;
5458 clock.m1 = 16;
5459 clock.m2 = 8;
5460 } else if (adjusted_mode->clock >= 140500
5461 && adjusted_mode->clock <= 200000) {
5462 clock.p1 = 1;
5463 clock.p2 = 10;
5464 clock.n = 6;
5465 clock.m1 = 12;
5466 clock.m2 = 8;
5467 }
5468 }
5469
5470
5471 pixel_multiplier = intel_mode_get_pixel_multiplier(adjusted_mode);
5472 lane = 0;
5473
5474
5475 if (has_edp_encoder &&
5476 !intel_encoder_is_pch_edp(&has_edp_encoder->base)) {
5477 target_clock = mode->clock;
5478 intel_edp_link_config(has_edp_encoder,
5479 &lane, &link_bw);
5480 } else {
5481
5482
5483 if (is_dp || intel_encoder_is_pch_edp(&has_edp_encoder->base))
5484 target_clock = mode->clock;
5485 else
5486 target_clock = adjusted_mode->clock;
5487
5488
5489
5490
5491
5492
5493
5494
5495 link_bw = intel_fdi_link_freq(dev) * MHz(100)/KHz(1)/10;
5496 }
5497
5498
5499 temp = I915_READ(PIPECONF(pipe));
5500 temp &= ~PIPE_BPC_MASK;
5501 dither = intel_choose_pipe_bpp_dither(crtc, &pipe_bpp, mode);
5502 switch (pipe_bpp) {
5503 case 18:
5504 temp |= PIPE_6BPC;
5505 break;
5506 case 24:
5507 temp |= PIPE_8BPC;
5508 break;
5509 case 30:
5510 temp |= PIPE_10BPC;
5511 break;
5512 case 36:
5513 temp |= PIPE_12BPC;
5514 break;
5515 default:
		WARN(1, "intel_choose_pipe_bpp_dither returned invalid value %d\n",
		     pipe_bpp);
5518 temp |= PIPE_8BPC;
5519 pipe_bpp = 24;
5520 break;
5521 }
5522
5523 intel_crtc->bpp = pipe_bpp;
5524 I915_WRITE(PIPECONF(pipe), temp);
5525
	if (!lane) {
		/*
		 * Compute the minimum FDI lane count for this mode; the
		 * 21/20 factor pads the required bandwidth by 5% so the
		 * link is not run right at its limit.
		 */
5532 u32 bps = target_clock * intel_crtc->bpp * 21 / 20;
5533 lane = bps / (link_bw * 8) + 1;
5534 }
5535
5536 intel_crtc->fdi_lanes = lane;
5537
5538 if (pixel_multiplier > 1)
5539 link_bw *= pixel_multiplier;
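	/* Compute the data/link M and N values that pace pixels onto the FDI (or eDP) link */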
5540 ironlake_compute_m_n(intel_crtc->bpp, lane, target_clock, link_bw,
5541 &m_n);
5542
5543 fp = clock.n << 16 | clock.m1 << 8 | clock.m2;
5544 if (has_reduced_clock)
5545 fp2 = reduced_clock.n << 16 | reduced_clock.m1 << 8 |
5546 reduced_clock.m2;
5547
5548
5549 factor = 21;
5550 if (is_lvds) {
5551 if ((intel_panel_use_ssc(dev_priv) &&
5552 dev_priv->lvds_ssc_freq == 100) ||
5553 (I915_READ(PCH_LVDS) & LVDS_CLKB_POWER_MASK) == LVDS_CLKB_POWER_UP)
5554 factor = 25;
5555 } else if (is_sdvo && is_tv)
5556 factor = 20;
5557
5558 if (clock.m < factor * clock.n)
5559 fp |= FP_CB_TUNE;
5560
5561 dpll = 0;
5562
5563 if (is_lvds)
5564 dpll |= DPLLB_MODE_LVDS;
5565 else
5566 dpll |= DPLLB_MODE_DAC_SERIAL;
5567 if (is_sdvo) {
5568 int pixel_multiplier = intel_mode_get_pixel_multiplier(adjusted_mode);
5569 if (pixel_multiplier > 1) {
5570 dpll |= (pixel_multiplier - 1) << PLL_REF_SDVO_HDMI_MULTIPLIER_SHIFT;
5571 }
5572 dpll |= DPLL_DVO_HIGH_SPEED;
5573 }
5574 if (is_dp || intel_encoder_is_pch_edp(&has_edp_encoder->base))
5575 dpll |= DPLL_DVO_HIGH_SPEED;
5576
5577
5578 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
5579
5580 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;
5581
5582 switch (clock.p2) {
5583 case 5:
5584 dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_5;
5585 break;
5586 case 7:
5587 dpll |= DPLLB_LVDS_P2_CLOCK_DIV_7;
5588 break;
5589 case 10:
5590 dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_10;
5591 break;
5592 case 14:
5593 dpll |= DPLLB_LVDS_P2_CLOCK_DIV_14;
5594 break;
5595 }
5596
5597 if (is_sdvo && is_tv)
5598 dpll |= PLL_REF_INPUT_TVCLKINBC;
5599 else if (is_tv)
5600
5601
5602 dpll |= 3;
5603 else if (is_lvds && intel_panel_use_ssc(dev_priv) && num_connectors < 2)
5604 dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
5605 else
5606 dpll |= PLL_REF_INPUT_DREFCLK;
5607
5608
5609 pipeconf = I915_READ(PIPECONF(pipe));
5610
5611
5612 dspcntr = DISPPLANE_GAMMA_ENABLE;
5613
5614 DRM_DEBUG_KMS("Mode for pipe %d:\n", pipe);
5615 drm_mode_debug_printmodeline(mode);
5616
5617
5618 if (!intel_crtc->no_pll) {
5619 if (!has_edp_encoder ||
5620 intel_encoder_is_pch_edp(&has_edp_encoder->base)) {
5621 I915_WRITE(PCH_FP0(pipe), fp);
5622 I915_WRITE(PCH_DPLL(pipe), dpll & ~DPLL_VCO_ENABLE);
5623
5624 POSTING_READ(PCH_DPLL(pipe));
5625 udelay(150);
5626 }
5627 } else {
5628 if (dpll == (I915_READ(PCH_DPLL(0)) & 0x7fffffff) &&
5629 fp == I915_READ(PCH_FP0(0))) {
5630 intel_crtc->use_pll_a = true;
5631 DRM_DEBUG_KMS("using pipe a dpll\n");
5632 } else if (dpll == (I915_READ(PCH_DPLL(1)) & 0x7fffffff) &&
5633 fp == I915_READ(PCH_FP0(1))) {
5634 intel_crtc->use_pll_a = false;
5635 DRM_DEBUG_KMS("using pipe b dpll\n");
5636 } else {
5637 DRM_DEBUG_KMS("no matching PLL configuration for pipe 2\n");
5638 return -EINVAL;
5639 }
5640 }
5641
5642
5643
5644
5645
5646 if (is_lvds) {
5647 temp = I915_READ(PCH_LVDS);
5648 temp |= LVDS_PORT_EN | LVDS_A0A2_CLKA_POWER_UP;
5649 if (HAS_PCH_CPT(dev))
5650 temp |= PORT_TRANS_SEL_CPT(pipe);
5651 else if (pipe == 1)
5652 temp |= LVDS_PIPEB_SELECT;
5653 else
5654 temp &= ~LVDS_PIPEB_SELECT;
5655
5656
5657 temp |= dev_priv->lvds_border_bits;
5658
5659
5660
5661 if (clock.p2 == 7)
5662 temp |= LVDS_B0B3_POWER_UP | LVDS_CLKB_POWER_UP;
5663 else
5664 temp &= ~(LVDS_B0B3_POWER_UP | LVDS_CLKB_POWER_UP);
5665
5666
5667
5668
5669
5670 if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC)
5671 lvds_sync |= LVDS_HSYNC_POLARITY;
5672 if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC)
5673 lvds_sync |= LVDS_VSYNC_POLARITY;
5674 if ((temp & (LVDS_HSYNC_POLARITY | LVDS_VSYNC_POLARITY))
5675 != lvds_sync) {
5676 char flags[2] = "-+";
5677 DRM_INFO("Changing LVDS panel from "
5678 "(%chsync, %cvsync) to (%chsync, %cvsync)\n",
5679 flags[!(temp & LVDS_HSYNC_POLARITY)],
5680 flags[!(temp & LVDS_VSYNC_POLARITY)],
5681 flags[!(lvds_sync & LVDS_HSYNC_POLARITY)],
5682 flags[!(lvds_sync & LVDS_VSYNC_POLARITY)]);
5683 temp &= ~(LVDS_HSYNC_POLARITY | LVDS_VSYNC_POLARITY);
5684 temp |= lvds_sync;
5685 }
5686 I915_WRITE(PCH_LVDS, temp);
5687 }
5688
5689 pipeconf &= ~PIPECONF_DITHER_EN;
5690 pipeconf &= ~PIPECONF_DITHER_TYPE_MASK;
5691 if ((is_lvds && dev_priv->lvds_dither) || dither) {
5692 pipeconf |= PIPECONF_DITHER_EN;
5693 pipeconf |= PIPECONF_DITHER_TYPE_SP;
5694 }
5695 if (is_dp || intel_encoder_is_pch_edp(&has_edp_encoder->base)) {
5696 intel_dp_set_m_n(crtc, mode, adjusted_mode);
5697 } else {
5698
5699 I915_WRITE(TRANSDATA_M1(pipe), 0);
5700 I915_WRITE(TRANSDATA_N1(pipe), 0);
5701 I915_WRITE(TRANSDPLINK_M1(pipe), 0);
5702 I915_WRITE(TRANSDPLINK_N1(pipe), 0);
5703 }
5704
5705 if (!intel_crtc->no_pll &&
5706 (!has_edp_encoder ||
5707 intel_encoder_is_pch_edp(&has_edp_encoder->base))) {
5708 I915_WRITE(PCH_DPLL(pipe), dpll);
5709
5710
5711 POSTING_READ(PCH_DPLL(pipe));
		udelay(150);

		/*
		 * The pixel multiplier can only be updated once the DPLL is
		 * enabled and the clocks are stable, so write the DPLL value
		 * a second time.
		 */
		I915_WRITE(PCH_DPLL(pipe), dpll);
5720 }
5721
5722 intel_crtc->lowfreq_avail = false;
5723 if (!intel_crtc->no_pll) {
5724 if (is_lvds && has_reduced_clock && i915_powersave) {
5725 I915_WRITE(PCH_FP1(pipe), fp2);
5726 intel_crtc->lowfreq_avail = true;
5727 if (HAS_PIPE_CXSR(dev)) {
5728 DRM_DEBUG_KMS("enabling CxSR downclocking\n");
5729 pipeconf |= PIPECONF_CXSR_DOWNCLOCK;
5730 }
5731 } else {
5732 I915_WRITE(PCH_FP1(pipe), fp);
5733 if (HAS_PIPE_CXSR(dev)) {
5734 DRM_DEBUG_KMS("disabling CxSR downclocking\n");
5735 pipeconf &= ~PIPECONF_CXSR_DOWNCLOCK;
5736 }
5737 }
5738 }
5739
5740 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
5741 pipeconf |= PIPECONF_INTERLACE_W_FIELD_INDICATION;
5742
5743 adjusted_mode->crtc_vdisplay -= 1;
5744 adjusted_mode->crtc_vtotal -= 1;
5745 adjusted_mode->crtc_vblank_start -= 1;
5746 adjusted_mode->crtc_vblank_end -= 1;
5747 adjusted_mode->crtc_vsync_end -= 1;
5748 adjusted_mode->crtc_vsync_start -= 1;
5749 } else
5750 pipeconf &= ~PIPECONF_INTERLACE_W_FIELD_INDICATION;
5751
5752 I915_WRITE(HTOTAL(pipe),
5753 (adjusted_mode->crtc_hdisplay - 1) |
5754 ((adjusted_mode->crtc_htotal - 1) << 16));
5755 I915_WRITE(HBLANK(pipe),
5756 (adjusted_mode->crtc_hblank_start - 1) |
5757 ((adjusted_mode->crtc_hblank_end - 1) << 16));
5758 I915_WRITE(HSYNC(pipe),
5759 (adjusted_mode->crtc_hsync_start - 1) |
5760 ((adjusted_mode->crtc_hsync_end - 1) << 16));
5761
5762 I915_WRITE(VTOTAL(pipe),
5763 (adjusted_mode->crtc_vdisplay - 1) |
5764 ((adjusted_mode->crtc_vtotal - 1) << 16));
5765 I915_WRITE(VBLANK(pipe),
5766 (adjusted_mode->crtc_vblank_start - 1) |
5767 ((adjusted_mode->crtc_vblank_end - 1) << 16));
5768 I915_WRITE(VSYNC(pipe),
5769 (adjusted_mode->crtc_vsync_start - 1) |
5770 ((adjusted_mode->crtc_vsync_end - 1) << 16));
5771
5772
5773
5774
5775 I915_WRITE(PIPESRC(pipe),
5776 ((mode->hdisplay - 1) << 16) | (mode->vdisplay - 1));
5777
5778 I915_WRITE(PIPE_DATA_M1(pipe), TU_SIZE(m_n.tu) | m_n.gmch_m);
5779 I915_WRITE(PIPE_DATA_N1(pipe), m_n.gmch_n);
5780 I915_WRITE(PIPE_LINK_M1(pipe), m_n.link_m);
5781 I915_WRITE(PIPE_LINK_N1(pipe), m_n.link_n);
5782
5783 if (has_edp_encoder &&
5784 !intel_encoder_is_pch_edp(&has_edp_encoder->base)) {
5785 ironlake_set_pll_edp(crtc, adjusted_mode->clock);
5786 }
5787
5788 I915_WRITE(PIPECONF(pipe), pipeconf);
5789 POSTING_READ(PIPECONF(pipe));
5790
5791 intel_wait_for_vblank(dev, pipe);
5792
5793 if (IS_GEN5(dev)) {
5794
5795 temp = I915_READ(DISP_ARB_CTL);
5796 I915_WRITE(DISP_ARB_CTL, temp | DISP_TILE_SURFACE_SWIZZLING);
5797 }
5798
5799 I915_WRITE(DSPCNTR(plane), dspcntr);
5800 POSTING_READ(DSPCNTR(plane));
5801
5802 ret = intel_pipe_set_base(crtc, x, y, old_fb);
5803
5804 intel_update_watermarks(dev);
5805
5806 return ret;
5807}
5808
5809static int intel_crtc_mode_set(struct drm_crtc *crtc,
5810 struct drm_display_mode *mode,
5811 struct drm_display_mode *adjusted_mode,
5812 int x, int y,
5813 struct drm_framebuffer *old_fb)
5814{
5815 struct drm_device *dev = crtc->dev;
5816 struct drm_i915_private *dev_priv = dev->dev_private;
5817 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
5818 int pipe = intel_crtc->pipe;
5819 int ret;
5820
5821 drm_vblank_pre_modeset(dev, pipe);
5822
5823 ret = dev_priv->display.crtc_mode_set(crtc, mode, adjusted_mode,
5824 x, y, old_fb);
5825
5826 drm_vblank_post_modeset(dev, pipe);
5827
5828 intel_crtc->dpms_mode = DRM_MODE_DPMS_ON;
5829
5830 return ret;
5831}
5832
5833static void g4x_write_eld(struct drm_connector *connector,
5834 struct drm_crtc *crtc)
5835{
5836 struct drm_i915_private *dev_priv = connector->dev->dev_private;
5837 uint8_t *eld = connector->eld;
5838 uint32_t eldv;
5839 uint32_t len;
5840 uint32_t i;
5841
5842 i = I915_READ(G4X_AUD_VID_DID);
5843
5844 if (i == INTEL_AUDIO_DEVBLC || i == INTEL_AUDIO_DEVCL)
5845 eldv = G4X_ELDV_DEVCL_DEVBLC;
5846 else
5847 eldv = G4X_ELDV_DEVCTG;
5848
5849 i = I915_READ(G4X_AUD_CNTL_ST);
5850 i &= ~(eldv | G4X_ELD_ADDR);
5851 len = (i >> 9) & 0x1f;
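	/* bits 13:9 of G4X_AUD_CNTL_ST report the ELD buffer size in dwords */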
5852 I915_WRITE(G4X_AUD_CNTL_ST, i);
5853
5854 if (!eld[0])
5855 return;
5856
5857 len = min_t(uint8_t, eld[2], len);
5858 DRM_DEBUG_DRIVER("ELD size %d\n", len);
5859 for (i = 0; i < len; i++)
5860 I915_WRITE(G4X_HDMIW_HDMIEDID, *((uint32_t *)eld + i));
5861
5862 i = I915_READ(G4X_AUD_CNTL_ST);
5863 i |= eldv;
5864 I915_WRITE(G4X_AUD_CNTL_ST, i);
5865}
5866
5867static void ironlake_write_eld(struct drm_connector *connector,
5868 struct drm_crtc *crtc)
5869{
5870 struct drm_i915_private *dev_priv = connector->dev->dev_private;
5871 uint8_t *eld = connector->eld;
5872 uint32_t eldv;
5873 uint32_t i;
5874 int len;
5875 int hdmiw_hdmiedid;
5876 int aud_cntl_st;
5877 int aud_cntrl_st2;
5878
5879 if (IS_IVYBRIDGE(connector->dev)) {
5880 hdmiw_hdmiedid = GEN7_HDMIW_HDMIEDID_A;
5881 aud_cntl_st = GEN7_AUD_CNTRL_ST_A;
5882 aud_cntrl_st2 = GEN7_AUD_CNTRL_ST2;
5883 } else {
5884 hdmiw_hdmiedid = GEN5_HDMIW_HDMIEDID_A;
5885 aud_cntl_st = GEN5_AUD_CNTL_ST_A;
5886 aud_cntrl_st2 = GEN5_AUD_CNTL_ST2;
5887 }
5888
5889 i = to_intel_crtc(crtc)->pipe;
5890 hdmiw_hdmiedid += i * 0x100;
5891 aud_cntl_st += i * 0x100;
5892
5893 DRM_DEBUG_DRIVER("ELD on pipe %c\n", pipe_name(i));
5894
5895 i = I915_READ(aud_cntl_st);
5896 i = (i >> 29) & 0x3;
5897 if (!i) {
5898 DRM_DEBUG_DRIVER("Audio directed to unknown port\n");
5899
5900 eldv = GEN5_ELD_VALIDB;
5901 eldv |= GEN5_ELD_VALIDB << 4;
5902 eldv |= GEN5_ELD_VALIDB << 8;
5903 } else {
5904 DRM_DEBUG_DRIVER("ELD on port %c\n", 'A' + i);
5905 eldv = GEN5_ELD_VALIDB << ((i - 1) * 4);
5906 }
5907
5908 i = I915_READ(aud_cntrl_st2);
5909 i &= ~eldv;
5910 I915_WRITE(aud_cntrl_st2, i);
5911
5912 if (!eld[0])
5913 return;
5914
5915 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_DISPLAYPORT)) {
5916 DRM_DEBUG_DRIVER("ELD: DisplayPort detected\n");
5917 eld[5] |= (1 << 2);
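		/* ELD byte 5, bit 2: connection type = DisplayPort */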
5918 }
5919
5920 i = I915_READ(aud_cntl_st);
5921 i &= ~GEN5_ELD_ADDRESS;
5922 I915_WRITE(aud_cntl_st, i);
5923
5924 len = min_t(uint8_t, eld[2], 21);
5925 DRM_DEBUG_DRIVER("ELD size %d\n", len);
5926 for (i = 0; i < len; i++)
5927 I915_WRITE(hdmiw_hdmiedid, *((uint32_t *)eld + i));
5928
5929 i = I915_READ(aud_cntrl_st2);
5930 i |= eldv;
5931 I915_WRITE(aud_cntrl_st2, i);
5932}
5933
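/*
 * intel_write_eld - write the monitor's ELD (EDID-Like Data) into the
 * audio registers so the HD-audio driver can discover the sink's audio
 * capabilities.  The AV sync delay in ELD byte 6 is patched up first.
 */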
5934void intel_write_eld(struct drm_encoder *encoder,
5935 struct drm_display_mode *mode)
5936{
5937 struct drm_crtc *crtc = encoder->crtc;
5938 struct drm_connector *connector;
5939 struct drm_device *dev = encoder->dev;
5940 struct drm_i915_private *dev_priv = dev->dev_private;
5941
5942 connector = drm_select_eld(encoder, mode);
5943 if (!connector)
5944 return;
5945
5946 DRM_DEBUG_DRIVER("ELD on [CONNECTOR:%d:%s], [ENCODER:%d:%s]\n",
5947 connector->base.id,
5948 drm_get_connector_name(connector),
5949 connector->encoder->base.id,
5950 drm_get_encoder_name(connector->encoder));
5951
5952 connector->eld[6] = drm_av_sync_delay(connector, mode) / 2;
5953
5954 if (dev_priv->display.write_eld)
5955 dev_priv->display.write_eld(connector, crtc);
5956}
5957
5958
5959void intel_crtc_load_lut(struct drm_crtc *crtc)
5960{
5961 struct drm_device *dev = crtc->dev;
5962 struct drm_i915_private *dev_priv = dev->dev_private;
5963 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
5964 int palreg = PALETTE(intel_crtc->pipe);
	int i;

	/* The pipe clocks have to be running to load the palette */
5968 if (!crtc->enabled)
		return;

	/* PCH platforms (Ironlake and later) use the legacy LGC palette */
5972 if (HAS_PCH_SPLIT(dev))
5973 palreg = LGC_PALETTE(intel_crtc->pipe);
5974
5975 for (i = 0; i < 256; i++) {
5976 I915_WRITE(palreg + 4 * i,
5977 (intel_crtc->lut_r[i] << 16) |
5978 (intel_crtc->lut_g[i] << 8) |
5979 intel_crtc->lut_b[i]);
5980 }
5981}
5982
5983static void i845_update_cursor(struct drm_crtc *crtc, u32 base)
5984{
5985 struct drm_device *dev = crtc->dev;
5986 struct drm_i915_private *dev_priv = dev->dev_private;
5987 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
5988 bool visible = base != 0;
5989 u32 cntl;
5990
5991 if (intel_crtc->cursor_visible == visible)
5992 return;
5993
5994 cntl = I915_READ(_CURACNTR);
	if (visible) {
		/* Program the new base before (re)enabling the cursor */
5999 I915_WRITE(_CURABASE, base);
6000
6001 cntl &= ~(CURSOR_FORMAT_MASK);
6002
6003 cntl |= CURSOR_ENABLE |
6004 CURSOR_GAMMA_ENABLE |
6005 CURSOR_FORMAT_ARGB;
6006 } else
6007 cntl &= ~(CURSOR_ENABLE | CURSOR_GAMMA_ENABLE);
6008 I915_WRITE(_CURACNTR, cntl);
6009
6010 intel_crtc->cursor_visible = visible;
6011}
6012
6013static void i9xx_update_cursor(struct drm_crtc *crtc, u32 base)
6014{
6015 struct drm_device *dev = crtc->dev;
6016 struct drm_i915_private *dev_priv = dev->dev_private;
6017 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6018 int pipe = intel_crtc->pipe;
6019 bool visible = base != 0;
6020
6021 if (intel_crtc->cursor_visible != visible) {
6022 uint32_t cntl = I915_READ(CURCNTR(pipe));
6023 if (base) {
6024 cntl &= ~(CURSOR_MODE | MCURSOR_PIPE_SELECT);
6025 cntl |= CURSOR_MODE_64_ARGB_AX | MCURSOR_GAMMA_ENABLE;
6026 cntl |= pipe << 28;
6027 } else {
6028 cntl &= ~(CURSOR_MODE | MCURSOR_GAMMA_ENABLE);
6029 cntl |= CURSOR_MODE_DISABLE;
6030 }
6031 I915_WRITE(CURCNTR(pipe), cntl);
6032
6033 intel_crtc->cursor_visible = visible;
6034 }
6035
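	/* Writing the base register commits the new cursor state on the next vblank */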
6036 I915_WRITE(CURBASE(pipe), base);
6037}
6038
6039static void ivb_update_cursor(struct drm_crtc *crtc, u32 base)
6040{
6041 struct drm_device *dev = crtc->dev;
6042 struct drm_i915_private *dev_priv = dev->dev_private;
6043 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6044 int pipe = intel_crtc->pipe;
6045 bool visible = base != 0;
6046
6047 if (intel_crtc->cursor_visible != visible) {
6048 uint32_t cntl = I915_READ(CURCNTR_IVB(pipe));
6049 if (base) {
6050 cntl &= ~CURSOR_MODE;
6051 cntl |= CURSOR_MODE_64_ARGB_AX | MCURSOR_GAMMA_ENABLE;
6052 } else {
6053 cntl &= ~(CURSOR_MODE | MCURSOR_GAMMA_ENABLE);
6054 cntl |= CURSOR_MODE_DISABLE;
6055 }
6056 I915_WRITE(CURCNTR_IVB(pipe), cntl);
6057
6058 intel_crtc->cursor_visible = visible;
6059 }
6060
6061 I915_WRITE(CURBASE_IVB(pipe), base);
6062}
6063
6064
6065static void intel_crtc_update_cursor(struct drm_crtc *crtc,
6066 bool on)
6067{
6068 struct drm_device *dev = crtc->dev;
6069 struct drm_i915_private *dev_priv = dev->dev_private;
6070 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6071 int pipe = intel_crtc->pipe;
6072 int x = intel_crtc->cursor_x;
6073 int y = intel_crtc->cursor_y;
6074 u32 base, pos;
6075 bool visible;
6076
6077 pos = 0;
6078
6079 if (on && crtc->enabled && crtc->fb) {
6080 base = intel_crtc->cursor_addr;
6081 if (x > (int) crtc->fb->width)
6082 base = 0;
6083
6084 if (y > (int) crtc->fb->height)
6085 base = 0;
6086 } else
6087 base = 0;
6088
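	/*
	 * The position register takes a sign bit plus magnitude per axis;
	 * a cursor that lies entirely outside the framebuffer is disabled
	 * by programming a base of 0 instead.
	 */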
6089 if (x < 0) {
6090 if (x + intel_crtc->cursor_width < 0)
6091 base = 0;
6092
6093 pos |= CURSOR_POS_SIGN << CURSOR_X_SHIFT;
6094 x = -x;
6095 }
6096 pos |= x << CURSOR_X_SHIFT;
6097
6098 if (y < 0) {
6099 if (y + intel_crtc->cursor_height < 0)
6100 base = 0;
6101
6102 pos |= CURSOR_POS_SIGN << CURSOR_Y_SHIFT;
6103 y = -y;
6104 }
6105 pos |= y << CURSOR_Y_SHIFT;
6106
6107 visible = base != 0;
6108 if (!visible && !intel_crtc->cursor_visible)
6109 return;
6110
6111 if (IS_IVYBRIDGE(dev)) {
6112 I915_WRITE(CURPOS_IVB(pipe), pos);
6113 ivb_update_cursor(crtc, base);
6114 } else {
6115 I915_WRITE(CURPOS(pipe), pos);
6116 if (IS_845G(dev) || IS_I865G(dev))
6117 i845_update_cursor(crtc, base);
6118 else
6119 i9xx_update_cursor(crtc, base);
6120 }
6121
6122 if (visible)
6123 intel_mark_busy(dev, to_intel_framebuffer(crtc->fb)->obj);
6124}
6125
6126static int intel_crtc_cursor_set(struct drm_crtc *crtc,
6127 struct drm_file *file,
6128 uint32_t handle,
6129 uint32_t width, uint32_t height)
6130{
6131 struct drm_device *dev = crtc->dev;
6132 struct drm_i915_private *dev_priv = dev->dev_private;
6133 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6134 struct drm_i915_gem_object *obj;
6135 uint32_t addr;
6136 int ret;
6137
6138 DRM_DEBUG_KMS("\n");
6139
6140
6141 if (!handle) {
6142 DRM_DEBUG_KMS("cursor off\n");
6143 addr = 0;
6144 obj = NULL;
6145 mutex_lock(&dev->struct_mutex);
6146 goto finish;
6147 }
6148
6149
6150 if (width != 64 || height != 64) {
6151 DRM_ERROR("we currently only support 64x64 cursors\n");
6152 return -EINVAL;
6153 }
6154
6155 obj = to_intel_bo(drm_gem_object_lookup(dev, file, handle));
6156 if (&obj->base == NULL)
6157 return -ENOENT;
6158
6159 if (obj->base.size < width * height * 4) {
		DRM_ERROR("buffer is too small\n");
6161 ret = -ENOMEM;
6162 goto fail;
6163 }
6164
6165
6166 mutex_lock(&dev->struct_mutex);
6167 if (!dev_priv->info->cursor_needs_physical) {
6168 if (obj->tiling_mode) {
6169 DRM_ERROR("cursor cannot be tiled\n");
6170 ret = -EINVAL;
6171 goto fail_locked;
6172 }
6173
6174 ret = i915_gem_object_pin_to_display_plane(obj, 0, NULL);
6175 if (ret) {
6176 DRM_ERROR("failed to move cursor bo into the GTT\n");
6177 goto fail_locked;
6178 }
6179
6180 ret = i915_gem_object_put_fence(obj);
6181 if (ret) {
6182 DRM_ERROR("failed to release fence for cursor");
6183 goto fail_unpin;
6184 }
6185
6186 addr = obj->gtt_offset;
6187 } else {
6188 int align = IS_I830(dev) ? 16 * 1024 : 256;
6189 ret = i915_gem_attach_phys_object(dev, obj,
6190 (intel_crtc->pipe == 0) ? I915_GEM_PHYS_CURSOR_0 : I915_GEM_PHYS_CURSOR_1,
6191 align);
6192 if (ret) {
6193 DRM_ERROR("failed to attach phys object\n");
6194 goto fail_locked;
6195 }
6196 addr = obj->phys_obj->handle->busaddr;
6197 }
6198
6199 if (IS_GEN2(dev))
6200 I915_WRITE(CURSIZE, (height << 12) | width);
6201
6202 finish:
6203 if (intel_crtc->cursor_bo) {
6204 if (dev_priv->info->cursor_needs_physical) {
6205 if (intel_crtc->cursor_bo != obj)
6206 i915_gem_detach_phys_object(dev, intel_crtc->cursor_bo);
6207 } else
6208 i915_gem_object_unpin(intel_crtc->cursor_bo);
6209 drm_gem_object_unreference(&intel_crtc->cursor_bo->base);
6210 }
6211
6212 mutex_unlock(&dev->struct_mutex);
6213
6214 intel_crtc->cursor_addr = addr;
6215 intel_crtc->cursor_bo = obj;
6216 intel_crtc->cursor_width = width;
6217 intel_crtc->cursor_height = height;
6218
6219 intel_crtc_update_cursor(crtc, true);
6220
6221 return 0;
6222fail_unpin:
6223 i915_gem_object_unpin(obj);
6224fail_locked:
6225 mutex_unlock(&dev->struct_mutex);
6226fail:
6227 drm_gem_object_unreference_unlocked(&obj->base);
6228 return ret;
6229}
6230
6231static int intel_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
6232{
6233 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6234
6235 intel_crtc->cursor_x = x;
6236 intel_crtc->cursor_y = y;
6237
6238 intel_crtc_update_cursor(crtc, true);
6239
6240 return 0;
6241}
6242
6243
6244void intel_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
6245 u16 blue, int regno)
6246{
6247 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6248
6249 intel_crtc->lut_r[regno] = red >> 8;
6250 intel_crtc->lut_g[regno] = green >> 8;
6251 intel_crtc->lut_b[regno] = blue >> 8;
6252}
6253
6254void intel_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
6255 u16 *blue, int regno)
6256{
6257 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6258
6259 *red = intel_crtc->lut_r[regno] << 8;
6260 *green = intel_crtc->lut_g[regno] << 8;
6261 *blue = intel_crtc->lut_b[regno] << 8;
6262}
6263
6264static void intel_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
6265 u16 *blue, uint32_t start, uint32_t size)
6266{
6267 int end = (start + size > 256) ? 256 : start + size, i;
6268 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6269
6270 for (i = start; i < end; i++) {
6271 intel_crtc->lut_r[i] = red[i] >> 8;
6272 intel_crtc->lut_g[i] = green[i] >> 8;
6273 intel_crtc->lut_b[i] = blue[i] >> 8;
6274 }
6275
6276 intel_crtc_load_lut(crtc);
6277}
6278
6279
6280
6281
6282
6283
6284
6285
6286
6287
6288
6289
6290
6291
6292
6293
6294static struct drm_display_mode load_detect_mode = {
6295 DRM_MODE("640x480", DRM_MODE_TYPE_DEFAULT, 31500, 640, 664,
6296 704, 832, 0, 480, 489, 491, 520, 0, DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
6297};
6298
6299static struct drm_framebuffer *
6300intel_framebuffer_create(struct drm_device *dev,
6301 struct drm_mode_fb_cmd *mode_cmd,
6302 struct drm_i915_gem_object *obj)
6303{
6304 struct intel_framebuffer *intel_fb;
6305 int ret;
6306
6307 intel_fb = kzalloc(sizeof(*intel_fb), GFP_KERNEL);
6308 if (!intel_fb) {
6309 drm_gem_object_unreference_unlocked(&obj->base);
6310 return ERR_PTR(-ENOMEM);
6311 }
6312
6313 ret = intel_framebuffer_init(dev, intel_fb, mode_cmd, obj);
6314 if (ret) {
6315 drm_gem_object_unreference_unlocked(&obj->base);
6316 kfree(intel_fb);
6317 return ERR_PTR(ret);
6318 }
6319
6320 return &intel_fb->base;
6321}
6322
6323static u32
6324intel_framebuffer_pitch_for_width(int width, int bpp)
6325{
6326 u32 pitch = DIV_ROUND_UP(width * bpp, 8);
6327 return ALIGN(pitch, 64);
6328}
6329
6330static u32
6331intel_framebuffer_size_for_mode(struct drm_display_mode *mode, int bpp)
6332{
6333 u32 pitch = intel_framebuffer_pitch_for_width(mode->hdisplay, bpp);
6334 return ALIGN(pitch * mode->vdisplay, PAGE_SIZE);
6335}
6336
6337static struct drm_framebuffer *
6338intel_framebuffer_create_for_mode(struct drm_device *dev,
6339 struct drm_display_mode *mode,
6340 int depth, int bpp)
6341{
6342 struct drm_i915_gem_object *obj;
6343 struct drm_mode_fb_cmd mode_cmd;
6344
6345 obj = i915_gem_alloc_object(dev,
6346 intel_framebuffer_size_for_mode(mode, bpp));
6347 if (obj == NULL)
6348 return ERR_PTR(-ENOMEM);
6349
6350 mode_cmd.width = mode->hdisplay;
6351 mode_cmd.height = mode->vdisplay;
6352 mode_cmd.depth = depth;
6353 mode_cmd.bpp = bpp;
6354 mode_cmd.pitch = intel_framebuffer_pitch_for_width(mode_cmd.width, bpp);
6355
6356 return intel_framebuffer_create(dev, &mode_cmd, obj);
6357}
6358
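/*
 * mode_fits_in_fbdev - reuse the fbdev framebuffer for load detection
 * when its pitch and size are large enough for the requested mode;
 * returns NULL otherwise.
 */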
6359static struct drm_framebuffer *
6360mode_fits_in_fbdev(struct drm_device *dev,
6361 struct drm_display_mode *mode)
6362{
6363 struct drm_i915_private *dev_priv = dev->dev_private;
6364 struct drm_i915_gem_object *obj;
6365 struct drm_framebuffer *fb;
6366
6367 if (dev_priv->fbdev == NULL)
6368 return NULL;
6369
6370 obj = dev_priv->fbdev->ifb.obj;
6371 if (obj == NULL)
6372 return NULL;
6373
6374 fb = &dev_priv->fbdev->ifb.base;
6375 if (fb->pitch < intel_framebuffer_pitch_for_width(mode->hdisplay,
6376 fb->bits_per_pixel))
6377 return NULL;
6378
6379 if (obj->base.size < mode->vdisplay * fb->pitch)
6380 return NULL;
6381
6382 return fb;
6383}
6384
6385bool intel_get_load_detect_pipe(struct intel_encoder *intel_encoder,
6386 struct drm_connector *connector,
6387 struct drm_display_mode *mode,
6388 struct intel_load_detect_pipe *old)
6389{
6390 struct intel_crtc *intel_crtc;
6391 struct drm_crtc *possible_crtc;
6392 struct drm_encoder *encoder = &intel_encoder->base;
6393 struct drm_crtc *crtc = NULL;
6394 struct drm_device *dev = encoder->dev;
6395 struct drm_framebuffer *old_fb;
6396 int i = -1;
6397
6398 DRM_DEBUG_KMS("[CONNECTOR:%d:%s], [ENCODER:%d:%s]\n",
6399 connector->base.id, drm_get_connector_name(connector),
		      encoder->base.id, drm_get_encoder_name(encoder));

	/*
	 * If the encoder already drives a CRTC, reuse that pipe for load
	 * detection (making sure it is powered up); otherwise look for an
	 * unused CRTC this encoder may use and set a temporary mode on it.
	 */
6413 if (encoder->crtc) {
6414 crtc = encoder->crtc;
6415
6416 intel_crtc = to_intel_crtc(crtc);
6417 old->dpms_mode = intel_crtc->dpms_mode;
6418 old->load_detect_temp = false;
6419
6420
6421 if (intel_crtc->dpms_mode != DRM_MODE_DPMS_ON) {
6422 struct drm_encoder_helper_funcs *encoder_funcs;
6423 struct drm_crtc_helper_funcs *crtc_funcs;
6424
6425 crtc_funcs = crtc->helper_private;
6426 crtc_funcs->dpms(crtc, DRM_MODE_DPMS_ON);
6427
6428 encoder_funcs = encoder->helper_private;
6429 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_ON);
6430 }
6431
6432 return true;
6433 }
6434
6435
6436 list_for_each_entry(possible_crtc, &dev->mode_config.crtc_list, head) {
6437 i++;
6438 if (!(encoder->possible_crtcs & (1 << i)))
6439 continue;
6440 if (!possible_crtc->enabled) {
6441 crtc = possible_crtc;
6442 break;
6443 }
6444 }
6445
6446
6447
6448
6449 if (!crtc) {
6450 DRM_DEBUG_KMS("no pipe available for load-detect\n");
6451 return false;
6452 }
6453
6454 encoder->crtc = crtc;
6455 connector->encoder = encoder;
6456
6457 intel_crtc = to_intel_crtc(crtc);
6458 old->dpms_mode = intel_crtc->dpms_mode;
6459 old->load_detect_temp = true;
6460 old->release_fb = NULL;
6461
6462 if (!mode)
6463 mode = &load_detect_mode;
6464
	old_fb = crtc->fb;

	/*
	 * Reuse the existing fbdev framebuffer when it is large enough for
	 * the detection mode; otherwise allocate a temporary one, released
	 * again in intel_release_load_detect_pipe().
	 */
6474 crtc->fb = mode_fits_in_fbdev(dev, mode);
6475 if (crtc->fb == NULL) {
6476 DRM_DEBUG_KMS("creating tmp fb for load-detection\n");
6477 crtc->fb = intel_framebuffer_create_for_mode(dev, mode, 24, 32);
6478 old->release_fb = crtc->fb;
6479 } else
6480 DRM_DEBUG_KMS("reusing fbdev for load-detection framebuffer\n");
6481 if (IS_ERR(crtc->fb)) {
6482 DRM_DEBUG_KMS("failed to allocate framebuffer for load-detection\n");
6483 crtc->fb = old_fb;
6484 return false;
6485 }
6486
6487 if (!drm_crtc_helper_set_mode(crtc, mode, 0, 0, old_fb)) {
6488 DRM_DEBUG_KMS("failed to set mode on load-detect pipe\n");
6489 if (old->release_fb)
6490 old->release_fb->funcs->destroy(old->release_fb);
6491 crtc->fb = old_fb;
6492 return false;
6493 }
6494
6495
6496 intel_wait_for_vblank(dev, intel_crtc->pipe);
6497
6498 return true;
6499}
6500
6501void intel_release_load_detect_pipe(struct intel_encoder *intel_encoder,
6502 struct drm_connector *connector,
6503 struct intel_load_detect_pipe *old)
6504{
6505 struct drm_encoder *encoder = &intel_encoder->base;
6506 struct drm_device *dev = encoder->dev;
6507 struct drm_crtc *crtc = encoder->crtc;
6508 struct drm_encoder_helper_funcs *encoder_funcs = encoder->helper_private;
6509 struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private;
6510
6511 DRM_DEBUG_KMS("[CONNECTOR:%d:%s], [ENCODER:%d:%s]\n",
6512 connector->base.id, drm_get_connector_name(connector),
6513 encoder->base.id, drm_get_encoder_name(encoder));
6514
6515 if (old->load_detect_temp) {
6516 connector->encoder = NULL;
6517 drm_helper_disable_unused_functions(dev);
6518
6519 if (old->release_fb)
6520 old->release_fb->funcs->destroy(old->release_fb);
6521
6522 return;
6523 }
6524
6525
6526 if (old->dpms_mode != DRM_MODE_DPMS_ON) {
6527 encoder_funcs->dpms(encoder, old->dpms_mode);
6528 crtc_funcs->dpms(crtc, old->dpms_mode);
6529 }
6530}
6531
6532
6533static int intel_crtc_clock_get(struct drm_device *dev, struct drm_crtc *crtc)
6534{
6535 struct drm_i915_private *dev_priv = dev->dev_private;
6536 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6537 int pipe = intel_crtc->pipe;
6538 u32 dpll = I915_READ(DPLL(pipe));
6539 u32 fp;
6540 intel_clock_t clock;
6541
6542 if ((dpll & DISPLAY_RATE_SELECT_FPA1) == 0)
6543 fp = I915_READ(FP0(pipe));
6544 else
6545 fp = I915_READ(FP1(pipe));
6546
6547 clock.m1 = (fp & FP_M1_DIV_MASK) >> FP_M1_DIV_SHIFT;
6548 if (IS_PINEVIEW(dev)) {
6549 clock.n = ffs((fp & FP_N_PINEVIEW_DIV_MASK) >> FP_N_DIV_SHIFT) - 1;
6550 clock.m2 = (fp & FP_M2_PINEVIEW_DIV_MASK) >> FP_M2_DIV_SHIFT;
6551 } else {
6552 clock.n = (fp & FP_N_DIV_MASK) >> FP_N_DIV_SHIFT;
6553 clock.m2 = (fp & FP_M2_DIV_MASK) >> FP_M2_DIV_SHIFT;
6554 }
6555
6556 if (!IS_GEN2(dev)) {
6557 if (IS_PINEVIEW(dev))
6558 clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK_PINEVIEW) >>
6559 DPLL_FPA01_P1_POST_DIV_SHIFT_PINEVIEW);
6560 else
6561 clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK) >>
6562 DPLL_FPA01_P1_POST_DIV_SHIFT);
6563
6564 switch (dpll & DPLL_MODE_MASK) {
6565 case DPLLB_MODE_DAC_SERIAL:
6566 clock.p2 = dpll & DPLL_DAC_SERIAL_P2_CLOCK_DIV_5 ?
6567 5 : 10;
6568 break;
6569 case DPLLB_MODE_LVDS:
6570 clock.p2 = dpll & DPLLB_LVDS_P2_CLOCK_DIV_7 ?
6571 7 : 14;
6572 break;
6573 default:
6574 DRM_DEBUG_KMS("Unknown DPLL mode %08x in programmed "
6575 "mode\n", (int)(dpll & DPLL_MODE_MASK));
6576 return 0;
6577 }
6578
6579
6580 intel_clock(dev, 96000, &clock);
6581 } else {
6582 bool is_lvds = (pipe == 1) && (I915_READ(LVDS) & LVDS_PORT_EN);
6583
6584 if (is_lvds) {
6585 clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK_I830_LVDS) >>
6586 DPLL_FPA01_P1_POST_DIV_SHIFT);
6587 clock.p2 = 14;
6588
6589 if ((dpll & PLL_REF_INPUT_MASK) ==
6590 PLLB_REF_INPUT_SPREADSPECTRUMIN) {
6591
6592 intel_clock(dev, 66000, &clock);
6593 } else
6594 intel_clock(dev, 48000, &clock);
6595 } else {
6596 if (dpll & PLL_P1_DIVIDE_BY_TWO)
6597 clock.p1 = 2;
6598 else {
6599 clock.p1 = ((dpll & DPLL_FPA01_P1_POST_DIV_MASK_I830) >>
6600 DPLL_FPA01_P1_POST_DIV_SHIFT) + 2;
6601 }
6602 if (dpll & PLL_P2_DIVIDE_BY_4)
6603 clock.p2 = 4;
6604 else
6605 clock.p2 = 2;
6606
6607 intel_clock(dev, 48000, &clock);
6608 }
6609 }
6610
/* XXX: it would be nice to validate the reconstructed clock against the
 * limit tables, but the connector configuration needed for that is not
 * reliably available here.
 */
6616 return clock.dot;
6617}
6618
/* Return the mode currently programmed on the given pipe. */
6620struct drm_display_mode *intel_crtc_mode_get(struct drm_device *dev,
6621 struct drm_crtc *crtc)
6622{
6623 struct drm_i915_private *dev_priv = dev->dev_private;
6624 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6625 int pipe = intel_crtc->pipe;
6626 struct drm_display_mode *mode;
6627 int htot = I915_READ(HTOTAL(pipe));
6628 int hsync = I915_READ(HSYNC(pipe));
6629 int vtot = I915_READ(VTOTAL(pipe));
6630 int vsync = I915_READ(VSYNC(pipe));
6631
6632 mode = kzalloc(sizeof(*mode), GFP_KERNEL);
6633 if (!mode)
6634 return NULL;
6635
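/*
 * The pipe timing registers pack two fields each: the low 16 bits hold the
 * active/start count and the high 16 bits the total/end count, both stored
 * as (value - 1), hence the shifts and +1 adjustments below.
 */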
6636 mode->clock = intel_crtc_clock_get(dev, crtc);
6637 mode->hdisplay = (htot & 0xffff) + 1;
6638 mode->htotal = ((htot & 0xffff0000) >> 16) + 1;
6639 mode->hsync_start = (hsync & 0xffff) + 1;
6640 mode->hsync_end = ((hsync & 0xffff0000) >> 16) + 1;
6641 mode->vdisplay = (vtot & 0xffff) + 1;
6642 mode->vtotal = ((vtot & 0xffff0000) >> 16) + 1;
6643 mode->vsync_start = (vsync & 0xffff) + 1;
6644 mode->vsync_end = ((vsync & 0xffff0000) >> 16) + 1;
6645
6646 drm_mode_set_name(mode);
6647 drm_mode_set_crtcinfo(mode, 0);
6648
6649 return mode;
6650}
6651
#define GPU_IDLE_TIMEOUT 500 /* ms */

/* When this timer fires, the GPU has been idle for GPU_IDLE_TIMEOUT ms. */
6655static void intel_gpu_idle_timer(unsigned long arg)
6656{
6657 struct drm_device *dev = (struct drm_device *)arg;
6658 drm_i915_private_t *dev_priv = dev->dev_private;
6659
6660 if (!list_empty(&dev_priv->mm.active_list)) {
/* Still processing requests, so just re-arm the timer. */
6662 mod_timer(&dev_priv->idle_timer, jiffies +
6663 msecs_to_jiffies(GPU_IDLE_TIMEOUT));
6664 return;
6665 }
6666
6667 dev_priv->busy = false;
6668 queue_work(dev_priv->wq, &dev_priv->idle_work);
6669}
6670
#define CRTC_IDLE_TIMEOUT 1000 /* ms */
6672
6673static void intel_crtc_idle_timer(unsigned long arg)
6674{
6675 struct intel_crtc *intel_crtc = (struct intel_crtc *)arg;
6676 struct drm_crtc *crtc = &intel_crtc->base;
6677 drm_i915_private_t *dev_priv = crtc->dev->dev_private;
6678 struct intel_framebuffer *intel_fb;
6679
6680 intel_fb = to_intel_framebuffer(crtc->fb);
6681 if (intel_fb && intel_fb->obj->active) {
/* The framebuffer is still in use by the GPU; check again later. */
6683 mod_timer(&intel_crtc->idle_timer, jiffies +
6684 msecs_to_jiffies(CRTC_IDLE_TIMEOUT));
6685 return;
6686 }
6687
6688 intel_crtc->busy = false;
6689 queue_work(dev_priv->wq, &dev_priv->idle_work);
6690}
6691
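/*
 * LVDS downclocking scheme: page flips and other GPU activity mark a CRTC
 * busy, which bumps its PLL back to the full dot clock via
 * intel_increase_pllclock().  Once the per-CRTC idle timer expires without
 * further activity, the idle work handler calls intel_decrease_pllclock()
 * to drop to the pre-computed downclocked frequency and save panel power.
 */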
6692static void intel_increase_pllclock(struct drm_crtc *crtc)
6693{
6694 struct drm_device *dev = crtc->dev;
6695 drm_i915_private_t *dev_priv = dev->dev_private;
6696 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6697 int pipe = intel_crtc->pipe;
6698 int dpll_reg = DPLL(pipe);
6699 int dpll;
6700
6701 if (HAS_PCH_SPLIT(dev))
6702 return;
6703
6704 if (!dev_priv->lvds_downclock_avail)
6705 return;
6706
6707 dpll = I915_READ(dpll_reg);
6708 if (!HAS_PIPE_CXSR(dev) && (dpll & DISPLAY_RATE_SELECT_FPA1)) {
6709 DRM_DEBUG_DRIVER("upclocking LVDS\n");
6710
/* Unlock the panel power-sequencer registers first. */
6712 I915_WRITE(PP_CONTROL,
6713 I915_READ(PP_CONTROL) | PANEL_UNLOCK_REGS);
6714
6715 dpll &= ~DISPLAY_RATE_SELECT_FPA1;
6716 I915_WRITE(dpll_reg, dpll);
6717 intel_wait_for_vblank(dev, pipe);
6718
6719 dpll = I915_READ(dpll_reg);
6720 if (dpll & DISPLAY_RATE_SELECT_FPA1)
6721 DRM_DEBUG_DRIVER("failed to upclock LVDS!\n");
6722
/* ...and lock them again. */
6724 I915_WRITE(PP_CONTROL, I915_READ(PP_CONTROL) & 0x3);
6725 }
6726
/* Schedule downclocking if the CRTC stays idle. */
6728 mod_timer(&intel_crtc->idle_timer, jiffies +
6729 msecs_to_jiffies(CRTC_IDLE_TIMEOUT));
6730}
6731
6732static void intel_decrease_pllclock(struct drm_crtc *crtc)
6733{
6734 struct drm_device *dev = crtc->dev;
6735 drm_i915_private_t *dev_priv = dev->dev_private;
6736 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6737 int pipe = intel_crtc->pipe;
6738 int dpll_reg = DPLL(pipe);
6739 int dpll = I915_READ(dpll_reg);
6740
6741 if (HAS_PCH_SPLIT(dev))
6742 return;
6743
6744 if (!dev_priv->lvds_downclock_avail)
6745 return;
6746
/*
 * Only downclock if the low-frequency divisors were set up at mode-set
 * time and CxSR is not already taking care of power savings.
 */
6751 if (!HAS_PIPE_CXSR(dev) && intel_crtc->lowfreq_avail) {
6752 DRM_DEBUG_DRIVER("downclocking LVDS\n");
6753
/* Unlock the panel power-sequencer registers first. */
6755 I915_WRITE(PP_CONTROL, I915_READ(PP_CONTROL) |
6756 PANEL_UNLOCK_REGS);
6757
6758 dpll |= DISPLAY_RATE_SELECT_FPA1;
6759 I915_WRITE(dpll_reg, dpll);
6760 intel_wait_for_vblank(dev, pipe);
6761 dpll = I915_READ(dpll_reg);
6762 if (!(dpll & DISPLAY_RATE_SELECT_FPA1))
6763 DRM_DEBUG_DRIVER("failed to downclock LVDS!\n");
6764
/* ...and lock them again. */
6766 I915_WRITE(PP_CONTROL, I915_READ(PP_CONTROL) & 0x3);
6767 }
6768
6769}
6770
/**
 * intel_idle_update - adjust display clocks for idleness
 * @work: work struct
 *
 * Either the GPU or the display (or both) went idle.  Check each CRTC's
 * busy state here and downclock the ones that have gone quiet.
 */
6778static void intel_idle_update(struct work_struct *work)
6779{
6780 drm_i915_private_t *dev_priv = container_of(work, drm_i915_private_t,
6781 idle_work);
6782 struct drm_device *dev = dev_priv->dev;
6783 struct drm_crtc *crtc;
6784 struct intel_crtc *intel_crtc;
6785
6786 if (!i915_powersave)
6787 return;
6788
6789 mutex_lock(&dev->struct_mutex);
6790
6791 i915_update_gfx_val(dev_priv);
6792
6793 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
/* Skip CRTCs without a framebuffer attached. */
6795 if (!crtc->fb)
6796 continue;
6797
6798 intel_crtc = to_intel_crtc(crtc);
6799 if (!intel_crtc->busy)
6800 intel_decrease_pllclock(crtc);
6801 }
6802
6803
6804 mutex_unlock(&dev->struct_mutex);
6805}
6806
/**
 * intel_mark_busy - mark the GPU and possibly the display busy
 * @dev: drm device
 * @obj: object we're operating on
 *
 * Callers use this to note that the GPU is busy processing commands.  If
 * @obj happens to be a scanout buffer for one of the CRTCs, that CRTC is
 * marked busy as well so its PLL is bumped back up to full speed.
 */
6817void intel_mark_busy(struct drm_device *dev, struct drm_i915_gem_object *obj)
6818{
6819 drm_i915_private_t *dev_priv = dev->dev_private;
6820 struct drm_crtc *crtc = NULL;
6821 struct intel_framebuffer *intel_fb;
6822 struct intel_crtc *intel_crtc;
6823
6824 if (!drm_core_check_feature(dev, DRIVER_MODESET))
6825 return;
6826
6827 if (!dev_priv->busy)
6828 dev_priv->busy = true;
6829 else
6830 mod_timer(&dev_priv->idle_timer, jiffies +
6831 msecs_to_jiffies(GPU_IDLE_TIMEOUT));
6832
6833 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
6834 if (!crtc->fb)
6835 continue;
6836
6837 intel_crtc = to_intel_crtc(crtc);
6838 intel_fb = to_intel_framebuffer(crtc->fb);
6839 if (intel_fb->obj == obj) {
6840 if (!intel_crtc->busy) {
/* Non-busy -> busy: bump the PLL back up. */
6842 intel_increase_pllclock(crtc);
6843 intel_crtc->busy = true;
6844 } else {
/* Busy -> busy: just push the idle timer out. */
6846 mod_timer(&intel_crtc->idle_timer, jiffies +
6847 msecs_to_jiffies(CRTC_IDLE_TIMEOUT));
6848 }
6849 }
6850 }
6851}
6852
6853static void intel_crtc_destroy(struct drm_crtc *crtc)
6854{
6855 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6856 struct drm_device *dev = crtc->dev;
6857 struct intel_unpin_work *work;
6858 unsigned long flags;
6859
6860 spin_lock_irqsave(&dev->event_lock, flags);
6861 work = intel_crtc->unpin_work;
6862 intel_crtc->unpin_work = NULL;
6863 spin_unlock_irqrestore(&dev->event_lock, flags);
6864
6865 if (work) {
6866 cancel_work_sync(&work->work);
6867 kfree(work);
6868 }
6869
6870 drm_crtc_cleanup(crtc);
6871
6872 kfree(intel_crtc);
6873}
6874
6875static void intel_unpin_work_fn(struct work_struct *__work)
6876{
6877 struct intel_unpin_work *work =
6878 container_of(__work, struct intel_unpin_work, work);
6879
6880 mutex_lock(&work->dev->struct_mutex);
6881 i915_gem_object_unpin(work->old_fb_obj);
6882 drm_gem_object_unreference(&work->pending_flip_obj->base);
6883 drm_gem_object_unreference(&work->old_fb_obj->base);
6884
6885 intel_update_fbc(work->dev);
6886 mutex_unlock(&work->dev->struct_mutex);
6887 kfree(work);
6888}
6889
6890static void do_intel_finish_page_flip(struct drm_device *dev,
6891 struct drm_crtc *crtc)
6892{
6893 drm_i915_private_t *dev_priv = dev->dev_private;
6894 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
6895 struct intel_unpin_work *work;
6896 struct drm_i915_gem_object *obj;
6897 struct drm_pending_vblank_event *e;
6898 struct timeval tnow, tvbl;
6899 unsigned long flags;
6900
/* Ignore vblank irqs that arrive before the crtc is set up. */
6902 if (intel_crtc == NULL)
6903 return;
6904
6905 do_gettimeofday(&tnow);
6906
6907 spin_lock_irqsave(&dev->event_lock, flags);
6908 work = intel_crtc->unpin_work;
6909 if (work == NULL || !work->pending) {
6910 spin_unlock_irqrestore(&dev->event_lock, flags);
6911 return;
6912 }
6913
6914 intel_crtc->unpin_work = NULL;
6915
6916 if (work->event) {
6917 e = work->event;
6918 e->event.sequence = drm_vblank_count_and_time(dev, intel_crtc->pipe, &tvbl);
6919
/* Were we called before the vblank count and timestamp were updated for
 * this flip's vblank interval?  If so, bump the reported sequence by one
 * and push the timestamp forward by one frame duration so userspace sees
 * a consistent completion event.  We assume this happened if we are
 * called more than 0.9 frame durations after the last timestamped vblank.
 */
6932 if (10 * (timeval_to_ns(&tnow) - timeval_to_ns(&tvbl)) >
6933 9 * crtc->framedur_ns) {
6934 e->event.sequence++;
6935 tvbl = ns_to_timeval(timeval_to_ns(&tvbl) +
6936 crtc->framedur_ns);
6937 }
6938
6939 e->event.tv_sec = tvbl.tv_sec;
6940 e->event.tv_usec = tvbl.tv_usec;
6941
6942 list_add_tail(&e->base.link,
6943 &e->base.file_priv->event_list);
6944 wake_up_interruptible(&e->base.file_priv->event_wait);
6945 }
6946
6947 drm_vblank_put(dev, intel_crtc->pipe);
6948
6949 spin_unlock_irqrestore(&dev->event_lock, flags);
6950
6951 obj = work->old_fb_obj;
6952
6953 atomic_clear_mask(1 << intel_crtc->plane,
6954 &obj->pending_flip.counter);
6955 if (atomic_read(&obj->pending_flip) == 0)
6956 wake_up(&dev_priv->pending_flip_queue);
6957
6958 schedule_work(&work->work);
6959
6960 trace_i915_flip_complete(intel_crtc->plane, work->pending_flip_obj);
6961}
6962
6963void intel_finish_page_flip(struct drm_device *dev, int pipe)
6964{
6965 drm_i915_private_t *dev_priv = dev->dev_private;
6966 struct drm_crtc *crtc = dev_priv->pipe_to_crtc_mapping[pipe];
6967
6968 do_intel_finish_page_flip(dev, crtc);
6969}
6970
6971void intel_finish_page_flip_plane(struct drm_device *dev, int plane)
6972{
6973 drm_i915_private_t *dev_priv = dev->dev_private;
6974 struct drm_crtc *crtc = dev_priv->plane_to_crtc_mapping[plane];
6975
6976 do_intel_finish_page_flip(dev, crtc);
6977}
6978
6979void intel_prepare_page_flip(struct drm_device *dev, int plane)
6980{
6981 drm_i915_private_t *dev_priv = dev->dev_private;
6982 struct intel_crtc *intel_crtc =
6983 to_intel_crtc(dev_priv->plane_to_crtc_mapping[plane]);
6984 unsigned long flags;
6985
6986 spin_lock_irqsave(&dev->event_lock, flags);
6987 if (intel_crtc->unpin_work) {
6988 if ((++intel_crtc->unpin_work->pending) > 1)
6989 DRM_ERROR("Prepared flip multiple times\n");
6990 } else {
6991 DRM_DEBUG_DRIVER("preparing flip with no unpin work?\n");
6992 }
6993 spin_unlock_irqrestore(&dev->event_lock, flags);
6994}
6995
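/*
 * The gen-specific queue_flip() helpers below emit an MI_DISPLAY_FLIP
 * packet (preceded, on gen2/3, by an MI_WAIT_FOR_EVENT on the plane's
 * flip-pending bit) carrying the target plane, the fb pitch and the new
 * surface address.  The packet layout differs between generations, which
 * is why intel_init_display() installs a per-generation helper in
 * dev_priv->display.queue_flip.
 */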
6996static int intel_gen2_queue_flip(struct drm_device *dev,
6997 struct drm_crtc *crtc,
6998 struct drm_framebuffer *fb,
6999 struct drm_i915_gem_object *obj)
7000{
7001 struct drm_i915_private *dev_priv = dev->dev_private;
7002 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
7003 unsigned long offset;
7004 u32 flip_mask;
7005 int ret;
7006
7007 ret = intel_pin_and_fence_fb_obj(dev, obj, LP_RING(dev_priv));
7008 if (ret)
7009 goto out;
7010
/* Offset into the buffer for cases of shared fbs between CRTCs. */
7012 offset = crtc->y * fb->pitch + crtc->x * fb->bits_per_pixel/8;
7013
7014 ret = BEGIN_LP_RING(6);
7015 if (ret)
7016 goto out;
7017
/* Can't queue multiple flips, so wait for the previous
 * one to finish before executing the next.
 */
7021 if (intel_crtc->plane)
7022 flip_mask = MI_WAIT_FOR_PLANE_B_FLIP;
7023 else
7024 flip_mask = MI_WAIT_FOR_PLANE_A_FLIP;
7025 OUT_RING(MI_WAIT_FOR_EVENT | flip_mask);
7026 OUT_RING(MI_NOOP);
7027 OUT_RING(MI_DISPLAY_FLIP |
7028 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
7029 OUT_RING(fb->pitch);
7030 OUT_RING(obj->gtt_offset + offset);
7031 OUT_RING(MI_NOOP);
7032 ADVANCE_LP_RING();
7033out:
7034 return ret;
7035}
7036
7037static int intel_gen3_queue_flip(struct drm_device *dev,
7038 struct drm_crtc *crtc,
7039 struct drm_framebuffer *fb,
7040 struct drm_i915_gem_object *obj)
7041{
7042 struct drm_i915_private *dev_priv = dev->dev_private;
7043 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
7044 unsigned long offset;
7045 u32 flip_mask;
7046 int ret;
7047
7048 ret = intel_pin_and_fence_fb_obj(dev, obj, LP_RING(dev_priv));
7049 if (ret)
7050 goto out;
7051
7052
7053 offset = crtc->y * fb->pitch + crtc->x * fb->bits_per_pixel/8;
7054
7055 ret = BEGIN_LP_RING(6);
7056 if (ret)
7057 goto out;
7058
7059 if (intel_crtc->plane)
7060 flip_mask = MI_WAIT_FOR_PLANE_B_FLIP;
7061 else
7062 flip_mask = MI_WAIT_FOR_PLANE_A_FLIP;
7063 OUT_RING(MI_WAIT_FOR_EVENT | flip_mask);
7064 OUT_RING(MI_NOOP);
7065 OUT_RING(MI_DISPLAY_FLIP_I915 |
7066 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
7067 OUT_RING(fb->pitch);
7068 OUT_RING(obj->gtt_offset + offset);
7069 OUT_RING(MI_NOOP);
7070
7071 ADVANCE_LP_RING();
7072out:
7073 return ret;
7074}
7075
7076static int intel_gen4_queue_flip(struct drm_device *dev,
7077 struct drm_crtc *crtc,
7078 struct drm_framebuffer *fb,
7079 struct drm_i915_gem_object *obj)
7080{
7081 struct drm_i915_private *dev_priv = dev->dev_private;
7082 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
7083 uint32_t pf, pipesrc;
7084 int ret;
7085
7086 ret = intel_pin_and_fence_fb_obj(dev, obj, LP_RING(dev_priv));
7087 if (ret)
7088 goto out;
7089
7090 ret = BEGIN_LP_RING(4);
7091 if (ret)
7092 goto out;
7093
/* i965+ uses the linear or tiled offsets from the
 * Display Registers (which do not change across a page-flip)
 * so we need only reprogram the base address.
 */
7098 OUT_RING(MI_DISPLAY_FLIP |
7099 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
7100 OUT_RING(fb->pitch);
7101 OUT_RING(obj->gtt_offset | obj->tiling_mode);
7102
/* XXX Enabling the panel-fitter across a page-flip is so far
 * untested on non-native modes, so ignore it for now.
 */
7107 pf = 0;
7108 pipesrc = I915_READ(PIPESRC(intel_crtc->pipe)) & 0x0fff0fff;
7109 OUT_RING(pf | pipesrc);
7110 ADVANCE_LP_RING();
7111out:
7112 return ret;
7113}
7114
7115static int intel_gen6_queue_flip(struct drm_device *dev,
7116 struct drm_crtc *crtc,
7117 struct drm_framebuffer *fb,
7118 struct drm_i915_gem_object *obj)
7119{
7120 struct drm_i915_private *dev_priv = dev->dev_private;
7121 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
7122 uint32_t pf, pipesrc;
7123 int ret;
7124
7125 ret = intel_pin_and_fence_fb_obj(dev, obj, LP_RING(dev_priv));
7126 if (ret)
7127 goto out;
7128
7129 ret = BEGIN_LP_RING(4);
7130 if (ret)
7131 goto out;
7132
7133 OUT_RING(MI_DISPLAY_FLIP |
7134 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
7135 OUT_RING(fb->pitch | obj->tiling_mode);
7136 OUT_RING(obj->gtt_offset);
7137
7138 pf = I915_READ(PF_CTL(intel_crtc->pipe)) & PF_ENABLE;
7139 pipesrc = I915_READ(PIPESRC(intel_crtc->pipe)) & 0x0fff0fff;
7140 OUT_RING(pf | pipesrc);
7141 ADVANCE_LP_RING();
7142out:
7143 return ret;
7144}
7145
7146
/*
 * Gen7 flips are queued on the blit ring: on early hardware the render
 * ring does not reliably generate flip-completion interrupts, which would
 * leave clients hanging after the first queued flip, so the blit ring is
 * used instead.
 */
7152static int intel_gen7_queue_flip(struct drm_device *dev,
7153 struct drm_crtc *crtc,
7154 struct drm_framebuffer *fb,
7155 struct drm_i915_gem_object *obj)
7156{
7157 struct drm_i915_private *dev_priv = dev->dev_private;
7158 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
7159 struct intel_ring_buffer *ring = &dev_priv->ring[BCS];
7160 int ret;
7161
7162 ret = intel_pin_and_fence_fb_obj(dev, obj, ring);
7163 if (ret)
7164 goto out;
7165
7166 ret = intel_ring_begin(ring, 4);
7167 if (ret)
7168 goto out;
7169
7170 intel_ring_emit(ring, MI_DISPLAY_FLIP_I915 | (intel_crtc->plane << 19));
7171 intel_ring_emit(ring, (fb->pitch | obj->tiling_mode));
7172 intel_ring_emit(ring, (obj->gtt_offset));
7173 intel_ring_emit(ring, (MI_NOOP));
7174 intel_ring_advance(ring);
7175out:
7176 return ret;
7177}
7178
7179static int intel_default_queue_flip(struct drm_device *dev,
7180 struct drm_crtc *crtc,
7181 struct drm_framebuffer *fb,
7182 struct drm_i915_gem_object *obj)
7183{
7184 return -ENODEV;
7185}
7186
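/*
 * Rough flow of a page flip request: allocate an intel_unpin_work, grab a
 * vblank reference, stash the old fb object, queue the gen-specific flip
 * command and return.  Completion is signalled from the flip-done
 * interrupt via intel_finish_page_flip(), which delivers the vblank event
 * to userspace and schedules intel_unpin_work_fn() to unpin and drop the
 * references on the old framebuffer.
 */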
7187static int intel_crtc_page_flip(struct drm_crtc *crtc,
7188 struct drm_framebuffer *fb,
7189 struct drm_pending_vblank_event *event)
7190{
7191 struct drm_device *dev = crtc->dev;
7192 struct drm_i915_private *dev_priv = dev->dev_private;
7193 struct intel_framebuffer *intel_fb;
7194 struct drm_i915_gem_object *obj;
7195 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
7196 struct intel_unpin_work *work;
7197 unsigned long flags;
7198 int ret;
7199
7200 work = kzalloc(sizeof *work, GFP_KERNEL);
7201 if (work == NULL)
7202 return -ENOMEM;
7203
7204 work->event = event;
7205 work->dev = crtc->dev;
7206 intel_fb = to_intel_framebuffer(crtc->fb);
7207 work->old_fb_obj = intel_fb->obj;
7208 INIT_WORK(&work->work, intel_unpin_work_fn);
7209
7210 ret = drm_vblank_get(dev, intel_crtc->pipe);
7211 if (ret)
7212 goto free_work;
7213
/* We borrow the event spin lock for protecting unpin_work. */
7215 spin_lock_irqsave(&dev->event_lock, flags);
7216 if (intel_crtc->unpin_work) {
7217 spin_unlock_irqrestore(&dev->event_lock, flags);
7218 kfree(work);
7219 drm_vblank_put(dev, intel_crtc->pipe);
7220
7221 DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
7222 return -EBUSY;
7223 }
7224 intel_crtc->unpin_work = work;
7225 spin_unlock_irqrestore(&dev->event_lock, flags);
7226
7227 intel_fb = to_intel_framebuffer(fb);
7228 obj = intel_fb->obj;
7229
7230 mutex_lock(&dev->struct_mutex);
7231
/* Reference the objects for the scheduled work. */
7233 drm_gem_object_reference(&work->old_fb_obj->base);
7234 drm_gem_object_reference(&obj->base);
7235
7236 crtc->fb = fb;
7237
7238 work->pending_flip_obj = obj;
7239
7240 work->enable_stall_check = true;
7241
/* Block clients from rendering to the new back buffer until
 * the flip occurs and the object is no longer visible.
 */
7245 atomic_add(1 << intel_crtc->plane, &work->old_fb_obj->pending_flip);
7246
7247 ret = dev_priv->display.queue_flip(dev, crtc, fb, obj);
7248 if (ret)
7249 goto cleanup_pending;
7250
7251 intel_disable_fbc(dev);
7252 mutex_unlock(&dev->struct_mutex);
7253
7254 trace_i915_flip_request(intel_crtc->plane, obj);
7255
7256 return 0;
7257
7258cleanup_pending:
7259 atomic_sub(1 << intel_crtc->plane, &work->old_fb_obj->pending_flip);
7260 drm_gem_object_unreference(&work->old_fb_obj->base);
7261 drm_gem_object_unreference(&obj->base);
7262 mutex_unlock(&dev->struct_mutex);
7263
7264 spin_lock_irqsave(&dev->event_lock, flags);
7265 intel_crtc->unpin_work = NULL;
7266 spin_unlock_irqrestore(&dev->event_lock, flags);
7267
7268 drm_vblank_put(dev, intel_crtc->pipe);
7269free_work:
7270 kfree(work);
7271
7272 return ret;
7273}
7274
7275static void intel_sanitize_modesetting(struct drm_device *dev,
7276 int pipe, int plane)
7277{
7278 struct drm_i915_private *dev_priv = dev->dev_private;
7279 u32 reg, val;
7280
7281 if (HAS_PCH_SPLIT(dev))
7282 return;
7283
/*
 * The BIOS may have left a display plane enabled but attached to the
 * other pipe.  Tearing down the active mode in that state would leave
 * the plane scanning out from a disabled pipe, which in turn confuses
 * the first real modeset.  Detect the conflict and shut the stale
 * plane/pipe pair down so we start from a sane configuration.
 */
7295 reg = DSPCNTR(plane);
7296 val = I915_READ(reg);
7297
7298 if ((val & DISPLAY_PLANE_ENABLE) == 0)
7299 return;
7300 if (!!(val & DISPPLANE_SEL_PIPE_MASK) == pipe)
7301 return;
7302
/* This plane is active but attached to the other pipe. */
pipe = !pipe;

/* Disable the plane and wait for it to stop reading from the pipe. */
7307 intel_disable_plane(dev_priv, plane, pipe);
7308 intel_disable_pipe(dev_priv, pipe);
7309}
7310
7311static void intel_crtc_reset(struct drm_crtc *crtc)
7312{
7313 struct drm_device *dev = crtc->dev;
7314 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
7315
/* Reset flags back to the 'unknown' status so that they
 * will be correctly set on the initial modeset.
 */
intel_crtc->dpms_mode = -1;

/* We need to fix up any BIOS configuration that conflicts with
 * our expectations.
 */
7324 intel_sanitize_modesetting(dev, intel_crtc->pipe, intel_crtc->plane);
7325}
7326
7327static struct drm_crtc_helper_funcs intel_helper_funcs = {
7328 .dpms = intel_crtc_dpms,
7329 .mode_fixup = intel_crtc_mode_fixup,
7330 .mode_set = intel_crtc_mode_set,
7331 .mode_set_base = intel_pipe_set_base,
7332 .mode_set_base_atomic = intel_pipe_set_base_atomic,
7333 .load_lut = intel_crtc_load_lut,
7334 .disable = intel_crtc_disable,
7335};
7336
7337static const struct drm_crtc_funcs intel_crtc_funcs = {
7338 .reset = intel_crtc_reset,
7339 .cursor_set = intel_crtc_cursor_set,
7340 .cursor_move = intel_crtc_cursor_move,
7341 .gamma_set = intel_crtc_gamma_set,
7342 .set_config = drm_crtc_helper_set_config,
7343 .destroy = intel_crtc_destroy,
7344 .page_flip = intel_crtc_page_flip,
7345};
7346
7347static void intel_crtc_init(struct drm_device *dev, int pipe)
7348{
7349 drm_i915_private_t *dev_priv = dev->dev_private;
7350 struct intel_crtc *intel_crtc;
7351 int i;
7352
7353 intel_crtc = kzalloc(sizeof(struct intel_crtc) + (INTELFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
7354 if (intel_crtc == NULL)
7355 return;
7356
7357 drm_crtc_init(dev, &intel_crtc->base, &intel_crtc_funcs);
7358
7359 drm_mode_crtc_set_gamma_size(&intel_crtc->base, 256);
7360 for (i = 0; i < 256; i++) {
7361 intel_crtc->lut_r[i] = i;
7362 intel_crtc->lut_g[i] = i;
7363 intel_crtc->lut_b[i] = i;
7364 }
7365
7366
7367 intel_crtc->pipe = pipe;
7368 intel_crtc->plane = pipe;
7369 if (IS_MOBILE(dev) && IS_GEN3(dev)) {
7370 DRM_DEBUG_KMS("swapping pipes & planes for FBC\n");
7371 intel_crtc->plane = !pipe;
7372 }
7373
7374 BUG_ON(pipe >= ARRAY_SIZE(dev_priv->plane_to_crtc_mapping) ||
7375 dev_priv->plane_to_crtc_mapping[intel_crtc->plane] != NULL);
7376 dev_priv->plane_to_crtc_mapping[intel_crtc->plane] = &intel_crtc->base;
7377 dev_priv->pipe_to_crtc_mapping[intel_crtc->pipe] = &intel_crtc->base;
7378
7379 intel_crtc_reset(&intel_crtc->base);
7380 intel_crtc->active = true;
7381 intel_crtc->bpp = 24;
7382
7383 if (HAS_PCH_SPLIT(dev)) {
7384 if (pipe == 2 && IS_IVYBRIDGE(dev))
7385 intel_crtc->no_pll = true;
7386 intel_helper_funcs.prepare = ironlake_crtc_prepare;
7387 intel_helper_funcs.commit = ironlake_crtc_commit;
7388 } else {
7389 intel_helper_funcs.prepare = i9xx_crtc_prepare;
7390 intel_helper_funcs.commit = i9xx_crtc_commit;
7391 }
7392
7393 drm_crtc_helper_add(&intel_crtc->base, &intel_helper_funcs);
7394
7395 intel_crtc->busy = false;
7396
7397 setup_timer(&intel_crtc->idle_timer, intel_crtc_idle_timer,
7398 (unsigned long)intel_crtc);
7399}
7400
7401int intel_get_pipe_from_crtc_id(struct drm_device *dev, void *data,
7402 struct drm_file *file)
7403{
7404 drm_i915_private_t *dev_priv = dev->dev_private;
7405 struct drm_i915_get_pipe_from_crtc_id *pipe_from_crtc_id = data;
7406 struct drm_mode_object *drmmode_obj;
7407 struct intel_crtc *crtc;
7408
7409 if (!dev_priv) {
7410 DRM_ERROR("called with no initialization\n");
7411 return -EINVAL;
7412 }
7413
7414 drmmode_obj = drm_mode_object_find(dev, pipe_from_crtc_id->crtc_id,
7415 DRM_MODE_OBJECT_CRTC);
7416
7417 if (!drmmode_obj) {
7418 DRM_ERROR("no such CRTC id\n");
7419 return -EINVAL;
7420 }
7421
7422 crtc = to_intel_crtc(obj_to_crtc(drmmode_obj));
7423 pipe_from_crtc_id->pipe = crtc->pipe;
7424
7425 return 0;
7426}
7427
7428static int intel_encoder_clones(struct drm_device *dev, int type_mask)
7429{
7430 struct intel_encoder *encoder;
7431 int index_mask = 0;
7432 int entry = 0;
7433
7434 list_for_each_entry(encoder, &dev->mode_config.encoder_list, base.head) {
7435 if (type_mask & encoder->clone_mask)
7436 index_mask |= (1 << entry);
7437 entry++;
7438 }
7439
7440 return index_mask;
7441}
7442
7443static bool has_edp_a(struct drm_device *dev)
7444{
7445 struct drm_i915_private *dev_priv = dev->dev_private;
7446
7447 if (!IS_MOBILE(dev))
7448 return false;
7449
7450 if ((I915_READ(DP_A) & DP_DETECTED) == 0)
7451 return false;
7452
7453 if (IS_GEN5(dev) &&
7454 (I915_READ(ILK_DISPLAY_CHICKEN_FUSES) & ILK_eDP_A_DISABLE))
7455 return false;
7456
7457 return true;
7458}
7459
7460static void intel_setup_outputs(struct drm_device *dev)
7461{
7462 struct drm_i915_private *dev_priv = dev->dev_private;
7463 struct intel_encoder *encoder;
7464 bool dpd_is_edp = false;
7465 bool has_lvds = false;
7466
7467 if (IS_MOBILE(dev) && !IS_I830(dev))
7468 has_lvds = intel_lvds_init(dev);
7469 if (!has_lvds && !HAS_PCH_SPLIT(dev)) {
/* Disable the panel fitter if LVDS was not found. */
7471 I915_WRITE(PFIT_CONTROL, 0);
7472 }
7473
7474 if (HAS_PCH_SPLIT(dev)) {
7475 dpd_is_edp = intel_dpd_is_edp(dev);
7476
7477 if (has_edp_a(dev))
7478 intel_dp_init(dev, DP_A);
7479
7480 if (dpd_is_edp && (I915_READ(PCH_DP_D) & DP_DETECTED))
7481 intel_dp_init(dev, PCH_DP_D);
7482 }
7483
7484 intel_crt_init(dev);
7485
7486 if (HAS_PCH_SPLIT(dev)) {
7487 int found;
7488
7489 if (I915_READ(HDMIB) & PORT_DETECTED) {
/* PCH SDVOB shares the port with HDMIB. */
7491 found = intel_sdvo_init(dev, PCH_SDVOB);
7492 if (!found)
7493 intel_hdmi_init(dev, HDMIB);
7494 if (!found && (I915_READ(PCH_DP_B) & DP_DETECTED))
7495 intel_dp_init(dev, PCH_DP_B);
7496 }
7497
7498 if (I915_READ(HDMIC) & PORT_DETECTED)
7499 intel_hdmi_init(dev, HDMIC);
7500
7501 if (I915_READ(HDMID) & PORT_DETECTED)
7502 intel_hdmi_init(dev, HDMID);
7503
7504 if (I915_READ(PCH_DP_C) & DP_DETECTED)
7505 intel_dp_init(dev, PCH_DP_C);
7506
7507 if (!dpd_is_edp && (I915_READ(PCH_DP_D) & DP_DETECTED))
7508 intel_dp_init(dev, PCH_DP_D);
7509
7510 } else if (SUPPORTS_DIGITAL_OUTPUTS(dev)) {
7511 bool found = false;
7512
7513 if (I915_READ(SDVOB) & SDVO_DETECTED) {
7514 DRM_DEBUG_KMS("probing SDVOB\n");
7515 found = intel_sdvo_init(dev, SDVOB);
7516 if (!found && SUPPORTS_INTEGRATED_HDMI(dev)) {
7517 DRM_DEBUG_KMS("probing HDMI on SDVOB\n");
7518 intel_hdmi_init(dev, SDVOB);
7519 }
7520
7521 if (!found && SUPPORTS_INTEGRATED_DP(dev)) {
7522 DRM_DEBUG_KMS("probing DP_B\n");
7523 intel_dp_init(dev, DP_B);
7524 }
7525 }
7526
/* Before G4X, SDVOC does not have its own detect register. */
7529 if (I915_READ(SDVOB) & SDVO_DETECTED) {
7530 DRM_DEBUG_KMS("probing SDVOC\n");
7531 found = intel_sdvo_init(dev, SDVOC);
7532 }
7533
7534 if (!found && (I915_READ(SDVOC) & SDVO_DETECTED)) {
7535
7536 if (SUPPORTS_INTEGRATED_HDMI(dev)) {
7537 DRM_DEBUG_KMS("probing HDMI on SDVOC\n");
7538 intel_hdmi_init(dev, SDVOC);
7539 }
7540 if (SUPPORTS_INTEGRATED_DP(dev)) {
7541 DRM_DEBUG_KMS("probing DP_C\n");
7542 intel_dp_init(dev, DP_C);
7543 }
7544 }
7545
7546 if (SUPPORTS_INTEGRATED_DP(dev) &&
7547 (I915_READ(DP_D) & DP_DETECTED)) {
7548 DRM_DEBUG_KMS("probing DP_D\n");
7549 intel_dp_init(dev, DP_D);
7550 }
7551 } else if (IS_GEN2(dev))
7552 intel_dvo_init(dev);
7553
7554 if (SUPPORTS_TV(dev))
7555 intel_tv_init(dev);
7556
7557 list_for_each_entry(encoder, &dev->mode_config.encoder_list, base.head) {
7558 encoder->base.possible_crtcs = encoder->crtc_mask;
7559 encoder->base.possible_clones =
7560 intel_encoder_clones(dev, encoder->clone_mask);
7561 }
7562
/* Disable any leftover outputs/crtcs before KMS takes over. */
7564 drm_helper_disable_unused_functions(dev);
7565
7566 if (HAS_PCH_SPLIT(dev))
7567 ironlake_init_pch_refclk(dev);
7568}
7569
7570static void intel_user_framebuffer_destroy(struct drm_framebuffer *fb)
7571{
7572 struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
7573
7574 drm_framebuffer_cleanup(fb);
7575 drm_gem_object_unreference_unlocked(&intel_fb->obj->base);
7576
7577 kfree(intel_fb);
7578}
7579
7580static int intel_user_framebuffer_create_handle(struct drm_framebuffer *fb,
7581 struct drm_file *file,
7582 unsigned int *handle)
7583{
7584 struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
7585 struct drm_i915_gem_object *obj = intel_fb->obj;
7586
7587 return drm_gem_handle_create(file, &obj->base, handle);
7588}
7589
7590static const struct drm_framebuffer_funcs intel_fb_funcs = {
7591 .destroy = intel_user_framebuffer_destroy,
7592 .create_handle = intel_user_framebuffer_create_handle,
7593};
7594
7595int intel_framebuffer_init(struct drm_device *dev,
7596 struct intel_framebuffer *intel_fb,
7597 struct drm_mode_fb_cmd *mode_cmd,
7598 struct drm_i915_gem_object *obj)
7599{
7600 int ret;
7601
7602 if (obj->tiling_mode == I915_TILING_Y)
7603 return -EINVAL;
7604
7605 if (mode_cmd->pitch & 63)
7606 return -EINVAL;
7607
7608 switch (mode_cmd->bpp) {
7609 case 8:
7610 case 16:
7611
7612 if (mode_cmd->depth == 15 && !HAS_PCH_SPLIT(dev))
7613 return -EINVAL;
7614 break;
7615
7616 case 24:
7617 case 32:
7618 break;
7619 default:
7620 return -EINVAL;
7621 }
7622
7623 ret = drm_framebuffer_init(dev, &intel_fb->base, &intel_fb_funcs);
7624 if (ret) {
7625 DRM_ERROR("framebuffer init failed %d\n", ret);
7626 return ret;
7627 }
7628
7629 drm_helper_mode_fill_fb_struct(&intel_fb->base, mode_cmd);
7630 intel_fb->obj = obj;
7631 return 0;
7632}
7633
7634static struct drm_framebuffer *
7635intel_user_framebuffer_create(struct drm_device *dev,
7636 struct drm_file *filp,
7637 struct drm_mode_fb_cmd *mode_cmd)
7638{
7639 struct drm_i915_gem_object *obj;
7640
7641 obj = to_intel_bo(drm_gem_object_lookup(dev, filp, mode_cmd->handle));
7642 if (&obj->base == NULL)
7643 return ERR_PTR(-ENOENT);
7644
7645 return intel_framebuffer_create(dev, mode_cmd, obj);
7646}
7647
7648static const struct drm_mode_config_funcs intel_mode_funcs = {
7649 .fb_create = intel_user_framebuffer_create,
7650 .output_poll_changed = intel_fb_output_poll_changed,
7651};
7652
7653static struct drm_i915_gem_object *
7654intel_alloc_context_page(struct drm_device *dev)
7655{
7656 struct drm_i915_gem_object *ctx;
7657 int ret;
7658
7659 WARN_ON(!mutex_is_locked(&dev->struct_mutex));
7660
7661 ctx = i915_gem_alloc_object(dev, 4096);
7662 if (!ctx) {
7663 DRM_DEBUG("failed to alloc power context, RC6 disabled\n");
7664 return NULL;
7665 }
7666
7667 ret = i915_gem_object_pin(ctx, 4096, true);
7668 if (ret) {
7669 DRM_ERROR("failed to pin power context: %d\n", ret);
7670 goto err_unref;
7671 }
7672
7673 ret = i915_gem_object_set_to_gtt_domain(ctx, 1);
7674 if (ret) {
7675 DRM_ERROR("failed to set-domain on power context: %d\n", ret);
7676 goto err_unpin;
7677 }
7678
7679 return ctx;
7680
7681err_unpin:
7682 i915_gem_object_unpin(ctx);
7683err_unref:
7684 drm_gem_object_unreference(&ctx->base);
7685 mutex_unlock(&dev->struct_mutex);
7686 return NULL;
7687}
7688
7689bool ironlake_set_drps(struct drm_device *dev, u8 val)
7690{
7691 struct drm_i915_private *dev_priv = dev->dev_private;
7692 u16 rgvswctl;
7693
7694 rgvswctl = I915_READ16(MEMSWCTL);
7695 if (rgvswctl & MEMCTL_CMD_STS) {
7696 DRM_DEBUG("gpu busy, RCS change rejected\n");
7697 return false;
7698 }
7699
7700 rgvswctl = (MEMCTL_CMD_CHFREQ << MEMCTL_CMD_SHIFT) |
7701 (val << MEMCTL_FREQ_SHIFT) | MEMCTL_SFCAVM;
7702 I915_WRITE16(MEMSWCTL, rgvswctl);
7703 POSTING_READ16(MEMSWCTL);
7704
7705 rgvswctl |= MEMCTL_CMD_STS;
7706 I915_WRITE16(MEMSWCTL, rgvswctl);
7707
7708 return true;
7709}
7710
7711void ironlake_enable_drps(struct drm_device *dev)
7712{
7713 struct drm_i915_private *dev_priv = dev->dev_private;
7714 u32 rgvmodectl = I915_READ(MEMMODECTL);
7715 u8 fmax, fmin, fstart, vstart;
7716
7717
7718 I915_WRITE16(PMMISC, I915_READ(PMMISC) | MCPPCE_EN);
7719 I915_WRITE16(TSC1, I915_READ(TSC1) | TSE);
7720
7721
7722 I915_WRITE(RCUPEI, 100000);
7723 I915_WRITE(RCDNEI, 100000);
7724
7725
7726 I915_WRITE(RCBMAXAVG, 90000);
7727 I915_WRITE(RCBMINAVG, 80000);
7728
7729 I915_WRITE(MEMIHYST, 1);
7730
/* Read the max, min and starting frequency points from the hardware. */
7732 fmax = (rgvmodectl & MEMMODE_FMAX_MASK) >> MEMMODE_FMAX_SHIFT;
7733 fmin = (rgvmodectl & MEMMODE_FMIN_MASK);
7734 fstart = (rgvmodectl & MEMMODE_FSTART_MASK) >>
7735 MEMMODE_FSTART_SHIFT;
7736
7737 vstart = (I915_READ(PXVFREQ_BASE + (fstart * 4)) & PXVFREQ_PX_MASK) >>
7738 PXVFREQ_PX_SHIFT;
7739
7740 dev_priv->fmax = fmax;
7741 dev_priv->fstart = fstart;
7742
7743 dev_priv->max_delay = fstart;
7744 dev_priv->min_delay = fmin;
7745 dev_priv->cur_delay = fstart;
7746
7747 DRM_DEBUG_DRIVER("fmax: %d, fmin: %d, fstart: %d\n",
7748 fmax, fmin, fstart);
7749
7750 I915_WRITE(MEMINTREN, MEMINT_CX_SUPR_EN | MEMINT_EVAL_CHG_EN);
7751
7752
7753
7754
7755
7756 I915_WRITE(VIDSTART, vstart);
7757 POSTING_READ(VIDSTART);
7758
7759 rgvmodectl |= MEMMODE_SWMODE_EN;
7760 I915_WRITE(MEMMODECTL, rgvmodectl);
7761
7762 if (wait_for((I915_READ(MEMSWCTL) & MEMCTL_CMD_STS) == 0, 10))
7763 DRM_ERROR("stuck trying to change perf mode\n");
7764 msleep(1);
7765
7766 ironlake_set_drps(dev, fstart);
7767
7768 dev_priv->last_count1 = I915_READ(0x112e4) + I915_READ(0x112e8) +
7769 I915_READ(0x112e0);
7770 dev_priv->last_time1 = jiffies_to_msecs(jiffies);
7771 dev_priv->last_count2 = I915_READ(0x112f4);
7772 getrawmonotonic(&dev_priv->last_time2);
7773}
7774
7775void ironlake_disable_drps(struct drm_device *dev)
7776{
7777 struct drm_i915_private *dev_priv = dev->dev_private;
7778 u16 rgvswctl = I915_READ16(MEMSWCTL);
7779
/* Disable the rate-change interrupt and ack anything pending. */
7781 I915_WRITE(MEMINTREN, I915_READ(MEMINTREN) & ~MEMINT_EVAL_CHG_EN);
7782 I915_WRITE(MEMINTRSTS, MEMINT_EVAL_CHG);
7783 I915_WRITE(DEIER, I915_READ(DEIER) & ~DE_PCU_EVENT);
7784 I915_WRITE(DEIIR, DE_PCU_EVENT);
7785 I915_WRITE(DEIMR, I915_READ(DEIMR) | DE_PCU_EVENT);
7786
/* Go back to the starting frequency. */
7788 ironlake_set_drps(dev, dev_priv->fstart);
7789 msleep(1);
7790 rgvswctl |= MEMCTL_CMD_STS;
7791 I915_WRITE(MEMSWCTL, rgvswctl);
7792 msleep(1);
7793
7794}
7795
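/*
 * Request a new GPU frequency.  "val" is a ratio in the same units as
 * RP_STATE_CAP (believed to be 50MHz steps on Sandybridge, matching the
 * "* 50" used when frequencies are printed elsewhere in this file); it is
 * shifted into the software request field of GEN6_RPNSWREQ.
 */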
7796void gen6_set_rps(struct drm_device *dev, u8 val)
7797{
7798 struct drm_i915_private *dev_priv = dev->dev_private;
7799 u32 swreq;
7800
7801 swreq = (val & 0x3ff) << 25;
7802 I915_WRITE(GEN6_RPNSWREQ, swreq);
7803}
7804
7805void gen6_disable_rps(struct drm_device *dev)
7806{
7807 struct drm_i915_private *dev_priv = dev->dev_private;
7808
7809 I915_WRITE(GEN6_RPNSWREQ, 1 << 31);
7810 I915_WRITE(GEN6_PMINTRMSK, 0xffffffff);
7811 I915_WRITE(GEN6_PMIER, 0);
7812
/* Masking the PM interrupts here cannot race with the rps work item,
 * because the work item masks via a different register (PMIMR).  At worst
 * we leave stale bits in PMIIR/PMIMR, which gen6_enable_rps cleans up.
 */
7817 spin_lock_irq(&dev_priv->rps_lock);
7818 dev_priv->pm_iir = 0;
7819 spin_unlock_irq(&dev_priv->rps_lock);
7820
7821 I915_WRITE(GEN6_PMIIR, I915_READ(GEN6_PMIIR));
7822}
7823
7824static unsigned long intel_pxfreq(u32 vidfreq)
7825{
7826 unsigned long freq;
7827 int div = (vidfreq & 0x3f0000) >> 16;
7828 int post = (vidfreq & 0x3000) >> 12;
7829 int pre = (vidfreq & 0x7);
7830
7831 if (!pre)
7832 return 0;
7833
7834 freq = ((div * 133333) / ((1<<post) * pre));
7835
7836 return freq;
7837}
7838
7839void intel_init_emon(struct drm_device *dev)
7840{
7841 struct drm_i915_private *dev_priv = dev->dev_private;
7842 u32 lcfuse;
7843 u8 pxw[16];
7844 int i;
7845
7846
7847 I915_WRITE(ECR, 0);
7848 POSTING_READ(ECR);
7849
/* Program energy weights for the various events. */
7851 I915_WRITE(SDEW, 0x15040d00);
7852 I915_WRITE(CSIEW0, 0x007f0000);
7853 I915_WRITE(CSIEW1, 0x1e220004);
7854 I915_WRITE(CSIEW2, 0x04000004);
7855
7856 for (i = 0; i < 5; i++)
7857 I915_WRITE(PEW + (i * 4), 0);
7858 for (i = 0; i < 3; i++)
7859 I915_WRITE(DEW + (i * 4), 0);
7860
7861
7862 for (i = 0; i < 16; i++) {
7863 u32 pxvidfreq = I915_READ(PXVFREQ_BASE + (i * 4));
7864 unsigned long freq = intel_pxfreq(pxvidfreq);
7865 unsigned long vid = (pxvidfreq & PXVFREQ_PX_MASK) >>
7866 PXVFREQ_PX_SHIFT;
7867 unsigned long val;
7868
7869 val = vid * vid;
7870 val *= (freq / 1000);
7871 val *= 255;
7872 val /= (127*127*900);
7873 if (val > 0xff)
7874 DRM_ERROR("bad pxval: %ld\n", val);
7875 pxw[i] = val;
7876 }
7877
7878 pxw[14] = 0;
7879 pxw[15] = 0;
7880
7881 for (i = 0; i < 4; i++) {
7882 u32 val = (pxw[i*4] << 24) | (pxw[(i*4)+1] << 16) |
7883 (pxw[(i*4)+2] << 8) | (pxw[(i*4)+3]);
7884 I915_WRITE(PXW + (i * 4), val);
7885 }
7886
7887
7888 I915_WRITE(OGW0, 0);
7889 I915_WRITE(OGW1, 0);
7890 I915_WRITE(EG0, 0x00007f00);
7891 I915_WRITE(EG1, 0x0000000e);
7892 I915_WRITE(EG2, 0x000e0000);
7893 I915_WRITE(EG3, 0x68000300);
7894 I915_WRITE(EG4, 0x42000000);
7895 I915_WRITE(EG5, 0x00140031);
7896 I915_WRITE(EG6, 0);
7897 I915_WRITE(EG7, 0);
7898
7899 for (i = 0; i < 8; i++)
7900 I915_WRITE(PXWL + (i * 4), 0);
7901
7902
7903 I915_WRITE(ECR, 0x80000019);
7904
7905 lcfuse = I915_READ(LCFUSE02);
7906
7907 dev_priv->corr = (lcfuse & LCFUSE_HIV_MASK);
7908}
7909
7910static bool intel_enable_rc6(struct drm_device *dev)
7911{
/* Respect the kernel parameter if it is set. */
if (i915_enable_rc6 >= 0)
return i915_enable_rc6;

/* RC6 is disabled on Ironlake. */
if (INTEL_INFO(dev)->gen == 5)
return 0;

/* RC6 is disabled by default on Sandybridge as well. */
if (INTEL_INFO(dev)->gen == 6) {
DRM_DEBUG_DRIVER("Sandybridge: RC6 disabled\n");
return 0;
}
7931 DRM_DEBUG_DRIVER("RC6 enabled\n");
7932 return 1;
7933}
7934
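/*
 * gen6_enable_rps() programs both RC6 (render standby) and RPS (dynamic
 * GPU frequency scaling): first the RC sleep thresholds and enable mask,
 * then the turbo up/down thresholds and evaluation intervals, and finally
 * the min/max/current frequency limits taken from RP_STATE_CAP and the
 * PCU mailbox.
 */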
7935void gen6_enable_rps(struct drm_i915_private *dev_priv)
7936{
7937 u32 rp_state_cap = I915_READ(GEN6_RP_STATE_CAP);
7938 u32 gt_perf_status = I915_READ(GEN6_GT_PERF_STATUS);
7939 u32 pcu_mbox, rc6_mask = 0;
7940 int cur_freq, min_freq, max_freq;
7941 int i;
7942
/* Here begins a magic sequence of register writes to enable
 * auto-downclocking.
 *
 * Perhaps there might be some value in exposing these to
 * userspace...
 */
7949 I915_WRITE(GEN6_RC_STATE, 0);
7950 mutex_lock(&dev_priv->dev->struct_mutex);
7951 gen6_gt_force_wake_get(dev_priv);
7952
/* Disable RC while the thresholds below are (re)programmed. */
7954 I915_WRITE(GEN6_RC_CONTROL, 0);
7955
7956 I915_WRITE(GEN6_RC1_WAKE_RATE_LIMIT, 1000 << 16);
7957 I915_WRITE(GEN6_RC6_WAKE_RATE_LIMIT, 40 << 16 | 30);
7958 I915_WRITE(GEN6_RC6pp_WAKE_RATE_LIMIT, 30);
7959 I915_WRITE(GEN6_RC_EVALUATION_INTERVAL, 125000);
7960 I915_WRITE(GEN6_RC_IDLE_HYSTERSIS, 25);
7961
7962 for (i = 0; i < I915_NUM_RINGS; i++)
7963 I915_WRITE(RING_MAX_IDLE(dev_priv->ring[i].mmio_base), 10);
7964
7965 I915_WRITE(GEN6_RC_SLEEP, 0);
7966 I915_WRITE(GEN6_RC1e_THRESHOLD, 1000);
7967 I915_WRITE(GEN6_RC6_THRESHOLD, 50000);
7968 I915_WRITE(GEN6_RC6p_THRESHOLD, 100000);
7969 I915_WRITE(GEN6_RC6pp_THRESHOLD, 64000);
7970
7971 if (intel_enable_rc6(dev_priv->dev))
7972 rc6_mask = GEN6_RC_CTL_RC6p_ENABLE |
7973 GEN6_RC_CTL_RC6_ENABLE;
7974
7975 I915_WRITE(GEN6_RC_CONTROL,
7976 rc6_mask |
7977 GEN6_RC_CTL_EI_MODE(1) |
7978 GEN6_RC_CTL_HW_ENABLE);
7979
7980 I915_WRITE(GEN6_RPNSWREQ,
7981 GEN6_FREQUENCY(10) |
7982 GEN6_OFFSET(0) |
7983 GEN6_AGGRESSIVE_TURBO);
7984 I915_WRITE(GEN6_RC_VIDEO_FREQ,
7985 GEN6_FREQUENCY(12));
7986
7987 I915_WRITE(GEN6_RP_DOWN_TIMEOUT, 1000000);
7988 I915_WRITE(GEN6_RP_INTERRUPT_LIMITS,
7989 18 << 24 |
7990 6 << 16);
7991 I915_WRITE(GEN6_RP_UP_THRESHOLD, 10000);
7992 I915_WRITE(GEN6_RP_DOWN_THRESHOLD, 1000000);
7993 I915_WRITE(GEN6_RP_UP_EI, 100000);
7994 I915_WRITE(GEN6_RP_DOWN_EI, 5000000);
7995 I915_WRITE(GEN6_RP_IDLE_HYSTERSIS, 10);
7996 I915_WRITE(GEN6_RP_CONTROL,
7997 GEN6_RP_MEDIA_TURBO |
7998 GEN6_RP_USE_NORMAL_FREQ |
7999 GEN6_RP_MEDIA_IS_GFX |
8000 GEN6_RP_ENABLE |
8001 GEN6_RP_UP_BUSY_AVG |
8002 GEN6_RP_DOWN_IDLE_CONT);
8003
8004 if (wait_for((I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) == 0,
8005 500))
8006 DRM_ERROR("timeout waiting for pcode mailbox to become idle\n");
8007
8008 I915_WRITE(GEN6_PCODE_DATA, 0);
8009 I915_WRITE(GEN6_PCODE_MAILBOX,
8010 GEN6_PCODE_READY |
8011 GEN6_PCODE_WRITE_MIN_FREQ_TABLE);
8012 if (wait_for((I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) == 0,
8013 500))
8014 DRM_ERROR("timeout waiting for pcode mailbox to finish\n");
8015
8016 min_freq = (rp_state_cap & 0xff0000) >> 16;
8017 max_freq = rp_state_cap & 0xff;
8018 cur_freq = (gt_perf_status & 0xff00) >> 8;
8019
8020
8021 if (wait_for((I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) == 0,
8022 500))
8023 DRM_ERROR("timeout waiting for pcode mailbox to become idle\n");
8024 I915_WRITE(GEN6_PCODE_MAILBOX, GEN6_READ_OC_PARAMS);
8025 pcu_mbox = I915_READ(GEN6_PCODE_DATA);
8026 if (wait_for((I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) == 0,
8027 500))
8028 DRM_ERROR("timeout waiting for pcode mailbox to finish\n");
if (pcu_mbox & (1<<31)) { /* OC supported */
max_freq = pcu_mbox & 0xff;
DRM_DEBUG_DRIVER("overclocking supported, adjusting frequency max to %dMHz\n", max_freq * 50);
8032 }
8033
8034
8035 dev_priv->max_delay = max_freq;
8036 dev_priv->min_delay = min_freq;
8037 dev_priv->cur_delay = cur_freq;
8038
8039
8040 I915_WRITE(GEN6_PMIER,
8041 GEN6_PM_MBOX_EVENT |
8042 GEN6_PM_THERMAL_EVENT |
8043 GEN6_PM_RP_DOWN_TIMEOUT |
8044 GEN6_PM_RP_UP_THRESHOLD |
8045 GEN6_PM_RP_DOWN_THRESHOLD |
8046 GEN6_PM_RP_UP_EI_EXPIRED |
8047 GEN6_PM_RP_DOWN_EI_EXPIRED);
8048 spin_lock_irq(&dev_priv->rps_lock);
8049 WARN_ON(dev_priv->pm_iir != 0);
8050 I915_WRITE(GEN6_PMIMR, 0);
8051 spin_unlock_irq(&dev_priv->rps_lock);
8052
8053 I915_WRITE(GEN6_PMINTRMSK, 0);
8054
8055 gen6_gt_force_wake_put(dev_priv);
8056 mutex_unlock(&dev_priv->dev->struct_mutex);
8057}
8058
8059void gen6_update_ring_freq(struct drm_i915_private *dev_priv)
8060{
8061 int min_freq = 15;
8062 int gpu_freq, ia_freq, max_ia_freq;
8063 int scaling_factor = 180;
8064
8065 max_ia_freq = cpufreq_quick_get_max(0);
8066
/*
 * Default to the measured frequency if none was found; the PCU will
 * ensure we don't go over the limit.
 */
8070 if (!max_ia_freq)
8071 max_ia_freq = tsc_khz;
8072
/* Convert from kHz to MHz. */
8074 max_ia_freq /= 1000;
8075
8076 mutex_lock(&dev_priv->dev->struct_mutex);
8077
/*
 * For each potential GPU frequency, load a ring frequency we'd like
 * to use for memory access.  We do this by specifying the IA frequency
 * the PCU should use as a reference to determine the ring frequency.
 */
8083 for (gpu_freq = dev_priv->max_delay; gpu_freq >= dev_priv->min_delay;
8084 gpu_freq--) {
8085 int diff = dev_priv->max_delay - gpu_freq;
8086
/*
 * For GPU frequencies less than 750MHz, just use the lowest
 * ring freq.
 */
8091 if (gpu_freq < min_freq)
8092 ia_freq = 800;
8093 else
8094 ia_freq = max_ia_freq - ((diff * scaling_factor) / 2);
8095 ia_freq = DIV_ROUND_CLOSEST(ia_freq, 100);
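/*
 * Worked example with hypothetical numbers: for max_ia_freq = 3400MHz,
 * scaling_factor = 180 and a GPU step two below max_delay, the request is
 * 3400 - (2 * 180) / 2 = 3220MHz, rounded to 32 in units of 100MHz before
 * being written to the PCU mailbox below.
 */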
8096
8097 I915_WRITE(GEN6_PCODE_DATA,
8098 (ia_freq << GEN6_PCODE_FREQ_IA_RATIO_SHIFT) |
8099 gpu_freq);
8100 I915_WRITE(GEN6_PCODE_MAILBOX, GEN6_PCODE_READY |
8101 GEN6_PCODE_WRITE_MIN_FREQ_TABLE);
8102 if (wait_for((I915_READ(GEN6_PCODE_MAILBOX) &
8103 GEN6_PCODE_READY) == 0, 10)) {
8104 DRM_ERROR("pcode write of freq table timed out\n");
8105 continue;
8106 }
8107 }
8108
8109 mutex_unlock(&dev_priv->dev->struct_mutex);
8110}
8111
8112static void ironlake_init_clock_gating(struct drm_device *dev)
8113{
8114 struct drm_i915_private *dev_priv = dev->dev_private;
8115 uint32_t dspclk_gate = VRHUNIT_CLOCK_GATE_DISABLE;
8116
/* Required for FBC */
dspclk_gate |= DPFCUNIT_CLOCK_GATE_DISABLE |
DPFCRUNIT_CLOCK_GATE_DISABLE |
DPFDUNIT_CLOCK_GATE_DISABLE;
/* Required for CxSR */
dspclk_gate |= DPARBUNIT_CLOCK_GATE_DISABLE;
8123
8124 I915_WRITE(PCH_3DCGDIS0,
8125 MARIUNIT_CLOCK_GATE_DISABLE |
8126 SVSMUNIT_CLOCK_GATE_DISABLE);
8127 I915_WRITE(PCH_3DCGDIS1,
8128 VFMUNIT_CLOCK_GATE_DISABLE);
8129
8130 I915_WRITE(PCH_DSPCLK_GATE_D, dspclk_gate);
8131
/*
 * According to the spec the following bits should be set in order to
 * enable memory self-refresh:
 * bits 21/22 of 0x42004, bit 5 of 0x42020 and bit 15 of 0x45000.
 */
8139 I915_WRITE(ILK_DISPLAY_CHICKEN2,
8140 (I915_READ(ILK_DISPLAY_CHICKEN2) |
8141 ILK_DPARB_GATE | ILK_VSDPFD_FULL));
8142 I915_WRITE(ILK_DSPCLK_GATE,
8143 (I915_READ(ILK_DSPCLK_GATE) |
8144 ILK_DPARB_CLK_GATE));
8145 I915_WRITE(DISP_ARB_CTL,
8146 (I915_READ(DISP_ARB_CTL) |
8147 DISP_FBC_WM_DIS));
8148 I915_WRITE(WM3_LP_ILK, 0);
8149 I915_WRITE(WM2_LP_ILK, 0);
8150 I915_WRITE(WM1_LP_ILK, 0);
8151
/*
 * Per the hardware documentation, the following bits must be set
 * unconditionally to enable FBC:
 * bit 22 of 0x42000, bit 22 of 0x42004 and bits 7/8/9 of 0x42020.
 */
8159 if (IS_IRONLAKE_M(dev)) {
8160 I915_WRITE(ILK_DISPLAY_CHICKEN1,
8161 I915_READ(ILK_DISPLAY_CHICKEN1) |
8162 ILK_FBCQ_DIS);
8163 I915_WRITE(ILK_DISPLAY_CHICKEN2,
8164 I915_READ(ILK_DISPLAY_CHICKEN2) |
8165 ILK_DPARB_GATE);
8166 I915_WRITE(ILK_DSPCLK_GATE,
8167 I915_READ(ILK_DSPCLK_GATE) |
8168 ILK_DPFC_DIS1 |
8169 ILK_DPFC_DIS2 |
8170 ILK_CLK_FBC);
8171 }
8172
8173 I915_WRITE(ILK_DISPLAY_CHICKEN2,
8174 I915_READ(ILK_DISPLAY_CHICKEN2) |
8175 ILK_ELPIN_409_SELECT);
8176 I915_WRITE(_3D_CHICKEN2,
8177 _3D_CHICKEN2_WM_READ_PIPELINED << 16 |
8178 _3D_CHICKEN2_WM_READ_PIPELINED);
8179}
8180
8181static void gen6_init_clock_gating(struct drm_device *dev)
8182{
8183 struct drm_i915_private *dev_priv = dev->dev_private;
8184 int pipe;
8185 uint32_t dspclk_gate = VRHUNIT_CLOCK_GATE_DISABLE;
8186
8187 I915_WRITE(PCH_DSPCLK_GATE_D, dspclk_gate);
8188
8189 I915_WRITE(ILK_DISPLAY_CHICKEN2,
8190 I915_READ(ILK_DISPLAY_CHICKEN2) |
8191 ILK_ELPIN_409_SELECT);
8192
8193 I915_WRITE(WM3_LP_ILK, 0);
8194 I915_WRITE(WM2_LP_ILK, 0);
8195 I915_WRITE(WM1_LP_ILK, 0);
8196
/*
 * Per the BSpec, the RCPBUNIT and RCCUNIT clock gates must be disabled;
 * leaving them enabled leads to rendering corruption and hangs under
 * some 3D workloads.
 */
8207 I915_WRITE(GEN6_UCGCTL2,
8208 GEN6_RCPBUNIT_CLOCK_GATE_DISABLE |
8209 GEN6_RCCUNIT_CLOCK_GATE_DISABLE);
8210
/*
 * According to the spec, the following display chicken and clock gate
 * bits must be set to enable memory self-refresh and FBC; the per-pipe
 * trickle feed disable below is part of the same recipe.
 */
8220 I915_WRITE(ILK_DISPLAY_CHICKEN1,
8221 I915_READ(ILK_DISPLAY_CHICKEN1) |
8222 ILK_FBCQ_DIS | ILK_PABSTRETCH_DIS);
8223 I915_WRITE(ILK_DISPLAY_CHICKEN2,
8224 I915_READ(ILK_DISPLAY_CHICKEN2) |
8225 ILK_DPARB_GATE | ILK_VSDPFD_FULL);
8226 I915_WRITE(ILK_DSPCLK_GATE,
8227 I915_READ(ILK_DSPCLK_GATE) |
8228 ILK_DPARB_CLK_GATE |
8229 ILK_DPFD_CLK_GATE);
8230
8231 for_each_pipe(pipe) {
8232 I915_WRITE(DSPCNTR(pipe),
8233 I915_READ(DSPCNTR(pipe)) |
8234 DISPPLANE_TRICKLE_FEED_DISABLE);
8235 intel_flush_display_plane(dev_priv, pipe);
8236 }
8237}
8238
8239static void ivybridge_init_clock_gating(struct drm_device *dev)
8240{
8241 struct drm_i915_private *dev_priv = dev->dev_private;
8242 int pipe;
8243 uint32_t dspclk_gate = VRHUNIT_CLOCK_GATE_DISABLE;
8244
8245 I915_WRITE(PCH_DSPCLK_GATE_D, dspclk_gate);
8246
8247 I915_WRITE(WM3_LP_ILK, 0);
8248 I915_WRITE(WM2_LP_ILK, 0);
8249 I915_WRITE(WM1_LP_ILK, 0);
8250
8251 I915_WRITE(ILK_DSPCLK_GATE, IVB_VRHUNIT_CLK_GATE);
8252
8253 for_each_pipe(pipe) {
8254 I915_WRITE(DSPCNTR(pipe),
8255 I915_READ(DSPCNTR(pipe)) |
8256 DISPPLANE_TRICKLE_FEED_DISABLE);
8257 intel_flush_display_plane(dev_priv, pipe);
8258 }
8259}
8260
8261static void g4x_init_clock_gating(struct drm_device *dev)
8262{
8263 struct drm_i915_private *dev_priv = dev->dev_private;
8264 uint32_t dspclk_gate;
8265
8266 I915_WRITE(RENCLK_GATE_D1, 0);
8267 I915_WRITE(RENCLK_GATE_D2, VF_UNIT_CLOCK_GATE_DISABLE |
8268 GS_UNIT_CLOCK_GATE_DISABLE |
8269 CL_UNIT_CLOCK_GATE_DISABLE);
8270 I915_WRITE(RAMCLK_GATE_D, 0);
8271 dspclk_gate = VRHUNIT_CLOCK_GATE_DISABLE |
8272 OVRUNIT_CLOCK_GATE_DISABLE |
8273 OVCUNIT_CLOCK_GATE_DISABLE;
8274 if (IS_GM45(dev))
8275 dspclk_gate |= DSSUNIT_CLOCK_GATE_DISABLE;
8276 I915_WRITE(DSPCLK_GATE_D, dspclk_gate);
8277}
8278
8279static void crestline_init_clock_gating(struct drm_device *dev)
8280{
8281 struct drm_i915_private *dev_priv = dev->dev_private;
8282
8283 I915_WRITE(RENCLK_GATE_D1, I965_RCC_CLOCK_GATE_DISABLE);
8284 I915_WRITE(RENCLK_GATE_D2, 0);
8285 I915_WRITE(DSPCLK_GATE_D, 0);
8286 I915_WRITE(RAMCLK_GATE_D, 0);
8287 I915_WRITE16(DEUC, 0);
8288}
8289
8290static void broadwater_init_clock_gating(struct drm_device *dev)
8291{
8292 struct drm_i915_private *dev_priv = dev->dev_private;
8293
8294 I915_WRITE(RENCLK_GATE_D1, I965_RCZ_CLOCK_GATE_DISABLE |
8295 I965_RCC_CLOCK_GATE_DISABLE |
8296 I965_RCPB_CLOCK_GATE_DISABLE |
8297 I965_ISC_CLOCK_GATE_DISABLE |
8298 I965_FBC_CLOCK_GATE_DISABLE);
8299 I915_WRITE(RENCLK_GATE_D2, 0);
8300}
8301
8302static void gen3_init_clock_gating(struct drm_device *dev)
8303{
8304 struct drm_i915_private *dev_priv = dev->dev_private;
8305 u32 dstate = I915_READ(D_STATE);
8306
8307 dstate |= DSTATE_PLL_D3_OFF | DSTATE_GFX_CLOCK_GATING |
8308 DSTATE_DOT_CLOCK_GATING;
8309 I915_WRITE(D_STATE, dstate);
8310}
8311
8312static void i85x_init_clock_gating(struct drm_device *dev)
8313{
8314 struct drm_i915_private *dev_priv = dev->dev_private;
8315
8316 I915_WRITE(RENCLK_GATE_D1, SV_CLOCK_GATE_DISABLE);
8317}
8318
8319static void i830_init_clock_gating(struct drm_device *dev)
8320{
8321 struct drm_i915_private *dev_priv = dev->dev_private;
8322
8323 I915_WRITE(DSPCLK_GATE_D, OVRUNIT_CLOCK_GATE_DISABLE);
8324}
8325
8326static void ibx_init_clock_gating(struct drm_device *dev)
8327{
8328 struct drm_i915_private *dev_priv = dev->dev_private;
8329
/*
 * On Ibex Peak and Cougar Point, we need to disable clock gating for
 * the panel power sequencer or it will fail to start up when no ports
 * are active.
 */
8335 I915_WRITE(SOUTH_DSPCLK_GATE_D, PCH_DPLSUNIT_CLOCK_GATE_DISABLE);
8336}
8337
8338static void cpt_init_clock_gating(struct drm_device *dev)
8339{
8340 struct drm_i915_private *dev_priv = dev->dev_private;
8341 int pipe;
8342
/*
 * On Ibex Peak and Cougar Point, we need to disable clock gating for
 * the panel power sequencer or it will fail to start up when no ports
 * are active.
 */
8348 I915_WRITE(SOUTH_DSPCLK_GATE_D, PCH_DPLSUNIT_CLOCK_GATE_DISABLE);
8349 I915_WRITE(SOUTH_CHICKEN2, I915_READ(SOUTH_CHICKEN2) |
8350 DPLS_EDP_PPS_FIX_DIS);
8351
8352 for_each_pipe(pipe)
8353 I915_WRITE(TRANS_CHICKEN2(pipe), TRANS_AUTOTRAIN_GEN_STALL_DIS);
8354}
8355
8356static void ironlake_teardown_rc6(struct drm_device *dev)
8357{
8358 struct drm_i915_private *dev_priv = dev->dev_private;
8359
8360 if (dev_priv->renderctx) {
8361 i915_gem_object_unpin(dev_priv->renderctx);
8362 drm_gem_object_unreference(&dev_priv->renderctx->base);
8363 dev_priv->renderctx = NULL;
8364 }
8365
8366 if (dev_priv->pwrctx) {
8367 i915_gem_object_unpin(dev_priv->pwrctx);
8368 drm_gem_object_unreference(&dev_priv->pwrctx->base);
8369 dev_priv->pwrctx = NULL;
8370 }
8371}
8372
8373static void ironlake_disable_rc6(struct drm_device *dev)
8374{
8375 struct drm_i915_private *dev_priv = dev->dev_private;
8376
8377 if (I915_READ(PWRCTXA)) {
8378
8379 I915_WRITE(RSTDBYCTL, I915_READ(RSTDBYCTL) | RCX_SW_EXIT);
8380 wait_for(((I915_READ(RSTDBYCTL) & RSX_STATUS_MASK) == RSX_STATUS_ON),
8381 50);
8382
8383 I915_WRITE(PWRCTXA, 0);
8384 POSTING_READ(PWRCTXA);
8385
8386 I915_WRITE(RSTDBYCTL, I915_READ(RSTDBYCTL) & ~RCX_SW_EXIT);
8387 POSTING_READ(RSTDBYCTL);
8388 }
8389
8390 ironlake_teardown_rc6(dev);
8391}
8392
8393static int ironlake_setup_rc6(struct drm_device *dev)
8394{
8395 struct drm_i915_private *dev_priv = dev->dev_private;
8396
8397 if (dev_priv->renderctx == NULL)
8398 dev_priv->renderctx = intel_alloc_context_page(dev);
8399 if (!dev_priv->renderctx)
8400 return -ENOMEM;
8401
8402 if (dev_priv->pwrctx == NULL)
8403 dev_priv->pwrctx = intel_alloc_context_page(dev);
8404 if (!dev_priv->pwrctx) {
8405 ironlake_teardown_rc6(dev);
8406 return -ENOMEM;
8407 }
8408
8409 return 0;
8410}
8411
8412void ironlake_enable_rc6(struct drm_device *dev)
8413{
8414 struct drm_i915_private *dev_priv = dev->dev_private;
8415 int ret;
8416
/* rc6 is disabled by default due to repeated reports of hanging during
 * boot and resume.
 */
8420 if (!intel_enable_rc6(dev))
8421 return;
8422
8423 mutex_lock(&dev->struct_mutex);
8424 ret = ironlake_setup_rc6(dev);
8425 if (ret) {
8426 mutex_unlock(&dev->struct_mutex);
8427 return;
8428 }
8429
/*
 * The GPU can automatically power down the render unit if given a page
 * to save state.
 */
8434 ret = BEGIN_LP_RING(6);
8435 if (ret) {
8436 ironlake_teardown_rc6(dev);
8437 mutex_unlock(&dev->struct_mutex);
8438 return;
8439 }
8440
8441 OUT_RING(MI_SUSPEND_FLUSH | MI_SUSPEND_FLUSH_EN);
8442 OUT_RING(MI_SET_CONTEXT);
8443 OUT_RING(dev_priv->renderctx->gtt_offset |
8444 MI_MM_SPACE_GTT |
8445 MI_SAVE_EXT_STATE_EN |
8446 MI_RESTORE_EXT_STATE_EN |
8447 MI_RESTORE_INHIBIT);
8448 OUT_RING(MI_SUSPEND_FLUSH);
8449 OUT_RING(MI_NOOP);
8450 OUT_RING(MI_FLUSH);
8451 ADVANCE_LP_RING();
8452
/*
 * Wait for the command parser to advance past MI_SET_CONTEXT.  The HW
 * does an implicit flush; combined with the MI_FLUSH above it should be
 * safe to assume that renderctx is valid.
 */
8458 ret = intel_wait_ring_idle(LP_RING(dev_priv));
8459 if (ret) {
DRM_ERROR("failed to enable ironlake power savings\n");
8461 ironlake_teardown_rc6(dev);
8462 mutex_unlock(&dev->struct_mutex);
8463 return;
8464 }
8465
8466 I915_WRITE(PWRCTXA, dev_priv->pwrctx->gtt_offset | PWRCTX_EN);
8467 I915_WRITE(RSTDBYCTL, I915_READ(RSTDBYCTL) & ~RCX_SW_EXIT);
8468 mutex_unlock(&dev->struct_mutex);
8469}
8470
8471void intel_init_clock_gating(struct drm_device *dev)
8472{
8473 struct drm_i915_private *dev_priv = dev->dev_private;
8474
8475 dev_priv->display.init_clock_gating(dev);
8476
8477 if (dev_priv->display.init_pch_clock_gating)
8478 dev_priv->display.init_pch_clock_gating(dev);
8479}
8480
8481
8482static void intel_init_display(struct drm_device *dev)
8483{
8484 struct drm_i915_private *dev_priv = dev->dev_private;
8485
/* Core mode-set entry points; PCH platforms use the Ironlake paths. */
8487 if (HAS_PCH_SPLIT(dev)) {
8488 dev_priv->display.dpms = ironlake_crtc_dpms;
8489 dev_priv->display.crtc_mode_set = ironlake_crtc_mode_set;
8490 dev_priv->display.update_plane = ironlake_update_plane;
8491 } else {
8492 dev_priv->display.dpms = i9xx_crtc_dpms;
8493 dev_priv->display.crtc_mode_set = i9xx_crtc_mode_set;
8494 dev_priv->display.update_plane = i9xx_update_plane;
8495 }
8496
8497 if (I915_HAS_FBC(dev)) {
8498 if (HAS_PCH_SPLIT(dev)) {
8499 dev_priv->display.fbc_enabled = ironlake_fbc_enabled;
8500 dev_priv->display.enable_fbc = ironlake_enable_fbc;
8501 dev_priv->display.disable_fbc = ironlake_disable_fbc;
8502 } else if (IS_GM45(dev)) {
8503 dev_priv->display.fbc_enabled = g4x_fbc_enabled;
8504 dev_priv->display.enable_fbc = g4x_enable_fbc;
8505 dev_priv->display.disable_fbc = g4x_disable_fbc;
8506 } else if (IS_CRESTLINE(dev)) {
8507 dev_priv->display.fbc_enabled = i8xx_fbc_enabled;
8508 dev_priv->display.enable_fbc = i8xx_enable_fbc;
8509 dev_priv->display.disable_fbc = i8xx_disable_fbc;
8510 }
8511
8512 }
8513
/* Pick the helper that returns the core display clock speed. */
8515 if (IS_I945G(dev) || (IS_G33(dev) && !IS_PINEVIEW_M(dev)))
8516 dev_priv->display.get_display_clock_speed =
8517 i945_get_display_clock_speed;
8518 else if (IS_I915G(dev))
8519 dev_priv->display.get_display_clock_speed =
8520 i915_get_display_clock_speed;
8521 else if (IS_I945GM(dev) || IS_845G(dev) || IS_PINEVIEW_M(dev))
8522 dev_priv->display.get_display_clock_speed =
8523 i9xx_misc_get_display_clock_speed;
8524 else if (IS_I915GM(dev))
8525 dev_priv->display.get_display_clock_speed =
8526 i915gm_get_display_clock_speed;
8527 else if (IS_I865G(dev))
8528 dev_priv->display.get_display_clock_speed =
8529 i865_get_display_clock_speed;
8530 else if (IS_I85X(dev))
8531 dev_priv->display.get_display_clock_speed =
8532 i855_get_display_clock_speed;
8533 else
8534 dev_priv->display.get_display_clock_speed =
8535 i830_get_display_clock_speed;
8536
8537
8538 if (HAS_PCH_SPLIT(dev)) {
8539 dev_priv->display.force_wake_get = __gen6_gt_force_wake_get;
8540 dev_priv->display.force_wake_put = __gen6_gt_force_wake_put;
8541
/* Ivybridge parts may require the multi-threaded forcewake protocol. */
8543 if (IS_IVYBRIDGE(dev)) {
8544 u32 ecobus;
8545
8546 mutex_lock(&dev->struct_mutex);
8547 __gen6_gt_force_wake_mt_get(dev_priv);
8548 ecobus = I915_READ(ECOBUS);
8549 __gen6_gt_force_wake_mt_put(dev_priv);
8550 mutex_unlock(&dev->struct_mutex);
8551
8552 if (ecobus & FORCEWAKE_MT_ENABLE) {
8553 DRM_DEBUG_KMS("Using MT version of forcewake\n");
8554 dev_priv->display.force_wake_get =
8555 __gen6_gt_force_wake_mt_get;
8556 dev_priv->display.force_wake_put =
8557 __gen6_gt_force_wake_mt_put;
8558 }
8559 }
8560
8561 if (HAS_PCH_IBX(dev))
8562 dev_priv->display.init_pch_clock_gating = ibx_init_clock_gating;
8563 else if (HAS_PCH_CPT(dev))
8564 dev_priv->display.init_pch_clock_gating = cpt_init_clock_gating;
8565
8566 if (IS_GEN5(dev)) {
8567 if (I915_READ(MLTR_ILK) & ILK_SRLT_MASK)
8568 dev_priv->display.update_wm = ironlake_update_wm;
8569 else {
8570 DRM_DEBUG_KMS("Failed to get proper latency. "
8571 "Disable CxSR\n");
8572 dev_priv->display.update_wm = NULL;
8573 }
8574 dev_priv->display.fdi_link_train = ironlake_fdi_link_train;
8575 dev_priv->display.init_clock_gating = ironlake_init_clock_gating;
8576 dev_priv->display.write_eld = ironlake_write_eld;
8577 } else if (IS_GEN6(dev)) {
8578 if (SNB_READ_WM0_LATENCY()) {
8579 dev_priv->display.update_wm = sandybridge_update_wm;
8580 } else {
8581 DRM_DEBUG_KMS("Failed to read display plane latency. "
8582 "Disable CxSR\n");
8583 dev_priv->display.update_wm = NULL;
8584 }
8585 dev_priv->display.fdi_link_train = gen6_fdi_link_train;
8586 dev_priv->display.init_clock_gating = gen6_init_clock_gating;
8587 dev_priv->display.write_eld = ironlake_write_eld;
8588 } else if (IS_IVYBRIDGE(dev)) {
8589
8590 dev_priv->display.fdi_link_train = ivb_manual_fdi_link_train;
8591 if (SNB_READ_WM0_LATENCY()) {
8592 dev_priv->display.update_wm = sandybridge_update_wm;
8593 } else {
8594 DRM_DEBUG_KMS("Failed to read display plane latency. "
8595 "Disable CxSR\n");
8596 dev_priv->display.update_wm = NULL;
8597 }
8598 dev_priv->display.init_clock_gating = ivybridge_init_clock_gating;
8599 dev_priv->display.write_eld = ironlake_write_eld;
8600 } else
8601 dev_priv->display.update_wm = NULL;
8602 } else if (IS_PINEVIEW(dev)) {
8603 if (!intel_get_cxsr_latency(IS_PINEVIEW_G(dev),
8604 dev_priv->is_ddr3,
8605 dev_priv->fsb_freq,
8606 dev_priv->mem_freq)) {
8607 DRM_INFO("failed to find known CxSR latency "
8608 "(found ddr%s fsb freq %d, mem freq %d), "
8609 "disabling CxSR\n",
8610 (dev_priv->is_ddr3 == 1) ? "3" : "2",
8611 dev_priv->fsb_freq, dev_priv->mem_freq);
8612
8613 pineview_disable_cxsr(dev);
8614 dev_priv->display.update_wm = NULL;
8615 } else
8616 dev_priv->display.update_wm = pineview_update_wm;
8617 dev_priv->display.init_clock_gating = gen3_init_clock_gating;
8618 } else if (IS_G4X(dev)) {
8619 dev_priv->display.write_eld = g4x_write_eld;
8620 dev_priv->display.update_wm = g4x_update_wm;
8621 dev_priv->display.init_clock_gating = g4x_init_clock_gating;
8622 } else if (IS_GEN4(dev)) {
8623 dev_priv->display.update_wm = i965_update_wm;
8624 if (IS_CRESTLINE(dev))
8625 dev_priv->display.init_clock_gating = crestline_init_clock_gating;
8626 else if (IS_BROADWATER(dev))
8627 dev_priv->display.init_clock_gating = broadwater_init_clock_gating;
8628 } else if (IS_GEN3(dev)) {
8629 dev_priv->display.update_wm = i9xx_update_wm;
8630 dev_priv->display.get_fifo_size = i9xx_get_fifo_size;
8631 dev_priv->display.init_clock_gating = gen3_init_clock_gating;
8632 } else if (IS_I865G(dev)) {
8633 dev_priv->display.update_wm = i830_update_wm;
8634 dev_priv->display.init_clock_gating = i85x_init_clock_gating;
8635 dev_priv->display.get_fifo_size = i830_get_fifo_size;
8636 } else if (IS_I85X(dev)) {
8637 dev_priv->display.update_wm = i9xx_update_wm;
8638 dev_priv->display.get_fifo_size = i85x_get_fifo_size;
8639 dev_priv->display.init_clock_gating = i85x_init_clock_gating;
8640 } else {
8641 dev_priv->display.update_wm = i830_update_wm;
8642 dev_priv->display.init_clock_gating = i830_init_clock_gating;
8643 if (IS_845G(dev))
8644 dev_priv->display.get_fifo_size = i845_get_fifo_size;
8645 else
8646 dev_priv->display.get_fifo_size = i830_get_fifo_size;
8647 }
8648
/* Default to the stub that reports page flips as unsupported. */
8650 dev_priv->display.queue_flip = intel_default_queue_flip;
8651
8652 switch (INTEL_INFO(dev)->gen) {
8653 case 2:
8654 dev_priv->display.queue_flip = intel_gen2_queue_flip;
8655 break;
8656
8657 case 3:
8658 dev_priv->display.queue_flip = intel_gen3_queue_flip;
8659 break;
8660
8661 case 4:
8662 case 5:
8663 dev_priv->display.queue_flip = intel_gen4_queue_flip;
8664 break;
8665
8666 case 6:
8667 dev_priv->display.queue_flip = intel_gen6_queue_flip;
8668 break;
8669 case 7:
8670 dev_priv->display.queue_flip = intel_gen7_queue_flip;
8671 break;
8672 }
8673}
8674
/*
 * Some machines misbehave if pipe A is ever shut off; QUIRK_PIPEA_FORCE
 * keeps it running on those systems.
 */
8680static void quirk_pipea_force(struct drm_device *dev)
8681{
8682 struct drm_i915_private *dev_priv = dev->dev_private;
8683
8684 dev_priv->quirks |= QUIRK_PIPEA_FORCE;
8685 DRM_DEBUG_DRIVER("applying pipe a force quirk\n");
8686}
8687
8688
/* Some machines do not work with SSC on LVDS; force it off for them. */
8691static void quirk_ssc_force_disable(struct drm_device *dev)
8692{
8693 struct drm_i915_private *dev_priv = dev->dev_private;
8694 dev_priv->quirks |= QUIRK_LVDS_SSC_DISABLE;
8695}
8696
8697struct intel_quirk {
8698 int device;
8699 int subsystem_vendor;
8700 int subsystem_device;
8701 void (*hook)(struct drm_device *dev);
8702};
8703
struct intel_quirk intel_quirks[] = {
	/* Individual machines that need pipe A forced on */
	{ 0x2a42, 0x103c, 0x30eb, quirk_pipea_force },
	{ 0x27ae, 0x103c, 0x361a, quirk_pipea_force },
	{ 0x3577, 0x1014, 0x0505, quirk_pipea_force },
	{ 0x2592, 0x1179, 0x0001, quirk_pipea_force },
	{ 0x3577, 0x1014, 0x0513, quirk_pipea_force },
	{ 0x2782, 0x17aa, 0x201a, quirk_pipea_force },

	/* These devices need pipe A forced on regardless of subsystem */
	{ 0x3582, PCI_ANY_ID, PCI_ANY_ID, quirk_pipea_force },
	{ 0x2562, PCI_ANY_ID, PCI_ANY_ID, quirk_pipea_force },

	/* Machines that cannot use SSC on LVDS */
	{ 0x0046, 0x17aa, 0x3920, quirk_ssc_force_disable },
	{ 0x0046, 0x104d, 0x9076, quirk_ssc_force_disable },
};

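/* Walk the quirk table and apply every entry that matches this PCI device */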
static void intel_init_quirks(struct drm_device *dev)
{
	struct pci_dev *d = dev->pdev;
	int i;

	for (i = 0; i < ARRAY_SIZE(intel_quirks); i++) {
		struct intel_quirk *q = &intel_quirks[i];

		if (d->device == q->device &&
		    (d->subsystem_vendor == q->subsystem_vendor ||
		     q->subsystem_vendor == PCI_ANY_ID) &&
		    (d->subsystem_device == q->subsystem_device ||
		     q->subsystem_device == PCI_ANY_ID))
			q->hook(dev);
	}
}

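/* Disable the VGA plane that we never use */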
static void i915_disable_vga(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	u8 sr1;
	u32 vga_reg;

	if (HAS_PCH_SPLIT(dev))
		vga_reg = CPU_VGACNTRL;
	else
		vga_reg = VGACNTRL;

	vga_get_uninterruptible(dev->pdev, VGA_RSRC_LEGACY_IO);
	outb(1, VGA_SR_INDEX);
	sr1 = inb(VGA_SR_DATA);
	outb(sr1 | 1<<5, VGA_SR_DATA);	/* SR01 bit 5: screen off */
	vga_put(dev->pdev, VGA_RSRC_LEGACY_IO);
	udelay(300);

	I915_WRITE(vga_reg, VGA_DISP_DISABLE);
	POSTING_READ(vga_reg);
}

void intel_modeset_init(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	int i;

	drm_mode_config_init(dev);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;

	dev->mode_config.funcs = (void *)&intel_mode_funcs;

	intel_init_quirks(dev);

	intel_init_display(dev);

	if (IS_GEN2(dev)) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else if (IS_GEN3(dev)) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	}
	dev->mode_config.fb_base = dev->agp->base;

	DRM_DEBUG_KMS("%d display pipe%s available.\n",
		      dev_priv->num_pipe, dev_priv->num_pipe > 1 ? "s" : "");

	for (i = 0; i < dev_priv->num_pipe; i++) {
		intel_crtc_init(dev, i);
	}

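	/* Just disable the unused VGA plane once at startup */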
	i915_disable_vga(dev);
	intel_setup_outputs(dev);

	intel_init_clock_gating(dev);

	if (IS_IRONLAKE_M(dev)) {
		ironlake_enable_drps(dev);
		intel_init_emon(dev);
	}

	if (IS_GEN6(dev) || IS_GEN7(dev)) {
		gen6_enable_rps(dev_priv);
		gen6_update_ring_freq(dev_priv);
	}

	INIT_WORK(&dev_priv->idle_work, intel_idle_update);
	setup_timer(&dev_priv->idle_timer, intel_gpu_idle_timer,
		    (unsigned long)dev);
}

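/* Modeset setup that has to wait until GEM is up and running */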
void intel_modeset_gem_init(struct drm_device *dev)
{
	if (IS_IRONLAKE_M(dev))
		ironlake_enable_rc6(dev);

	intel_setup_overlay(dev);
}

void intel_modeset_cleanup(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_crtc *crtc;
	struct intel_crtc *intel_crtc;

	drm_kms_helper_poll_fini(dev);
	mutex_lock(&dev->struct_mutex);

	intel_unregister_dsm_handler();

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		/* Skip inactive CRTCs */
		if (!crtc->fb)
			continue;

		intel_crtc = to_intel_crtc(crtc);
		intel_increase_pllclock(crtc);
	}

	intel_disable_fbc(dev);

	if (IS_IRONLAKE_M(dev))
		ironlake_disable_drps(dev);
	if (IS_GEN6(dev) || IS_GEN7(dev))
		gen6_disable_rps(dev);

	if (IS_IRONLAKE_M(dev))
		ironlake_disable_rc6(dev);

	mutex_unlock(&dev->struct_mutex);

	/* Disable the irq before mode object teardown, for the irq might
	 * enqueue unpin/hotplug work. */
	drm_irq_uninstall(dev);
	cancel_work_sync(&dev_priv->hotplug_work);
	cancel_work_sync(&dev_priv->rps_work);

	/* flush any delayed tasks or pending work */
	flush_scheduled_work();

	/* Shut off idle work before the crtcs get freed. */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		intel_crtc = to_intel_crtc(crtc);
		del_timer_sync(&intel_crtc->idle_timer);
	}
	del_timer_sync(&dev_priv->idle_timer);
	cancel_work_sync(&dev_priv->idle_work);

	drm_mode_config_cleanup(dev);
}

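/*
 * Return which encoder is currently attached for connector.
 */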
struct drm_encoder *intel_best_encoder(struct drm_connector *connector)
{
	return &intel_attached_encoder(connector)->base;
}

void intel_connector_attach_encoder(struct intel_connector *connector,
				    struct intel_encoder *encoder)
{
	connector->encoder = encoder;
	drm_mode_connector_attach_encoder(&connector->base,
					  &encoder->base);
}

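/*
 * Set VGA decode state - true == enable VGA decode
 */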
int intel_modeset_vga_set_state(struct drm_device *dev, bool state)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	u16 gmch_ctrl;

	pci_read_config_word(dev_priv->bridge_dev, INTEL_GMCH_CTRL, &gmch_ctrl);
	if (state)
		gmch_ctrl &= ~INTEL_GMCH_VGA_DISABLE;
	else
		gmch_ctrl |= INTEL_GMCH_VGA_DISABLE;
	pci_write_config_word(dev_priv->bridge_dev, INTEL_GMCH_CTRL, gmch_ctrl);
	return 0;
}

#ifdef CONFIG_DEBUG_FS
#include <linux/seq_file.h>

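/*
 * Snapshot of display hardware state (cursor, plane and pipe timing
 * registers for the first two pipes), captured for the debugfs error dump.
 */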
struct intel_display_error_state {
	struct intel_cursor_error_state {
		u32 control;
		u32 position;
		u32 base;
		u32 size;
	} cursor[2];

	struct intel_pipe_error_state {
		u32 conf;
		u32 source;

		u32 htotal;
		u32 hblank;
		u32 hsync;
		u32 vtotal;
		u32 vblank;
		u32 vsync;
	} pipe[2];

	struct intel_plane_error_state {
		u32 control;
		u32 stride;
		u32 size;
		u32 pos;
		u32 addr;
		u32 surface;
		u32 tile_offset;
	} plane[2];
};

struct intel_display_error_state *
intel_display_capture_error_state(struct drm_device *dev)
{
	drm_i915_private_t *dev_priv = dev->dev_private;
	struct intel_display_error_state *error;
	int i;

	/* May be called from the error capture path, so don't sleep */
	error = kmalloc(sizeof(*error), GFP_ATOMIC);
	if (error == NULL)
		return NULL;

	for (i = 0; i < 2; i++) {
		error->cursor[i].control = I915_READ(CURCNTR(i));
		error->cursor[i].position = I915_READ(CURPOS(i));
		error->cursor[i].base = I915_READ(CURBASE(i));

		error->plane[i].control = I915_READ(DSPCNTR(i));
		error->plane[i].stride = I915_READ(DSPSTRIDE(i));
		error->plane[i].size = I915_READ(DSPSIZE(i));
		error->plane[i].pos = I915_READ(DSPPOS(i));
		error->plane[i].addr = I915_READ(DSPADDR(i));
		if (INTEL_INFO(dev)->gen >= 4) {
			error->plane[i].surface = I915_READ(DSPSURF(i));
			error->plane[i].tile_offset = I915_READ(DSPTILEOFF(i));
		}

		error->pipe[i].conf = I915_READ(PIPECONF(i));
		error->pipe[i].source = I915_READ(PIPESRC(i));
		error->pipe[i].htotal = I915_READ(HTOTAL(i));
		error->pipe[i].hblank = I915_READ(HBLANK(i));
		error->pipe[i].hsync = I915_READ(HSYNC(i));
		error->pipe[i].vtotal = I915_READ(VTOTAL(i));
		error->pipe[i].vblank = I915_READ(VBLANK(i));
		error->pipe[i].vsync = I915_READ(VSYNC(i));
	}

	return error;
}

void
intel_display_print_error_state(struct seq_file *m,
				struct drm_device *dev,
				struct intel_display_error_state *error)
{
	int i;

	for (i = 0; i < 2; i++) {
		seq_printf(m, "Pipe [%d]:\n", i);
		seq_printf(m, " CONF: %08x\n", error->pipe[i].conf);
		seq_printf(m, " SRC: %08x\n", error->pipe[i].source);
		seq_printf(m, " HTOTAL: %08x\n", error->pipe[i].htotal);
		seq_printf(m, " HBLANK: %08x\n", error->pipe[i].hblank);
		seq_printf(m, " HSYNC: %08x\n", error->pipe[i].hsync);
		seq_printf(m, " VTOTAL: %08x\n", error->pipe[i].vtotal);
		seq_printf(m, " VBLANK: %08x\n", error->pipe[i].vblank);
		seq_printf(m, " VSYNC: %08x\n", error->pipe[i].vsync);

		seq_printf(m, "Plane [%d]:\n", i);
		seq_printf(m, " CNTR: %08x\n", error->plane[i].control);
		seq_printf(m, " STRIDE: %08x\n", error->plane[i].stride);
		seq_printf(m, " SIZE: %08x\n", error->plane[i].size);
		seq_printf(m, " POS: %08x\n", error->plane[i].pos);
		seq_printf(m, " ADDR: %08x\n", error->plane[i].addr);
		if (INTEL_INFO(dev)->gen >= 4) {
			seq_printf(m, " SURF: %08x\n", error->plane[i].surface);
			seq_printf(m, " TILEOFF: %08x\n", error->plane[i].tile_offset);
		}

		seq_printf(m, "Cursor [%d]:\n", i);
		seq_printf(m, " CNTR: %08x\n", error->cursor[i].control);
		seq_printf(m, " POS: %08x\n", error->cursor[i].position);
		seq_printf(m, " BASE: %08x\n", error->cursor[i].base);
	}
}
#endif