/*
 * Copyright © 2008-2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <drm/drmP.h>
#include <drm/i915_drm.h>
#include "i915_drv.h"

/**
 * DOC: fence register handling
 *
 * Important to avoid confusions: "fences" in the i915 driver are not
 * execution fences used to track command completion but hardware detiler
 * objects which wrap a given range of the global GTT. Each platform has only
 * a fairly limited set of these objects.
 *
 * Fences are used to detile GTT memory mappings. They're also connected to
 * the hardware frontbuffer render tracking and hence interact with
 * frontbuffer compression. Furthermore on older platforms fences are
 * required for tiled objects used by the display engine. They can also be
 * used by the render engine - they're required for blitter commands and are
 * optional for render commands. But on gen4+ both display (with the
 * exception of fbc) and rendering have their own tiling state bits and
 * don't need fences.
 *
 * Also note that fences only support X and Y tiling and hence can't be used
 * for the fancier new tiling formats like W, Ys and Yf.
 *
 * Finally note that because fences are such a restricted resource they're
 * dynamically associated with objects. Furthermore fence state is committed
 * to the hardware lazily to avoid unnecessary stalls on gen2/3. Therefore
 * code must explicitly call i915_vma_get_fence() to synchronize fencing
 * status for cpu access. Also see i915_vma_get_fence() and
 * i915_vma_put_fence().
 */
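/* Fence register updates below are always performed via direct MMIO;
 * "pipelined" is hard-coded to 0, so the conditionals it guards always take
 * the immediate-write branch (seemingly a leftover from a removed
 * pipelined-update path).
 */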
#define pipelined 0

static void i965_write_fence_reg(struct drm_i915_fence_reg *fence,
				 struct i915_vma *vma)
{
	i915_reg_t fence_reg_lo, fence_reg_hi;
	int fence_pitch_shift;
	u64 val;

	if (INTEL_INFO(fence->i915)->gen >= 6) {
		fence_reg_lo = FENCE_REG_GEN6_LO(fence->id);
		fence_reg_hi = FENCE_REG_GEN6_HI(fence->id);
		fence_pitch_shift = GEN6_FENCE_PITCH_SHIFT;
	} else {
		fence_reg_lo = FENCE_REG_965_LO(fence->id);
		fence_reg_hi = FENCE_REG_965_HI(fence->id);
		fence_pitch_shift = I965_FENCE_PITCH_SHIFT;
	}

	val = 0;
	if (vma) {
		unsigned int stride = i915_gem_object_get_stride(vma->obj);

		GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma));
		GEM_BUG_ON(!IS_ALIGNED(vma->node.start, I965_FENCE_PAGE));
		GEM_BUG_ON(!IS_ALIGNED(vma->fence_size, I965_FENCE_PAGE));
		GEM_BUG_ON(!IS_ALIGNED(stride, 128));

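		/* The gen4+ fence is a 64-bit descriptor: the address of the
		 * last page of the fenced region goes in the upper 32 bits,
		 * the start address in the lower 32 bits, with the pitch
		 * encoded as a count of 128-byte units minus one, plus the
		 * Y-tiling and VALID bits.
		 */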
		val = (vma->node.start + vma->fence_size - I965_FENCE_PAGE) << 32;
		val |= vma->node.start;
		val |= (u64)((stride / 128) - 1) << fence_pitch_shift;
		if (i915_gem_object_get_tiling(vma->obj) == I915_TILING_Y)
			val |= BIT(I965_FENCE_TILING_Y_SHIFT);
		val |= I965_FENCE_REG_VALID;
	}

	if (!pipelined) {
		struct drm_i915_private *dev_priv = fence->i915;

		/* To w/a incoherency with non-atomic 64-bit register updates,
		 * we split the 64-bit update into two 32-bit writes. In order
		 * for a partial fence not to be evaluated between writes, we
		 * precede the update with write to turn off the fence
		 * register, and only enable the fence as the last step.
		 *
		 * For extra levels of paranoia, we make sure each step lands
		 * before applying the next step.
		 */
		I915_WRITE(fence_reg_lo, 0);
		POSTING_READ(fence_reg_lo);

		I915_WRITE(fence_reg_hi, upper_32_bits(val));
		I915_WRITE(fence_reg_lo, lower_32_bits(val));
		POSTING_READ(fence_reg_lo);
	}
}

static void i915_write_fence_reg(struct drm_i915_fence_reg *fence,
				 struct i915_vma *vma)
{
	u32 val;

	val = 0;
	if (vma) {
		unsigned int tiling = i915_gem_object_get_tiling(vma->obj);
		bool is_y_tiled = tiling == I915_TILING_Y;
		unsigned int stride = i915_gem_object_get_stride(vma->obj);

		GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma));
		GEM_BUG_ON(vma->node.start & ~I915_FENCE_START_MASK);
		GEM_BUG_ON(!is_power_of_2(vma->fence_size));
		GEM_BUG_ON(!IS_ALIGNED(vma->node.start, vma->fence_size));

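		/* Gen3 fences cover a naturally aligned, power-of-two sized
		 * range and encode the pitch as a power-of-two number of
		 * 512-byte units (128-byte units for Y tiling on parts with
		 * 128-byte wide Y tiles).
		 */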
		if (is_y_tiled && HAS_128_BYTE_Y_TILING(fence->i915))
			stride /= 128;
		else
			stride /= 512;
		GEM_BUG_ON(!is_power_of_2(stride));

		val = vma->node.start;
		if (is_y_tiled)
			val |= BIT(I830_FENCE_TILING_Y_SHIFT);
		val |= I915_FENCE_SIZE_BITS(vma->fence_size);
		val |= ilog2(stride) << I830_FENCE_PITCH_SHIFT;
		val |= I830_FENCE_REG_VALID;
	}

	if (!pipelined) {
		struct drm_i915_private *dev_priv = fence->i915;
		i915_reg_t reg = FENCE_REG(fence->id);

		I915_WRITE(reg, val);
		POSTING_READ(reg);
	}
}

static void i830_write_fence_reg(struct drm_i915_fence_reg *fence,
				 struct i915_vma *vma)
{
	u32 val;

	val = 0;
	if (vma) {
		unsigned int stride = i915_gem_object_get_stride(vma->obj);

		GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma));
		GEM_BUG_ON(vma->node.start & ~I830_FENCE_START_MASK);
		GEM_BUG_ON(!is_power_of_2(vma->fence_size));
		GEM_BUG_ON(!is_power_of_2(stride / 128));
		GEM_BUG_ON(!IS_ALIGNED(vma->node.start, vma->fence_size));

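		/* Gen2 fences are laid out like gen3 fences, but the pitch
		 * is always encoded in 128-byte units.
		 */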
		val = vma->node.start;
		if (i915_gem_object_get_tiling(vma->obj) == I915_TILING_Y)
			val |= BIT(I830_FENCE_TILING_Y_SHIFT);
		val |= I830_FENCE_SIZE_BITS(vma->fence_size);
		val |= ilog2(stride / 128) << I830_FENCE_PITCH_SHIFT;
		val |= I830_FENCE_REG_VALID;
	}

	if (!pipelined) {
		struct drm_i915_private *dev_priv = fence->i915;
		i915_reg_t reg = FENCE_REG(fence->id);

		I915_WRITE(reg, val);
		POSTING_READ(reg);
	}
}

static void fence_write(struct drm_i915_fence_reg *fence,
			struct i915_vma *vma)
{
	/* Previous access through the fence register is marshalled by
	 * the mb() inside the fault handlers (i915_gem_release_mmaps)
	 * and explicitly managed for internal users.
	 */

	if (IS_GEN2(fence->i915))
		i830_write_fence_reg(fence, vma);
	else if (IS_GEN3(fence->i915))
		i915_write_fence_reg(fence, vma);
	else
		i965_write_fence_reg(fence, vma);

	/* Access through the fenced region afterwards is
	 * ordered by the posting reads whilst writing the registers.
	 */

	fence->dirty = false;
}

static int fence_update(struct drm_i915_fence_reg *fence,
			struct i915_vma *vma)
{
	int ret;

	if (vma) {
		if (!i915_vma_is_map_and_fenceable(vma))
			return -EINVAL;

		if (WARN(!i915_gem_object_get_stride(vma->obj) ||
			 !i915_gem_object_get_tiling(vma->obj),
			 "bogus fence setup with stride: 0x%x, tiling mode: %i\n",
			 i915_gem_object_get_stride(vma->obj),
			 i915_gem_object_get_tiling(vma->obj)))
			return -EINVAL;

		ret = i915_gem_active_retire(&vma->last_fence,
					     &vma->obj->base.dev->struct_mutex);
		if (ret)
			return ret;
	}

	if (fence->vma) {
		ret = i915_gem_active_retire(&fence->vma->last_fence,
					     &fence->vma->obj->base.dev->struct_mutex);
		if (ret)
			return ret;
	}

	if (fence->vma && fence->vma != vma) {
		/* Ensure that all userspace CPU access is completed before
		 * stealing the fence.
		 */
		i915_gem_release_mmap(fence->vma->obj);

		fence->vma->fence = NULL;
		fence->vma = NULL;

		list_move(&fence->link, &fence->i915->mm.fence_list);
	}

	/* We only need to update the register itself if the device is awake.
	 * If the device is currently powered down, we will defer the write
	 * to the runtime resume, see i915_gem_restore_fences().
	 */
	if (intel_runtime_pm_get_if_in_use(fence->i915)) {
		fence_write(fence, vma);
		intel_runtime_pm_put(fence->i915);
	}

	if (vma) {
		if (fence->vma != vma) {
			vma->fence = fence;
			fence->vma = vma;
		}

		list_move_tail(&fence->link, &fence->i915->mm.fence_list);
	}

	return 0;
}

/**
 * i915_vma_put_fence - force-remove fence for a VMA
 * @vma: vma to map linearly (not through a fence reg)
 *
 * This function force-removes any fence from the given vma, which is useful
 * if the kernel wants to do untiled GTT access.
 *
 * Returns:
 *
 * 0 on success, negative error code on failure.
 */
int
i915_vma_put_fence(struct i915_vma *vma)
{
	struct drm_i915_fence_reg *fence = vma->fence;

	if (!fence)
		return 0;

	if (fence->pin_count)
		return -EBUSY;

	return fence_update(fence, NULL);
}

static struct drm_i915_fence_reg *fence_find(struct drm_i915_private *dev_priv)
{
	struct drm_i915_fence_reg *fence;

	list_for_each_entry(fence, &dev_priv->mm.fence_list, link) {
		if (fence->pin_count)
			continue;

		return fence;
	}

	/* Wait for completion of pending flips which consume fences */
	if (intel_has_pending_fb_unpin(dev_priv))
		return ERR_PTR(-EAGAIN);

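	/* Every fence is pinned and no framebuffer unpin is in flight that
	 * could free one up: there is nothing to wait on, so report the
	 * deadlock to the caller.
	 */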
	return ERR_PTR(-EDEADLK);
}

/**
 * i915_vma_get_fence - set up fencing for a vma
 * @vma: vma to map through a fence reg
 *
 * When mapping objects through the GTT, userspace wants to be able to write
 * to them without having to worry about swizzling if the object is tiled.
 * This function walks the fence regs looking for a free one for @vma,
 * stealing one if it can't find any.
 *
 * It then sets up the reg based on the object's properties: address, pitch
 * and tiling format.
 *
 * For an untiled surface, this removes any existing fence.
 *
 * Returns:
 *
 * 0 on success, negative error code on failure.
 */
int
i915_vma_get_fence(struct i915_vma *vma)
{
	struct drm_i915_fence_reg *fence;
	struct i915_vma *set = i915_gem_object_is_tiled(vma->obj) ? vma : NULL;

	/* Note that we revoke fences on runtime suspend. Therefore the user
	 * must keep the device awake whilst using the fence.
	 */
	assert_rpm_wakelock_held(vma->vm->i915);

	/* Just update our place in the LRU if our fence is getting reused. */
	if (vma->fence) {
		fence = vma->fence;
		if (!fence->dirty) {
			list_move_tail(&fence->link,
				       &fence->i915->mm.fence_list);
			return 0;
		}
	} else if (set) {
		fence = fence_find(vma->vm->i915);
		if (IS_ERR(fence))
			return PTR_ERR(fence);
	} else {
		return 0;
	}

	return fence_update(fence, set);
}

/**
 * i915_gem_revoke_fences - revoke fence state
 * @dev_priv: i915 device private
 *
 * Removes all GTT mmappings via the fence registers. This forces any user
 * access to the fence registers to be trapped via the usual fault mechanisms,
 * and hence we can recompute the fence state on next use (or restore them
 * upon resume).
 */
void i915_gem_revoke_fences(struct drm_i915_private *dev_priv)
{
	int i;

	lockdep_assert_held(&dev_priv->drm.struct_mutex);

	for (i = 0; i < dev_priv->num_fence_regs; i++) {
		struct drm_i915_fence_reg *fence = &dev_priv->fence_regs[i];

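		/* Force userspace back through the GTT fault handler, which
		 * will reacquire the fence (and rewrite the register) on the
		 * next access.
		 */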
		if (fence->vma)
			i915_gem_release_mmap(fence->vma->obj);
	}
}

/**
 * i915_gem_restore_fences - restore fence state
 * @dev_priv: i915 device private
 *
 * Restore the hw fence state to match the software tracking again, to be
 * called after a gpu reset and on resume. Note that on runtime suspend we
 * only cancel the fences, to be reacquired by the user later.
 */
void i915_gem_restore_fences(struct drm_i915_private *dev_priv)
{
	int i;

	for (i = 0; i < dev_priv->num_fence_regs; i++) {
		struct drm_i915_fence_reg *reg = &dev_priv->fence_regs[i];
		struct i915_vma *vma = reg->vma;

		/*
		 * Commit delayed tiling changes if we have an object still
		 * attached to the fence, otherwise just clear the fence.
		 */
		if (vma && !i915_gem_object_is_tiled(vma->obj)) {
			GEM_BUG_ON(!reg->dirty);
			GEM_BUG_ON(!list_empty(&vma->obj->userfault_link));

			list_move(&reg->link, &dev_priv->mm.fence_list);
			vma->fence = NULL;
			vma = NULL;
		}

		fence_write(reg, vma);
		reg->vma = vma;
	}
}

/**
 * DOC: tiling swizzling details
 *
 * The idea behind tiling is to increase cache hit rates by rearranging
 * pixel data so that a group of pixel accesses are in the same cacheline.
 * Performance improvement from doing this on the back/depth buffer are on
 * the order of 30%.
 *
 * Intel architectures make this somewhat more complicated, though, by
 * adjustments made to addressing of data when the memory is in interleaved
 * mode (matched pairs of DIMMS) to improve memory bandwidth.
 * For interleaved memory, the CPU sends every sequential 64 bytes
 * to an alternate memory channel so it can get the bandwidth from both.
 *
 * The GPU also rearranges its accesses for increased bandwidth to interleaved
 * memory, and it matches what the CPU does for non-tiled. However, when tiled
 * it does it a little differently, since one walks addresses not just in the
 * X direction but also Y. So, when mapping the fenced object through the GTT,
 * the CPU's and the GPU's views of the same physical cachelines can end up
 * differing: the memory controller XORs a platform- and DRAM-dependent
 * selection of higher address bits (bits 9, 10, 11 and/or 17) into bit 6,
 * while the GPU applies its own, related pattern for tiled surfaces.
 *
 * i915_gem_detect_bit_6_swizzle() below probes the memory controller
 * configuration to work out which pattern is in effect and records it, per
 * tiling mode, in dev_priv->mm.bit_6_swizzle_x/y, so that it can be reported
 * to userspace and compensated for.
 *
 * A special case is bit 17 based swizzling: bit 17 of a physical address
 * lies outside the page offset, so the swizzling of an object's pages can
 * silently change whenever those pages are swapped out and relocated. Such
 * objects need software fixups, which are provided by
 * i915_gem_object_save_bit_17_swizzle() and
 * i915_gem_object_do_bit_17_swizzle().
 */

/**
 * i915_gem_detect_bit_6_swizzle - detect bit 6 swizzling pattern
 * @dev_priv: i915 device private
 *
 * Detects bit 6 swizzling of address lookup between IGD access and CPU
 * access through main memory.
 */
void
i915_gem_detect_bit_6_swizzle(struct drm_i915_private *dev_priv)
{
	uint32_t swizzle_x = I915_BIT_6_SWIZZLE_UNKNOWN;
	uint32_t swizzle_y = I915_BIT_6_SWIZZLE_UNKNOWN;

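	/* The swizzle modes are named after the physical address bits that
	 * get XORed into bit 6 of a tiled surface address: e.g.
	 * I915_BIT_6_SWIZZLE_9_10 means bit6 ^= bit9 ^ bit10, and
	 * I915_BIT_6_SWIZZLE_9_17 means bit6 ^= bit9 ^ bit17.
	 */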
	if (INTEL_GEN(dev_priv) >= 8 || IS_VALLEYVIEW(dev_priv)) {
		/*
		 * On BDW+, swizzling is not used. We leave the CPU memory
		 * controller in charge of optimizing memory accesses without
		 * the extra address manipulation GPU side.
		 *
		 * VLV and CHV don't have GPU swizzling.
		 */
		swizzle_x = I915_BIT_6_SWIZZLE_NONE;
		swizzle_y = I915_BIT_6_SWIZZLE_NONE;
	} else if (INTEL_GEN(dev_priv) >= 6) {
		if (dev_priv->preserve_bios_swizzle) {
			if (I915_READ(DISP_ARB_CTL) &
			    DISP_TILE_SURFACE_SWIZZLING) {
				swizzle_x = I915_BIT_6_SWIZZLE_9_10;
				swizzle_y = I915_BIT_6_SWIZZLE_9;
			} else {
				swizzle_x = I915_BIT_6_SWIZZLE_NONE;
				swizzle_y = I915_BIT_6_SWIZZLE_NONE;
			}
		} else {
			uint32_t dimm_c0, dimm_c1;

			dimm_c0 = I915_READ(MAD_DIMM_C0);
			dimm_c1 = I915_READ(MAD_DIMM_C1);
			dimm_c0 &= MAD_DIMM_A_SIZE_MASK | MAD_DIMM_B_SIZE_MASK;
			dimm_c1 &= MAD_DIMM_A_SIZE_MASK | MAD_DIMM_B_SIZE_MASK;
			/* Enable swizzling when the channels are populated
			 * with identically sized dimms. We don't need to check
			 * the 3rd channel because no cpu with gpu attached
			 * ships in that configuration. Also, swizzling only
			 * makes sense for 2 channels anyway. */
			if (dimm_c0 == dimm_c1) {
				swizzle_x = I915_BIT_6_SWIZZLE_9_10;
				swizzle_y = I915_BIT_6_SWIZZLE_9;
			} else {
				swizzle_x = I915_BIT_6_SWIZZLE_NONE;
				swizzle_y = I915_BIT_6_SWIZZLE_NONE;
			}
		}
	} else if (IS_GEN5(dev_priv)) {
		/* On Ironlake the GPU uses the same swizzling setup
		 * regardless of the DRAM configuration.
		 */
		swizzle_x = I915_BIT_6_SWIZZLE_9_10;
		swizzle_y = I915_BIT_6_SWIZZLE_9;
	} else if (IS_GEN2(dev_priv)) {
		/* As far as we know, the 865 doesn't have these bit 6
		 * swizzling issues.
		 */
		swizzle_x = I915_BIT_6_SWIZZLE_NONE;
		swizzle_y = I915_BIT_6_SWIZZLE_NONE;
	} else if (IS_MOBILE(dev_priv) ||
		   IS_I915G(dev_priv) || IS_I945G(dev_priv)) {
		uint32_t dcc;

		/* On 9xx chipsets, channel interleave by the CPU is
		 * determined by DCC.  For single-channel, neither the CPU
		 * nor the GPU do swizzling.  For dual channel interleaved,
		 * the GPU's interleave is bit 9 and 10 for X tiled, and bit
		 * 9 for Y tiled.  The CPU's interleave is independent, and
		 * can be based on either bit 11 (haven't seen this yet) or
		 * bit 17 (common).
		 */
		dcc = I915_READ(DCC);
		switch (dcc & DCC_ADDRESSING_MODE_MASK) {
		case DCC_ADDRESSING_MODE_SINGLE_CHANNEL:
		case DCC_ADDRESSING_MODE_DUAL_CHANNEL_ASYMMETRIC:
			swizzle_x = I915_BIT_6_SWIZZLE_NONE;
			swizzle_y = I915_BIT_6_SWIZZLE_NONE;
			break;
		case DCC_ADDRESSING_MODE_DUAL_CHANNEL_INTERLEAVED:
			if (dcc & DCC_CHANNEL_XOR_DISABLE) {
				/* This is the base swizzling by the GPU for
				 * tiled buffers.
				 */
				swizzle_x = I915_BIT_6_SWIZZLE_9_10;
				swizzle_y = I915_BIT_6_SWIZZLE_9;
			} else if ((dcc & DCC_CHANNEL_XOR_BIT_17) == 0) {
				/* Bit 11 swizzling by the CPU in addition. */
				swizzle_x = I915_BIT_6_SWIZZLE_9_10_11;
				swizzle_y = I915_BIT_6_SWIZZLE_9_11;
			} else {
				/* Bit 17 swizzling by the CPU in addition. */
				swizzle_x = I915_BIT_6_SWIZZLE_9_10_17;
				swizzle_y = I915_BIT_6_SWIZZLE_9_17;
			}
			break;
		}

		/* Check for L-shaped memory aka modified enhanced addressing */
		if (IS_GEN4(dev_priv) &&
		    !(I915_READ(DCC2) & DCC2_MODIFIED_ENHANCED_DISABLE)) {
			swizzle_x = I915_BIT_6_SWIZZLE_UNKNOWN;
			swizzle_y = I915_BIT_6_SWIZZLE_UNKNOWN;
		}

		if (dcc == 0xffffffff) {
			DRM_ERROR("Couldn't read from MCHBAR. Disabling tiling.\n");
			swizzle_x = I915_BIT_6_SWIZZLE_UNKNOWN;
			swizzle_y = I915_BIT_6_SWIZZLE_UNKNOWN;
		}
	} else {
		/* The 965, G33, and newer, have a very flexible memory
		 * configuration. It will enable dual-channel mode
		 * (interleaving) on as much memory as it can, and the GPU
		 * will additionally sometimes enable different bit 6
		 * swizzling for tiled objects from the CPU.
		 *
		 * The DRAM rank boundary (DRB) registers describe how each
		 * channel is populated. Swizzling is only used when the two
		 * channels are configured symmetrically, which we detect by
		 * the corresponding rank boundary registers matching.
		 */
		if (I915_READ16(C0DRB3) == I915_READ16(C1DRB3)) {
			swizzle_x = I915_BIT_6_SWIZZLE_9_10;
			swizzle_y = I915_BIT_6_SWIZZLE_9;
		}
	}

	if (swizzle_x == I915_BIT_6_SWIZZLE_UNKNOWN ||
	    swizzle_y == I915_BIT_6_SWIZZLE_UNKNOWN) {
		/* Userspace likes to explode if it sees unknown swizzling,
		 * so lie. We will finish the lie when reporting through
		 * the get-tiling ioctl by reporting the physical swizzle
		 * mode as unknown instead.
		 *
		 * As we don't strictly know what the swizzling is, it may be
		 * bit17 dependent, and so we need to also prevent the pages
		 * from being moved.
		 */
		dev_priv->quirks |= QUIRK_PIN_SWIZZLED_PAGES;
		swizzle_x = I915_BIT_6_SWIZZLE_NONE;
		swizzle_y = I915_BIT_6_SWIZZLE_NONE;
	}

	dev_priv->mm.bit_6_swizzle_x = swizzle_x;
	dev_priv->mm.bit_6_swizzle_y = swizzle_y;
}

/*
 * Swap every 64 bytes of this page around, to account for it having a new
 * bit 17 of its physical address and therefore being interpreted differently
 * by the GPU.
 */
static void
i915_gem_swizzle_page(struct page *page)
{
	char temp[64];
	char *vaddr;
	int i;

	vaddr = kmap(page);

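	/* Exchange the two 64-byte halves of every 128-byte block: a flipped
	 * bit 17 toggles bit 6 of the swizzled address, which is exactly this
	 * exchange, so applying it again undoes the stale swizzle.
	 */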
	for (i = 0; i < PAGE_SIZE; i += 128) {
		memcpy(temp, &vaddr[i], 64);
		memcpy(&vaddr[i], &vaddr[i + 64], 64);
		memcpy(&vaddr[i + 64], temp, 64);
	}

	kunmap(page);
}

/**
 * i915_gem_object_do_bit_17_swizzle - fixup bit 17 swizzling
 * @obj: i915 GEM buffer object
 * @pages: the scattergather list of physical pages
 *
 * This function fixes up the swizzling in case any page frame number for this
 * object has changed in bit 17 since that state has been saved with
 * i915_gem_object_save_bit_17_swizzle().
 *
 * This is called when pinning backing storage again, since the kernel is free
 * to move unpinned backing storage around (either by directly moving pages or
 * by swapping them out and back in again).
 */
void
i915_gem_object_do_bit_17_swizzle(struct drm_i915_gem_object *obj,
				  struct sg_table *pages)
{
	struct sgt_iter sgt_iter;
	struct page *page;
	int i;

	if (obj->bit_17 == NULL)
		return;

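	/* Compare bit 17 of each page's current physical address against the
	 * value recorded by i915_gem_object_save_bit_17_swizzle(); any page
	 * whose bit 17 changed now swizzles differently and must be fixed up
	 * in place.
	 */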
	i = 0;
	for_each_sgt_page(page, sgt_iter, pages) {
		char new_bit_17 = page_to_phys(page) >> 17;

		if ((new_bit_17 & 0x1) != (test_bit(i, obj->bit_17) != 0)) {
			i915_gem_swizzle_page(page);
			set_page_dirty(page);
		}
		i++;
	}
}

/**
 * i915_gem_object_save_bit_17_swizzle - save bit 17 swizzling
 * @obj: i915 GEM buffer object
 * @pages: the scattergather list of physical pages
 *
 * This function saves the bit 17 of each page frame number so that swizzling
 * can be fixed up later on with i915_gem_object_do_bit_17_swizzle(). This
 * must be called before the backing storage can be unpinned.
 */
void
i915_gem_object_save_bit_17_swizzle(struct drm_i915_gem_object *obj,
				    struct sg_table *pages)
{
	const unsigned int page_count = obj->base.size >> PAGE_SHIFT;
	struct sgt_iter sgt_iter;
	struct page *page;
	int i;

	if (obj->bit_17 == NULL) {
		obj->bit_17 = kcalloc(BITS_TO_LONGS(page_count),
				      sizeof(long), GFP_KERNEL);
		if (obj->bit_17 == NULL) {
			DRM_ERROR("Failed to allocate memory for bit 17 record\n");
			return;
		}
	}

	i = 0;

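	/* Bit 17 is the only swizzle-relevant physical address bit above the
	 * page offset, so it is the only per-page state that needs saving.
	 */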
	for_each_sgt_page(page, sgt_iter, pages) {
		if (page_to_phys(page) & (1 << 17))
			__set_bit(i, obj->bit_17);
		else
			__clear_bit(i, obj->bit_17);
		i++;
	}
}