/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OF THE
 * SOFTWARE.
 *
 */
#include <linux/prime_numbers.h>

#include "gem/i915_gem_context.h"
#include "gem/selftests/mock_context.h"

#include "i915_scatterlist.h"
#include "i915_selftest.h"

#include "mock_gem_device.h"
#include "mock_gtt.h"

static bool assert_vma(struct i915_vma *vma,
		       struct drm_i915_gem_object *obj,
		       struct i915_gem_context *ctx)
{
	bool ok = true;

	if (vma->vm != rcu_access_pointer(ctx->vm)) {
		pr_err("VMA created with wrong VM\n");
		ok = false;
	}

	if (vma->size != obj->base.size) {
		pr_err("VMA created with wrong size, found %llu, expected %zu\n",
		       vma->size, obj->base.size);
		ok = false;
	}

	if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
		pr_err("VMA created with wrong type [%d]\n",
		       vma->ggtt_view.type);
		ok = false;
	}

	return ok;
}

static struct i915_vma *
checked_vma_instance(struct drm_i915_gem_object *obj,
		     struct i915_address_space *vm,
		     const struct i915_ggtt_view *view)
{
	struct i915_vma *vma;
	bool ok = true;

	vma = i915_vma_instance(obj, vm, view);
	if (IS_ERR(vma))
		return vma;

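	/* Manual checks; i915_vma_compare() below should catch the same. */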
	if (vma->vm != vm) {
		pr_err("VMA's vm [%p] does not match request [%p]\n",
		       vma->vm, vm);
		ok = false;
	}

	if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) {
		pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
		       i915_vma_is_ggtt(vma), i915_is_ggtt(vm));
		ok = false;
	}

	if (i915_vma_compare(vma, vm, view)) {
		pr_err("i915_vma_compare failed with create parameters!\n");
		return ERR_PTR(-EINVAL);
	}

	if (i915_vma_compare(vma, vma->vm,
			     i915_vma_is_ggtt(vma) ? &vma->ggtt_view : NULL)) {
		pr_err("i915_vma_compare failed with itself\n");
		return ERR_PTR(-EINVAL);
	}

	if (!ok) {
		pr_err("i915_vma_compare failed to detect the difference!\n");
		return ERR_PTR(-EINVAL);
	}

	return vma;
}

static int create_vmas(struct drm_i915_private *i915,
		       struct list_head *objects,
		       struct list_head *contexts)
{
	struct drm_i915_gem_object *obj;
	struct i915_gem_context *ctx;
	int pinned;

	list_for_each_entry(obj, objects, st_link) {
		for (pinned = 0; pinned <= 1; pinned++) {
			list_for_each_entry(ctx, contexts, link) {
				struct i915_address_space *vm;
				struct i915_vma *vma;
				int err;

				vm = i915_gem_context_get_vm_rcu(ctx);
				vma = checked_vma_instance(obj, vm, NULL);
				i915_vm_put(vm);
				if (IS_ERR(vma))
					return PTR_ERR(vma);

				if (!assert_vma(vma, obj, ctx)) {
					pr_err("VMA lookup/create failed\n");
					return -EINVAL;
				}

				if (!pinned) {
					err = i915_vma_pin(vma, 0, 0, PIN_USER);
					if (err) {
						pr_err("Failed to pin VMA\n");
						return err;
					}
				} else {
					i915_vma_unpin(vma);
				}
			}
		}
	}

	return 0;
}

static int igt_vma_create(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct drm_i915_private *i915 = ggtt->vm.i915;
	struct drm_i915_gem_object *obj, *on;
	struct i915_gem_context *ctx, *cn;
	unsigned long num_obj, num_ctx;
	unsigned long no, nc;
	IGT_TIMEOUT(end_time);
	LIST_HEAD(contexts);
	LIST_HEAD(objects);
	int err = -ENOMEM;

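	/*
	 * Exercise creating many vma amongst many objects and contexts,
	 * checking both the vma creation and lookup routines.
	 */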
	no = 0;
	for_each_prime_number(num_obj, ULONG_MAX - 1) {
		for (; no < num_obj; no++) {
			obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
			if (IS_ERR(obj))
				goto out;

			list_add(&obj->st_link, &objects);
		}

		nc = 0;
		for_each_prime_number(num_ctx, 2 * BITS_PER_LONG) {
			for (; nc < num_ctx; nc++) {
				ctx = mock_context(i915, "mock");
				if (!ctx)
					goto out;

				list_move(&ctx->link, &contexts);
			}

			err = create_vmas(i915, &objects, &contexts);
			if (err)
				goto out;

			if (igt_timeout(end_time,
					"%s timed out: after %lu objects in %lu contexts\n",
					__func__, no, nc))
				goto end;
		}

		list_for_each_entry_safe(ctx, cn, &contexts, link) {
			list_del_init(&ctx->link);
			mock_context_close(ctx);
		}

		cond_resched();
	}

end:
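	/* Final pass: look up every object in every remaining context. */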
	err = create_vmas(i915, &objects, &contexts);
out:
	list_for_each_entry_safe(ctx, cn, &contexts, link) {
		list_del_init(&ctx->link);
		mock_context_close(ctx);
	}

	list_for_each_entry_safe(obj, on, &objects, st_link)
		i915_gem_object_put(obj);
	return err;
}

struct pin_mode {
	u64 size;
	u64 flags;
	bool (*assert)(const struct i915_vma *,
		       const struct pin_mode *mode,
		       int result);
	const char *string;
};

static bool assert_pin_valid(const struct i915_vma *vma,
			     const struct pin_mode *mode,
			     int result)
{
	if (result)
		return false;

	if (i915_vma_misplaced(vma, mode->size, 0, mode->flags))
		return false;

	return true;
}

__maybe_unused
static bool assert_pin_enospc(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -ENOSPC;
}

__maybe_unused
static bool assert_pin_einval(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -EINVAL;
}

static int igt_vma_pin1(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	const struct pin_mode modes[] = {
#define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
#define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
#define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
#define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
		VALID(0, PIN_GLOBAL),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE),

		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 4096),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 8192),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),

		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | round_down(U64_MAX, PAGE_SIZE)),

		VALID(4096, PIN_GLOBAL),
		VALID(8192, PIN_GLOBAL),
		VALID(ggtt->mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
		NOSPACE(ggtt->mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->vm.total - 4096, PIN_GLOBAL),
		VALID(ggtt->vm.total, PIN_GLOBAL),
		NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL),
		NOSPACE(round_down(U64_MAX, PAGE_SIZE), PIN_GLOBAL),
		INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (round_down(U64_MAX, PAGE_SIZE) - 4096)),

		VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),

#if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
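		/*
		 * Misusing the offset/bias arguments is a programming error
		 * (they are not controllable from userspace), so with
		 * CONFIG_DRM_I915_DEBUG_GEM these requests trip the internal
		 * sanity checks instead of failing gracefully; only exercise
		 * them in non-debug builds.
		 */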
		NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | ggtt->mappable_end),
		NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total),
		NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
#endif
		{ },
#undef NOSPACE
#undef INVALID
#undef __INVALID
#undef VALID
	}, *m;
	struct drm_i915_gem_object *obj;
	struct i915_vma *vma;
	int err = -EINVAL;

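	/*
	 * Exercise all the weird and wonderful i915_vma_pin requests,
	 * focusing on error handling of boundary conditions.
	 */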
	GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm));

	obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	vma = checked_vma_instance(obj, &ggtt->vm, NULL);
	if (IS_ERR(vma))
		goto out;

	for (m = modes; m->assert; m++) {
		err = i915_vma_pin(vma, m->size, 0, m->flags);
		if (!m->assert(vma, m, err)) {
			pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
			       m->assert == assert_pin_valid ? "Failed" : "Unexpectedly succeeded",
			       (int)(m - modes), m->string, m->size, m->flags,
			       err);
			if (!err)
				i915_vma_unpin(vma);
			err = -EINVAL;
			goto out;
		}

		if (!err) {
			i915_vma_unpin(vma);
			err = i915_vma_unbind(vma);
			if (err) {
				pr_err("Failed to unbind single page from GGTT, err=%d\n", err);
				goto out;
			}
		}

		cond_resched();
	}

	err = 0;
out:
	i915_gem_object_put(obj);
	return err;
}

static unsigned long rotated_index(const struct intel_rotation_info *r,
				   unsigned int n,
				   unsigned int x,
				   unsigned int y)
{
	return (r->plane[n].stride * (r->plane[n].height - y - 1) +
		r->plane[n].offset + x);
}

static struct scatterlist *
assert_rotated(struct drm_i915_gem_object *obj,
	       const struct intel_rotation_info *r, unsigned int n,
	       struct scatterlist *sg)
{
	unsigned int x, y;

	for (x = 0; x < r->plane[n].width; x++) {
		for (y = 0; y < r->plane[n].height; y++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}

			src_idx = rotated_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			if (sg_dma_len(sg) != PAGE_SIZE) {
				pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) != src) {
				pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			sg = sg_next(sg);
		}
	}

	return sg;
}

static unsigned long remapped_index(const struct intel_remapped_info *r,
				    unsigned int n,
				    unsigned int x,
				    unsigned int y)
{
	return (r->plane[n].stride * y +
		r->plane[n].offset + x);
}

static struct scatterlist *
assert_remapped(struct drm_i915_gem_object *obj,
		const struct intel_remapped_info *r, unsigned int n,
		struct scatterlist *sg)
{
	unsigned int x, y;
	unsigned int left = 0;
	unsigned int offset;

	for (y = 0; y < r->plane[n].height; y++) {
		for (x = 0; x < r->plane[n].width; x++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}
			if (!left) {
				offset = 0;
				left = sg_dma_len(sg);
			}

			src_idx = remapped_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			if (left < PAGE_SIZE || left & (PAGE_SIZE - 1)) {
				pr_err("Invalid sg.length, found %d, expected %lu for remapped page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) + offset != src) {
				pr_err("Invalid address for remapped page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			left -= PAGE_SIZE;
			offset += PAGE_SIZE;

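			/*
			 * The remapped sg entries may be coalesced and cover
			 * several pages, so only advance to the next entry
			 * once the current one has been fully consumed.
			 */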
			if (!left)
				sg = sg_next(sg);
		}
	}

	return sg;
}

static unsigned int rotated_size(const struct intel_remapped_plane_info *a,
				 const struct intel_remapped_plane_info *b)
{
	return a->width * a->height + b->width * b->height;
}

static int igt_vma_rotate_remap(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct i915_address_space *vm = &ggtt->vm;
	struct drm_i915_gem_object *obj;
	const struct intel_remapped_plane_info planes[] = {
		{ .width = 1, .height = 1, .stride = 1 },
		{ .width = 2, .height = 2, .stride = 2 },
		{ .width = 4, .height = 4, .stride = 4 },
		{ .width = 8, .height = 8, .stride = 8 },

		{ .width = 3, .height = 5, .stride = 3 },
		{ .width = 3, .height = 5, .stride = 4 },
		{ .width = 3, .height = 5, .stride = 5 },

		{ .width = 5, .height = 3, .stride = 5 },
		{ .width = 5, .height = 3, .stride = 7 },
		{ .width = 5, .height = 3, .stride = 9 },

		{ .width = 4, .height = 6, .stride = 6 },
		{ .width = 6, .height = 4, .stride = 6 },
		{ }
	}, *a, *b;
	enum i915_ggtt_view_type types[] = {
		I915_GGTT_VIEW_ROTATED,
		I915_GGTT_VIEW_REMAPPED,
		0,
	}, *t;
	const unsigned int max_pages = 64;
	int err = -ENOMEM;

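	/*
	 * Create VMA for many different combinations of planes and check
	 * that the page layout within the rotated/remapped VMA matches our
	 * expectations.
	 */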
	obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (t = types; *t; t++) {
		for (a = planes; a->width; a++) {
			for (b = planes + ARRAY_SIZE(planes); b-- != planes; ) {
				struct i915_ggtt_view view;
				unsigned int n, max_offset;

				max_offset = max(a->stride * a->height,
						 b->stride * b->height);
				GEM_BUG_ON(max_offset > max_pages);
				max_offset = max_pages - max_offset;

				view.type = *t;
				view.rotated.plane[0] = *a;
				view.rotated.plane[1] = *b;

				for_each_prime_number_from(view.rotated.plane[0].offset, 0, max_offset) {
					for_each_prime_number_from(view.rotated.plane[1].offset, 0, max_offset) {
						struct scatterlist *sg;
						struct i915_vma *vma;

						vma = checked_vma_instance(obj, vm, &view);
						if (IS_ERR(vma)) {
							err = PTR_ERR(vma);
							goto out_object;
						}

						err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
						if (err) {
							pr_err("Failed to pin VMA, err=%d\n", err);
							goto out_object;
						}

						if (view.type == I915_GGTT_VIEW_ROTATED &&
						    vma->size != rotated_size(a, b) * PAGE_SIZE) {
							pr_err("VMA is wrong size, expected %lu, found %llu\n",
							       PAGE_SIZE * rotated_size(a, b), vma->size);
							err = -EINVAL;
							goto out_object;
						}

						if (view.type == I915_GGTT_VIEW_REMAPPED &&
						    vma->size > rotated_size(a, b) * PAGE_SIZE) {
							pr_err("VMA is wrong size, expected %lu, found %llu\n",
							       PAGE_SIZE * rotated_size(a, b), vma->size);
							err = -EINVAL;
							goto out_object;
						}

						if (vma->pages->nents > rotated_size(a, b)) {
							pr_err("sg table is wrong size, expected %u, found %u nents\n",
							       rotated_size(a, b), vma->pages->nents);
							err = -EINVAL;
							goto out_object;
						}

						if (vma->node.size < vma->size) {
							pr_err("VMA binding too small, expected %llu, found %llu\n",
							       vma->size, vma->node.size);
							err = -EINVAL;
							goto out_object;
						}

						if (vma->pages == obj->mm.pages) {
							pr_err("VMA using unrotated object pages!\n");
							err = -EINVAL;
							goto out_object;
						}

						sg = vma->pages->sgl;
						for (n = 0; n < ARRAY_SIZE(view.rotated.plane); n++) {
							if (view.type == I915_GGTT_VIEW_ROTATED)
								sg = assert_rotated(obj, &view.rotated, n, sg);
							else
								sg = assert_remapped(obj, &view.remapped, n, sg);
							if (IS_ERR(sg)) {
								pr_err("Inconsistent %s VMA pages for plane %d: [(%d, %d, %d, %d), (%d, %d, %d, %d)]\n",
								       view.type == I915_GGTT_VIEW_ROTATED ?
								       "rotated" : "remapped", n,
								       view.rotated.plane[0].width,
								       view.rotated.plane[0].height,
								       view.rotated.plane[0].stride,
								       view.rotated.plane[0].offset,
								       view.rotated.plane[1].width,
								       view.rotated.plane[1].height,
								       view.rotated.plane[1].stride,
								       view.rotated.plane[1].offset);
								err = -EINVAL;
								goto out_object;
							}
						}

						i915_vma_unpin(vma);

						cond_resched();
					}
				}
			}
		}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}

static bool assert_partial(struct drm_i915_gem_object *obj,
			   struct i915_vma *vma,
			   unsigned long offset,
			   unsigned long size)
{
	struct sgt_iter sgt;
	dma_addr_t dma;

	for_each_sgt_daddr(dma, sgt, vma->pages) {
		dma_addr_t src;

		if (!size) {
			pr_err("Partial scattergather list too long\n");
			return false;
		}

		src = i915_gem_object_get_dma_address(obj, offset);
		if (src != dma) {
			pr_err("DMA mismatch for partial page offset %lu\n",
			       offset);
			return false;
		}

		offset++;
		size--;
	}

	return true;
}

static bool assert_pin(struct i915_vma *vma,
		       struct i915_ggtt_view *view,
		       u64 size,
		       const char *name)
{
	bool ok = true;

	if (vma->size != size) {
		pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
		       name, size, vma->size);
		ok = false;
	}

	if (vma->node.size < vma->size) {
		pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
		       name, vma->size, vma->node.size);
		ok = false;
	}

	if (view && view->type != I915_GGTT_VIEW_NORMAL) {
		if (memcmp(&vma->ggtt_view, view, sizeof(*view))) {
			pr_err("(%s) VMA mismatch upon creation!\n",
			       name);
			ok = false;
		}

		if (vma->pages == vma->obj->mm.pages) {
			pr_err("(%s) VMA using original object pages!\n",
			       name);
			ok = false;
		}
	} else {
		if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
			pr_err("Not the normal ggtt view! Found %d\n",
			       vma->ggtt_view.type);
			ok = false;
		}

		if (vma->pages != vma->obj->mm.pages) {
			pr_err("VMA not using object pages!\n");
			ok = false;
		}
	}

	return ok;
}

static int igt_vma_partial(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct i915_address_space *vm = &ggtt->vm;
	const unsigned int npages = 1021;
	struct drm_i915_gem_object *obj;
	const struct phase {
		const char *name;
	} phases[] = {
		{ "create" },
		{ "lookup" },
		{ },
	}, *p;
	unsigned int sz, offset;
	struct i915_vma *vma;
	int err = -ENOMEM;

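	/*
	 * Create lots of different VMA for the object and check that
	 * we are returned the same VMA when we later request the same range.
	 */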
	obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (p = phases; p->name; p++) {
		unsigned int count, nvma;

		nvma = 0;
		for_each_prime_number_from(sz, 1, npages) {
			for_each_prime_number_from(offset, 0, npages - sz) {
				struct i915_ggtt_view view;

				view.type = I915_GGTT_VIEW_PARTIAL;
				view.partial.offset = offset;
				view.partial.size = sz;

				if (sz == npages)
					view.type = I915_GGTT_VIEW_NORMAL;

				vma = checked_vma_instance(obj, vm, &view);
				if (IS_ERR(vma)) {
					err = PTR_ERR(vma);
					goto out_object;
				}

				err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
				if (err)
					goto out_object;

				if (!assert_pin(vma, &view, sz * PAGE_SIZE, p->name)) {
					pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				if (!assert_partial(obj, vma, offset, sz)) {
					pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				i915_vma_unpin(vma);
				nvma++;

				cond_resched();
			}
		}

		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) All partial vma were not recorded on the obj->vma_list: found %u, expected %u\n",
			       p->name, count, nvma);
			err = -EINVAL;
			goto out_object;
		}

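		/* Create a single VMA covering the whole object. */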
		vma = checked_vma_instance(obj, vm, NULL);
		if (IS_ERR(vma)) {
			err = PTR_ERR(vma);
			goto out_object;
		}

		err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
		if (err)
			goto out_object;

		if (!assert_pin(vma, NULL, obj->base.size, p->name)) {
			pr_err("(%s) inconsistent full pin\n", p->name);
			err = -EINVAL;
			goto out_object;
		}

		i915_vma_unpin(vma);

		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) allocated an extra full vma!\n", p->name);
			err = -EINVAL;
			goto out_object;
		}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}

int i915_vma_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_create),
		SUBTEST(igt_vma_pin1),
		SUBTEST(igt_vma_rotate_remap),
		SUBTEST(igt_vma_partial),
	};
	struct drm_i915_private *i915;
	struct i915_ggtt *ggtt;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	ggtt = kmalloc(sizeof(*ggtt), GFP_KERNEL);
	if (!ggtt) {
		err = -ENOMEM;
		goto out_put;
	}
	mock_init_ggtt(i915, ggtt);

	err = i915_subtests(tests, ggtt);

	mock_device_flush(i915);
	i915_gem_drain_freed_objects(i915);
	mock_fini_ggtt(ggtt);
	kfree(ggtt);
out_put:
	drm_dev_put(&i915->drm);
	return err;
}

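/*
 * Write a test pattern through a rotated/remapped GGTT view of an object,
 * then read it back through the normal view and check that each page landed
 * where the remapping says it should.
 */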
static int igt_vma_remapped_gtt(void *arg)
{
	struct drm_i915_private *i915 = arg;
	const struct intel_remapped_plane_info planes[] = {
		{ .width = 1, .height = 1, .stride = 1 },
		{ .width = 2, .height = 2, .stride = 2 },
		{ .width = 4, .height = 4, .stride = 4 },
		{ .width = 8, .height = 8, .stride = 8 },

		{ .width = 3, .height = 5, .stride = 3 },
		{ .width = 3, .height = 5, .stride = 4 },
		{ .width = 3, .height = 5, .stride = 5 },

		{ .width = 5, .height = 3, .stride = 5 },
		{ .width = 5, .height = 3, .stride = 7 },
		{ .width = 5, .height = 3, .stride = 9 },

		{ .width = 4, .height = 6, .stride = 6 },
		{ .width = 6, .height = 4, .stride = 6 },
		{ }
	}, *p;
	enum i915_ggtt_view_type types[] = {
		I915_GGTT_VIEW_ROTATED,
		I915_GGTT_VIEW_REMAPPED,
		0,
	}, *t;
	struct drm_i915_gem_object *obj;
	intel_wakeref_t wakeref;
	int err = 0;

	obj = i915_gem_object_create_internal(i915, 10 * 10 * PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	wakeref = intel_runtime_pm_get(&i915->runtime_pm);

	for (t = types; *t; t++) {
		for (p = planes; p->width; p++) {
			struct i915_ggtt_view view = {
				.type = *t,
				.rotated.plane[0] = *p,
			};
			struct i915_vma *vma;
			u32 __iomem *map;
			unsigned int x, y;

			i915_gem_object_lock(obj);
			err = i915_gem_object_set_to_gtt_domain(obj, true);
			i915_gem_object_unlock(obj);
			if (err)
				goto out;

			vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE);
			if (IS_ERR(vma)) {
				err = PTR_ERR(vma);
				goto out;
			}

			GEM_BUG_ON(vma->ggtt_view.type != *t);

			map = i915_vma_pin_iomap(vma);
			i915_vma_unpin(vma);
			if (IS_ERR(map)) {
				err = PTR_ERR(map);
				goto out;
			}

			for (y = 0; y < p->height; y++) {
				for (x = 0; x < p->width; x++) {
					unsigned int offset;
					u32 val = y << 16 | x;

					if (*t == I915_GGTT_VIEW_ROTATED)
						offset = (x * p->height + y) * PAGE_SIZE;
					else
						offset = (y * p->width + x) * PAGE_SIZE;

					iowrite32(val, &map[offset / sizeof(*map)]);
				}
			}

			i915_vma_unpin_iomap(vma);

			vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE);
			if (IS_ERR(vma)) {
				err = PTR_ERR(vma);
				goto out;
			}

			GEM_BUG_ON(vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL);

			map = i915_vma_pin_iomap(vma);
			i915_vma_unpin(vma);
			if (IS_ERR(map)) {
				err = PTR_ERR(map);
				goto out;
			}

			for (y = 0; y < p->height; y++) {
				for (x = 0; x < p->width; x++) {
					unsigned int offset, src_idx;
					u32 exp = y << 16 | x;
					u32 val;

					if (*t == I915_GGTT_VIEW_ROTATED)
						src_idx = rotated_index(&view.rotated, 0, x, y);
					else
						src_idx = remapped_index(&view.remapped, 0, x, y);
					offset = src_idx * PAGE_SIZE;

					val = ioread32(&map[offset / sizeof(*map)]);
					if (val != exp) {
						pr_err("%s VMA write test failed, expected 0x%x, found 0x%x\n",
						       *t == I915_GGTT_VIEW_ROTATED ? "Rotated" : "Remapped",
						       exp, val);
						i915_vma_unpin_iomap(vma);
						err = -EINVAL;
						goto out;
					}
				}
			}
			i915_vma_unpin_iomap(vma);

			cond_resched();
		}
	}

out:
	intel_runtime_pm_put(&i915->runtime_pm, wakeref);
	i915_gem_object_put(obj);

	return err;
}

int i915_vma_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_remapped_gtt),
	};

	return i915_subtests(tests, i915);
}