/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <linux/prime_numbers.h>

#include "gem/selftests/mock_context.h"

#include "i915_scatterlist.h"
#include "i915_selftest.h"

#include "mock_gem_device.h"
#include "mock_gtt.h"

static bool assert_vma(struct i915_vma *vma,
		       struct drm_i915_gem_object *obj,
		       struct i915_gem_context *ctx)
{
	bool ok = true;

	if (vma->vm != ctx->vm) {
		pr_err("VMA created with wrong VM\n");
		ok = false;
	}

	if (vma->size != obj->base.size) {
		pr_err("VMA created with wrong size, found %llu, expected %zu\n",
		       vma->size, obj->base.size);
		ok = false;
	}

	if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
		pr_err("VMA created with wrong type [%d]\n",
		       vma->ggtt_view.type);
		ok = false;
	}

	return ok;
}

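/* Look up (or create) the VMA for this object in the given address space
 * and sanity check it against the creation parameters.
 */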
static struct i915_vma *
checked_vma_instance(struct drm_i915_gem_object *obj,
		     struct i915_address_space *vm,
		     const struct i915_ggtt_view *view)
{
	struct i915_vma *vma;
	bool ok = true;

	vma = i915_vma_instance(obj, vm, view);
	if (IS_ERR(vma))
		return vma;

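	/* Manual checks, will be reinforced by i915_vma_compare! */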
	if (vma->vm != vm) {
		pr_err("VMA's vm [%p] does not match request [%p]\n",
		       vma->vm, vm);
		ok = false;
	}

	if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) {
		pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
		       i915_vma_is_ggtt(vma), i915_is_ggtt(vm));
		ok = false;
	}

	if (i915_vma_compare(vma, vm, view)) {
		pr_err("i915_vma_compare failed with create parameters!\n");
		return ERR_PTR(-EINVAL);
	}

	if (i915_vma_compare(vma, vma->vm,
			     i915_vma_is_ggtt(vma) ? &vma->ggtt_view : NULL)) {
		pr_err("i915_vma_compare failed with itself\n");
		return ERR_PTR(-EINVAL);
	}

	if (!ok) {
		pr_err("i915_vma_compare failed to detect the difference!\n");
		return ERR_PTR(-EINVAL);
	}

	return vma;
}

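/* Instantiate a VMA for every object in every context, pinning each VMA on
 * the first pass and unpinning it on the second.
 */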
static int create_vmas(struct drm_i915_private *i915,
		       struct list_head *objects,
		       struct list_head *contexts)
{
	struct drm_i915_gem_object *obj;
	struct i915_gem_context *ctx;
	int pinned;

	list_for_each_entry(obj, objects, st_link) {
		for (pinned = 0; pinned <= 1; pinned++) {
			list_for_each_entry(ctx, contexts, link) {
				struct i915_address_space *vm = ctx->vm;
				struct i915_vma *vma;
				int err;

				vma = checked_vma_instance(obj, vm, NULL);
				if (IS_ERR(vma))
					return PTR_ERR(vma);

				if (!assert_vma(vma, obj, ctx)) {
					pr_err("VMA lookup/create failed\n");
					return -EINVAL;
				}

				if (!pinned) {
					err = i915_vma_pin(vma, 0, 0, PIN_USER);
					if (err) {
						pr_err("Failed to pin VMA\n");
						return err;
					}
				} else {
					i915_vma_unpin(vma);
				}
			}
		}
	}

	return 0;
}

static int igt_vma_create(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct drm_i915_private *i915 = ggtt->vm.i915;
	struct drm_i915_gem_object *obj, *on;
	struct i915_gem_context *ctx, *cn;
	unsigned long num_obj, num_ctx;
	unsigned long no, nc;
	IGT_TIMEOUT(end_time);
	LIST_HEAD(contexts);
	LIST_HEAD(objects);
	int err = -ENOMEM;

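	/* Exercise creating many vma amongst many objects and contexts,
	 * checking the vma creation and lookup routines.
	 */
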
	no = 0;
	for_each_prime_number(num_obj, ULONG_MAX - 1) {
		for (; no < num_obj; no++) {
			obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
			if (IS_ERR(obj))
				goto out;

			list_add(&obj->st_link, &objects);
		}

		nc = 0;
		for_each_prime_number(num_ctx, MAX_CONTEXT_HW_ID) {
			for (; nc < num_ctx; nc++) {
				ctx = mock_context(i915, "mock");
				if (!ctx)
					goto out;

				list_move(&ctx->link, &contexts);
			}

			err = create_vmas(i915, &objects, &contexts);
			if (err)
				goto out;

			if (igt_timeout(end_time,
					"%s timed out: after %lu objects in %lu contexts\n",
					__func__, no, nc))
				goto end;
		}

		list_for_each_entry_safe(ctx, cn, &contexts, link) {
			list_del_init(&ctx->link);
			mock_context_close(ctx);
		}
	}

end:
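	/* Final pass to lookup all created contexts */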
	err = create_vmas(i915, &objects, &contexts);
out:
	list_for_each_entry_safe(ctx, cn, &contexts, link) {
		list_del_init(&ctx->link);
		mock_context_close(ctx);
	}

	list_for_each_entry_safe(obj, on, &objects, st_link)
		i915_gem_object_put(obj);
	return err;
}

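/* A single i915_vma_pin() request: the size and flags to ask for, plus a
 * predicate deciding whether the outcome (error code and placement) is
 * acceptable.
 */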
struct pin_mode {
	u64 size;
	u64 flags;
	bool (*assert)(const struct i915_vma *,
		       const struct pin_mode *mode,
		       int result);
	const char *string;
};

static bool assert_pin_valid(const struct i915_vma *vma,
			     const struct pin_mode *mode,
			     int result)
{
	if (result)
		return false;

	if (i915_vma_misplaced(vma, mode->size, 0, mode->flags))
		return false;

	return true;
}

__maybe_unused
static bool assert_pin_enospc(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -ENOSPC;
}

__maybe_unused
static bool assert_pin_einval(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -EINVAL;
}

static int igt_vma_pin1(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	const struct pin_mode modes[] = {
#define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
#define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
#define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
#define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
		VALID(0, PIN_GLOBAL),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE),

		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 4096),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 8192),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),

		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | round_down(U64_MAX, PAGE_SIZE)),

		VALID(4096, PIN_GLOBAL),
		VALID(8192, PIN_GLOBAL),
		VALID(ggtt->mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
		NOSPACE(ggtt->mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->vm.total - 4096, PIN_GLOBAL),
		VALID(ggtt->vm.total, PIN_GLOBAL),
		NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL),
		NOSPACE(round_down(U64_MAX, PAGE_SIZE), PIN_GLOBAL),
		INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (round_down(U64_MAX, PAGE_SIZE) - 4096)),

		VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),

#if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
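		/* Misusing BIAS is a programming error (it is not controllable
		 * from userspace) so when debugging is enabled, it explodes.
		 * However, the tests are still quite interesting for checking
		 * variable start, end and size.
		 */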
		NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | ggtt->mappable_end),
		NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total),
		NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
#endif
		{ },
#undef NOSPACE
#undef INVALID
#undef __INVALID
#undef VALID
	}, *m;
	struct drm_i915_gem_object *obj;
	struct i915_vma *vma;
	int err = -EINVAL;
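	/* Exercise all the weird and wonderful i915_vma_pin requests,
	 * focusing on error handling of boundary conditions.
	 */
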
	GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm));

	obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	vma = checked_vma_instance(obj, &ggtt->vm, NULL);
	if (IS_ERR(vma))
		goto out;

	for (m = modes; m->assert; m++) {
		err = i915_vma_pin(vma, m->size, 0, m->flags);
		if (!m->assert(vma, m, err)) {
			pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
			       m->assert == assert_pin_valid ? "Failed" : "Unexpectedly succeeded",
			       (int)(m - modes), m->string, m->size, m->flags,
			       err);
			if (!err)
				i915_vma_unpin(vma);
			err = -EINVAL;
			goto out;
		}

		if (!err) {
			i915_vma_unpin(vma);
			err = i915_vma_unbind(vma);
			if (err) {
				pr_err("Failed to unbind single page from GGTT, err=%d\n", err);
				goto out;
			}
		}
	}

	err = 0;
out:
	i915_gem_object_put(obj);
	return err;
}

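/* Page index of the source page for (x, y) of plane n in a rotated view;
 * rows are inverted, i.e. walked bottom-up.
 */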
static unsigned long rotated_index(const struct intel_rotation_info *r,
				   unsigned int n,
				   unsigned int x,
				   unsigned int y)
{
	return (r->plane[n].stride * (r->plane[n].height - y - 1) +
		r->plane[n].offset + x);
}

static struct scatterlist *
assert_rotated(struct drm_i915_gem_object *obj,
	       const struct intel_rotation_info *r, unsigned int n,
	       struct scatterlist *sg)
{
	unsigned int x, y;

	for (x = 0; x < r->plane[n].width; x++) {
		for (y = 0; y < r->plane[n].height; y++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}

			src_idx = rotated_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			if (sg_dma_len(sg) != PAGE_SIZE) {
				pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) != src) {
				pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			sg = sg_next(sg);
		}
	}

	return sg;
}

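/* Page index of the source page for (x, y) of plane n in a remapped
 * (row-major) view.
 */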
static unsigned long remapped_index(const struct intel_remapped_info *r,
				    unsigned int n,
				    unsigned int x,
				    unsigned int y)
{
	return (r->plane[n].stride * y +
		r->plane[n].offset + x);
}

static struct scatterlist *
assert_remapped(struct drm_i915_gem_object *obj,
		const struct intel_remapped_info *r, unsigned int n,
		struct scatterlist *sg)
{
	unsigned int x, y;
	unsigned int left = 0;
	unsigned int offset;

	for (y = 0; y < r->plane[n].height; y++) {
		for (x = 0; x < r->plane[n].width; x++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}
			if (!left) {
				offset = 0;
				left = sg_dma_len(sg);
			}

			src_idx = remapped_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			if (left < PAGE_SIZE || left & (PAGE_SIZE - 1)) {
				pr_err("Invalid sg.length, found %d, expected %lu for remapped page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) + offset != src) {
				pr_err("Invalid address for remapped page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			left -= PAGE_SIZE;
			offset += PAGE_SIZE;
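			/* An sg entry may span several pages in a remapped
			 * view; only advance once the entry is exhausted.
			 */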
			if (!left)
				sg = sg_next(sg);
		}
	}

	return sg;
}

static unsigned int rotated_size(const struct intel_remapped_plane_info *a,
				 const struct intel_remapped_plane_info *b)
{
	return a->width * a->height + b->width * b->height;
}

static int igt_vma_rotate_remap(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct i915_address_space *vm = &ggtt->vm;
	struct drm_i915_gem_object *obj;
	const struct intel_remapped_plane_info planes[] = {
		{ .width = 1, .height = 1, .stride = 1 },
		{ .width = 2, .height = 2, .stride = 2 },
		{ .width = 4, .height = 4, .stride = 4 },
		{ .width = 8, .height = 8, .stride = 8 },

		{ .width = 3, .height = 5, .stride = 3 },
		{ .width = 3, .height = 5, .stride = 4 },
		{ .width = 3, .height = 5, .stride = 5 },

		{ .width = 5, .height = 3, .stride = 5 },
		{ .width = 5, .height = 3, .stride = 7 },
		{ .width = 5, .height = 3, .stride = 9 },

		{ .width = 4, .height = 6, .stride = 6 },
		{ .width = 6, .height = 4, .stride = 6 },
		{ }
	}, *a, *b;
	enum i915_ggtt_view_type types[] = {
		I915_GGTT_VIEW_ROTATED,
		I915_GGTT_VIEW_REMAPPED,
		0,
	}, *t;
	const unsigned int max_pages = 64;
	int err = -ENOMEM;
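	/* Create VMA for many different combinations of planes and check
	 * that the page layout within the rotated VMA matches our expectations.
	 */
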
	obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (t = types; *t; t++) {
		for (a = planes; a->width; a++) {
			for (b = planes + ARRAY_SIZE(planes); b-- != planes; ) {
				struct i915_ggtt_view view;
				unsigned int n, max_offset;

				max_offset = max(a->stride * a->height,
						 b->stride * b->height);
				GEM_BUG_ON(max_offset > max_pages);
				max_offset = max_pages - max_offset;

				view.type = *t;
				view.rotated.plane[0] = *a;
				view.rotated.plane[1] = *b;

				for_each_prime_number_from(view.rotated.plane[0].offset, 0, max_offset) {
					for_each_prime_number_from(view.rotated.plane[1].offset, 0, max_offset) {
						struct scatterlist *sg;
						struct i915_vma *vma;

						vma = checked_vma_instance(obj, vm, &view);
						if (IS_ERR(vma)) {
							err = PTR_ERR(vma);
							goto out_object;
						}

						err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
						if (err) {
							pr_err("Failed to pin VMA, err=%d\n", err);
							goto out_object;
						}

						if (view.type == I915_GGTT_VIEW_ROTATED &&
						    vma->size != rotated_size(a, b) * PAGE_SIZE) {
							pr_err("VMA is wrong size, expected %lu, found %llu\n",
							       PAGE_SIZE * rotated_size(a, b), vma->size);
							err = -EINVAL;
							goto out_object;
						}

						if (view.type == I915_GGTT_VIEW_REMAPPED &&
						    vma->size > rotated_size(a, b) * PAGE_SIZE) {
							pr_err("VMA is wrong size, expected %lu, found %llu\n",
							       PAGE_SIZE * rotated_size(a, b), vma->size);
							err = -EINVAL;
							goto out_object;
						}

						if (vma->pages->nents > rotated_size(a, b)) {
							pr_err("sg table is wrong size, expected %u, found %u nents\n",
							       rotated_size(a, b), vma->pages->nents);
							err = -EINVAL;
							goto out_object;
						}

						if (vma->node.size < vma->size) {
							pr_err("VMA binding too small, expected %llu, found %llu\n",
							       vma->size, vma->node.size);
							err = -EINVAL;
							goto out_object;
						}

						if (vma->pages == obj->mm.pages) {
							pr_err("VMA using unrotated object pages!\n");
							err = -EINVAL;
							goto out_object;
						}

						sg = vma->pages->sgl;
						for (n = 0; n < ARRAY_SIZE(view.rotated.plane); n++) {
							if (view.type == I915_GGTT_VIEW_ROTATED)
								sg = assert_rotated(obj, &view.rotated, n, sg);
							else
								sg = assert_remapped(obj, &view.remapped, n, sg);
							if (IS_ERR(sg)) {
								pr_err("Inconsistent %s VMA pages for plane %d: [(%d, %d, %d, %d), (%d, %d, %d, %d)]\n",
								       view.type == I915_GGTT_VIEW_ROTATED ?
								       "rotated" : "remapped", n,
								       view.rotated.plane[0].width,
								       view.rotated.plane[0].height,
								       view.rotated.plane[0].stride,
								       view.rotated.plane[0].offset,
								       view.rotated.plane[1].width,
								       view.rotated.plane[1].height,
								       view.rotated.plane[1].stride,
								       view.rotated.plane[1].offset);
								err = -EINVAL;
								goto out_object;
							}
						}

						i915_vma_unpin(vma);
					}
				}
			}
		}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}

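/* Check that the VMA is backed by exactly the object pages starting at
 * offset, for size pages.
 */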
static bool assert_partial(struct drm_i915_gem_object *obj,
			   struct i915_vma *vma,
			   unsigned long offset,
			   unsigned long size)
{
	struct sgt_iter sgt;
	dma_addr_t dma;

	for_each_sgt_dma(dma, sgt, vma->pages) {
		dma_addr_t src;

		if (!size) {
			pr_err("Partial scattergather list too long\n");
			return false;
		}

		src = i915_gem_object_get_dma_address(obj, offset);
		if (src != dma) {
			pr_err("DMA mismatch for partial page offset %lu\n",
			       offset);
			return false;
		}

		offset++;
		size--;
	}

	return true;
}

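/* Check that a pinned VMA has the size, view type and backing pages that
 * were requested.
 */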
static bool assert_pin(struct i915_vma *vma,
		       struct i915_ggtt_view *view,
		       u64 size,
		       const char *name)
{
	bool ok = true;

	if (vma->size != size) {
		pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
		       name, size, vma->size);
		ok = false;
	}

	if (vma->node.size < vma->size) {
		pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
		       name, vma->size, vma->node.size);
		ok = false;
	}

	if (view && view->type != I915_GGTT_VIEW_NORMAL) {
		if (memcmp(&vma->ggtt_view, view, sizeof(*view))) {
			pr_err("(%s) VMA mismatch upon creation!\n",
			       name);
			ok = false;
		}

		if (vma->pages == vma->obj->mm.pages) {
			pr_err("(%s) VMA using original object pages!\n",
			       name);
			ok = false;
		}
	} else {
		if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
			pr_err("Not the normal ggtt view! Found %d\n",
			       vma->ggtt_view.type);
			ok = false;
		}

		if (vma->pages != vma->obj->mm.pages) {
			pr_err("VMA not using object pages!\n");
			ok = false;
		}
	}

	return ok;
}

static int igt_vma_partial(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct i915_address_space *vm = &ggtt->vm;
	const unsigned int npages = 1021; /* prime! */
	struct drm_i915_gem_object *obj;
	const struct phase {
		const char *name;
	} phases[] = {
		{ "create" },
		{ "lookup" },
		{ },
	}, *p;
	unsigned int sz, offset;
	struct i915_vma *vma;
	int err = -ENOMEM;
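	/* Create lots of different VMA for the object and check that they
	 * are all unique.
	 */
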
	obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (p = phases; p->name; p++) { /* exercise both create/lookup */
		unsigned int count, nvma;

		nvma = 0;
		for_each_prime_number_from(sz, 1, npages) {
			for_each_prime_number_from(offset, 0, npages - sz) {
				struct i915_ggtt_view view;

				view.type = I915_GGTT_VIEW_PARTIAL;
				view.partial.offset = offset;
				view.partial.size = sz;

				if (sz == npages)
					view.type = I915_GGTT_VIEW_NORMAL;

				vma = checked_vma_instance(obj, vm, &view);
				if (IS_ERR(vma)) {
					err = PTR_ERR(vma);
					goto out_object;
				}

				err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
				if (err)
					goto out_object;

				if (!assert_pin(vma, &view, sz * PAGE_SIZE, p->name)) {
					pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				if (!assert_partial(obj, vma, offset, sz)) {
					pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				i915_vma_unpin(vma);
				nvma++;
			}
		}

		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) Not all partial VMA were recorded on the obj->vma_list: found %u, expected %u\n",
			       p->name, count, nvma);
			err = -EINVAL;
			goto out_object;
		}
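		/* Create a mapping for the entire object, just for extra paranoia */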
		vma = checked_vma_instance(obj, vm, NULL);
		if (IS_ERR(vma)) {
			err = PTR_ERR(vma);
			goto out_object;
		}

		err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
		if (err)
			goto out_object;

		if (!assert_pin(vma, NULL, obj->base.size, p->name)) {
			pr_err("(%s) inconsistent full pin\n", p->name);
			err = -EINVAL;
			goto out_object;
		}

		i915_vma_unpin(vma);

		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) allocated an extra full vma!\n", p->name);
			err = -EINVAL;
			goto out_object;
		}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}

int i915_vma_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_create),
		SUBTEST(igt_vma_pin1),
		SUBTEST(igt_vma_rotate_remap),
		SUBTEST(igt_vma_partial),
	};
	struct drm_i915_private *i915;
	struct i915_ggtt *ggtt;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	ggtt = kmalloc(sizeof(*ggtt), GFP_KERNEL);
	if (!ggtt) {
		err = -ENOMEM;
		goto out_put;
	}
	mock_init_ggtt(i915, ggtt);

	mutex_lock(&i915->drm.struct_mutex);
	err = i915_subtests(tests, ggtt);
	mock_device_flush(i915);
	mutex_unlock(&i915->drm.struct_mutex);

	i915_gem_drain_freed_objects(i915);

	mock_fini_ggtt(ggtt);
	kfree(ggtt);
out_put:
	drm_dev_put(&i915->drm);
	return err;
}

static int igt_vma_remapped_gtt(void *arg)
{
	struct drm_i915_private *i915 = arg;
	const struct intel_remapped_plane_info planes[] = {
		{ .width = 1, .height = 1, .stride = 1 },
		{ .width = 2, .height = 2, .stride = 2 },
		{ .width = 4, .height = 4, .stride = 4 },
		{ .width = 8, .height = 8, .stride = 8 },

		{ .width = 3, .height = 5, .stride = 3 },
		{ .width = 3, .height = 5, .stride = 4 },
		{ .width = 3, .height = 5, .stride = 5 },

		{ .width = 5, .height = 3, .stride = 5 },
		{ .width = 5, .height = 3, .stride = 7 },
		{ .width = 5, .height = 3, .stride = 9 },

		{ .width = 4, .height = 6, .stride = 6 },
		{ .width = 6, .height = 4, .stride = 6 },
		{ }
	}, *p;
	enum i915_ggtt_view_type types[] = {
		I915_GGTT_VIEW_ROTATED,
		I915_GGTT_VIEW_REMAPPED,
		0,
	}, *t;
	struct drm_i915_gem_object *obj;
	intel_wakeref_t wakeref;
	int err = 0;

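	/* Write a pattern through a rotated/remapped GGTT view of the object,
	 * then read it back through the normal view to check the remapping.
	 */
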
	obj = i915_gem_object_create_internal(i915, 10 * 10 * PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	mutex_lock(&i915->drm.struct_mutex);

	wakeref = intel_runtime_pm_get(&i915->runtime_pm);

	for (t = types; *t; t++) {
		for (p = planes; p->width; p++) {
			struct i915_ggtt_view view = {
				.type = *t,
				.rotated.plane[0] = *p,
			};
			struct i915_vma *vma;
			u32 __iomem *map;
			unsigned int x, y;

			i915_gem_object_lock(obj);
			err = i915_gem_object_set_to_gtt_domain(obj, true);
			i915_gem_object_unlock(obj);
			if (err)
				goto out;

			vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE);
			if (IS_ERR(vma)) {
				err = PTR_ERR(vma);
				goto out;
			}

			GEM_BUG_ON(vma->ggtt_view.type != *t);

			map = i915_vma_pin_iomap(vma);
			i915_vma_unpin(vma);
			if (IS_ERR(map)) {
				err = PTR_ERR(map);
				goto out;
			}

			for (y = 0; y < p->height; y++) {
				for (x = 0; x < p->width; x++) {
					unsigned int offset;
					u32 val = y << 16 | x;

					if (*t == I915_GGTT_VIEW_ROTATED)
						offset = (x * p->height + y) * PAGE_SIZE;
					else
						offset = (y * p->width + x) * PAGE_SIZE;

					iowrite32(val, &map[offset / sizeof(*map)]);
				}
			}

			i915_vma_unpin_iomap(vma);

			vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE);
			if (IS_ERR(vma)) {
				err = PTR_ERR(vma);
				goto out;
			}

			GEM_BUG_ON(vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL);

			map = i915_vma_pin_iomap(vma);
			i915_vma_unpin(vma);
			if (IS_ERR(map)) {
				err = PTR_ERR(map);
				goto out;
			}

			for (y = 0; y < p->height; y++) {
				for (x = 0; x < p->width; x++) {
					unsigned int offset, src_idx;
					u32 exp = y << 16 | x;
					u32 val;

					if (*t == I915_GGTT_VIEW_ROTATED)
						src_idx = rotated_index(&view.rotated, 0, x, y);
					else
						src_idx = remapped_index(&view.remapped, 0, x, y);
					offset = src_idx * PAGE_SIZE;

					val = ioread32(&map[offset / sizeof(*map)]);
					if (val != exp) {
						pr_err("%s VMA write test failed, expected 0x%x, found 0x%x\n",
						       *t == I915_GGTT_VIEW_ROTATED ? "Rotated" : "Remapped",
						       exp, val);
						err = -EINVAL;
						i915_vma_unpin_iomap(vma);
						goto out;
					}
				}
			}
			i915_vma_unpin_iomap(vma);
		}
	}

out:
	intel_runtime_pm_put(&i915->runtime_pm, wakeref);
	mutex_unlock(&i915->drm.struct_mutex);
	i915_gem_object_put(obj);

	return err;
}

int i915_vma_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_remapped_gtt),
	};

	return i915_subtests(tests, i915);
}