1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25#ifndef __I915_VMA_H__
26#define __I915_VMA_H__
27
28#include <linux/io-mapping.h>
29#include <linux/rbtree.h>
30
31#include <drm/drm_mm.h>
32
33#include "i915_gem_gtt.h"
34#include "i915_gem_fence_reg.h"
35#include "i915_gem_object.h"
36
37#include "i915_request.h"
38
39enum i915_cache_level;
40
41
42
43
44
45
46
47
48
/*
 * A VMA tracks a GEM object ("obj") bound into an address space ("vm").
 * One object may have many vmas (one per vm, and per GGTT view), but each
 * vma belongs to exactly one (obj, vm, view) triple — see i915_vma_compare().
 */
struct i915_vma {
	struct drm_mm_node node;		/* our slot in the vm's drm_mm */
	struct drm_i915_gem_object *obj;	/* backing object */
	struct i915_address_space *vm;		/* address space we belong to */
	const struct i915_vma_ops *ops;
	struct drm_i915_fence_reg *fence;	/* fence register, if pinned */
	struct reservation_object *resv;
	struct sg_table *pages;			/* backing pages for this view */
	void __iomem *iomap;			/* see i915_vma_pin_iomap() */
	void *private;				/* owned by creator */
	u64 size;
	u64 display_alignment;
	struct i915_page_sizes page_sizes;

	/* Size/alignment required for a fenceable placement of this vma */
	u32 fence_size;
	u32 fence_alignment;

	/*
	 * NOTE(review): presumably the number of open userspace references
	 * to this vma; paired with i915_vma_close()/i915_vma_reopen()
	 * declared below — confirm in i915_vma.c.
	 */
	unsigned int open_count;

	/*
	 * The low bits of @flags hold the pin count (see I915_VMA_PIN_MASK
	 * and i915_vma_pin_count()); the remaining bits are single-bit
	 * status flags defined below. Updates are non-atomic (plain ++/|=),
	 * so callers are expected to provide their own serialisation.
	 */
	unsigned long flags;
	/*
	 * How many users have pinned this object in GTT space.
	 *
	 * 4 bits of pin count; an increment out of the mask sets
	 * I915_VMA_PIN_OVERFLOW, letting i915_vma_pin() detect both
	 * saturation and a missing bind with one test against
	 * I915_VMA_BIND_MASK.
	 */
#define I915_VMA_PIN_MASK 0xf
#define I915_VMA_PIN_OVERFLOW	BIT(5)

	/* Which address spaces this vma is bound into */
#define I915_VMA_GLOBAL_BIND	BIT(6)
#define I915_VMA_LOCAL_BIND	BIT(7)
#define I915_VMA_BIND_MASK (I915_VMA_GLOBAL_BIND | I915_VMA_LOCAL_BIND | I915_VMA_PIN_OVERFLOW)

	/* Status bits, queried via the i915_vma_is_*()/has_*() helpers */
#define I915_VMA_GGTT		BIT(8)
#define I915_VMA_CAN_FENCE	BIT(9)
#define I915_VMA_CLOSED		BIT(10)
#define I915_VMA_USERFAULT_BIT	11
#define I915_VMA_USERFAULT	BIT(I915_VMA_USERFAULT_BIT)
#define I915_VMA_GGTT_WRITE	BIT(12)

	/* Activity tracking: requests still using this vma */
	unsigned int active_count;
	struct rb_root active;
	struct i915_gem_active last_active;
	struct i915_gem_active last_fence;

	/*
	 * The GGTT view this vma was instantiated with (GGTT vmas only);
	 * part of the lookup key compared by i915_vma_compare().
	 */
	struct i915_ggtt_view ggtt_view;

	/* Link in the vm's list of vmas */
	struct list_head vm_link;

	/* Links in the object's tracking structures for its vmas */
	struct list_head obj_link;
	struct rb_node obj_node;
	struct hlist_node obj_hash;

	/* Transient links used during execbuf */
	struct list_head exec_link;
	struct list_head reloc_link;

	/* Transient link used while evicting */
	struct list_head evict_link;

	struct list_head closed_link;

	/*
	 * Transient execbuf lookup state: @exec_flags points at the
	 * execbuf's per-object flags, @exec_handle is the key used for the
	 * hashtable lookup via @exec_node.
	 */
	unsigned int *exec_flags;
	struct hlist_node exec_node;
	u32 exec_handle;
};
135
/*
 * Look up the vma for (@obj, @vm, @view). @view may be NULL, meaning the
 * normal view; see i915_vma_compare() for the matching rules.
 * NOTE(review): presumably creates the vma on first lookup — confirm in
 * i915_vma.c.
 */
struct i915_vma *
i915_vma_instance(struct drm_i915_gem_object *obj,
		  struct i915_address_space *vm,
		  const struct i915_ggtt_view *view);

/*
 * Unpin and release *@p_vma. @flags accepts I915_VMA_RELEASE_MAP
 * (NOTE(review): presumably also drops the cached mapping — confirm).
 */
void i915_vma_unpin_and_release(struct i915_vma **p_vma, unsigned int flags);
#define I915_VMA_RELEASE_MAP BIT(0)
143
144static inline bool i915_vma_is_active(struct i915_vma *vma)
145{
146 return vma->active_count;
147}
148
/*
 * NOTE(review): presumably ties the vma's activity tracking (vma->active)
 * to request @rq so the vma is kept busy until @rq completes; @flags as
 * used by execbuf. Return value must be checked.
 */
int __must_check i915_vma_move_to_active(struct i915_vma *vma,
					 struct i915_request *rq,
					 unsigned int flags);
152
153static inline bool i915_vma_is_ggtt(const struct i915_vma *vma)
154{
155 return vma->flags & I915_VMA_GGTT;
156}
157
158static inline bool i915_vma_has_ggtt_write(const struct i915_vma *vma)
159{
160 return vma->flags & I915_VMA_GGTT_WRITE;
161}
162
/* Record a write through the GGTT mapping; only valid on GGTT vmas. */
static inline void i915_vma_set_ggtt_write(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_ggtt(vma));
	vma->flags |= I915_VMA_GGTT_WRITE;
}

/* Clear the GGTT-write tracking bit without flushing. */
static inline void i915_vma_unset_ggtt_write(struct i915_vma *vma)
{
	vma->flags &= ~I915_VMA_GGTT_WRITE;
}

/* Flush writes tracked via I915_VMA_GGTT_WRITE (see i915_vma.c). */
void i915_vma_flush_writes(struct i915_vma *vma);
175
176static inline bool i915_vma_is_map_and_fenceable(const struct i915_vma *vma)
177{
178 return vma->flags & I915_VMA_CAN_FENCE;
179}
180
181static inline bool i915_vma_is_closed(const struct i915_vma *vma)
182{
183 return vma->flags & I915_VMA_CLOSED;
184}
185
/*
 * Set the userfault tracking bit, returning its previous state (true if
 * it was already set). Non-atomic (__test_and_set_bit). The assertion
 * requires the vma to be map-and-fenceable; NOTE(review): presumably set
 * on a userspace mmap fault and paired with i915_vma_revoke_mmap().
 */
static inline bool i915_vma_set_userfault(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma));
	return __test_and_set_bit(I915_VMA_USERFAULT_BIT, &vma->flags);
}
191
192static inline void i915_vma_unset_userfault(struct i915_vma *vma)
193{
194 return __clear_bit(I915_VMA_USERFAULT_BIT, &vma->flags);
195}
196
197static inline bool i915_vma_has_userfault(const struct i915_vma *vma)
198{
199 return test_bit(I915_VMA_USERFAULT_BIT, &vma->flags);
200}
201
/*
 * Return the GGTT address of the vma as a u32. Only valid on a bound
 * GGTT vma; the upper_32_bits() assertions prove the whole node lies
 * below 4GiB, so the truncation to 32 bits is lossless.
 */
static inline u32 i915_ggtt_offset(const struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_ggtt(vma));
	GEM_BUG_ON(!vma->node.allocated);
	GEM_BUG_ON(upper_32_bits(vma->node.start));
	GEM_BUG_ON(upper_32_bits(vma->node.start + vma->node.size - 1));
	return lower_32_bits(vma->node.start);
}
210
211static inline u32 i915_ggtt_pin_bias(struct i915_vma *vma)
212{
213 return i915_vm_to_ggtt(vma->vm)->pin_bias;
214}
215
/*
 * Acquire a reference on the vma's backing object and return the vma.
 * NOTE(review): vma lifetime presumably piggybacks on the object's
 * refcount — there is no refcount on the vma itself here.
 */
static inline struct i915_vma *i915_vma_get(struct i915_vma *vma)
{
	i915_gem_object_get(vma->obj);
	return vma;
}

/* Drop the object reference taken by i915_vma_get(). */
static inline void i915_vma_put(struct i915_vma *vma)
{
	i915_gem_object_put(vma->obj);
}
226
/*
 * Signed byte difference between two pointers.
 *
 * Cast to const char * so the subtraction is well-defined ISO C:
 * arithmetic on void * is a GNU extension (sizeof(void) == 1), not
 * standard C.
 */
static __always_inline ptrdiff_t ptrdiff(const void *a, const void *b)
{
	return (const char *)a - (const char *)b;
}
231
/*
 * Three-way comparison of @vma against the lookup key (@vm, @view), used
 * to order vmas in the object's rbtree. Returns <0, 0 or >0.
 */
static inline long
i915_vma_compare(struct i915_vma *vma,
		 struct i915_address_space *vm,
		 const struct i915_ggtt_view *view)
{
	ptrdiff_t cmp;

	/* Only GGTT vmas may carry a non-default view. */
	GEM_BUG_ON(view && !i915_is_ggtt(vm));

	/* Order first by address space pointer... */
	cmp = ptrdiff(vma->vm, vm);
	if (cmp)
		return cmp;

	/*
	 * ...then by view type. A NULL @view means the NORMAL view; since
	 * I915_GGTT_VIEW_NORMAL == 0, vma->ggtt_view.type is directly the
	 * correct comparison result in that case.
	 */
	BUILD_BUG_ON(I915_GGTT_VIEW_NORMAL != 0);
	cmp = vma->ggtt_view.type;
	if (!view)
		return cmp;

	cmp -= view->type;
	if (cmp)
		return cmp;

	assert_i915_gem_gtt_types();

	/*
	 * Same type: compare the view payloads. The view type value doubles
	 * as the number of payload bytes to memcmp (0 for NORMAL), which
	 * requires the partial and rotated members to share the same offset
	 * and the type values to be ordered by payload size — exactly what
	 * the BUILD_BUG_ONs below assert.
	 */
	BUILD_BUG_ON(I915_GGTT_VIEW_NORMAL >= I915_GGTT_VIEW_PARTIAL);
	BUILD_BUG_ON(I915_GGTT_VIEW_PARTIAL >= I915_GGTT_VIEW_ROTATED);
	BUILD_BUG_ON(offsetof(typeof(*view), rotated) !=
		     offsetof(typeof(*view), partial));
	return memcmp(&vma->ggtt_view.partial, &view->partial, view->type);
}
272
/* Bind/unbind and placement management; implemented in i915_vma.c. */
int i915_vma_bind(struct i915_vma *vma, enum i915_cache_level cache_level,
		  u32 flags);
bool i915_gem_valid_gtt_space(struct i915_vma *vma, unsigned long cache_level);
bool i915_vma_misplaced(const struct i915_vma *vma,
			u64 size, u64 alignment, u64 flags);
void __i915_vma_set_map_and_fenceable(struct i915_vma *vma);
void i915_vma_revoke_mmap(struct i915_vma *vma);
int __must_check i915_vma_unbind(struct i915_vma *vma);
void i915_vma_unlink_ctx(struct i915_vma *vma);
void i915_vma_close(struct i915_vma *vma);
void i915_vma_reopen(struct i915_vma *vma);
void i915_vma_destroy(struct i915_vma *vma);

/* Slow path for i915_vma_pin(): performs the actual bind/placement. */
int __i915_vma_do_pin(struct i915_vma *vma,
		      u64 size, u64 alignment, u64 flags);

/*
 * Pin the vma, binding it into the address space first if required.
 * Returns 0 on success; the pin must later be dropped with
 * i915_vma_unpin().
 */
static inline int __must_check
i915_vma_pin(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
{
	/* The PIN_* request flags share the bit layout of vma->flags. */
	BUILD_BUG_ON(PIN_MBZ != I915_VMA_PIN_OVERFLOW);
	BUILD_BUG_ON(PIN_GLOBAL != I915_VMA_GLOBAL_BIND);
	BUILD_BUG_ON(PIN_USER != I915_VMA_LOCAL_BIND);

	/*
	 * Fast path: bump the pin count (low bits of flags) and, with a
	 * single mask, verify both that the count did not overflow into
	 * I915_VMA_PIN_OVERFLOW and that every requested bind bit
	 * (GLOBAL/LOCAL) is already set. Any mismatch — unbound, wrong
	 * binding, or overflow — falls through to the slow path.
	 */
	if (likely(((++vma->flags ^ flags) & I915_VMA_BIND_MASK) == 0)) {
		GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
		GEM_BUG_ON(i915_vma_misplaced(vma, size, alignment, flags));
		return 0;
	}

	return __i915_vma_do_pin(vma, size, alignment, flags);
}
306
307static inline int i915_vma_pin_count(const struct i915_vma *vma)
308{
309 return vma->flags & I915_VMA_PIN_MASK;
310}
311
312static inline bool i915_vma_is_pinned(const struct i915_vma *vma)
313{
314 return i915_vma_pin_count(vma);
315}
316
/*
 * Unchecked pin-count increment, for callers that know the vma is
 * already bound. The overflow assertion must follow the increment so a
 * carry into I915_VMA_PIN_OVERFLOW is observed.
 */
static inline void __i915_vma_pin(struct i915_vma *vma)
{
	vma->flags++;
	GEM_BUG_ON(vma->flags & I915_VMA_PIN_OVERFLOW);
}

/* Unchecked pin-count decrement; pairs with __i915_vma_pin(). */
static inline void __i915_vma_unpin(struct i915_vma *vma)
{
	vma->flags--;
}

/* Checked unpin: the vma must currently be pinned and hold a node. */
static inline void i915_vma_unpin(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_pinned(vma));
	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	__i915_vma_unpin(vma);
}
334
335static inline bool i915_vma_is_bound(const struct i915_vma *vma,
336 unsigned int where)
337{
338 return vma->flags & where;
339}
340
341
342
343
344
345
346
347
348
349
350
351
352
353
/*
 * Map the vma for I/O access and pin it. Returns a __iomem pointer
 * (NOTE(review): presumably cached in vma->iomap — confirm in i915_vma.c)
 * or an IO_ERR_PTR-wrapped error; release with i915_vma_unpin_iomap().
 */
void __iomem *i915_vma_pin_iomap(struct i915_vma *vma);
/* ERR_PTR with the __iomem address-space annotation applied. */
#define IO_ERR_PTR(x) ((void __iomem *)ERR_PTR(x))
356
357
358
359
360
361
362
363
364
365
/* Release a mapping obtained with i915_vma_pin_iomap(). */
void i915_vma_unpin_iomap(struct i915_vma *vma);

/* First backing page of the vma; vma->pages must already be set. */
static inline struct page *i915_vma_first_page(struct i915_vma *vma)
{
	GEM_BUG_ON(!vma->pages);
	return sg_page(vma->pages->sgl);
}
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
/* Acquire/release a fence register for the vma; see i915_gem_fence_reg.h. */
int i915_vma_pin_fence(struct i915_vma *vma);
int __must_check i915_vma_put_fence(struct i915_vma *vma);

/*
 * Drop one fence pin. Caller must know a fence is attached
 * (vma->fence != NULL); use i915_vma_unpin_fence() when unsure.
 */
static inline void __i915_vma_unpin_fence(struct i915_vma *vma)
{
	GEM_BUG_ON(vma->fence->pin_count <= 0);
	vma->fence->pin_count--;
}
397
398
399
400
401
402
403
404
405
406static inline void
407i915_vma_unpin_fence(struct i915_vma *vma)
408{
409
410 if (vma->fence)
411 __i915_vma_unpin_fence(vma);
412}
413
/*
 * NOTE(review): presumably reaps closed vmas (closed_link) when the GPU
 * parks — confirm in i915_vma.c.
 */
void i915_vma_parked(struct drm_i915_private *i915);

/*
 * for_each_until - break out of a list iteration when @cond is true.
 * Expands to "if (cond) break; else" so the loop body that follows
 * attaches to the dangling else, keeping normal for_each usage (and a
 * trailing semicolon) syntactically intact.
 */
#define for_each_until(cond) if (cond) break; else

/*
 * for_each_ggtt_vma - iterate over the GGTT vmas of @OBJ.
 * Walks obj->vma_list but stops at the first non-GGTT vma, so it assumes
 * GGTT vmas are kept at the head of the list (TODO confirm against
 * i915_vma_instance()).
 */
#define for_each_ggtt_vma(V, OBJ) \
	list_for_each_entry(V, &(OBJ)->vma_list, obj_link)		\
		for_each_until(!i915_vma_is_ggtt(V))
430
431#endif
432