1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25#ifndef __I915_GEM_OBJECT_H__
26#define __I915_GEM_OBJECT_H__
27
28#include <linux/reservation.h>
29
30#include <drm/drm_vma_manager.h>
31#include <drm/drm_gem.h>
32#include <drm/drmP.h>
33
34#include <drm/i915_drm.h>
35
36#include "i915_request.h"
37#include "i915_selftest.h"
38
39struct drm_i915_gem_object;
40
41
42
43
44
45
46
/*
 * struct i915_lut_handle tracks the fast lookup-table (LUT) entry mapping a
 * userspace handle within a GEM context back to the object.  It is linked on
 * both the object's list and the context's list so either side can tear the
 * association down.
 */
struct i915_lut_handle {
	struct list_head obj_link;	/* entry in drm_i915_gem_object.lut_list */
	struct list_head ctx_link;	/* entry in the context's handle list -- assumed; verify against ctx code */
	struct i915_gem_context *ctx;	/* context owning @handle */
	u32 handle;			/* userspace handle within @ctx */
};
53
/*
 * Backing-storage operations: each object type (shmem, stolen, userptr,
 * dma-buf, ...) supplies one of these to customise how its pages are
 * acquired and released.
 */
struct drm_i915_gem_object_ops {
	unsigned int flags;
#define I915_GEM_OBJECT_HAS_STRUCT_PAGE BIT(0)	/* backing store is made of struct pages */
#define I915_GEM_OBJECT_IS_SHRINKABLE BIT(1)	/* pages may be reaped under memory pressure */
#define I915_GEM_OBJECT_IS_PROXY BIT(2)		/* object fronts for storage owned elsewhere */

	/*
	 * Acquire/release the backing pages for this object.
	 * NOTE(review): the exact hand-off protocol (how the sg_table reaches
	 * obj->mm.pages) lives in the GEM core; confirm there before relying
	 * on ordering here.
	 */
	int (*get_pages)(struct drm_i915_gem_object *);
	void (*put_pages)(struct drm_i915_gem_object *, struct sg_table *);

	/* Optional backend-specific pwrite implementation. */
	int (*pwrite)(struct drm_i915_gem_object *,
		      const struct drm_i915_gem_pwrite *);

	/* Hook invoked when the object is exported as a dma-buf. */
	int (*dmabuf_export)(struct drm_i915_gem_object *);
	/* Final backend cleanup when the object is destroyed. */
	void (*release)(struct drm_i915_gem_object *);
};
82
struct drm_i915_gem_object {
	struct drm_gem_object base;	/* must stay first: to_intel_bo() relies on offset 0 */

	const struct drm_i915_gem_object_ops *ops;	/* backing-store vfuncs and flags */

	/* All GPU address-space bindings (VMAs) of this object. */
	struct list_head vma_list;

	/* Same VMAs, indexed for fast lookup. */
	struct rb_root vma_tree;

	/* i915_lut_handle entries referencing this object via their obj_link. */
	struct list_head lut_list;

	/* Backing storage carved out of stolen memory, if any. */
	struct drm_mm_node *stolen;

	/* Deferred-free bookkeeping: RCU grace period, then free-list node. */
	union {
		struct rcu_head rcu;
		struct llist_node freed;
	};

	/*
	 * Userspace-fault tracking; non-zero count presumably keeps the
	 * object on @userfault_link -- verify against the fault handler.
	 */
	unsigned int userfault_count;
	struct list_head userfault_link;

	struct list_head batch_pool_link;	/* batch-buffer pool membership */
	I915_SELFTEST_DECLARE(struct list_head st_link);	/* selftest-only tracking */

	/* Bitfield of I915_BO_* bit numbers, see the helpers below. */
	unsigned long flags;
/*
 * Bit number in @flags: set while the object holds a self-reference for the
 * duration of GPU activity (see i915_gem_object_set_active_reference()).
 */
#define I915_BO_ACTIVE_REF 0

	/* GPU caching mode; 3 bits -- enum value set via set_cache_coherency(). */
	unsigned int cache_level:3;
	/* Mask of the COHERENT_FOR_* bits below. */
	unsigned int cache_coherent:2;
#define I915_BO_CACHE_COHERENT_FOR_READ BIT(0)
#define I915_BO_CACHE_COHERENT_FOR_WRITE BIT(1)
	/* CPU caches may hold dirty lines that still need flushing. */
	unsigned int cache_dirty:1;

	/* GEM domain tracking (I915_GEM_DOMAIN_* values). */
	u16 read_domains;
	u16 write_domain;

	/* Frontbuffer (display) tracking state. */
	atomic_t frontbuffer_bits;
	unsigned int frontbuffer_ggtt_origin;
	struct i915_gem_active frontbuffer_write;

	/*
	 * Tiling mode packed into the low bits, stride in the remainder;
	 * decode with i915_gem_object_get_tiling()/get_stride().
	 */
	unsigned int tiling_and_stride;
#define FENCE_MINIMUM_STRIDE 128
#define TILING_MASK (FENCE_MINIMUM_STRIDE-1)
#define STRIDE_MASK (~TILING_MASK)

	/* Number of VMAs currently bound into an address space. */
	unsigned int bind_count;
	/* Number of outstanding GPU requests using this object. */
	unsigned int active_count;
	/* Pin count for global/display usage -- assumed; verify callers. */
	unsigned int pin_global;

	struct {
		struct mutex lock;		/* protects the fields below */
		atomic_t pages_pin_count;	/* >0 while @pages must not be released */

		struct sg_table *pages;		/* backing pages as a scatterlist */
		void *mapping;			/* cached kernel mapping of @pages */

		/* Page sizes in play at the different levels of the mapping. */
		struct i915_page_sizes {
			/* Page sizes of the physical backing store. */
			unsigned int phys;

			/* Largest contiguous chunk sizes found in @pages. */
			unsigned int sg;

			/* Page sizes actually used when binding into the GTT. */
			unsigned int gtt;
		} page_sizes;

		I915_SELFTEST_DECLARE(unsigned int page_mask);

		/* Cached iterator state for page lookups within @pages. */
		struct i915_gem_object_page_iter {
			struct scatterlist *sg_pos;	/* last scatterlist element visited */
			unsigned int sg_idx;		/* page index corresponding to @sg_pos */

			struct radix_tree_root radix;	/* page index -> sg element cache */
			struct mutex lock;		/* serialises iterator users */
		} get_page;

		/* Shrinker/eviction list membership. */
		struct list_head link;

		/* Userspace madvise state (may the pages be reaped?). */
		unsigned int madv:2;

		/* Pages have been written since last pushed to backing store. */
		bool dirty:1;

		/* Pages pinned for a platform quirk -- assumed; verify. */
		bool quirked:1;
	} mm;

	/*
	 * Reservation object governing fences for GPU access; presumably
	 * points at @__builtin_resv unless shared (e.g. dma-buf import).
	 */
	struct reservation_object *resv;

	/* Non-zero while any framebuffer wraps this object. */
	unsigned int framebuffer_references;

	/* Per-page bit-17 record for swizzle fixups on affected platforms. */
	unsigned long *bit_17;

	/* Backend-specific state; the valid member depends on @ops. */
	union {
		struct i915_gem_userptr {
			uintptr_t ptr;	/* userspace address of the backing memory */

			struct i915_mm_struct *mm;
			struct i915_mmu_object *mmu_object;
			struct work_struct *work;
		} userptr;

		unsigned long scratch;

		void *gvt_info;
	};

	/* CPU-contiguous allocation for objects needing physical addressing. */
	struct drm_dma_handle *phys_handle;

	/* Embedded reservation object, the default target of @resv. */
	struct reservation_object __builtin_resv;
};
286
/* Convert a base DRM GEM object pointer into our derived i915 object. */
static inline struct drm_i915_gem_object *
to_intel_bo(struct drm_gem_object *gem)
{
	/*
	 * base must sit at offset 0 so that to_intel_bo(NULL) == NULL and
	 * the two pointers are freely interchangeable.
	 */
	BUILD_BUG_ON(offsetof(struct drm_i915_gem_object, base));

	return container_of(gem, struct drm_i915_gem_object, base);
}
295
296
297
298
299
300
301
302
303
304
305
306
/*
 * Look up an object by userspace handle WITHOUT taking a reference.
 *
 * The caller must hold the RCU read lock (checked via lockdep below) and
 * must not use the returned pointer after dropping it: with no reference
 * held, the object may be freed as soon as the read-side section ends.
 */
static inline struct drm_i915_gem_object *
i915_gem_object_lookup_rcu(struct drm_file *file, u32 handle)
{
#ifdef CONFIG_LOCKDEP
	WARN_ON(debug_locks && !lock_is_held(&rcu_lock_map));
#endif
	return idr_find(&file->object_idr, handle);
}
315
316static inline struct drm_i915_gem_object *
317i915_gem_object_lookup(struct drm_file *file, u32 handle)
318{
319 struct drm_i915_gem_object *obj;
320
321 rcu_read_lock();
322 obj = i915_gem_object_lookup_rcu(file, handle);
323 if (obj && !kref_get_unless_zero(&obj->base.refcount))
324 obj = NULL;
325 rcu_read_unlock();
326
327 return obj;
328}
329
/*
 * Poison the generic DRM lookup helper: i915 must use the RCU-safe
 * variants above instead.
 */
__deprecated
extern struct drm_gem_object *
drm_gem_object_lookup(struct drm_file *file, u32 handle);
333
/* Acquire a reference on behalf of the caller; returns @obj for chaining. */
__attribute__((nonnull))
static inline struct drm_i915_gem_object *
i915_gem_object_get(struct drm_i915_gem_object *obj)
{
	drm_gem_object_get(&obj->base);
	return obj;
}
341
/*
 * Release a reference; @obj must not be used afterwards.
 * NOTE(review): uses the __ (unlocked) DRM put variant -- the caller's
 * locking context must permit that; verify against drm_gem.h.
 */
__attribute__((nonnull))
static inline void
i915_gem_object_put(struct drm_i915_gem_object *obj)
{
	__drm_gem_object_put(&obj->base);
}
348
/* Take the object's reservation lock (no ww acquire context). */
static inline void i915_gem_object_lock(struct drm_i915_gem_object *obj)
{
	reservation_object_lock(obj->resv, NULL);
}
353
/* Release the object's reservation lock taken by i915_gem_object_lock(). */
static inline void i915_gem_object_unlock(struct drm_i915_gem_object *obj)
{
	reservation_object_unlock(obj->resv);
}
358
/* Mark the object's mmap node read-only; affects future CPU mmaps. */
static inline void
i915_gem_object_set_readonly(struct drm_i915_gem_object *obj)
{
	obj->base.vma_node.readonly = true;
}
364
/* Report whether the object's mmap node has been marked read-only. */
static inline bool
i915_gem_object_is_readonly(const struct drm_i915_gem_object *obj)
{
	return obj->base.vma_node.readonly;
}
370
371static inline bool
372i915_gem_object_has_struct_page(const struct drm_i915_gem_object *obj)
373{
374 return obj->ops->flags & I915_GEM_OBJECT_HAS_STRUCT_PAGE;
375}
376
377static inline bool
378i915_gem_object_is_shrinkable(const struct drm_i915_gem_object *obj)
379{
380 return obj->ops->flags & I915_GEM_OBJECT_IS_SHRINKABLE;
381}
382
383static inline bool
384i915_gem_object_is_proxy(const struct drm_i915_gem_object *obj)
385{
386 return obj->ops->flags & I915_GEM_OBJECT_IS_PROXY;
387}
388
389static inline bool
390i915_gem_object_is_active(const struct drm_i915_gem_object *obj)
391{
392 return obj->active_count;
393}
394
/* Does the object currently hold a self-reference for GPU activity? */
static inline bool
i915_gem_object_has_active_reference(const struct drm_i915_gem_object *obj)
{
	return test_bit(I915_BO_ACTIVE_REF, &obj->flags);
}
400
/*
 * Mark the object as holding a self-reference while active.  The non-atomic
 * __set_bit is safe only because all writers serialise on struct_mutex,
 * which the lockdep assertion enforces.
 */
static inline void
i915_gem_object_set_active_reference(struct drm_i915_gem_object *obj)
{
	lockdep_assert_held(&obj->base.dev->struct_mutex);
	__set_bit(I915_BO_ACTIVE_REF, &obj->flags);
}
407
/*
 * Clear the active self-reference flag.  Non-atomic __clear_bit is safe
 * under struct_mutex, matching the setter above.
 */
static inline void
i915_gem_object_clear_active_reference(struct drm_i915_gem_object *obj)
{
	lockdep_assert_held(&obj->base.dev->struct_mutex);
	__clear_bit(I915_BO_ACTIVE_REF, &obj->flags);
}
414
/* NOTE(review): presumably drops the final reference unless the object is still active -- see the definition in the GEM core. */
void __i915_gem_object_release_unless_active(struct drm_i915_gem_object *obj);
416
/* True while at least one framebuffer wraps this object (lockless read). */
static inline bool
i915_gem_object_is_framebuffer(const struct drm_i915_gem_object *obj)
{
	return READ_ONCE(obj->framebuffer_references);
}
422
/* Extract the tiling mode (I915_TILING_*) from the packed field. */
static inline unsigned int
i915_gem_object_get_tiling(const struct drm_i915_gem_object *obj)
{
	return obj->tiling_and_stride & TILING_MASK;
}
428
429static inline bool
430i915_gem_object_is_tiled(const struct drm_i915_gem_object *obj)
431{
432 return i915_gem_object_get_tiling(obj) != I915_TILING_NONE;
433}
434
/* Extract the stride in bytes from the packed tiling_and_stride field. */
static inline unsigned int
i915_gem_object_get_stride(const struct drm_i915_gem_object *obj)
{
	return obj->tiling_and_stride & STRIDE_MASK;
}
440
441static inline unsigned int
442i915_gem_tile_height(unsigned int tiling)
443{
444 GEM_BUG_ON(!tiling);
445 return tiling == I915_TILING_Y ? 32 : 8;
446}
447
/* Tile height in rows for this object's current tiling mode. */
static inline unsigned int
i915_gem_object_get_tile_height(const struct drm_i915_gem_object *obj)
{
	return i915_gem_tile_height(i915_gem_object_get_tiling(obj));
}
453
/* Bytes covered by one full row of tiles: stride times tile height. */
static inline unsigned int
i915_gem_object_get_tile_row_size(const struct drm_i915_gem_object *obj)
{
	const unsigned int stride = i915_gem_object_get_stride(obj);
	const unsigned int rows = i915_gem_object_get_tile_height(obj);

	return stride * rows;
}
460
/* Update the packed tiling/stride state; constraints and side effects are defined at the out-of-line implementation. */
int i915_gem_object_set_tiling(struct drm_i915_gem_object *obj,
			       unsigned int tiling, unsigned int stride);
463
464static inline struct intel_engine_cs *
465i915_gem_object_last_write_engine(struct drm_i915_gem_object *obj)
466{
467 struct intel_engine_cs *engine = NULL;
468 struct dma_fence *fence;
469
470 rcu_read_lock();
471 fence = reservation_object_get_excl_rcu(obj->resv);
472 rcu_read_unlock();
473
474 if (fence && dma_fence_is_i915(fence) && !dma_fence_is_signaled(fence))
475 engine = to_request(fence)->engine;
476 dma_fence_put(fence);
477
478 return engine;
479}
480
/* Out-of-line helpers: set the caching mode (updating the cache_* fields), and flush writes if the object is on the display path. */
void i915_gem_object_set_cache_coherency(struct drm_i915_gem_object *obj,
					 unsigned int cache_level);
void i915_gem_object_flush_if_display(struct drm_i915_gem_object *obj);
484
485#endif
486
487