1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26#include <drm/drmP.h>
27#include "virtgpu_drv.h"
28
29void virtio_gpu_gem_free_object(struct drm_gem_object *gem_obj)
30{
31 struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(gem_obj);
32
33 if (obj)
34 virtio_gpu_object_unref(&obj);
35}
36
37struct virtio_gpu_object *virtio_gpu_alloc_object(struct drm_device *dev,
38 size_t size, bool kernel,
39 bool pinned)
40{
41 struct virtio_gpu_device *vgdev = dev->dev_private;
42 struct virtio_gpu_object *obj;
43 int ret;
44
45 ret = virtio_gpu_object_create(vgdev, size, kernel, pinned, &obj);
46 if (ret)
47 return ERR_PTR(ret);
48
49 return obj;
50}
51
/*
 * Allocate a GEM object of @size bytes and create a userspace handle for it
 * on @file.  On success *obj_p points at the new GEM object and *handle_p
 * holds the handle; the handle owns the only remaining reference.
 */
int virtio_gpu_gem_create(struct drm_file *file,
			  struct drm_device *dev,
			  uint64_t size,
			  struct drm_gem_object **obj_p,
			  uint32_t *handle_p)
{
	struct virtio_gpu_object *obj;
	int ret;
	u32 handle;

	obj = virtio_gpu_alloc_object(dev, size, false, false);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	ret = drm_gem_handle_create(file, &obj->gem_base, &handle);
	if (ret) {
		/* no handle was installed: tear the GEM object back down */
		drm_gem_object_release(&obj->gem_base);
		return ret;
	}

	*obj_p = &obj->gem_base;

	/* drop reference from allocate - handle holds it now */
	drm_gem_object_unreference_unlocked(&obj->gem_base);

	*handle_p = handle;
	return 0;
}
80
81int virtio_gpu_mode_dumb_create(struct drm_file *file_priv,
82 struct drm_device *dev,
83 struct drm_mode_create_dumb *args)
84{
85 struct virtio_gpu_device *vgdev = dev->dev_private;
86 struct drm_gem_object *gobj;
87 struct virtio_gpu_object *obj;
88 int ret;
89 uint32_t pitch;
90 uint32_t resid;
91 uint32_t format;
92
93 pitch = args->width * ((args->bpp + 1) / 8);
94 args->size = pitch * args->height;
95 args->size = ALIGN(args->size, PAGE_SIZE);
96
97 ret = virtio_gpu_gem_create(file_priv, dev, args->size, &gobj,
98 &args->handle);
99 if (ret)
100 goto fail;
101
102 format = virtio_gpu_translate_format(DRM_FORMAT_XRGB8888);
103 virtio_gpu_resource_id_get(vgdev, &resid);
104 virtio_gpu_cmd_create_resource(vgdev, resid, format,
105 args->width, args->height);
106
107
108 obj = gem_to_virtio_gpu_obj(gobj);
109 ret = virtio_gpu_object_attach(vgdev, obj, resid, NULL);
110 if (ret)
111 goto fail;
112
113 obj->dumb = true;
114 args->pitch = pitch;
115 return ret;
116
117fail:
118 return ret;
119}
120
121int virtio_gpu_mode_dumb_destroy(struct drm_file *file_priv,
122 struct drm_device *dev,
123 uint32_t handle)
124{
125 return drm_gem_handle_delete(file_priv, handle);
126}
127
128int virtio_gpu_mode_dumb_mmap(struct drm_file *file_priv,
129 struct drm_device *dev,
130 uint32_t handle, uint64_t *offset_p)
131{
132 struct drm_gem_object *gobj;
133 struct virtio_gpu_object *obj;
134 BUG_ON(!offset_p);
135 gobj = drm_gem_object_lookup(file_priv, handle);
136 if (gobj == NULL)
137 return -ENOENT;
138 obj = gem_to_virtio_gpu_obj(gobj);
139 *offset_p = virtio_gpu_object_mmap_offset(obj);
140 drm_gem_object_unreference_unlocked(gobj);
141 return 0;
142}
143
144int virtio_gpu_gem_object_open(struct drm_gem_object *obj,
145 struct drm_file *file)
146{
147 struct virtio_gpu_device *vgdev = obj->dev->dev_private;
148 struct virtio_gpu_fpriv *vfpriv = file->driver_priv;
149 struct virtio_gpu_object *qobj = gem_to_virtio_gpu_obj(obj);
150 int r;
151
152 if (!vgdev->has_virgl_3d)
153 return 0;
154
155 r = virtio_gpu_object_reserve(qobj, false);
156 if (r)
157 return r;
158
159 virtio_gpu_cmd_context_attach_resource(vgdev, vfpriv->ctx_id,
160 qobj->hw_res_handle);
161 virtio_gpu_object_unreserve(qobj);
162 return 0;
163}
164
165void virtio_gpu_gem_object_close(struct drm_gem_object *obj,
166 struct drm_file *file)
167{
168 struct virtio_gpu_device *vgdev = obj->dev->dev_private;
169 struct virtio_gpu_fpriv *vfpriv = file->driver_priv;
170 struct virtio_gpu_object *qobj = gem_to_virtio_gpu_obj(obj);
171 int r;
172
173 if (!vgdev->has_virgl_3d)
174 return;
175
176 r = virtio_gpu_object_reserve(qobj, false);
177 if (r)
178 return;
179
180 virtio_gpu_cmd_context_detach_resource(vgdev, vfpriv->ctx_id,
181 qobj->hw_res_handle);
182 virtio_gpu_object_unreserve(qobj);
183}
184