#include "nouveau_drm.h"
#include "nouveau_dma.h"
#include "nouveau_fence.h"

#include "nv50_display.h"

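/* Offset of the given CRTC's display semaphore within this channel's
 * virtual address space, used to synchronise page flips against the
 * channel's fences.
 */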
u64
nv84_fence_crtc(struct nouveau_channel *chan, int crtc)
{
	struct nv84_fence_chan *fctx = chan->fence;
	return fctx->dispc_vma[crtc].offset;
}

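/* Emit a semaphore release: once prior work on the channel has completed,
 * the GPU writes the 32-bit sequence value to the given virtual address.
 */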
static int
nv84_fence_emit32(struct nouveau_channel *chan, u64 virtual, u32 sequence)
{
	int ret = RING_SPACE(chan, 8);
	if (ret == 0) {
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
		OUT_RING (chan, chan->vram.handle);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 5);
		OUT_RING (chan, upper_32_bits(virtual));
		OUT_RING (chan, lower_32_bits(virtual));
		OUT_RING (chan, sequence);
		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
		OUT_RING (chan, 0x00000000);
		FIRE_RING (chan);
	}
	return ret;
}

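/* Emit a semaphore acquire: the channel stalls until the value at the
 * given virtual address is greater than or equal to the sequence number.
 */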
static int
nv84_fence_sync32(struct nouveau_channel *chan, u64 virtual, u32 sequence)
{
	int ret = RING_SPACE(chan, 7);
	if (ret == 0) {
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
		OUT_RING (chan, chan->vram.handle);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING (chan, upper_32_bits(virtual));
		OUT_RING (chan, lower_32_bits(virtual));
		OUT_RING (chan, sequence);
		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_GEQUAL);
		FIRE_RING (chan);
	}
	return ret;
}

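/* Each channel owns a 16-byte slot in the shared fence buffer; write the
 * fence's sequence number there, via the VRAM or GART (sysmem) mapping.
 */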
static int
nv84_fence_emit(struct nouveau_fence *fence)
{
	struct nouveau_channel *chan = fence->channel;
	struct nv84_fence_chan *fctx = chan->fence;
	u64 addr = chan->chid * 16;

	if (fence->sysmem)
		addr += fctx->vma_gart.offset;
	else
		addr += fctx->vma.offset;

	return fctx->base.emit32(chan, addr, fence->base.seqno);
}

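/* Make the waiting channel stall until the emitting channel's fence slot
 * reaches the fence's sequence number.
 */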
static int
nv84_fence_sync(struct nouveau_fence *fence,
		struct nouveau_channel *prev, struct nouveau_channel *chan)
{
	struct nv84_fence_chan *fctx = chan->fence;
	u64 addr = prev->chid * 16;

	if (fence->sysmem)
		addr += fctx->vma_gart.offset;
	else
		addr += fctx->vma.offset;

	return fctx->base.sync32(chan, addr, fence->base.seqno);
}

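/* Read back the last sequence number signalled by this channel. */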
static u32
nv84_fence_read(struct nouveau_channel *chan)
{
	struct nv84_fence_priv *priv = chan->drm->fence;
	return nouveau_bo_rd32(priv->bo, chan->chid * 16/4);
}

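/* Per-channel teardown: drop the display semaphore and fence buffer
 * mappings, preserving the channel's final sequence value in the
 * shared buffer.
 */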
static void
nv84_fence_context_del(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->drm->dev;
	struct nv84_fence_priv *priv = chan->drm->fence;
	struct nv84_fence_chan *fctx = chan->fence;
	int i;

	for (i = 0; i < dev->mode_config.num_crtc; i++) {
		struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i);
		nouveau_bo_vma_del(bo, &fctx->dispc_vma[i]);
	}

	nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence);
	nouveau_bo_vma_del(priv->bo, &fctx->vma_gart);
	nouveau_bo_vma_del(priv->bo, &fctx->vma);
	nouveau_fence_context_del(&fctx->base);
	chan->fence = NULL;
	nouveau_fence_context_free(&fctx->base);
}

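/* Per-channel setup: allocate fence state, hook up the emit/sync/read
 * methods, and map the shared fence buffers and each CRTC's display
 * semaphore into the channel's VM.
 */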
int
nv84_fence_context_new(struct nouveau_channel *chan)
{
	struct nouveau_cli *cli = (void *)nvif_client(&chan->device->base);
	struct nv84_fence_priv *priv = chan->drm->fence;
	struct nv84_fence_chan *fctx;
	int ret, i;

	fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
	if (!fctx)
		return -ENOMEM;

	nouveau_fence_context_new(chan, &fctx->base);
	fctx->base.emit = nv84_fence_emit;
	fctx->base.sync = nv84_fence_sync;
	fctx->base.read = nv84_fence_read;
	fctx->base.emit32 = nv84_fence_emit32;
	fctx->base.sync32 = nv84_fence_sync32;
	fctx->base.sequence = nv84_fence_read(chan);

	ret = nouveau_bo_vma_add(priv->bo, cli->vm, &fctx->vma);
	if (ret == 0) {
		ret = nouveau_bo_vma_add(priv->bo_gart, cli->vm,
					 &fctx->vma_gart);
	}

	for (i = 0; !ret && i < chan->drm->dev->mode_config.num_crtc; i++) {
		struct nouveau_bo *bo = nv50_display_crtc_sema(chan->drm->dev, i);
		ret = nouveau_bo_vma_add(bo, cli->vm, &fctx->dispc_vma[i]);
	}

	if (ret)
		nv84_fence_context_del(chan);
	return ret;
}

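/* Save every channel's current sequence value before suspend. */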
static bool
nv84_fence_suspend(struct nouveau_drm *drm)
{
	struct nv84_fence_priv *priv = drm->fence;
	int i;

	priv->suspend = vmalloc(priv->base.contexts * sizeof(u32));
	if (priv->suspend) {
		for (i = 0; i < priv->base.contexts; i++)
			priv->suspend[i] = nouveau_bo_rd32(priv->bo, i*4);
	}

	return priv->suspend != NULL;
}

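/* Restore the sequence values saved by nv84_fence_suspend(). */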
static void
nv84_fence_resume(struct nouveau_drm *drm)
{
	struct nv84_fence_priv *priv = drm->fence;
	int i;

	if (priv->suspend) {
		for (i = 0; i < priv->base.contexts; i++)
			nouveau_bo_wr32(priv->bo, i*4, priv->suspend[i]);
		vfree(priv->suspend);
		priv->suspend = NULL;
	}
}

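/* Unmap, unpin and release the shared fence buffers. */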
static void
nv84_fence_destroy(struct nouveau_drm *drm)
{
	struct nv84_fence_priv *priv = drm->fence;
	nouveau_bo_unmap(priv->bo_gart);
	if (priv->bo_gart)
		nouveau_bo_unpin(priv->bo_gart);
	nouveau_bo_ref(NULL, &priv->bo_gart);
	nouveau_bo_unmap(priv->bo);
	if (priv->bo)
		nouveau_bo_unpin(priv->bo);
	nouveau_bo_ref(NULL, &priv->bo);
	drm->fence = NULL;
	kfree(priv);
}

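/* Global setup: allocate one 16-byte fence slot per channel in both a
 * VRAM buffer and an uncached GART (sysmem) buffer.
 */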
int
nv84_fence_create(struct nouveau_drm *drm)
{
	struct nvkm_fifo *pfifo = nvxx_fifo(&drm->device);
	struct nv84_fence_priv *priv;
	int ret;

	priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;

	priv->base.dtor = nv84_fence_destroy;
	priv->base.suspend = nv84_fence_suspend;
	priv->base.resume = nv84_fence_resume;
	priv->base.context_new = nv84_fence_context_new;
	priv->base.context_del = nv84_fence_context_del;

	priv->base.contexts = pfifo->max + 1;
	priv->base.context_base = fence_context_alloc(priv->base.contexts);
	priv->base.uevent = true;

	ret = nouveau_bo_new(drm->dev, 16 * priv->base.contexts, 0,
			     TTM_PL_FLAG_VRAM, 0, 0, NULL, NULL, &priv->bo);
	if (ret == 0) {
		ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false);
		if (ret == 0) {
			ret = nouveau_bo_map(priv->bo);
			if (ret)
				nouveau_bo_unpin(priv->bo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &priv->bo);
	}

	if (ret == 0)
		ret = nouveau_bo_new(drm->dev, 16 * priv->base.contexts, 0,
				     TTM_PL_FLAG_TT | TTM_PL_FLAG_UNCACHED, 0,
				     0, NULL, NULL, &priv->bo_gart);
	if (ret == 0) {
		ret = nouveau_bo_pin(priv->bo_gart, TTM_PL_FLAG_TT, false);
		if (ret == 0) {
			ret = nouveau_bo_map(priv->bo_gart);
			if (ret)
				nouveau_bo_unpin(priv->bo_gart);
		}
		if (ret)
			nouveau_bo_ref(NULL, &priv->bo_gart);
	}

	if (ret)
		nv84_fence_destroy(drm);
	return ret;
}