1
2
3
4
5
6
7
8
9
10#include <linux/dma-fence-chain.h>
11
12static bool dma_fence_chain_enable_signaling(struct dma_fence *fence);
13
14
15
16
17
18
19
20
21static struct dma_fence *dma_fence_chain_get_prev(struct dma_fence_chain *chain)
22{
23 struct dma_fence *prev;
24
25 rcu_read_lock();
26 prev = dma_fence_get_rcu_safe(&chain->prev);
27 rcu_read_unlock();
28 return prev;
29}
30
31
32
33
34
35
36
37
38
/**
 * dma_fence_chain_walk - chain walking function
 * @fence: current chain node
 *
 * Walk the chain to the next node, dropping the reference on @fence. Returns
 * the next (referenced) fence in the chain, or NULL if @fence is not a chain
 * node (i.e. we are at the end of the chain). While walking, garbage-collects
 * predecessor chain nodes whose contained fence has already signaled by
 * atomically splicing them out with cmpxchg.
 */
struct dma_fence *dma_fence_chain_walk(struct dma_fence *fence)
{
	struct dma_fence_chain *chain, *prev_chain;
	struct dma_fence *prev, *replacement, *tmp;

	chain = to_dma_fence_chain(fence);
	if (!chain) {
		/* Not a chain node: consume the reference and stop the walk. */
		dma_fence_put(fence);
		return NULL;
	}

	while ((prev = dma_fence_chain_get_prev(chain))) {

		prev_chain = to_dma_fence_chain(prev);
		if (prev_chain) {
			/* Predecessor is itself a chain node: it can be
			 * skipped only once the fence it wraps has signaled;
			 * its own predecessor then becomes our replacement.
			 */
			if (!dma_fence_is_signaled(prev_chain->fence))
				break;

			replacement = dma_fence_chain_get_prev(prev_chain);
		} else {
			/* Plain fence at the head of the chain: once it has
			 * signaled there is nothing left before us.
			 */
			if (!dma_fence_is_signaled(prev))
				break;

			replacement = NULL;
		}

		/* Try to splice out the signaled predecessor. Another walker
		 * may race us here, hence the cmpxchg on chain->prev.
		 */
		tmp = cmpxchg((struct dma_fence __force **)&chain->prev,
			      prev, replacement);
		if (tmp == prev)
			/* We won: drop the reference the chain held on prev. */
			dma_fence_put(tmp);
		else
			/* Lost the race: the replacement was not installed. */
			dma_fence_put(replacement);
		dma_fence_put(prev);
	}

	dma_fence_put(fence);
	return prev;
}
EXPORT_SYMBOL(dma_fence_chain_walk);
78
79
80
81
82
83
84
85
86
87
88
89
/**
 * dma_fence_chain_find_seqno - find fence chain node by seqno
 * @pfence: pointer to the chain node where to start, updated in place
 * @seqno: the sequence number to search for
 *
 * Advance *@pfence to the chain node which will signal this sequence number.
 * If no sequence number is provided (@seqno == 0) this is a no-op.
 *
 * Returns -EINVAL if *@pfence is not a chain node or if the chain's sequence
 * number has not yet advanced far enough to cover @seqno; 0 on success.
 */
int dma_fence_chain_find_seqno(struct dma_fence **pfence, uint64_t seqno)
{
	struct dma_fence_chain *chain;

	if (!seqno)
		return 0;

	chain = to_dma_fence_chain(*pfence);
	if (!chain || chain->base.seqno < seqno)
		return -EINVAL;

	/* Walk backwards while the node still belongs to the same timeline
	 * and its predecessor already covers the requested seqno.
	 */
	dma_fence_chain_for_each(*pfence, &chain->base) {
		if ((*pfence)->context != chain->base.context ||
		    to_dma_fence_chain(*pfence)->prev_seqno < seqno)
			break;
	}
	/* Drop the starting reference; the loop took its own on *pfence. */
	dma_fence_put(&chain->base);

	return 0;
}
EXPORT_SYMBOL(dma_fence_chain_find_seqno);
111
/* dma_fence_ops::get_driver_name: chain nodes belong to no real driver. */
static const char *dma_fence_chain_get_driver_name(struct dma_fence *fence)
{
	return "dma_fence_chain";
}
116
/* dma_fence_ops::get_timeline_name: a chain aggregates fences from
 * arbitrary contexts, so it has no single timeline of its own.
 */
static const char *dma_fence_chain_get_timeline_name(struct dma_fence *fence)
{
	return "unbound";
}
121
/* Deferred work run after a watched fence signaled: either re-arm the
 * callback on the next unsignaled fence in the chain, or signal the chain
 * node itself when nothing unsignaled is left.
 */
static void dma_fence_chain_irq_work(struct irq_work *work)
{
	struct dma_fence_chain *chain;

	chain = container_of(work, typeof(*chain), work);

	/* Try to rearm the callback */
	if (!dma_fence_chain_enable_signaling(&chain->base))
		/* Ok, we are done. No more unsignaled fences left */
		dma_fence_signal(&chain->base);
	/* Drop the reference taken when the callback was installed. */
	dma_fence_put(&chain->base);
}
134
/* Fence callback: may run in IRQ context, so punt the re-arm/signal work
 * to an irq_work instead of doing it here.
 */
static void dma_fence_chain_cb(struct dma_fence *f, struct dma_fence_cb *cb)
{
	struct dma_fence_chain *chain;

	chain = container_of(cb, typeof(*chain), cb);
	init_irq_work(&chain->work, dma_fence_chain_irq_work);
	irq_work_queue(&chain->work);
	/* Drop the reference enable_signaling took on the watched fence. */
	dma_fence_put(f);
}
144
/* dma_fence_ops::enable_signaling: walk the chain and install the callback
 * on the first fence that is still unsignaled.
 *
 * Returns true if a callback was installed (the chain will signal later via
 * dma_fence_chain_cb), false if every fence in the chain already signaled.
 */
static bool dma_fence_chain_enable_signaling(struct dma_fence *fence)
{
	struct dma_fence_chain *head = to_dma_fence_chain(fence);

	/* Hold the head alive across the walk; dropped by the irq_work
	 * (on success) or at the end of this function (on failure).
	 */
	dma_fence_get(&head->base);
	dma_fence_chain_for_each(fence, &head->base) {
		/* For a chain node watch the fence it wraps; otherwise the
		 * walk reached the plain fence at the head of the chain.
		 */
		struct dma_fence_chain *chain = to_dma_fence_chain(fence);
		struct dma_fence *f = chain ? chain->fence : fence;

		dma_fence_get(f);
		if (!dma_fence_add_callback(f, &head->cb, dma_fence_chain_cb)) {
			/* Callback armed; release the walk's node reference. */
			dma_fence_put(fence);
			return true;
		}
		/* f already signaled, keep walking. */
		dma_fence_put(f);
	}
	dma_fence_put(&head->base);
	return false;
}
164
165static bool dma_fence_chain_signaled(struct dma_fence *fence)
166{
167 dma_fence_chain_for_each(fence, fence) {
168 struct dma_fence_chain *chain = to_dma_fence_chain(fence);
169 struct dma_fence *f = chain ? chain->fence : fence;
170
171 if (!dma_fence_is_signaled(f)) {
172 dma_fence_put(fence);
173 return false;
174 }
175 }
176
177 return true;
178}
179
/* dma_fence_ops::release: free a chain node once its refcount hits zero. */
static void dma_fence_chain_release(struct dma_fence *fence)
{
	struct dma_fence_chain *chain = to_dma_fence_chain(fence);
	struct dma_fence *prev;

	/* Manually unlink the chain as much as possible to avoid recursion
	 * (release -> put(prev) -> release -> ...) and a potential stack
	 * overflow on very long chains.
	 */
	while ((prev = rcu_dereference_protected(chain->prev, true))) {
		struct dma_fence_chain *prev_chain;

		/* Someone else still holds prev; its release won't recurse
		 * through us, so a plain put below is fine.
		 */
		if (kref_read(&prev->refcount) > 1)
			break;

		prev_chain = to_dma_fence_chain(prev);
		if (!prev_chain)
			break;

		/* No need for atomic operations here: we hold the last
		 * reference to prev_chain, nobody else can see it.
		 */
		chain->prev = prev_chain->prev;
		RCU_INIT_POINTER(prev_chain->prev, NULL);
		dma_fence_put(prev);
	}
	/* Drop the reference on whatever predecessor remains (may be NULL). */
	dma_fence_put(prev);

	dma_fence_put(chain->fence);
	dma_fence_free(fence);
}
210
/* Fence ops for chain nodes; exported so to_dma_fence_chain() can identify
 * chain fences by comparing ops pointers.
 */
const struct dma_fence_ops dma_fence_chain_ops = {
	.use_64bit_seqno = true,
	.get_driver_name = dma_fence_chain_get_driver_name,
	.get_timeline_name = dma_fence_chain_get_timeline_name,
	.enable_signaling = dma_fence_chain_enable_signaling,
	.signaled = dma_fence_chain_signaled,
	.release = dma_fence_chain_release,
};
EXPORT_SYMBOL(dma_fence_chain_ops);
220
221
222
223
224
225
226
227
228
229
230
/**
 * dma_fence_chain_init - initialize a fence chain node
 * @chain: the chain node to initialize
 * @prev: the previous fence, ownership (one reference) is transferred
 * @fence: the current fence, ownership (one reference) is transferred
 * @seqno: the sequence number to use for the chain node
 *
 * Initialize a new chain node and either continue the timeline of the
 * previous chain node or start a new timeline for it.
 */
void dma_fence_chain_init(struct dma_fence_chain *chain,
			  struct dma_fence *prev,
			  struct dma_fence *fence,
			  uint64_t seqno)
{
	struct dma_fence_chain *prev_chain = to_dma_fence_chain(prev);
	uint64_t context;

	spin_lock_init(&chain->lock);
	rcu_assign_pointer(chain->prev, prev);
	chain->fence = fence;
	chain->prev_seqno = 0;

	/* Try to reuse the context of the previous chain node: only possible
	 * when prev is itself a chain node and @seqno actually advances past
	 * its sequence number.
	 */
	if (prev_chain && __dma_fence_is_later(seqno, prev->seqno, prev->ops)) {
		context = prev->context;
		chain->prev_seqno = prev->seqno;
	} else {
		context = dma_fence_context_alloc(1);
		/* Make sure that we always have a valid (monotonic) sequence
		 * number for the new timeline.
		 */
		if (prev_chain)
			seqno = max(prev->seqno, seqno);
	}

	dma_fence_init(&chain->base, &dma_fence_chain_ops,
		       &chain->lock, context, seqno);
}
EXPORT_SYMBOL(dma_fence_chain_init);
259