1
2
3
4
5
6
7
8
9#include <linux/reservation.h>
10#include <drm/drmP.h>
11#include <drm/drm_encoder.h>
12#include <drm/drm_gem_cma_helper.h>
13
14#include "uapi/drm/vc4_drm.h"
15
16
17
18
/*
 * Classes of in-kernel BO allocation, used to label BOs for the
 * per-type allocation statistics (presumably indexed into
 * vc4_dev->bo_labels alongside user-defined labels -- confirm in
 * vc4_bo.c).
 */
enum vc4_kernel_bo_type {
	/* Any kernel allocation that doesn't fit one of the more
	 * specific types below.
	 */
	VC4_BO_TYPE_KERNEL,
	VC4_BO_TYPE_V3D,
	VC4_BO_TYPE_V3D_SHADER,
	VC4_BO_TYPE_DUMB,
	VC4_BO_TYPE_BIN,
	VC4_BO_TYPE_RCL,
	VC4_BO_TYPE_BCL,
	VC4_BO_TYPE_KERNEL_CACHE,
	/* Number of type slots; must stay last. */
	VC4_BO_TYPE_COUNT
};
33
34
35
36
37
38
39
40struct vc4_perfmon {
41
42
43
44 refcount_t refcnt;
45
46
47
48
49 u8 ncounters;
50
51
52 u8 events[DRM_VC4_MAX_PERF_COUNTERS];
53
54
55
56
57
58
59
60
61 u64 counters[0];
62};
63
/*
 * Top-level driver state for the VC4 GPU/display pipeline.  One per
 * drm_device, hung off dev->dev_private (see to_vc4_dev()).
 */
struct vc4_dev {
	struct drm_device *dev;

	/* Per-component state -- presumably filled in as each
	 * sub-device driver binds; confirm in the component drivers.
	 */
	struct vc4_hdmi *hdmi;
	struct vc4_hvs *hvs;
	struct vc4_v3d *v3d;
	struct vc4_dpi *dpi;
	struct vc4_dsi *dsi1;
	struct vc4_vec *vec;

	/* Snapshot of GPU state captured at hang time, for the
	 * GET_HANG_STATE ioctl (vc4_get_hang_state_ioctl()).
	 */
	struct vc4_hang_state *hang_state;

	/* Cache of recently-freed BOs, kept so new allocations of
	 * the same size can be recycled cheaply.
	 */
	struct vc4_bo_cache {
		/* Size-bucketed lists of cached BOs; size_list_size
		 * entries.  Bucketing scheme lives in vc4_bo.c --
		 * presumably by page count.
		 */
		struct list_head *size_list;
		uint32_t size_list_size;

		/* All cached BOs in time-of-free order, with a
		 * work/timer pair that presumably expires stale
		 * entries (BOs carry a free_time stamp).
		 */
		struct list_head time_list;
		struct work_struct time_work;
		struct timer_list time_timer;
	} bo_cache;

	/* Allocation-statistics labels: user labels from the
	 * VC4_LABEL_BO ioctl plus the fixed vc4_kernel_bo_type
	 * slots.
	 */
	u32 num_labels;
	struct vc4_label {
		const char *name;
		u32 num_allocated;
		u32 size_allocated;
	} *bo_labels;

	/* Presumably protects bo_labels and the BO cache -- confirm
	 * in vc4_bo.c.
	 */
	struct mutex bo_lock;

	/* Book-keeping for BOs marked purgeable via the madvise
	 * ioctl, guarded by purgeable.lock.
	 */
	struct {
		struct list_head list;
		unsigned int num;
		size_t size;
		unsigned int purged_num;
		size_t purged_size;
		struct mutex lock;
	} purgeable;

	/* dma_fence context number for fences emitted by this device. */
	uint64_t dma_fence_context;

	/* Sequence number of the most recently submitted job. */
	uint64_t emit_seqno;

	/* Sequence number of the most recently completed job. */
	uint64_t finished_seqno;

	/* Jobs waiting for the binner, in submission order;
	 * vc4_first_bin_job() returns the head.
	 */
	struct list_head bin_job_list;

	/* Jobs waiting for / in the renderer, in submission order;
	 * see vc4_first_render_job()/vc4_last_render_job().
	 */
	struct list_head render_job_list;

	/* Completed jobs awaiting cleanup by job_done_work. */
	struct list_head job_done_list;

	/* Presumably protects the job lists and seqno_cb_list. */
	spinlock_t job_lock;
	wait_queue_head_t job_wait_queue;
	struct work_struct job_done_work;

	/* Perfmon attached to the currently running job, if any. */
	struct vc4_perfmon *active_perfmon;

	/* Callbacks registered via vc4_queue_seqno_cb(), fired as
	 * seqnos complete.
	 */
	struct list_head seqno_cb_list;

	/* Shared memory pool used by the binner. */
	struct vc4_bo *bin_bo;

	/* Size of the allocatable portion of bin_bo. */
	uint32_t bin_alloc_size;

	/* In-use slots of bin_bo -- presumably a bitmask handed out
	 * by vc4_v3d_get_bin_slot(); confirm in vc4_v3d.c.
	 */
	uint32_t bin_alloc_used;

	/* Slot(s) reserved for binner overflow handling (see
	 * overflow_mem_work) -- confirm exact semantics in vc4_irq.c.
	 */
	uint32_t bin_alloc_overflow;

	/* Supplies more binner memory when the HW signals overflow. */
	struct work_struct overflow_mem_work;

	/* Power refcount for the V3D core, guarded by power_lock. */
	int power_refcount;

	struct mutex power_lock;

	/* Hang detection: timer presumably arms reset_work when job
	 * progress stalls (exec tracks last_ct0ca/last_ct1ca).
	 */
	struct {
		struct timer_list timer;
		struct work_struct reset_work;
	} hangcheck;

	/* Serializes asynchronous page flips against modesets. */
	struct semaphore async_modeset;
};
197
198static inline struct vc4_dev *
199to_vc4_dev(struct drm_device *dev)
200{
201 return (struct vc4_dev *)dev->dev_private;
202}
203
/* A GEM buffer object with VC4-specific state tacked on. */
struct vc4_bo {
	/* Must stay first: to_vc4_bo() casts the GEM object pointer. */
	struct drm_gem_cma_object base;

	/* Seqno of the last job that used this BO. */
	uint64_t seqno;

	/* Seqno of the last job that wrote to this BO -- used as a
	 * binning dependency (see vc4_exec_info.bin_dep_seqno).
	 */
	uint64_t write_seqno;

	/* Set when the buffer contents are in the HW's T tiling
	 * format (see the set/get_tiling ioctls).
	 */
	bool t_format;

	/* Entry in the BO cache's time_list (or a job's unref_list;
	 * confirm usage in vc4_bo.c/vc4_gem.c).
	 */
	struct list_head unref_head;

	/* Timestamp (presumably jiffies) when the BO entered the
	 * cache, used to expire stale entries.
	 */
	unsigned long free_time;

	/* Entry in one of the BO cache's size-bucketed lists. */
	struct list_head size_head;

	/* Validation results if this BO was created as a shader
	 * (VC4_BO_TYPE_V3D_SHADER); NULL otherwise.
	 */
	struct vc4_validated_shader_info *validated_shader;

	/* resv normally points at the embedded _resv; presumably
	 * redirected to the exporter's object for imported dma-bufs
	 * (see vc4_prime_res_obj()) -- confirm in vc4_bo.c.
	 */
	struct reservation_object *resv;
	struct reservation_object _resv;

	/* Index into vc4_dev->bo_labels for allocation statistics
	 * (see vc4_label_bo_ioctl()).
	 */
	int label;

	/* Count of active users; presumably a purgeable BO may only
	 * be reclaimed at zero (see vc4_bo_inc_usecnt()/
	 * vc4_bo_dec_usecnt()).
	 */
	refcount_t usecnt;

	/* Madvise (purgeable) state, protected by madv_lock. */
	u32 madv;
	struct mutex madv_lock;
};
254
/* Downcast a GEM object to its containing vc4_bo.  Valid because
 * "base" is the first member of struct vc4_bo.
 */
static inline struct vc4_bo *
to_vc4_bo(struct drm_gem_object *bo)
{
	struct vc4_bo *vc4_bo = (struct vc4_bo *)bo;

	return vc4_bo;
}
260
/* dma_fence implementation tied to a job seqno (see vc4_fence_ops). */
struct vc4_fence {
	/* Must stay first: to_vc4_fence() casts the fence pointer. */
	struct dma_fence base;
	struct drm_device *dev;
	/* Job seqno whose completion signals this fence. */
	uint64_t seqno;
};
267
/* Downcast a dma_fence to its containing vc4_fence.  Valid because
 * "base" is the first member of struct vc4_fence.
 */
static inline struct vc4_fence *
to_vc4_fence(struct dma_fence *fence)
{
	struct vc4_fence *vc4_fence = (struct vc4_fence *)fence;

	return vc4_fence;
}
273
/* Deferred callback fired once a given seqno completes; registered
 * with vc4_queue_seqno_cb() and run from workqueue context.
 */
struct vc4_seqno_cb {
	struct work_struct work;
	uint64_t seqno;
	void (*func)(struct vc4_seqno_cb *cb);
};
279
/* State of the V3D (3D core) sub-device. */
struct vc4_v3d {
	struct vc4_dev *vc4;
	struct platform_device *pdev;
	/* MMIO registers, accessed via V3D_READ()/V3D_WRITE(). */
	void __iomem *regs;
	struct clk *clk;
};
286
/* State of the HVS (hardware video scaler) sub-device. */
struct vc4_hvs {
	struct platform_device *pdev;
	/* MMIO registers, accessed via HVS_READ()/HVS_WRITE(). */
	void __iomem *regs;
	/* Mapping of the HVS display-list memory. */
	u32 __iomem *dlist;

	/* Allocator for display-list memory (where plane dlists are
	 * written, see vc4_plane_write_dlist()).
	 */
	struct drm_mm dlist_mm;
	/* Allocator for LBM (line buffer memory), used while scaling. */
	struct drm_mm lbm_mm;
	/* Presumably protects both allocators above. */
	spinlock_t mm_lock;

	/* Fixed LBM allocation holding the Mitchell/Netravali scaler
	 * filter coefficients.
	 */
	struct drm_mm_node mitchell_netravali_filter;
};
302
/* VC4 wrapper around a DRM plane; "base" must stay first for
 * to_vc4_plane()'s cast.
 */
struct vc4_plane {
	struct drm_plane base;
};
306
/* Downcast a DRM plane to its containing vc4_plane.  Valid because
 * "base" is the first member of struct vc4_plane.
 */
static inline struct vc4_plane *
to_vc4_plane(struct drm_plane *plane)
{
	struct vc4_plane *vc4_plane = (struct vc4_plane *)plane;

	return vc4_plane;
}
312
/* HVS scaling filter selection per plane axis (see
 * vc4_plane_state.x_scaling/y_scaling).
 */
enum vc4_scaling_mode {
	VC4_SCALING_NONE,
	/* Trapezoidal filter. */
	VC4_SCALING_TPZ,
	/* Polyphase filter. */
	VC4_SCALING_PPF,
};
318
/* Per-plane atomic state, extending drm_plane_state with the HVS
 * display-list words computed for this configuration.
 */
struct vc4_plane_state {
	/* Must stay first: to_vc4_plane_state() casts the pointer. */
	struct drm_plane_state base;

	/* HVS display-list contents for this plane: dlist_count
	 * words used out of dlist_size allocated.
	 */
	u32 *dlist;
	u32 dlist_size;
	u32 dlist_count;

	/* Word offsets within dlist that are patched for async
	 * updates -- presumably position words 0/2 and the
	 * framebuffer pointer word; confirm in vc4_plane.c.
	 */
	u32 pos0_offset;
	u32 pos2_offset;
	u32 ptr0_offset;

	/* Location in HVS memory where this dlist was last written
	 * (see vc4_plane_write_dlist()).
	 */
	u32 __iomem *hw_dlist;

	/* Destination rectangle on the CRTC. */
	int crtc_x, crtc_y, crtc_w, crtc_h;

	/* Source offset within the framebuffer. */
	u32 src_x, src_y;

	/* Source size per plane: index 0 is the first (Y/RGB) plane,
	 * index 1 presumably the subsampled chroma planes (is_yuv).
	 */
	u32 src_w[2], src_h[2];

	/* Chosen scaling filter per axis and per plane. */
	enum vc4_scaling_mode x_scaling[2], y_scaling[2];
	bool is_unity;
	bool is_yuv;

	/* Byte offset of each color plane within the framebuffer BO. */
	u32 offsets[3];

	/* LBM (line buffer memory) allocation used while scaling. */
	struct drm_mm_node lbm;

	/* NOTE(review): presumably set when the plane doesn't cover
	 * the screen so the HVS must fill the background -- confirm
	 * in vc4_plane.c/vc4_crtc.c.
	 */
	bool needs_bg_fill;
};
366
/* Downcast a drm_plane_state to its containing vc4_plane_state.
 * Valid because "base" is the first member of struct vc4_plane_state.
 */
static inline struct vc4_plane_state *
to_vc4_plane_state(struct drm_plane_state *state)
{
	struct vc4_plane_state *vc4_state = (struct vc4_plane_state *)state;

	return vc4_state;
}
372
/* The display output paths an encoder can drive. */
enum vc4_encoder_type {
	VC4_ENCODER_TYPE_NONE,
	VC4_ENCODER_TYPE_HDMI,
	VC4_ENCODER_TYPE_VEC,
	VC4_ENCODER_TYPE_DSI0,
	VC4_ENCODER_TYPE_DSI1,
	VC4_ENCODER_TYPE_SMI,
	VC4_ENCODER_TYPE_DPI,
};
382
/* VC4 wrapper around a DRM encoder. */
struct vc4_encoder {
	struct drm_encoder base;
	enum vc4_encoder_type type;
	/* Clock-mux selection for this output -- presumably used to
	 * route a pixel valve to the encoder; confirm in vc4_crtc.c.
	 */
	u32 clock_select;
};
388
/* Recover the containing vc4_encoder from its embedded drm_encoder. */
static inline struct vc4_encoder *
to_vc4_encoder(struct drm_encoder *encoder)
{
	return container_of(encoder, struct vc4_encoder, base);
}
394
/* MMIO accessors for the V3D and HVS register ranges; each expects a
 * local "struct vc4_dev *vc4" to be in scope.  Arguments are
 * parenthesized so expressions built from lower-precedence operators
 * (e.g. V3D_READ(base | idx)) expand correctly -- the unparenthesized
 * form computed "regs + base | idx" since '+' binds tighter than '|'.
 */
#define V3D_READ(offset) readl(vc4->v3d->regs + (offset))
#define V3D_WRITE(offset, val) writel((val), vc4->v3d->regs + (offset))
#define HVS_READ(offset) readl(vc4->hvs->regs + (offset))
#define HVS_WRITE(offset, val) writel((val), vc4->hvs->regs + (offset))
399
/*
 * Per-submission state for one job, carried from the SUBMIT_CL ioctl
 * through validation, binning, rendering and cleanup.
 */
struct vc4_exec_info {
	/* Sequence number assigned to this job. */
	uint64_t seqno;

	/* Latest write_seqno of any BO this job reads; presumably
	 * the binner must wait for it before starting.
	 */
	uint64_t bin_dep_seqno;

	/* Fence signalled when the job completes. */
	struct dma_fence *fence;

	/* Last-observed control-list current addresses -- presumably
	 * sampled by hangcheck to detect progress; confirm in
	 * vc4_irq.c/vc4_gem.c.
	 */
	uint32_t last_ct0ca, last_ct1ca;

	/* Kernel copy of the userspace submit arguments. */
	struct drm_vc4_submit_cl *args;

	/* BOs referenced by this job, looked up from the handle list
	 * in args.
	 */
	struct drm_gem_cma_object **bo;
	uint32_t bo_count;

	/* BOs the render control list writes to -- presumably their
	 * write_seqno is bumped on completion; confirm in vc4_gem.c.
	 */
	struct drm_gem_cma_object *rcl_write_bo[4];
	uint32_t rcl_write_bo_count;

	/* Entry in one of vc4_dev's job lists (bin/render/done). */
	struct list_head head;

	/* Temporary BOs to release once the job completes. */
	struct list_head unref_list;

	/* NOTE(review): two cursor indices used during validation --
	 * exact meaning not visible here; confirm in vc4_validate.c.
	 */
	uint32_t bo_index[2];

	/* Kernel-owned BO holding the validated command lists,
	 * shader recs and uniforms that the HW actually executes.
	 */
	struct drm_gem_cma_object *exec_bo;

	/* State recorded per shader-state packet while validating
	 * the bin CL, consumed by shader record validation.
	 */
	struct vc4_shader_state {
		/* Address of the packet in the validated bin CL. */
		uint32_t addr;

		/* Maximum vertex index referenced via this state --
		 * presumably bounds-checked against vertex buffers.
		 */
		uint32_t max_index;
	} *shader_state;

	/* Entries allocated in shader_state. */
	uint32_t shader_state_size;
	/* Entries actually filled during validation. */
	uint32_t shader_state_count;

	/* Flags noting that required bin CL packets were seen. */
	bool found_tile_binning_mode_config_packet;
	bool found_start_tile_binning_packet;
	bool found_increment_semaphore_packet;
	bool found_flush;
	uint8_t bin_tiles_x, bin_tiles_y;

	/* Offset of the tile allocation area -- presumably within
	 * vc4_dev->bin_bo; confirm in vc4_v3d.c.
	 */
	uint32_t tile_alloc_offset;

	/* Bin-BO slots held by this job (see bin_alloc_used). */
	uint32_t bin_slots;

	/* Start/end addresses of the binner (thread 0) and renderer
	 * (thread 1) control lists handed to the HW.
	 */
	uint32_t ct0ca, ct0ea;
	uint32_t ct1ca, ct1ea;

	/* Unvalidated copy of the userspace bin CL. */
	void *bin_u;

	/* Shader records: _u = unvalidated user copy, _v = validated
	 * kernel copy, _p = HW address of the validated copy.
	 */
	void *shader_rec_u;
	void *shader_rec_v;
	uint32_t shader_rec_p;
	uint32_t shader_rec_size;

	/* Uniforms, same _u/_v/_p/_size convention as shader recs. */
	void *uniforms_u;
	void *uniforms_v;
	uint32_t uniforms_p;
	uint32_t uniforms_size;

	/* Perfmon attached to this job, or NULL. */
	struct vc4_perfmon *perfmon;
};
512
513
514
515
/* Per-open-file driver state: the perfmon objects owned by this fd,
 * kept in an IDR guarded by perfmon.lock (see vc4_perfmon_find()).
 */
struct vc4_file {
	struct {
		struct idr idr;
		struct mutex lock;
	} perfmon;
};
522
/* Head of the bin job queue (the job the binner should run next), or
 * NULL if the queue is empty.
 */
static inline struct vc4_exec_info *
vc4_first_bin_job(struct vc4_dev *vc4)
{
	return list_first_entry_or_null(&vc4->bin_job_list,
					struct vc4_exec_info, head);
}
529
/* Head of the render job queue, or NULL if the queue is empty. */
static inline struct vc4_exec_info *
vc4_first_render_job(struct vc4_dev *vc4)
{
	return list_first_entry_or_null(&vc4->render_job_list,
					struct vc4_exec_info, head);
}
536
537static inline struct vc4_exec_info *
538vc4_last_render_job(struct vc4_dev *vc4)
539{
540 if (list_empty(&vc4->render_job_list))
541 return NULL;
542 return list_last_entry(&vc4->render_job_list,
543 struct vc4_exec_info, head);
544}
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
/* One texture sample found during shader validation: whether it was
 * a direct-addressed lookup, and the uniform-stream offsets of its
 * texture parameter words (up to 4).
 */
struct vc4_texture_sample_info {
	bool is_direct;
	uint32_t p_offset[4];
};
564
565
566
567
568
569
570
571
572
573
574
/* Result of validating a shader BO (vc4_validate_shader()), consumed
 * when validating jobs that reference the shader.
 */
struct vc4_validated_shader_info {
	/* Size of the uniform stream the HW consumes per draw. */
	uint32_t uniforms_size;
	/* Size of the uniform stream as supplied by userspace --
	 * presumably differs from uniforms_size when the kernel
	 * rewrites entries; confirm in vc4_validate_shaders.c.
	 */
	uint32_t uniforms_src_size;
	/* Texture samples discovered in the shader. */
	uint32_t num_texture_samples;
	struct vc4_texture_sample_info *texture_samples;

	/* Uniform-stream offsets that hold addresses needing
	 * relocation at submit time.
	 */
	uint32_t num_uniform_addr_offsets;
	uint32_t *uniform_addr_offsets;

	/* Shader uses the two-thread QPU execution mode. */
	bool is_threaded;
};
586
587
588
589
590
591
592
593
594
/*
 * Poll until COND is true, giving up after MS milliseconds.
 * Evaluates to 0 on success or -ETIMEDOUT.  W is the per-iteration
 * sleep in msecs; when 0 (or sleeping isn't allowed) the loop spins
 * with cpu_relax() instead.
 */
#define _wait_for(COND, MS, W) ({ \
	unsigned long timeout__ = jiffies + msecs_to_jiffies(MS) + 1; \
	int ret__ = 0; \
	while (!(COND)) { \
		if (time_after(jiffies, timeout__)) { \
			/* Recheck COND: we may have slept past the \
			 * deadline while the condition became true. \
			 */ \
			if (!(COND)) \
				ret__ = -ETIMEDOUT; \
			break; \
		} \
		if (W && drm_can_sleep()) { \
			msleep(W); \
		} else { \
			cpu_relax(); \
		} \
	} \
	ret__; \
})

/* Common case: poll with a 1 ms sleep between checks. */
#define wait_for(COND, MS) _wait_for(COND, MS, 1)
614
615
/* vc4_bo.c -- BO allocation, caching, labeling and purgeable pool. */
struct drm_gem_object *vc4_create_object(struct drm_device *dev, size_t size);
void vc4_free_object(struct drm_gem_object *gem_obj);
struct vc4_bo *vc4_bo_create(struct drm_device *dev, size_t size,
			     bool from_cache, enum vc4_kernel_bo_type type);
int vc4_dumb_create(struct drm_file *file_priv,
		    struct drm_device *dev,
		    struct drm_mode_create_dumb *args);
struct dma_buf *vc4_prime_export(struct drm_device *dev,
				 struct drm_gem_object *obj, int flags);
int vc4_create_bo_ioctl(struct drm_device *dev, void *data,
			struct drm_file *file_priv);
int vc4_create_shader_bo_ioctl(struct drm_device *dev, void *data,
			       struct drm_file *file_priv);
int vc4_mmap_bo_ioctl(struct drm_device *dev, void *data,
		      struct drm_file *file_priv);
int vc4_set_tiling_ioctl(struct drm_device *dev, void *data,
			 struct drm_file *file_priv);
int vc4_get_tiling_ioctl(struct drm_device *dev, void *data,
			 struct drm_file *file_priv);
int vc4_get_hang_state_ioctl(struct drm_device *dev, void *data,
			     struct drm_file *file_priv);
int vc4_label_bo_ioctl(struct drm_device *dev, void *data,
		       struct drm_file *file_priv);
int vc4_fault(struct vm_fault *vmf);
int vc4_mmap(struct file *filp, struct vm_area_struct *vma);
struct reservation_object *vc4_prime_res_obj(struct drm_gem_object *obj);
int vc4_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma);
struct drm_gem_object *vc4_prime_import_sg_table(struct drm_device *dev,
						 struct dma_buf_attachment *attach,
						 struct sg_table *sgt);
void *vc4_prime_vmap(struct drm_gem_object *obj);
int vc4_bo_cache_init(struct drm_device *dev);
void vc4_bo_cache_destroy(struct drm_device *dev);
int vc4_bo_stats_debugfs(struct seq_file *m, void *arg);
int vc4_bo_inc_usecnt(struct vc4_bo *bo);
void vc4_bo_dec_usecnt(struct vc4_bo *bo);
void vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo);
void vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo);

/* vc4_crtc.c -- CRTC (pixel valve) support. */
extern struct platform_driver vc4_crtc_driver;
int vc4_crtc_debugfs_regs(struct seq_file *m, void *arg);
bool vc4_crtc_get_scanoutpos(struct drm_device *dev, unsigned int crtc_id,
			     bool in_vblank_irq, int *vpos, int *hpos,
			     ktime_t *stime, ktime_t *etime,
			     const struct drm_display_mode *mode);

/* vc4_debugfs.c */
int vc4_debugfs_init(struct drm_minor *minor);

/* vc4_drv.c */
void __iomem *vc4_ioremap_regs(struct platform_device *dev, int index);

/* vc4_dpi.c */
extern struct platform_driver vc4_dpi_driver;
int vc4_dpi_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_dsi.c */
extern struct platform_driver vc4_dsi_driver;
int vc4_dsi_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_fence.c */
extern const struct dma_fence_ops vc4_fence_ops;

/* vc4_gem.c -- job submission and seqno tracking. */
void vc4_gem_init(struct drm_device *dev);
void vc4_gem_destroy(struct drm_device *dev);
int vc4_submit_cl_ioctl(struct drm_device *dev, void *data,
			struct drm_file *file_priv);
int vc4_wait_seqno_ioctl(struct drm_device *dev, void *data,
			 struct drm_file *file_priv);
int vc4_wait_bo_ioctl(struct drm_device *dev, void *data,
		      struct drm_file *file_priv);
void vc4_submit_next_bin_job(struct drm_device *dev);
void vc4_submit_next_render_job(struct drm_device *dev);
void vc4_move_job_to_render(struct drm_device *dev, struct vc4_exec_info *exec);
int vc4_wait_for_seqno(struct drm_device *dev, uint64_t seqno,
		       uint64_t timeout_ns, bool interruptible);
void vc4_job_handle_completed(struct vc4_dev *vc4);
int vc4_queue_seqno_cb(struct drm_device *dev,
		       struct vc4_seqno_cb *cb, uint64_t seqno,
		       void (*func)(struct vc4_seqno_cb *cb));
int vc4_gem_madvise_ioctl(struct drm_device *dev, void *data,
			  struct drm_file *file_priv);

/* vc4_hdmi.c */
extern struct platform_driver vc4_hdmi_driver;
int vc4_hdmi_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_vec.c */
extern struct platform_driver vc4_vec_driver;
int vc4_vec_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_irq.c */
irqreturn_t vc4_irq(int irq, void *arg);
void vc4_irq_preinstall(struct drm_device *dev);
int vc4_irq_postinstall(struct drm_device *dev);
void vc4_irq_uninstall(struct drm_device *dev);
void vc4_irq_reset(struct drm_device *dev);

/* vc4_hvs.c */
extern struct platform_driver vc4_hvs_driver;
void vc4_hvs_dump_state(struct drm_device *dev);
int vc4_hvs_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_kms.c */
int vc4_kms_load(struct drm_device *dev);

/* vc4_plane.c */
struct drm_plane *vc4_plane_init(struct drm_device *dev,
				 enum drm_plane_type type);
u32 vc4_plane_write_dlist(struct drm_plane *plane, u32 __iomem *dlist);
u32 vc4_plane_dlist_size(const struct drm_plane_state *state);
void vc4_plane_async_set_fb(struct drm_plane *plane,
			    struct drm_framebuffer *fb);

/* vc4_v3d.c */
extern struct platform_driver vc4_v3d_driver;
int vc4_v3d_debugfs_ident(struct seq_file *m, void *unused);
int vc4_v3d_debugfs_regs(struct seq_file *m, void *unused);
int vc4_v3d_get_bin_slot(struct vc4_dev *vc4);

/* vc4_validate.c -- command-list and shader-record validation. */
int
vc4_validate_bin_cl(struct drm_device *dev,
		    void *validated,
		    void *unvalidated,
		    struct vc4_exec_info *exec);

int
vc4_validate_shader_recs(struct drm_device *dev, struct vc4_exec_info *exec);

struct drm_gem_cma_object *vc4_use_bo(struct vc4_exec_info *exec,
				      uint32_t hindex);

int vc4_get_rcl(struct drm_device *dev, struct vc4_exec_info *exec);

bool vc4_check_tex_size(struct vc4_exec_info *exec,
			struct drm_gem_cma_object *fbo,
			uint32_t offset, uint8_t tiling_format,
			uint32_t width, uint32_t height, uint8_t cpp);

/* vc4_validate_shaders.c */
struct vc4_validated_shader_info *
vc4_validate_shader(struct drm_gem_cma_object *shader_obj);

/* vc4_perfmon.c */
void vc4_perfmon_get(struct vc4_perfmon *perfmon);
void vc4_perfmon_put(struct vc4_perfmon *perfmon);
void vc4_perfmon_start(struct vc4_dev *vc4, struct vc4_perfmon *perfmon);
void vc4_perfmon_stop(struct vc4_dev *vc4, struct vc4_perfmon *perfmon,
		      bool capture);
struct vc4_perfmon *vc4_perfmon_find(struct vc4_file *vc4file, int id);
void vc4_perfmon_open_file(struct vc4_file *vc4file);
void vc4_perfmon_close_file(struct vc4_file *vc4file);
int vc4_perfmon_create_ioctl(struct drm_device *dev, void *data,
			     struct drm_file *file_priv);
int vc4_perfmon_destroy_ioctl(struct drm_device *dev, void *data,
			      struct drm_file *file_priv);
int vc4_perfmon_get_values_ioctl(struct drm_device *dev, void *data,
				 struct drm_file *file_priv);
777