1
2
3
4
5
6
7
8
9#include <linux/reservation.h>
10#include <drm/drmP.h>
11#include <drm/drm_encoder.h>
12#include <drm/drm_gem_cma_helper.h>
13#include <drm/drm_atomic.h>
14#include <drm/drm_syncobj.h>
15
16#include "uapi/drm/vc4_drm.h"
17
18
19
20
/* Labels for the kernel's own BO allocations, used for per-type BO
 * accounting (see vc4_dev.bo_labels and vc4_bo.label below).
 * Userspace-assigned labels presumably get IDs starting at
 * VC4_BO_TYPE_COUNT -- confirm against vc4_bo.c.
 */
enum vc4_kernel_bo_type {
	/* Any kernel allocation that doesn't have a more specific
	 * label assigned yet.
	 */
	VC4_BO_TYPE_KERNEL,
	VC4_BO_TYPE_V3D,
	VC4_BO_TYPE_V3D_SHADER,
	VC4_BO_TYPE_DUMB,
	VC4_BO_TYPE_BIN,
	VC4_BO_TYPE_RCL,
	VC4_BO_TYPE_BCL,
	VC4_BO_TYPE_KERNEL_CACHE,
	VC4_BO_TYPE_COUNT	/* number of kernel labels, not a real label */
};
35
36
37
38
39
40
41
42struct vc4_perfmon {
43
44
45
46 refcount_t refcnt;
47
48
49
50
51 u8 ncounters;
52
53
54 u8 events[DRM_VC4_MAX_PERF_COUNTERS];
55
56
57
58
59
60
61
62
63 u64 counters[0];
64};
65
/* Driver-private state hanging off the drm_device (see to_vc4_dev()). */
struct vc4_dev {
	struct drm_device *dev;

	/* Sub-devices bound by the component framework; any of these
	 * may be NULL if the corresponding hardware isn't present --
	 * TODO confirm against the bind code in the .c files.
	 */
	struct vc4_hdmi *hdmi;
	struct vc4_hvs *hvs;
	struct vc4_v3d *v3d;
	struct vc4_dpi *dpi;
	struct vc4_dsi *dsi1;
	struct vc4_vec *vec;

	/* State captured at GPU hang time for the GET_HANG_STATE ioctl. */
	struct vc4_hang_state *hang_state;

	/* The kernel-space BO cache.  Tracks buffers that have been
	 * unreferenced by all other users but not yet freed, so
	 * re-allocations of the same size can be satisfied cheaply.
	 */
	struct vc4_bo_cache {
		/* Array of list heads for entries in the BO cache,
		 * indexed by size, so lookups when allocating are O(1).
		 */
		struct list_head *size_list;
		uint32_t size_list_size;

		/* List of all BOs in the cache, ordered by age, so
		 * freeing the oldest buffers is O(1).  time_work/
		 * time_timer drive periodic eviction of stale entries.
		 */
		struct list_head time_list;
		struct work_struct time_work;
		struct timer_list time_timer;
	} bo_cache;

	/* BO label table for allocation accounting; the first
	 * VC4_BO_TYPE_COUNT slots are the kernel's own labels.
	 */
	u32 num_labels;
	struct vc4_label {
		const char *name;
		u32 num_allocated;
		u32 size_allocated;
	} *bo_labels;

	/* Protects bo_cache and bo_labels. */
	struct mutex bo_lock;

	/* Purgeable BO pool.  BOs in this pool can have their backing
	 * memory reclaimed under memory pressure; purged_* track
	 * statistics about past purges.
	 */
	struct {
		struct list_head list;
		unsigned int num;
		size_t size;
		unsigned int purged_num;
		size_t purged_size;
		struct mutex lock;
	} purgeable;

	/* dma_fence context number used for fences emitted by this device. */
	uint64_t dma_fence_context;

	/* Sequence number for the last job queued in bin_job_list.
	 * Starts at 0 (no jobs emitted).
	 */
	uint64_t emit_seqno;

	/* Sequence number for the last completed job on the GPU.
	 * Starts at 0 (no jobs completed).
	 */
	uint64_t finished_seqno;

	/* List of all struct vc4_exec_info for jobs to be executed in
	 * the binner.
	 */
	struct list_head bin_job_list;

	/* List of all struct vc4_exec_info for jobs that have
	 * completed binning and are ready for rendering.
	 */
	struct list_head render_job_list;

	/* List of the finished vc4_exec_infos waiting to be freed by
	 * job_done_work.
	 */
	struct list_head job_done_list;

	/* Spinlock used to synchronize the job list and seqno
	 * accesses -- presumably between the IRQ handler and ioctl
	 * paths; confirm in vc4_gem.c/vc4_irq.c.
	 */
	spinlock_t job_lock;
	wait_queue_head_t job_wait_queue;
	struct work_struct job_done_work;

	/* Perfmon attached to the job currently running on the GPU,
	 * if any (see vc4_perfmon_start()/vc4_perfmon_stop()).
	 */
	struct vc4_perfmon *active_perfmon;

	/* List of struct vc4_seqno_cb callbacks to be run when their
	 * seqno is passed (see vc4_queue_seqno_cb()).
	 */
	struct list_head seqno_cb_list;

	/* The memory the binner uses for tile alloc/tile state and
	 * overflow allocations.
	 */
	struct vc4_bo *bin_bo;

	/* Size of blocks allocated within bin_bo. */
	uint32_t bin_alloc_size;

	/* Bitmap of which bin_alloc_size chunks in bin_bo are
	 * currently in use (see vc4_v3d_get_bin_slot()).
	 */
	uint32_t bin_alloc_used;

	/* Bitmask of the chunks currently used as binner overflow memory. */
	uint32_t bin_alloc_overflow;

	/* Work item that replenishes binner overflow memory from IRQ context. */
	struct work_struct overflow_mem_work;

	/* V3D power domain refcount, protected by power_lock. */
	int power_refcount;

	/* Mutex controlling the power refcount. */
	struct mutex power_lock;

	/* GPU hang detection: timer re-checks progress and queues
	 * reset_work when the command threads stop advancing.
	 */
	struct {
		struct timer_list timer;
		struct work_struct reset_work;
	} hangcheck;

	/* Serializes asynchronous modesets. */
	struct semaphore async_modeset;

	/* Lock and private object for the color transform matrix state. */
	struct drm_modeset_lock ctm_state_lock;
	struct drm_private_obj ctm_manager;
};
202
203static inline struct vc4_dev *
204to_vc4_dev(struct drm_device *dev)
205{
206 return (struct vc4_dev *)dev->dev_private;
207}
208
/* A vc4 buffer object, wrapping the CMA GEM object. */
struct vc4_bo {
	struct drm_gem_cma_object base;

	/* seqno of the last job to render using this BO. */
	uint64_t seqno;

	/* seqno of the last job to use the RCL to write to this BO.
	 *
	 * Note that this doesn't include binner overflow memory
	 * writes.
	 */
	uint64_t write_seqno;

	/* Whether the buffer contents use the T tiling format. */
	bool t_format;

	/* List entry for the BO's position in either
	 * vc4_exec_info->unref_list or vc4_dev->bo_cache.time_list.
	 */
	struct list_head unref_head;

	/* Time in jiffies when the BO was put in vc4->bo_cache. */
	unsigned long free_time;

	/* List entry for the BO's position in vc4_dev->bo_cache.size_list. */
	struct list_head size_head;

	/* Struct for shader validation state, if created by
	 * DRM_IOCTL_VC4_CREATE_SHADER_BO.
	 */
	struct vc4_validated_shader_info *validated_shader;

	/* Reservation object in use; normally resv == &_resv, but
	 * presumably points at the exporter's object for imported
	 * BOs -- confirm in the prime import path.
	 */
	struct reservation_object *resv;
	struct reservation_object _resv;

	/* One of enum vc4_kernel_bo_type, or an index past
	 * VC4_BO_TYPE_COUNT for a userspace-assigned label.
	 */
	int label;

	/* Count of active users of the BO (GPU or display); used to
	 * decide whether the BO may be moved to the purgeable list.
	 */
	refcount_t usecnt;

	/* Purgeable/purged madvise state, protected by madv_lock. */
	u32 madv;
	struct mutex madv_lock;
};
259
260static inline struct vc4_bo *
261to_vc4_bo(struct drm_gem_object *bo)
262{
263 return (struct vc4_bo *)bo;
264}
265
/* vc4's dma_fence subclass; signaled when the device's finished_seqno
 * reaches this fence's seqno.
 */
struct vc4_fence {
	struct dma_fence base;
	struct drm_device *dev;
	/* vc4 seqno for the signaled() test. */
	uint64_t seqno;
};
272
273static inline struct vc4_fence *
274to_vc4_fence(struct dma_fence *fence)
275{
276 return (struct vc4_fence *)fence;
277}
278
/* Callback descriptor registered via vc4_queue_seqno_cb(); func is run
 * from a work item once the given seqno has passed.
 */
struct vc4_seqno_cb {
	struct work_struct work;
	uint64_t seqno;
	void (*func)(struct vc4_seqno_cb *cb);
};
284
/* Per-V3D (GPU) subdevice state. */
struct vc4_v3d {
	struct vc4_dev *vc4;
	struct platform_device *pdev;
	void __iomem *regs;	/* mapped V3D register block (V3D_READ/V3D_WRITE) */
	struct clk *clk;
};
291
/* Per-HVS (hardware video scaler / compositor) subdevice state. */
struct vc4_hvs {
	struct platform_device *pdev;
	void __iomem *regs;	/* mapped HVS register block (HVS_READ/HVS_WRITE) */
	u32 __iomem *dlist;	/* display list memory in the HVS */

	/* Memory manager for CRTCs to allocate space in the display
	 * list.
	 */
	struct drm_mm dlist_mm;

	/* Memory manager for the LBM memory used by HVS scaling. */
	struct drm_mm lbm_mm;
	spinlock_t mm_lock;	/* protects the two drm_mm managers above */

	/* LBM allocation holding the Mitchell/Netravali scaling filter
	 * coefficients.
	 */
	struct drm_mm_node mitchell_netravali_filter;
};
307
/* vc4's drm_plane subclass (no extra state beyond the base). */
struct vc4_plane {
	struct drm_plane base;
};
311
312static inline struct vc4_plane *
313to_vc4_plane(struct drm_plane *plane)
314{
315 return (struct vc4_plane *)plane;
316}
317
/* Which HVS scaler mode a plane channel uses.  NOTE(review): TPZ is
 * the trapezoidal filter and PPF the polyphase filter; which one is
 * selected for up- vs downscaling is decided in vc4_plane.c -- confirm
 * there before relying on it.
 */
enum vc4_scaling_mode {
	VC4_SCALING_NONE,
	VC4_SCALING_TPZ,
	VC4_SCALING_PPF,
};
323
/* vc4's drm_plane_state subclass, carrying the precomputed HVS display
 * list for the plane.
 */
struct vc4_plane_state {
	struct drm_plane_state base;

	/* System memory copy of the display list for this element,
	 * computed at atomic_check time.
	 */
	u32 *dlist;
	u32 dlist_size;		/* Number of dwords allocated for the display list */
	u32 dlist_count;	/* Number of used dwords */

	/* Offsets in the dlist to various words, for later partial
	 * updates (e.g. pageflip or cursor moves).
	 */
	u32 pos0_offset;
	u32 pos2_offset;
	u32 ptr0_offset;

	/* Offset where the plane's dlist was last stored in the
	 * hardware dlist memory.
	 */
	u32 __iomem *hw_dlist;

	/* Clipped coordinates of the plane on the display. */
	int crtc_x, crtc_y, crtc_w, crtc_h;
	/* Clipped source area being scanned from in the FB. */
	u32 src_x, src_y;

	/* Source size; two entries presumably cover the Y and CbCr
	 * channels of planar YUV formats -- confirm in vc4_plane.c.
	 */
	u32 src_w[2], src_h[2];

	/* Scaling selection per axis, per channel. */
	enum vc4_scaling_mode x_scaling[2], y_scaling[2];
	bool is_unity;		/* no scaling needed at all */
	bool is_yuv;		/* format is planar YUV */

	/* Byte offset from the start of the plane's BO at which each
	 * color plane's scanout starts.
	 */
	u32 offsets[3];

	/* Our allocation in the HVS LBM for temporary storage during
	 * scaling.
	 */
	struct drm_mm_node lbm;

	/* Hint to the CRTC that it might need to enable background
	 * color fill behind this plane.
	 */
	bool needs_bg_fill;
};
371
372static inline struct vc4_plane_state *
373to_vc4_plane_state(struct drm_plane_state *state)
374{
375 return (struct vc4_plane_state *)state;
376}
377
/* Identifies which scanout hardware block an encoder drives, used by
 * the CRTC code (vc4_crtc_data.encoder_types) to match encoders to
 * pixelvalves.
 */
enum vc4_encoder_type {
	VC4_ENCODER_TYPE_NONE,
	VC4_ENCODER_TYPE_HDMI,
	VC4_ENCODER_TYPE_VEC,
	VC4_ENCODER_TYPE_DSI0,
	VC4_ENCODER_TYPE_DSI1,
	VC4_ENCODER_TYPE_SMI,
	VC4_ENCODER_TYPE_DPI,
};
387
/* vc4's drm_encoder subclass. */
struct vc4_encoder {
	struct drm_encoder base;
	enum vc4_encoder_type type;
	/* Value selecting this encoder's clock source; presumably
	 * written to the pixelvalve mux -- confirm in vc4_crtc.c.
	 */
	u32 clock_select;
};
393
/* Upcast from the embedded drm_encoder to the vc4 encoder. */
static inline struct vc4_encoder *
to_vc4_encoder(struct drm_encoder *encoder)
{
	return container_of(encoder, struct vc4_encoder, base);
}
399
/* Static, per-pixelvalve configuration data. */
struct vc4_crtc_data {
	/* Which channel of the HVS this pixelvalve sources from. */
	int hvs_channel;

	/* Encoder types this pixelvalve can feed. */
	enum vc4_encoder_type encoder_types[4];
};
406
/* vc4's drm_crtc subclass, one per pixelvalve. */
struct vc4_crtc {
	struct drm_crtc base;
	const struct vc4_crtc_data *data;
	void __iomem *regs;	/* mapped pixelvalve register block */

	/* Timestamp at start of the vblank irq - unaffected by lock delays. */
	ktime_t t_vblank;

	/* Which HVS channel we're using for our CRTC. */
	int channel;

	/* Gamma LUT contents, cached for writing to the hardware. */
	u8 lut_r[256];
	u8 lut_g[256];
	u8 lut_b[256];

	/* Size of the COB memory allocated to this CRTC -- units not
	 * visible here; TODO confirm (pixels vs bytes) in vc4_crtc.c.
	 */
	u32 cob_size;

	/* Pending vblank event to deliver at the next vblank, if any. */
	struct drm_pending_vblank_event *event;
};
426
427static inline struct vc4_crtc *
428to_vc4_crtc(struct drm_crtc *crtc)
429{
430 return (struct vc4_crtc *)crtc;
431}
432
/* MMIO accessors for the V3D and HVS register blocks.  These expect a
 * local "vc4" (struct vc4_dev *) to be in scope at the call site.
 */
#define V3D_READ(offset) readl(vc4->v3d->regs + offset)
#define V3D_WRITE(offset, val) writel(val, vc4->v3d->regs + offset)
#define HVS_READ(offset) readl(vc4->hvs->regs + offset)
#define HVS_WRITE(offset, val) writel(val, vc4->hvs->regs + offset)
437
/* Per-submitted-job state, covering validation, binning and rendering. */
struct vc4_exec_info {
	/* Sequence number for this bin/render job. */
	uint64_t seqno;

	/* Latest write_seqno of any BO that binning depends on. */
	uint64_t bin_dep_seqno;

	/* Fence signaled when the job completes. */
	struct dma_fence *fence;

	/* Last current addresses the hardware was processing when the
	 * hangcheck timer checked on us.
	 */
	uint32_t last_ct0ca, last_ct1ca;

	/* Kernel-space copy of the ioctl arguments. */
	struct drm_vc4_submit_cl *args;

	/* This is the array of BOs that were looked up at the start
	 * of exec.  Command validation will use indices into this
	 * array.
	 */
	struct drm_gem_cma_object **bo;
	uint32_t bo_count;

	/* List of BOs that are being written by the RCL.  Other than
	 * the binner temporary storage, this is all the BOs written
	 * by the job.
	 */
	struct drm_gem_cma_object *rcl_write_bo[4];
	uint32_t rcl_write_bo_count;

	/* Pointers for our position in vc4->job_list. */
	struct list_head head;

	/* List of other BOs used in the job that need to be released
	 * once the job is complete.
	 */
	struct list_head unref_list;

	/* Current unvalidated indices into @bo loaded by the
	 * non-hardware packet that references GEM handles.
	 */
	uint32_t bo_index[2];

	/* This is the BO where we store the validated command lists,
	 * shader records, and uniforms.
	 */
	struct drm_gem_cma_object *exec_bo;

	/* This tracks the per-shader-record state that determines the
	 * length of the shader record and the offset it's expected to
	 * be found at.  It gets read in from the command lists.
	 */
	struct vc4_shader_state {
		/* Address in the exec BO of the shader record. */
		uint32_t addr;
		/* Maximum vertex index referenced by any primitive
		 * using this shader state.
		 */
		uint32_t max_index;
	} *shader_state;

	/* How many shader states the user declared they were using. */
	uint32_t shader_state_size;
	/* How many shader state records the validator has seen. */
	uint32_t shader_state_count;

	/* Flags set by the bin CL validator as it encounters the
	 * corresponding packets; used to enforce required packet
	 * ordering.
	 */
	bool found_tile_binning_mode_config_packet;
	bool found_start_tile_binning_packet;
	bool found_increment_semaphore_packet;
	bool found_flush;
	uint8_t bin_tiles_x, bin_tiles_y;

	/* Offset of the start of the tile alloc array (where each
	 * tile's binned CL will start).
	 */
	uint32_t tile_alloc_offset;

	/* Bitmask of which binner slots are freed when this job
	 * completes.
	 */
	uint32_t bin_slots;

	/* Computed addresses where the bin thread (ct0) and render
	 * thread (ct1) start and end.
	 */
	uint32_t ct0ca, ct0ea;
	uint32_t ct1ca, ct1ea;

	/* Pointer to the unvalidated bin CL (if present). */
	void *bin_u;

	/* Pointers to the shader recs: unvalidated source (u),
	 * validated destination (v), hardware address (p), and
	 * remaining size.  These advance as records are validated.
	 */
	void *shader_rec_u;
	void *shader_rec_v;
	uint32_t shader_rec_p;
	uint32_t shader_rec_size;

	/* Pointers to the uniform data, in the same u/v/p/size scheme
	 * as the shader recs above.
	 */
	void *uniforms_u;
	void *uniforms_v;
	uint32_t uniforms_p;
	uint32_t uniforms_size;

	/* Pointer to a performance monitor object if the user
	 * requested it, NULL otherwise.
	 */
	struct vc4_perfmon *perfmon;
};
550
551
552
553
/* Per-open-file driver state.  Currently only tracks the perfmon IDs
 * allocated by this client.
 */
struct vc4_file {
	struct {
		struct idr idr;		/* maps perfmon IDs to vc4_perfmon */
		struct mutex lock;	/* protects the IDR */
	} perfmon;
};
560
561static inline struct vc4_exec_info *
562vc4_first_bin_job(struct vc4_dev *vc4)
563{
564 return list_first_entry_or_null(&vc4->bin_job_list,
565 struct vc4_exec_info, head);
566}
567
568static inline struct vc4_exec_info *
569vc4_first_render_job(struct vc4_dev *vc4)
570{
571 return list_first_entry_or_null(&vc4->render_job_list,
572 struct vc4_exec_info, head);
573}
574
575static inline struct vc4_exec_info *
576vc4_last_render_job(struct vc4_dev *vc4)
577{
578 if (list_empty(&vc4->render_job_list))
579 return NULL;
580 return list_last_entry(&vc4->render_job_list,
581 struct vc4_exec_info, head);
582}
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
/* struct vc4_texture_sample_info - offsets into the uniform stream for
 * one texture sample's setup parameters, recorded by the shader
 * validator so draw-time validation can relocate the texture pointer
 * and bounds-check the sample against the BO.
 */
struct vc4_texture_sample_info {
	bool is_direct;
	uint32_t p_offset[4];
};
602
603
604
605
606
607
608
609
610
611
612
/* struct vc4_validated_shader_info - per-shader results of shader
 * validation (vc4_validate_shader()) that are needed later, at command
 * list validation time: uniform sizes, texture sample locations, and
 * offsets of uniform-address loads to relocate.
 */
struct vc4_validated_shader_info {
	uint32_t uniforms_size;
	uint32_t uniforms_src_size;
	uint32_t num_texture_samples;
	struct vc4_texture_sample_info *texture_samples;

	uint32_t num_uniform_addr_offsets;
	uint32_t *uniform_addr_offsets;

	/* Whether the shader runs in the two-thread mode. */
	bool is_threaded;
};
624
625
626
627
628
629
630
631
632
/* Poll until COND becomes true or MS milliseconds elapse.  Evaluates
 * to 0 on success or -ETIMEDOUT.  COND is re-checked once after the
 * deadline passes, so a long scheduling delay between polls can't
 * produce a spurious timeout.  W is the per-iteration sleep in ms;
 * when W is 0 (or sleeping isn't allowed) the loop busy-spins with
 * cpu_relax().
 */
#define _wait_for(COND, MS, W) ({ \
	unsigned long timeout__ = jiffies + msecs_to_jiffies(MS) + 1; \
	int ret__ = 0; \
	while (!(COND)) { \
		if (time_after(jiffies, timeout__)) { \
			if (!(COND)) \
				ret__ = -ETIMEDOUT; \
			break; \
		} \
		if (W && drm_can_sleep()) { \
			msleep(W); \
		} else { \
			cpu_relax(); \
		} \
	} \
	ret__; \
})

/* Common case: poll with a 1 ms sleep between checks. */
#define wait_for(COND, MS) _wait_for(COND, MS, 1)
652
653
/* vc4_bo.c */
struct drm_gem_object *vc4_create_object(struct drm_device *dev, size_t size);
void vc4_free_object(struct drm_gem_object *gem_obj);
struct vc4_bo *vc4_bo_create(struct drm_device *dev, size_t size,
			     bool from_cache, enum vc4_kernel_bo_type type);
int vc4_dumb_create(struct drm_file *file_priv,
		    struct drm_device *dev,
		    struct drm_mode_create_dumb *args);
struct dma_buf *vc4_prime_export(struct drm_device *dev,
				 struct drm_gem_object *obj, int flags);
int vc4_create_bo_ioctl(struct drm_device *dev, void *data,
			struct drm_file *file_priv);
int vc4_create_shader_bo_ioctl(struct drm_device *dev, void *data,
			       struct drm_file *file_priv);
int vc4_mmap_bo_ioctl(struct drm_device *dev, void *data,
		      struct drm_file *file_priv);
int vc4_set_tiling_ioctl(struct drm_device *dev, void *data,
			 struct drm_file *file_priv);
int vc4_get_tiling_ioctl(struct drm_device *dev, void *data,
			 struct drm_file *file_priv);
int vc4_get_hang_state_ioctl(struct drm_device *dev, void *data,
			     struct drm_file *file_priv);
int vc4_label_bo_ioctl(struct drm_device *dev, void *data,
		       struct drm_file *file_priv);
int vc4_fault(struct vm_fault *vmf);
int vc4_mmap(struct file *filp, struct vm_area_struct *vma);
struct reservation_object *vc4_prime_res_obj(struct drm_gem_object *obj);
int vc4_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma);
struct drm_gem_object *vc4_prime_import_sg_table(struct drm_device *dev,
						 struct dma_buf_attachment *attach,
						 struct sg_table *sgt);
void *vc4_prime_vmap(struct drm_gem_object *obj);
int vc4_bo_cache_init(struct drm_device *dev);
void vc4_bo_cache_destroy(struct drm_device *dev);
int vc4_bo_stats_debugfs(struct seq_file *m, void *arg);
int vc4_bo_inc_usecnt(struct vc4_bo *bo);
void vc4_bo_dec_usecnt(struct vc4_bo *bo);
void vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo);
void vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo);

/* vc4_crtc.c */
extern struct platform_driver vc4_crtc_driver;
int vc4_crtc_debugfs_regs(struct seq_file *m, void *arg);
bool vc4_crtc_get_scanoutpos(struct drm_device *dev, unsigned int crtc_id,
			     bool in_vblank_irq, int *vpos, int *hpos,
			     ktime_t *stime, ktime_t *etime,
			     const struct drm_display_mode *mode);

/* vc4_debugfs.c */
int vc4_debugfs_init(struct drm_minor *minor);

/* vc4_drv.c */
void __iomem *vc4_ioremap_regs(struct platform_device *dev, int index);

/* vc4_dpi.c */
extern struct platform_driver vc4_dpi_driver;
int vc4_dpi_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_dsi.c */
extern struct platform_driver vc4_dsi_driver;
int vc4_dsi_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_fence.c */
extern const struct dma_fence_ops vc4_fence_ops;

/* vc4_gem.c */
void vc4_gem_init(struct drm_device *dev);
void vc4_gem_destroy(struct drm_device *dev);
int vc4_submit_cl_ioctl(struct drm_device *dev, void *data,
			struct drm_file *file_priv);
int vc4_wait_seqno_ioctl(struct drm_device *dev, void *data,
			 struct drm_file *file_priv);
int vc4_wait_bo_ioctl(struct drm_device *dev, void *data,
		      struct drm_file *file_priv);
void vc4_submit_next_bin_job(struct drm_device *dev);
void vc4_submit_next_render_job(struct drm_device *dev);
void vc4_move_job_to_render(struct drm_device *dev, struct vc4_exec_info *exec);
int vc4_wait_for_seqno(struct drm_device *dev, uint64_t seqno,
		       uint64_t timeout_ns, bool interruptible);
void vc4_job_handle_completed(struct vc4_dev *vc4);
int vc4_queue_seqno_cb(struct drm_device *dev,
		       struct vc4_seqno_cb *cb, uint64_t seqno,
		       void (*func)(struct vc4_seqno_cb *cb));
int vc4_gem_madvise_ioctl(struct drm_device *dev, void *data,
			  struct drm_file *file_priv);

/* vc4_hdmi.c */
extern struct platform_driver vc4_hdmi_driver;
int vc4_hdmi_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_vec.c */
extern struct platform_driver vc4_vec_driver;
int vc4_vec_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_irq.c */
irqreturn_t vc4_irq(int irq, void *arg);
void vc4_irq_preinstall(struct drm_device *dev);
int vc4_irq_postinstall(struct drm_device *dev);
void vc4_irq_uninstall(struct drm_device *dev);
void vc4_irq_reset(struct drm_device *dev);

/* vc4_hvs.c */
extern struct platform_driver vc4_hvs_driver;
void vc4_hvs_dump_state(struct drm_device *dev);
int vc4_hvs_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_kms.c */
int vc4_kms_load(struct drm_device *dev);

/* vc4_plane.c */
struct drm_plane *vc4_plane_init(struct drm_device *dev,
				 enum drm_plane_type type);
u32 vc4_plane_write_dlist(struct drm_plane *plane, u32 __iomem *dlist);
u32 vc4_plane_dlist_size(const struct drm_plane_state *state);
void vc4_plane_async_set_fb(struct drm_plane *plane,
			    struct drm_framebuffer *fb);

/* vc4_v3d.c */
extern struct platform_driver vc4_v3d_driver;
int vc4_v3d_debugfs_ident(struct seq_file *m, void *unused);
int vc4_v3d_debugfs_regs(struct seq_file *m, void *unused);
int vc4_v3d_get_bin_slot(struct vc4_dev *vc4);

/* vc4_validate.c */
int
vc4_validate_bin_cl(struct drm_device *dev,
		    void *validated,
		    void *unvalidated,
		    struct vc4_exec_info *exec);

int
vc4_validate_shader_recs(struct drm_device *dev, struct vc4_exec_info *exec);

struct drm_gem_cma_object *vc4_use_bo(struct vc4_exec_info *exec,
				      uint32_t hindex);

int vc4_get_rcl(struct drm_device *dev, struct vc4_exec_info *exec);

bool vc4_check_tex_size(struct vc4_exec_info *exec,
			struct drm_gem_cma_object *fbo,
			uint32_t offset, uint8_t tiling_format,
			uint32_t width, uint32_t height, uint8_t cpp);

/* vc4_validate_shaders.c */
struct vc4_validated_shader_info *
vc4_validate_shader(struct drm_gem_cma_object *shader_obj);

/* vc4_perfmon.c */
void vc4_perfmon_get(struct vc4_perfmon *perfmon);
void vc4_perfmon_put(struct vc4_perfmon *perfmon);
void vc4_perfmon_start(struct vc4_dev *vc4, struct vc4_perfmon *perfmon);
void vc4_perfmon_stop(struct vc4_dev *vc4, struct vc4_perfmon *perfmon,
		      bool capture);
struct vc4_perfmon *vc4_perfmon_find(struct vc4_file *vc4file, int id);
void vc4_perfmon_open_file(struct vc4_file *vc4file);
void vc4_perfmon_close_file(struct vc4_file *vc4file);
int vc4_perfmon_create_ioctl(struct drm_device *dev, void *data,
			     struct drm_file *file_priv);
int vc4_perfmon_destroy_ioctl(struct drm_device *dev, void *data,
			      struct drm_file *file_priv);
int vc4_perfmon_get_values_ioctl(struct drm_device *dev, void *data,
				 struct drm_file *file_priv);
815