0001
0002
0003
0004
0005 #ifndef _VC4_DRV_H_
0006 #define _VC4_DRV_H_
0007
0008 #include <linux/delay.h>
0009 #include <linux/of.h>
0010 #include <linux/refcount.h>
0011 #include <linux/uaccess.h>
0012
0013 #include <drm/drm_atomic.h>
0014 #include <drm/drm_debugfs.h>
0015 #include <drm/drm_device.h>
0016 #include <drm/drm_encoder.h>
0017 #include <drm/drm_gem_cma_helper.h>
0018 #include <drm/drm_managed.h>
0019 #include <drm/drm_mm.h>
0020 #include <drm/drm_modeset_lock.h>
0021
0022 #include "uapi/drm/vc4_drm.h"
0023
0024 struct drm_device;
0025 struct drm_gem_object;
0026
0027
0028
0029
/*
 * Kernel-internal BO allocation labels, used for per-type allocation
 * accounting (see the vc4_label array in struct vc4_dev).  Userspace
 * labels are expected to use indices at or above VC4_BO_TYPE_COUNT.
 */
enum vc4_kernel_bo_type {
	/*
	 * Any kernel BO allocation that has not been given a more
	 * specific label below.
	 */
	VC4_BO_TYPE_KERNEL,
	VC4_BO_TYPE_V3D,		/* userspace V3D buffer */
	VC4_BO_TYPE_V3D_SHADER,		/* validated shader BO */
	VC4_BO_TYPE_DUMB,		/* dumb scanout buffer */
	VC4_BO_TYPE_BIN,		/* binner tile memory pool */
	VC4_BO_TYPE_RCL,		/* kernel-built render control list */
	VC4_BO_TYPE_BCL,		/* binner control list copy */
	VC4_BO_TYPE_KERNEL_CACHE,	/* BO sitting in the kernel BO cache */
	VC4_BO_TYPE_COUNT		/* number of kernel labels, not a label */
};
0044
0045
0046
0047
0048
0049
0050
/*
 * struct vc4_perfmon - performance-counter monitor object, created via
 * the perfmon ioctls and attachable to submitted jobs.  Allocated with
 * a trailing flexible array of counter values.
 */
struct vc4_perfmon {
	/* Device the perfmon belongs to. */
	struct vc4_dev *dev;

	/*
	 * Reference count.  The perfmon is reference-counted because it
	 * can be looked up by id (vc4_perfmon_find()) and attached to
	 * in-flight jobs (see struct vc4_exec_info::perfmon).
	 */
	refcount_t refcnt;

	/*
	 * Number of hardware counters monitored, at most
	 * DRM_VC4_MAX_PERF_COUNTERS.
	 */
	u8 ncounters;

	/* Event id selected for each monitored counter. */
	u8 events[DRM_VC4_MAX_PERF_COUNTERS];

	/*
	 * Accumulated counter values; flexible array of @ncounters
	 * entries, read back via vc4_perfmon_get_values_ioctl().
	 */
	u64 counters[];
};
0076
/*
 * struct vc4_dev - top-level driver state for the Broadcom VC4/VC5 GPU
 * and display pipeline.  Embeds the drm_device; convert back with
 * to_vc4_dev().
 */
struct vc4_dev {
	struct drm_device base;

	/* True on VC5-generation hardware; gates 3D/legacy paths. */
	bool is_vc5;

	unsigned int irq;

	/* Sub-block state, filled in when each component binds. */
	struct vc4_hvs *hvs;
	struct vc4_v3d *v3d;
	struct vc4_dpi *dpi;
	struct vc4_vec *vec;
	struct vc4_txp *txp;

	/* Snapshot of GPU state captured after a hang, exposed through
	 * vc4_get_hang_state_ioctl().
	 */
	struct vc4_hang_state *hang_state;

	/*
	 * Cache of recently-freed BOs, kept so same-sized allocations
	 * can be satisfied without a fresh CMA allocation.  Protected by
	 * bo_lock.
	 */
	struct vc4_bo_cache {
		/*
		 * Array of lists of cached BOs bucketed by size, plus
		 * the current array length.  NOTE(review): bucket key is
		 * presumably page count - confirm against vc4_bo.c.
		 */
		struct list_head *size_list;
		uint32_t size_list_size;

		/*
		 * All cached BOs in eviction (time) order, with the
		 * work item and timer that reclaim stale entries.
		 */
		struct list_head time_list;
		struct work_struct time_work;
		struct timer_list time_timer;
	} bo_cache;

	/*
	 * Per-label allocation statistics.  Indices below
	 * VC4_BO_TYPE_COUNT are kernel labels; further entries are
	 * created by vc4_label_bo_ioctl().
	 */
	u32 num_labels;
	struct vc4_label {
		const char *name;
		u32 num_allocated;
		u32 size_allocated;
	} *bo_labels;

	/* Protects bo_cache and bo_labels. */
	struct mutex bo_lock;

	/*
	 * Bookkeeping for purgeable BOs (madvise DONTNEED): the list of
	 * purgeable BOs, counts/sizes of purgeable and already-purged
	 * BOs, and the lock protecting them.
	 */
	struct {
		struct list_head list;
		unsigned int num;
		size_t size;
		unsigned int purged_num;
		size_t purged_size;
		struct mutex lock;
	} purgeable;

	/* dma_fence context number used for fences emitted by this device. */
	uint64_t dma_fence_context;

	/* Sequence number for the last job queued for execution. */
	uint64_t emit_seqno;

	/* Sequence number of the most recently completed job. */
	uint64_t finished_seqno;

	/* Jobs waiting to run on the binner.  Protected by job_lock. */
	struct list_head bin_job_list;

	/*
	 * Jobs that have finished binning and are waiting for (or
	 * running on) the render engine.  Protected by job_lock.
	 */
	struct list_head render_job_list;

	/*
	 * Completed jobs awaiting cleanup by job_done_work.  Protected
	 * by job_lock.
	 */
	struct list_head job_done_list;

	/* Protects the job lists above and seqno_cb_list. */
	spinlock_t job_lock;
	wait_queue_head_t job_wait_queue;
	struct work_struct job_done_work;

	/* Perfmon attached to the currently running job, if any. */
	struct vc4_perfmon *active_perfmon;

	/* Callbacks fired as finished_seqno advances (vc4_queue_seqno_cb()). */
	struct list_head seqno_cb_list;

	/*
	 * Shared memory pool the binner allocates tile state and tile
	 * lists from.  Protected by bin_bo_lock / bin_bo_kref.
	 */
	struct vc4_bo *bin_bo;

	/* Size of @bin_bo in bytes. */
	uint32_t bin_alloc_size;

	/*
	 * Bitmask of bin_bo slots currently handed out - presumably;
	 * see vc4_v3d_get_bin_slot().
	 */
	uint32_t bin_alloc_used;

	/* Slot(s) reserved for binner overflow memory (confirm in vc4_v3d.c). */
	uint32_t bin_alloc_overflow;

	/* HVS underrun counter (see vc4_hvs_{mask,unmask}_underrun()). */
	atomic_t underrun;

	/* Work item that replenishes binner overflow memory. */
	struct work_struct overflow_mem_work;

	/* V3D power reference count; see vc4_v3d_pm_get()/put(). */
	int power_refcount;

	/* Set to enable the load-tracker atomic check. */
	bool load_tracker_enabled;

	/* Mutex controlling the power refcount. */
	struct mutex power_lock;

	/* GPU hang detection: timer arming the reset work item. */
	struct {
		struct timer_list timer;
		struct work_struct reset_work;
	} hangcheck;

	/* Modeset-global state: CTM, HVS channel and load-tracker
	 * private objects, and the lock serializing CTM updates.
	 */
	struct drm_modeset_lock ctm_state_lock;
	struct drm_private_obj ctm_manager;
	struct drm_private_obj hvs_channels;
	struct drm_private_obj load_tracker;

	/*
	 * List of debugfs entries registered by components before the
	 * DRM minor exists; drained by vc4_debugfs_init().
	 */
	struct list_head debugfs_list;

	/* Mutex for binner bo allocation. */
	struct mutex bin_bo_lock;

	/* Reference count for users of @bin_bo. */
	struct kref bin_bo_kref;
};
0236
0237 static inline struct vc4_dev *
0238 to_vc4_dev(struct drm_device *dev)
0239 {
0240 return container_of(dev, struct vc4_dev, base);
0241 }
0242
/*
 * struct vc4_bo - driver-private buffer object, embedding the CMA GEM
 * object.  Convert from a GEM object with to_vc4_bo().
 */
struct vc4_bo {
	struct drm_gem_cma_object base;

	/* Seqno of the last job to read from this BO. */
	uint64_t seqno;

	/*
	 * Seqno of the last job to write to this BO, tracked separately
	 * so later read-only jobs need not wait on pure readers (see
	 * bin_dep_seqno in struct vc4_exec_info).
	 */
	uint64_t write_seqno;

	/* Set when the buffer uses the hardware T-format tiling layout. */
	bool t_format;

	/*
	 * List entry for the BO cache's time_list, and for the job's
	 * unref_list while the BO is owned by an exec.
	 */
	struct list_head unref_head;

	/* Time (jiffies) the BO entered the cache, for stale eviction. */
	unsigned long free_time;

	/* List entry for the BO cache's per-size size_list bucket. */
	struct list_head size_head;

	/*
	 * Validation results if this BO was created as a shader
	 * (VC4_BO_TYPE_V3D_SHADER); NULL otherwise.
	 */
	struct vc4_validated_shader_info *validated_shader;

	/* Index into vc4->bo_labels for allocation accounting. */
	int label;

	/*
	 * Usage count: non-zero while the BO's backing pages must not
	 * be purged (see vc4_bo_inc_usecnt()/vc4_bo_dec_usecnt()).
	 */
	refcount_t usecnt;

	/* madvise state (willneed/dontneed/purged); guarded by madv_lock. */
	u32 madv;
	struct mutex madv_lock;
};
0289
0290 static inline struct vc4_bo *
0291 to_vc4_bo(struct drm_gem_object *bo)
0292 {
0293 return container_of(to_drm_gem_cma_obj(bo), struct vc4_bo, base);
0294 }
0295
/*
 * struct vc4_fence - dma_fence signalled when the job with @seqno
 * completes (i.e. when vc4->finished_seqno reaches it).
 */
struct vc4_fence {
	struct dma_fence base;
	struct drm_device *dev;
	/* vc4 seqno for signaled() test */
	uint64_t seqno;
};
0302
0303 static inline struct vc4_fence *
0304 to_vc4_fence(struct dma_fence *fence)
0305 {
0306 return container_of(fence, struct vc4_fence, base);
0307 }
0308
/*
 * struct vc4_seqno_cb - callback scheduled (via @work) once
 * vc4->finished_seqno reaches @seqno; registered with
 * vc4_queue_seqno_cb().
 */
struct vc4_seqno_cb {
	struct work_struct work;
	uint64_t seqno;
	void (*func)(struct vc4_seqno_cb *cb);
};
0314
/* Per-V3D (3D engine) sub-device state. */
struct vc4_v3d {
	struct vc4_dev *vc4;
	struct platform_device *pdev;
	void __iomem *regs;		/* V3D register block (V3D_READ/V3D_WRITE) */
	struct clk *clk;
	struct debugfs_regset32 regset;	/* register dump for debugfs */
};
0322
/* Per-HVS (hardware video scaler / compositor) sub-device state. */
struct vc4_hvs {
	struct vc4_dev *vc4;
	struct platform_device *pdev;
	void __iomem *regs;		/* HVS register block (HVS_READ/HVS_WRITE) */
	u32 __iomem *dlist;		/* display-list memory, written by planes */

	struct clk *core_clk;

	/*
	 * Memory manager for CRTCs to allocate space in the display
	 * list.  Units are dwords.
	 */
	struct drm_mm dlist_mm;

	/* Memory manager for the LBM scaler line-buffer memory. */
	struct drm_mm lbm_mm;
	/* Protects both allocators above. */
	spinlock_t mm_lock;

	/* Allocation holding the scaler filter kernel coefficients. */
	struct drm_mm_node mitchell_netravali_filter;

	struct debugfs_regset32 regset;
};
0343
/* Driver wrapper around drm_plane; currently carries no extra state. */
struct vc4_plane {
	struct drm_plane base;
};
0347
0348 static inline struct vc4_plane *
0349 to_vc4_plane(struct drm_plane *plane)
0350 {
0351 return container_of(plane, struct vc4_plane, base);
0352 }
0353
/* HVS scaling mode selected per plane axis. */
enum vc4_scaling_mode {
	VC4_SCALING_NONE,	/* unity: no scaler needed */
	VC4_SCALING_TPZ,	/* trapezoidal (downscale) filter */
	VC4_SCALING_PPF,	/* polyphase (upscale) filter */
};
0359
/* Driver-private plane state, built at atomic_check time. */
struct vc4_plane_state {
	struct drm_plane_state base;

	/*
	 * System-memory copy of the display list for this plane,
	 * computed at atomic_check time and copied to hardware in
	 * vc4_plane_write_dlist().
	 */
	u32 *dlist;
	u32 dlist_size;		/* dwords allocated for the display list */
	u32 dlist_count;	/* dwords currently used */

	/*
	 * Offsets (in dwords) within @dlist of words that get patched
	 * after check time: position words for async updates, the
	 * plane-0 pointer, and the LBM allocation address.
	 */
	u32 pos0_offset;
	u32 pos2_offset;
	u32 ptr0_offset;
	u32 lbm_offset;

	/* Hardware dlist location this state was last written to. */
	u32 __iomem *hw_dlist;

	/* Clipped destination rectangle on the CRTC. */
	int crtc_x, crtc_y, crtc_w, crtc_h;
	/*
	 * Clipped source origin in the framebuffer.  NOTE(review):
	 * presumably 16.16 fixed point per DRM convention - confirm
	 * against vc4_plane.c.
	 */
	u32 src_x, src_y;

	/* Clipped source size; index 0 is luma/RGB, 1 is chroma for YUV. */
	u32 src_w[2], src_h[2];

	/* Per-axis scaling mode, again indexed luma/chroma. */
	enum vc4_scaling_mode x_scaling[2], y_scaling[2];
	bool is_unity;	/* no scaling on any axis */
	bool is_yuv;	/* multi-planar YUV source */

	/* Byte offset into each FB plane where scanout starts. */
	u32 offsets[3];

	/* Our allocation in LBM for temporary storage during scaling. */
	struct drm_mm_node lbm;

	/*
	 * Set when the plane leaves part of the screen uncovered, so
	 * the CRTC must program a background fill behind it.
	 */
	bool needs_bg_fill;

	/*
	 * Set once the dlist has been built, so repeated atomic checks
	 * on unchanged state can skip the work.
	 */
	bool dlist_initialized;

	/* HVS load contribution of this plane, for the load tracker. */
	u64 hvs_load;

	/* Memory-bus bandwidth contribution, for the load tracker. */
	u64 membus_load;
};
0423
0424 static inline struct vc4_plane_state *
0425 to_vc4_plane_state(struct drm_plane_state *state)
0426 {
0427 return container_of(state, struct vc4_plane_state, base);
0428 }
0429
/* Identifies which hardware encoder block a vc4_encoder drives. */
enum vc4_encoder_type {
	VC4_ENCODER_TYPE_NONE,
	VC4_ENCODER_TYPE_HDMI0,
	VC4_ENCODER_TYPE_HDMI1,
	VC4_ENCODER_TYPE_VEC,	/* composite TV out */
	VC4_ENCODER_TYPE_DSI0,
	VC4_ENCODER_TYPE_DSI1,
	VC4_ENCODER_TYPE_SMI,
	VC4_ENCODER_TYPE_DPI,
};
0440
/*
 * Driver wrapper around drm_encoder, adding hardware identity plus
 * optional hooks the CRTC code calls around its own enable/disable
 * sequence (in the order listed).
 */
struct vc4_encoder {
	struct drm_encoder base;
	enum vc4_encoder_type type;
	/* Encoder clock mux selection; meaning is per-CRTC (confirm). */
	u32 clock_select;

	/* Called by the CRTC before/while it is enabled. */
	void (*pre_crtc_configure)(struct drm_encoder *encoder, struct drm_atomic_state *state);
	void (*pre_crtc_enable)(struct drm_encoder *encoder, struct drm_atomic_state *state);
	void (*post_crtc_enable)(struct drm_encoder *encoder, struct drm_atomic_state *state);

	/* Called by the CRTC while/after it is disabled. */
	void (*post_crtc_disable)(struct drm_encoder *encoder, struct drm_atomic_state *state);
	void (*post_crtc_powerdown)(struct drm_encoder *encoder, struct drm_atomic_state *state);
};
0453
0454 static inline struct vc4_encoder *
0455 to_vc4_encoder(struct drm_encoder *encoder)
0456 {
0457 return container_of(encoder, struct vc4_encoder, base);
0458 }
0459
/* Static, per-SoC description of a CRTC. */
struct vc4_crtc_data {
	/* Bitmask of HVS channels usable by this CRTC. */
	unsigned int hvs_available_channels;

	/*
	 * HVS output this CRTC is wired to if statically assigned, or
	 * -1 if the muxing is decided at atomic-check time.
	 */
	int hvs_output;
};
0467
/* Static description of a pixelvalve-backed CRTC; extends vc4_crtc_data. */
struct vc4_pv_data {
	struct vc4_crtc_data base;

	/* Depth of the pixelvalve FIFO in bytes. */
	unsigned int fifo_depth;

	/* Pixels output per clock period. */
	u8 pixels_per_clock;

	/* Encoder types this pixelvalve can feed, by clock_select index. */
	enum vc4_encoder_type encoder_types[4];
	/* Name of the debugfs register-dump entry for this pixelvalve. */
	const char *debugfs_name;

};
0481
/* Driver-private CRTC (pixelvalve) state. */
struct vc4_crtc {
	struct drm_crtc base;
	struct platform_device *pdev;
	const struct vc4_crtc_data *data;
	void __iomem *regs;		/* pixelvalve register block */

	/* Timestamp at start of vblank irq, for vblank timestamping. */
	ktime_t t_vblank;

	/* Gamma LUT shadow copies, reloaded on CRTC enable. */
	u8 lut_r[256];
	u8 lut_g[256];
	u8 lut_b[256];

	/* Pending page-flip event, delivered at vblank. */
	struct drm_pending_vblank_event *event;

	struct debugfs_regset32 regset;

	/* Set when this CRTC feeds the TXP writeback block instead of a
	 * real display.
	 */
	bool feeds_txp;

	/*
	 * Protects the fields below, which are shared between the
	 * vblank interrupt handler and the atomic-commit path.
	 */
	spinlock_t irq_lock;

	/*
	 * HVS display-list slot currently being scanned out; updated
	 * from the vblank path.  Guarded by @irq_lock.
	 */
	unsigned int current_dlist;

	/*
	 * HVS channel currently feeding this CRTC; may change across
	 * commits when muxing is updated.  Guarded by @irq_lock.
	 */
	unsigned int current_hvs_channel;
};
0526
0527 static inline struct vc4_crtc *
0528 to_vc4_crtc(struct drm_crtc *crtc)
0529 {
0530 return container_of(crtc, struct vc4_crtc, base);
0531 }
0532
/* Return the static per-SoC description attached to @crtc. */
static inline const struct vc4_crtc_data *
vc4_crtc_to_vc4_crtc_data(const struct vc4_crtc *crtc)
{
	return crtc->data;
}
0538
0539 static inline const struct vc4_pv_data *
0540 vc4_crtc_to_vc4_pv_data(const struct vc4_crtc *crtc)
0541 {
0542 const struct vc4_crtc_data *data = vc4_crtc_to_vc4_crtc_data(crtc);
0543
0544 return container_of(data, struct vc4_pv_data, base);
0545 }
0546
/* Look up the encoder driving @crtc for the given CRTC state (vc4_crtc.c). */
struct drm_encoder *vc4_get_crtc_encoder(struct drm_crtc *crtc,
					 struct drm_crtc_state *state);
0549
/* Driver-private CRTC state. */
struct vc4_crtc_state {
	struct drm_crtc_state base;
	/* Allocation in the HVS display-list memory for this CRTC's dlist. */
	struct drm_mm_node mm;
	/* TXP writeback: set when a writeback job is armed for this commit. */
	bool txp_armed;
	/* HVS channel assigned to this CRTC, or VC4_HVS_CHANNEL_DISABLED. */
	unsigned int assigned_channel;

	/* Margins applied around the displayed area (TV overscan). */
	struct {
		unsigned int left;
		unsigned int right;
		unsigned int top;
		unsigned int bottom;
	} margins;

	/* Accumulated HVS load for this CRTC, used by the load tracker. */
	unsigned long hvs_load;

	/* Transitional state below, only valid during atomic commits */
	bool update_muxing;
};

/* Sentinel for @assigned_channel when the CRTC has no HVS channel. */
#define VC4_HVS_CHANNEL_DISABLED ((unsigned int)-1)
0571
0572 static inline struct vc4_crtc_state *
0573 to_vc4_crtc_state(struct drm_crtc_state *crtc_state)
0574 {
0575 return container_of(crtc_state, struct vc4_crtc_state, base);
0576 }
0577
/*
 * MMIO accessors.  V3D_READ/V3D_WRITE expect a local "vc4" pointer in
 * scope; HVS_READ/HVS_WRITE expect a local "hvs" pointer.
 */
#define V3D_READ(offset) readl(vc4->v3d->regs + offset)
#define V3D_WRITE(offset, val) writel(val, vc4->v3d->regs + offset)
#define HVS_READ(offset) readl(hvs->regs + offset)
#define HVS_WRITE(offset, val) writel(val, hvs->regs + offset)

/* Build a debugfs_regset32 entry whose name is the register's macro name. */
#define VC4_REG32(reg) { .name = #reg, .offset = reg }
0584
/*
 * struct vc4_exec_info - state for one submitted GPU job, from the
 * submit_cl ioctl through binning, rendering and cleanup.
 */
struct vc4_exec_info {
	struct vc4_dev *dev;

	/* Sequence number for this job. */
	uint64_t seqno;

	/* Latest write_seqno of any BO that binning depends on. */
	uint64_t bin_dep_seqno;

	/* Fence signalled when this job completes. */
	struct dma_fence *fence;

	/*
	 * Last current addresses the hardware was processing, used by
	 * the hangcheck to detect lack of progress.
	 */
	uint32_t last_ct0ca, last_ct1ca;

	/* Kernel copy of the userspace submit arguments. */
	struct drm_vc4_submit_cl *args;

	/*
	 * BOs referenced by the job, looked up from the handles in
	 * @args; index 0..bo_count-1 matches the userspace handle list.
	 */
	struct drm_gem_cma_object **bo;
	uint32_t bo_count;

	/*
	 * BOs the RCL writes to (color/Z/S targets), so the kernel can
	 * mark them written for later read-dependency tracking.
	 */
	struct drm_gem_cma_object *rcl_write_bo[4];
	uint32_t rcl_write_bo_count;

	/* Pointers for our position in vc4->job_list */
	struct list_head head;

	/*
	 * BOs allocated by the kernel for this job (RCL, BCL copy),
	 * unreferenced at job cleanup.
	 */
	struct list_head unref_list;

	/*
	 * Current indices into @bo while the bin CL is being
	 * validated (confirm against vc4_validate.c).
	 */
	uint32_t bo_index[2];

	/*
	 * Kernel-allocated BO holding the validated bin CL, shader
	 * records and uniforms, laid out per the ct0ca/shader_rec_p/
	 * uniforms_p pointers below.
	 */
	struct drm_gem_cma_object *exec_bo;

	/*
	 * Per-shader-record validation state: the record's address in
	 * @exec_bo, and the maximum vertex index it can safely be fed.
	 */
	struct vc4_shader_state {
		uint32_t addr;

		/* Maximum vertex index referenced by any attribute. */
		uint32_t max_index;
	} *shader_state;

	/* Number of entries allocated in @shader_state. */
	uint32_t shader_state_size;
	/* Number of entries actually used. */
	uint32_t shader_state_count;

	/* Bin CL validation progress flags and tile setup. */
	bool found_tile_binning_mode_config_packet;
	bool found_start_tile_binning_packet;
	bool found_increment_semaphore_packet;
	bool found_flush;
	uint8_t bin_tiles_x, bin_tiles_y;

	/* Offset of the tile allocation memory within the bin BO. */
	uint32_t tile_alloc_offset;

	/* Bitmask of the bin_bo slots this job holds (see vc4_v3d.c). */
	uint32_t bin_slots;

	/*
	 * Computed addresses pointing into exec_bo where we start the
	 * bin thread (ct0) and render thread (ct1).
	 */
	uint32_t ct0ca, ct0ea;
	uint32_t ct1ca, ct1ea;

	/* Pointer to the unvalidated bin CL (userspace copy). */
	void *bin_u;

	/*
	 * Shader records: unvalidated user copy (_u), validated kernel
	 * copy (_v), hardware address of the validated copy (_p), and
	 * its size.
	 */
	void *shader_rec_u;
	void *shader_rec_v;
	uint32_t shader_rec_p;
	uint32_t shader_rec_size;

	/* Uniforms: same _u/_v/_p/size scheme as the shader records. */
	void *uniforms_u;
	void *uniforms_v;
	uint32_t uniforms_p;
	uint32_t uniforms_size;

	/* Perfmon attached to this job, if any (reference held). */
	struct vc4_perfmon *perfmon;

	/* Whether the exec has taken a reference to the binner BO. */
	bool bin_bo_used;
};
0704
0705
0706
0707
0708 struct vc4_file {
0709 struct vc4_dev *dev;
0710
0711 struct {
0712 struct idr idr;
0713 struct mutex lock;
0714 } perfmon;
0715
0716 bool bin_bo_used;
0717 };
0718
/* Return the job at the head of the bin queue, or NULL if it is empty. */
static inline struct vc4_exec_info *
vc4_first_bin_job(struct vc4_dev *vc4)
{
	return list_first_entry_or_null(&vc4->bin_job_list,
					struct vc4_exec_info, head);
}
0725
/* Return the job at the head of the render queue, or NULL if empty. */
static inline struct vc4_exec_info *
vc4_first_render_job(struct vc4_dev *vc4)
{
	return list_first_entry_or_null(&vc4->render_job_list,
					struct vc4_exec_info, head);
}
0732
0733 static inline struct vc4_exec_info *
0734 vc4_last_render_job(struct vc4_dev *vc4)
0735 {
0736 if (list_empty(&vc4->render_job_list))
0737 return NULL;
0738 return list_last_entry(&vc4->render_job_list,
0739 struct vc4_exec_info, head);
0740 }
0741
0742
0743
0744
0745
0746
0747
0748
0749
0750
0751
0752
0753
0754
0755
0756 struct vc4_texture_sample_info {
0757 bool is_direct;
0758 uint32_t p_offset[4];
0759 };
0760
0761
0762
0763
0764
0765
0766
0767
0768
0769
0770
0771 struct vc4_validated_shader_info {
0772 uint32_t uniforms_size;
0773 uint32_t uniforms_src_size;
0774 uint32_t num_texture_samples;
0775 struct vc4_texture_sample_info *texture_samples;
0776
0777 uint32_t num_uniform_addr_offsets;
0778 uint32_t *uniform_addr_offsets;
0779
0780 bool is_threaded;
0781 };
0782
0783
0784
0785
0786
0787
0788
0789
0790
0791 #define __wait_for(OP, COND, US, Wmin, Wmax) ({ \
0792 const ktime_t end__ = ktime_add_ns(ktime_get_raw(), 1000ll * (US)); \
0793 long wait__ = (Wmin); \
0794 int ret__; \
0795 might_sleep(); \
0796 for (;;) { \
0797 const bool expired__ = ktime_after(ktime_get_raw(), end__); \
0798 OP; \
0799 \
0800 barrier(); \
0801 if (COND) { \
0802 ret__ = 0; \
0803 break; \
0804 } \
0805 if (expired__) { \
0806 ret__ = -ETIMEDOUT; \
0807 break; \
0808 } \
0809 usleep_range(wait__, wait__ * 2); \
0810 if (wait__ < (Wmax)) \
0811 wait__ <<= 1; \
0812 } \
0813 ret__; \
0814 })
0815
0816 #define _wait_for(COND, US, Wmin, Wmax) __wait_for(, (COND), (US), (Wmin), \
0817 (Wmax))
0818 #define wait_for(COND, MS) _wait_for((COND), (MS) * 1000, 10, 1000)
0819
0820
0821 struct drm_gem_object *vc4_create_object(struct drm_device *dev, size_t size);
0822 struct vc4_bo *vc4_bo_create(struct drm_device *dev, size_t size,
0823 bool from_cache, enum vc4_kernel_bo_type type);
0824 int vc4_bo_dumb_create(struct drm_file *file_priv,
0825 struct drm_device *dev,
0826 struct drm_mode_create_dumb *args);
0827 int vc4_create_bo_ioctl(struct drm_device *dev, void *data,
0828 struct drm_file *file_priv);
0829 int vc4_create_shader_bo_ioctl(struct drm_device *dev, void *data,
0830 struct drm_file *file_priv);
0831 int vc4_mmap_bo_ioctl(struct drm_device *dev, void *data,
0832 struct drm_file *file_priv);
0833 int vc4_set_tiling_ioctl(struct drm_device *dev, void *data,
0834 struct drm_file *file_priv);
0835 int vc4_get_tiling_ioctl(struct drm_device *dev, void *data,
0836 struct drm_file *file_priv);
0837 int vc4_get_hang_state_ioctl(struct drm_device *dev, void *data,
0838 struct drm_file *file_priv);
0839 int vc4_label_bo_ioctl(struct drm_device *dev, void *data,
0840 struct drm_file *file_priv);
0841 int vc4_bo_cache_init(struct drm_device *dev);
0842 int vc4_bo_inc_usecnt(struct vc4_bo *bo);
0843 void vc4_bo_dec_usecnt(struct vc4_bo *bo);
0844 void vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo);
0845 void vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo);
0846
0847
0848 extern struct platform_driver vc4_crtc_driver;
0849 int vc4_crtc_disable_at_boot(struct drm_crtc *crtc);
0850 int vc4_crtc_init(struct drm_device *drm, struct vc4_crtc *vc4_crtc,
0851 const struct drm_crtc_funcs *crtc_funcs,
0852 const struct drm_crtc_helper_funcs *crtc_helper_funcs);
0853 void vc4_crtc_destroy(struct drm_crtc *crtc);
0854 int vc4_page_flip(struct drm_crtc *crtc,
0855 struct drm_framebuffer *fb,
0856 struct drm_pending_vblank_event *event,
0857 uint32_t flags,
0858 struct drm_modeset_acquire_ctx *ctx);
0859 struct drm_crtc_state *vc4_crtc_duplicate_state(struct drm_crtc *crtc);
0860 void vc4_crtc_destroy_state(struct drm_crtc *crtc,
0861 struct drm_crtc_state *state);
0862 void vc4_crtc_reset(struct drm_crtc *crtc);
0863 void vc4_crtc_handle_vblank(struct vc4_crtc *crtc);
0864 void vc4_crtc_get_margins(struct drm_crtc_state *state,
0865 unsigned int *left, unsigned int *right,
0866 unsigned int *top, unsigned int *bottom);
0867
0868
0869 void vc4_debugfs_init(struct drm_minor *minor);
0870 #ifdef CONFIG_DEBUG_FS
0871 void vc4_debugfs_add_file(struct drm_device *drm,
0872 const char *filename,
0873 int (*show)(struct seq_file*, void*),
0874 void *data);
0875 void vc4_debugfs_add_regset32(struct drm_device *drm,
0876 const char *filename,
0877 struct debugfs_regset32 *regset);
0878 #else
0879 static inline void vc4_debugfs_add_file(struct drm_device *drm,
0880 const char *filename,
0881 int (*show)(struct seq_file*, void*),
0882 void *data)
0883 {
0884 }
0885
0886 static inline void vc4_debugfs_add_regset32(struct drm_device *drm,
0887 const char *filename,
0888 struct debugfs_regset32 *regset)
0889 {
0890 }
0891 #endif
0892
0893
0894 void __iomem *vc4_ioremap_regs(struct platform_device *dev, int index);
0895 int vc4_dumb_fixup_args(struct drm_mode_create_dumb *args);
0896
0897
0898 extern struct platform_driver vc4_dpi_driver;
0899
0900
0901 extern struct platform_driver vc4_dsi_driver;
0902
0903
0904 extern const struct dma_fence_ops vc4_fence_ops;
0905
0906
0907 int vc4_gem_init(struct drm_device *dev);
0908 int vc4_submit_cl_ioctl(struct drm_device *dev, void *data,
0909 struct drm_file *file_priv);
0910 int vc4_wait_seqno_ioctl(struct drm_device *dev, void *data,
0911 struct drm_file *file_priv);
0912 int vc4_wait_bo_ioctl(struct drm_device *dev, void *data,
0913 struct drm_file *file_priv);
0914 void vc4_submit_next_bin_job(struct drm_device *dev);
0915 void vc4_submit_next_render_job(struct drm_device *dev);
0916 void vc4_move_job_to_render(struct drm_device *dev, struct vc4_exec_info *exec);
0917 int vc4_wait_for_seqno(struct drm_device *dev, uint64_t seqno,
0918 uint64_t timeout_ns, bool interruptible);
0919 void vc4_job_handle_completed(struct vc4_dev *vc4);
0920 int vc4_queue_seqno_cb(struct drm_device *dev,
0921 struct vc4_seqno_cb *cb, uint64_t seqno,
0922 void (*func)(struct vc4_seqno_cb *cb));
0923 int vc4_gem_madvise_ioctl(struct drm_device *dev, void *data,
0924 struct drm_file *file_priv);
0925
0926
0927 extern struct platform_driver vc4_hdmi_driver;
0928
0929
0930 extern struct platform_driver vc4_vec_driver;
0931
0932
0933 extern struct platform_driver vc4_txp_driver;
0934
0935
0936 void vc4_irq_enable(struct drm_device *dev);
0937 void vc4_irq_disable(struct drm_device *dev);
0938 int vc4_irq_install(struct drm_device *dev, int irq);
0939 void vc4_irq_uninstall(struct drm_device *dev);
0940 void vc4_irq_reset(struct drm_device *dev);
0941
0942
0943 extern struct platform_driver vc4_hvs_driver;
0944 void vc4_hvs_stop_channel(struct vc4_hvs *hvs, unsigned int output);
0945 int vc4_hvs_get_fifo_from_output(struct vc4_hvs *hvs, unsigned int output);
0946 u8 vc4_hvs_get_fifo_frame_count(struct vc4_hvs *hvs, unsigned int fifo);
0947 int vc4_hvs_atomic_check(struct drm_crtc *crtc, struct drm_atomic_state *state);
0948 void vc4_hvs_atomic_begin(struct drm_crtc *crtc, struct drm_atomic_state *state);
0949 void vc4_hvs_atomic_enable(struct drm_crtc *crtc, struct drm_atomic_state *state);
0950 void vc4_hvs_atomic_disable(struct drm_crtc *crtc, struct drm_atomic_state *state);
0951 void vc4_hvs_atomic_flush(struct drm_crtc *crtc, struct drm_atomic_state *state);
0952 void vc4_hvs_dump_state(struct vc4_hvs *hvs);
0953 void vc4_hvs_unmask_underrun(struct vc4_hvs *hvs, int channel);
0954 void vc4_hvs_mask_underrun(struct vc4_hvs *hvs, int channel);
0955
0956
0957 int vc4_kms_load(struct drm_device *dev);
0958
0959
0960 struct drm_plane *vc4_plane_init(struct drm_device *dev,
0961 enum drm_plane_type type);
0962 int vc4_plane_create_additional_planes(struct drm_device *dev);
0963 u32 vc4_plane_write_dlist(struct drm_plane *plane, u32 __iomem *dlist);
0964 u32 vc4_plane_dlist_size(const struct drm_plane_state *state);
0965 void vc4_plane_async_set_fb(struct drm_plane *plane,
0966 struct drm_framebuffer *fb);
0967
0968
0969 extern struct platform_driver vc4_v3d_driver;
0970 extern const struct of_device_id vc4_v3d_dt_match[];
0971 int vc4_v3d_get_bin_slot(struct vc4_dev *vc4);
0972 int vc4_v3d_bin_bo_get(struct vc4_dev *vc4, bool *used);
0973 void vc4_v3d_bin_bo_put(struct vc4_dev *vc4);
0974 int vc4_v3d_pm_get(struct vc4_dev *vc4);
0975 void vc4_v3d_pm_put(struct vc4_dev *vc4);
0976
0977
0978 int
0979 vc4_validate_bin_cl(struct drm_device *dev,
0980 void *validated,
0981 void *unvalidated,
0982 struct vc4_exec_info *exec);
0983
0984 int
0985 vc4_validate_shader_recs(struct drm_device *dev, struct vc4_exec_info *exec);
0986
0987 struct drm_gem_cma_object *vc4_use_bo(struct vc4_exec_info *exec,
0988 uint32_t hindex);
0989
0990 int vc4_get_rcl(struct drm_device *dev, struct vc4_exec_info *exec);
0991
0992 bool vc4_check_tex_size(struct vc4_exec_info *exec,
0993 struct drm_gem_cma_object *fbo,
0994 uint32_t offset, uint8_t tiling_format,
0995 uint32_t width, uint32_t height, uint8_t cpp);
0996
0997
0998 struct vc4_validated_shader_info *
0999 vc4_validate_shader(struct drm_gem_cma_object *shader_obj);
1000
1001
1002 void vc4_perfmon_get(struct vc4_perfmon *perfmon);
1003 void vc4_perfmon_put(struct vc4_perfmon *perfmon);
1004 void vc4_perfmon_start(struct vc4_dev *vc4, struct vc4_perfmon *perfmon);
1005 void vc4_perfmon_stop(struct vc4_dev *vc4, struct vc4_perfmon *perfmon,
1006 bool capture);
1007 struct vc4_perfmon *vc4_perfmon_find(struct vc4_file *vc4file, int id);
1008 void vc4_perfmon_open_file(struct vc4_file *vc4file);
1009 void vc4_perfmon_close_file(struct vc4_file *vc4file);
1010 int vc4_perfmon_create_ioctl(struct drm_device *dev, void *data,
1011 struct drm_file *file_priv);
1012 int vc4_perfmon_destroy_ioctl(struct drm_device *dev, void *data,
1013 struct drm_file *file_priv);
1014 int vc4_perfmon_get_values_ioctl(struct drm_device *dev, void *data,
1015 struct drm_file *file_priv);
1016
1017 #endif