#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/debugfs.h>
#include <linux/kthread.h>
#include <linux/seq_file.h>

#include <drm/drm_crtc.h>
#include <drm/drm_file.h>
#include <drm/drm_probe_helper.h>

#include "msm_drv.h"
#include "dpu_kms.h"
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_hw_dspp.h"
#include "dpu_hw_dsc.h"
#include "dpu_hw_merge3d.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_crtc.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"
#include "disp/msm_disp_snapshot.h"

#define DPU_DEBUG_ENC(e, fmt, ...) DRM_DEBUG_ATOMIC("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC(e, fmt, ...) DPU_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

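/*
 * Two physical encoder types (command mode and video mode) may be
 * instantiated per interface, hence the factor of two in
 * MAX_PHYS_ENCODERS_PER_VIRTUAL below.
 */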
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

#define IDLE_SHORT_TIMEOUT	1

#define MAX_HDISPLAY_SPLIT 1080

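/* timeout in frames waiting for frame done */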
#define DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES 5

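/**
 * enum dpu_enc_rc_events - events for the resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:    a new frame is about to be transferred;
 *                               resources are turned on if they were off
 *                               or idle.
 * @DPU_ENC_RC_EVENT_FRAME_DONE: the frame transfer completed; schedules the
 *                               delayed-off work. Runs in interrupt context,
 *                               so the rc_lock is not taken for this event.
 * @DPU_ENC_RC_EVENT_PRE_STOP:   the encoder is about to be disabled; moves
 *                               the state machine to PRE_OFF.
 * @DPU_ENC_RC_EVENT_STOP:       encoder disable has finished; resources are
 *                               released and the state moves to OFF.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE: signalled by the delayed-off work when no
 *                               frames are pending; IRQs are gated (video
 *                               mode) or resources collapsed (cmd mode).
 */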
enum dpu_enc_rc_events {
	DPU_ENC_RC_EVENT_KICKOFF = 1,
	DPU_ENC_RC_EVENT_FRAME_DONE,
	DPU_ENC_RC_EVENT_PRE_STOP,
	DPU_ENC_RC_EVENT_STOP,
	DPU_ENC_RC_EVENT_ENTER_IDLE
};

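/*
 * enum dpu_enc_rc_states - states that the resource control state machine
 * can be in
 */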
enum dpu_enc_rc_states {
	DPU_ENC_RC_STATE_OFF,
	DPU_ENC_RC_STATE_PRE_OFF,
	DPU_ENC_RC_STATE_ON,
	DPU_ENC_RC_STATE_IDLE
};

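/**
 * struct dpu_encoder_virt - virtual encoder, wrapping one or more physical
 *                           encoders behind a single drm_encoder
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	virtual-encoder-wide spinlock; protects the crtc
 *			assignment and frame event callback state
 * @enabled:		whether the encoder is currently enabled
 * @num_phys_encs:	number of entries in @phys_encs
 * @phys_encs:		physical encoders managed by this virtual encoder
 * @cur_master:		pointer to the current master in @phys_encs
 * @cur_slave:		pointer to the current slave in @phys_encs, or NULL
 * @hw_pp:		handles to the pingpong blocks in use
 * @hw_dsc:		handles to the DSC blocks in use
 * @dsc_mask:		bitmask of the DSC blocks in use
 * @intfs_swapped:	whether the interfaces have been swapped for the
 *			two-interface left/right split case
 * @crtc:		currently assigned crtc, see dpu_encoder_assign_crtc()
 * @connector:		connector bound to this encoder for this modeset
 * @debugfs_root:	debugfs root directory for this encoder
 * @enc_lock:		mutex protecting encoder enable/disable state
 * @frame_busy_mask:	bitmask of physical encoders with a frame in flight;
 *			set during kickoff, cleared on frame done
 * @crtc_frame_event_cb:	callback into the crtc on frame events
 * @crtc_frame_event_cb_data:	opaque data for @crtc_frame_event_cb
 * @frame_done_timeout_ms:	frame-done timeout in ms, 0 when disabled
 * @frame_done_timer:	watchdog timer for a frame taking too long
 * @vsync_event_timer:	timer driving the vsync event work
 * @disp_info:		local copy of the display info for this display
 * @idle_pc_supported:	whether idle power collapse is supported
 * @rc_lock:		mutex protecting the resource control state machine
 * @rc_state:		current resource control state
 * @delayed_off_work:	delayed work issuing ENTER_IDLE after inactivity
 * @vsync_event_work:	kthread work re-arming @vsync_event_timer
 * @topology:		topology of the display
 * @idle_timeout:	idle timeout duration in milliseconds
 * @wide_bus_en:	whether the wide bus feature is enabled
 * @dsc:		DSC configuration, NULL when DSC is not in use
 */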
struct dpu_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;

	bool enabled;

	unsigned int num_phys_encs;
	struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct dpu_encoder_phys *cur_master;
	struct dpu_encoder_phys *cur_slave;
	struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	unsigned int dsc_mask;

	bool intfs_swapped;

	struct drm_crtc *crtc;
	struct drm_connector *connector;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout_ms;
	struct timer_list frame_done_timer;
	struct timer_list vsync_event_timer;

	struct msm_display_info disp_info;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum dpu_enc_rc_states rc_state;
	struct delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;

	u32 idle_timeout;

	bool wide_bus_en;

	struct msm_display_dsc_config *dsc;
};

#define to_dpu_encoder_virt(x) container_of(x, struct dpu_encoder_virt, base)

static u32 dither_matrix[DITHER_MATRIX_SZ] = {
	15, 7, 13, 5, 3, 11, 1, 9, 12, 4, 14, 6, 0, 8, 2, 10
};

bool dpu_encoder_is_widebus_enabled(const struct drm_encoder *drm_enc)
{
	const struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);

	return dpu_enc->wide_bus_en;
}

int dpu_encoder_get_crc_values_cnt(const struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	int i, num_intf = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->hw_intf && phys->hw_intf->ops.setup_misr
				&& phys->hw_intf->ops.collect_misr)
			num_intf++;
	}

	return num_intf;
}

void dpu_encoder_setup_misr(const struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;

	int i;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys->hw_intf || !phys->hw_intf->ops.setup_misr)
			continue;

		phys->hw_intf->ops.setup_misr(phys->hw_intf, true, 1);
	}
}

int dpu_encoder_get_crc(const struct drm_encoder *drm_enc, u32 *crcs, int pos)
{
	struct dpu_encoder_virt *dpu_enc;

	int i, rc = 0, entries_added = 0;

	if (!drm_enc->crtc) {
		DRM_ERROR("no crtc found for encoder %d\n", drm_enc->index);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys->hw_intf || !phys->hw_intf->ops.collect_misr)
			continue;

		rc = phys->hw_intf->ops.collect_misr(phys->hw_intf, &crcs[pos + entries_added]);
		if (rc)
			return rc;
		entries_added++;
	}

	return entries_added;
}

static void _dpu_encoder_setup_dither(struct dpu_hw_pingpong *hw_pp, unsigned bpc)
{
	struct dpu_hw_dither_cfg dither_cfg = { 0 };

	if (!hw_pp->ops.setup_dither)
		return;

	switch (bpc) {
	case 6:
		dither_cfg.c0_bitdepth = 6;
		dither_cfg.c1_bitdepth = 6;
		dither_cfg.c2_bitdepth = 6;
		dither_cfg.c3_bitdepth = 6;
		dither_cfg.temporal_en = 0;
		break;
	default:
		hw_pp->ops.setup_dither(hw_pp, NULL);
		return;
	}

	memcpy(&dither_cfg.matrix, dither_matrix,
			sizeof(u32) * DITHER_MATRIX_SZ);

	hw_pp->ops.setup_dither(hw_pp, &dither_cfg);
}

static char *dpu_encoder_helper_get_intf_type(enum dpu_intf_mode intf_mode)
{
	switch (intf_mode) {
	case INTF_MODE_VIDEO:
		return "INTF_MODE_VIDEO";
	case INTF_MODE_CMD:
		return "INTF_MODE_CMD";
	case INTF_MODE_WB_BLOCK:
		return "INTF_MODE_WB_BLOCK";
	case INTF_MODE_WB_LINE:
		return "INTF_MODE_WB_LINE";
	default:
		return "INTF_MODE_UNKNOWN";
	}
}

void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	DRM_ERROR("irq timeout id=%u, intf_mode=%s intf=%d wb=%d, pp=%d, intr=%d\n",
			DRMID(phys_enc->parent),
			dpu_encoder_helper_get_intf_type(phys_enc->intf_mode),
			phys_enc->intf_idx - INTF_0, phys_enc->wb_idx - WB_0,
			phys_enc->hw_pp->idx - PINGPONG_0, intr_idx);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc,
				DPU_ENCODER_FRAME_EVENT_ERROR);
}

static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
		u32 irq_idx, struct dpu_encoder_wait_info *info);

int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
		int irq,
		void (*func)(void *arg, int irq_idx),
		struct dpu_encoder_wait_info *wait_info)
{
	u32 irq_status;
	int ret;

	if (!wait_info) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DRM_ERROR("encoder is disabled id=%u, callback=%ps, irq=%d\n",
				DRMID(phys_enc->parent), func,
				irq);
		return -EWOULDBLOCK;
	}

	if (irq < 0) {
		DRM_DEBUG_KMS("skip irq wait id=%u, callback=%ps\n",
				DRMID(phys_enc->parent), func);
		return 0;
	}

	DRM_DEBUG_KMS("id=%u, callback=%ps, irq=%d, pp=%d, pending_cnt=%d\n",
			DRMID(phys_enc->parent), func,
			irq, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));

	ret = dpu_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq,
			wait_info);

	if (ret <= 0) {
		irq_status = dpu_core_irq_read(phys_enc->dpu_kms, irq);
		if (irq_status) {
			unsigned long flags;

			DRM_DEBUG_KMS("irq not triggered id=%u, callback=%ps, irq=%d, pp=%d, atomic_cnt=%d\n",
					DRMID(phys_enc->parent), func,
					irq,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt));
			local_irq_save(flags);
			func(phys_enc, irq);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			DRM_DEBUG_KMS("irq timeout id=%u, callback=%ps, irq=%d, pp=%d, atomic_cnt=%d\n",
					DRMID(phys_enc->parent), func,
					irq,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt));
		}
	} else {
		ret = 0;
		trace_dpu_enc_irq_wait_success(DRMID(phys_enc->parent),
			func, irq,
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	return ret;
}

int dpu_encoder_get_vsync_count(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	struct dpu_encoder_phys *phys = dpu_enc ? dpu_enc->cur_master : NULL;
	return phys ? atomic_read(&phys->vsync_cnt) : 0;
}

int dpu_encoder_get_linecount(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	int linecount = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	phys = dpu_enc ? dpu_enc->cur_master : NULL;

	if (phys && phys->ops.get_line_count)
		linecount = phys->ops.get_line_count(phys);

	return linecount;
}

static void dpu_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.destroy) {
			phys->ops.destroy(phys);
			--dpu_enc->num_phys_encs;
			dpu_enc->phys_encs[i] = NULL;
		}
	}

	if (dpu_enc->num_phys_encs)
		DPU_ERROR_ENC(dpu_enc, "expected 0 num_phys_encs not %d\n",
				dpu_enc->num_phys_encs);
	dpu_enc->num_phys_encs = 0;
	mutex_unlock(&dpu_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&dpu_enc->enc_lock);
}

void dpu_encoder_helper_split_config(
		struct dpu_encoder_phys *phys_enc,
		enum dpu_intf interface)
{
	struct dpu_encoder_virt *dpu_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct dpu_hw_mdp *hw_mdptop;
	struct msm_display_info *disp_info;

	if (!phys_enc->hw_mdptop || !phys_enc->parent) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &dpu_enc->disp_info;

	if (disp_info->intf_type != DRM_MODE_ENCODER_DSI)
		return;

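	/*
	 * disable split modes since the encoder will be operating as the
	 * only encoder, either for the entire use case in the case of,
	 * for example, single DSI, or for this frame in the case of
	 * left/right only partial update.
	 */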
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		DPU_DEBUG_ENC(dpu_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	}
}

bool dpu_encoder_use_dsc_merge(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	int i, intf_count = 0, num_dsc = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

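	/* see dpu_encoder_get_topology, only the 2:2:1 topology is supported */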
	if (dpu_enc->dsc)
		num_dsc = 2;

	return (num_dsc > 0) && (num_dsc > intf_count);
}

static struct msm_display_topology dpu_encoder_get_topology(
			struct dpu_encoder_virt *dpu_enc,
			struct dpu_kms *dpu_kms,
			struct drm_display_mode *mode)
{
	struct msm_display_topology topology = {0};
	int i, intf_count = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

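	/* Datapath topology selection
	 *
	 * Dual display
	 * 2 LM, 2 INTF (split display using 2 interfaces)
	 *
	 * Single display
	 * 1 LM, 1 INTF
	 * 2 LM, 1 INTF (stream merge to support high resolution interfaces)
	 *
	 * DSPPs are requested only for DSI displays, and only when enough
	 * of them are available to cover the selected number of mixers.
	 */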
	if (intf_count == 2)
		topology.num_lm = 2;
	else if (!dpu_kms->catalog->caps->has_3d_merge)
		topology.num_lm = 1;
	else
		topology.num_lm = (mode->hdisplay > MAX_HDISPLAY_SPLIT) ? 2 : 1;

	if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI) {
		if (dpu_kms->catalog->dspp &&
			(dpu_kms->catalog->dspp_count >= topology.num_lm))
			topology.num_dspp = topology.num_lm;
	}

	topology.num_enc = 0;
	topology.num_intf = intf_count;

	if (dpu_enc->dsc) {
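		/*
		 * In case of Display Stream Compression (DSC), use
		 * 2 encoders, 2 line buffers per encoder and 1 interface:
		 * this is power optimal and can drive up to (and including)
		 * 4k screens.
		 */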
		topology.num_enc = 2;
		topology.num_dsc = 2;
		topology.num_intf = 1;
		topology.num_lm = 2;
	}

	return topology;
}

static int dpu_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct drm_display_mode *adj_mode;
	struct msm_display_topology topology;
	struct dpu_global_state *global_state;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		DPU_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != NULL, crtc_state != NULL, conn_state != NULL);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	adj_mode = &crtc_state->adjusted_mode;
	global_state = dpu_kms_get_global_state(crtc_state->state);
	if (IS_ERR(global_state))
		return PTR_ERR(global_state);

	trace_dpu_enc_atomic_check(DRMID(drm_enc));

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		if (ret) {
			DPU_ERROR_ENC(dpu_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	if (!ret) {
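		/*
		 * Release and re-allocate resources on every modeset.
		 * Don't allocate when the display is not active.
		 */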
		if (drm_atomic_crtc_needs_modeset(crtc_state)) {
			dpu_rm_release(global_state, drm_enc);

			if (!crtc_state->active_changed || crtc_state->active)
				ret = dpu_rm_reserve(&dpu_kms->rm, global_state,
						drm_enc, crtc_state, topology);
		}
	}

	trace_dpu_enc_atomic_check_flags(DRMID(drm_enc), adj_mode->flags);

	return ret;
}

static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
			struct msm_display_info *disp_info)
{
	struct dpu_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	int i;

	if (!dpu_enc || !disp_info) {
		DPU_ERROR("invalid param dpu_enc:%d or disp_info:%d\n",
					dpu_enc != NULL, disp_info != NULL);
		return;
	} else if (dpu_enc->num_phys_encs > ARRAY_SIZE(dpu_enc->hw_pp)) {
		DPU_ERROR("invalid num phys enc %d/%d\n",
				dpu_enc->num_phys_encs,
				(int) ARRAY_SIZE(dpu_enc->hw_pp));
		return;
	}

	drm_enc = &dpu_enc->base;

	priv = drm_enc->dev->dev_private;

	dpu_kms = to_dpu_kms(priv->kms);
	hw_mdptop = dpu_kms->hw_mdp;
	if (!hw_mdptop) {
		DPU_ERROR("invalid mdptop\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->is_cmd_mode) {
		for (i = 0; i < dpu_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = dpu_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = dpu_enc->num_phys_encs;
		if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = DPU_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = DPU_VSYNC0_SOURCE_GPIO;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}

static void _dpu_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	DPU_DEBUG_ENC(dpu_enc, "enable:%d\n", enable);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.irq_control)
			phys->ops.irq_control(phys, enable);
	}
}

static void _dpu_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_encoder_virt *dpu_enc;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_rc_helper(DRMID(drm_enc), enable);

	if (!dpu_enc->cur_master) {
		DPU_ERROR("encoder master not set\n");
		return;
	}

	if (enable) {
		pm_runtime_get_sync(&dpu_kms->pdev->dev);

		_dpu_encoder_irq_control(drm_enc, true);
	} else {
		_dpu_encoder_irq_control(drm_enc, false);

		pm_runtime_put_sync(&dpu_kms->pdev->dev);
	}
}

static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = !dpu_enc->disp_info.is_cmd_mode;

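	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and
	 * PRE_STOP events and return early for other events (e.g. wb display).
	 */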
	if (!dpu_enc->idle_pc_supported &&
			(sw_event != DPU_ENC_RC_EVENT_KICKOFF &&
			sw_event != DPU_ENC_RC_EVENT_STOP &&
			sw_event != DPU_ENC_RC_EVENT_PRE_STOP))
		return 0;

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event, dpu_enc->idle_pc_supported,
			 dpu_enc->rc_state, "begin");

	switch (sw_event) {
	case DPU_ENC_RC_EVENT_KICKOFF:
		if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_ATOMIC("id:%u, sw_event:%d, rc in ON state\n",
					DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state != DPU_ENC_RC_STATE_OFF &&
				dpu_enc->rc_state != DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_ATOMIC("id:%u, sw_event:%d, rc in state %d\n",
					DRMID(drm_enc), sw_event,
					dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		if (is_vid_mode && dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE)
			_dpu_encoder_irq_control(drm_enc, true);
		else
			_dpu_encoder_resource_control_helper(drm_enc, true);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_ON;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"kickoff");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_FRAME_DONE:
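		/*
		 * the rc_lock is not taken for this event, as it runs in
		 * interrupt context; the other events (KICKOFF and STOP)
		 * wait for idle before touching the resource control state.
		 */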
		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%d, sw_event:%d,rc:%d-unexpected\n",
					DRMID(drm_enc), sw_event,
					dpu_enc->rc_state);
			return -EINVAL;
		}

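		/*
		 * schedule the off work item only when there are no
		 * frames pending
		 */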
		if (dpu_crtc_frame_pending(drm_enc->crtc) > 1) {
			DRM_DEBUG_KMS("id:%d skip schedule work\n",
					DRMID(drm_enc));
			return 0;
		}

		queue_delayed_work(priv->wq, &dpu_enc->delayed_off_work,
				   msecs_to_jiffies(dpu_enc->idle_timeout));

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"frame done");
		break;

	case DPU_ENC_RC_EVENT_PRE_STOP:
		if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		if (is_vid_mode &&
		    dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			_dpu_encoder_irq_control(drm_enc, true);
		} else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF ||
				dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in %d state\n",
					DRMID(drm_enc), sw_event,
					dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		dpu_enc->rc_state = DPU_ENC_RC_STATE_PRE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"pre stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_STOP:
		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF) {
			DRM_DEBUG_KMS("id: %u, sw_event:%d, rc in OFF state\n",
					DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc in state %d\n",
					DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

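		/*
		 * expect to arrive here only if in either idle state or
		 * pre-off; in IDLE state the resources are already disabled
		 */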
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_PRE_OFF)
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc:%d !ON state\n",
					DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

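		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */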
		if (dpu_enc->frame_busy_mask[0]) {
			DRM_ERROR("id:%u, sw_event:%d, rc:%d frame pending\n",
					DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		if (is_vid_mode)
			_dpu_encoder_irq_control(drm_enc, false);
		else
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_IDLE;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"idle");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	default:
		DRM_ERROR("id:%u, unexpected sw_event: %d\n", DRMID(drm_enc),
			  sw_event);
		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"error");
		break;
	}

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
			dpu_enc->idle_pc_supported, dpu_enc->rc_state,
			"end");
	return 0;
}

void dpu_encoder_prepare_wb_job(struct drm_encoder *drm_enc,
		struct drm_writeback_job *job)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.prepare_wb_job)
			phys->ops.prepare_wb_job(phys, job);
	}
}

void dpu_encoder_cleanup_wb_job(struct drm_encoder *drm_enc,
		struct drm_writeback_job *job)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.cleanup_wb_job)
			phys->ops.cleanup_wb_job(phys, job);
	}
}

static void dpu_encoder_virt_atomic_mode_set(struct drm_encoder *drm_enc,
					     struct drm_crtc_state *crtc_state,
					     struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_crtc_state *cstate;
	struct dpu_global_state *global_state;
	struct dpu_hw_blk *hw_pp[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_blk *hw_ctl[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_blk *hw_lm[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_blk *hw_dspp[MAX_CHANNELS_PER_ENC] = { NULL };
	struct dpu_hw_blk *hw_dsc[MAX_CHANNELS_PER_ENC];
	int num_lm, num_ctl, num_pp, num_dsc;
	unsigned int dsc_mask = 0;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	global_state = dpu_kms_get_existing_global_state(dpu_kms);
	if (IS_ERR_OR_NULL(global_state)) {
		DPU_ERROR("Failed to get global state");
		return;
	}

	trace_dpu_enc_mode_set(DRMID(drm_enc));

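	/* query resources that were reserved in the atomic check step */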
	num_pp = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_PINGPONG, hw_pp,
		ARRAY_SIZE(hw_pp));
	num_ctl = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_CTL, hw_ctl, ARRAY_SIZE(hw_ctl));
	num_lm = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_LM, hw_lm, ARRAY_SIZE(hw_lm));
	dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_DSPP, hw_dspp,
		ARRAY_SIZE(hw_dspp));

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++)
		dpu_enc->hw_pp[i] = i < num_pp ? to_dpu_hw_pingpong(hw_pp[i])
						: NULL;

	if (dpu_enc->dsc) {
		num_dsc = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
							drm_enc->base.id, DPU_HW_BLK_DSC,
							hw_dsc, ARRAY_SIZE(hw_dsc));
		for (i = 0; i < num_dsc; i++) {
			dpu_enc->hw_dsc[i] = to_dpu_hw_dsc(hw_dsc[i]);
			dsc_mask |= BIT(dpu_enc->hw_dsc[i]->idx - DSC_0);
		}
	}

	dpu_enc->dsc_mask = dsc_mask;

	cstate = to_dpu_crtc_state(crtc_state);

	for (i = 0; i < num_lm; i++) {
		int ctl_idx = (i < num_ctl) ? i : (num_ctl-1);

		cstate->mixers[i].hw_lm = to_dpu_hw_mixer(hw_lm[i]);
		cstate->mixers[i].lm_ctl = to_dpu_hw_ctl(hw_ctl[ctl_idx]);
		cstate->mixers[i].hw_dspp = to_dpu_hw_dspp(hw_dspp[i]);
	}

	cstate->num_mixers = num_lm;

	dpu_enc->connector = conn_state->connector;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!dpu_enc->hw_pp[i]) {
			DPU_ERROR_ENC(dpu_enc,
				"no pp block assigned at idx: %d\n", i);
			return;
		}

		if (!hw_ctl[i]) {
			DPU_ERROR_ENC(dpu_enc,
				"no ctl block assigned at idx: %d\n", i);
			return;
		}

		phys->hw_pp = dpu_enc->hw_pp[i];
		phys->hw_ctl = to_dpu_hw_ctl(hw_ctl[i]);

		phys->cached_mode = crtc_state->adjusted_mode;
		if (phys->ops.atomic_mode_set)
			phys->ops.atomic_mode_set(phys, crtc_state, conn_state);
	}
}

static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i;

	if (!drm_enc || !drm_enc->dev) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	if (!dpu_enc || !dpu_enc->cur_master) {
		DPU_ERROR("invalid dpu encoder/master\n");
		return;
	}

	if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_TMDS &&
		dpu_enc->cur_master->hw_mdptop &&
		dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select(
			dpu_enc->cur_master->hw_mdptop);

	_dpu_encoder_update_vsync_source(dpu_enc, &dpu_enc->disp_info);

	if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI &&
			!WARN_ON(dpu_enc->num_phys_encs == 0)) {
		unsigned bpc = dpu_enc->connector->display_info.bpc;
		for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
			if (!dpu_enc->hw_pp[i])
				continue;
			_dpu_encoder_setup_dither(dpu_enc->hw_pp[i], bpc);
		}
	}
}

void dpu_encoder_virt_runtime_resume(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);

	mutex_lock(&dpu_enc->enc_lock);

	if (!dpu_enc->enabled)
		goto out;

	if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.restore)
		dpu_enc->cur_slave->ops.restore(dpu_enc->cur_slave);
	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.restore)
		dpu_enc->cur_master->ops.restore(dpu_enc->cur_master);

	_dpu_encoder_virt_enable_helper(drm_enc);

out:
	mutex_unlock(&dpu_enc->enc_lock);
}

static void dpu_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int ret = 0;
	struct drm_display_mode *cur_mode = NULL;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	mutex_lock(&dpu_enc->enc_lock);
	cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;

	trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
			     cur_mode->vdisplay);

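	/* always enable slave encoder before master */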
	if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.enable)
		dpu_enc->cur_slave->ops.enable(dpu_enc->cur_slave);

	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.enable)
		dpu_enc->cur_master->ops.enable(dpu_enc->cur_master);

	ret = dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc, "dpu resource control failed: %d\n",
				ret);
		goto out;
	}

	_dpu_encoder_virt_enable_helper(drm_enc);

	dpu_enc->enabled = true;

out:
	mutex_unlock(&dpu_enc->enc_lock);
}

static void dpu_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);
	dpu_enc->enabled = false;

	trace_dpu_enc_disable(DRMID(drm_enc));

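	/* wait for idle */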
	dpu_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.disable)
			phys->ops.disable(phys);
	}

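	/* after phys waits for frame-done, there should be no frames pending */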
	if (atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
		DPU_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&dpu_enc->frame_done_timer);
	}

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_STOP);

	dpu_enc->connector = NULL;

	DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n");

	mutex_unlock(&dpu_enc->enc_lock);
}

static enum dpu_intf dpu_encoder_get_intf(const struct dpu_mdss_cfg *catalog,
		enum dpu_intf_type type, u32 controller_id)
{
	int i = 0;

	if (type == INTF_WB)
		return INTF_MAX;

	for (i = 0; i < catalog->intf_count; i++) {
		if (catalog->intf[i].type == type
		    && catalog->intf[i].controller_id == controller_id) {
			return catalog->intf[i].id;
		}
	}

	return INTF_MAX;
}

static enum dpu_wb dpu_encoder_get_wb(const struct dpu_mdss_cfg *catalog,
		enum dpu_intf_type type, u32 controller_id)
{
	int i = 0;

	if (type != INTF_WB)
		return WB_MAX;

	for (i = 0; i < catalog->wb_count; i++) {
		if (catalog->wb[i].id == controller_id)
			return catalog->wb[i].id;
	}

	return WB_MAX;
}

static void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_vblank_callback");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	atomic_inc(&phy_enc->vsync_cnt);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	if (dpu_enc->crtc)
		dpu_crtc_vblank_callback(dpu_enc->crtc);
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	DPU_ATRACE_END("encoder_vblank_callback");
}

static void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);

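	/* trigger the dump only on the first underrun */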
	if (atomic_read(&phy_enc->underrun_cnt) == 1)
		msm_disp_snapshot_state(drm_enc->dev);

	trace_dpu_enc_underrun_cb(DRMID(drm_enc),
		atomic_read(&phy_enc->underrun_cnt));
	DPU_ATRACE_END("encoder_underrun_callback");
}

void dpu_encoder_assign_crtc(struct drm_encoder *drm_enc, struct drm_crtc *crtc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);

	WARN_ON(crtc && dpu_enc->crtc);
	dpu_enc->crtc = crtc;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

void dpu_encoder_toggle_vblank_for_crtc(struct drm_encoder *drm_enc,
					struct drm_crtc *crtc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;
	int i;

	trace_dpu_enc_vblank_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	if (dpu_enc->crtc != crtc) {
		spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
		return;
	}
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.control_vblank_irq)
			phys->ops.control_vblank_irq(phys, enable);
	}
}

void dpu_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
		void (*frame_event_cb)(void *, u32 event),
		void *frame_event_cb_data)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;

	enable = frame_event_cb ? true : false;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	trace_dpu_enc_frame_event_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	dpu_enc->crtc_frame_event_cb = frame_event_cb;
	dpu_enc->crtc_frame_event_cb_data = frame_event_cb_data;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

static void dpu_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *ready_phys, u32 event)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (DPU_ENCODER_FRAME_EVENT_DONE
			| DPU_ENCODER_FRAME_EVENT_ERROR
			| DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!dpu_enc->frame_busy_mask[0]) {
			trace_dpu_enc_frame_done_cb_not_busy(DRMID(drm_enc), event,
					dpu_encoder_helper_get_intf_type(ready_phys->intf_mode),
					ready_phys->intf_idx, ready_phys->wb_idx);
			return;
		}

		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			if (dpu_enc->phys_encs[i] == ready_phys) {
				trace_dpu_enc_frame_done_cb(DRMID(drm_enc), i,
						dpu_enc->frame_busy_mask[0]);
				clear_bit(i, dpu_enc->frame_busy_mask);
			}
		}

		if (!dpu_enc->frame_busy_mask[0]) {
			atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
			del_timer(&dpu_enc->frame_done_timer);

			dpu_encoder_resource_control(drm_enc,
					DPU_ENC_RC_EVENT_FRAME_DONE);

			if (dpu_enc->crtc_frame_event_cb)
				dpu_enc->crtc_frame_event_cb(
					dpu_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		if (dpu_enc->crtc_frame_event_cb)
			dpu_enc->crtc_frame_event_cb(
				dpu_enc->crtc_frame_event_cb_data, event);
	}
}

static void dpu_encoder_off_work(struct work_struct *work)
{
	struct dpu_encoder_virt *dpu_enc = container_of(work,
			struct dpu_encoder_virt, delayed_off_work.work);

	dpu_encoder_resource_control(&dpu_enc->base,
						DPU_ENC_RC_EVENT_ENTER_IDLE);

	dpu_encoder_frame_done_callback(&dpu_enc->base, NULL,
				DPU_ENCODER_FRAME_EVENT_IDLE);
}

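/**
 * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
 * @drm_enc: Pointer to drm encoder structure
 * @phys: Pointer to physical encoder structure
 * @extra_flush_bits: Additional bit mask to include in flush trigger
 */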
static void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct dpu_hw_ctl *ctl;
	int pending_kickoff_cnt;
	u32 ret = UINT_MAX;

	if (!phys->hw_pp) {
		DPU_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl->ops.trigger_flush) {
		DPU_ERROR("missing trigger cb\n");
		return;
	}

	pending_kickoff_cnt = dpu_encoder_phys_inc_pending(phys);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	ctl->ops.trigger_flush(ctl);

	if (ctl->ops.get_pending_flush)
		ret = ctl->ops.get_pending_flush(ctl);

	trace_dpu_enc_trigger_flush(DRMID(drm_enc),
			dpu_encoder_helper_get_intf_type(phys->intf_mode),
			phys->intf_idx, phys->wb_idx,
			pending_kickoff_cnt, ctl->idx,
			extra_flush_bits, ret);
}

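/**
 * _dpu_encoder_trigger_start - trigger start for a physical encoder
 * @phys: Pointer to physical encoder structure
 */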
static void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
{
	if (!phys) {
		DPU_ERROR("invalid argument(s)\n");
		return;
	}

	if (!phys->hw_pp) {
		DPU_ERROR("invalid pingpong hw\n");
		return;
	}

	if (phys->ops.trigger_start && phys->enable_state != DPU_ENC_DISABLED)
		phys->ops.trigger_start(phys);
}

void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	ctl = phys_enc->hw_ctl;
	if (ctl->ops.trigger_start) {
		ctl->ops.trigger_start(ctl);
		trace_dpu_enc_trigger_start(DRMID(phys_enc->parent), ctl->idx);
	}
}

static int dpu_encoder_helper_wait_event_timeout(
		int32_t drm_id,
		u32 irq_idx,
		struct dpu_encoder_wait_info *info)
{
	int rc = 0;
	s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
	s64 jiffies = msecs_to_jiffies(info->timeout_ms);
	s64 time;

	do {
		rc = wait_event_timeout(*(info->wq),
				atomic_read(info->atomic_cnt) == 0, jiffies);
		time = ktime_to_ms(ktime_get());

		trace_dpu_enc_wait_event_timeout(drm_id, irq_idx, rc, time,
						 expected_time,
						 atomic_read(info->atomic_cnt));
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(time < expected_time));

	return rc;
}

static void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_hw_ctl *ctl;
	int rc;
	struct drm_encoder *drm_enc;

	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;
	drm_enc = phys_enc->parent;

	if (!ctl->ops.reset)
		return;

	DRM_DEBUG_KMS("id:%u ctl %d reset\n", DRMID(drm_enc),
		      ctl->idx);

	rc = ctl->ops.reset(ctl);
	if (rc) {
		DPU_ERROR_ENC(dpu_enc, "ctl %d reset failure\n", ctl->idx);
		msm_disp_snapshot_state(drm_enc->dev);
	}

	phys_enc->enable_state = DPU_ENC_ENABLED;
}

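/**
 * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
 * @dpu_enc: Pointer to virtual encoder structure
 *
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 */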
static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
{
	struct dpu_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	pending_flush = 0x0;

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->enable_state == DPU_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;

		if (phys->split_role != ENC_ROLE_SLAVE)
			set_bit(i, dpu_enc->frame_busy_mask);

		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_dpu_encoder_trigger_flush(&dpu_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	if (pending_flush && dpu_enc->cur_master) {
		_dpu_encoder_trigger_flush(
				&dpu_enc->base,
				dpu_enc->cur_master,
				pending_flush);
	}

	_dpu_encoder_trigger_start(dpu_enc->cur_master);

	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	unsigned int i;
	struct dpu_hw_ctl *ctl;
	struct msm_display_info *disp_info;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	disp_info = &dpu_enc->disp_info;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];

		ctl = phys->hw_ctl;
		if (ctl->ops.clear_pending_flush)
			ctl->ops.clear_pending_flush(ctl);

		if ((phys == dpu_enc->cur_master) &&
		    disp_info->is_cmd_mode
		    && ctl->ops.trigger_pending)
			ctl->ops.trigger_pending(ctl);
	}
}

static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
		struct drm_display_mode *mode)
{
	u64 pclk_rate;
	u32 pclk_period;
	u32 line_time;

	if (!dpu_enc->cur_master)
		return 0;

	if (!dpu_enc->cur_master->ops.get_line_count) {
		DPU_ERROR("get_line_count function not defined\n");
		return 0;
	}

	pclk_rate = mode->clock;
	if (pclk_rate == 0) {
		DPU_ERROR("pclk is 0, cannot calculate line time\n");
		return 0;
	}

	pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
	if (pclk_period == 0) {
		DPU_ERROR("pclk period is 0\n");
		return 0;
	}

	line_time = (pclk_period * mode->htotal) / 1000;
	if (line_time == 0) {
		DPU_ERROR("line time calculation is 0\n");
		return 0;
	}

	DPU_DEBUG_ENC(dpu_enc,
			"clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
			pclk_rate, pclk_period, line_time);

	return line_time;
}

int dpu_encoder_vsync_time(struct drm_encoder *drm_enc, ktime_t *wakeup_time)
{
	struct drm_display_mode *mode;
	struct dpu_encoder_virt *dpu_enc;
	u32 cur_line;
	u32 line_time;
	u32 vtotal, time_to_vsync;
	ktime_t cur_time;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	if (!drm_enc->crtc || !drm_enc->crtc->state) {
		DPU_ERROR("crtc/crtc state object is NULL\n");
		return -EINVAL;
	}
	mode = &drm_enc->crtc->state->adjusted_mode;

	line_time = _dpu_encoder_calculate_linetime(dpu_enc, mode);
	if (!line_time)
		return -EINVAL;

	cur_line = dpu_enc->cur_master->ops.get_line_count(dpu_enc->cur_master);

	vtotal = mode->vtotal;
	if (cur_line >= vtotal)
		time_to_vsync = line_time * vtotal;
	else
		time_to_vsync = line_time * (vtotal - cur_line);

	if (time_to_vsync == 0) {
		DPU_ERROR("time to vsync should not be zero, vtotal=%d\n",
				vtotal);
		return -EINVAL;
	}

	cur_time = ktime_get();
	*wakeup_time = ktime_add_ns(cur_time, time_to_vsync);

	DPU_DEBUG_ENC(dpu_enc,
			"cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
			cur_line, vtotal, time_to_vsync,
			ktime_to_ms(cur_time),
			ktime_to_ms(*wakeup_time));
	return 0;
}

static void dpu_encoder_vsync_event_handler(struct timer_list *t)
{
	struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
			vsync_event_timer);
	struct drm_encoder *drm_enc = &dpu_enc->base;
	struct msm_drm_private *priv;
	struct msm_drm_thread *event_thread;

	if (!drm_enc->dev || !drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
		DPU_ERROR("invalid crtc index\n");
		return;
	}
	event_thread = &priv->event_thread[drm_enc->crtc->index];
	if (!event_thread) {
		DPU_ERROR("event_thread not found for crtc:%d\n",
				drm_enc->crtc->index);
		return;
	}

	del_timer(&dpu_enc->vsync_event_timer);
}

static void dpu_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct dpu_encoder_virt *dpu_enc = container_of(work,
			struct dpu_encoder_virt, vsync_event_work);
	ktime_t wakeup_time;

	if (dpu_encoder_vsync_time(&dpu_enc->base, &wakeup_time))
		return;

	trace_dpu_enc_vsync_event_work(DRMID(&dpu_enc->base), wakeup_time);
	mod_timer(&dpu_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
}

static u32
dpu_encoder_dsc_initial_line_calc(struct msm_display_dsc_config *dsc,
				  u32 enc_ip_width)
{
	int ssm_delay, total_pixels, soft_slice_per_enc;

	soft_slice_per_enc = enc_ip_width / dsc->drm->slice_width;

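	/*
	 * minimum number of initial line pixels is a sum of:
	 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
	 *    91 for 10 bpc) * 3
	 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
	 * 3. the initial xmit delay
	 * 4. total pipeline delay through the "lock step" of encoder (47)
	 * 5. 6 additional pixels as the output of the rate buffer is
	 *    48 bits wide
	 */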
	ssm_delay = ((dsc->drm->bits_per_component < 10) ? 84 : 92);
	total_pixels = ssm_delay * 3 + dsc->drm->initial_xmit_delay + 47;
	if (soft_slice_per_enc > 1)
		total_pixels += (ssm_delay * 3);
	return DIV_ROUND_UP(total_pixels, dsc->drm->slice_width);
}

static void dpu_encoder_dsc_pipe_cfg(struct dpu_hw_dsc *hw_dsc,
				     struct dpu_hw_pingpong *hw_pp,
				     struct msm_display_dsc_config *dsc,
				     u32 common_mode,
				     u32 initial_lines)
{
	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, initial_lines);

	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}

static void dpu_encoder_prep_dsc(struct dpu_encoder_virt *dpu_enc,
				 struct msm_display_dsc_config *dsc)
{
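	/* coding only for 2LM, 2enc, 1 dsc config */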
	struct dpu_encoder_phys *enc_master = dpu_enc->cur_master;
	struct dpu_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int dsc_common_mode;
	int pic_width;
	u32 initial_lines;
	int i;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = dpu_enc->hw_pp[i];
		hw_dsc[i] = dpu_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			DPU_ERROR_ENC(dpu_enc, "invalid params for DSC\n");
			return;
		}
	}

	pic_width = dsc->drm->pic_width;

	dsc_common_mode = DSC_MODE_MULTIPLEX | DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	this_frame_slices = pic_width / dsc->drm->slice_width;
	intf_ip_w = this_frame_slices * dsc->drm->slice_width;

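	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */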
	enc_ip_w = intf_ip_w / 2;
	initial_lines = dpu_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++)
		dpu_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], dsc, dsc_common_mode, initial_lines);
}

void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	bool needs_hw_reset = false;
	unsigned int i;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	trace_dpu_enc_prepare_kickoff(DRMID(drm_enc));

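	/* prepare for next kickoff, may include waiting on previous kickoff */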
	DPU_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];
		if (phys->ops.prepare_for_kickoff)
			phys->ops.prepare_for_kickoff(phys);
		if (phys->enable_state == DPU_ENC_ERR_NEEDS_HW_RESET)
			needs_hw_reset = true;
	}
	DPU_ATRACE_END("enc_prepare_for_kickoff");

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);

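	/* if any phys needs reset, reset all phys, in-order */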
	if (needs_hw_reset) {
		trace_dpu_enc_prepare_kickoff_reset(DRMID(drm_enc));
		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			dpu_encoder_helper_hw_reset(dpu_enc->phys_encs[i]);
		}
	}

	if (dpu_enc->dsc)
		dpu_encoder_prep_dsc(dpu_enc, dpu_enc->dsc);
}

bool dpu_encoder_is_valid_for_commit(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	unsigned int i;
	struct dpu_encoder_phys *phys;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	if (drm_enc->encoder_type == DRM_MODE_ENCODER_VIRTUAL) {
		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			phys = dpu_enc->phys_encs[i];
			if (phys->ops.is_valid_for_commit && !phys->ops.is_valid_for_commit(phys)) {
				DPU_DEBUG("invalid FB not kicking off\n");
				return false;
			}
		}
	}

	return true;
}

void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	ktime_t wakeup_time;
	unsigned long timeout_ms;
	unsigned int i;

	DPU_ATRACE_BEGIN("encoder_kickoff");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	trace_dpu_enc_kickoff(DRMID(drm_enc));

	timeout_ms = DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES * 1000 /
			drm_mode_vrefresh(&drm_enc->crtc->state->adjusted_mode);

	atomic_set(&dpu_enc->frame_done_timeout_ms, timeout_ms);
	mod_timer(&dpu_enc->frame_done_timer,
			jiffies + msecs_to_jiffies(timeout_ms));

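	/* all physical encoders are ready to go, trigger the kickoff */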
	_dpu_encoder_kickoff_phys(dpu_enc);

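	/* allow phys encs to handle any post-kickoff business */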
1962 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1963 phys = dpu_enc->phys_encs[i];
1964 if (phys->ops.handle_post_kickoff)
1965 phys->ops.handle_post_kickoff(phys);
1966 }
1967
1968 if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI &&
1969 !dpu_encoder_vsync_time(drm_enc, &wakeup_time)) {
1970 trace_dpu_enc_early_kickoff(DRMID(drm_enc),
1971 ktime_to_ms(wakeup_time));
1972 mod_timer(&dpu_enc->vsync_event_timer,
1973 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
1974 }
1975
1976 DPU_ATRACE_END("encoder_kickoff");
1977 }
1978
1979 static void dpu_encoder_helper_reset_mixers(struct dpu_encoder_phys *phys_enc)
1980 {
1981 struct dpu_hw_mixer_cfg mixer;
1982 int i, num_lm;
1983 u32 flush_mask = 0;
1984 struct dpu_global_state *global_state;
1985 struct dpu_hw_blk *hw_lm[2];
1986 struct dpu_hw_mixer *hw_mixer[2];
1987 struct dpu_hw_ctl *ctl = phys_enc->hw_ctl;
1988
1989 memset(&mixer, 0, sizeof(mixer));
1990
1991
1992 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
1993 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
1994
1995 global_state = dpu_kms_get_existing_global_state(phys_enc->dpu_kms);
1996
1997 num_lm = dpu_rm_get_assigned_resources(&phys_enc->dpu_kms->rm, global_state,
1998 phys_enc->parent->base.id, DPU_HW_BLK_LM, hw_lm, ARRAY_SIZE(hw_lm));
1999
2000 for (i = 0; i < num_lm; i++) {
2001 hw_mixer[i] = to_dpu_hw_mixer(hw_lm[i]);
2002 flush_mask = phys_enc->hw_ctl->ops.get_bitmask_mixer(ctl, hw_mixer[i]->idx);
2003 if (phys_enc->hw_ctl->ops.update_pending_flush)
2004 phys_enc->hw_ctl->ops.update_pending_flush(ctl, flush_mask);
2005
2006
2007 if (phys_enc->hw_ctl->ops.setup_blendstage)
2008 phys_enc->hw_ctl->ops.setup_blendstage(ctl, hw_mixer[i]->idx, NULL);
2009 }
2010 }
2011
2012 void dpu_encoder_helper_phys_cleanup(struct dpu_encoder_phys *phys_enc)
2013 {
2014 struct dpu_hw_ctl *ctl = phys_enc->hw_ctl;
2015 struct dpu_hw_intf_cfg intf_cfg = { 0 };
2016 int i;
2017 struct dpu_encoder_virt *dpu_enc;
2018
2019 dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
2020
2021 phys_enc->hw_ctl->ops.reset(ctl);
2022
2023 dpu_encoder_helper_reset_mixers(phys_enc);
2024
2025
2026
2027
2028
2029
2030 if (phys_enc->hw_wb) {
2031
2032 if (phys_enc->hw_wb->ops.bind_pingpong_blk)
2033 phys_enc->hw_wb->ops.bind_pingpong_blk(phys_enc->hw_wb, false,
2034 phys_enc->hw_pp->idx);
2035
2036
2037 if (phys_enc->hw_ctl->ops.update_pending_flush_wb)
2038 phys_enc->hw_ctl->ops.update_pending_flush_wb(ctl, phys_enc->hw_wb->idx);
2039 } else {
2040 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2041 if (dpu_enc->phys_encs[i] && phys_enc->hw_intf->ops.bind_pingpong_blk)
2042 phys_enc->hw_intf->ops.bind_pingpong_blk(
2043 dpu_enc->phys_encs[i]->hw_intf, false,
2044 dpu_enc->phys_encs[i]->hw_pp->idx);
2045
2046
2047 if (phys_enc->hw_ctl->ops.update_pending_flush_intf)
2048 phys_enc->hw_ctl->ops.update_pending_flush_intf(phys_enc->hw_ctl,
2049 dpu_enc->phys_encs[i]->hw_intf->idx);
2050 }
2051 }
2052
2053
2054 if (phys_enc->hw_pp->merge_3d) {
2055 phys_enc->hw_pp->merge_3d->ops.setup_3d_mode(phys_enc->hw_pp->merge_3d,
2056 BLEND_3D_NONE);
2057 if (phys_enc->hw_ctl->ops.update_pending_flush_merge_3d)
2058 phys_enc->hw_ctl->ops.update_pending_flush_merge_3d(ctl,
2059 phys_enc->hw_pp->merge_3d->idx);
2060 }
2061
2062 intf_cfg.stream_sel = 0;
2063 intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
2064
2065 if (phys_enc->hw_intf)
2066 intf_cfg.intf = phys_enc->hw_intf->idx;
2067 if (phys_enc->hw_wb)
2068 intf_cfg.wb = phys_enc->hw_wb->idx;
2069
2070 if (phys_enc->hw_pp->merge_3d)
2071 intf_cfg.merge_3d = phys_enc->hw_pp->merge_3d->idx;
2072
2073 if (ctl->ops.reset_intf_cfg)
2074 ctl->ops.reset_intf_cfg(ctl, &intf_cfg);
2075
2076 ctl->ops.trigger_flush(ctl);
2077 ctl->ops.trigger_start(ctl);
2078 ctl->ops.clear_pending_flush(ctl);
2079 }
2080
2081 void dpu_encoder_prepare_commit(struct drm_encoder *drm_enc)
2082 {
2083 struct dpu_encoder_virt *dpu_enc;
2084 struct dpu_encoder_phys *phys;
2085 int i;
2086
2087 if (!drm_enc) {
2088 DPU_ERROR("invalid encoder\n");
2089 return;
2090 }
2091 dpu_enc = to_dpu_encoder_virt(drm_enc);
2092
2093 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2094 phys = dpu_enc->phys_encs[i];
2095 if (phys->ops.prepare_commit)
2096 phys->ops.prepare_commit(phys);
2097 }
2098 }
2099
2100 #ifdef CONFIG_DEBUG_FS
2101 static int _dpu_encoder_status_show(struct seq_file *s, void *data)
2102 {
2103 struct dpu_encoder_virt *dpu_enc = s->private;
2104 int i;
2105
2106 mutex_lock(&dpu_enc->enc_lock);
2107 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2108 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2109
2110 seq_printf(s, "intf:%d wb:%d vsync:%8d underrun:%8d ",
2111 phys->intf_idx - INTF_0, phys->wb_idx - WB_0,
2112 atomic_read(&phys->vsync_cnt),
2113 atomic_read(&phys->underrun_cnt));
2114
2115 seq_printf(s, "mode: %s\n", dpu_encoder_helper_get_intf_type(phys->intf_mode));
2116 }
2117 mutex_unlock(&dpu_enc->enc_lock);
2118
2119 return 0;
2120 }
2121
2122 DEFINE_SHOW_ATTRIBUTE(_dpu_encoder_status);
2123
2124 static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
2125 {
2126 struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
2127 int i;
2128
2129 char name[DPU_NAME_SIZE];
2130
2131 if (!drm_enc->dev) {
2132 DPU_ERROR("invalid encoder or kms\n");
2133 return -EINVAL;
2134 }
2135
2136 snprintf(name, DPU_NAME_SIZE, "encoder%u", drm_enc->base.id);
2137
2138
2139 dpu_enc->debugfs_root = debugfs_create_dir(name,
2140 drm_enc->dev->primary->debugfs_root);
2141
2142 /* don't error check these */
2143 debugfs_create_file("status", 0600,
2144 dpu_enc->debugfs_root, dpu_enc, &_dpu_encoder_status_fops);
2145
2146 for (i = 0; i < dpu_enc->num_phys_encs; i++)
2147 if (dpu_enc->phys_encs[i]->ops.late_register)
2148 dpu_enc->phys_encs[i]->ops.late_register(
2149 dpu_enc->phys_encs[i],
2150 dpu_enc->debugfs_root);
2151
2152 return 0;
2153 }
2154 #else
2155 static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
2156 {
2157 return 0;
2158 }
2159 #endif
2160
2161 static int dpu_encoder_late_register(struct drm_encoder *encoder)
2162 {
2163 return _dpu_encoder_init_debugfs(encoder);
2164 }
2165
2166 static void dpu_encoder_early_unregister(struct drm_encoder *encoder)
2167 {
2168 struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(encoder);
2169
2170 debugfs_remove_recursive(dpu_enc->debugfs_root);
2171 }
2172
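/*
 * Create the physical encoder matching the display type: writeback for
 * virtual (WB) encoders, command-mode for command-mode panels, and
 * video-mode otherwise. The new phys encoder is appended to phys_encs[]
 * and recorded as cur_master or cur_slave according to its split role.
 */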
2173 static int dpu_encoder_virt_add_phys_encs(
2174 struct msm_display_info *disp_info,
2175 struct dpu_encoder_virt *dpu_enc,
2176 struct dpu_enc_phys_init_params *params)
2177 {
2178 struct dpu_encoder_phys *enc = NULL;
2179
2180 DPU_DEBUG_ENC(dpu_enc, "\n");
2181
2182 /*
2183  * We may create up to NUM_PHYS_ENCODER_TYPES physical encoders here;
2184  * bail out if they would not all fit in the phys_encs[] array.
2185  */
2186 if (dpu_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
2187 ARRAY_SIZE(dpu_enc->phys_encs)) {
2188 DPU_ERROR_ENC(dpu_enc, "too many physical encoders %d\n",
2189 dpu_enc->num_phys_encs);
2190 return -EINVAL;
2191 }
2192
2193
2194 if (disp_info->intf_type == DRM_MODE_ENCODER_VIRTUAL) {
2195 enc = dpu_encoder_phys_wb_init(params);
2196
2197 if (IS_ERR(enc)) {
2198 DPU_ERROR_ENC(dpu_enc, "failed to init wb enc: %ld\n",
2199 PTR_ERR(enc));
2200 return PTR_ERR(enc);
2201 }
2202
2203 dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
2204 ++dpu_enc->num_phys_encs;
2205 } else if (disp_info->is_cmd_mode) {
2206 enc = dpu_encoder_phys_cmd_init(params);
2207
2208 if (IS_ERR(enc)) {
2209 DPU_ERROR_ENC(dpu_enc, "failed to init cmd enc: %ld\n",
2210 PTR_ERR(enc));
2211 return PTR_ERR(enc);
2212 }
2213
2214 dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
2215 ++dpu_enc->num_phys_encs;
2216 } else {
2217 enc = dpu_encoder_phys_vid_init(params);
2218
2219 if (IS_ERR(enc)) {
2220 DPU_ERROR_ENC(dpu_enc, "failed to init vid enc: %ld\n",
2221 PTR_ERR(enc));
2222 return PTR_ERR(enc);
2223 }
2224
2225 dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
2226 ++dpu_enc->num_phys_encs;
2227 }
2228
2229 if (params->split_role == ENC_ROLE_SLAVE)
2230 dpu_enc->cur_slave = enc;
2231 else
2232 dpu_enc->cur_master = enc;
2233
2234 return 0;
2235 }
2236
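/*
 * Callbacks used by the physical encoders to report vblank, underrun and
 * frame-done events back to the virtual encoder.
 */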
2237 static const struct dpu_encoder_virt_ops dpu_encoder_parent_ops = {
2238 .handle_vblank_virt = dpu_encoder_vblank_callback,
2239 .handle_underrun_virt = dpu_encoder_underrun_callback,
2240 .handle_frame_done = dpu_encoder_frame_done_callback,
2241 };
2242
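/*
 * Translate the display info into physical encoders: map the DRM encoder
 * type to a DPU interface type, create one phys encoder per horizontal
 * tile, then bind each phys encoder to its INTF or WB hardware block.
 */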
2243 static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
2244 struct dpu_kms *dpu_kms,
2245 struct msm_display_info *disp_info)
2246 {
2247 int ret = 0;
2248 int i = 0;
2249 enum dpu_intf_type intf_type = INTF_NONE;
2250 struct dpu_enc_phys_init_params phys_params;
2251
2252 if (!dpu_enc) {
2253 DPU_ERROR("invalid arg(s), enc %d\n", dpu_enc != NULL);
2254 return -EINVAL;
2255 }
2256
2257 dpu_enc->cur_master = NULL;
2258
2259 memset(&phys_params, 0, sizeof(phys_params));
2260 phys_params.dpu_kms = dpu_kms;
2261 phys_params.parent = &dpu_enc->base;
2262 phys_params.parent_ops = &dpu_encoder_parent_ops;
2263 phys_params.enc_spinlock = &dpu_enc->enc_spinlock;
2264
2265 switch (disp_info->intf_type) {
2266 case DRM_MODE_ENCODER_DSI:
2267 intf_type = INTF_DSI;
2268 break;
2269 case DRM_MODE_ENCODER_TMDS:
2270 intf_type = INTF_DP;
2271 break;
2272 case DRM_MODE_ENCODER_VIRTUAL:
2273 intf_type = INTF_WB;
2274 break;
2275 }
2276
2277 WARN_ON(disp_info->num_of_h_tiles < 1);
2278
2279 DPU_DEBUG("disp_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
2280
2281 if (disp_info->intf_type != DRM_MODE_ENCODER_VIRTUAL)
2282 dpu_enc->idle_pc_supported =
2283 dpu_kms->catalog->caps->has_idle_pc;
2284
2285 dpu_enc->dsc = disp_info->dsc;
2286
2287 mutex_lock(&dpu_enc->enc_lock);
2288 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
2289 /*
2290  * Left-most tile is at index 0; the value is the controller id:
2291  * h_tile_instance[2] = {0, 1}: DSI0 = left, DSI1 = right
2292  * h_tile_instance[2] = {1, 0}: DSI1 = left, DSI0 = right
2293  */
2294 u32 controller_id = disp_info->h_tile_instance[i];
2295
2296 if (disp_info->num_of_h_tiles > 1) {
2297 if (i == 0)
2298 phys_params.split_role = ENC_ROLE_MASTER;
2299 else
2300 phys_params.split_role = ENC_ROLE_SLAVE;
2301 } else {
2302 phys_params.split_role = ENC_ROLE_SOLO;
2303 }
2304
2305 DPU_DEBUG("h_tile_instance %d = %d, split_role %d\n",
2306 i, controller_id, phys_params.split_role);
2307
2308 phys_params.intf_idx = dpu_encoder_get_intf(dpu_kms->catalog,
2309 intf_type,
2310 controller_id);
2311
2312 phys_params.wb_idx = dpu_encoder_get_wb(dpu_kms->catalog,
2313 intf_type, controller_id);
2314
2315 /*
2316  * phys_params may describe either an INTF or a WB block, but never both.
2317  */
2318 if ((phys_params.intf_idx == INTF_MAX) &&
2319 (phys_params.wb_idx == WB_MAX)) {
2320 DPU_ERROR_ENC(dpu_enc, "could not get intf or wb: type %d, id %d\n",
2321 intf_type, controller_id);
2322 ret = -EINVAL;
2323 }
2324
2325 if ((phys_params.intf_idx != INTF_MAX) &&
2326 (phys_params.wb_idx != WB_MAX)) {
2327 DPU_ERROR_ENC(dpu_enc, "both intf and wb present: type %d, id %d\n",
2328 intf_type, controller_id);
2329 ret = -EINVAL;
2330 }
2331
2332 if (!ret) {
2333 ret = dpu_encoder_virt_add_phys_encs(disp_info,
2334 dpu_enc, &phys_params);
2335 if (ret)
2336 DPU_ERROR_ENC(dpu_enc, "failed to add phys encs\n");
2337 }
2338 }
2339
2340 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2341 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2342 atomic_set(&phys->vsync_cnt, 0);
2343 atomic_set(&phys->underrun_cnt, 0);
2344
2345 if (phys->intf_idx >= INTF_0 && phys->intf_idx < INTF_MAX)
2346 phys->hw_intf = dpu_rm_get_intf(&dpu_kms->rm, phys->intf_idx);
2347
2348 if (phys->wb_idx >= WB_0 && phys->wb_idx < WB_MAX)
2349 phys->hw_wb = dpu_rm_get_wb(&dpu_kms->rm, phys->wb_idx);
2350
2351 if (!phys->hw_intf && !phys->hw_wb) {
2352 DPU_ERROR_ENC(dpu_enc, "no intf or wb block assigned at idx: %d\n", i);
2353 ret = -EINVAL;
2354 }
2355
2356 if (phys->hw_intf && phys->hw_wb) {
2357 DPU_ERROR_ENC(dpu_enc,
2358 "invalid phys both intf and wb block at idx: %d\n", i);
2359 ret = -EINVAL;
2360 }
2361 }
2362
2363 mutex_unlock(&dpu_enc->enc_lock);
2364
2365 return ret;
2366 }
2367
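/*
 * Watchdog armed around each kickoff: if no frame-done event arrives before
 * frame_done_timer expires, report DPU_ENCODER_FRAME_EVENT_ERROR through the
 * CRTC frame-event callback.
 */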
2368 static void dpu_encoder_frame_done_timeout(struct timer_list *t)
2369 {
2370 struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
2371 frame_done_timer);
2372 struct drm_encoder *drm_enc = &dpu_enc->base;
2373 u32 event;
2374
2375 if (!drm_enc->dev) {
2376 DPU_ERROR("invalid parameters\n");
2377 return;
2378 }
2379
2380 if (!dpu_enc->frame_busy_mask[0] || !dpu_enc->crtc_frame_event_cb) {
2381 DRM_DEBUG_KMS("id:%u invalid timeout frame_busy_mask=%lu\n",
2382 DRMID(drm_enc), dpu_enc->frame_busy_mask[0]);
2383 return;
2384 } else if (!atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
2385 DRM_DEBUG_KMS("id:%u invalid timeout\n", DRMID(drm_enc));
2386 return;
2387 }
2388
2389 DPU_ERROR_ENC(dpu_enc, "frame done timeout\n");
2390
2391 event = DPU_ENCODER_FRAME_EVENT_ERROR;
2392 trace_dpu_enc_frame_done_timeout(DRMID(drm_enc), event);
2393 dpu_enc->crtc_frame_event_cb(dpu_enc->crtc_frame_event_cb_data, event);
2394 }
2395
2396 static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs = {
2397 .atomic_mode_set = dpu_encoder_virt_atomic_mode_set,
2398 .disable = dpu_encoder_virt_disable,
2399 .enable = dpu_encoder_virt_enable,
2400 .atomic_check = dpu_encoder_virt_atomic_check,
2401 };
2402
2403 static const struct drm_encoder_funcs dpu_encoder_funcs = {
2404 .destroy = dpu_encoder_destroy,
2405 .late_register = dpu_encoder_late_register,
2406 .early_unregister = dpu_encoder_early_unregister,
2407 };
2408
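/*
 * Second-stage encoder initialization: binds the virtual encoder created by
 * dpu_encoder_init() to its display, sets up the frame-done watchdog, the
 * DSI vsync-event timer and, for DP, wide-bus support, then caches the
 * display info.
 */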
2409 int dpu_encoder_setup(struct drm_device *dev, struct drm_encoder *enc,
2410 struct msm_display_info *disp_info)
2411 {
2412 struct msm_drm_private *priv = dev->dev_private;
2413 struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
2415 struct dpu_encoder_virt *dpu_enc = NULL;
2416 int ret = 0;
2417
2418 dpu_enc = to_dpu_encoder_virt(enc);
2419
2420 ret = dpu_encoder_setup_display(dpu_enc, dpu_kms, disp_info);
2421 if (ret)
2422 goto fail;
2423
2424 atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
2425 timer_setup(&dpu_enc->frame_done_timer,
2426 dpu_encoder_frame_done_timeout, 0);
2427
2428 if (disp_info->intf_type == DRM_MODE_ENCODER_DSI)
2429 timer_setup(&dpu_enc->vsync_event_timer,
2430 dpu_encoder_vsync_event_handler,
2431 0);
2432 else if (disp_info->intf_type == DRM_MODE_ENCODER_TMDS)
2433 dpu_enc->wide_bus_en = msm_dp_wide_bus_available(
2434 priv->dp[disp_info->h_tile_instance[0]]);
2435
2436 INIT_DELAYED_WORK(&dpu_enc->delayed_off_work,
2437 dpu_encoder_off_work);
2438 dpu_enc->idle_timeout = IDLE_TIMEOUT;
2439
2440 kthread_init_work(&dpu_enc->vsync_event_work,
2441 dpu_encoder_vsync_event_work_handler);
2442
2443 memcpy(&dpu_enc->disp_info, disp_info, sizeof(*disp_info));
2444
2445 DPU_DEBUG_ENC(dpu_enc, "created\n");
2446
2447 return ret;
2448
2449 fail:
2450 DPU_ERROR("failed to create encoder\n");
2453
2454 return ret;
2457 }
2458
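/*
 * First-stage encoder initialization: allocate the virtual encoder and
 * register it with the DRM core. A hypothetical caller pairs it with
 * dpu_encoder_setup(), e.g.:
 *
 *	encoder = dpu_encoder_init(dev, DRM_MODE_ENCODER_DSI);
 *	if (IS_ERR(encoder))
 *		return PTR_ERR(encoder);
 *	ret = dpu_encoder_setup(dev, encoder, &info);
 */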
2459 struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
2460 int drm_enc_mode)
2461 {
2462 struct dpu_encoder_virt *dpu_enc = NULL;
2463 int rc = 0;
2464
2465 dpu_enc = devm_kzalloc(dev->dev, sizeof(*dpu_enc), GFP_KERNEL);
2466 if (!dpu_enc)
2467 return ERR_PTR(-ENOMEM);
2468
2470 rc = drm_encoder_init(dev, &dpu_enc->base, &dpu_encoder_funcs,
2471 drm_enc_mode, NULL);
2472 if (rc) {
2473 devm_kfree(dev->dev, dpu_enc);
2474 return ERR_PTR(rc);
2475 }
2476
2477 drm_encoder_helper_add(&dpu_enc->base, &dpu_encoder_helper_funcs);
2478
2479 spin_lock_init(&dpu_enc->enc_spinlock);
2480 dpu_enc->enabled = false;
2481 mutex_init(&dpu_enc->enc_lock);
2482 mutex_init(&dpu_enc->rc_lock);
2483
2484 return &dpu_enc->base;
2485 }
2486
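/*
 * Block until every physical encoder signals the requested event; returns
 * the first non-zero error from a phys encoder's wait op. Illustrative use
 * only (not a call site in this file):
 *
 *	ret = dpu_encoder_wait_for_event(drm_enc, MSM_ENC_COMMIT_DONE);
 */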
2487 int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
2488 enum msm_event_wait event)
2489 {
2490 int (*fn_wait)(struct dpu_encoder_phys *phys_enc) = NULL;
2491 struct dpu_encoder_virt *dpu_enc = NULL;
2492 int i, ret = 0;
2493
2494 if (!drm_enc) {
2495 DPU_ERROR("invalid encoder\n");
2496 return -EINVAL;
2497 }
2498 dpu_enc = to_dpu_encoder_virt(drm_enc);
2499 DPU_DEBUG_ENC(dpu_enc, "\n");
2500
2501 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2502 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2503
2504 switch (event) {
2505 case MSM_ENC_COMMIT_DONE:
2506 fn_wait = phys->ops.wait_for_commit_done;
2507 break;
2508 case MSM_ENC_TX_COMPLETE:
2509 fn_wait = phys->ops.wait_for_tx_complete;
2510 break;
2511 case MSM_ENC_VBLANK:
2512 fn_wait = phys->ops.wait_for_vblank;
2513 break;
2514 default:
2515 DPU_ERROR_ENC(dpu_enc, "unknown wait event %d\n",
2516 event);
2517 return -EINVAL;
2518 }
2519
2520 if (fn_wait) {
2521 DPU_ATRACE_BEGIN("wait_for_completion_event");
2522 ret = fn_wait(phys);
2523 DPU_ATRACE_END("wait_for_completion_event");
2524 if (ret)
2525 return ret;
2526 }
2527 }
2528
2529 return ret;
2530 }
2531
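/*
 * Report the interface mode of the current master phys encoder, falling
 * back to the first phys encoder when no master has been chosen yet.
 */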
2532 enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
2533 {
2534 struct dpu_encoder_virt *dpu_enc = NULL;
2535
2536 if (!encoder) {
2537 DPU_ERROR("invalid encoder\n");
2538 return INTF_MODE_NONE;
2539 }
2540 dpu_enc = to_dpu_encoder_virt(encoder);
2541
2542 if (dpu_enc->cur_master)
2543 return dpu_enc->cur_master->intf_mode;
2544
2545 if (dpu_enc->num_phys_encs)
2546 return dpu_enc->phys_encs[0]->intf_mode;
2547
2548 return INTF_MODE_NONE;
2549 }
2550
2551 unsigned int dpu_encoder_helper_get_dsc(struct dpu_encoder_phys *phys_enc)
2552 {
2553 struct drm_encoder *encoder = phys_enc->parent;
2554 struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(encoder);
2555
2556 return dpu_enc->dsc_mask;
2557 }