Back to home page

OSCL-LXR

 
 

    


0001 // SPDX-License-Identifier: GPL-2.0-only
0002 /*
0003  * Copyright (c) 2015-2018, 2020-2021 The Linux Foundation. All rights reserved.
0004  */
0005 
0006 #define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
0007 #include <linux/delay.h>
0008 #include "dpu_encoder_phys.h"
0009 #include "dpu_hw_interrupts.h"
0010 #include "dpu_hw_pingpong.h"
0011 #include "dpu_core_irq.h"
0012 #include "dpu_formats.h"
0013 #include "dpu_trace.h"
0014 #include "disp/msm_disp_snapshot.h"
0015 
0016 #define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
0017         (e) && (e)->base.parent ? \
0018         (e)->base.parent->base.id : -1, \
0019         (e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)
0020 
0021 #define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
0022         (e) && (e)->base.parent ? \
0023         (e)->base.parent->base.id : -1, \
0024         (e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)
0025 
0026 #define to_dpu_encoder_phys_cmd(x) \
0027     container_of(x, struct dpu_encoder_phys_cmd, base)
0028 
0029 #define PP_TIMEOUT_MAX_TRIALS   10
0030 
0031 /*
0032  * Tearcheck sync start and continue thresholds are empirically found
0033  * based on common panels In the future, may want to allow panels to override
0034  * these default values
0035  */
0036 #define DEFAULT_TEARCHECK_SYNC_THRESH_START 4
0037 #define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE  4
0038 
0039 #define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000
0040 
0041 #define DPU_ENC_MAX_POLL_TIMEOUT_US 2000
0042 
0043 static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
0044 {
0045     return (phys_enc->split_role != ENC_ROLE_SLAVE);
0046 }
0047 
0048 static void _dpu_encoder_phys_cmd_update_intf_cfg(
0049         struct dpu_encoder_phys *phys_enc)
0050 {
0051     struct dpu_encoder_phys_cmd *cmd_enc =
0052             to_dpu_encoder_phys_cmd(phys_enc);
0053     struct dpu_hw_ctl *ctl;
0054     struct dpu_hw_intf_cfg intf_cfg = { 0 };
0055 
0056     ctl = phys_enc->hw_ctl;
0057     if (!ctl->ops.setup_intf_cfg)
0058         return;
0059 
0060     intf_cfg.intf = phys_enc->intf_idx;
0061     intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
0062     intf_cfg.stream_sel = cmd_enc->stream_sel;
0063     intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
0064     ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
0065 
0066     /* setup which pp blk will connect to this intf */
0067     if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) && phys_enc->hw_intf->ops.bind_pingpong_blk)
0068         phys_enc->hw_intf->ops.bind_pingpong_blk(
0069                 phys_enc->hw_intf,
0070                 true,
0071                 phys_enc->hw_pp->idx);
0072 }
0073 
/*
 * IRQ handler for the pingpong "frame transfer done" interrupt.
 * Notifies the parent encoder of FRAME_EVENT_DONE, decrements the
 * pending kickoff count (clamped at zero) and wakes any thread blocked
 * on the kickoff wait queue.
 */
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
{
    struct dpu_encoder_phys *phys_enc = arg;
    unsigned long lock_flags;
    int new_cnt;
    u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

    if (!phys_enc->hw_pp)
        return;

    DPU_ATRACE_BEGIN("pp_done_irq");
    /* notify all synchronous clients first, then asynchronous clients */
    if (phys_enc->parent_ops->handle_frame_done)
        phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
                phys_enc, event);

    /* hold the encoder spinlock so this pairs with the kickoff path */
    spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
    /*
     * NOTE: atomic_add_unless() returns whether the decrement happened
     * (0 when the count was already zero), not the new count value.
     */
    new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
    spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

    trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
                      phys_enc->hw_pp->idx - PINGPONG_0,
                      new_cnt, event);

    /* Signal any waiting atomic commit thread */
    wake_up_all(&phys_enc->pending_kickoff_wq);
    DPU_ATRACE_END("pp_done_irq");
}
0102 
0103 static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
0104 {
0105     struct dpu_encoder_phys *phys_enc = arg;
0106     struct dpu_encoder_phys_cmd *cmd_enc;
0107 
0108     if (!phys_enc->hw_pp)
0109         return;
0110 
0111     DPU_ATRACE_BEGIN("rd_ptr_irq");
0112     cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
0113 
0114     if (phys_enc->parent_ops->handle_vblank_virt)
0115         phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
0116             phys_enc);
0117 
0118     atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
0119     wake_up_all(&cmd_enc->pending_vblank_wq);
0120     DPU_ATRACE_END("rd_ptr_irq");
0121 }
0122 
/*
 * IRQ handler for CTL_START: consume one pending ctl-start count and
 * wake anyone blocked in _dpu_encoder_phys_cmd_wait_for_ctl_start().
 */
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
{
    struct dpu_encoder_phys *phys_enc = arg;

    DPU_ATRACE_BEGIN("ctl_start_irq");

    /* never drive the count negative on a spurious interrupt */
    atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

    /* Signal any waiting ctl start interrupt */
    wake_up_all(&phys_enc->pending_kickoff_wq);
    DPU_ATRACE_END("ctl_start_irq");
}
0135 
/* IRQ handler for interface underrun: forward to the parent encoder. */
static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
{
    struct dpu_encoder_phys *phys_enc = arg;

    if (phys_enc->parent_ops->handle_underrun_virt)
        phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
            phys_enc);
}
0144 
/*
 * Cache the interrupt indices for this configuration, looked up from
 * the bound CTL, pingpong and interface hardware blocks. The crtc and
 * connector states are unused here.
 */
static void dpu_encoder_phys_cmd_atomic_mode_set(
        struct dpu_encoder_phys *phys_enc,
        struct drm_crtc_state *crtc_state,
        struct drm_connector_state *conn_state)
{
    phys_enc->irq[INTR_IDX_CTL_START] = phys_enc->hw_ctl->caps->intr_start;

    phys_enc->irq[INTR_IDX_PINGPONG] = phys_enc->hw_pp->caps->intr_done;

    phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_pp->caps->intr_rdptr;

    phys_enc->irq[INTR_IDX_UNDERRUN] = phys_enc->hw_intf->cap->intr_underrun;
}
0158 
/*
 * Handle a pingpong-done timeout: count it, log it (rate limited),
 * snapshot hardware state, request a CTL reset before the next kickoff
 * and report FRAME_EVENT_ERROR to the parent encoder.
 *
 * Returns -EINVAL when no pingpong block is bound, otherwise -ETIMEDOUT.
 */
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
        struct dpu_encoder_phys *phys_enc)
{
    struct dpu_encoder_phys_cmd *cmd_enc =
            to_dpu_encoder_phys_cmd(phys_enc);
    u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
    bool do_log = false;
    struct drm_encoder *drm_enc;

    if (!phys_enc->hw_pp)
        return -EINVAL;

    drm_enc = phys_enc->parent;

    cmd_enc->pp_timeout_report_cnt++;
    if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
        /* too many consecutive timeouts: declare the panel dead */
        frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
        do_log = true;
    } else if (cmd_enc->pp_timeout_report_cnt == 1) {
        do_log = true;
    }

    trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
             phys_enc->hw_pp->idx - PINGPONG_0,
             cmd_enc->pp_timeout_report_cnt,
             atomic_read(&phys_enc->pending_kickoff_cnt),
             frame_event);

    /* to avoid flooding, only log first time, and "dead" time */
    if (do_log) {
        DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
              DRMID(drm_enc),
              phys_enc->hw_pp->idx - PINGPONG_0,
              phys_enc->hw_ctl->idx - CTL_0,
              cmd_enc->pp_timeout_report_cnt,
              atomic_read(&phys_enc->pending_kickoff_cnt));
        /* capture register state for postmortem debugging */
        msm_disp_snapshot_state(drm_enc->dev);
        /* stop rd_ptr interrupts while the pipeline is wedged */
        dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
                phys_enc->irq[INTR_IDX_RDPTR]);
    }

    /* drop the kickoff that timed out, never going below zero */
    atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

    /* request a ctl reset before the next kickoff */
    phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

    if (phys_enc->parent_ops->handle_frame_done)
        phys_enc->parent_ops->handle_frame_done(
                drm_enc, phys_enc, frame_event);

    return -ETIMEDOUT;
}
0211 
0212 static int _dpu_encoder_phys_cmd_wait_for_idle(
0213         struct dpu_encoder_phys *phys_enc)
0214 {
0215     struct dpu_encoder_phys_cmd *cmd_enc =
0216             to_dpu_encoder_phys_cmd(phys_enc);
0217     struct dpu_encoder_wait_info wait_info;
0218     int ret;
0219 
0220     wait_info.wq = &phys_enc->pending_kickoff_wq;
0221     wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
0222     wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
0223 
0224     ret = dpu_encoder_helper_wait_for_irq(phys_enc,
0225             phys_enc->irq[INTR_IDX_PINGPONG],
0226             dpu_encoder_phys_cmd_pp_tx_done_irq,
0227             &wait_info);
0228     if (ret == -ETIMEDOUT)
0229         _dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
0230     else if (!ret)
0231         cmd_enc->pp_timeout_report_cnt = 0;
0232 
0233     return ret;
0234 }
0235 
/*
 * Refcounted enable/disable of the read-pointer (vblank) interrupt.
 * The first enable registers the rd_ptr callback, the last disable
 * unregisters it. Slave encoders never register (they don't report
 * vblank). Returns 0 on success, -EINVAL on refcount underflow or
 * missing pingpong block, else the register/unregister error code.
 */
static int dpu_encoder_phys_cmd_control_vblank_irq(
        struct dpu_encoder_phys *phys_enc,
        bool enable)
{
    int ret = 0;
    int refcount;

    if (!phys_enc->hw_pp) {
        DPU_ERROR("invalid encoder\n");
        return -EINVAL;
    }

    /* snapshot for logging; the atomic ops below do the real work */
    refcount = atomic_read(&phys_enc->vblank_refcount);

    /* Slave encoders don't report vblank */
    if (!dpu_encoder_phys_cmd_is_master(phys_enc))
        goto end;

    /* protect against negative */
    if (!enable && refcount == 0) {
        ret = -EINVAL;
        goto end;
    }

    DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
              phys_enc->hw_pp->idx - PINGPONG_0,
              enable ? "true" : "false", refcount);

    /* register on the 0->1 transition, unregister on 1->0 */
    if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
        ret = dpu_core_irq_register_callback(phys_enc->dpu_kms,
                phys_enc->irq[INTR_IDX_RDPTR],
                dpu_encoder_phys_cmd_pp_rd_ptr_irq,
                phys_enc);
    else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
        ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
                phys_enc->irq[INTR_IDX_RDPTR]);

end:
    if (ret) {
        DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
              DRMID(phys_enc->parent),
              phys_enc->hw_pp->idx - PINGPONG_0, ret,
              enable ? "true" : "false", refcount);
    }

    return ret;
}
0283 
/*
 * Register or unregister all interrupt callbacks for this encoder:
 * pingpong done, underrun, vblank (refcounted) and, on the master
 * only, CTL_START. Teardown runs in the reverse order of setup.
 */
static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
        bool enable)
{
    trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
            phys_enc->hw_pp->idx - PINGPONG_0,
            enable, atomic_read(&phys_enc->vblank_refcount));

    if (enable) {
        dpu_core_irq_register_callback(phys_enc->dpu_kms,
                phys_enc->irq[INTR_IDX_PINGPONG],
                dpu_encoder_phys_cmd_pp_tx_done_irq,
                phys_enc);
        dpu_core_irq_register_callback(phys_enc->dpu_kms,
                phys_enc->irq[INTR_IDX_UNDERRUN],
                dpu_encoder_phys_cmd_underrun_irq,
                phys_enc);
        dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

        /* only the master waits on CTL_START, so only it registers */
        if (dpu_encoder_phys_cmd_is_master(phys_enc))
            dpu_core_irq_register_callback(phys_enc->dpu_kms,
                    phys_enc->irq[INTR_IDX_CTL_START],
                    dpu_encoder_phys_cmd_ctl_start_irq,
                    phys_enc);
    } else {
        if (dpu_encoder_phys_cmd_is_master(phys_enc))
            dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
                    phys_enc->irq[INTR_IDX_CTL_START]);

        dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
                phys_enc->irq[INTR_IDX_UNDERRUN]);
        dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
        dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
                phys_enc->irq[INTR_IDX_PINGPONG]);
    }
}
0319 
0320 static void dpu_encoder_phys_cmd_tearcheck_config(
0321         struct dpu_encoder_phys *phys_enc)
0322 {
0323     struct dpu_encoder_phys_cmd *cmd_enc =
0324         to_dpu_encoder_phys_cmd(phys_enc);
0325     struct dpu_hw_tear_check tc_cfg = { 0 };
0326     struct drm_display_mode *mode;
0327     bool tc_enable = true;
0328     u32 vsync_hz;
0329     struct dpu_kms *dpu_kms;
0330 
0331     if (!phys_enc->hw_pp) {
0332         DPU_ERROR("invalid encoder\n");
0333         return;
0334     }
0335     mode = &phys_enc->cached_mode;
0336 
0337     DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
0338 
0339     if (!phys_enc->hw_pp->ops.setup_tearcheck ||
0340         !phys_enc->hw_pp->ops.enable_tearcheck) {
0341         DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
0342         return;
0343     }
0344 
0345     dpu_kms = phys_enc->dpu_kms;
0346 
0347     /*
0348      * TE default: dsi byte clock calculated base on 70 fps;
0349      * around 14 ms to complete a kickoff cycle if te disabled;
0350      * vclk_line base on 60 fps; write is faster than read;
0351      * init == start == rdptr;
0352      *
0353      * vsync_count is ratio of MDP VSYNC clock frequency to LCD panel
0354      * frequency divided by the no. of rows (lines) in the LCDpanel.
0355      */
0356     vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
0357     if (vsync_hz <= 0) {
0358         DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n",
0359                  vsync_hz);
0360         return;
0361     }
0362 
0363     tc_cfg.vsync_count = vsync_hz /
0364                 (mode->vtotal * drm_mode_vrefresh(mode));
0365 
0366     /*
0367      * Set the sync_cfg_height to twice vtotal so that if we lose a
0368      * TE event coming from the display TE pin we won't stall immediately
0369      */
0370     tc_cfg.hw_vsync_mode = 1;
0371     tc_cfg.sync_cfg_height = mode->vtotal * 2;
0372     tc_cfg.vsync_init_val = mode->vdisplay;
0373     tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
0374     tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
0375     tc_cfg.start_pos = mode->vdisplay;
0376     tc_cfg.rd_ptr_irq = mode->vdisplay + 1;
0377 
0378     DPU_DEBUG_CMDENC(cmd_enc,
0379         "tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
0380         phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
0381         mode->vtotal, drm_mode_vrefresh(mode));
0382     DPU_DEBUG_CMDENC(cmd_enc,
0383         "tc %d enable %u start_pos %u rd_ptr_irq %u\n",
0384         phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
0385         tc_cfg.rd_ptr_irq);
0386     DPU_DEBUG_CMDENC(cmd_enc,
0387         "tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
0388         phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
0389         tc_cfg.vsync_count, tc_cfg.vsync_init_val);
0390     DPU_DEBUG_CMDENC(cmd_enc,
0391         "tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
0392         phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
0393         tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);
0394 
0395     phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
0396     phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
0397 }
0398 
/*
 * Program the command-mode pipeline: interface configuration plus
 * tearcheck. Requires a bound pingpong block and a CTL that supports
 * setup_intf_cfg.
 */
static void _dpu_encoder_phys_cmd_pingpong_config(
        struct dpu_encoder_phys *phys_enc)
{
    struct dpu_encoder_phys_cmd *cmd_enc =
        to_dpu_encoder_phys_cmd(phys_enc);

    if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
        DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
        return;
    }

    DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
            phys_enc->hw_pp->idx - PINGPONG_0);
    drm_mode_debug_printmodeline(&phys_enc->cached_mode);

    _dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
    dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}
0417 
0418 static bool dpu_encoder_phys_cmd_needs_single_flush(
0419         struct dpu_encoder_phys *phys_enc)
0420 {
0421     /**
0422      * we do separate flush for each CTL and let
0423      * CTL_START synchronize them
0424      */
0425     return false;
0426 }
0427 
0428 static void dpu_encoder_phys_cmd_enable_helper(
0429         struct dpu_encoder_phys *phys_enc)
0430 {
0431     struct dpu_hw_ctl *ctl;
0432 
0433     if (!phys_enc->hw_pp) {
0434         DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
0435         return;
0436     }
0437 
0438     dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);
0439 
0440     _dpu_encoder_phys_cmd_pingpong_config(phys_enc);
0441 
0442     if (!dpu_encoder_phys_cmd_is_master(phys_enc))
0443         return;
0444 
0445     ctl = phys_enc->hw_ctl;
0446     ctl->ops.update_pending_flush_intf(ctl, phys_enc->intf_idx);
0447 }
0448 
/*
 * Enable op: run the common enable path once, guarding against a
 * missing pingpong block and double-enable.
 */
static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
    struct dpu_encoder_phys_cmd *cmd_enc =
        to_dpu_encoder_phys_cmd(phys_enc);

    if (!phys_enc->hw_pp) {
        DPU_ERROR("invalid phys encoder\n");
        return;
    }

    DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

    if (phys_enc->enable_state == DPU_ENC_ENABLED) {
        DPU_ERROR("already enabled\n");
        return;
    }

    dpu_encoder_phys_cmd_enable_helper(phys_enc);
    phys_enc->enable_state = DPU_ENC_ENABLED;
}
0469 
0470 static void _dpu_encoder_phys_cmd_connect_te(
0471         struct dpu_encoder_phys *phys_enc, bool enable)
0472 {
0473     if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
0474         return;
0475 
0476     trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
0477     phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
0478 }
0479 
/*
 * Called before idle power collapse: disconnect the external TE pin
 * (reconnected later by the post-kickoff handler).
 */
static void dpu_encoder_phys_cmd_prepare_idle_pc(
        struct dpu_encoder_phys *phys_enc)
{
    _dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}
0485 
0486 static int dpu_encoder_phys_cmd_get_line_count(
0487         struct dpu_encoder_phys *phys_enc)
0488 {
0489     struct dpu_hw_pingpong *hw_pp;
0490 
0491     if (!phys_enc->hw_pp)
0492         return -EINVAL;
0493 
0494     if (!dpu_encoder_phys_cmd_is_master(phys_enc))
0495         return -EINVAL;
0496 
0497     hw_pp = phys_enc->hw_pp;
0498     if (!hw_pp->ops.get_line_count)
0499         return -EINVAL;
0500 
0501     return hw_pp->ops.get_line_count(hw_pp);
0502 }
0503 
0504 static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
0505 {
0506     struct dpu_encoder_phys_cmd *cmd_enc =
0507         to_dpu_encoder_phys_cmd(phys_enc);
0508     struct dpu_hw_ctl *ctl;
0509 
0510     if (!phys_enc->hw_pp) {
0511         DPU_ERROR("invalid encoder\n");
0512         return;
0513     }
0514     DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
0515               phys_enc->hw_pp->idx - PINGPONG_0,
0516               phys_enc->enable_state);
0517 
0518     if (phys_enc->enable_state == DPU_ENC_DISABLED) {
0519         DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
0520         return;
0521     }
0522 
0523     if (phys_enc->hw_pp->ops.enable_tearcheck)
0524         phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
0525 
0526     if (phys_enc->hw_intf->ops.bind_pingpong_blk) {
0527         phys_enc->hw_intf->ops.bind_pingpong_blk(
0528                 phys_enc->hw_intf,
0529                 false,
0530                 phys_enc->hw_pp->idx);
0531 
0532         ctl = phys_enc->hw_ctl;
0533         ctl->ops.update_pending_flush_intf(ctl, phys_enc->intf_idx);
0534     }
0535 
0536     phys_enc->enable_state = DPU_ENC_DISABLED;
0537 }
0538 
/*
 * Destroy op: phys_enc is embedded in the cmd encoder allocated in
 * dpu_encoder_phys_cmd_init(), so freeing the container frees both.
 */
static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
    kfree(to_dpu_encoder_phys_cmd(phys_enc));
}
0546 
/*
 * Prepare for the next kickoff: if a previous kickoff is still
 * outstanding, wait for it to complete before the new frame is pushed.
 * A failed wait resets the pending count so the pipeline can recover.
 */
static void dpu_encoder_phys_cmd_prepare_for_kickoff(
        struct dpu_encoder_phys *phys_enc)
{
    struct dpu_encoder_phys_cmd *cmd_enc =
            to_dpu_encoder_phys_cmd(phys_enc);
    int ret;

    if (!phys_enc->hw_pp) {
        DPU_ERROR("invalid encoder\n");
        return;
    }
    DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
              phys_enc->hw_pp->idx - PINGPONG_0,
              atomic_read(&phys_enc->pending_kickoff_cnt));

    /*
     * If more than one kickoff is outstanding we have to wait for the
     * previous one to complete before proceeding with this one.
     */
    ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
    if (ret) {
        /* force pending_kickoff_cnt 0 to discard failed kickoff */
        atomic_set(&phys_enc->pending_kickoff_cnt, 0);
        DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
              DRMID(phys_enc->parent), ret,
              phys_enc->hw_pp->idx - PINGPONG_0);
    }

    DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
            phys_enc->hw_pp->idx - PINGPONG_0,
            atomic_read(&phys_enc->pending_kickoff_cnt));
}
0579 
0580 static bool dpu_encoder_phys_cmd_is_ongoing_pptx(
0581         struct dpu_encoder_phys *phys_enc)
0582 {
0583     struct dpu_hw_pp_vsync_info info;
0584 
0585     if (!phys_enc)
0586         return false;
0587 
0588     phys_enc->hw_pp->ops.get_vsync_info(phys_enc->hw_pp, &info);
0589     if (info.wr_ptr_line_count > 0 &&
0590         info.wr_ptr_line_count < phys_enc->cached_mode.vdisplay)
0591         return true;
0592 
0593     return false;
0594 }
0595 
/*
 * Before a commit: if autorefresh is active on the master, disable it
 * and poll until no frame transfer is in flight, so the new frame can
 * be pushed safely. TE is disconnected during the sequence and
 * reconnected at the end.
 */
static void dpu_encoder_phys_cmd_prepare_commit(
        struct dpu_encoder_phys *phys_enc)
{
    struct dpu_encoder_phys_cmd *cmd_enc =
        to_dpu_encoder_phys_cmd(phys_enc);
    int trial = 0;

    if (!phys_enc)
        return;
    if (!phys_enc->hw_pp)
        return;
    if (!dpu_encoder_phys_cmd_is_master(phys_enc))
        return;

    /* If autorefresh is already disabled, we have nothing to do */
    if (!phys_enc->hw_pp->ops.get_autorefresh(phys_enc->hw_pp, NULL))
        return;

    /*
     * If autorefresh is enabled, disable it and make sure it is safe to
     * proceed with current frame commit/push. Sequence followed is:
     * 1. Disable TE
     * 2. Disable autorefresh config
     * 3. Poll for frame transfer ongoing to be false
     * 4. Enable TE back
     */
    _dpu_encoder_phys_cmd_connect_te(phys_enc, false);
    phys_enc->hw_pp->ops.setup_autorefresh(phys_enc->hw_pp, 0, false);

    do {
        udelay(DPU_ENC_MAX_POLL_TIMEOUT_US);
        /* bound the total poll time by the kickoff timeout */
        if ((trial * DPU_ENC_MAX_POLL_TIMEOUT_US)
                > (KICKOFF_TIMEOUT_MS * USEC_PER_MSEC)) {
            DPU_ERROR_CMDENC(cmd_enc,
                    "disable autorefresh failed\n");
            break;
        }

        trial++;
    } while (dpu_encoder_phys_cmd_is_ongoing_pptx(phys_enc));

    _dpu_encoder_phys_cmd_connect_te(phys_enc, true);

    DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc),
             "disabled autorefresh\n");
}
0642 
0643 static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
0644         struct dpu_encoder_phys *phys_enc)
0645 {
0646     struct dpu_encoder_phys_cmd *cmd_enc =
0647             to_dpu_encoder_phys_cmd(phys_enc);
0648     struct dpu_encoder_wait_info wait_info;
0649     int ret;
0650 
0651     wait_info.wq = &phys_enc->pending_kickoff_wq;
0652     wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
0653     wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
0654 
0655     ret = dpu_encoder_helper_wait_for_irq(phys_enc,
0656             phys_enc->irq[INTR_IDX_CTL_START],
0657             dpu_encoder_phys_cmd_ctl_start_irq,
0658             &wait_info);
0659     if (ret == -ETIMEDOUT) {
0660         DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
0661         ret = -EINVAL;
0662     } else if (!ret)
0663         ret = 0;
0664 
0665     return ret;
0666 }
0667 
0668 static int dpu_encoder_phys_cmd_wait_for_tx_complete(
0669         struct dpu_encoder_phys *phys_enc)
0670 {
0671     int rc;
0672 
0673     rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
0674     if (rc) {
0675         DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
0676               DRMID(phys_enc->parent), rc,
0677               phys_enc->intf_idx - INTF_0);
0678     }
0679 
0680     return rc;
0681 }
0682 
0683 static int dpu_encoder_phys_cmd_wait_for_commit_done(
0684         struct dpu_encoder_phys *phys_enc)
0685 {
0686     /* only required for master controller */
0687     if (!dpu_encoder_phys_cmd_is_master(phys_enc))
0688         return 0;
0689 
0690     if (phys_enc->hw_ctl->ops.is_started(phys_enc->hw_ctl))
0691         return dpu_encoder_phys_cmd_wait_for_tx_complete(phys_enc);
0692 
0693     return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
0694 }
0695 
0696 static int dpu_encoder_phys_cmd_wait_for_vblank(
0697         struct dpu_encoder_phys *phys_enc)
0698 {
0699     int rc = 0;
0700     struct dpu_encoder_phys_cmd *cmd_enc;
0701     struct dpu_encoder_wait_info wait_info;
0702 
0703     cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
0704 
0705     /* only required for master controller */
0706     if (!dpu_encoder_phys_cmd_is_master(phys_enc))
0707         return rc;
0708 
0709     wait_info.wq = &cmd_enc->pending_vblank_wq;
0710     wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
0711     wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
0712 
0713     atomic_inc(&cmd_enc->pending_vblank_cnt);
0714 
0715     rc = dpu_encoder_helper_wait_for_irq(phys_enc,
0716             phys_enc->irq[INTR_IDX_RDPTR],
0717             dpu_encoder_phys_cmd_pp_rd_ptr_irq,
0718             &wait_info);
0719 
0720     return rc;
0721 }
0722 
/*
 * Post-kickoff hook: re-enable external TE, either for the first time
 * after enabling, or after it was disconnected for autorefresh /
 * idle power collapse.
 */
static void dpu_encoder_phys_cmd_handle_post_kickoff(
        struct dpu_encoder_phys *phys_enc)
{
    _dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}
0732 
/* Trigger the start of the next frame via the shared encoder helper. */
static void dpu_encoder_phys_cmd_trigger_start(
        struct dpu_encoder_phys *phys_enc)
{
    dpu_encoder_helper_trigger_start(phys_enc);
}
0738 
/* Populate the physical-encoder ops table with the command-mode handlers. */
static void dpu_encoder_phys_cmd_init_ops(
        struct dpu_encoder_phys_ops *ops)
{
    ops->prepare_commit = dpu_encoder_phys_cmd_prepare_commit;
    ops->is_master = dpu_encoder_phys_cmd_is_master;
    ops->atomic_mode_set = dpu_encoder_phys_cmd_atomic_mode_set;
    ops->enable = dpu_encoder_phys_cmd_enable;
    ops->disable = dpu_encoder_phys_cmd_disable;
    ops->destroy = dpu_encoder_phys_cmd_destroy;
    ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
    ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
    ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
    ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
    ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
    ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
    ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
    ops->irq_control = dpu_encoder_phys_cmd_irq_control;
    /* restore reuses the enable path after e.g. power collapse */
    ops->restore = dpu_encoder_phys_cmd_enable_helper;
    ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
    ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
    ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}
0761 
0762 struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
0763         struct dpu_enc_phys_init_params *p)
0764 {
0765     struct dpu_encoder_phys *phys_enc = NULL;
0766     struct dpu_encoder_phys_cmd *cmd_enc = NULL;
0767     int i, ret = 0;
0768 
0769     DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);
0770 
0771     cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
0772     if (!cmd_enc) {
0773         ret = -ENOMEM;
0774         DPU_ERROR("failed to allocate\n");
0775         return ERR_PTR(ret);
0776     }
0777     phys_enc = &cmd_enc->base;
0778     phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
0779     phys_enc->intf_idx = p->intf_idx;
0780 
0781     dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
0782     phys_enc->parent = p->parent;
0783     phys_enc->parent_ops = p->parent_ops;
0784     phys_enc->dpu_kms = p->dpu_kms;
0785     phys_enc->split_role = p->split_role;
0786     phys_enc->intf_mode = INTF_MODE_CMD;
0787     phys_enc->enc_spinlock = p->enc_spinlock;
0788     cmd_enc->stream_sel = 0;
0789     phys_enc->enable_state = DPU_ENC_DISABLED;
0790     for (i = 0; i < ARRAY_SIZE(phys_enc->irq); i++)
0791         phys_enc->irq[i] = -EINVAL;
0792 
0793     atomic_set(&phys_enc->vblank_refcount, 0);
0794     atomic_set(&phys_enc->pending_kickoff_cnt, 0);
0795     atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
0796     atomic_set(&cmd_enc->pending_vblank_cnt, 0);
0797     init_waitqueue_head(&phys_enc->pending_kickoff_wq);
0798     init_waitqueue_head(&cmd_enc->pending_vblank_wq);
0799 
0800     DPU_DEBUG_CMDENC(cmd_enc, "created\n");
0801 
0802     return phys_enc;
0803 }