0001 // SPDX-License-Identifier: GPL-2.0-only
0002 /*
0003  * Copyright (c) 2022 Qualcomm Innovation Center, Inc. All rights reserved.
0004  * Copyright (c) 2014-2021 The Linux Foundation. All rights reserved.
0005  * Copyright (C) 2013 Red Hat
0006  * Author: Rob Clark <robdclark@gmail.com>
0007  */
0008 
0009 #define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
0010 #include <linux/sort.h>
0011 #include <linux/debugfs.h>
0012 #include <linux/ktime.h>
0013 #include <linux/bits.h>
0014 
0015 #include <drm/drm_atomic.h>
0016 #include <drm/drm_blend.h>
0017 #include <drm/drm_crtc.h>
0018 #include <drm/drm_flip_work.h>
0019 #include <drm/drm_framebuffer.h>
0020 #include <drm/drm_mode.h>
0021 #include <drm/drm_probe_helper.h>
0022 #include <drm/drm_rect.h>
0023 #include <drm/drm_vblank.h>
0024 
0025 #include "dpu_kms.h"
0026 #include "dpu_hw_lm.h"
0027 #include "dpu_hw_ctl.h"
0028 #include "dpu_hw_dspp.h"
0029 #include "dpu_crtc.h"
0030 #include "dpu_plane.h"
0031 #include "dpu_encoder.h"
0032 #include "dpu_vbif.h"
0033 #include "dpu_core_perf.h"
0034 #include "dpu_trace.h"
0035 
0036 /* layer mixer index on dpu_crtc */
0037 #define LEFT_MIXER 0
0038 #define RIGHT_MIXER 1
0039 
0040 /* timeout in ms waiting for frame done */
0041 #define DPU_CRTC_FRAME_DONE_TIMEOUT_MS  60
0042 
0043 #define CONVERT_S3_15(val) \
0044     (((((u64)val) & ~BIT_ULL(63)) >> 17) & GENMASK_ULL(17, 0))
0045 
0046 static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc)
0047 {
0048     struct msm_drm_private *priv = crtc->dev->dev_private;
0049 
0050     return to_dpu_kms(priv->kms);
0051 }
0052 
/*
 * dpu_crtc_destroy - crtc destroy hook: tear down the drm_crtc and free the
 * containing dpu_crtc allocation.
 *
 * Fix: the original computed to_dpu_crtc(crtc) before the !crtc guard. While
 * container_of() is only pointer arithmetic, the ordering defeats the guard's
 * intent and reads as a use-before-check; resolve the container only after
 * the NULL check.
 */
static void dpu_crtc_destroy(struct drm_crtc *crtc)
{
	struct dpu_crtc *dpu_crtc;

	if (!crtc)
		return;

	dpu_crtc = to_dpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(dpu_crtc);
}
0063 
0064 static struct drm_encoder *get_encoder_from_crtc(struct drm_crtc *crtc)
0065 {
0066     struct drm_device *dev = crtc->dev;
0067     struct drm_encoder *encoder;
0068 
0069     drm_for_each_encoder(encoder, dev)
0070         if (encoder->crtc == crtc)
0071             return encoder;
0072 
0073     return NULL;
0074 }
0075 
0076 static enum dpu_crtc_crc_source dpu_crtc_parse_crc_source(const char *src_name)
0077 {
0078     if (!src_name ||
0079         !strcmp(src_name, "none"))
0080         return DPU_CRTC_CRC_SOURCE_NONE;
0081     if (!strcmp(src_name, "auto") ||
0082         !strcmp(src_name, "lm"))
0083         return DPU_CRTC_CRC_SOURCE_LAYER_MIXER;
0084     if (!strcmp(src_name, "encoder"))
0085         return DPU_CRTC_CRC_SOURCE_ENCODER;
0086 
0087     return DPU_CRTC_CRC_SOURCE_INVALID;
0088 }
0089 
0090 static int dpu_crtc_verify_crc_source(struct drm_crtc *crtc,
0091         const char *src_name, size_t *values_cnt)
0092 {
0093     enum dpu_crtc_crc_source source = dpu_crtc_parse_crc_source(src_name);
0094     struct dpu_crtc_state *crtc_state = to_dpu_crtc_state(crtc->state);
0095 
0096     if (source < 0) {
0097         DRM_DEBUG_DRIVER("Invalid source %s for CRTC%d\n", src_name, crtc->index);
0098         return -EINVAL;
0099     }
0100 
0101     if (source == DPU_CRTC_CRC_SOURCE_LAYER_MIXER) {
0102         *values_cnt = crtc_state->num_mixers;
0103     } else if (source == DPU_CRTC_CRC_SOURCE_ENCODER) {
0104         struct drm_encoder *drm_enc;
0105 
0106         *values_cnt = 0;
0107 
0108         drm_for_each_encoder_mask(drm_enc, crtc->dev, crtc->state->encoder_mask)
0109             *values_cnt += dpu_encoder_get_crc_values_cnt(drm_enc);
0110     }
0111 
0112     return 0;
0113 }
0114 
0115 static void dpu_crtc_setup_lm_misr(struct dpu_crtc_state *crtc_state)
0116 {
0117     struct dpu_crtc_mixer *m;
0118     int i;
0119 
0120     for (i = 0; i < crtc_state->num_mixers; ++i) {
0121         m = &crtc_state->mixers[i];
0122 
0123         if (!m->hw_lm || !m->hw_lm->ops.setup_misr)
0124             continue;
0125 
0126         /* Calculate MISR over 1 frame */
0127         m->hw_lm->ops.setup_misr(m->hw_lm, true, 1);
0128     }
0129 }
0130 
0131 static void dpu_crtc_setup_encoder_misr(struct drm_crtc *crtc)
0132 {
0133     struct drm_encoder *drm_enc;
0134 
0135     drm_for_each_encoder_mask(drm_enc, crtc->dev, crtc->state->encoder_mask)
0136         dpu_encoder_setup_misr(drm_enc);
0137 }
0138 
0139 static int dpu_crtc_set_crc_source(struct drm_crtc *crtc, const char *src_name)
0140 {
0141     enum dpu_crtc_crc_source source = dpu_crtc_parse_crc_source(src_name);
0142     enum dpu_crtc_crc_source current_source;
0143     struct dpu_crtc_state *crtc_state;
0144     struct drm_device *drm_dev = crtc->dev;
0145 
0146     bool was_enabled;
0147     bool enable = false;
0148     int ret = 0;
0149 
0150     if (source < 0) {
0151         DRM_DEBUG_DRIVER("Invalid CRC source %s for CRTC%d\n", src_name, crtc->index);
0152         return -EINVAL;
0153     }
0154 
0155     ret = drm_modeset_lock(&crtc->mutex, NULL);
0156 
0157     if (ret)
0158         return ret;
0159 
0160     enable = (source != DPU_CRTC_CRC_SOURCE_NONE);
0161     crtc_state = to_dpu_crtc_state(crtc->state);
0162 
0163     spin_lock_irq(&drm_dev->event_lock);
0164     current_source = crtc_state->crc_source;
0165     spin_unlock_irq(&drm_dev->event_lock);
0166 
0167     was_enabled = (current_source != DPU_CRTC_CRC_SOURCE_NONE);
0168 
0169     if (!was_enabled && enable) {
0170         ret = drm_crtc_vblank_get(crtc);
0171 
0172         if (ret)
0173             goto cleanup;
0174 
0175     } else if (was_enabled && !enable) {
0176         drm_crtc_vblank_put(crtc);
0177     }
0178 
0179     spin_lock_irq(&drm_dev->event_lock);
0180     crtc_state->crc_source = source;
0181     spin_unlock_irq(&drm_dev->event_lock);
0182 
0183     crtc_state->crc_frame_skip_count = 0;
0184 
0185     if (source == DPU_CRTC_CRC_SOURCE_LAYER_MIXER)
0186         dpu_crtc_setup_lm_misr(crtc_state);
0187     else if (source == DPU_CRTC_CRC_SOURCE_ENCODER)
0188         dpu_crtc_setup_encoder_misr(crtc);
0189     else
0190         ret = -EINVAL;
0191 
0192 cleanup:
0193     drm_modeset_unlock(&crtc->mutex);
0194 
0195     return ret;
0196 }
0197 
0198 static u32 dpu_crtc_get_vblank_counter(struct drm_crtc *crtc)
0199 {
0200     struct drm_encoder *encoder = get_encoder_from_crtc(crtc);
0201     if (!encoder) {
0202         DRM_ERROR("no encoder found for crtc %d\n", crtc->index);
0203         return 0;
0204     }
0205 
0206     return dpu_encoder_get_vsync_count(encoder);
0207 }
0208 
0209 static int dpu_crtc_get_lm_crc(struct drm_crtc *crtc,
0210         struct dpu_crtc_state *crtc_state)
0211 {
0212     struct dpu_crtc_mixer *m;
0213     u32 crcs[CRTC_DUAL_MIXERS];
0214 
0215     int rc = 0;
0216     int i;
0217 
0218     BUILD_BUG_ON(ARRAY_SIZE(crcs) != ARRAY_SIZE(crtc_state->mixers));
0219 
0220     for (i = 0; i < crtc_state->num_mixers; ++i) {
0221 
0222         m = &crtc_state->mixers[i];
0223 
0224         if (!m->hw_lm || !m->hw_lm->ops.collect_misr)
0225             continue;
0226 
0227         rc = m->hw_lm->ops.collect_misr(m->hw_lm, &crcs[i]);
0228 
0229         if (rc) {
0230             if (rc != -ENODATA)
0231                 DRM_DEBUG_DRIVER("MISR read failed\n");
0232             return rc;
0233         }
0234     }
0235 
0236     return drm_crtc_add_crc_entry(crtc, true,
0237             drm_crtc_accurate_vblank_count(crtc), crcs);
0238 }
0239 
0240 static int dpu_crtc_get_encoder_crc(struct drm_crtc *crtc)
0241 {
0242     struct drm_encoder *drm_enc;
0243     int rc, pos = 0;
0244     u32 crcs[INTF_MAX];
0245 
0246     drm_for_each_encoder_mask(drm_enc, crtc->dev, crtc->state->encoder_mask) {
0247         rc = dpu_encoder_get_crc(drm_enc, crcs, pos);
0248         if (rc < 0) {
0249             if (rc != -ENODATA)
0250                 DRM_DEBUG_DRIVER("MISR read failed\n");
0251 
0252             return rc;
0253         }
0254 
0255         pos += rc;
0256     }
0257 
0258     return drm_crtc_add_crc_entry(crtc, true,
0259             drm_crtc_accurate_vblank_count(crtc), crcs);
0260 }
0261 
0262 static int dpu_crtc_get_crc(struct drm_crtc *crtc)
0263 {
0264     struct dpu_crtc_state *crtc_state = to_dpu_crtc_state(crtc->state);
0265 
0266     /* Skip first 2 frames in case of "uncooked" CRCs */
0267     if (crtc_state->crc_frame_skip_count < 2) {
0268         crtc_state->crc_frame_skip_count++;
0269         return 0;
0270     }
0271 
0272     if (crtc_state->crc_source == DPU_CRTC_CRC_SOURCE_LAYER_MIXER)
0273         return dpu_crtc_get_lm_crc(crtc, crtc_state);
0274     else if (crtc_state->crc_source == DPU_CRTC_CRC_SOURCE_ENCODER)
0275         return dpu_crtc_get_encoder_crc(crtc);
0276 
0277     return -EINVAL;
0278 }
0279 
/*
 * dpu_crtc_get_scanout_position - report the current scanout position for
 * vblank timestamping.
 * @crtc: CRTC being queried
 * @in_vblank_irq: true when called from the vblank IRQ path (unused here)
 * @vpos/@hpos: out params; vpos is relative to the start of active video
 *              (negative while in the blanking region), hpos is always 0
 * @stime/@etime: optional out params bracketing the hardware register read
 * @mode: the mode whose crtc_* timing fields are used for the translation
 *
 * Returns false if no encoder is attached (position unknown), true otherwise.
 */
static bool dpu_crtc_get_scanout_position(struct drm_crtc *crtc,
				       bool in_vblank_irq,
				       int *vpos, int *hpos,
				       ktime_t *stime, ktime_t *etime,
				       const struct drm_display_mode *mode)
{
	unsigned int pipe = crtc->index;
	struct drm_encoder *encoder;
	int line, vsw, vbp, vactive_start, vactive_end, vfp_end;

	encoder = get_encoder_from_crtc(crtc);
	if (!encoder) {
		DRM_ERROR("no encoder found for crtc %d\n", pipe);
		return false;
	}

	/* vertical sync width and back porch, in lines */
	vsw = mode->crtc_vsync_end - mode->crtc_vsync_start;
	vbp = mode->crtc_vtotal - mode->crtc_vsync_end;

	/*
	 * the line counter is 1 at the start of the VSYNC pulse and VTOTAL at
	 * the end of VFP. Translate the porch values relative to the line
	 * counter positions.
	 */

	vactive_start = vsw + vbp + 1;
	vactive_end = vactive_start + mode->crtc_vdisplay;

	/* last scan line before VSYNC */
	vfp_end = mode->crtc_vtotal;

	if (stime)
		*stime = ktime_get();

	line = dpu_encoder_get_linecount(encoder);

	/*
	 * NOTE(review): the first and last branches are identical; only the
	 * front-porch case (line > vactive_end) gets the extra -vfp_end
	 * offset so vpos counts up toward 0 as VSYNC approaches.
	 */
	if (line < vactive_start)
		line -= vactive_start;
	else if (line > vactive_end)
		line = line - vfp_end - vactive_start;
	else
		line -= vactive_start;

	*vpos = line;
	*hpos = 0;

	if (etime)
		*etime = ktime_get();

	return true;
}
0331 
/*
 * _dpu_crtc_setup_blend_cfg - program one mixer's blend stage for a plane.
 * @mixer: the layer mixer to program
 * @pstate: plane state supplying alpha, blend mode and stage (z-order)
 * @format: scanout format; alpha_enable gates per-pixel alpha blending
 *
 * Derives the DPU blend-op bits from the DRM pixel_blend_mode / plane alpha
 * and writes them via the mixer's setup_blend_config op.
 */
static void _dpu_crtc_setup_blend_cfg(struct dpu_crtc_mixer *mixer,
		struct dpu_plane_state *pstate, struct dpu_format *format)
{
	struct dpu_hw_mixer *lm = mixer->hw_lm;
	uint32_t blend_op;
	uint32_t fg_alpha, bg_alpha;

	/* DRM plane alpha is 16-bit; hardware takes 8-bit constants */
	fg_alpha = pstate->base.alpha >> 8;
	bg_alpha = 0xff - fg_alpha;

	/* default to opaque blending */
	if (pstate->base.pixel_blend_mode == DRM_MODE_BLEND_PIXEL_NONE ||
	    !format->alpha_enable) {
		blend_op = DPU_BLEND_FG_ALPHA_FG_CONST |
			DPU_BLEND_BG_ALPHA_BG_CONST;
	} else if (pstate->base.pixel_blend_mode == DRM_MODE_BLEND_PREMULTI) {
		/* premultiplied: FG uses constant alpha, BG keyed by FG pixel */
		blend_op = DPU_BLEND_FG_ALPHA_FG_CONST |
			DPU_BLEND_BG_ALPHA_FG_PIXEL;
		if (fg_alpha != 0xff) {
			/* modulate BG by the plane alpha as well */
			bg_alpha = fg_alpha;
			blend_op |= DPU_BLEND_BG_MOD_ALPHA |
				    DPU_BLEND_BG_INV_MOD_ALPHA;
		} else {
			blend_op |= DPU_BLEND_BG_INV_ALPHA;
		}
	} else {
		/* coverage blending */
		blend_op = DPU_BLEND_FG_ALPHA_FG_PIXEL |
			DPU_BLEND_BG_ALPHA_FG_PIXEL;
		if (fg_alpha != 0xff) {
			/* modulate both FG and BG by the plane alpha */
			bg_alpha = fg_alpha;
			blend_op |= DPU_BLEND_FG_MOD_ALPHA |
				    DPU_BLEND_FG_INV_MOD_ALPHA |
				    DPU_BLEND_BG_MOD_ALPHA |
				    DPU_BLEND_BG_INV_MOD_ALPHA;
		} else {
			blend_op |= DPU_BLEND_BG_INV_ALPHA;
		}
	}

	lm->ops.setup_blend_config(lm, pstate->stage,
				fg_alpha, bg_alpha, blend_op);

	DRM_DEBUG_ATOMIC("format:%p4cc, alpha_en:%u blend_op:0x%x\n",
		  &format->base.pixel_format, format->alpha_enable, blend_op);
}
0378 
0379 static void _dpu_crtc_program_lm_output_roi(struct drm_crtc *crtc)
0380 {
0381     struct dpu_crtc_state *crtc_state;
0382     int lm_idx, lm_horiz_position;
0383 
0384     crtc_state = to_dpu_crtc_state(crtc->state);
0385 
0386     lm_horiz_position = 0;
0387     for (lm_idx = 0; lm_idx < crtc_state->num_mixers; lm_idx++) {
0388         const struct drm_rect *lm_roi = &crtc_state->lm_bounds[lm_idx];
0389         struct dpu_hw_mixer *hw_lm = crtc_state->mixers[lm_idx].hw_lm;
0390         struct dpu_hw_mixer_cfg cfg;
0391 
0392         if (!lm_roi || !drm_rect_visible(lm_roi))
0393             continue;
0394 
0395         cfg.out_width = drm_rect_width(lm_roi);
0396         cfg.out_height = drm_rect_height(lm_roi);
0397         cfg.right_mixer = lm_horiz_position++;
0398         cfg.flags = 0;
0399         hw_lm->ops.setup_mixer_out(hw_lm, &cfg);
0400     }
0401 }
0402 
/*
 * _dpu_crtc_blend_setup_mixer - walk the CRTC's planes and build the mixer
 * stage configuration.
 * @crtc: CRTC being programmed
 * @dpu_crtc: driver CRTC (unused here but kept for the call signature)
 * @mixer: array of cstate->num_mixers mixer slots to accumulate flush masks
 *         and op modes into
 * @stage_cfg: out param; filled with the pipe/multirect assignment per
 *             blend stage
 *
 * For every visible plane: records its SSPP in the fetch_active bitmap,
 * assigns it a slot within its z-order stage, and programs the blend config
 * on each mixer.
 */
static void _dpu_crtc_blend_setup_mixer(struct drm_crtc *crtc,
	struct dpu_crtc *dpu_crtc, struct dpu_crtc_mixer *mixer,
	struct dpu_hw_stage_cfg *stage_cfg)
{
	struct drm_plane *plane;
	struct drm_framebuffer *fb;
	struct drm_plane_state *state;
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct dpu_plane_state *pstate = NULL;
	struct dpu_format *format;
	struct dpu_hw_ctl *ctl = mixer->lm_ctl;

	u32 flush_mask;
	uint32_t stage_idx, lm_idx;
	/* next free slot within each z-order stage */
	int zpos_cnt[DPU_STAGE_MAX + 1] = { 0 };
	bool bg_alpha_enable = false;
	DECLARE_BITMAP(fetch_active, SSPP_MAX);

	memset(fetch_active, 0, sizeof(fetch_active));
	drm_atomic_crtc_for_each_plane(plane, crtc) {
		state = plane->state;
		if (!state)
			continue;

		/* fully clipped planes contribute nothing */
		if (!state->visible)
			continue;

		pstate = to_dpu_plane_state(state);
		fb = state->fb;

		dpu_plane_get_ctl_flush(plane, ctl, &flush_mask);
		/* mark this plane's SSPP as actively fetching */
		set_bit(dpu_plane_pipe(plane), fetch_active);

		DRM_DEBUG_ATOMIC("crtc %d stage:%d - plane %d sspp %d fb %d\n",
				crtc->base.id,
				pstate->stage,
				plane->base.id,
				dpu_plane_pipe(plane) - SSPP_VIG0,
				state->fb ? state->fb->base.id : -1);

		format = to_dpu_format(msm_framebuffer_format(pstate->base.fb));

		/* an alpha-capable base stage changes blending for later stages */
		if (pstate->stage == DPU_STAGE_BASE && format->alpha_enable)
			bg_alpha_enable = true;

		/* slot within this plane's z-order stage */
		stage_idx = zpos_cnt[pstate->stage]++;
		stage_cfg->stage[pstate->stage][stage_idx] =
					dpu_plane_pipe(plane);
		stage_cfg->multirect_index[pstate->stage][stage_idx] =
					pstate->multirect_index;

		trace_dpu_crtc_setup_mixer(DRMID(crtc), DRMID(plane),
					   state, pstate, stage_idx,
					   dpu_plane_pipe(plane) - SSPP_VIG0,
					   format->base.pixel_format,
					   fb ? fb->modifier : 0);

		/* blend config update */
		for (lm_idx = 0; lm_idx < cstate->num_mixers; lm_idx++) {
			_dpu_crtc_setup_blend_cfg(mixer + lm_idx,
						pstate, format);

			mixer[lm_idx].flush_mask |= flush_mask;

			/* opaque plane over an alpha base: clear the op mode */
			if (bg_alpha_enable && !format->alpha_enable)
				mixer[lm_idx].mixer_op_mode = 0;
			else
				mixer[lm_idx].mixer_op_mode |=
						1 << pstate->stage;
		}
	}

	if (ctl->ops.set_active_pipes)
		ctl->ops.set_active_pipes(ctl, fetch_active);

	_dpu_crtc_program_lm_output_roi(crtc);
}
0480 
0481 /**
0482  * _dpu_crtc_blend_setup - configure crtc mixers
0483  * @crtc: Pointer to drm crtc structure
0484  */
static void _dpu_crtc_blend_setup(struct drm_crtc *crtc)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct dpu_crtc_mixer *mixer = cstate->mixers;
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_mixer *lm;
	struct dpu_hw_stage_cfg stage_cfg;
	int i;

	DRM_DEBUG_ATOMIC("%s\n", dpu_crtc->name);

	/* reset accumulated state and clear old blend stages first */
	for (i = 0; i < cstate->num_mixers; i++) {
		mixer[i].mixer_op_mode = 0;
		mixer[i].flush_mask = 0;
		if (mixer[i].lm_ctl->ops.clear_all_blendstages)
			mixer[i].lm_ctl->ops.clear_all_blendstages(
					mixer[i].lm_ctl);
	}

	/* initialize stage cfg */
	memset(&stage_cfg, 0, sizeof(struct dpu_hw_stage_cfg));

	/* fills stage_cfg and the per-mixer op modes / flush masks */
	_dpu_crtc_blend_setup_mixer(crtc, dpu_crtc, mixer, &stage_cfg);

	/* program each mixer and its CTL with the accumulated configuration */
	for (i = 0; i < cstate->num_mixers; i++) {
		ctl = mixer[i].lm_ctl;
		lm = mixer[i].hw_lm;

		lm->ops.setup_alpha_out(lm, mixer[i].mixer_op_mode);

		mixer[i].flush_mask |= ctl->ops.get_bitmask_mixer(ctl,
			mixer[i].hw_lm->idx);

		/* stage config flush mask */
		ctl->ops.update_pending_flush(ctl, mixer[i].flush_mask);

		DRM_DEBUG_ATOMIC("lm %d, op_mode 0x%X, ctl %d, flush mask 0x%x\n",
			mixer[i].hw_lm->idx - LM_0,
			mixer[i].mixer_op_mode,
			ctl->idx - CTL_0,
			mixer[i].flush_mask);

		ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
			&stage_cfg);
	}
}
0532 
0533 /**
0534  *  _dpu_crtc_complete_flip - signal pending page_flip events
0535  * Any pending vblank events are added to the vblank_event_list
0536  * so that the next vblank interrupt shall signal them.
0537  * However PAGE_FLIP events are not handled through the vblank_event_list.
0538  * This API signals any pending PAGE_FLIP events requested through
0539  * DRM_IOCTL_MODE_PAGE_FLIP and are cached in the dpu_crtc->event.
0540  * @crtc: Pointer to drm crtc structure
0541  */
0542 static void _dpu_crtc_complete_flip(struct drm_crtc *crtc)
0543 {
0544     struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
0545     struct drm_device *dev = crtc->dev;
0546     unsigned long flags;
0547 
0548     spin_lock_irqsave(&dev->event_lock, flags);
0549     if (dpu_crtc->event) {
0550         DRM_DEBUG_VBL("%s: send event: %pK\n", dpu_crtc->name,
0551                   dpu_crtc->event);
0552         trace_dpu_crtc_complete_flip(DRMID(crtc));
0553         drm_crtc_send_vblank_event(crtc, dpu_crtc->event);
0554         dpu_crtc->event = NULL;
0555     }
0556     spin_unlock_irqrestore(&dev->event_lock, flags);
0557 }
0558 
/*
 * dpu_crtc_get_intf_mode - report the interface mode of the first encoder
 * in this CRTC's encoder mask (INTF_MODE_NONE when no encoder is attached).
 */
enum dpu_intf_mode dpu_crtc_get_intf_mode(struct drm_crtc *crtc)
{
	struct drm_encoder *encoder;

	/*
	 * TODO: This function is called from dpu debugfs and as part of atomic
	 * check. When called from debugfs, the crtc->mutex must be held to
	 * read crtc->state. However reading crtc->state from atomic check isn't
	 * allowed (unless you have a good reason, a big comment, and a deep
	 * understanding of how the atomic/modeset locks work (<- and this is
	 * probably not possible)). So we'll keep the WARN_ON here for now, but
	 * really we need to figure out a better way to track our operating mode
	 */
	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));

	/* TODO: Returns the first INTF_MODE, could there be multiple values? */
	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		return dpu_encoder_get_intf_mode(encoder);

	return INTF_MODE_NONE;
}
0580 
0581 void dpu_crtc_vblank_callback(struct drm_crtc *crtc)
0582 {
0583     struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
0584 
0585     /* keep statistics on vblank callback - with auto reset via debugfs */
0586     if (ktime_compare(dpu_crtc->vblank_cb_time, ktime_set(0, 0)) == 0)
0587         dpu_crtc->vblank_cb_time = ktime_get();
0588     else
0589         dpu_crtc->vblank_cb_count++;
0590 
0591     dpu_crtc_get_crc(crtc);
0592 
0593     drm_crtc_handle_vblank(crtc);
0594     trace_dpu_crtc_vblank_cb(DRMID(crtc));
0595 }
0596 
/*
 * dpu_crtc_frame_event_work - deferred (kthread) handler for encoder frame
 * events. Retires pending frames, releases bandwidth once the last frame
 * drains, completes the frame-done waiter, and recycles the event object.
 */
static void dpu_crtc_frame_event_work(struct kthread_work *work)
{
	struct dpu_crtc_frame_event *fevent = container_of(work,
			struct dpu_crtc_frame_event, work);
	struct drm_crtc *crtc = fevent->crtc;
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	unsigned long flags;
	bool frame_done = false;

	DPU_ATRACE_BEGIN("crtc_frame_event");

	DRM_DEBUG_ATOMIC("crtc%d event:%u ts:%lld\n", crtc->base.id, fevent->event,
			ktime_to_ns(fevent->ts));

	/* DONE, ERROR and PANEL_DEAD all retire a previously kicked-off frame */
	if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
				| DPU_ENCODER_FRAME_EVENT_ERROR
				| DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (atomic_read(&dpu_crtc->frame_pending) < 1) {
			/* ignore vblank when not pending */
		} else if (atomic_dec_return(&dpu_crtc->frame_pending) == 0) {
			/* release bandwidth and other resources */
			trace_dpu_crtc_frame_event_done(DRMID(crtc),
							fevent->event);
			dpu_core_perf_crtc_release_bw(crtc);
		} else {
			trace_dpu_crtc_frame_event_more_pending(DRMID(crtc),
								fevent->event);
		}

		if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
					| DPU_ENCODER_FRAME_EVENT_ERROR))
			frame_done = true;
	}

	if (fevent->event & DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)
		DPU_ERROR("crtc%d ts:%lld received panel dead event\n",
				crtc->base.id, ktime_to_ns(fevent->ts));

	/* wake any waiter blocked in _dpu_crtc_wait_for_frame_done() */
	if (frame_done)
		complete_all(&dpu_crtc->frame_done_comp);

	/* return the event object to the free list for reuse by the callback */
	spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
	list_add_tail(&fevent->list, &dpu_crtc->frame_event_list);
	spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);
	DPU_ATRACE_END("crtc_frame_event");
}
0644 
/*
 * dpu_crtc_frame_event_cb - crtc frame event callback API. CRTC module
 * registers this API to encoder for all frame event callbacks like
 * frame_error, frame_done, idle_timeout, etc. Encoder may call different events
 * from different context - IRQ, user thread, commit_thread, etc. Each event
 * should be carefully reviewed and should be processed in proper task context
 * to avoid scheduling delay or properly manage the irq context's bottom half
 * processing.
 */
static void dpu_crtc_frame_event_cb(void *data, u32 event)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
	struct dpu_crtc *dpu_crtc;
	struct msm_drm_private *priv;
	struct dpu_crtc_frame_event *fevent;
	unsigned long flags;
	u32 crtc_id;

	/* Nothing to do on idle event */
	if (event & DPU_ENCODER_FRAME_EVENT_IDLE)
		return;

	dpu_crtc = to_dpu_crtc(crtc);
	priv = crtc->dev->dev_private;
	crtc_id = drm_crtc_index(crtc);

	trace_dpu_crtc_frame_event_cb(DRMID(crtc), event);

	/* grab a pre-allocated event object from the free list */
	spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
	fevent = list_first_entry_or_null(&dpu_crtc->frame_event_list,
			struct dpu_crtc_frame_event, list);
	if (fevent)
		list_del_init(&fevent->list);
	spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);

	/* free list exhausted: events are arriving faster than the worker drains */
	if (!fevent) {
		DRM_ERROR_RATELIMITED("crtc%d event %d overflow\n", crtc->base.id, event);
		return;
	}

	fevent->event = event;
	fevent->crtc = crtc;
	fevent->ts = ktime_get();
	/* defer the real handling to dpu_crtc_frame_event_work() in task context */
	kthread_queue_work(priv->event_thread[crtc_id].worker, &fevent->work);
}
0690 
/*
 * dpu_crtc_complete_commit - post-commit housekeeping: trace the completion,
 * refresh core performance state, and signal any cached page-flip event.
 * @crtc: Pointer to drm crtc structure
 */
void dpu_crtc_complete_commit(struct drm_crtc *crtc)
{
	trace_dpu_crtc_complete_commit(DRMID(crtc));
	dpu_core_perf_crtc_update(crtc, 0, false);
	_dpu_crtc_complete_flip(crtc);
}
0697 
0698 static void _dpu_crtc_setup_lm_bounds(struct drm_crtc *crtc,
0699         struct drm_crtc_state *state)
0700 {
0701     struct dpu_crtc_state *cstate = to_dpu_crtc_state(state);
0702     struct drm_display_mode *adj_mode = &state->adjusted_mode;
0703     u32 crtc_split_width = adj_mode->hdisplay / cstate->num_mixers;
0704     int i;
0705 
0706     for (i = 0; i < cstate->num_mixers; i++) {
0707         struct drm_rect *r = &cstate->lm_bounds[i];
0708         r->x1 = crtc_split_width * i;
0709         r->y1 = 0;
0710         r->x2 = r->x1 + crtc_split_width;
0711         r->y2 = adj_mode->vdisplay;
0712 
0713         trace_dpu_crtc_setup_lm_bounds(DRMID(crtc), i, r);
0714     }
0715 }
0716 
0717 static void _dpu_crtc_get_pcc_coeff(struct drm_crtc_state *state,
0718         struct dpu_hw_pcc_cfg *cfg)
0719 {
0720     struct drm_color_ctm *ctm;
0721 
0722     memset(cfg, 0, sizeof(struct dpu_hw_pcc_cfg));
0723 
0724     ctm = (struct drm_color_ctm *)state->ctm->data;
0725 
0726     if (!ctm)
0727         return;
0728 
0729     cfg->r.r = CONVERT_S3_15(ctm->matrix[0]);
0730     cfg->g.r = CONVERT_S3_15(ctm->matrix[1]);
0731     cfg->b.r = CONVERT_S3_15(ctm->matrix[2]);
0732 
0733     cfg->r.g = CONVERT_S3_15(ctm->matrix[3]);
0734     cfg->g.g = CONVERT_S3_15(ctm->matrix[4]);
0735     cfg->b.g = CONVERT_S3_15(ctm->matrix[5]);
0736 
0737     cfg->r.b = CONVERT_S3_15(ctm->matrix[6]);
0738     cfg->g.b = CONVERT_S3_15(ctm->matrix[7]);
0739     cfg->b.b = CONVERT_S3_15(ctm->matrix[8]);
0740 }
0741 
0742 static void _dpu_crtc_setup_cp_blocks(struct drm_crtc *crtc)
0743 {
0744     struct drm_crtc_state *state = crtc->state;
0745     struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
0746     struct dpu_crtc_mixer *mixer = cstate->mixers;
0747     struct dpu_hw_pcc_cfg cfg;
0748     struct dpu_hw_ctl *ctl;
0749     struct dpu_hw_dspp *dspp;
0750     int i;
0751 
0752 
0753     if (!state->color_mgmt_changed)
0754         return;
0755 
0756     for (i = 0; i < cstate->num_mixers; i++) {
0757         ctl = mixer[i].lm_ctl;
0758         dspp = mixer[i].hw_dspp;
0759 
0760         if (!dspp || !dspp->ops.setup_pcc)
0761             continue;
0762 
0763         if (!state->ctm) {
0764             dspp->ops.setup_pcc(dspp, NULL);
0765         } else {
0766             _dpu_crtc_get_pcc_coeff(state, &cfg);
0767             dspp->ops.setup_pcc(dspp, &cfg);
0768         }
0769 
0770         mixer[i].flush_mask |= ctl->ops.get_bitmask_dspp(ctl,
0771             mixer[i].hw_dspp->idx);
0772 
0773         /* stage config flush mask */
0774         ctl->ops.update_pending_flush(ctl, mixer[i].flush_mask);
0775 
0776         DRM_DEBUG_ATOMIC("lm %d, ctl %d, flush mask 0x%x\n",
0777             mixer[i].hw_lm->idx - DSPP_0,
0778             ctl->idx - CTL_0,
0779             mixer[i].flush_mask);
0780     }
0781 }
0782 
/*
 * dpu_crtc_atomic_begin - prepare mixer/blend/color-processing hardware state
 * ahead of the plane updates for this commit. No-op for disabled CRTCs.
 */
static void dpu_crtc_atomic_begin(struct drm_crtc *crtc,
		struct drm_atomic_state *state)
{
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct drm_encoder *encoder;

	if (!crtc->state->enable) {
		DRM_DEBUG_ATOMIC("crtc%d -> enable %d, skip atomic_begin\n",
				crtc->base.id, crtc->state->enable);
		return;
	}

	DRM_DEBUG_ATOMIC("crtc%d\n", crtc->base.id);

	/* NOTE(review): called before the num_mixers check below — verify
	 * _dpu_crtc_setup_lm_bounds tolerates num_mixers == 0 */
	_dpu_crtc_setup_lm_bounds(crtc, crtc->state);

	/* encoder will trigger pending mask now */
	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_trigger_kickoff_pending(encoder);

	/*
	 * If no mixers have been allocated in dpu_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!cstate->num_mixers))
		return;

	_dpu_crtc_blend_setup(crtc);

	_dpu_crtc_setup_cp_blocks(crtc);

	/*
	 * PP_DONE irq is only used by command mode for now.
	 * It is better to request pending before FLUSH and START trigger
	 * to make sure no pp_done irq missed.
	 * This is safe because no pp_done will happen before SW trigger
	 * in command mode.
	 */
}
0823 
/*
 * dpu_crtc_atomic_flush - finalize the commit for this CRTC: cache the
 * page-flip event, update performance votes, and flush each plane. The
 * actual hardware kickoff is triggered later by the caller.
 */
static void dpu_crtc_atomic_flush(struct drm_crtc *crtc,
		struct drm_atomic_state *state)
{
	struct dpu_crtc *dpu_crtc;
	struct drm_device *dev;
	struct drm_plane *plane;
	struct msm_drm_private *priv;
	unsigned long flags;
	struct dpu_crtc_state *cstate;

	if (!crtc->state->enable) {
		DRM_DEBUG_ATOMIC("crtc%d -> enable %d, skip atomic_flush\n",
				crtc->base.id, crtc->state->enable);
		return;
	}

	DRM_DEBUG_ATOMIC("crtc%d\n", crtc->base.id);

	dpu_crtc = to_dpu_crtc(crtc);
	cstate = to_dpu_crtc_state(crtc->state);
	dev = crtc->dev;
	priv = dev->dev_private;

	/* event delivery relies on a per-CRTC kthread; bounds-check the index */
	if (crtc->index >= ARRAY_SIZE(priv->event_thread)) {
		DPU_ERROR("invalid crtc index[%d]\n", crtc->index);
		return;
	}

	/* take ownership of the pending event; sent from _dpu_crtc_complete_flip */
	WARN_ON(dpu_crtc->event);
	spin_lock_irqsave(&dev->event_lock, flags);
	dpu_crtc->event = crtc->state->event;
	crtc->state->event = NULL;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/*
	 * If no mixers has been allocated in dpu_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!cstate->num_mixers))
		return;

	/* update performance setting before crtc kickoff */
	dpu_core_perf_crtc_update(crtc, 1, false);

	/*
	 * Final plane updates: Give each plane a chance to complete all
	 *                      required writes/flushing before crtc's "flush
	 *                      everything" call below.
	 */
	drm_atomic_crtc_for_each_plane(plane, crtc) {
		if (dpu_crtc->smmu_state.transition_error)
			dpu_plane_set_error(plane, true);
		dpu_plane_flush(plane);
	}

	/* Kickoff will be scheduled by outer layer */
}
0882 
0883 /**
0884  * dpu_crtc_destroy_state - state destroy hook
0885  * @crtc: drm CRTC
0886  * @state: CRTC state object to release
0887  */
static void dpu_crtc_destroy_state(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(state);

	DRM_DEBUG_ATOMIC("crtc%d\n", crtc->base.id);

	/* release resources held by the embedded base drm_crtc_state first */
	__drm_atomic_helper_crtc_destroy_state(state);

	/* cstate embeds the base state, so this frees both */
	kfree(cstate);
}
0899 
0900 static int _dpu_crtc_wait_for_frame_done(struct drm_crtc *crtc)
0901 {
0902     struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
0903     int ret, rc = 0;
0904 
0905     if (!atomic_read(&dpu_crtc->frame_pending)) {
0906         DRM_DEBUG_ATOMIC("no frames pending\n");
0907         return 0;
0908     }
0909 
0910     DPU_ATRACE_BEGIN("frame done completion wait");
0911     ret = wait_for_completion_timeout(&dpu_crtc->frame_done_comp,
0912             msecs_to_jiffies(DPU_CRTC_FRAME_DONE_TIMEOUT_MS));
0913     if (!ret) {
0914         DRM_ERROR("frame done wait timed out, ret:%d\n", ret);
0915         rc = -ETIMEDOUT;
0916     }
0917     DPU_ATRACE_END("frame done completion wait");
0918 
0919     return rc;
0920 }
0921 
/**
 * dpu_crtc_commit_kickoff - start the committed frame on all attached encoders
 * @crtc: Pointer to drm crtc structure
 *
 * Validates every attached encoder, prepares them for kickoff and then kicks
 * them off so the flushed configuration takes effect.  Also bumps the
 * frame_pending counter and re-arms the frame-done completion that
 * _dpu_crtc_wait_for_frame_done() blocks on.
 */
void dpu_crtc_commit_kickoff(struct drm_crtc *crtc)
{
	struct drm_encoder *encoder;
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc);
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);

	/*
	 * If no mixers has been allocated in dpu_crtc_atomic_check(),
	 * it means we are trying to start a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!cstate->num_mixers))
		return;

	DPU_ATRACE_BEGIN("crtc_commit");

	/* abort the whole kickoff if any encoder is not ready to commit */
	drm_for_each_encoder_mask(encoder, crtc->dev,
			crtc->state->encoder_mask) {
		if (!dpu_encoder_is_valid_for_commit(encoder)) {
			DRM_DEBUG_ATOMIC("invalid FB not kicking off crtc\n");
			goto end;
		}
	}
	/*
	 * Encoder will flush/start now, unless it has a tx pending. If so, it
	 * may delay and flush at an irq event (e.g. ppdone)
	 */
	drm_for_each_encoder_mask(encoder, crtc->dev,
				  crtc->state->encoder_mask)
		dpu_encoder_prepare_for_kickoff(encoder);

	if (atomic_inc_return(&dpu_crtc->frame_pending) == 1) {
		/* acquire bandwidth and other resources */
		DRM_DEBUG_ATOMIC("crtc%d first commit\n", crtc->base.id);
	} else
		DRM_DEBUG_ATOMIC("crtc%d commit\n", crtc->base.id);

	dpu_crtc->play_count++;

	/* clear any previously latched VBIF errors before the new frame */
	dpu_vbif_clear_errors(dpu_kms);

	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_kickoff(encoder);

	/* re-arm the completion _dpu_crtc_wait_for_frame_done() waits on */
	reinit_completion(&dpu_crtc->frame_done_comp);

end:
	DPU_ATRACE_END("crtc_commit");
}
0972 
0973 static void dpu_crtc_reset(struct drm_crtc *crtc)
0974 {
0975     struct dpu_crtc_state *cstate = kzalloc(sizeof(*cstate), GFP_KERNEL);
0976 
0977     if (crtc->state)
0978         dpu_crtc_destroy_state(crtc, crtc->state);
0979 
0980     __drm_atomic_helper_crtc_reset(crtc, &cstate->base);
0981 }
0982 
0983 /**
0984  * dpu_crtc_duplicate_state - state duplicate hook
0985  * @crtc: Pointer to drm crtc structure
0986  */
0987 static struct drm_crtc_state *dpu_crtc_duplicate_state(struct drm_crtc *crtc)
0988 {
0989     struct dpu_crtc_state *cstate, *old_cstate = to_dpu_crtc_state(crtc->state);
0990 
0991     cstate = kmemdup(old_cstate, sizeof(*old_cstate), GFP_KERNEL);
0992     if (!cstate) {
0993         DPU_ERROR("failed to allocate state\n");
0994         return NULL;
0995     }
0996 
0997     /* duplicate base helper */
0998     __drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);
0999 
1000     return &cstate->base;
1001 }
1002 
1003 static void dpu_crtc_atomic_print_state(struct drm_printer *p,
1004                     const struct drm_crtc_state *state)
1005 {
1006     const struct dpu_crtc_state *cstate = to_dpu_crtc_state(state);
1007     int i;
1008 
1009     for (i = 0; i < cstate->num_mixers; i++) {
1010         drm_printf(p, "\tlm[%d]=%d\n", i, cstate->mixers[i].hw_lm->idx - LM_0);
1011         drm_printf(p, "\tctl[%d]=%d\n", i, cstate->mixers[i].lm_ctl->idx - CTL_0);
1012         if (cstate->mixers[i].hw_dspp)
1013             drm_printf(p, "\tdspp[%d]=%d\n", i, cstate->mixers[i].hw_dspp->idx - DSPP_0);
1014     }
1015 }
1016 
/**
 * dpu_crtc_disable - atomic disable hook, tears down the CRTC pipeline
 * @crtc: Pointer to drm crtc structure
 * @state: atomic state being applied
 *
 * Mirrors dpu_crtc_enable() in reverse: disables vblank, detaches the
 * encoders, drains any pending frame, drops the perf/bandwidth vote, clears
 * the mixer assignment and finally releases the runtime PM reference taken
 * in dpu_crtc_enable().
 */
static void dpu_crtc_disable(struct drm_crtc *crtc,
			     struct drm_atomic_state *state)
{
	struct drm_crtc_state *old_crtc_state = drm_atomic_get_old_crtc_state(state,
									      crtc);
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct drm_encoder *encoder;
	unsigned long flags;
	bool release_bandwidth = false;

	DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);

	/* Disable/save vblank irq handling */
	drm_crtc_vblank_off(crtc);

	/* use the old state: encoder_mask in the new state is already cleared */
	drm_for_each_encoder_mask(encoder, crtc->dev,
				  old_crtc_state->encoder_mask) {
		/* in video mode, we hold an extra bandwidth reference
		 * as we cannot drop bandwidth at frame-done if any
		 * crtc is being used in video mode.
		 */
		if (dpu_encoder_get_intf_mode(encoder) == INTF_MODE_VIDEO)
			release_bandwidth = true;
		dpu_encoder_assign_crtc(encoder, NULL);
	}

	/* wait for frame_event_done completion */
	if (_dpu_crtc_wait_for_frame_done(crtc))
		DPU_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
				crtc->base.id,
				atomic_read(&dpu_crtc->frame_pending));

	trace_dpu_crtc_disable(DRMID(crtc), false, dpu_crtc);
	dpu_crtc->enabled = false;

	/* drop any frames that never completed before the disable */
	if (atomic_read(&dpu_crtc->frame_pending)) {
		trace_dpu_crtc_disable_frame_pending(DRMID(crtc),
				     atomic_read(&dpu_crtc->frame_pending));
		if (release_bandwidth)
			dpu_core_perf_crtc_release_bw(crtc);
		atomic_set(&dpu_crtc->frame_pending, 0);
	}

	dpu_core_perf_crtc_update(crtc, 0, true);

	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_register_frame_event_callback(encoder, NULL, NULL);

	/* forget the mixer assignment; atomic_check rebuilds it on enable */
	memset(cstate->mixers, 0, sizeof(cstate->mixers));
	cstate->num_mixers = 0;

	/* disable clk & bw control until clk & bw properties are set */
	cstate->bw_control = false;
	cstate->bw_split_vote = false;

	/* complete a pending page-flip event since no vblank will follow */
	if (crtc->state->event && !crtc->state->active) {
		spin_lock_irqsave(&crtc->dev->event_lock, flags);
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
		spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
	}

	/* balances pm_runtime_get_sync() in dpu_crtc_enable() */
	pm_runtime_put_sync(crtc->dev->dev);
}
1082 
/**
 * dpu_crtc_enable - atomic enable hook, brings up the CRTC pipeline
 * @crtc: Pointer to drm crtc structure
 * @state: atomic state being applied
 *
 * Takes a runtime PM reference (released in dpu_crtc_disable()), registers
 * the frame-event callback on each attached encoder, takes the extra
 * bandwidth reference for video-mode interfaces, assigns this crtc to the
 * encoders and finally re-enables vblank handling.
 */
static void dpu_crtc_enable(struct drm_crtc *crtc,
		struct drm_atomic_state *state)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct drm_encoder *encoder;
	bool request_bandwidth = false;

	pm_runtime_get_sync(crtc->dev->dev);

	DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);

	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask) {
		/* in video mode, we hold an extra bandwidth reference
		 * as we cannot drop bandwidth at frame-done if any
		 * crtc is being used in video mode.
		 */
		if (dpu_encoder_get_intf_mode(encoder) == INTF_MODE_VIDEO)
			request_bandwidth = true;
		dpu_encoder_register_frame_event_callback(encoder,
				dpu_crtc_frame_event_cb, (void *)crtc);
	}

	if (request_bandwidth)
		atomic_inc(&_dpu_crtc_get_kms(crtc)->bandwidth_ref);

	trace_dpu_crtc_enable(DRMID(crtc), true, dpu_crtc);
	dpu_crtc->enabled = true;

	/* let encoders route vblank toggling to this crtc (see dpu_crtc_vblank) */
	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_assign_crtc(encoder, crtc);

	/* Enable/restore vblank irq handling */
	drm_crtc_vblank_on(crtc);
}
1117 
/* Per-plane bookkeeping used while validating a CRTC state in atomic_check */
struct plane_state {
	struct dpu_plane_state *dpu_pstate;	/* driver-private plane state */
	const struct drm_plane_state *drm_pstate;	/* base DRM plane state */
	int stage;	/* blend stage, taken from normalized_zpos */
	u32 pipe_id;	/* hw pipe id from dpu_plane_pipe(), indexes pipe_staged[] */
};
1124 
1125 static bool dpu_crtc_needs_dirtyfb(struct drm_crtc_state *cstate)
1126 {
1127     struct drm_crtc *crtc = cstate->crtc;
1128     struct drm_encoder *encoder;
1129 
1130     drm_for_each_encoder_mask (encoder, crtc->dev, cstate->encoder_mask) {
1131         if (dpu_encoder_get_intf_mode(encoder) == INTF_MODE_CMD) {
1132             return true;
1133         }
1134     }
1135 
1136     return false;
1137 }
1138 
1139 static int dpu_crtc_atomic_check(struct drm_crtc *crtc,
1140         struct drm_atomic_state *state)
1141 {
1142     struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
1143                                       crtc);
1144     struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
1145     struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc_state);
1146     struct plane_state *pstates;
1147 
1148     const struct drm_plane_state *pstate;
1149     struct drm_plane *plane;
1150     struct drm_display_mode *mode;
1151 
1152     int cnt = 0, rc = 0, mixer_width = 0, i, z_pos;
1153 
1154     struct dpu_multirect_plane_states multirect_plane[DPU_STAGE_MAX * 2];
1155     int multirect_count = 0;
1156     const struct drm_plane_state *pipe_staged[SSPP_MAX];
1157     int left_zpos_cnt = 0, right_zpos_cnt = 0;
1158     struct drm_rect crtc_rect = { 0 };
1159     bool needs_dirtyfb = dpu_crtc_needs_dirtyfb(crtc_state);
1160 
1161     pstates = kzalloc(sizeof(*pstates) * DPU_STAGE_MAX * 4, GFP_KERNEL);
1162 
1163     if (!crtc_state->enable || !crtc_state->active) {
1164         DRM_DEBUG_ATOMIC("crtc%d -> enable %d, active %d, skip atomic_check\n",
1165                 crtc->base.id, crtc_state->enable,
1166                 crtc_state->active);
1167         memset(&cstate->new_perf, 0, sizeof(cstate->new_perf));
1168         goto end;
1169     }
1170 
1171     mode = &crtc_state->adjusted_mode;
1172     DRM_DEBUG_ATOMIC("%s: check\n", dpu_crtc->name);
1173 
1174     /* force a full mode set if active state changed */
1175     if (crtc_state->active_changed)
1176         crtc_state->mode_changed = true;
1177 
1178     memset(pipe_staged, 0, sizeof(pipe_staged));
1179 
1180     if (cstate->num_mixers) {
1181         mixer_width = mode->hdisplay / cstate->num_mixers;
1182 
1183         _dpu_crtc_setup_lm_bounds(crtc, crtc_state);
1184     }
1185 
1186     crtc_rect.x2 = mode->hdisplay;
1187     crtc_rect.y2 = mode->vdisplay;
1188 
1189      /* get plane state for all drm planes associated with crtc state */
1190     drm_atomic_crtc_state_for_each_plane_state(plane, pstate, crtc_state) {
1191         struct dpu_plane_state *dpu_pstate = to_dpu_plane_state(pstate);
1192         struct drm_rect dst, clip = crtc_rect;
1193 
1194         if (IS_ERR_OR_NULL(pstate)) {
1195             rc = PTR_ERR(pstate);
1196             DPU_ERROR("%s: failed to get plane%d state, %d\n",
1197                     dpu_crtc->name, plane->base.id, rc);
1198             goto end;
1199         }
1200         if (cnt >= DPU_STAGE_MAX * 4)
1201             continue;
1202 
1203         if (!pstate->visible)
1204             continue;
1205 
1206         pstates[cnt].dpu_pstate = dpu_pstate;
1207         pstates[cnt].drm_pstate = pstate;
1208         pstates[cnt].stage = pstate->normalized_zpos;
1209         pstates[cnt].pipe_id = dpu_plane_pipe(plane);
1210 
1211         dpu_pstate->needs_dirtyfb = needs_dirtyfb;
1212 
1213         if (pipe_staged[pstates[cnt].pipe_id]) {
1214             multirect_plane[multirect_count].r0 =
1215                 pipe_staged[pstates[cnt].pipe_id];
1216             multirect_plane[multirect_count].r1 = pstate;
1217             multirect_count++;
1218 
1219             pipe_staged[pstates[cnt].pipe_id] = NULL;
1220         } else {
1221             pipe_staged[pstates[cnt].pipe_id] = pstate;
1222         }
1223 
1224         cnt++;
1225 
1226         dst = drm_plane_state_dest(pstate);
1227         if (!drm_rect_intersect(&clip, &dst)) {
1228             DPU_ERROR("invalid vertical/horizontal destination\n");
1229             DPU_ERROR("display: " DRM_RECT_FMT " plane: "
1230                   DRM_RECT_FMT "\n", DRM_RECT_ARG(&crtc_rect),
1231                   DRM_RECT_ARG(&dst));
1232             rc = -E2BIG;
1233             goto end;
1234         }
1235     }
1236 
1237     for (i = 1; i < SSPP_MAX; i++) {
1238         if (pipe_staged[i]) {
1239             dpu_plane_clear_multirect(pipe_staged[i]);
1240 
1241             if (is_dpu_plane_virtual(pipe_staged[i]->plane)) {
1242                 DPU_ERROR(
1243                     "r1 only virt plane:%d not supported\n",
1244                     pipe_staged[i]->plane->base.id);
1245                 rc  = -EINVAL;
1246                 goto end;
1247             }
1248         }
1249     }
1250 
1251     z_pos = -1;
1252     for (i = 0; i < cnt; i++) {
1253         /* reset counts at every new blend stage */
1254         if (pstates[i].stage != z_pos) {
1255             left_zpos_cnt = 0;
1256             right_zpos_cnt = 0;
1257             z_pos = pstates[i].stage;
1258         }
1259 
1260         /* verify z_pos setting before using it */
1261         if (z_pos >= DPU_STAGE_MAX - DPU_STAGE_0) {
1262             DPU_ERROR("> %d plane stages assigned\n",
1263                     DPU_STAGE_MAX - DPU_STAGE_0);
1264             rc = -EINVAL;
1265             goto end;
1266         } else if (pstates[i].drm_pstate->crtc_x < mixer_width) {
1267             if (left_zpos_cnt == 2) {
1268                 DPU_ERROR("> 2 planes @ stage %d on left\n",
1269                     z_pos);
1270                 rc = -EINVAL;
1271                 goto end;
1272             }
1273             left_zpos_cnt++;
1274 
1275         } else {
1276             if (right_zpos_cnt == 2) {
1277                 DPU_ERROR("> 2 planes @ stage %d on right\n",
1278                     z_pos);
1279                 rc = -EINVAL;
1280                 goto end;
1281             }
1282             right_zpos_cnt++;
1283         }
1284 
1285         pstates[i].dpu_pstate->stage = z_pos + DPU_STAGE_0;
1286         DRM_DEBUG_ATOMIC("%s: zpos %d\n", dpu_crtc->name, z_pos);
1287     }
1288 
1289     for (i = 0; i < multirect_count; i++) {
1290         if (dpu_plane_validate_multirect_v2(&multirect_plane[i])) {
1291             DPU_ERROR(
1292             "multirect validation failed for planes (%d - %d)\n",
1293                     multirect_plane[i].r0->plane->base.id,
1294                     multirect_plane[i].r1->plane->base.id);
1295             rc = -EINVAL;
1296             goto end;
1297         }
1298     }
1299 
1300     atomic_inc(&_dpu_crtc_get_kms(crtc)->bandwidth_ref);
1301 
1302     rc = dpu_core_perf_crtc_check(crtc, crtc_state);
1303     if (rc) {
1304         DPU_ERROR("crtc%d failed performance check %d\n",
1305                 crtc->base.id, rc);
1306         goto end;
1307     }
1308 
1309     /* validate source split:
1310      * use pstates sorted by stage to check planes on same stage
1311      * we assume that all pipes are in source split so its valid to compare
1312      * without taking into account left/right mixer placement
1313      */
1314     for (i = 1; i < cnt; i++) {
1315         struct plane_state *prv_pstate, *cur_pstate;
1316         struct drm_rect left_rect, right_rect;
1317         int32_t left_pid, right_pid;
1318         int32_t stage;
1319 
1320         prv_pstate = &pstates[i - 1];
1321         cur_pstate = &pstates[i];
1322         if (prv_pstate->stage != cur_pstate->stage)
1323             continue;
1324 
1325         stage = cur_pstate->stage;
1326 
1327         left_pid = prv_pstate->dpu_pstate->base.plane->base.id;
1328         left_rect = drm_plane_state_dest(prv_pstate->drm_pstate);
1329 
1330         right_pid = cur_pstate->dpu_pstate->base.plane->base.id;
1331         right_rect = drm_plane_state_dest(cur_pstate->drm_pstate);
1332 
1333         if (right_rect.x1 < left_rect.x1) {
1334             swap(left_pid, right_pid);
1335             swap(left_rect, right_rect);
1336         }
1337 
1338         /**
1339          * - planes are enumerated in pipe-priority order such that
1340          *   planes with lower drm_id must be left-most in a shared
1341          *   blend-stage when using source split.
1342          * - planes in source split must be contiguous in width
1343          * - planes in source split must have same dest yoff and height
1344          */
1345         if (right_pid < left_pid) {
1346             DPU_ERROR(
1347                 "invalid src split cfg. priority mismatch. stage: %d left: %d right: %d\n",
1348                 stage, left_pid, right_pid);
1349             rc = -EINVAL;
1350             goto end;
1351         } else if (right_rect.x1 != drm_rect_width(&left_rect)) {
1352             DPU_ERROR("non-contiguous coordinates for src split. "
1353                   "stage: %d left: " DRM_RECT_FMT " right: "
1354                   DRM_RECT_FMT "\n", stage,
1355                   DRM_RECT_ARG(&left_rect),
1356                   DRM_RECT_ARG(&right_rect));
1357             rc = -EINVAL;
1358             goto end;
1359         } else if (left_rect.y1 != right_rect.y1 ||
1360                drm_rect_height(&left_rect) != drm_rect_height(&right_rect)) {
1361             DPU_ERROR("source split at stage: %d. invalid "
1362                   "yoff/height: left: " DRM_RECT_FMT " right: "
1363                   DRM_RECT_FMT "\n", stage,
1364                   DRM_RECT_ARG(&left_rect),
1365                   DRM_RECT_ARG(&right_rect));
1366             rc = -EINVAL;
1367             goto end;
1368         }
1369     }
1370 
1371 end:
1372     kfree(pstates);
1373     return rc;
1374 }
1375 
1376 int dpu_crtc_vblank(struct drm_crtc *crtc, bool en)
1377 {
1378     struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
1379     struct drm_encoder *enc;
1380 
1381     trace_dpu_crtc_vblank(DRMID(&dpu_crtc->base), en, dpu_crtc);
1382 
1383     /*
1384      * Normally we would iterate through encoder_mask in crtc state to find
1385      * attached encoders. In this case, we might be disabling vblank _after_
1386      * encoder_mask has been cleared.
1387      *
1388      * Instead, we "assign" a crtc to the encoder in enable and clear it in
1389      * disable (which is also after encoder_mask is cleared). So instead of
1390      * using encoder mask, we'll ask the encoder to toggle itself iff it's
1391      * currently assigned to our crtc.
1392      *
1393      * Note also that this function cannot be called while crtc is disabled
1394      * since we use drm_crtc_vblank_on/off. So we don't need to worry
1395      * about the assigned crtcs being inconsistent with the current state
1396      * (which means no need to worry about modeset locks).
1397      */
1398     list_for_each_entry(enc, &crtc->dev->mode_config.encoder_list, head) {
1399         trace_dpu_crtc_vblank_enable(DRMID(crtc), DRMID(enc), en,
1400                          dpu_crtc);
1401 
1402         dpu_encoder_toggle_vblank_for_crtc(enc, crtc, en);
1403     }
1404 
1405     return 0;
1406 }
1407 
1408 #ifdef CONFIG_DEBUG_FS
1409 static int _dpu_debugfs_status_show(struct seq_file *s, void *data)
1410 {
1411     struct dpu_crtc *dpu_crtc;
1412     struct dpu_plane_state *pstate = NULL;
1413     struct dpu_crtc_mixer *m;
1414 
1415     struct drm_crtc *crtc;
1416     struct drm_plane *plane;
1417     struct drm_display_mode *mode;
1418     struct drm_framebuffer *fb;
1419     struct drm_plane_state *state;
1420     struct dpu_crtc_state *cstate;
1421 
1422     int i, out_width;
1423 
1424     dpu_crtc = s->private;
1425     crtc = &dpu_crtc->base;
1426 
1427     drm_modeset_lock_all(crtc->dev);
1428     cstate = to_dpu_crtc_state(crtc->state);
1429 
1430     mode = &crtc->state->adjusted_mode;
1431     out_width = mode->hdisplay / cstate->num_mixers;
1432 
1433     seq_printf(s, "crtc:%d width:%d height:%d\n", crtc->base.id,
1434                 mode->hdisplay, mode->vdisplay);
1435 
1436     seq_puts(s, "\n");
1437 
1438     for (i = 0; i < cstate->num_mixers; ++i) {
1439         m = &cstate->mixers[i];
1440         seq_printf(s, "\tmixer:%d ctl:%d width:%d height:%d\n",
1441             m->hw_lm->idx - LM_0, m->lm_ctl->idx - CTL_0,
1442             out_width, mode->vdisplay);
1443     }
1444 
1445     seq_puts(s, "\n");
1446 
1447     drm_atomic_crtc_for_each_plane(plane, crtc) {
1448         pstate = to_dpu_plane_state(plane->state);
1449         state = plane->state;
1450 
1451         if (!pstate || !state)
1452             continue;
1453 
1454         seq_printf(s, "\tplane:%u stage:%d\n", plane->base.id,
1455             pstate->stage);
1456 
1457         if (plane->state->fb) {
1458             fb = plane->state->fb;
1459 
1460             seq_printf(s, "\tfb:%d image format:%4.4s wxh:%ux%u ",
1461                 fb->base.id, (char *) &fb->format->format,
1462                 fb->width, fb->height);
1463             for (i = 0; i < ARRAY_SIZE(fb->format->cpp); ++i)
1464                 seq_printf(s, "cpp[%d]:%u ",
1465                         i, fb->format->cpp[i]);
1466             seq_puts(s, "\n\t");
1467 
1468             seq_printf(s, "modifier:%8llu ", fb->modifier);
1469             seq_puts(s, "\n");
1470 
1471             seq_puts(s, "\t");
1472             for (i = 0; i < ARRAY_SIZE(fb->pitches); i++)
1473                 seq_printf(s, "pitches[%d]:%8u ", i,
1474                             fb->pitches[i]);
1475             seq_puts(s, "\n");
1476 
1477             seq_puts(s, "\t");
1478             for (i = 0; i < ARRAY_SIZE(fb->offsets); i++)
1479                 seq_printf(s, "offsets[%d]:%8u ", i,
1480                             fb->offsets[i]);
1481             seq_puts(s, "\n");
1482         }
1483 
1484         seq_printf(s, "\tsrc_x:%4d src_y:%4d src_w:%4d src_h:%4d\n",
1485             state->src_x, state->src_y, state->src_w, state->src_h);
1486 
1487         seq_printf(s, "\tdst x:%4d dst_y:%4d dst_w:%4d dst_h:%4d\n",
1488             state->crtc_x, state->crtc_y, state->crtc_w,
1489             state->crtc_h);
1490         seq_printf(s, "\tmultirect: mode: %d index: %d\n",
1491             pstate->multirect_mode, pstate->multirect_index);
1492 
1493         seq_puts(s, "\n");
1494     }
1495     if (dpu_crtc->vblank_cb_count) {
1496         ktime_t diff = ktime_sub(ktime_get(), dpu_crtc->vblank_cb_time);
1497         s64 diff_ms = ktime_to_ms(diff);
1498         s64 fps = diff_ms ? div_s64(
1499                 dpu_crtc->vblank_cb_count * 1000, diff_ms) : 0;
1500 
1501         seq_printf(s,
1502             "vblank fps:%lld count:%u total:%llums total_framecount:%llu\n",
1503                 fps, dpu_crtc->vblank_cb_count,
1504                 ktime_to_ms(diff), dpu_crtc->play_count);
1505 
1506         /* reset time & count for next measurement */
1507         dpu_crtc->vblank_cb_count = 0;
1508         dpu_crtc->vblank_cb_time = ktime_set(0, 0);
1509     }
1510 
1511     drm_modeset_unlock_all(crtc->dev);
1512 
1513     return 0;
1514 }
1515 
1516 DEFINE_SHOW_ATTRIBUTE(_dpu_debugfs_status);
1517 
1518 static int dpu_crtc_debugfs_state_show(struct seq_file *s, void *v)
1519 {
1520     struct drm_crtc *crtc = (struct drm_crtc *) s->private;
1521     struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
1522 
1523     seq_printf(s, "client type: %d\n", dpu_crtc_get_client_type(crtc));
1524     seq_printf(s, "intf_mode: %d\n", dpu_crtc_get_intf_mode(crtc));
1525     seq_printf(s, "core_clk_rate: %llu\n",
1526             dpu_crtc->cur_perf.core_clk_rate);
1527     seq_printf(s, "bw_ctl: %llu\n", dpu_crtc->cur_perf.bw_ctl);
1528     seq_printf(s, "max_per_pipe_ib: %llu\n",
1529                 dpu_crtc->cur_perf.max_per_pipe_ib);
1530 
1531     return 0;
1532 }
1533 DEFINE_SHOW_ATTRIBUTE(dpu_crtc_debugfs_state);
1534 
1535 static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
1536 {
1537     struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
1538     struct dentry *debugfs_root;
1539 
1540     debugfs_root = debugfs_create_dir(dpu_crtc->name,
1541             crtc->dev->primary->debugfs_root);
1542 
1543     debugfs_create_file("status", 0400,
1544             debugfs_root,
1545             dpu_crtc, &_dpu_debugfs_status_fops);
1546     debugfs_create_file("state", 0600,
1547             debugfs_root,
1548             &dpu_crtc->base,
1549             &dpu_crtc_debugfs_state_fops);
1550 
1551     return 0;
1552 }
1553 #else
/* CONFIG_DEBUG_FS disabled: no debugfs entries to create */
static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
{
	return 0;
}
1558 #endif /* CONFIG_DEBUG_FS */
1559 
/* drm_crtc_funcs.late_register hook: set up debugfs once the crtc is registered */
static int dpu_crtc_late_register(struct drm_crtc *crtc)
{
	return _dpu_crtc_init_debugfs(crtc);
}
1564 
/* CRTC vtable: legacy entry points routed through the atomic helpers */
static const struct drm_crtc_funcs dpu_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = dpu_crtc_destroy,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = dpu_crtc_reset,
	.atomic_duplicate_state = dpu_crtc_duplicate_state,
	.atomic_destroy_state = dpu_crtc_destroy_state,
	.atomic_print_state = dpu_crtc_atomic_print_state,
	.late_register = dpu_crtc_late_register,
	.verify_crc_source = dpu_crtc_verify_crc_source,
	.set_crc_source = dpu_crtc_set_crc_source,
	.enable_vblank  = msm_crtc_enable_vblank,
	.disable_vblank = msm_crtc_disable_vblank,
	.get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
	.get_vblank_counter = dpu_crtc_get_vblank_counter,
};
1581 
/* Atomic helper vtable: per-commit-phase hooks for this CRTC */
static const struct drm_crtc_helper_funcs dpu_crtc_helper_funcs = {
	.atomic_disable = dpu_crtc_disable,
	.atomic_enable = dpu_crtc_enable,
	.atomic_check = dpu_crtc_atomic_check,
	.atomic_begin = dpu_crtc_atomic_begin,
	.atomic_flush = dpu_crtc_atomic_flush,
	.get_scanout_position = dpu_crtc_get_scanout_position,
};
1590 
1591 /* initialize crtc */
1592 struct drm_crtc *dpu_crtc_init(struct drm_device *dev, struct drm_plane *plane,
1593                 struct drm_plane *cursor)
1594 {
1595     struct drm_crtc *crtc = NULL;
1596     struct dpu_crtc *dpu_crtc = NULL;
1597     int i;
1598 
1599     dpu_crtc = kzalloc(sizeof(*dpu_crtc), GFP_KERNEL);
1600     if (!dpu_crtc)
1601         return ERR_PTR(-ENOMEM);
1602 
1603     crtc = &dpu_crtc->base;
1604     crtc->dev = dev;
1605 
1606     spin_lock_init(&dpu_crtc->spin_lock);
1607     atomic_set(&dpu_crtc->frame_pending, 0);
1608 
1609     init_completion(&dpu_crtc->frame_done_comp);
1610 
1611     INIT_LIST_HEAD(&dpu_crtc->frame_event_list);
1612 
1613     for (i = 0; i < ARRAY_SIZE(dpu_crtc->frame_events); i++) {
1614         INIT_LIST_HEAD(&dpu_crtc->frame_events[i].list);
1615         list_add(&dpu_crtc->frame_events[i].list,
1616                 &dpu_crtc->frame_event_list);
1617         kthread_init_work(&dpu_crtc->frame_events[i].work,
1618                 dpu_crtc_frame_event_work);
1619     }
1620 
1621     drm_crtc_init_with_planes(dev, crtc, plane, cursor, &dpu_crtc_funcs,
1622                 NULL);
1623 
1624     drm_crtc_helper_add(crtc, &dpu_crtc_helper_funcs);
1625 
1626     drm_crtc_enable_color_mgmt(crtc, 0, true, 0);
1627 
1628     /* save user friendly CRTC name for later */
1629     snprintf(dpu_crtc->name, DPU_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);
1630 
1631     /* initialize event handling */
1632     spin_lock_init(&dpu_crtc->event_lock);
1633 
1634     DRM_DEBUG_KMS("%s: successfully initialized crtc\n", dpu_crtc->name);
1635     return crtc;
1636 }