0001 /*
0002  * Copyright © 2018 Intel Corporation
0003  *
0004  * Permission is hereby granted, free of charge, to any person obtaining a
0005  * copy of this software and associated documentation files (the "Software"),
0006  * to deal in the Software without restriction, including without limitation
0007  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
0008  * and/or sell copies of the Software, and to permit persons to whom the
0009  * Software is furnished to do so, subject to the following conditions:
0010  *
0011  * The above copyright notice and this permission notice (including the next
0012  * paragraph) shall be included in all copies or substantial portions of the
0013  * Software.
0014  *
0015  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
0016  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
0017  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
0018  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
0019  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
0020  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
0021  * DEALINGS IN THE SOFTWARE.
0022  *
0023  * Authors:
0024  *   Madhav Chauhan <madhav.chauhan@intel.com>
0025  *   Jani Nikula <jani.nikula@intel.com>
0026  */
0027 
0028 #include <drm/display/drm_dsc_helper.h>
0029 #include <drm/drm_atomic_helper.h>
0030 #include <drm/drm_mipi_dsi.h>
0031 
0032 #include "icl_dsi.h"
0033 #include "icl_dsi_regs.h"
0034 #include "intel_atomic.h"
0035 #include "intel_backlight.h"
0036 #include "intel_combo_phy.h"
0037 #include "intel_combo_phy_regs.h"
0038 #include "intel_connector.h"
0039 #include "intel_crtc.h"
0040 #include "intel_ddi.h"
0041 #include "intel_de.h"
0042 #include "intel_dsi.h"
0043 #include "intel_dsi_vbt.h"
0044 #include "intel_panel.h"
0045 #include "intel_vdsc.h"
0046 #include "skl_scaler.h"
0047 #include "skl_universal_plane.h"
0048 
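     /*
      * The DSI command FIFO is credit based: the helpers below read the
      * free header and payload credit counts from DSI_CMD_TXCTL and poll
      * for up to 100 us until enough credits have been released before
      * queueing more command data.
      */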
0049 static int header_credits_available(struct drm_i915_private *dev_priv,
0050                     enum transcoder dsi_trans)
0051 {
0052     return (intel_de_read(dev_priv, DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK)
0053         >> FREE_HEADER_CREDIT_SHIFT;
0054 }
0055 
0056 static int payload_credits_available(struct drm_i915_private *dev_priv,
0057                      enum transcoder dsi_trans)
0058 {
0059     return (intel_de_read(dev_priv, DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK)
0060         >> FREE_PLOAD_CREDIT_SHIFT;
0061 }
0062 
0063 static bool wait_for_header_credits(struct drm_i915_private *dev_priv,
0064                     enum transcoder dsi_trans, int hdr_credit)
0065 {
0066     if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
0067             hdr_credit, 100)) {
0068         drm_err(&dev_priv->drm, "DSI header credits not released\n");
0069         return false;
0070     }
0071 
0072     return true;
0073 }
0074 
0075 static bool wait_for_payload_credits(struct drm_i915_private *dev_priv,
0076                      enum transcoder dsi_trans, int payld_credit)
0077 {
0078     if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
0079             payld_credit, 100)) {
0080         drm_err(&dev_priv->drm, "DSI payload credits not released\n");
0081         return false;
0082     }
0083 
0084     return true;
0085 }
0086 
0087 static enum transcoder dsi_port_to_transcoder(enum port port)
0088 {
0089     if (port == PORT_A)
0090         return TRANSCODER_DSI_0;
0091     else
0092         return TRANSCODER_DSI_1;
0093 }
0094 
0095 static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
0096 {
0097     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0098     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0099     struct mipi_dsi_device *dsi;
0100     enum port port;
0101     enum transcoder dsi_trans;
0102     int ret;
0103 
0104     /* wait for header/payload credits to be released */
0105     for_each_dsi_port(port, intel_dsi->ports) {
0106         dsi_trans = dsi_port_to_transcoder(port);
0107         wait_for_header_credits(dev_priv, dsi_trans, MAX_HEADER_CREDIT);
0108         wait_for_payload_credits(dev_priv, dsi_trans, MAX_PLOAD_CREDIT);
0109     }
0110 
0111     /* send nop DCS command */
0112     for_each_dsi_port(port, intel_dsi->ports) {
0113         dsi = intel_dsi->dsi_hosts[port]->device;
0114         dsi->mode_flags |= MIPI_DSI_MODE_LPM;
0115         dsi->channel = 0;
0116         ret = mipi_dsi_dcs_nop(dsi);
0117         if (ret < 0)
0118             drm_err(&dev_priv->drm,
0119                 "error sending DCS NOP command\n");
0120     }
0121 
0122     /* wait for header credits to be released */
0123     for_each_dsi_port(port, intel_dsi->ports) {
0124         dsi_trans = dsi_port_to_transcoder(port);
0125         wait_for_header_credits(dev_priv, dsi_trans, MAX_HEADER_CREDIT);
0126     }
0127 
0128     /* wait for LP TX in progress bit to be cleared */
0129     for_each_dsi_port(port, intel_dsi->ports) {
0130         dsi_trans = dsi_port_to_transcoder(port);
0131         if (wait_for_us(!(intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans)) &
0132                   LPTX_IN_PROGRESS), 20))
0133             drm_err(&dev_priv->drm, "LPTX bit not cleared\n");
0134     }
0135 }
0136 
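     /*
      * Sending a MIPI DSI packet is split in two: dsi_send_pkt_payld()
      * writes the (optional) payload into the transmit payload FIFO four
      * bytes per register write, and dsi_send_pkt_hdr() writes the packet
      * header (virtual channel, data type, word count, LP/HS selection)
      * to DSI_CMD_TXHDR. Both wait for FIFO credits before writing.
      */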
0137 static int dsi_send_pkt_payld(struct intel_dsi_host *host,
0138                   const struct mipi_dsi_packet *packet)
0139 {
0140     struct intel_dsi *intel_dsi = host->intel_dsi;
0141     struct drm_i915_private *i915 = to_i915(intel_dsi->base.base.dev);
0142     enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
0143     const u8 *data = packet->payload;
0144     u32 len = packet->payload_length;
0145     int i, j;
0146 
0147     /* payload queue can accept *256 bytes*, check limit */
0148     if (len > MAX_PLOAD_CREDIT * 4) {
0149         drm_err(&i915->drm, "payload size exceeds max queue limit\n");
0150         return -EINVAL;
0151     }
0152 
0153     for (i = 0; i < len; i += 4) {
0154         u32 tmp = 0;
0155 
0156         if (!wait_for_payload_credits(i915, dsi_trans, 1))
0157             return -EBUSY;
0158 
0159         for (j = 0; j < min_t(u32, len - i, 4); j++)
0160             tmp |= *data++ << 8 * j;
0161 
0162         intel_de_write(i915, DSI_CMD_TXPYLD(dsi_trans), tmp);
0163     }
0164 
0165     return 0;
0166 }
0167 
0168 static int dsi_send_pkt_hdr(struct intel_dsi_host *host,
0169                 const struct mipi_dsi_packet *packet,
0170                 bool enable_lpdt)
0171 {
0172     struct intel_dsi *intel_dsi = host->intel_dsi;
0173     struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
0174     enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
0175     u32 tmp;
0176 
0177     if (!wait_for_header_credits(dev_priv, dsi_trans, 1))
0178         return -EBUSY;
0179 
0180     tmp = intel_de_read(dev_priv, DSI_CMD_TXHDR(dsi_trans));
0181 
0182     if (packet->payload)
0183         tmp |= PAYLOAD_PRESENT;
0184     else
0185         tmp &= ~PAYLOAD_PRESENT;
0186 
0187     tmp &= ~VBLANK_FENCE;
0188 
0189     if (enable_lpdt)
0190         tmp |= LP_DATA_TRANSFER;
0191     else
0192         tmp &= ~LP_DATA_TRANSFER;
0193 
0194     tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK);
0195     tmp |= ((packet->header[0] & VC_MASK) << VC_SHIFT);
0196     tmp |= ((packet->header[0] & DT_MASK) << DT_SHIFT);
0197     tmp |= (packet->header[1] << PARAM_WC_LOWER_SHIFT);
0198     tmp |= (packet->header[2] << PARAM_WC_UPPER_SHIFT);
0199     intel_de_write(dev_priv, DSI_CMD_TXHDR(dsi_trans), tmp);
0200 
0201     return 0;
0202 }
0203 
0204 void icl_dsi_frame_update(struct intel_crtc_state *crtc_state)
0205 {
0206     struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
0207     struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
0208     u32 tmp, mode_flags;
0209     enum port port;
0210 
0211     mode_flags = crtc_state->mode_flags;
0212 
0213     /*
0214      * case 1 also covers dual link
0215      * In case of dual link, frame update should be set on
0216      * DSI_0
0217      */
0218     if (mode_flags & I915_MODE_FLAG_DSI_USE_TE0)
0219         port = PORT_A;
0220     else if (mode_flags & I915_MODE_FLAG_DSI_USE_TE1)
0221         port = PORT_B;
0222     else
0223         return;
0224 
0225     tmp = intel_de_read(dev_priv, DSI_CMD_FRMCTL(port));
0226     tmp |= DSI_FRAME_UPDATE_REQUEST;
0227     intel_de_write(dev_priv, DSI_CMD_FRMCTL(port), tmp);
0228 }
0229 
0230 static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder)
0231 {
0232     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0233     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0234     enum phy phy;
0235     u32 tmp;
0236     int lane;
0237 
0238     for_each_dsi_phy(phy, intel_dsi->phys) {
0239         /*
0240          * Program voltage swing and pre-emphasis level values as per
0241          * table in BSPEC under DDI buffer programming
0242          */
0243         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy));
0244         tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
0245         tmp |= SCALING_MODE_SEL(0x2);
0246         tmp |= TAP2_DISABLE | TAP3_DISABLE;
0247         tmp |= RTERM_SELECT(0x6);
0248         intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);
0249 
0250         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
0251         tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
0252         tmp |= SCALING_MODE_SEL(0x2);
0253         tmp |= TAP2_DISABLE | TAP3_DISABLE;
0254         tmp |= RTERM_SELECT(0x6);
0255         intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);
0256 
0257         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN(0, phy));
0258         tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
0259              RCOMP_SCALAR_MASK);
0260         tmp |= SWING_SEL_UPPER(0x2);
0261         tmp |= SWING_SEL_LOWER(0x2);
0262         tmp |= RCOMP_SCALAR(0x98);
0263         intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), tmp);
0264 
0265         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_AUX(phy));
0266         tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
0267              RCOMP_SCALAR_MASK);
0268         tmp |= SWING_SEL_UPPER(0x2);
0269         tmp |= SWING_SEL_LOWER(0x2);
0270         tmp |= RCOMP_SCALAR(0x98);
0271         intel_de_write(dev_priv, ICL_PORT_TX_DW2_AUX(phy), tmp);
0272 
0273         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW4_AUX(phy));
0274         tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
0275              CURSOR_COEFF_MASK);
0276         tmp |= POST_CURSOR_1(0x0);
0277         tmp |= POST_CURSOR_2(0x0);
0278         tmp |= CURSOR_COEFF(0x3f);
0279         intel_de_write(dev_priv, ICL_PORT_TX_DW4_AUX(phy), tmp);
0280 
0281         for (lane = 0; lane <= 3; lane++) {
0282             /* Bspec: must not use GRP register for write */
0283             tmp = intel_de_read(dev_priv,
0284                         ICL_PORT_TX_DW4_LN(lane, phy));
0285             tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
0286                  CURSOR_COEFF_MASK);
0287             tmp |= POST_CURSOR_1(0x0);
0288             tmp |= POST_CURSOR_2(0x0);
0289             tmp |= CURSOR_COEFF(0x3f);
0290             intel_de_write(dev_priv,
0291                        ICL_PORT_TX_DW4_LN(lane, phy), tmp);
0292         }
0293     }
0294 }
0295 
0296 static void configure_dual_link_mode(struct intel_encoder *encoder,
0297                      const struct intel_crtc_state *pipe_config)
0298 {
0299     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0300     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0301     u32 dss_ctl1;
0302 
0303     dss_ctl1 = intel_de_read(dev_priv, DSS_CTL1);
0304     dss_ctl1 |= SPLITTER_ENABLE;
0305     dss_ctl1 &= ~OVERLAP_PIXELS_MASK;
0306     dss_ctl1 |= OVERLAP_PIXELS(intel_dsi->pixel_overlap);
0307 
0308     if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) {
0309         const struct drm_display_mode *adjusted_mode =
0310                     &pipe_config->hw.adjusted_mode;
0311         u32 dss_ctl2;
0312         u16 hactive = adjusted_mode->crtc_hdisplay;
0313         u16 dl_buffer_depth;
0314 
0315         dss_ctl1 &= ~DUAL_LINK_MODE_INTERLEAVE;
0316         dl_buffer_depth = hactive / 2 + intel_dsi->pixel_overlap;
0317 
0318         if (dl_buffer_depth > MAX_DL_BUFFER_TARGET_DEPTH)
0319             drm_err(&dev_priv->drm,
0320                 "DL buffer depth exceeds max value\n");
0321 
0322         dss_ctl1 &= ~LEFT_DL_BUF_TARGET_DEPTH_MASK;
0323         dss_ctl1 |= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
0324         dss_ctl2 = intel_de_read(dev_priv, DSS_CTL2);
0325         dss_ctl2 &= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK;
0326         dss_ctl2 |= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
0327         intel_de_write(dev_priv, DSS_CTL2, dss_ctl2);
0328     } else {
0329         /* Interleave */
0330         dss_ctl1 |= DUAL_LINK_MODE_INTERLEAVE;
0331     }
0332 
0333     intel_de_write(dev_priv, DSS_CTL1, dss_ctl1);
0334 }
0335 
0336 /* aka DSI 8X clock: pclk (kHz) * bpp / lane count, using the compressed bpp when DSC is enabled */
0337 static int afe_clk(struct intel_encoder *encoder,
0338            const struct intel_crtc_state *crtc_state)
0339 {
0340     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0341     int bpp;
0342 
0343     if (crtc_state->dsc.compression_enable)
0344         bpp = crtc_state->dsc.compressed_bpp;
0345     else
0346         bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
0347 
0348     return DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp, intel_dsi->lane_count);
0349 }
0350 
0351 static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder,
0352                       const struct intel_crtc_state *crtc_state)
0353 {
0354     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0355     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0356     enum port port;
0357     int afe_clk_khz;
0358     int theo_word_clk, act_word_clk;
0359     u32 esc_clk_div_m, esc_clk_div_m_phy;
0360 
0361     afe_clk_khz = afe_clk(encoder, crtc_state);
0362 
0363     if (IS_ALDERLAKE_S(dev_priv) || IS_ALDERLAKE_P(dev_priv)) {
0364         theo_word_clk = DIV_ROUND_UP(afe_clk_khz, 8 * DSI_MAX_ESC_CLK);
0365         act_word_clk = max(3, theo_word_clk + (theo_word_clk + 1) % 2); /* round up to an odd value, min 3 */
0366         esc_clk_div_m = act_word_clk * 8;
0367         esc_clk_div_m_phy = (act_word_clk - 1) / 2;
0368     } else {
0369         esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);
0370     }
0371 
0372     for_each_dsi_port(port, intel_dsi->ports) {
0373         intel_de_write(dev_priv, ICL_DSI_ESC_CLK_DIV(port),
0374                    esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
0375         intel_de_posting_read(dev_priv, ICL_DSI_ESC_CLK_DIV(port));
0376     }
0377 
0378     for_each_dsi_port(port, intel_dsi->ports) {
0379         intel_de_write(dev_priv, ICL_DPHY_ESC_CLK_DIV(port),
0380                    esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
0381         intel_de_posting_read(dev_priv, ICL_DPHY_ESC_CLK_DIV(port));
0382     }
0383 
0384     if (IS_ALDERLAKE_S(dev_priv) || IS_ALDERLAKE_P(dev_priv)) {
0385         for_each_dsi_port(port, intel_dsi->ports) {
0386             intel_de_write(dev_priv, ADL_MIPIO_DW(port, 8),
0387                        esc_clk_div_m_phy & TX_ESC_CLK_DIV_PHY);
0388             intel_de_posting_read(dev_priv, ADL_MIPIO_DW(port, 8));
0389         }
0390     }
0391 }
0392 
0393 static void get_dsi_io_power_domains(struct drm_i915_private *dev_priv,
0394                      struct intel_dsi *intel_dsi)
0395 {
0396     enum port port;
0397 
0398     for_each_dsi_port(port, intel_dsi->ports) {
0399         drm_WARN_ON(&dev_priv->drm, intel_dsi->io_wakeref[port]);
0400         intel_dsi->io_wakeref[port] =
0401             intel_display_power_get(dev_priv,
0402                         port == PORT_A ?
0403                         POWER_DOMAIN_PORT_DDI_IO_A :
0404                         POWER_DOMAIN_PORT_DDI_IO_B);
0405     }
0406 }
0407 
0408 static void gen11_dsi_enable_io_power(struct intel_encoder *encoder)
0409 {
0410     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0411     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0412     enum port port;
0413     u32 tmp;
0414 
0415     for_each_dsi_port(port, intel_dsi->ports) {
0416         tmp = intel_de_read(dev_priv, ICL_DSI_IO_MODECTL(port));
0417         tmp |= COMBO_PHY_MODE_DSI;
0418         intel_de_write(dev_priv, ICL_DSI_IO_MODECTL(port), tmp);
0419     }
0420 
0421     get_dsi_io_power_domains(dev_priv, intel_dsi);
0422 }
0423 
0424 static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder)
0425 {
0426     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0427     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0428     enum phy phy;
0429 
0430     for_each_dsi_phy(phy, intel_dsi->phys)
0431         intel_combo_phy_power_up_lanes(dev_priv, phy, true,
0432                            intel_dsi->lane_count, false);
0433 }
0434 
0435 static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder)
0436 {
0437     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0438     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0439     enum phy phy;
0440     u32 tmp;
0441     int lane;
0442 
0443     /* Step 4b(i) set loadgen select for transmit and aux lanes */
0444     for_each_dsi_phy(phy, intel_dsi->phys) {
0445         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW4_AUX(phy));
0446         tmp &= ~LOADGEN_SELECT;
0447         intel_de_write(dev_priv, ICL_PORT_TX_DW4_AUX(phy), tmp);
0448         for (lane = 0; lane <= 3; lane++) {
0449             tmp = intel_de_read(dev_priv,
0450                         ICL_PORT_TX_DW4_LN(lane, phy));
0451             tmp &= ~LOADGEN_SELECT;
0452             if (lane != 2)
0453                 tmp |= LOADGEN_SELECT;
0454             intel_de_write(dev_priv,
0455                        ICL_PORT_TX_DW4_LN(lane, phy), tmp);
0456         }
0457     }
0458 
0459     /* Step 4b(ii) set latency optimization for transmit and aux lanes */
0460     for_each_dsi_phy(phy, intel_dsi->phys) {
0461         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_AUX(phy));
0462         tmp &= ~FRC_LATENCY_OPTIM_MASK;
0463         tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
0464         intel_de_write(dev_priv, ICL_PORT_TX_DW2_AUX(phy), tmp);
0465         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN(0, phy));
0466         tmp &= ~FRC_LATENCY_OPTIM_MASK;
0467         tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
0468         intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), tmp);
0469 
0470         /* For EHL, TGL, set latency optimization for PCS_DW1 lanes */
0471         if (IS_JSL_EHL(dev_priv) || (DISPLAY_VER(dev_priv) >= 12)) {
0472             tmp = intel_de_read(dev_priv,
0473                         ICL_PORT_PCS_DW1_AUX(phy));
0474             tmp &= ~LATENCY_OPTIM_MASK;
0475             tmp |= LATENCY_OPTIM_VAL(0);
0476             intel_de_write(dev_priv, ICL_PORT_PCS_DW1_AUX(phy),
0477                        tmp);
0478 
0479             tmp = intel_de_read(dev_priv,
0480                         ICL_PORT_PCS_DW1_LN(0, phy));
0481             tmp &= ~LATENCY_OPTIM_MASK;
0482             tmp |= LATENCY_OPTIM_VAL(0x1);
0483             intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy),
0484                        tmp);
0485         }
0486     }
0487 
0488 }
0489 
0490 static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
0491 {
0492     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0493     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0494     u32 tmp;
0495     enum phy phy;
0496 
0497     /* clear common keeper enable bit */
0498     for_each_dsi_phy(phy, intel_dsi->phys) {
0499         tmp = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN(0, phy));
0500         tmp &= ~COMMON_KEEPER_EN;
0501         intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), tmp);
0502         tmp = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_AUX(phy));
0503         tmp &= ~COMMON_KEEPER_EN;
0504         intel_de_write(dev_priv, ICL_PORT_PCS_DW1_AUX(phy), tmp);
0505     }
0506 
0507     /*
0508      * Set SUS Clock Config bitfield to 11b
0509      * Note: loadgen select program is done
0510      * as part of lane phy sequence configuration
0511      */
0512     for_each_dsi_phy(phy, intel_dsi->phys) {
0513         tmp = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy));
0514         tmp |= SUS_CLOCK_CONFIG;
0515         intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), tmp);
0516     }
0517 
0518     /* Clear training enable to change swing values */
0519     for_each_dsi_phy(phy, intel_dsi->phys) {
0520         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy));
0521         tmp &= ~TX_TRAINING_EN;
0522         intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);
0523         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
0524         tmp &= ~TX_TRAINING_EN;
0525         intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);
0526     }
0527 
0528     /* Program swing and de-emphasis */
0529     dsi_program_swing_and_deemphasis(encoder);
0530 
0531     /* Set training enable to trigger update */
0532     for_each_dsi_phy(phy, intel_dsi->phys) {
0533         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy));
0534         tmp |= TX_TRAINING_EN;
0535         intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);
0536         tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
0537         tmp |= TX_TRAINING_EN;
0538         intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);
0539     }
0540 }
0541 
0542 static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder)
0543 {
0544     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0545     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0546     u32 tmp;
0547     enum port port;
0548 
0549     for_each_dsi_port(port, intel_dsi->ports) {
0550         tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
0551         tmp |= DDI_BUF_CTL_ENABLE;
0552         intel_de_write(dev_priv, DDI_BUF_CTL(port), tmp);
0553 
0554         if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
0555                   DDI_BUF_IS_IDLE),
0556                   500))
0557             drm_err(&dev_priv->drm, "DDI port:%c buffer still idle\n",
0558                 port_name(port));
0559     }
0560 }
0561 
0562 static void
0563 gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder,
0564                  const struct intel_crtc_state *crtc_state)
0565 {
0566     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0567     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0568     u32 tmp;
0569     enum port port;
0570     enum phy phy;
0571 
0572     /* Program T-INIT master registers */
0573     for_each_dsi_port(port, intel_dsi->ports) {
0574         tmp = intel_de_read(dev_priv, ICL_DSI_T_INIT_MASTER(port));
0575         tmp &= ~DSI_T_INIT_MASTER_MASK;
0576         tmp |= intel_dsi->init_count;
0577         intel_de_write(dev_priv, ICL_DSI_T_INIT_MASTER(port), tmp);
0578     }
0579 
0580     /* Program DPHY clock lanes timings */
0581     for_each_dsi_port(port, intel_dsi->ports) {
0582         intel_de_write(dev_priv, DPHY_CLK_TIMING_PARAM(port),
0583                    intel_dsi->dphy_reg);
0584 
0585         /* shadow register inside display core */
0586         intel_de_write(dev_priv, DSI_CLK_TIMING_PARAM(port),
0587                    intel_dsi->dphy_reg);
0588     }
0589 
0590     /* Program DPHY data lanes timings */
0591     for_each_dsi_port(port, intel_dsi->ports) {
0592         intel_de_write(dev_priv, DPHY_DATA_TIMING_PARAM(port),
0593                    intel_dsi->dphy_data_lane_reg);
0594 
0595         /* shadow register inside display core */
0596         intel_de_write(dev_priv, DSI_DATA_TIMING_PARAM(port),
0597                    intel_dsi->dphy_data_lane_reg);
0598     }
0599 
0600     /*
0601      * If the DSI link is operating at or below 800 MHz,
0602      * TA_SURE should be overridden and programmed to
0603      * a value of '0' inside TA_PARAM_REGISTERS; otherwise
0604      * leave all fields at HW default values.
0605      */
0606     if (DISPLAY_VER(dev_priv) == 11) {
0607         if (afe_clk(encoder, crtc_state) <= 800000) {
0608             for_each_dsi_port(port, intel_dsi->ports) {
0609                 tmp = intel_de_read(dev_priv,
0610                             DPHY_TA_TIMING_PARAM(port));
0611                 tmp &= ~TA_SURE_MASK;
0612                 tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
0613                 intel_de_write(dev_priv,
0614                            DPHY_TA_TIMING_PARAM(port),
0615                            tmp);
0616 
0617                 /* shadow register inside display core */
0618                 tmp = intel_de_read(dev_priv,
0619                             DSI_TA_TIMING_PARAM(port));
0620                 tmp &= ~TA_SURE_MASK;
0621                 tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
0622                 intel_de_write(dev_priv,
0623                            DSI_TA_TIMING_PARAM(port), tmp);
0624             }
0625         }
0626     }
0627 
0628     if (IS_JSL_EHL(dev_priv)) {
0629         for_each_dsi_phy(phy, intel_dsi->phys) {
0630             tmp = intel_de_read(dev_priv, ICL_DPHY_CHKN(phy));
0631             tmp |= ICL_DPHY_CHKN_AFE_OVER_PPI_STRAP;
0632             intel_de_write(dev_priv, ICL_DPHY_CHKN(phy), tmp);
0633         }
0634     }
0635 }
0636 
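     /*
      * DDI clock gating for the DSI PHYs is controlled through the
      * per-PHY DDI_CLK_OFF bits in ICL_DPCLKA_CFGCR0; updates are
      * serialized against other users of the register by the DPLL lock.
      */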
0637 static void gen11_dsi_gate_clocks(struct intel_encoder *encoder)
0638 {
0639     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0640     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0641     u32 tmp;
0642     enum phy phy;
0643 
0644     mutex_lock(&dev_priv->dpll.lock);
0645     tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
0646     for_each_dsi_phy(phy, intel_dsi->phys)
0647         tmp |= ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
0648 
0649     intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, tmp);
0650     mutex_unlock(&dev_priv->dpll.lock);
0651 }
0652 
0653 static void gen11_dsi_ungate_clocks(struct intel_encoder *encoder)
0654 {
0655     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0656     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0657     u32 tmp;
0658     enum phy phy;
0659 
0660     mutex_lock(&dev_priv->dpll.lock);
0661     tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
0662     for_each_dsi_phy(phy, intel_dsi->phys)
0663         tmp &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
0664 
0665     intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, tmp);
0666     mutex_unlock(&dev_priv->dpll.lock);
0667 }
0668 
0669 static bool gen11_dsi_is_clock_enabled(struct intel_encoder *encoder)
0670 {
0671     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0672     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0673     bool clock_enabled = false;
0674     enum phy phy;
0675     u32 tmp;
0676 
0677     tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
0678 
0679     for_each_dsi_phy(phy, intel_dsi->phys) {
0680         if (!(tmp & ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)))
0681             clock_enabled = true;
0682     }
0683 
0684     return clock_enabled;
0685 }
0686 
0687 static void gen11_dsi_map_pll(struct intel_encoder *encoder,
0688                   const struct intel_crtc_state *crtc_state)
0689 {
0690     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0691     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0692     struct intel_shared_dpll *pll = crtc_state->shared_dpll;
0693     enum phy phy;
0694     u32 val;
0695 
0696     mutex_lock(&dev_priv->dpll.lock);
0697 
0698     val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
0699     for_each_dsi_phy(phy, intel_dsi->phys) {
0700         val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
0701         val |= ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
0702     }
0703     intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
0704 
0705     for_each_dsi_phy(phy, intel_dsi->phys) {
0706         val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
0707     }
0708     intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
0709 
0710     intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0);
0711 
0712     mutex_unlock(&dev_priv->dpll.lock);
0713 }
0714 
0715 static void
0716 gen11_dsi_configure_transcoder(struct intel_encoder *encoder,
0717                    const struct intel_crtc_state *pipe_config)
0718 {
0719     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0720     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0721     struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
0722     enum pipe pipe = crtc->pipe;
0723     u32 tmp;
0724     enum port port;
0725     enum transcoder dsi_trans;
0726 
0727     for_each_dsi_port(port, intel_dsi->ports) {
0728         dsi_trans = dsi_port_to_transcoder(port);
0729         tmp = intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans));
0730 
0731         if (intel_dsi->eotp_pkt)
0732             tmp &= ~EOTP_DISABLED;
0733         else
0734             tmp |= EOTP_DISABLED;
0735 
0736         /* enable link calibration if freq > 1.5Gbps */
0737         if (afe_clk(encoder, pipe_config) >= 1500 * 1000) {
0738             tmp &= ~LINK_CALIBRATION_MASK;
0739             tmp |= CALIBRATION_ENABLED_INITIAL_ONLY;
0740         }
0741 
0742         /* configure continuous clock */
0743         tmp &= ~CONTINUOUS_CLK_MASK;
0744         if (intel_dsi->clock_stop)
0745             tmp |= CLK_ENTER_LP_AFTER_DATA;
0746         else
0747             tmp |= CLK_HS_CONTINUOUS;
0748 
0749         /* configure buffer threshold limit to minimum */
0750         tmp &= ~PIX_BUF_THRESHOLD_MASK;
0751         tmp |= PIX_BUF_THRESHOLD_1_4;
0752 
0753         /* set virtual channel to '0' */
0754         tmp &= ~PIX_VIRT_CHAN_MASK;
0755         tmp |= PIX_VIRT_CHAN(0);
0756 
0757         /* program BGR transmission */
0758         if (intel_dsi->bgr_enabled)
0759             tmp |= BGR_TRANSMISSION;
0760 
0761         /* select pixel format */
0762         tmp &= ~PIX_FMT_MASK;
0763         if (pipe_config->dsc.compression_enable) {
0764             tmp |= PIX_FMT_COMPRESSED;
0765         } else {
0766             switch (intel_dsi->pixel_format) {
0767             default:
0768                 MISSING_CASE(intel_dsi->pixel_format);
0769                 fallthrough;
0770             case MIPI_DSI_FMT_RGB565:
0771                 tmp |= PIX_FMT_RGB565;
0772                 break;
0773             case MIPI_DSI_FMT_RGB666_PACKED:
0774                 tmp |= PIX_FMT_RGB666_PACKED;
0775                 break;
0776             case MIPI_DSI_FMT_RGB666:
0777                 tmp |= PIX_FMT_RGB666_LOOSE;
0778                 break;
0779             case MIPI_DSI_FMT_RGB888:
0780                 tmp |= PIX_FMT_RGB888;
0781                 break;
0782             }
0783         }
0784 
0785         if (DISPLAY_VER(dev_priv) >= 12) {
0786             if (is_vid_mode(intel_dsi))
0787                 tmp |= BLANKING_PACKET_ENABLE;
0788         }
0789 
0790         /* program DSI operation mode */
0791         if (is_vid_mode(intel_dsi)) {
0792             tmp &= ~OP_MODE_MASK;
0793             switch (intel_dsi->video_mode) {
0794             default:
0795                 MISSING_CASE(intel_dsi->video_mode);
0796                 fallthrough;
0797             case NON_BURST_SYNC_EVENTS:
0798                 tmp |= VIDEO_MODE_SYNC_EVENT;
0799                 break;
0800             case NON_BURST_SYNC_PULSE:
0801                 tmp |= VIDEO_MODE_SYNC_PULSE;
0802                 break;
0803             }
0804         } else {
0805             /*
0806              * FIXME: Retrieve this info from VBT.
0807              * As per the spec, when the DSI transcoder is operating
0808              * in TE GATE mode, TE comes from a GPIO,
0809              * which is the UTIL PIN for DSI 0.
0810              * It is also assumed that this GPIO is not
0811              * used for any other purpose.
0812              */
0813             tmp &= ~OP_MODE_MASK;
0814             tmp |= CMD_MODE_TE_GATE;
0815             tmp |= TE_SOURCE_GPIO;
0816         }
0817 
0818         intel_de_write(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans), tmp);
0819     }
0820 
0821     /* enable port sync mode if dual link */
0822     if (intel_dsi->dual_link) {
0823         for_each_dsi_port(port, intel_dsi->ports) {
0824             dsi_trans = dsi_port_to_transcoder(port);
0825             tmp = intel_de_read(dev_priv,
0826                         TRANS_DDI_FUNC_CTL2(dsi_trans));
0827             tmp |= PORT_SYNC_MODE_ENABLE;
0828             intel_de_write(dev_priv,
0829                        TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
0830         }
0831 
0832         /* configure stream splitting */
0833         configure_dual_link_mode(encoder, pipe_config);
0834     }
0835 
0836     for_each_dsi_port(port, intel_dsi->ports) {
0837         dsi_trans = dsi_port_to_transcoder(port);
0838 
0839         /* select data lane width */
0840         tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
0841         tmp &= ~DDI_PORT_WIDTH_MASK;
0842         tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count);
0843 
0844         /* select input pipe */
0845         tmp &= ~TRANS_DDI_EDP_INPUT_MASK;
0846         switch (pipe) {
0847         default:
0848             MISSING_CASE(pipe);
0849             fallthrough;
0850         case PIPE_A:
0851             tmp |= TRANS_DDI_EDP_INPUT_A_ON;
0852             break;
0853         case PIPE_B:
0854             tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
0855             break;
0856         case PIPE_C:
0857             tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
0858             break;
0859         case PIPE_D:
0860             tmp |= TRANS_DDI_EDP_INPUT_D_ONOFF;
0861             break;
0862         }
0863 
0864         /* enable DDI buffer */
0865         tmp |= TRANS_DDI_FUNC_ENABLE;
0866         intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
0867     }
0868 
0869     /* wait for link ready */
0870     for_each_dsi_port(port, intel_dsi->ports) {
0871         dsi_trans = dsi_port_to_transcoder(port);
0872         if (wait_for_us((intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans)) &
0873                  LINK_READY), 2500))
0874             drm_err(&dev_priv->drm, "DSI link not ready\n");
0875     }
0876 }
0877 
0878 static void
0879 gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder,
0880                  const struct intel_crtc_state *crtc_state)
0881 {
0882     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
0883     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
0884     const struct drm_display_mode *adjusted_mode =
0885         &crtc_state->hw.adjusted_mode;
0886     enum port port;
0887     enum transcoder dsi_trans;
0888     /* horizontal timings */
0889     u16 htotal, hactive, hsync_start, hsync_end, hsync_size;
0890     u16 hback_porch;
0891     /* vertical timings */
0892     u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift;
0893     int mul = 1, div = 1;
0894 
0895     /*
0896      * Adjust horizontal timings (htotal, hsync_start, hsync_end) to account
0897      * for slower link speed if DSC is enabled.
0898      *
0899      * The compression frequency ratio is the ratio between compressed and
0900      * non-compressed link speeds, and simplifies down to the ratio between
0901      * compressed and non-compressed bpp.
0902      */
0903     if (crtc_state->dsc.compression_enable) {
0904         mul = crtc_state->dsc.compressed_bpp;
0905         div = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
0906     }
0907 
0908     hactive = adjusted_mode->crtc_hdisplay;
0909 
0910     if (is_vid_mode(intel_dsi))
0911         htotal = DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
0912     else
0913         htotal = DIV_ROUND_UP((hactive + 160) * mul, div);
0914 
0915     hsync_start = DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
0916     hsync_end = DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
0917     hsync_size  = hsync_end - hsync_start;
0918     hback_porch = (adjusted_mode->crtc_htotal -
0919                adjusted_mode->crtc_hsync_end);
0920     vactive = adjusted_mode->crtc_vdisplay;
0921 
0922     if (is_vid_mode(intel_dsi)) {
0923         vtotal = adjusted_mode->crtc_vtotal;
0924     } else {
0925         int bpp, line_time_us, byte_clk_period_ns;
0926 
0927         if (crtc_state->dsc.compression_enable)
0928             bpp = crtc_state->dsc.compressed_bpp;
0929         else
0930             bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
0931 
0932         byte_clk_period_ns = 1000000 / afe_clk(encoder, crtc_state);
0933         line_time_us = (htotal * (bpp / 8) * byte_clk_period_ns) / (1000 * intel_dsi->lane_count);
0934         vtotal = vactive + DIV_ROUND_UP(400, line_time_us);
0935     }
0936     vsync_start = adjusted_mode->crtc_vsync_start;
0937     vsync_end = adjusted_mode->crtc_vsync_end;
0938     vsync_shift = hsync_start - htotal / 2;
0939 
0940     if (intel_dsi->dual_link) {
0941         hactive /= 2;
0942         if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
0943             hactive += intel_dsi->pixel_overlap;
0944         htotal /= 2;
0945     }
0946 
0947     /* minimum hactive as per bspec: 256 pixels */
0948     if (adjusted_mode->crtc_hdisplay < 256)
0949         drm_err(&dev_priv->drm, "hactive is less than 256 pixels\n");
0950 
0951     /* if RGB666 format, then hactive must be multiple of 4 pixels */
0952     if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0)
0953         drm_err(&dev_priv->drm,
0954             "hactive pixels are not multiple of 4\n");
0955 
0956     /* program TRANS_HTOTAL register */
0957     for_each_dsi_port(port, intel_dsi->ports) {
0958         dsi_trans = dsi_port_to_transcoder(port);
0959         intel_de_write(dev_priv, HTOTAL(dsi_trans),
0960                    (hactive - 1) | ((htotal - 1) << 16));
0961     }
0962 
0963     /* TRANS_HSYNC register to be programmed only for video mode */
0964     if (is_vid_mode(intel_dsi)) {
0965         if (intel_dsi->video_mode == NON_BURST_SYNC_PULSE) {
0966             /* BSPEC: hsync size should be at least 16 pixels */
0967             if (hsync_size < 16)
0968                 drm_err(&dev_priv->drm,
0969                     "hsync size < 16 pixels\n");
0970         }
0971 
0972         if (hback_porch < 16)
0973             drm_err(&dev_priv->drm, "hback porch < 16 pixels\n");
0974 
0975         if (intel_dsi->dual_link) {
0976             hsync_start /= 2;
0977             hsync_end /= 2;
0978         }
0979 
0980         for_each_dsi_port(port, intel_dsi->ports) {
0981             dsi_trans = dsi_port_to_transcoder(port);
0982             intel_de_write(dev_priv, HSYNC(dsi_trans),
0983                        (hsync_start - 1) | ((hsync_end - 1) << 16));
0984         }
0985     }
0986 
0987     /* program TRANS_VTOTAL register */
0988     for_each_dsi_port(port, intel_dsi->ports) {
0989         dsi_trans = dsi_port_to_transcoder(port);
0990         /*
0991          * FIXME: Programming this by assuming progressive mode, since
0992          * non-interlaced info from VBT is not saved inside
0993          * struct drm_display_mode.
0994          * For interlace mode: program required pixel minus 2
0995          */
0996         intel_de_write(dev_priv, VTOTAL(dsi_trans),
0997                    (vactive - 1) | ((vtotal - 1) << 16));
0998     }
0999 
1000     if (vsync_end < vsync_start || vsync_end > vtotal)
1001         drm_err(&dev_priv->drm, "Invalid vsync_end value\n");
1002 
1003     if (vsync_start < vactive)
1004         drm_err(&dev_priv->drm, "vsync_start less than vactive\n");
1005 
1006     /* program TRANS_VSYNC register for video mode only */
1007     if (is_vid_mode(intel_dsi)) {
1008         for_each_dsi_port(port, intel_dsi->ports) {
1009             dsi_trans = dsi_port_to_transcoder(port);
1010             intel_de_write(dev_priv, VSYNC(dsi_trans),
1011                        (vsync_start - 1) | ((vsync_end - 1) << 16));
1012         }
1013     }
1014 
1015     /*
1016      * FIXME: It has to be programmed only for video modes and interlaced
1017      * modes. Put the check condition here once interlaced
1018      * info is available, as described above.
1019      * program TRANS_VSYNCSHIFT register
1020      */
1021     if (is_vid_mode(intel_dsi)) {
1022         for_each_dsi_port(port, intel_dsi->ports) {
1023             dsi_trans = dsi_port_to_transcoder(port);
1024             intel_de_write(dev_priv, VSYNCSHIFT(dsi_trans),
1025                        vsync_shift);
1026         }
1027     }
1028 
1029     /* program TRANS_VBLANK register, should be same as vtotal programmed */
1030     if (DISPLAY_VER(dev_priv) >= 12) {
1031         for_each_dsi_port(port, intel_dsi->ports) {
1032             dsi_trans = dsi_port_to_transcoder(port);
1033             intel_de_write(dev_priv, VBLANK(dsi_trans),
1034                        (vactive - 1) | ((vtotal - 1) << 16));
1035         }
1036     }
1037 }
1038 
1039 static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder)
1040 {
1041     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1042     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1043     enum port port;
1044     enum transcoder dsi_trans;
1045     u32 tmp;
1046 
1047     for_each_dsi_port(port, intel_dsi->ports) {
1048         dsi_trans = dsi_port_to_transcoder(port);
1049         tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
1050         tmp |= PIPECONF_ENABLE;
1051         intel_de_write(dev_priv, PIPECONF(dsi_trans), tmp);
1052 
1053         /* wait for transcoder to be enabled */
1054         if (intel_de_wait_for_set(dev_priv, PIPECONF(dsi_trans),
1055                       PIPECONF_STATE_ENABLE, 10))
1056             drm_err(&dev_priv->drm,
1057                 "DSI transcoder not enabled\n");
1058     }
1059 }
1060 
1061 static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder,
1062                      const struct intel_crtc_state *crtc_state)
1063 {
1064     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1065     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1066     enum port port;
1067     enum transcoder dsi_trans;
1068     u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;
1069 
1070     /*
1071      * escape clock count calculation:
1072      * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
1073      * UI (nsec) = (10^6)/Bitrate
1074      * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
1075      * ESCAPE_CLK_COUNT  = TIME_NS/ESC_CLK_NS
1076      */
1077     divisor = intel_dsi_tlpx_ns(intel_dsi) * afe_clk(encoder, crtc_state) * 1000;
1078     mul = 8 * 1000000;
1079     hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
1080                      divisor);
1081     lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
1082     ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);
1083 
1084     for_each_dsi_port(port, intel_dsi->ports) {
1085         dsi_trans = dsi_port_to_transcoder(port);
1086 
1087         /* program hst_tx_timeout */
1088         tmp = intel_de_read(dev_priv, DSI_HSTX_TO(dsi_trans));
1089         tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
1090         tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
1091         intel_de_write(dev_priv, DSI_HSTX_TO(dsi_trans), tmp);
1092 
1093         /* FIXME: DSI_CALIB_TO */
1094 
1095         /* program lp_rx_host timeout */
1096         tmp = intel_de_read(dev_priv, DSI_LPRX_HOST_TO(dsi_trans));
1097         tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
1098         tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
1099         intel_de_write(dev_priv, DSI_LPRX_HOST_TO(dsi_trans), tmp);
1100 
1101         /* FIXME: DSI_PWAIT_TO */
1102 
1103         /* program turn around timeout */
1104         tmp = intel_de_read(dev_priv, DSI_TA_TO(dsi_trans));
1105         tmp &= ~TA_TIMEOUT_VALUE_MASK;
1106         tmp |= TA_TIMEOUT_VALUE(ta_timeout);
1107         intel_de_write(dev_priv, DSI_TA_TO(dsi_trans), tmp);
1108     }
1109 }
1110 
1111 static void gen11_dsi_config_util_pin(struct intel_encoder *encoder,
1112                       bool enable)
1113 {
1114     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1115     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1116     u32 tmp;
1117 
1118     /*
1119      * The util pin is used as the TE input for DSI0;
1120      * for dual link/DSI1, TE comes from the slave DSI1
1121      * through a GPIO.
1122      */
1123     if (is_vid_mode(intel_dsi) || (intel_dsi->ports & BIT(PORT_B)))
1124         return;
1125 
1126     tmp = intel_de_read(dev_priv, UTIL_PIN_CTL);
1127 
1128     if (enable) {
1129         tmp |= UTIL_PIN_DIRECTION_INPUT;
1130         tmp |= UTIL_PIN_ENABLE;
1131     } else {
1132         tmp &= ~UTIL_PIN_ENABLE;
1133     }
1134     intel_de_write(dev_priv, UTIL_PIN_CTL, tmp);
1135 }
1136 
1137 static void
1138 gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
1139                   const struct intel_crtc_state *crtc_state)
1140 {
1141     /* step 4a: power up all lanes of the DDI used by DSI */
1142     gen11_dsi_power_up_lanes(encoder);
1143 
1144     /* step 4b: configure lane sequencing of the Combo-PHY transmitters */
1145     gen11_dsi_config_phy_lanes_sequence(encoder);
1146 
1147     /* step 4c: configure voltage swing and skew */
1148     gen11_dsi_voltage_swing_program_seq(encoder);
1149 
1150     /* enable DDI buffer */
1151     gen11_dsi_enable_ddi_buffer(encoder);
1152 
1153     /* setup D-PHY timings */
1154     gen11_dsi_setup_dphy_timings(encoder, crtc_state);
1155 
1156     /* Configure the util pin, since the transcoder takes TE events from this GPIO */
1157     gen11_dsi_config_util_pin(encoder, true);
1158 
1159     /* step 4h: setup DSI protocol timeouts */
1160     gen11_dsi_setup_timeouts(encoder, crtc_state);
1161 
1162     /* Step (4h, 4i, 4j, 4k): Configure transcoder */
1163     gen11_dsi_configure_transcoder(encoder, crtc_state);
1164 
1165     /* Step 4l: Gate DDI clocks */
1166     gen11_dsi_gate_clocks(encoder);
1167 }
1168 
1169 static void gen11_dsi_powerup_panel(struct intel_encoder *encoder)
1170 {
1171     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1172     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1173     struct mipi_dsi_device *dsi;
1174     enum port port;
1175     enum transcoder dsi_trans;
1176     u32 tmp;
1177     int ret;
1178 
1179     /* set maximum return packet size */
1180     for_each_dsi_port(port, intel_dsi->ports) {
1181         dsi_trans = dsi_port_to_transcoder(port);
1182 
1183         /*
1184          * FIXME: This uses the number of DW's currently in the payload
1185          * receive queue. This is probably not what we want here.
1186          */
1187         tmp = intel_de_read(dev_priv, DSI_CMD_RXCTL(dsi_trans));
1188         tmp &= NUMBER_RX_PLOAD_DW_MASK;
1189         /* multiply "Number Rx Payload DW" by 4 to get max value */
1190         tmp = tmp * 4;
1191         dsi = intel_dsi->dsi_hosts[port]->device;
1192         ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp);
1193         if (ret < 0)
1194             drm_err(&dev_priv->drm,
1195                 "error setting max return pkt size %d\n", tmp);
1196     }
1197 
1198     /* panel power on related mipi dsi vbt sequences */
1199     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON);
1200     intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay);
1201     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET);
1202     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP);
1203     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON);
1204 
1205     /* ensure all panel commands dispatched before enabling transcoder */
1206     wait_for_cmds_dispatched_to_panel(encoder);
1207 }
1208 
1209 static void gen11_dsi_pre_pll_enable(struct intel_atomic_state *state,
1210                      struct intel_encoder *encoder,
1211                      const struct intel_crtc_state *crtc_state,
1212                      const struct drm_connector_state *conn_state)
1213 {
1214     /* step2: enable IO power */
1215     gen11_dsi_enable_io_power(encoder);
1216 
1217     /* step3: enable DSI PLL */
1218     gen11_dsi_program_esc_clk_div(encoder, crtc_state);
1219 }
1220 
1221 static void gen11_dsi_pre_enable(struct intel_atomic_state *state,
1222                  struct intel_encoder *encoder,
1223                  const struct intel_crtc_state *pipe_config,
1224                  const struct drm_connector_state *conn_state)
1225 {
1226     /* step3b */
1227     gen11_dsi_map_pll(encoder, pipe_config);
1228 
1229     /* step4: enable DSI port and DPHY */
1230     gen11_dsi_enable_port_and_phy(encoder, pipe_config);
1231 
1232     /* step5: program and powerup panel */
1233     gen11_dsi_powerup_panel(encoder);
1234 
1235     intel_dsc_dsi_pps_write(encoder, pipe_config);
1236 
1237     /* step6c: configure transcoder timings */
1238     gen11_dsi_set_transcoder_timings(encoder, pipe_config);
1239 }
1240 
1241 /*
1242  * Wa_1409054076:icl,jsl,ehl
1243  * When pipe A is disabled and MIPI DSI is enabled on pipe B,
1244  * the AMT KVMR feature will incorrectly see pipe A as enabled.
1245  * Set 0x42080 bit 23=1 before enabling DSI on pipe B and leave
1246  * it set while DSI is enabled on pipe B
1247  */
1248 static void icl_apply_kvmr_pipe_a_wa(struct intel_encoder *encoder,
1249                      enum pipe pipe, bool enable)
1250 {
1251     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1252 
1253     if (DISPLAY_VER(dev_priv) == 11 && pipe == PIPE_B)
1254         intel_de_rmw(dev_priv, CHICKEN_PAR1_1,
1255                  IGNORE_KVMR_PIPE_A,
1256                  enable ? IGNORE_KVMR_PIPE_A : 0);
1257 }
1258 
1259 /*
1260  * Wa_16012360555:adl-p
1261  * SW will have to program the "LP to HS Wakeup Guardband"
1262  * to account for the repeaters on the HS Request/Ready
1263  * PPI signaling between the Display engine and the DPHY.
1264  */
1265 static void adlp_set_lp_hs_wakeup_gb(struct intel_encoder *encoder)
1266 {
1267     struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1268     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1269     enum port port;
1270 
1271     if (DISPLAY_VER(i915) == 13) {
1272         for_each_dsi_port(port, intel_dsi->ports)
1273             intel_de_rmw(i915, TGL_DSI_CHKN_REG(port),
1274                      TGL_DSI_CHKN_LSHS_GB_MASK,
1275                      TGL_DSI_CHKN_LSHS_GB(4));
1276     }
1277 }
1278 
1279 static void gen11_dsi_enable(struct intel_atomic_state *state,
1280                  struct intel_encoder *encoder,
1281                  const struct intel_crtc_state *crtc_state,
1282                  const struct drm_connector_state *conn_state)
1283 {
1284     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1285     struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc);
1286 
1287     drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder);
1288 
1289     /* Wa_1409054076:icl,jsl,ehl */
1290     icl_apply_kvmr_pipe_a_wa(encoder, crtc->pipe, true);
1291 
1292     /* Wa_16012360555:adl-p */
1293     adlp_set_lp_hs_wakeup_gb(encoder);
1294 
1295     /* step6d: enable dsi transcoder */
1296     gen11_dsi_enable_transcoder(encoder);
1297 
1298     /* step7: enable backlight */
1299     intel_backlight_enable(crtc_state, conn_state);
1300     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);
1301 
1302     intel_crtc_vblank_on(crtc_state);
1303 }
1304 
1305 static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder)
1306 {
1307     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1308     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1309     enum port port;
1310     enum transcoder dsi_trans;
1311     u32 tmp;
1312 
1313     for_each_dsi_port(port, intel_dsi->ports) {
1314         dsi_trans = dsi_port_to_transcoder(port);
1315 
1316         /* disable transcoder */
1317         tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
1318         tmp &= ~PIPECONF_ENABLE;
1319         intel_de_write(dev_priv, PIPECONF(dsi_trans), tmp);
1320 
1321         /* wait for transcoder to be disabled */
1322         if (intel_de_wait_for_clear(dev_priv, PIPECONF(dsi_trans),
1323                         PIPECONF_STATE_ENABLE, 50))
1324             drm_err(&dev_priv->drm,
1325                 "DSI transcoder not disabled\n");
1326     }
1327 }
1328 
1329 static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
1330 {
1331     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1332 
1333     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
1334     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
1335     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);
1336 
1337     /* ensure cmds dispatched to panel */
1338     wait_for_cmds_dispatched_to_panel(encoder);
1339 }
1340 
1341 static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
1342 {
1343     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1344     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1345     enum port port;
1346     enum transcoder dsi_trans;
1347     u32 tmp;
1348 
1349     /* disable periodic update mode */
1350     if (is_cmd_mode(intel_dsi)) {
1351         for_each_dsi_port(port, intel_dsi->ports) {
1352             tmp = intel_de_read(dev_priv, DSI_CMD_FRMCTL(port));
1353             tmp &= ~DSI_PERIODIC_FRAME_UPDATE_ENABLE;
1354             intel_de_write(dev_priv, DSI_CMD_FRMCTL(port), tmp);
1355         }
1356     }
1357 
1358     /* put dsi link in ULPS */
1359     for_each_dsi_port(port, intel_dsi->ports) {
1360         dsi_trans = dsi_port_to_transcoder(port);
1361         tmp = intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans));
1362         tmp |= LINK_ENTER_ULPS;
1363         tmp &= ~LINK_ULPS_TYPE_LP11;
1364         intel_de_write(dev_priv, DSI_LP_MSG(dsi_trans), tmp);
1365 
1366         if (wait_for_us((intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans)) &
1367                  LINK_IN_ULPS),
1368                 10))
1369             drm_err(&dev_priv->drm, "DSI link not in ULPS\n");
1370     }
1371 
1372     /* disable ddi function */
1373     for_each_dsi_port(port, intel_dsi->ports) {
1374         dsi_trans = dsi_port_to_transcoder(port);
1375         tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
1376         tmp &= ~TRANS_DDI_FUNC_ENABLE;
1377         intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
1378     }
1379 
1380     /* disable port sync mode if dual link */
1381     if (intel_dsi->dual_link) {
1382         for_each_dsi_port(port, intel_dsi->ports) {
1383             dsi_trans = dsi_port_to_transcoder(port);
1384             tmp = intel_de_read(dev_priv,
1385                         TRANS_DDI_FUNC_CTL2(dsi_trans));
1386             tmp &= ~PORT_SYNC_MODE_ENABLE;
1387             intel_de_write(dev_priv,
1388                        TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
1389         }
1390     }
1391 }
1392 
1393 static void gen11_dsi_disable_port(struct intel_encoder *encoder)
1394 {
1395     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1396     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1397     u32 tmp;
1398     enum port port;
1399 
1400     gen11_dsi_ungate_clocks(encoder);
1401     for_each_dsi_port(port, intel_dsi->ports) {
1402         tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
1403         tmp &= ~DDI_BUF_CTL_ENABLE;
1404         intel_de_write(dev_priv, DDI_BUF_CTL(port), tmp);
1405 
1406         if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
1407                  DDI_BUF_IS_IDLE),
1408                  8))
1409             drm_err(&dev_priv->drm,
1410                 "DDI port:%c buffer not idle\n",
1411                 port_name(port));
1412     }
1413     gen11_dsi_gate_clocks(encoder);
1414 }
1415 
1416 static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
1417 {
1418     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1419     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1420     enum port port;
1421     u32 tmp;
1422 
1423     for_each_dsi_port(port, intel_dsi->ports) {
1424         intel_wakeref_t wakeref;
1425 
1426         wakeref = fetch_and_zero(&intel_dsi->io_wakeref[port]);
1427         intel_display_power_put(dev_priv,
1428                     port == PORT_A ?
1429                     POWER_DOMAIN_PORT_DDI_IO_A :
1430                     POWER_DOMAIN_PORT_DDI_IO_B,
1431                     wakeref);
1432     }
1433 
1434     /* set the combo PHY IO mode back to DDI */
1435     for_each_dsi_port(port, intel_dsi->ports) {
1436         tmp = intel_de_read(dev_priv, ICL_DSI_IO_MODECTL(port));
1437         tmp &= ~COMBO_PHY_MODE_DSI;
1438         intel_de_write(dev_priv, ICL_DSI_IO_MODECTL(port), tmp);
1439     }
1440 }
1441 
1442 static void gen11_dsi_disable(struct intel_atomic_state *state,
1443                   struct intel_encoder *encoder,
1444                   const struct intel_crtc_state *old_crtc_state,
1445                   const struct drm_connector_state *old_conn_state)
1446 {
1447     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1448     struct intel_crtc *crtc = to_intel_crtc(old_conn_state->crtc);
1449 
1450     /* step1: turn off backlight */
1451     intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
1452     intel_backlight_disable(old_conn_state);
1453 
1454     /* step2d,e: disable transcoder and wait */
1455     gen11_dsi_disable_transcoder(encoder);
1456 
1457     /* Wa_1409054076:icl,jsl,ehl */
1458     icl_apply_kvmr_pipe_a_wa(encoder, crtc->pipe, false);
1459 
1460     /* step2f,g: power down panel */
1461     gen11_dsi_powerdown_panel(encoder);
1462 
1463     /* step2h,i,j: deconfigure transcoder */
1464     gen11_dsi_deconfigure_trancoder(encoder);
1465 
1466     /* step3: disable port */
1467     gen11_dsi_disable_port(encoder);
1468 
1469     gen11_dsi_config_util_pin(encoder, false);
1470 
1471     /* step4: disable IO power */
1472     gen11_dsi_disable_io_power(encoder);
1473 }
1474 
1475 static void gen11_dsi_post_disable(struct intel_atomic_state *state,
1476                    struct intel_encoder *encoder,
1477                    const struct intel_crtc_state *old_crtc_state,
1478                    const struct drm_connector_state *old_conn_state)
1479 {
1480     intel_crtc_vblank_off(old_crtc_state);
1481 
1482     intel_dsc_disable(old_crtc_state);
1483 
1484     skl_scaler_disable(old_crtc_state);
1485 }
1486 
1487 static enum drm_mode_status gen11_dsi_mode_valid(struct drm_connector *connector,
1488                          struct drm_display_mode *mode)
1489 {
1490     /* FIXME: DSC? */
1491     return intel_dsi_mode_valid(connector, mode);
1492 }
1493 
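/*
 * Reconstruct the adjusted mode timings from the hardware state. When DSC
 * is enabled, the horizontal timings are scaled up by the ratio of the
 * uncompressed pixel format bpp to the compressed bpp; e.g. RGB888
 * (24 bpp) compressed to 8 bpp would triple crtc_htotal. Dual link then
 * doubles crtc_hdisplay and crtc_htotal, minus any pixel overlap for a
 * front/back split.
 */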
1494 static void gen11_dsi_get_timings(struct intel_encoder *encoder,
1495                   struct intel_crtc_state *pipe_config)
1496 {
1497     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1498     struct drm_display_mode *adjusted_mode =
1499                     &pipe_config->hw.adjusted_mode;
1500 
1501     if (pipe_config->dsc.compressed_bpp) {
1502         int div = pipe_config->dsc.compressed_bpp;
1503         int mul = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
1504 
1505         adjusted_mode->crtc_htotal =
1506             DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
1507         adjusted_mode->crtc_hsync_start =
1508             DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
1509         adjusted_mode->crtc_hsync_end =
1510             DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
1511     }
1512 
1513     if (intel_dsi->dual_link) {
1514         adjusted_mode->crtc_hdisplay *= 2;
1515         if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
1516             adjusted_mode->crtc_hdisplay -=
1517                         intel_dsi->pixel_overlap;
1518         adjusted_mode->crtc_htotal *= 2;
1519     }
1520     adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hdisplay;
1521     adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_htotal;
1522 
1523     if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
1524         if (intel_dsi->dual_link) {
1525             adjusted_mode->crtc_hsync_start *= 2;
1526             adjusted_mode->crtc_hsync_end *= 2;
1527         }
1528     }
1529     adjusted_mode->crtc_vblank_start = adjusted_mode->crtc_vdisplay;
1530     adjusted_mode->crtc_vblank_end = adjusted_mode->crtc_vtotal;
1531 }
1532 
1533 static bool gen11_dsi_is_periodic_cmd_mode(struct intel_dsi *intel_dsi)
1534 {
1535     struct drm_device *dev = intel_dsi->base.base.dev;
1536     struct drm_i915_private *dev_priv = to_i915(dev);
1537     enum transcoder dsi_trans;
1538     u32 val;
1539 
1540     if (intel_dsi->ports == BIT(PORT_B))
1541         dsi_trans = TRANSCODER_DSI_1;
1542     else
1543         dsi_trans = TRANSCODER_DSI_0;
1544 
1545     val = intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans));
1546     return (val & DSI_PERIODIC_FRAME_UPDATE_ENABLE);
1547 }
1548 
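/*
 * Record which TE (tearing effect) inputs are in use for command mode:
 * dual link uses both TE0 and TE1, a single link on port B uses TE1,
 * otherwise TE0.
 */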
1549 static void gen11_dsi_get_cmd_mode_config(struct intel_dsi *intel_dsi,
1550                       struct intel_crtc_state *pipe_config)
1551 {
1552     if (intel_dsi->ports == (BIT(PORT_B) | BIT(PORT_A)))
1553         pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE1 |
1554                         I915_MODE_FLAG_DSI_USE_TE0;
1555     else if (intel_dsi->ports == BIT(PORT_B))
1556         pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE1;
1557     else
1558         pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE0;
1559 }
1560 
1561 static void gen11_dsi_get_config(struct intel_encoder *encoder,
1562                  struct intel_crtc_state *pipe_config)
1563 {
1564     struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
1565     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1566 
1567     intel_ddi_get_clock(encoder, pipe_config, icl_ddi_combo_get_pll(encoder));
1568 
1569     pipe_config->hw.adjusted_mode.crtc_clock = intel_dsi->pclk;
1570     if (intel_dsi->dual_link)
1571         pipe_config->hw.adjusted_mode.crtc_clock *= 2;
1572 
1573     gen11_dsi_get_timings(encoder, pipe_config);
1574     pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
1575     pipe_config->pipe_bpp = bdw_get_pipemisc_bpp(crtc);
1576 
1577     /* Get the details on which TE should be enabled */
1578     if (is_cmd_mode(intel_dsi))
1579         gen11_dsi_get_cmd_mode_config(intel_dsi, pipe_config);
1580 
1581     if (gen11_dsi_is_periodic_cmd_mode(intel_dsi))
1582         pipe_config->mode_flags |= I915_MODE_FLAG_DSI_PERIODIC_CMD_MODE;
1583 }
1584 
1585 static void gen11_dsi_sync_state(struct intel_encoder *encoder,
1586                  const struct intel_crtc_state *crtc_state)
1587 {
1588     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1589     struct intel_crtc *intel_crtc;
1590     enum pipe pipe;
1591 
1592     if (!crtc_state)
1593         return;
1594 
1595     intel_crtc = to_intel_crtc(crtc_state->uapi.crtc);
1596     pipe = intel_crtc->pipe;
1597 
1598     /* Verify Wa_1409054076:icl,jsl,ehl */
1599     if (DISPLAY_VER(dev_priv) == 11 && pipe == PIPE_B &&
1600         !(intel_de_read(dev_priv, CHICKEN_PAR1_1) & IGNORE_KVMR_PIPE_A))
1601         drm_dbg_kms(&dev_priv->drm,
1602                 "[ENCODER:%d:%s] BIOS left IGNORE_KVMR_PIPE_A cleared with pipe B enabled\n",
1603                 encoder->base.base.id,
1604                 encoder->base.name);
1605 }
1606 
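/*
 * Compute the DSC configuration from the VBT DSC parameters, if the panel
 * asks for compression. The maximum bpc fed to the DSC block is 12 on
 * display version 12+ and 10 on version 11.
 */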
1607 static int gen11_dsi_dsc_compute_config(struct intel_encoder *encoder,
1608                     struct intel_crtc_state *crtc_state)
1609 {
1610     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1611     struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1612     int dsc_max_bpc = DISPLAY_VER(dev_priv) >= 12 ? 12 : 10;
1613     bool use_dsc;
1614     int ret;
1615 
1616     use_dsc = intel_bios_get_dsc_params(encoder, crtc_state, dsc_max_bpc);
1617     if (!use_dsc)
1618         return 0;
1619 
1620     if (crtc_state->pipe_bpp < 8 * 3)
1621         return -EINVAL;
1622 
1623     /* FIXME: split only when necessary */
1624     if (crtc_state->dsc.slice_count > 1)
1625         crtc_state->dsc.dsc_split = true;
1626 
1627     vdsc_cfg->convert_rgb = true;
1628 
1629     /* FIXME: initialize from VBT */
1630     vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST;
1631 
1632     vdsc_cfg->pic_height = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1633 
1634     ret = intel_dsc_compute_params(crtc_state);
1635     if (ret)
1636         return ret;
1637 
1638     /* DSI-specific sanity checks on the common code */
1639     drm_WARN_ON(&dev_priv->drm, vdsc_cfg->vbr_enable);
1640     drm_WARN_ON(&dev_priv->drm, vdsc_cfg->simple_422);
1641     drm_WARN_ON(&dev_priv->drm,
1642             vdsc_cfg->pic_width % vdsc_cfg->slice_width);
1643     drm_WARN_ON(&dev_priv->drm, vdsc_cfg->slice_height < 8);
1644     drm_WARN_ON(&dev_priv->drm,
1645             vdsc_cfg->pic_height % vdsc_cfg->slice_height);
1646 
1647     ret = drm_dsc_compute_rc_parameters(vdsc_cfg);
1648     if (ret)
1649         return ret;
1650 
1651     crtc_state->dsc.compression_enable = true;
1652 
1653     return 0;
1654 }
1655 
1656 static int gen11_dsi_compute_config(struct intel_encoder *encoder,
1657                     struct intel_crtc_state *pipe_config,
1658                     struct drm_connector_state *conn_state)
1659 {
1660     struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1661     struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi,
1662                            base);
1663     struct intel_connector *intel_connector = intel_dsi->attached_connector;
1664     struct drm_display_mode *adjusted_mode =
1665         &pipe_config->hw.adjusted_mode;
1666     int ret;
1667 
1668     pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
1669 
1670     ret = intel_panel_compute_config(intel_connector, adjusted_mode);
1671     if (ret)
1672         return ret;
1673 
1674     ret = intel_panel_fitting(pipe_config, conn_state);
1675     if (ret)
1676         return ret;
1677 
1678     adjusted_mode->flags = 0;
1679 
1680     /* Dual link goes to transcoder DSI '0' */
1681     if (intel_dsi->ports == BIT(PORT_B))
1682         pipe_config->cpu_transcoder = TRANSCODER_DSI_1;
1683     else
1684         pipe_config->cpu_transcoder = TRANSCODER_DSI_0;
1685 
1686     if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB888)
1687         pipe_config->pipe_bpp = 24;
1688     else
1689         pipe_config->pipe_bpp = 18;
1690 
1691     pipe_config->clock_set = true;
1692 
1693     if (gen11_dsi_dsc_compute_config(encoder, pipe_config))
1694         drm_dbg_kms(&i915->drm, "Attempting to use DSC failed\n");
1695 
1696     pipe_config->port_clock = afe_clk(encoder, pipe_config) / 5;
1697 
1698     /*
1699      * In TE GATE command mode, TE is received from the
1700      * slave if dual link is enabled, so record which TE
1701      * inputs are in use.
1702      */
1703     if (is_cmd_mode(intel_dsi))
1704         gen11_dsi_get_cmd_mode_config(intel_dsi, pipe_config);
1705 
1706     return 0;
1707 }
1708 
1709 static void gen11_dsi_get_power_domains(struct intel_encoder *encoder,
1710                     struct intel_crtc_state *crtc_state)
1711 {
1712     struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1713 
1714     get_dsi_io_power_domains(i915,
1715                  enc_to_intel_dsi(encoder));
1716 }
1717 
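/*
 * Read back whether the DSI transcoder is enabled and which pipe it is
 * driving, holding a display power reference only for the duration of the
 * readout.
 */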
1718 static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
1719                    enum pipe *pipe)
1720 {
1721     struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1722     struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1723     enum transcoder dsi_trans;
1724     intel_wakeref_t wakeref;
1725     enum port port;
1726     bool ret = false;
1727     u32 tmp;
1728 
1729     wakeref = intel_display_power_get_if_enabled(dev_priv,
1730                              encoder->power_domain);
1731     if (!wakeref)
1732         return false;
1733 
1734     for_each_dsi_port(port, intel_dsi->ports) {
1735         dsi_trans = dsi_port_to_transcoder(port);
1736         tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
1737         switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
1738         case TRANS_DDI_EDP_INPUT_A_ON:
1739             *pipe = PIPE_A;
1740             break;
1741         case TRANS_DDI_EDP_INPUT_B_ONOFF:
1742             *pipe = PIPE_B;
1743             break;
1744         case TRANS_DDI_EDP_INPUT_C_ONOFF:
1745             *pipe = PIPE_C;
1746             break;
1747         case TRANS_DDI_EDP_INPUT_D_ONOFF:
1748             *pipe = PIPE_D;
1749             break;
1750         default:
1751             drm_err(&dev_priv->drm, "Invalid PIPE input\n");
1752             goto out;
1753         }
1754 
1755         tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
1756         ret = tmp & PIPECONF_ENABLE;
1757     }
1758 out:
1759     intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
1760     return ret;
1761 }
1762 
1763 static bool gen11_dsi_initial_fastset_check(struct intel_encoder *encoder,
1764                         struct intel_crtc_state *crtc_state)
1765 {
1766     if (crtc_state->dsc.compression_enable) {
1767         drm_dbg_kms(encoder->base.dev, "Forcing full modeset due to DSC being enabled\n");
1768         crtc_state->uapi.mode_changed = true;
1769 
1770         return false;
1771     }
1772 
1773     return true;
1774 }
1775 
1776 static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
1777 {
1778     intel_encoder_destroy(encoder);
1779 }
1780 
1781 static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
1782     .destroy = gen11_dsi_encoder_destroy,
1783 };
1784 
1785 static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
1786     .detect = intel_panel_detect,
1787     .late_register = intel_connector_register,
1788     .early_unregister = intel_connector_unregister,
1789     .destroy = intel_connector_destroy,
1790     .fill_modes = drm_helper_probe_single_connector_modes,
1791     .atomic_get_property = intel_digital_connector_atomic_get_property,
1792     .atomic_set_property = intel_digital_connector_atomic_set_property,
1793     .atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
1794     .atomic_duplicate_state = intel_digital_connector_duplicate_state,
1795 };
1796 
1797 static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
1798     .get_modes = intel_dsi_get_modes,
1799     .mode_valid = gen11_dsi_mode_valid,
1800     .atomic_check = intel_digital_connector_atomic_check,
1801 };
1802 
1803 static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
1804                  struct mipi_dsi_device *dsi)
1805 {
1806     return 0;
1807 }
1808 
1809 static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
1810                  struct mipi_dsi_device *dsi)
1811 {
1812     return 0;
1813 }
1814 
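/*
 * DSI host transfer: build a MIPI DSI packet from the message, send the
 * payload first for long packets, then the packet header (in LP mode when
 * MIPI_DSI_MSG_USE_LPM is set). Returns the number of bytes queued.
 */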
1815 static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host,
1816                        const struct mipi_dsi_msg *msg)
1817 {
1818     struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host);
1819     struct mipi_dsi_packet dsi_pkt;
1820     ssize_t ret;
1821     bool enable_lpdt = false;
1822 
1823     ret = mipi_dsi_create_packet(&dsi_pkt, msg);
1824     if (ret < 0)
1825         return ret;
1826 
1827     if (msg->flags & MIPI_DSI_MSG_USE_LPM)
1828         enable_lpdt = true;
1829 
1830     /* only long packets contain a payload */
1831     if (mipi_dsi_packet_format_is_long(msg->type)) {
1832         ret = dsi_send_pkt_payld(intel_dsi_host, &dsi_pkt);
1833         if (ret < 0)
1834             return ret;
1835     }
1836 
1837     /* send packet header */
1838     ret = dsi_send_pkt_hdr(intel_dsi_host, &dsi_pkt, enable_lpdt);
1839     if (ret < 0)
1840         return ret;
1841 
1842     /* TODO: add payload receive code if needed */
1843 
1844     ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length;
1845 
1846     return ret;
1847 }
1848 
1849 static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
1850     .attach = gen11_dsi_host_attach,
1851     .detach = gen11_dsi_host_detach,
1852     .transfer = gen11_dsi_host_transfer,
1853 };
1854 
1855 #define ICL_PREPARE_CNT_MAX 0x7
1856 #define ICL_CLK_ZERO_CNT_MAX    0xf
1857 #define ICL_TRAIL_CNT_MAX   0x7
1858 #define ICL_TCLK_PRE_CNT_MAX    0x3
1859 #define ICL_TCLK_POST_CNT_MAX   0x7
1860 #define ICL_HS_ZERO_CNT_MAX 0xf
1861 #define ICL_EXIT_ZERO_CNT_MAX   0x7
1862 
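/*
 * Derive the D-PHY timing counters, in escape clock (tlpx) units, from the
 * VBT MIPI timing parameters, clamping each counter to its hardware
 * maximum before packing it into the clock and data lane override fields.
 */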
1863 static void icl_dphy_param_init(struct intel_dsi *intel_dsi)
1864 {
1865     struct drm_device *dev = intel_dsi->base.base.dev;
1866     struct drm_i915_private *dev_priv = to_i915(dev);
1867     struct intel_connector *connector = intel_dsi->attached_connector;
1868     struct mipi_config *mipi_config = connector->panel.vbt.dsi.config;
1869     u32 tlpx_ns;
1870     u32 prepare_cnt, exit_zero_cnt, clk_zero_cnt, trail_cnt;
1871     u32 ths_prepare_ns, tclk_trail_ns;
1872     u32 hs_zero_cnt;
1873     u32 tclk_pre_cnt, tclk_post_cnt;
1874 
1875     tlpx_ns = intel_dsi_tlpx_ns(intel_dsi);
1876 
1877     tclk_trail_ns = max(mipi_config->tclk_trail, mipi_config->ths_trail);
1878     ths_prepare_ns = max(mipi_config->ths_prepare,
1879                  mipi_config->tclk_prepare);
1880 
1881     /*
1882      * prepare count in escape clocks
1883      * This field is a fixed-point value in 1.2 format: the most
1884      * significant bit is the integer part and the two least
1885      * significant bits are the fraction, so the field can
1886      * represent a range of 0.25 to 1.75.
1887      */
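    /*
     * Worked example with hypothetical VBT values: ths_prepare of 60 ns
     * and tlpx of 77 ns give DIV_ROUND_UP(60 * 4, 77) = 4, i.e. 1.00 in
     * 1.2 fixed-point format.
     */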
1888     prepare_cnt = DIV_ROUND_UP(ths_prepare_ns * 4, tlpx_ns);
1889     if (prepare_cnt > ICL_PREPARE_CNT_MAX) {
1890         drm_dbg_kms(&dev_priv->drm, "prepare_cnt out of range (%d)\n",
1891                 prepare_cnt);
1892         prepare_cnt = ICL_PREPARE_CNT_MAX;
1893     }
1894 
1895     /* clk zero count in escape clocks */
1896     clk_zero_cnt = DIV_ROUND_UP(mipi_config->tclk_prepare_clkzero -
1897                     ths_prepare_ns, tlpx_ns);
1898     if (clk_zero_cnt > ICL_CLK_ZERO_CNT_MAX) {
1899         drm_dbg_kms(&dev_priv->drm,
1900                 "clk_zero_cnt out of range (%d)\n", clk_zero_cnt);
1901         clk_zero_cnt = ICL_CLK_ZERO_CNT_MAX;
1902     }
1903 
1904     /* trail cnt in escape clocks */
1905     trail_cnt = DIV_ROUND_UP(tclk_trail_ns, tlpx_ns);
1906     if (trail_cnt > ICL_TRAIL_CNT_MAX) {
1907         drm_dbg_kms(&dev_priv->drm, "trail_cnt out of range (%d)\n",
1908                 trail_cnt);
1909         trail_cnt = ICL_TRAIL_CNT_MAX;
1910     }
1911 
1912     /* tclk pre count in escape clocks */
1913     tclk_pre_cnt = DIV_ROUND_UP(mipi_config->tclk_pre, tlpx_ns);
1914     if (tclk_pre_cnt > ICL_TCLK_PRE_CNT_MAX) {
1915         drm_dbg_kms(&dev_priv->drm,
1916                 "tclk_pre_cnt out of range (%d)\n", tclk_pre_cnt);
1917         tclk_pre_cnt = ICL_TCLK_PRE_CNT_MAX;
1918     }
1919 
1920     /* tclk post count in escape clocks */
1921     tclk_post_cnt = DIV_ROUND_UP(mipi_config->tclk_post, tlpx_ns);
1922     if (tclk_post_cnt > ICL_TCLK_POST_CNT_MAX) {
1923         drm_dbg_kms(&dev_priv->drm,
1924                 "tclk_post_cnt out of range (%d)\n",
1925                 tclk_post_cnt);
1926         tclk_post_cnt = ICL_TCLK_POST_CNT_MAX;
1927     }
1928 
1929     /* hs zero cnt in escape clocks */
1930     hs_zero_cnt = DIV_ROUND_UP(mipi_config->ths_prepare_hszero -
1931                    ths_prepare_ns, tlpx_ns);
1932     if (hs_zero_cnt > ICL_HS_ZERO_CNT_MAX) {
1933         drm_dbg_kms(&dev_priv->drm, "hs_zero_cnt out of range (%d)\n",
1934                 hs_zero_cnt);
1935         hs_zero_cnt = ICL_HS_ZERO_CNT_MAX;
1936     }
1937 
1938     /* hs exit zero cnt in escape clocks */
1939     exit_zero_cnt = DIV_ROUND_UP(mipi_config->ths_exit, tlpx_ns);
1940     if (exit_zero_cnt > ICL_EXIT_ZERO_CNT_MAX) {
1941         drm_dbg_kms(&dev_priv->drm,
1942                 "exit_zero_cnt out of range (%d)\n",
1943                 exit_zero_cnt);
1944         exit_zero_cnt = ICL_EXIT_ZERO_CNT_MAX;
1945     }
1946 
1947     /* clock lane dphy timings */
1948     intel_dsi->dphy_reg = (CLK_PREPARE_OVERRIDE |
1949                    CLK_PREPARE(prepare_cnt) |
1950                    CLK_ZERO_OVERRIDE |
1951                    CLK_ZERO(clk_zero_cnt) |
1952                    CLK_PRE_OVERRIDE |
1953                    CLK_PRE(tclk_pre_cnt) |
1954                    CLK_POST_OVERRIDE |
1955                    CLK_POST(tclk_post_cnt) |
1956                    CLK_TRAIL_OVERRIDE |
1957                    CLK_TRAIL(trail_cnt));
1958 
1959     /* data lanes dphy timings */
1960     intel_dsi->dphy_data_lane_reg = (HS_PREPARE_OVERRIDE |
1961                      HS_PREPARE(prepare_cnt) |
1962                      HS_ZERO_OVERRIDE |
1963                      HS_ZERO(hs_zero_cnt) |
1964                      HS_TRAIL_OVERRIDE |
1965                      HS_TRAIL(trail_cnt) |
1966                      HS_EXIT_OVERRIDE |
1967                      HS_EXIT(exit_zero_cnt));
1968 
1969     intel_dsi_log_params(intel_dsi);
1970 }
1971 
1972 static void icl_dsi_add_properties(struct intel_connector *connector)
1973 {
1974     const struct drm_display_mode *fixed_mode =
1975         intel_panel_preferred_fixed_mode(connector);
1976     u32 allowed_scalers;
1977 
1978     allowed_scalers = BIT(DRM_MODE_SCALE_ASPECT) |
1979                BIT(DRM_MODE_SCALE_FULLSCREEN) |
1980                BIT(DRM_MODE_SCALE_CENTER);
1981 
1982     drm_connector_attach_scaling_mode_property(&connector->base,
1983                            allowed_scalers);
1984 
1985     connector->base.state->scaling_mode = DRM_MODE_SCALE_ASPECT;
1986 
1987     drm_connector_set_panel_orientation_with_quirk(&connector->base,
1988                                intel_dsi_get_panel_orientation(connector),
1989                                fixed_mode->hdisplay,
1990                                fixed_mode->vdisplay);
1991 }
1992 
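/*
 * Probe and register the ICL+ DSI output: allocate the encoder and
 * connector, wire up the gen11 DSI hooks, pull the fixed mode and DSI
 * parameters from the VBT, create a DSI host per enabled port, and
 * initialize the D-PHY timings.
 */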
1993 void icl_dsi_init(struct drm_i915_private *dev_priv)
1994 {
1995     struct drm_device *dev = &dev_priv->drm;
1996     struct intel_dsi *intel_dsi;
1997     struct intel_encoder *encoder;
1998     struct intel_connector *intel_connector;
1999     struct drm_connector *connector;
2000     enum port port;
2001 
2002     if (!intel_bios_is_dsi_present(dev_priv, &port))
2003         return;
2004 
2005     intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
2006     if (!intel_dsi)
2007         return;
2008 
2009     intel_connector = intel_connector_alloc();
2010     if (!intel_connector) {
2011         kfree(intel_dsi);
2012         return;
2013     }
2014 
2015     encoder = &intel_dsi->base;
2016     intel_dsi->attached_connector = intel_connector;
2017     connector = &intel_connector->base;
2018 
2019     /* register DSI encoder with DRM subsystem */
2020     drm_encoder_init(dev, &encoder->base, &gen11_dsi_encoder_funcs,
2021              DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));
2022 
2023     encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
2024     encoder->pre_enable = gen11_dsi_pre_enable;
2025     encoder->enable = gen11_dsi_enable;
2026     encoder->disable = gen11_dsi_disable;
2027     encoder->post_disable = gen11_dsi_post_disable;
2028     encoder->port = port;
2029     encoder->get_config = gen11_dsi_get_config;
2030     encoder->sync_state = gen11_dsi_sync_state;
2031     encoder->update_pipe = intel_backlight_update;
2032     encoder->compute_config = gen11_dsi_compute_config;
2033     encoder->get_hw_state = gen11_dsi_get_hw_state;
2034     encoder->initial_fastset_check = gen11_dsi_initial_fastset_check;
2035     encoder->type = INTEL_OUTPUT_DSI;
2036     encoder->cloneable = 0;
2037     encoder->pipe_mask = ~0;
2038     encoder->power_domain = POWER_DOMAIN_PORT_DSI;
2039     encoder->get_power_domains = gen11_dsi_get_power_domains;
2040     encoder->disable_clock = gen11_dsi_gate_clocks;
2041     encoder->is_clock_enabled = gen11_dsi_is_clock_enabled;
2042 
2043     /* register DSI connector with DRM subsystem */
2044     drm_connector_init(dev, connector, &gen11_dsi_connector_funcs,
2045                DRM_MODE_CONNECTOR_DSI);
2046     drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
2047     connector->display_info.subpixel_order = SubPixelHorizontalRGB;
2048     connector->interlace_allowed = false;
2049     connector->doublescan_allowed = false;
2050     intel_connector->get_hw_state = intel_connector_get_hw_state;
2051 
2052     /* attach connector to encoder */
2053     intel_connector_attach_encoder(intel_connector, encoder);
2054 
2055     intel_bios_init_panel(dev_priv, &intel_connector->panel, NULL, NULL);
2056 
2057     mutex_lock(&dev->mode_config.mutex);
2058     intel_panel_add_vbt_lfp_fixed_mode(intel_connector);
2059     mutex_unlock(&dev->mode_config.mutex);
2060 
2061     if (!intel_panel_preferred_fixed_mode(intel_connector)) {
2062         drm_err(&dev_priv->drm, "DSI fixed mode info missing\n");
2063         goto err;
2064     }
2065 
2066     intel_panel_init(intel_connector);
2067 
2068     intel_backlight_setup(intel_connector, INVALID_PIPE);
2069 
2070     if (intel_connector->panel.vbt.dsi.config->dual_link)
2071         intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
2072     else
2073         intel_dsi->ports = BIT(port);
2074 
2075     if (drm_WARN_ON(&dev_priv->drm, intel_connector->panel.vbt.dsi.bl_ports & ~intel_dsi->ports))
2076         intel_connector->panel.vbt.dsi.bl_ports &= intel_dsi->ports;
2077 
2078     intel_dsi->dcs_backlight_ports = intel_connector->panel.vbt.dsi.bl_ports;
2079 
2080     if (drm_WARN_ON(&dev_priv->drm, intel_connector->panel.vbt.dsi.cabc_ports & ~intel_dsi->ports))
2081         intel_connector->panel.vbt.dsi.cabc_ports &= intel_dsi->ports;
2082 
2083     intel_dsi->dcs_cabc_ports = intel_connector->panel.vbt.dsi.cabc_ports;
2084 
2085     for_each_dsi_port(port, intel_dsi->ports) {
2086         struct intel_dsi_host *host;
2087 
2088         host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
2089         if (!host)
2090             goto err;
2091 
2092         intel_dsi->dsi_hosts[port] = host;
2093     }
2094 
2095     if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
2096         drm_dbg_kms(&dev_priv->drm, "no device found\n");
2097         goto err;
2098     }
2099 
2100     icl_dphy_param_init(intel_dsi);
2101 
2102     icl_dsi_add_properties(intel_connector);
2103     return;
2104 
2105 err:
2106     drm_connector_cleanup(connector);
2107     drm_encoder_cleanup(&encoder->base);
2108     kfree(intel_dsi);
2109     kfree(intel_connector);
2110 }