0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024 #include <linux/string_helpers.h>
0025
0026 #include "intel_de.h"
0027 #include "intel_display_types.h"
0028 #include "intel_dpio_phy.h"
0029 #include "intel_dpll.h"
0030 #include "intel_dpll_mgr.h"
0031 #include "intel_pch_refclk.h"
0032 #include "intel_tc.h"
0033 #include "intel_tc_phy_regs.h"
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054
0055
0056 struct intel_shared_dpll_funcs {
0057
0058
0059
0060
0061 void (*enable)(struct drm_i915_private *i915,
0062 struct intel_shared_dpll *pll);
0063
0064
0065
0066
0067
0068
0069 void (*disable)(struct drm_i915_private *i915,
0070 struct intel_shared_dpll *pll);
0071
0072
0073
0074
0075
0076
0077 bool (*get_hw_state)(struct drm_i915_private *i915,
0078 struct intel_shared_dpll *pll,
0079 struct intel_dpll_hw_state *hw_state);
0080
0081
0082
0083
0084
0085 int (*get_freq)(struct drm_i915_private *i915,
0086 const struct intel_shared_dpll *pll,
0087 const struct intel_dpll_hw_state *pll_state);
0088 };
0089
0090 struct intel_dpll_mgr {
0091 const struct dpll_info *dpll_info;
0092
0093 int (*compute_dplls)(struct intel_atomic_state *state,
0094 struct intel_crtc *crtc,
0095 struct intel_encoder *encoder);
0096 int (*get_dplls)(struct intel_atomic_state *state,
0097 struct intel_crtc *crtc,
0098 struct intel_encoder *encoder);
0099 void (*put_dplls)(struct intel_atomic_state *state,
0100 struct intel_crtc *crtc);
0101 void (*update_active_dpll)(struct intel_atomic_state *state,
0102 struct intel_crtc *crtc,
0103 struct intel_encoder *encoder);
0104 void (*update_ref_clks)(struct drm_i915_private *i915);
0105 void (*dump_hw_state)(struct drm_i915_private *dev_priv,
0106 const struct intel_dpll_hw_state *hw_state);
0107 };
0108
0109 static void
0110 intel_atomic_duplicate_dpll_state(struct drm_i915_private *dev_priv,
0111 struct intel_shared_dpll_state *shared_dpll)
0112 {
0113 enum intel_dpll_id i;
0114
0115
0116 for (i = 0; i < dev_priv->dpll.num_shared_dpll; i++) {
0117 struct intel_shared_dpll *pll = &dev_priv->dpll.shared_dplls[i];
0118
0119 shared_dpll[i] = pll->state;
0120 }
0121 }
0122
0123 static struct intel_shared_dpll_state *
0124 intel_atomic_get_shared_dpll_state(struct drm_atomic_state *s)
0125 {
0126 struct intel_atomic_state *state = to_intel_atomic_state(s);
0127
0128 drm_WARN_ON(s->dev, !drm_modeset_is_locked(&s->dev->mode_config.connection_mutex));
0129
0130 if (!state->dpll_set) {
0131 state->dpll_set = true;
0132
0133 intel_atomic_duplicate_dpll_state(to_i915(s->dev),
0134 state->shared_dpll);
0135 }
0136
0137 return state->shared_dpll;
0138 }
0139
0140
0141
0142
0143
0144
0145
0146
0147
0148 struct intel_shared_dpll *
0149 intel_get_shared_dpll_by_id(struct drm_i915_private *dev_priv,
0150 enum intel_dpll_id id)
0151 {
0152 return &dev_priv->dpll.shared_dplls[id];
0153 }
0154
0155
0156
0157
0158
0159
0160
0161
0162
0163 enum intel_dpll_id
0164 intel_get_shared_dpll_id(struct drm_i915_private *dev_priv,
0165 struct intel_shared_dpll *pll)
0166 {
0167 long pll_idx = pll - dev_priv->dpll.shared_dplls;
0168
0169 if (drm_WARN_ON(&dev_priv->drm,
0170 pll_idx < 0 ||
0171 pll_idx >= dev_priv->dpll.num_shared_dpll))
0172 return -1;
0173
0174 return pll_idx;
0175 }
0176
0177
0178 void assert_shared_dpll(struct drm_i915_private *dev_priv,
0179 struct intel_shared_dpll *pll,
0180 bool state)
0181 {
0182 bool cur_state;
0183 struct intel_dpll_hw_state hw_state;
0184
0185 if (drm_WARN(&dev_priv->drm, !pll,
0186 "asserting DPLL %s with no DPLL\n", str_on_off(state)))
0187 return;
0188
0189 cur_state = intel_dpll_get_hw_state(dev_priv, pll, &hw_state);
0190 I915_STATE_WARN(cur_state != state,
0191 "%s assertion failure (expected %s, current %s)\n",
0192 pll->info->name, str_on_off(state),
0193 str_on_off(cur_state));
0194 }
0195
0196 static enum tc_port icl_pll_id_to_tc_port(enum intel_dpll_id id)
0197 {
0198 return TC_PORT_1 + id - DPLL_ID_ICL_MGPLL1;
0199 }
0200
0201 enum intel_dpll_id icl_tc_port_to_pll_id(enum tc_port tc_port)
0202 {
0203 return tc_port - TC_PORT_1 + DPLL_ID_ICL_MGPLL1;
0204 }
0205
0206 static i915_reg_t
0207 intel_combo_pll_enable_reg(struct drm_i915_private *i915,
0208 struct intel_shared_dpll *pll)
0209 {
0210 if (IS_DG1(i915))
0211 return DG1_DPLL_ENABLE(pll->info->id);
0212 else if (IS_JSL_EHL(i915) && (pll->info->id == DPLL_ID_EHL_DPLL4))
0213 return MG_PLL_ENABLE(0);
0214
0215 return ICL_DPLL_ENABLE(pll->info->id);
0216 }
0217
0218 static i915_reg_t
0219 intel_tc_pll_enable_reg(struct drm_i915_private *i915,
0220 struct intel_shared_dpll *pll)
0221 {
0222 const enum intel_dpll_id id = pll->info->id;
0223 enum tc_port tc_port = icl_pll_id_to_tc_port(id);
0224
0225 if (IS_ALDERLAKE_P(i915))
0226 return ADLP_PORTTC_PLL_ENABLE(tc_port);
0227
0228 return MG_PLL_ENABLE(tc_port);
0229 }
0230
0231
0232
0233
0234
0235
0236
0237 void intel_enable_shared_dpll(const struct intel_crtc_state *crtc_state)
0238 {
0239 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
0240 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
0241 struct intel_shared_dpll *pll = crtc_state->shared_dpll;
0242 unsigned int pipe_mask = BIT(crtc->pipe);
0243 unsigned int old_mask;
0244
0245 if (drm_WARN_ON(&dev_priv->drm, pll == NULL))
0246 return;
0247
0248 mutex_lock(&dev_priv->dpll.lock);
0249 old_mask = pll->active_mask;
0250
0251 if (drm_WARN_ON(&dev_priv->drm, !(pll->state.pipe_mask & pipe_mask)) ||
0252 drm_WARN_ON(&dev_priv->drm, pll->active_mask & pipe_mask))
0253 goto out;
0254
0255 pll->active_mask |= pipe_mask;
0256
0257 drm_dbg_kms(&dev_priv->drm,
0258 "enable %s (active 0x%x, on? %d) for [CRTC:%d:%s]\n",
0259 pll->info->name, pll->active_mask, pll->on,
0260 crtc->base.base.id, crtc->base.name);
0261
0262 if (old_mask) {
0263 drm_WARN_ON(&dev_priv->drm, !pll->on);
0264 assert_shared_dpll_enabled(dev_priv, pll);
0265 goto out;
0266 }
0267 drm_WARN_ON(&dev_priv->drm, pll->on);
0268
0269 drm_dbg_kms(&dev_priv->drm, "enabling %s\n", pll->info->name);
0270 pll->info->funcs->enable(dev_priv, pll);
0271 pll->on = true;
0272
0273 out:
0274 mutex_unlock(&dev_priv->dpll.lock);
0275 }
0276
0277
0278
0279
0280
0281
0282
0283 void intel_disable_shared_dpll(const struct intel_crtc_state *crtc_state)
0284 {
0285 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
0286 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
0287 struct intel_shared_dpll *pll = crtc_state->shared_dpll;
0288 unsigned int pipe_mask = BIT(crtc->pipe);
0289
0290
0291 if (DISPLAY_VER(dev_priv) < 5)
0292 return;
0293
0294 if (pll == NULL)
0295 return;
0296
0297 mutex_lock(&dev_priv->dpll.lock);
0298 if (drm_WARN(&dev_priv->drm, !(pll->active_mask & pipe_mask),
0299 "%s not used by [CRTC:%d:%s]\n", pll->info->name,
0300 crtc->base.base.id, crtc->base.name))
0301 goto out;
0302
0303 drm_dbg_kms(&dev_priv->drm,
0304 "disable %s (active 0x%x, on? %d) for [CRTC:%d:%s]\n",
0305 pll->info->name, pll->active_mask, pll->on,
0306 crtc->base.base.id, crtc->base.name);
0307
0308 assert_shared_dpll_enabled(dev_priv, pll);
0309 drm_WARN_ON(&dev_priv->drm, !pll->on);
0310
0311 pll->active_mask &= ~pipe_mask;
0312 if (pll->active_mask)
0313 goto out;
0314
0315 drm_dbg_kms(&dev_priv->drm, "disabling %s\n", pll->info->name);
0316 pll->info->funcs->disable(dev_priv, pll);
0317 pll->on = false;
0318
0319 out:
0320 mutex_unlock(&dev_priv->dpll.lock);
0321 }
0322
0323 static struct intel_shared_dpll *
0324 intel_find_shared_dpll(struct intel_atomic_state *state,
0325 const struct intel_crtc *crtc,
0326 const struct intel_dpll_hw_state *pll_state,
0327 unsigned long dpll_mask)
0328 {
0329 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
0330 struct intel_shared_dpll *pll, *unused_pll = NULL;
0331 struct intel_shared_dpll_state *shared_dpll;
0332 enum intel_dpll_id i;
0333
0334 shared_dpll = intel_atomic_get_shared_dpll_state(&state->base);
0335
0336 drm_WARN_ON(&dev_priv->drm, dpll_mask & ~(BIT(I915_NUM_PLLS) - 1));
0337
0338 for_each_set_bit(i, &dpll_mask, I915_NUM_PLLS) {
0339 pll = &dev_priv->dpll.shared_dplls[i];
0340
0341
0342 if (shared_dpll[i].pipe_mask == 0) {
0343 if (!unused_pll)
0344 unused_pll = pll;
0345 continue;
0346 }
0347
0348 if (memcmp(pll_state,
0349 &shared_dpll[i].hw_state,
0350 sizeof(*pll_state)) == 0) {
0351 drm_dbg_kms(&dev_priv->drm,
0352 "[CRTC:%d:%s] sharing existing %s (pipe mask 0x%x, active 0x%x)\n",
0353 crtc->base.base.id, crtc->base.name,
0354 pll->info->name,
0355 shared_dpll[i].pipe_mask,
0356 pll->active_mask);
0357 return pll;
0358 }
0359 }
0360
0361
0362 if (unused_pll) {
0363 drm_dbg_kms(&dev_priv->drm, "[CRTC:%d:%s] allocated %s\n",
0364 crtc->base.base.id, crtc->base.name,
0365 unused_pll->info->name);
0366 return unused_pll;
0367 }
0368
0369 return NULL;
0370 }
0371
0372 static void
0373 intel_reference_shared_dpll(struct intel_atomic_state *state,
0374 const struct intel_crtc *crtc,
0375 const struct intel_shared_dpll *pll,
0376 const struct intel_dpll_hw_state *pll_state)
0377 {
0378 struct drm_i915_private *i915 = to_i915(state->base.dev);
0379 struct intel_shared_dpll_state *shared_dpll;
0380 const enum intel_dpll_id id = pll->info->id;
0381
0382 shared_dpll = intel_atomic_get_shared_dpll_state(&state->base);
0383
0384 if (shared_dpll[id].pipe_mask == 0)
0385 shared_dpll[id].hw_state = *pll_state;
0386
0387 drm_dbg(&i915->drm, "using %s for pipe %c\n", pll->info->name,
0388 pipe_name(crtc->pipe));
0389
0390 shared_dpll[id].pipe_mask |= BIT(crtc->pipe);
0391 }
0392
0393 static void intel_unreference_shared_dpll(struct intel_atomic_state *state,
0394 const struct intel_crtc *crtc,
0395 const struct intel_shared_dpll *pll)
0396 {
0397 struct intel_shared_dpll_state *shared_dpll;
0398
0399 shared_dpll = intel_atomic_get_shared_dpll_state(&state->base);
0400 shared_dpll[pll->info->id].pipe_mask &= ~BIT(crtc->pipe);
0401 }
0402
0403 static void intel_put_dpll(struct intel_atomic_state *state,
0404 struct intel_crtc *crtc)
0405 {
0406 const struct intel_crtc_state *old_crtc_state =
0407 intel_atomic_get_old_crtc_state(state, crtc);
0408 struct intel_crtc_state *new_crtc_state =
0409 intel_atomic_get_new_crtc_state(state, crtc);
0410
0411 new_crtc_state->shared_dpll = NULL;
0412
0413 if (!old_crtc_state->shared_dpll)
0414 return;
0415
0416 intel_unreference_shared_dpll(state, crtc, old_crtc_state->shared_dpll);
0417 }
0418
0419
0420
0421
0422
0423
0424
0425
0426
0427
0428
0429
0430 void intel_shared_dpll_swap_state(struct intel_atomic_state *state)
0431 {
0432 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
0433 struct intel_shared_dpll_state *shared_dpll = state->shared_dpll;
0434 enum intel_dpll_id i;
0435
0436 if (!state->dpll_set)
0437 return;
0438
0439 for (i = 0; i < dev_priv->dpll.num_shared_dpll; i++) {
0440 struct intel_shared_dpll *pll =
0441 &dev_priv->dpll.shared_dplls[i];
0442
0443 swap(pll->state, shared_dpll[i]);
0444 }
0445 }
0446
0447 static bool ibx_pch_dpll_get_hw_state(struct drm_i915_private *dev_priv,
0448 struct intel_shared_dpll *pll,
0449 struct intel_dpll_hw_state *hw_state)
0450 {
0451 const enum intel_dpll_id id = pll->info->id;
0452 intel_wakeref_t wakeref;
0453 u32 val;
0454
0455 wakeref = intel_display_power_get_if_enabled(dev_priv,
0456 POWER_DOMAIN_DISPLAY_CORE);
0457 if (!wakeref)
0458 return false;
0459
0460 val = intel_de_read(dev_priv, PCH_DPLL(id));
0461 hw_state->dpll = val;
0462 hw_state->fp0 = intel_de_read(dev_priv, PCH_FP0(id));
0463 hw_state->fp1 = intel_de_read(dev_priv, PCH_FP1(id));
0464
0465 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
0466
0467 return val & DPLL_VCO_ENABLE;
0468 }
0469
0470 static void ibx_assert_pch_refclk_enabled(struct drm_i915_private *dev_priv)
0471 {
0472 u32 val;
0473 bool enabled;
0474
0475 I915_STATE_WARN_ON(!(HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)));
0476
0477 val = intel_de_read(dev_priv, PCH_DREF_CONTROL);
0478 enabled = !!(val & (DREF_SSC_SOURCE_MASK | DREF_NONSPREAD_SOURCE_MASK |
0479 DREF_SUPERSPREAD_SOURCE_MASK));
0480 I915_STATE_WARN(!enabled, "PCH refclk assertion failure, should be active but is disabled\n");
0481 }
0482
0483 static void ibx_pch_dpll_enable(struct drm_i915_private *dev_priv,
0484 struct intel_shared_dpll *pll)
0485 {
0486 const enum intel_dpll_id id = pll->info->id;
0487
0488
0489 ibx_assert_pch_refclk_enabled(dev_priv);
0490
0491 intel_de_write(dev_priv, PCH_FP0(id), pll->state.hw_state.fp0);
0492 intel_de_write(dev_priv, PCH_FP1(id), pll->state.hw_state.fp1);
0493
0494 intel_de_write(dev_priv, PCH_DPLL(id), pll->state.hw_state.dpll);
0495
0496
0497 intel_de_posting_read(dev_priv, PCH_DPLL(id));
0498 udelay(150);
0499
0500
0501
0502
0503
0504
0505 intel_de_write(dev_priv, PCH_DPLL(id), pll->state.hw_state.dpll);
0506 intel_de_posting_read(dev_priv, PCH_DPLL(id));
0507 udelay(200);
0508 }
0509
0510 static void ibx_pch_dpll_disable(struct drm_i915_private *dev_priv,
0511 struct intel_shared_dpll *pll)
0512 {
0513 const enum intel_dpll_id id = pll->info->id;
0514
0515 intel_de_write(dev_priv, PCH_DPLL(id), 0);
0516 intel_de_posting_read(dev_priv, PCH_DPLL(id));
0517 udelay(200);
0518 }
0519
0520 static int ibx_compute_dpll(struct intel_atomic_state *state,
0521 struct intel_crtc *crtc,
0522 struct intel_encoder *encoder)
0523 {
0524 return 0;
0525 }
0526
0527 static int ibx_get_dpll(struct intel_atomic_state *state,
0528 struct intel_crtc *crtc,
0529 struct intel_encoder *encoder)
0530 {
0531 struct intel_crtc_state *crtc_state =
0532 intel_atomic_get_new_crtc_state(state, crtc);
0533 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
0534 struct intel_shared_dpll *pll;
0535 enum intel_dpll_id i;
0536
0537 if (HAS_PCH_IBX(dev_priv)) {
0538
0539 i = (enum intel_dpll_id) crtc->pipe;
0540 pll = &dev_priv->dpll.shared_dplls[i];
0541
0542 drm_dbg_kms(&dev_priv->drm,
0543 "[CRTC:%d:%s] using pre-allocated %s\n",
0544 crtc->base.base.id, crtc->base.name,
0545 pll->info->name);
0546 } else {
0547 pll = intel_find_shared_dpll(state, crtc,
0548 &crtc_state->dpll_hw_state,
0549 BIT(DPLL_ID_PCH_PLL_B) |
0550 BIT(DPLL_ID_PCH_PLL_A));
0551 }
0552
0553 if (!pll)
0554 return -EINVAL;
0555
0556
0557 intel_reference_shared_dpll(state, crtc,
0558 pll, &crtc_state->dpll_hw_state);
0559
0560 crtc_state->shared_dpll = pll;
0561
0562 return 0;
0563 }
0564
0565 static void ibx_dump_hw_state(struct drm_i915_private *dev_priv,
0566 const struct intel_dpll_hw_state *hw_state)
0567 {
0568 drm_dbg_kms(&dev_priv->drm,
0569 "dpll_hw_state: dpll: 0x%x, dpll_md: 0x%x, "
0570 "fp0: 0x%x, fp1: 0x%x\n",
0571 hw_state->dpll,
0572 hw_state->dpll_md,
0573 hw_state->fp0,
0574 hw_state->fp1);
0575 }
0576
0577 static const struct intel_shared_dpll_funcs ibx_pch_dpll_funcs = {
0578 .enable = ibx_pch_dpll_enable,
0579 .disable = ibx_pch_dpll_disable,
0580 .get_hw_state = ibx_pch_dpll_get_hw_state,
0581 };
0582
0583 static const struct dpll_info pch_plls[] = {
0584 { "PCH DPLL A", &ibx_pch_dpll_funcs, DPLL_ID_PCH_PLL_A, 0 },
0585 { "PCH DPLL B", &ibx_pch_dpll_funcs, DPLL_ID_PCH_PLL_B, 0 },
0586 { },
0587 };
0588
0589 static const struct intel_dpll_mgr pch_pll_mgr = {
0590 .dpll_info = pch_plls,
0591 .compute_dplls = ibx_compute_dpll,
0592 .get_dplls = ibx_get_dpll,
0593 .put_dplls = intel_put_dpll,
0594 .dump_hw_state = ibx_dump_hw_state,
0595 };
0596
0597 static void hsw_ddi_wrpll_enable(struct drm_i915_private *dev_priv,
0598 struct intel_shared_dpll *pll)
0599 {
0600 const enum intel_dpll_id id = pll->info->id;
0601
0602 intel_de_write(dev_priv, WRPLL_CTL(id), pll->state.hw_state.wrpll);
0603 intel_de_posting_read(dev_priv, WRPLL_CTL(id));
0604 udelay(20);
0605 }
0606
0607 static void hsw_ddi_spll_enable(struct drm_i915_private *dev_priv,
0608 struct intel_shared_dpll *pll)
0609 {
0610 intel_de_write(dev_priv, SPLL_CTL, pll->state.hw_state.spll);
0611 intel_de_posting_read(dev_priv, SPLL_CTL);
0612 udelay(20);
0613 }
0614
0615 static void hsw_ddi_wrpll_disable(struct drm_i915_private *dev_priv,
0616 struct intel_shared_dpll *pll)
0617 {
0618 const enum intel_dpll_id id = pll->info->id;
0619 u32 val;
0620
0621 val = intel_de_read(dev_priv, WRPLL_CTL(id));
0622 intel_de_write(dev_priv, WRPLL_CTL(id), val & ~WRPLL_PLL_ENABLE);
0623 intel_de_posting_read(dev_priv, WRPLL_CTL(id));
0624
0625
0626
0627
0628
0629 if (dev_priv->pch_ssc_use & BIT(id))
0630 intel_init_pch_refclk(dev_priv);
0631 }
0632
0633 static void hsw_ddi_spll_disable(struct drm_i915_private *dev_priv,
0634 struct intel_shared_dpll *pll)
0635 {
0636 enum intel_dpll_id id = pll->info->id;
0637 u32 val;
0638
0639 val = intel_de_read(dev_priv, SPLL_CTL);
0640 intel_de_write(dev_priv, SPLL_CTL, val & ~SPLL_PLL_ENABLE);
0641 intel_de_posting_read(dev_priv, SPLL_CTL);
0642
0643
0644
0645
0646
0647 if (dev_priv->pch_ssc_use & BIT(id))
0648 intel_init_pch_refclk(dev_priv);
0649 }
0650
0651 static bool hsw_ddi_wrpll_get_hw_state(struct drm_i915_private *dev_priv,
0652 struct intel_shared_dpll *pll,
0653 struct intel_dpll_hw_state *hw_state)
0654 {
0655 const enum intel_dpll_id id = pll->info->id;
0656 intel_wakeref_t wakeref;
0657 u32 val;
0658
0659 wakeref = intel_display_power_get_if_enabled(dev_priv,
0660 POWER_DOMAIN_DISPLAY_CORE);
0661 if (!wakeref)
0662 return false;
0663
0664 val = intel_de_read(dev_priv, WRPLL_CTL(id));
0665 hw_state->wrpll = val;
0666
0667 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
0668
0669 return val & WRPLL_PLL_ENABLE;
0670 }
0671
0672 static bool hsw_ddi_spll_get_hw_state(struct drm_i915_private *dev_priv,
0673 struct intel_shared_dpll *pll,
0674 struct intel_dpll_hw_state *hw_state)
0675 {
0676 intel_wakeref_t wakeref;
0677 u32 val;
0678
0679 wakeref = intel_display_power_get_if_enabled(dev_priv,
0680 POWER_DOMAIN_DISPLAY_CORE);
0681 if (!wakeref)
0682 return false;
0683
0684 val = intel_de_read(dev_priv, SPLL_CTL);
0685 hw_state->spll = val;
0686
0687 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
0688
0689 return val & SPLL_PLL_ENABLE;
0690 }
0691
0692 #define LC_FREQ 2700
0693 #define LC_FREQ_2K U64_C(LC_FREQ * 2000)
0694
0695 #define P_MIN 2
0696 #define P_MAX 64
0697 #define P_INC 2
0698
0699
0700 #define REF_MIN 48
0701 #define REF_MAX 400
0702 #define VCO_MIN 2400
0703 #define VCO_MAX 4800
0704
0705 struct hsw_wrpll_rnp {
0706 unsigned p, n2, r2;
0707 };
0708
0709 static unsigned hsw_wrpll_get_budget_for_freq(int clock)
0710 {
0711 unsigned budget;
0712
0713 switch (clock) {
0714 case 25175000:
0715 case 25200000:
0716 case 27000000:
0717 case 27027000:
0718 case 37762500:
0719 case 37800000:
0720 case 40500000:
0721 case 40541000:
0722 case 54000000:
0723 case 54054000:
0724 case 59341000:
0725 case 59400000:
0726 case 72000000:
0727 case 74176000:
0728 case 74250000:
0729 case 81000000:
0730 case 81081000:
0731 case 89012000:
0732 case 89100000:
0733 case 108000000:
0734 case 108108000:
0735 case 111264000:
0736 case 111375000:
0737 case 148352000:
0738 case 148500000:
0739 case 162000000:
0740 case 162162000:
0741 case 222525000:
0742 case 222750000:
0743 case 296703000:
0744 case 297000000:
0745 budget = 0;
0746 break;
0747 case 233500000:
0748 case 245250000:
0749 case 247750000:
0750 case 253250000:
0751 case 298000000:
0752 budget = 1500;
0753 break;
0754 case 169128000:
0755 case 169500000:
0756 case 179500000:
0757 case 202000000:
0758 budget = 2000;
0759 break;
0760 case 256250000:
0761 case 262500000:
0762 case 270000000:
0763 case 272500000:
0764 case 273750000:
0765 case 280750000:
0766 case 281250000:
0767 case 286000000:
0768 case 291750000:
0769 budget = 4000;
0770 break;
0771 case 267250000:
0772 case 268500000:
0773 budget = 5000;
0774 break;
0775 default:
0776 budget = 1000;
0777 break;
0778 }
0779
0780 return budget;
0781 }
0782
0783 static void hsw_wrpll_update_rnp(u64 freq2k, unsigned int budget,
0784 unsigned int r2, unsigned int n2,
0785 unsigned int p,
0786 struct hsw_wrpll_rnp *best)
0787 {
0788 u64 a, b, c, d, diff, diff_best;
0789
0790
0791 if (best->p == 0) {
0792 best->p = p;
0793 best->n2 = n2;
0794 best->r2 = r2;
0795 return;
0796 }
0797
0798
0799
0800
0801
0802
0803
0804
0805
0806
0807
0808
0809
0810
0811
0812 a = freq2k * budget * p * r2;
0813 b = freq2k * budget * best->p * best->r2;
0814 diff = abs_diff(freq2k * p * r2, LC_FREQ_2K * n2);
0815 diff_best = abs_diff(freq2k * best->p * best->r2,
0816 LC_FREQ_2K * best->n2);
0817 c = 1000000 * diff;
0818 d = 1000000 * diff_best;
0819
0820 if (a < c && b < d) {
0821
0822 if (best->p * best->r2 * diff < p * r2 * diff_best) {
0823 best->p = p;
0824 best->n2 = n2;
0825 best->r2 = r2;
0826 }
0827 } else if (a >= c && b < d) {
0828
0829 best->p = p;
0830 best->n2 = n2;
0831 best->r2 = r2;
0832 } else if (a >= c && b >= d) {
0833
0834 if (n2 * best->r2 * best->r2 > best->n2 * r2 * r2) {
0835 best->p = p;
0836 best->n2 = n2;
0837 best->r2 = r2;
0838 }
0839 }
0840
0841 }
0842
0843 static void
0844 hsw_ddi_calculate_wrpll(int clock ,
0845 unsigned *r2_out, unsigned *n2_out, unsigned *p_out)
0846 {
0847 u64 freq2k;
0848 unsigned p, n2, r2;
0849 struct hsw_wrpll_rnp best = {};
0850 unsigned budget;
0851
0852 freq2k = clock / 100;
0853
0854 budget = hsw_wrpll_get_budget_for_freq(clock);
0855
0856
0857
0858 if (freq2k == 5400000) {
0859 *n2_out = 2;
0860 *p_out = 1;
0861 *r2_out = 2;
0862 return;
0863 }
0864
0865
0866
0867
0868
0869
0870
0871
0872
0873
0874
0875
0876
0877
0878 for (r2 = LC_FREQ * 2 / REF_MAX + 1;
0879 r2 <= LC_FREQ * 2 / REF_MIN;
0880 r2++) {
0881
0882
0883
0884
0885
0886
0887
0888
0889
0890
0891
0892
0893 for (n2 = VCO_MIN * r2 / LC_FREQ + 1;
0894 n2 <= VCO_MAX * r2 / LC_FREQ;
0895 n2++) {
0896
0897 for (p = P_MIN; p <= P_MAX; p += P_INC)
0898 hsw_wrpll_update_rnp(freq2k, budget,
0899 r2, n2, p, &best);
0900 }
0901 }
0902
0903 *n2_out = best.n2;
0904 *p_out = best.p;
0905 *r2_out = best.r2;
0906 }
0907
0908 static int
0909 hsw_ddi_wrpll_compute_dpll(struct intel_atomic_state *state,
0910 struct intel_crtc *crtc)
0911 {
0912 struct intel_crtc_state *crtc_state =
0913 intel_atomic_get_new_crtc_state(state, crtc);
0914 unsigned int p, n2, r2;
0915
0916 hsw_ddi_calculate_wrpll(crtc_state->port_clock * 1000, &r2, &n2, &p);
0917
0918 crtc_state->dpll_hw_state.wrpll =
0919 WRPLL_PLL_ENABLE | WRPLL_REF_LCPLL |
0920 WRPLL_DIVIDER_REFERENCE(r2) | WRPLL_DIVIDER_FEEDBACK(n2) |
0921 WRPLL_DIVIDER_POST(p);
0922
0923 return 0;
0924 }
0925
0926 static struct intel_shared_dpll *
0927 hsw_ddi_wrpll_get_dpll(struct intel_atomic_state *state,
0928 struct intel_crtc *crtc)
0929 {
0930 struct intel_crtc_state *crtc_state =
0931 intel_atomic_get_new_crtc_state(state, crtc);
0932
0933 return intel_find_shared_dpll(state, crtc,
0934 &crtc_state->dpll_hw_state,
0935 BIT(DPLL_ID_WRPLL2) |
0936 BIT(DPLL_ID_WRPLL1));
0937 }
0938
0939 static int hsw_ddi_wrpll_get_freq(struct drm_i915_private *dev_priv,
0940 const struct intel_shared_dpll *pll,
0941 const struct intel_dpll_hw_state *pll_state)
0942 {
0943 int refclk;
0944 int n, p, r;
0945 u32 wrpll = pll_state->wrpll;
0946
0947 switch (wrpll & WRPLL_REF_MASK) {
0948 case WRPLL_REF_SPECIAL_HSW:
0949
0950 if (IS_HASWELL(dev_priv) && !IS_HSW_ULT(dev_priv)) {
0951 refclk = dev_priv->dpll.ref_clks.nssc;
0952 break;
0953 }
0954 fallthrough;
0955 case WRPLL_REF_PCH_SSC:
0956
0957
0958
0959
0960
0961 refclk = dev_priv->dpll.ref_clks.ssc;
0962 break;
0963 case WRPLL_REF_LCPLL:
0964 refclk = 2700000;
0965 break;
0966 default:
0967 MISSING_CASE(wrpll);
0968 return 0;
0969 }
0970
0971 r = wrpll & WRPLL_DIVIDER_REF_MASK;
0972 p = (wrpll & WRPLL_DIVIDER_POST_MASK) >> WRPLL_DIVIDER_POST_SHIFT;
0973 n = (wrpll & WRPLL_DIVIDER_FB_MASK) >> WRPLL_DIVIDER_FB_SHIFT;
0974
0975
0976 return (refclk * n / 10) / (p * r) * 2;
0977 }
0978
0979 static int
0980 hsw_ddi_lcpll_compute_dpll(struct intel_crtc_state *crtc_state)
0981 {
0982 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
0983 int clock = crtc_state->port_clock;
0984
0985 switch (clock / 2) {
0986 case 81000:
0987 case 135000:
0988 case 270000:
0989 return 0;
0990 default:
0991 drm_dbg_kms(&dev_priv->drm, "Invalid clock for DP: %d\n",
0992 clock);
0993 return -EINVAL;
0994 }
0995 }
0996
0997 static struct intel_shared_dpll *
0998 hsw_ddi_lcpll_get_dpll(struct intel_crtc_state *crtc_state)
0999 {
1000 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1001 struct intel_shared_dpll *pll;
1002 enum intel_dpll_id pll_id;
1003 int clock = crtc_state->port_clock;
1004
1005 switch (clock / 2) {
1006 case 81000:
1007 pll_id = DPLL_ID_LCPLL_810;
1008 break;
1009 case 135000:
1010 pll_id = DPLL_ID_LCPLL_1350;
1011 break;
1012 case 270000:
1013 pll_id = DPLL_ID_LCPLL_2700;
1014 break;
1015 default:
1016 MISSING_CASE(clock / 2);
1017 return NULL;
1018 }
1019
1020 pll = intel_get_shared_dpll_by_id(dev_priv, pll_id);
1021
1022 if (!pll)
1023 return NULL;
1024
1025 return pll;
1026 }
1027
1028 static int hsw_ddi_lcpll_get_freq(struct drm_i915_private *i915,
1029 const struct intel_shared_dpll *pll,
1030 const struct intel_dpll_hw_state *pll_state)
1031 {
1032 int link_clock = 0;
1033
1034 switch (pll->info->id) {
1035 case DPLL_ID_LCPLL_810:
1036 link_clock = 81000;
1037 break;
1038 case DPLL_ID_LCPLL_1350:
1039 link_clock = 135000;
1040 break;
1041 case DPLL_ID_LCPLL_2700:
1042 link_clock = 270000;
1043 break;
1044 default:
1045 drm_WARN(&i915->drm, 1, "bad port clock sel\n");
1046 break;
1047 }
1048
1049 return link_clock * 2;
1050 }
1051
1052 static int
1053 hsw_ddi_spll_compute_dpll(struct intel_atomic_state *state,
1054 struct intel_crtc *crtc)
1055 {
1056 struct intel_crtc_state *crtc_state =
1057 intel_atomic_get_new_crtc_state(state, crtc);
1058
1059 if (drm_WARN_ON(crtc->base.dev, crtc_state->port_clock / 2 != 135000))
1060 return -EINVAL;
1061
1062 crtc_state->dpll_hw_state.spll =
1063 SPLL_PLL_ENABLE | SPLL_FREQ_1350MHz | SPLL_REF_MUXED_SSC;
1064
1065 return 0;
1066 }
1067
1068 static struct intel_shared_dpll *
1069 hsw_ddi_spll_get_dpll(struct intel_atomic_state *state,
1070 struct intel_crtc *crtc)
1071 {
1072 struct intel_crtc_state *crtc_state =
1073 intel_atomic_get_new_crtc_state(state, crtc);
1074
1075 return intel_find_shared_dpll(state, crtc, &crtc_state->dpll_hw_state,
1076 BIT(DPLL_ID_SPLL));
1077 }
1078
1079 static int hsw_ddi_spll_get_freq(struct drm_i915_private *i915,
1080 const struct intel_shared_dpll *pll,
1081 const struct intel_dpll_hw_state *pll_state)
1082 {
1083 int link_clock = 0;
1084
1085 switch (pll_state->spll & SPLL_FREQ_MASK) {
1086 case SPLL_FREQ_810MHz:
1087 link_clock = 81000;
1088 break;
1089 case SPLL_FREQ_1350MHz:
1090 link_clock = 135000;
1091 break;
1092 case SPLL_FREQ_2700MHz:
1093 link_clock = 270000;
1094 break;
1095 default:
1096 drm_WARN(&i915->drm, 1, "bad spll freq\n");
1097 break;
1098 }
1099
1100 return link_clock * 2;
1101 }
1102
1103 static int hsw_compute_dpll(struct intel_atomic_state *state,
1104 struct intel_crtc *crtc,
1105 struct intel_encoder *encoder)
1106 {
1107 struct intel_crtc_state *crtc_state =
1108 intel_atomic_get_new_crtc_state(state, crtc);
1109
1110 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
1111 return hsw_ddi_wrpll_compute_dpll(state, crtc);
1112 else if (intel_crtc_has_dp_encoder(crtc_state))
1113 return hsw_ddi_lcpll_compute_dpll(crtc_state);
1114 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
1115 return hsw_ddi_spll_compute_dpll(state, crtc);
1116 else
1117 return -EINVAL;
1118 }
1119
1120 static int hsw_get_dpll(struct intel_atomic_state *state,
1121 struct intel_crtc *crtc,
1122 struct intel_encoder *encoder)
1123 {
1124 struct intel_crtc_state *crtc_state =
1125 intel_atomic_get_new_crtc_state(state, crtc);
1126 struct intel_shared_dpll *pll = NULL;
1127
1128 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
1129 pll = hsw_ddi_wrpll_get_dpll(state, crtc);
1130 else if (intel_crtc_has_dp_encoder(crtc_state))
1131 pll = hsw_ddi_lcpll_get_dpll(crtc_state);
1132 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
1133 pll = hsw_ddi_spll_get_dpll(state, crtc);
1134
1135 if (!pll)
1136 return -EINVAL;
1137
1138 intel_reference_shared_dpll(state, crtc,
1139 pll, &crtc_state->dpll_hw_state);
1140
1141 crtc_state->shared_dpll = pll;
1142
1143 return 0;
1144 }
1145
1146 static void hsw_update_dpll_ref_clks(struct drm_i915_private *i915)
1147 {
1148 i915->dpll.ref_clks.ssc = 135000;
1149
1150 if (intel_de_read(i915, FUSE_STRAP3) & HSW_REF_CLK_SELECT)
1151 i915->dpll.ref_clks.nssc = 24000;
1152 else
1153 i915->dpll.ref_clks.nssc = 135000;
1154 }
1155
1156 static void hsw_dump_hw_state(struct drm_i915_private *dev_priv,
1157 const struct intel_dpll_hw_state *hw_state)
1158 {
1159 drm_dbg_kms(&dev_priv->drm, "dpll_hw_state: wrpll: 0x%x spll: 0x%x\n",
1160 hw_state->wrpll, hw_state->spll);
1161 }
1162
1163 static const struct intel_shared_dpll_funcs hsw_ddi_wrpll_funcs = {
1164 .enable = hsw_ddi_wrpll_enable,
1165 .disable = hsw_ddi_wrpll_disable,
1166 .get_hw_state = hsw_ddi_wrpll_get_hw_state,
1167 .get_freq = hsw_ddi_wrpll_get_freq,
1168 };
1169
1170 static const struct intel_shared_dpll_funcs hsw_ddi_spll_funcs = {
1171 .enable = hsw_ddi_spll_enable,
1172 .disable = hsw_ddi_spll_disable,
1173 .get_hw_state = hsw_ddi_spll_get_hw_state,
1174 .get_freq = hsw_ddi_spll_get_freq,
1175 };
1176
1177 static void hsw_ddi_lcpll_enable(struct drm_i915_private *dev_priv,
1178 struct intel_shared_dpll *pll)
1179 {
1180 }
1181
1182 static void hsw_ddi_lcpll_disable(struct drm_i915_private *dev_priv,
1183 struct intel_shared_dpll *pll)
1184 {
1185 }
1186
1187 static bool hsw_ddi_lcpll_get_hw_state(struct drm_i915_private *dev_priv,
1188 struct intel_shared_dpll *pll,
1189 struct intel_dpll_hw_state *hw_state)
1190 {
1191 return true;
1192 }
1193
1194 static const struct intel_shared_dpll_funcs hsw_ddi_lcpll_funcs = {
1195 .enable = hsw_ddi_lcpll_enable,
1196 .disable = hsw_ddi_lcpll_disable,
1197 .get_hw_state = hsw_ddi_lcpll_get_hw_state,
1198 .get_freq = hsw_ddi_lcpll_get_freq,
1199 };
1200
1201 static const struct dpll_info hsw_plls[] = {
1202 { "WRPLL 1", &hsw_ddi_wrpll_funcs, DPLL_ID_WRPLL1, 0 },
1203 { "WRPLL 2", &hsw_ddi_wrpll_funcs, DPLL_ID_WRPLL2, 0 },
1204 { "SPLL", &hsw_ddi_spll_funcs, DPLL_ID_SPLL, 0 },
1205 { "LCPLL 810", &hsw_ddi_lcpll_funcs, DPLL_ID_LCPLL_810, INTEL_DPLL_ALWAYS_ON },
1206 { "LCPLL 1350", &hsw_ddi_lcpll_funcs, DPLL_ID_LCPLL_1350, INTEL_DPLL_ALWAYS_ON },
1207 { "LCPLL 2700", &hsw_ddi_lcpll_funcs, DPLL_ID_LCPLL_2700, INTEL_DPLL_ALWAYS_ON },
1208 { },
1209 };
1210
1211 static const struct intel_dpll_mgr hsw_pll_mgr = {
1212 .dpll_info = hsw_plls,
1213 .compute_dplls = hsw_compute_dpll,
1214 .get_dplls = hsw_get_dpll,
1215 .put_dplls = intel_put_dpll,
1216 .update_ref_clks = hsw_update_dpll_ref_clks,
1217 .dump_hw_state = hsw_dump_hw_state,
1218 };
1219
1220 struct skl_dpll_regs {
1221 i915_reg_t ctl, cfgcr1, cfgcr2;
1222 };
1223
1224
1225 static const struct skl_dpll_regs skl_dpll_regs[4] = {
1226 {
1227
1228 .ctl = LCPLL1_CTL,
1229
1230 },
1231 {
1232
1233 .ctl = LCPLL2_CTL,
1234 .cfgcr1 = DPLL_CFGCR1(SKL_DPLL1),
1235 .cfgcr2 = DPLL_CFGCR2(SKL_DPLL1),
1236 },
1237 {
1238
1239 .ctl = WRPLL_CTL(0),
1240 .cfgcr1 = DPLL_CFGCR1(SKL_DPLL2),
1241 .cfgcr2 = DPLL_CFGCR2(SKL_DPLL2),
1242 },
1243 {
1244
1245 .ctl = WRPLL_CTL(1),
1246 .cfgcr1 = DPLL_CFGCR1(SKL_DPLL3),
1247 .cfgcr2 = DPLL_CFGCR2(SKL_DPLL3),
1248 },
1249 };
1250
1251 static void skl_ddi_pll_write_ctrl1(struct drm_i915_private *dev_priv,
1252 struct intel_shared_dpll *pll)
1253 {
1254 const enum intel_dpll_id id = pll->info->id;
1255 u32 val;
1256
1257 val = intel_de_read(dev_priv, DPLL_CTRL1);
1258
1259 val &= ~(DPLL_CTRL1_HDMI_MODE(id) |
1260 DPLL_CTRL1_SSC(id) |
1261 DPLL_CTRL1_LINK_RATE_MASK(id));
1262 val |= pll->state.hw_state.ctrl1 << (id * 6);
1263
1264 intel_de_write(dev_priv, DPLL_CTRL1, val);
1265 intel_de_posting_read(dev_priv, DPLL_CTRL1);
1266 }
1267
1268 static void skl_ddi_pll_enable(struct drm_i915_private *dev_priv,
1269 struct intel_shared_dpll *pll)
1270 {
1271 const struct skl_dpll_regs *regs = skl_dpll_regs;
1272 const enum intel_dpll_id id = pll->info->id;
1273
1274 skl_ddi_pll_write_ctrl1(dev_priv, pll);
1275
1276 intel_de_write(dev_priv, regs[id].cfgcr1, pll->state.hw_state.cfgcr1);
1277 intel_de_write(dev_priv, regs[id].cfgcr2, pll->state.hw_state.cfgcr2);
1278 intel_de_posting_read(dev_priv, regs[id].cfgcr1);
1279 intel_de_posting_read(dev_priv, regs[id].cfgcr2);
1280
1281
1282 intel_de_write(dev_priv, regs[id].ctl,
1283 intel_de_read(dev_priv, regs[id].ctl) | LCPLL_PLL_ENABLE);
1284
1285 if (intel_de_wait_for_set(dev_priv, DPLL_STATUS, DPLL_LOCK(id), 5))
1286 drm_err(&dev_priv->drm, "DPLL %d not locked\n", id);
1287 }
1288
1289 static void skl_ddi_dpll0_enable(struct drm_i915_private *dev_priv,
1290 struct intel_shared_dpll *pll)
1291 {
1292 skl_ddi_pll_write_ctrl1(dev_priv, pll);
1293 }
1294
1295 static void skl_ddi_pll_disable(struct drm_i915_private *dev_priv,
1296 struct intel_shared_dpll *pll)
1297 {
1298 const struct skl_dpll_regs *regs = skl_dpll_regs;
1299 const enum intel_dpll_id id = pll->info->id;
1300
1301
1302 intel_de_write(dev_priv, regs[id].ctl,
1303 intel_de_read(dev_priv, regs[id].ctl) & ~LCPLL_PLL_ENABLE);
1304 intel_de_posting_read(dev_priv, regs[id].ctl);
1305 }
1306
1307 static void skl_ddi_dpll0_disable(struct drm_i915_private *dev_priv,
1308 struct intel_shared_dpll *pll)
1309 {
1310 }
1311
1312 static bool skl_ddi_pll_get_hw_state(struct drm_i915_private *dev_priv,
1313 struct intel_shared_dpll *pll,
1314 struct intel_dpll_hw_state *hw_state)
1315 {
1316 u32 val;
1317 const struct skl_dpll_regs *regs = skl_dpll_regs;
1318 const enum intel_dpll_id id = pll->info->id;
1319 intel_wakeref_t wakeref;
1320 bool ret;
1321
1322 wakeref = intel_display_power_get_if_enabled(dev_priv,
1323 POWER_DOMAIN_DISPLAY_CORE);
1324 if (!wakeref)
1325 return false;
1326
1327 ret = false;
1328
1329 val = intel_de_read(dev_priv, regs[id].ctl);
1330 if (!(val & LCPLL_PLL_ENABLE))
1331 goto out;
1332
1333 val = intel_de_read(dev_priv, DPLL_CTRL1);
1334 hw_state->ctrl1 = (val >> (id * 6)) & 0x3f;
1335
1336
1337 if (val & DPLL_CTRL1_HDMI_MODE(id)) {
1338 hw_state->cfgcr1 = intel_de_read(dev_priv, regs[id].cfgcr1);
1339 hw_state->cfgcr2 = intel_de_read(dev_priv, regs[id].cfgcr2);
1340 }
1341 ret = true;
1342
1343 out:
1344 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
1345
1346 return ret;
1347 }
1348
1349 static bool skl_ddi_dpll0_get_hw_state(struct drm_i915_private *dev_priv,
1350 struct intel_shared_dpll *pll,
1351 struct intel_dpll_hw_state *hw_state)
1352 {
1353 const struct skl_dpll_regs *regs = skl_dpll_regs;
1354 const enum intel_dpll_id id = pll->info->id;
1355 intel_wakeref_t wakeref;
1356 u32 val;
1357 bool ret;
1358
1359 wakeref = intel_display_power_get_if_enabled(dev_priv,
1360 POWER_DOMAIN_DISPLAY_CORE);
1361 if (!wakeref)
1362 return false;
1363
1364 ret = false;
1365
1366
1367 val = intel_de_read(dev_priv, regs[id].ctl);
1368 if (drm_WARN_ON(&dev_priv->drm, !(val & LCPLL_PLL_ENABLE)))
1369 goto out;
1370
1371 val = intel_de_read(dev_priv, DPLL_CTRL1);
1372 hw_state->ctrl1 = (val >> (id * 6)) & 0x3f;
1373
1374 ret = true;
1375
1376 out:
1377 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
1378
1379 return ret;
1380 }
1381
1382 struct skl_wrpll_context {
1383 u64 min_deviation;
1384 u64 central_freq;
1385 u64 dco_freq;
1386 unsigned int p;
1387 };
1388
1389
1390 #define SKL_DCO_MAX_PDEVIATION 100
1391 #define SKL_DCO_MAX_NDEVIATION 600
1392
1393 static void skl_wrpll_try_divider(struct skl_wrpll_context *ctx,
1394 u64 central_freq,
1395 u64 dco_freq,
1396 unsigned int divider)
1397 {
1398 u64 deviation;
1399
1400 deviation = div64_u64(10000 * abs_diff(dco_freq, central_freq),
1401 central_freq);
1402
1403
1404 if (dco_freq >= central_freq) {
1405 if (deviation < SKL_DCO_MAX_PDEVIATION &&
1406 deviation < ctx->min_deviation) {
1407 ctx->min_deviation = deviation;
1408 ctx->central_freq = central_freq;
1409 ctx->dco_freq = dco_freq;
1410 ctx->p = divider;
1411 }
1412
1413 } else if (deviation < SKL_DCO_MAX_NDEVIATION &&
1414 deviation < ctx->min_deviation) {
1415 ctx->min_deviation = deviation;
1416 ctx->central_freq = central_freq;
1417 ctx->dco_freq = dco_freq;
1418 ctx->p = divider;
1419 }
1420 }
1421
1422 static void skl_wrpll_get_multipliers(unsigned int p,
1423 unsigned int *p0 ,
1424 unsigned int *p1 ,
1425 unsigned int *p2 )
1426 {
1427
1428 if (p % 2 == 0) {
1429 unsigned int half = p / 2;
1430
1431 if (half == 1 || half == 2 || half == 3 || half == 5) {
1432 *p0 = 2;
1433 *p1 = 1;
1434 *p2 = half;
1435 } else if (half % 2 == 0) {
1436 *p0 = 2;
1437 *p1 = half / 2;
1438 *p2 = 2;
1439 } else if (half % 3 == 0) {
1440 *p0 = 3;
1441 *p1 = half / 3;
1442 *p2 = 2;
1443 } else if (half % 7 == 0) {
1444 *p0 = 7;
1445 *p1 = half / 7;
1446 *p2 = 2;
1447 }
1448 } else if (p == 3 || p == 9) {
1449 *p0 = 3;
1450 *p1 = 1;
1451 *p2 = p / 3;
1452 } else if (p == 5 || p == 7) {
1453 *p0 = p;
1454 *p1 = 1;
1455 *p2 = 1;
1456 } else if (p == 15) {
1457 *p0 = 3;
1458 *p1 = 1;
1459 *p2 = 5;
1460 } else if (p == 21) {
1461 *p0 = 7;
1462 *p1 = 1;
1463 *p2 = 3;
1464 } else if (p == 35) {
1465 *p0 = 7;
1466 *p1 = 1;
1467 *p2 = 5;
1468 }
1469 }
1470
1471 struct skl_wrpll_params {
1472 u32 dco_fraction;
1473 u32 dco_integer;
1474 u32 qdiv_ratio;
1475 u32 qdiv_mode;
1476 u32 kdiv;
1477 u32 pdiv;
1478 u32 central_freq;
1479 };
1480
1481 static void skl_wrpll_params_populate(struct skl_wrpll_params *params,
1482 u64 afe_clock,
1483 int ref_clock,
1484 u64 central_freq,
1485 u32 p0, u32 p1, u32 p2)
1486 {
1487 u64 dco_freq;
1488
1489 switch (central_freq) {
1490 case 9600000000ULL:
1491 params->central_freq = 0;
1492 break;
1493 case 9000000000ULL:
1494 params->central_freq = 1;
1495 break;
1496 case 8400000000ULL:
1497 params->central_freq = 3;
1498 }
1499
1500 switch (p0) {
1501 case 1:
1502 params->pdiv = 0;
1503 break;
1504 case 2:
1505 params->pdiv = 1;
1506 break;
1507 case 3:
1508 params->pdiv = 2;
1509 break;
1510 case 7:
1511 params->pdiv = 4;
1512 break;
1513 default:
1514 WARN(1, "Incorrect PDiv\n");
1515 }
1516
1517 switch (p2) {
1518 case 5:
1519 params->kdiv = 0;
1520 break;
1521 case 2:
1522 params->kdiv = 1;
1523 break;
1524 case 3:
1525 params->kdiv = 2;
1526 break;
1527 case 1:
1528 params->kdiv = 3;
1529 break;
1530 default:
1531 WARN(1, "Incorrect KDiv\n");
1532 }
1533
1534 params->qdiv_ratio = p1;
1535 params->qdiv_mode = (params->qdiv_ratio == 1) ? 0 : 1;
1536
1537 dco_freq = p0 * p1 * p2 * afe_clock;
1538
1539
1540
1541
1542
1543 params->dco_integer = div_u64(dco_freq, ref_clock * KHz(1));
1544 params->dco_fraction =
1545 div_u64((div_u64(dco_freq, ref_clock / KHz(1)) -
1546 params->dco_integer * MHz(1)) * 0x8000, MHz(1));
1547 }
1548
1549 static int
1550 skl_ddi_calculate_wrpll(int clock ,
1551 int ref_clock,
1552 struct skl_wrpll_params *wrpll_params)
1553 {
1554 static const u64 dco_central_freq[3] = { 8400000000ULL,
1555 9000000000ULL,
1556 9600000000ULL };
1557 static const u8 even_dividers[] = { 4, 6, 8, 10, 12, 14, 16, 18, 20,
1558 24, 28, 30, 32, 36, 40, 42, 44,
1559 48, 52, 54, 56, 60, 64, 66, 68,
1560 70, 72, 76, 78, 80, 84, 88, 90,
1561 92, 96, 98 };
1562 static const u8 odd_dividers[] = { 3, 5, 7, 9, 15, 21, 35 };
1563 static const struct {
1564 const u8 *list;
1565 int n_dividers;
1566 } dividers[] = {
1567 { even_dividers, ARRAY_SIZE(even_dividers) },
1568 { odd_dividers, ARRAY_SIZE(odd_dividers) },
1569 };
1570 struct skl_wrpll_context ctx = {
1571 .min_deviation = U64_MAX,
1572 };
1573 unsigned int dco, d, i;
1574 unsigned int p0, p1, p2;
1575 u64 afe_clock = clock * 5;
1576
1577 for (d = 0; d < ARRAY_SIZE(dividers); d++) {
1578 for (dco = 0; dco < ARRAY_SIZE(dco_central_freq); dco++) {
1579 for (i = 0; i < dividers[d].n_dividers; i++) {
1580 unsigned int p = dividers[d].list[i];
1581 u64 dco_freq = p * afe_clock;
1582
1583 skl_wrpll_try_divider(&ctx,
1584 dco_central_freq[dco],
1585 dco_freq,
1586 p);
1587
1588
1589
1590
1591
1592 if (ctx.min_deviation == 0)
1593 goto skip_remaining_dividers;
1594 }
1595 }
1596
1597 skip_remaining_dividers:
1598
1599
1600
1601
1602 if (d == 0 && ctx.p)
1603 break;
1604 }
1605
1606 if (!ctx.p)
1607 return -EINVAL;
1608
1609
1610
1611
1612
1613 p0 = p1 = p2 = 0;
1614 skl_wrpll_get_multipliers(ctx.p, &p0, &p1, &p2);
1615 skl_wrpll_params_populate(wrpll_params, afe_clock, ref_clock,
1616 ctx.central_freq, p0, p1, p2);
1617
1618 return 0;
1619 }
1620
1621 static int skl_ddi_hdmi_pll_dividers(struct intel_crtc_state *crtc_state)
1622 {
1623 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
1624 struct skl_wrpll_params wrpll_params = {};
1625 u32 ctrl1, cfgcr1, cfgcr2;
1626 int ret;
1627
1628
1629
1630
1631
1632 ctrl1 = DPLL_CTRL1_OVERRIDE(0);
1633
1634 ctrl1 |= DPLL_CTRL1_HDMI_MODE(0);
1635
1636 ret = skl_ddi_calculate_wrpll(crtc_state->port_clock * 1000,
1637 i915->dpll.ref_clks.nssc, &wrpll_params);
1638 if (ret)
1639 return ret;
1640
1641 cfgcr1 = DPLL_CFGCR1_FREQ_ENABLE |
1642 DPLL_CFGCR1_DCO_FRACTION(wrpll_params.dco_fraction) |
1643 wrpll_params.dco_integer;
1644
1645 cfgcr2 = DPLL_CFGCR2_QDIV_RATIO(wrpll_params.qdiv_ratio) |
1646 DPLL_CFGCR2_QDIV_MODE(wrpll_params.qdiv_mode) |
1647 DPLL_CFGCR2_KDIV(wrpll_params.kdiv) |
1648 DPLL_CFGCR2_PDIV(wrpll_params.pdiv) |
1649 wrpll_params.central_freq;
1650
1651 crtc_state->dpll_hw_state.ctrl1 = ctrl1;
1652 crtc_state->dpll_hw_state.cfgcr1 = cfgcr1;
1653 crtc_state->dpll_hw_state.cfgcr2 = cfgcr2;
1654
1655 return 0;
1656 }
1657
1658 static int skl_ddi_wrpll_get_freq(struct drm_i915_private *i915,
1659 const struct intel_shared_dpll *pll,
1660 const struct intel_dpll_hw_state *pll_state)
1661 {
1662 int ref_clock = i915->dpll.ref_clks.nssc;
1663 u32 p0, p1, p2, dco_freq;
1664
1665 p0 = pll_state->cfgcr2 & DPLL_CFGCR2_PDIV_MASK;
1666 p2 = pll_state->cfgcr2 & DPLL_CFGCR2_KDIV_MASK;
1667
1668 if (pll_state->cfgcr2 & DPLL_CFGCR2_QDIV_MODE(1))
1669 p1 = (pll_state->cfgcr2 & DPLL_CFGCR2_QDIV_RATIO_MASK) >> 8;
1670 else
1671 p1 = 1;
1672
1673
1674 switch (p0) {
1675 case DPLL_CFGCR2_PDIV_1:
1676 p0 = 1;
1677 break;
1678 case DPLL_CFGCR2_PDIV_2:
1679 p0 = 2;
1680 break;
1681 case DPLL_CFGCR2_PDIV_3:
1682 p0 = 3;
1683 break;
1684 case DPLL_CFGCR2_PDIV_7_INVALID:
1685
1686
1687
1688
1689 drm_dbg_kms(&i915->drm, "Invalid WRPLL PDIV divider value, fixing it.\n");
1690 fallthrough;
1691 case DPLL_CFGCR2_PDIV_7:
1692 p0 = 7;
1693 break;
1694 default:
1695 MISSING_CASE(p0);
1696 return 0;
1697 }
1698
1699 switch (p2) {
1700 case DPLL_CFGCR2_KDIV_5:
1701 p2 = 5;
1702 break;
1703 case DPLL_CFGCR2_KDIV_2:
1704 p2 = 2;
1705 break;
1706 case DPLL_CFGCR2_KDIV_3:
1707 p2 = 3;
1708 break;
1709 case DPLL_CFGCR2_KDIV_1:
1710 p2 = 1;
1711 break;
1712 default:
1713 MISSING_CASE(p2);
1714 return 0;
1715 }
1716
1717 dco_freq = (pll_state->cfgcr1 & DPLL_CFGCR1_DCO_INTEGER_MASK) *
1718 ref_clock;
1719
1720 dco_freq += ((pll_state->cfgcr1 & DPLL_CFGCR1_DCO_FRACTION_MASK) >> 9) *
1721 ref_clock / 0x8000;
1722
1723 if (drm_WARN_ON(&i915->drm, p0 == 0 || p1 == 0 || p2 == 0))
1724 return 0;
1725
1726 return dco_freq / (p0 * p1 * p2 * 5);
1727 }
1728
1729 static int
1730 skl_ddi_dp_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
1731 {
1732 u32 ctrl1;
1733
1734
1735
1736
1737
1738 ctrl1 = DPLL_CTRL1_OVERRIDE(0);
1739 switch (crtc_state->port_clock / 2) {
1740 case 81000:
1741 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, 0);
1742 break;
1743 case 135000:
1744 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, 0);
1745 break;
1746 case 270000:
1747 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, 0);
1748 break;
1749
1750 case 162000:
1751 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, 0);
1752 break;
1753 case 108000:
1754 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, 0);
1755 break;
1756 case 216000:
1757 ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, 0);
1758 break;
1759 }
1760
1761 crtc_state->dpll_hw_state.ctrl1 = ctrl1;
1762
1763 return 0;
1764 }
1765
1766 static int skl_ddi_lcpll_get_freq(struct drm_i915_private *i915,
1767 const struct intel_shared_dpll *pll,
1768 const struct intel_dpll_hw_state *pll_state)
1769 {
1770 int link_clock = 0;
1771
1772 switch ((pll_state->ctrl1 & DPLL_CTRL1_LINK_RATE_MASK(0)) >>
1773 DPLL_CTRL1_LINK_RATE_SHIFT(0)) {
1774 case DPLL_CTRL1_LINK_RATE_810:
1775 link_clock = 81000;
1776 break;
1777 case DPLL_CTRL1_LINK_RATE_1080:
1778 link_clock = 108000;
1779 break;
1780 case DPLL_CTRL1_LINK_RATE_1350:
1781 link_clock = 135000;
1782 break;
1783 case DPLL_CTRL1_LINK_RATE_1620:
1784 link_clock = 162000;
1785 break;
1786 case DPLL_CTRL1_LINK_RATE_2160:
1787 link_clock = 216000;
1788 break;
1789 case DPLL_CTRL1_LINK_RATE_2700:
1790 link_clock = 270000;
1791 break;
1792 default:
1793 drm_WARN(&i915->drm, 1, "Unsupported link rate\n");
1794 break;
1795 }
1796
1797 return link_clock * 2;
1798 }
1799
1800 static int skl_compute_dpll(struct intel_atomic_state *state,
1801 struct intel_crtc *crtc,
1802 struct intel_encoder *encoder)
1803 {
1804 struct intel_crtc_state *crtc_state =
1805 intel_atomic_get_new_crtc_state(state, crtc);
1806
1807 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
1808 return skl_ddi_hdmi_pll_dividers(crtc_state);
1809 else if (intel_crtc_has_dp_encoder(crtc_state))
1810 return skl_ddi_dp_set_dpll_hw_state(crtc_state);
1811 else
1812 return -EINVAL;
1813 }
1814
1815 static int skl_get_dpll(struct intel_atomic_state *state,
1816 struct intel_crtc *crtc,
1817 struct intel_encoder *encoder)
1818 {
1819 struct intel_crtc_state *crtc_state =
1820 intel_atomic_get_new_crtc_state(state, crtc);
1821 struct intel_shared_dpll *pll;
1822
1823 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
1824 pll = intel_find_shared_dpll(state, crtc,
1825 &crtc_state->dpll_hw_state,
1826 BIT(DPLL_ID_SKL_DPLL0));
1827 else
1828 pll = intel_find_shared_dpll(state, crtc,
1829 &crtc_state->dpll_hw_state,
1830 BIT(DPLL_ID_SKL_DPLL3) |
1831 BIT(DPLL_ID_SKL_DPLL2) |
1832 BIT(DPLL_ID_SKL_DPLL1));
1833 if (!pll)
1834 return -EINVAL;
1835
1836 intel_reference_shared_dpll(state, crtc,
1837 pll, &crtc_state->dpll_hw_state);
1838
1839 crtc_state->shared_dpll = pll;
1840
1841 return 0;
1842 }
1843
1844 static int skl_ddi_pll_get_freq(struct drm_i915_private *i915,
1845 const struct intel_shared_dpll *pll,
1846 const struct intel_dpll_hw_state *pll_state)
1847 {
1848
1849
1850
1851
1852 if (pll_state->ctrl1 & DPLL_CTRL1_HDMI_MODE(0))
1853 return skl_ddi_wrpll_get_freq(i915, pll, pll_state);
1854 else
1855 return skl_ddi_lcpll_get_freq(i915, pll, pll_state);
1856 }
1857
1858 static void skl_update_dpll_ref_clks(struct drm_i915_private *i915)
1859 {
1860
1861 i915->dpll.ref_clks.nssc = i915->cdclk.hw.ref;
1862 }
1863
1864 static void skl_dump_hw_state(struct drm_i915_private *dev_priv,
1865 const struct intel_dpll_hw_state *hw_state)
1866 {
1867 drm_dbg_kms(&dev_priv->drm, "dpll_hw_state: "
1868 "ctrl1: 0x%x, cfgcr1: 0x%x, cfgcr2: 0x%x\n",
1869 hw_state->ctrl1,
1870 hw_state->cfgcr1,
1871 hw_state->cfgcr2);
1872 }
1873
1874 static const struct intel_shared_dpll_funcs skl_ddi_pll_funcs = {
1875 .enable = skl_ddi_pll_enable,
1876 .disable = skl_ddi_pll_disable,
1877 .get_hw_state = skl_ddi_pll_get_hw_state,
1878 .get_freq = skl_ddi_pll_get_freq,
1879 };
1880
1881 static const struct intel_shared_dpll_funcs skl_ddi_dpll0_funcs = {
1882 .enable = skl_ddi_dpll0_enable,
1883 .disable = skl_ddi_dpll0_disable,
1884 .get_hw_state = skl_ddi_dpll0_get_hw_state,
1885 .get_freq = skl_ddi_pll_get_freq,
1886 };
1887
1888 static const struct dpll_info skl_plls[] = {
1889 { "DPLL 0", &skl_ddi_dpll0_funcs, DPLL_ID_SKL_DPLL0, INTEL_DPLL_ALWAYS_ON },
1890 { "DPLL 1", &skl_ddi_pll_funcs, DPLL_ID_SKL_DPLL1, 0 },
1891 { "DPLL 2", &skl_ddi_pll_funcs, DPLL_ID_SKL_DPLL2, 0 },
1892 { "DPLL 3", &skl_ddi_pll_funcs, DPLL_ID_SKL_DPLL3, 0 },
1893 { },
1894 };
1895
1896 static const struct intel_dpll_mgr skl_pll_mgr = {
1897 .dpll_info = skl_plls,
1898 .compute_dplls = skl_compute_dpll,
1899 .get_dplls = skl_get_dpll,
1900 .put_dplls = intel_put_dpll,
1901 .update_ref_clks = skl_update_dpll_ref_clks,
1902 .dump_hw_state = skl_dump_hw_state,
1903 };
1904
1905 static void bxt_ddi_pll_enable(struct drm_i915_private *dev_priv,
1906 struct intel_shared_dpll *pll)
1907 {
1908 u32 temp;
1909 enum port port = (enum port)pll->info->id;
1910 enum dpio_phy phy;
1911 enum dpio_channel ch;
1912
1913 bxt_port_to_phy_channel(dev_priv, port, &phy, &ch);
1914
1915
1916 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1917 temp |= PORT_PLL_REF_SEL;
1918 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
1919
1920 if (IS_GEMINILAKE(dev_priv)) {
1921 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
1922 temp |= PORT_PLL_POWER_ENABLE;
1923 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
1924
1925 if (wait_for_us((intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port)) &
1926 PORT_PLL_POWER_STATE), 200))
1927 drm_err(&dev_priv->drm,
1928 "Power state not set for PLL:%d\n", port);
1929 }
1930
1931
1932 temp = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch));
1933 temp &= ~PORT_PLL_10BIT_CLK_ENABLE;
1934 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch), temp);
1935
1936
1937 temp = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_0(phy, ch));
1938 temp &= ~(PORT_PLL_P1_MASK | PORT_PLL_P2_MASK);
1939 temp |= pll->state.hw_state.ebb0;
1940 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_0(phy, ch), temp);
1941
1942
1943 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 0));
1944 temp &= ~PORT_PLL_M2_INT_MASK;
1945 temp |= pll->state.hw_state.pll0;
1946 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 0), temp);
1947
1948
1949 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 1));
1950 temp &= ~PORT_PLL_N_MASK;
1951 temp |= pll->state.hw_state.pll1;
1952 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 1), temp);
1953
1954
1955 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 2));
1956 temp &= ~PORT_PLL_M2_FRAC_MASK;
1957 temp |= pll->state.hw_state.pll2;
1958 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 2), temp);
1959
1960
1961 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 3));
1962 temp &= ~PORT_PLL_M2_FRAC_ENABLE;
1963 temp |= pll->state.hw_state.pll3;
1964 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 3), temp);
1965
1966
1967 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 6));
1968 temp &= ~PORT_PLL_PROP_COEFF_MASK;
1969 temp &= ~PORT_PLL_INT_COEFF_MASK;
1970 temp &= ~PORT_PLL_GAIN_CTL_MASK;
1971 temp |= pll->state.hw_state.pll6;
1972 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 6), temp);
1973
1974
1975 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 8));
1976 temp &= ~PORT_PLL_TARGET_CNT_MASK;
1977 temp |= pll->state.hw_state.pll8;
1978 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 8), temp);
1979
1980 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 9));
1981 temp &= ~PORT_PLL_LOCK_THRESHOLD_MASK;
1982 temp |= pll->state.hw_state.pll9;
1983 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 9), temp);
1984
1985 temp = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 10));
1986 temp &= ~PORT_PLL_DCO_AMP_OVR_EN_H;
1987 temp &= ~PORT_PLL_DCO_AMP_MASK;
1988 temp |= pll->state.hw_state.pll10;
1989 intel_de_write(dev_priv, BXT_PORT_PLL(phy, ch, 10), temp);
1990
1991
1992 temp = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch));
1993 temp |= PORT_PLL_RECALIBRATE;
1994 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch), temp);
1995 temp &= ~PORT_PLL_10BIT_CLK_ENABLE;
1996 temp |= pll->state.hw_state.ebb4;
1997 intel_de_write(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch), temp);
1998
1999
2000 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
2001 temp |= PORT_PLL_ENABLE;
2002 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
2003 intel_de_posting_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
2004
2005 if (wait_for_us((intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port)) & PORT_PLL_LOCK),
2006 200))
2007 drm_err(&dev_priv->drm, "PLL %d not locked\n", port);
2008
2009 if (IS_GEMINILAKE(dev_priv)) {
2010 temp = intel_de_read(dev_priv, BXT_PORT_TX_DW5_LN0(phy, ch));
2011 temp |= DCC_DELAY_RANGE_2;
2012 intel_de_write(dev_priv, BXT_PORT_TX_DW5_GRP(phy, ch), temp);
2013 }
2014
2015
2016
2017
2018
2019 temp = intel_de_read(dev_priv, BXT_PORT_PCS_DW12_LN01(phy, ch));
2020 temp &= ~LANE_STAGGER_MASK;
2021 temp &= ~LANESTAGGER_STRAP_OVRD;
2022 temp |= pll->state.hw_state.pcsdw12;
2023 intel_de_write(dev_priv, BXT_PORT_PCS_DW12_GRP(phy, ch), temp);
2024 }
2025
2026 static void bxt_ddi_pll_disable(struct drm_i915_private *dev_priv,
2027 struct intel_shared_dpll *pll)
2028 {
2029 enum port port = (enum port)pll->info->id;
2030 u32 temp;
2031
2032 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
2033 temp &= ~PORT_PLL_ENABLE;
2034 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
2035 intel_de_posting_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
2036
2037 if (IS_GEMINILAKE(dev_priv)) {
2038 temp = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
2039 temp &= ~PORT_PLL_POWER_ENABLE;
2040 intel_de_write(dev_priv, BXT_PORT_PLL_ENABLE(port), temp);
2041
2042 if (wait_for_us(!(intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port)) &
2043 PORT_PLL_POWER_STATE), 200))
2044 drm_err(&dev_priv->drm,
2045 "Power state not reset for PLL:%d\n", port);
2046 }
2047 }
2048
2049 static bool bxt_ddi_pll_get_hw_state(struct drm_i915_private *dev_priv,
2050 struct intel_shared_dpll *pll,
2051 struct intel_dpll_hw_state *hw_state)
2052 {
2053 enum port port = (enum port)pll->info->id;
2054 intel_wakeref_t wakeref;
2055 enum dpio_phy phy;
2056 enum dpio_channel ch;
2057 u32 val;
2058 bool ret;
2059
2060 bxt_port_to_phy_channel(dev_priv, port, &phy, &ch);
2061
2062 wakeref = intel_display_power_get_if_enabled(dev_priv,
2063 POWER_DOMAIN_DISPLAY_CORE);
2064 if (!wakeref)
2065 return false;
2066
2067 ret = false;
2068
2069 val = intel_de_read(dev_priv, BXT_PORT_PLL_ENABLE(port));
2070 if (!(val & PORT_PLL_ENABLE))
2071 goto out;
2072
2073 hw_state->ebb0 = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_0(phy, ch));
2074 hw_state->ebb0 &= PORT_PLL_P1_MASK | PORT_PLL_P2_MASK;
2075
2076 hw_state->ebb4 = intel_de_read(dev_priv, BXT_PORT_PLL_EBB_4(phy, ch));
2077 hw_state->ebb4 &= PORT_PLL_10BIT_CLK_ENABLE;
2078
2079 hw_state->pll0 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 0));
2080 hw_state->pll0 &= PORT_PLL_M2_INT_MASK;
2081
2082 hw_state->pll1 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 1));
2083 hw_state->pll1 &= PORT_PLL_N_MASK;
2084
2085 hw_state->pll2 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 2));
2086 hw_state->pll2 &= PORT_PLL_M2_FRAC_MASK;
2087
2088 hw_state->pll3 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 3));
2089 hw_state->pll3 &= PORT_PLL_M2_FRAC_ENABLE;
2090
2091 hw_state->pll6 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 6));
2092 hw_state->pll6 &= PORT_PLL_PROP_COEFF_MASK |
2093 PORT_PLL_INT_COEFF_MASK |
2094 PORT_PLL_GAIN_CTL_MASK;
2095
2096 hw_state->pll8 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 8));
2097 hw_state->pll8 &= PORT_PLL_TARGET_CNT_MASK;
2098
2099 hw_state->pll9 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 9));
2100 hw_state->pll9 &= PORT_PLL_LOCK_THRESHOLD_MASK;
2101
2102 hw_state->pll10 = intel_de_read(dev_priv, BXT_PORT_PLL(phy, ch, 10));
2103 hw_state->pll10 &= PORT_PLL_DCO_AMP_OVR_EN_H |
2104 PORT_PLL_DCO_AMP_MASK;
2105
2106
2107
2108
2109
2110
2111 hw_state->pcsdw12 = intel_de_read(dev_priv,
2112 BXT_PORT_PCS_DW12_LN01(phy, ch));
2113 if (intel_de_read(dev_priv, BXT_PORT_PCS_DW12_LN23(phy, ch)) != hw_state->pcsdw12)
2114 drm_dbg(&dev_priv->drm,
2115 "lane stagger config different for lane 01 (%08x) and 23 (%08x)\n",
2116 hw_state->pcsdw12,
2117 intel_de_read(dev_priv,
2118 BXT_PORT_PCS_DW12_LN23(phy, ch)));
2119 hw_state->pcsdw12 &= LANE_STAGGER_MASK | LANESTAGGER_STRAP_OVRD;
2120
2121 ret = true;
2122
2123 out:
2124 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
2125
2126 return ret;
2127 }
2128
2129
2130 static const struct dpll bxt_dp_clk_val[] = {
2131
2132 { .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a },
2133 { .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 },
2134 { .dot = 540000, .p1 = 2, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 },
2135 { .dot = 216000, .p1 = 3, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a },
2136 { .dot = 243000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6133333 },
2137 { .dot = 324000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x819999a },
2138 { .dot = 432000, .p1 = 3, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x819999a },
2139 };
2140
2141 static int
2142 bxt_ddi_hdmi_pll_dividers(struct intel_crtc_state *crtc_state,
2143 struct dpll *clk_div)
2144 {
2145 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2146
2147
2148
2149
2150
2151
2152 if (!bxt_find_best_dpll(crtc_state, clk_div))
2153 return -EINVAL;
2154
2155 drm_WARN_ON(&i915->drm, clk_div->m1 != 2);
2156
2157 return 0;
2158 }
2159
2160 static void bxt_ddi_dp_pll_dividers(struct intel_crtc_state *crtc_state,
2161 struct dpll *clk_div)
2162 {
2163 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2164 int i;
2165
2166 *clk_div = bxt_dp_clk_val[0];
2167 for (i = 0; i < ARRAY_SIZE(bxt_dp_clk_val); ++i) {
2168 if (crtc_state->port_clock == bxt_dp_clk_val[i].dot) {
2169 *clk_div = bxt_dp_clk_val[i];
2170 break;
2171 }
2172 }
2173
2174 chv_calc_dpll_params(i915->dpll.ref_clks.nssc, clk_div);
2175
2176 drm_WARN_ON(&i915->drm, clk_div->vco == 0 ||
2177 clk_div->dot != crtc_state->port_clock);
2178 }
2179
2180 static int bxt_ddi_set_dpll_hw_state(struct intel_crtc_state *crtc_state,
2181 const struct dpll *clk_div)
2182 {
2183 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2184 struct intel_dpll_hw_state *dpll_hw_state = &crtc_state->dpll_hw_state;
2185 int clock = crtc_state->port_clock;
2186 int vco = clk_div->vco;
2187 u32 prop_coef, int_coef, gain_ctl, targ_cnt;
2188 u32 lanestagger;
2189
2190 if (vco >= 6200000 && vco <= 6700000) {
2191 prop_coef = 4;
2192 int_coef = 9;
2193 gain_ctl = 3;
2194 targ_cnt = 8;
2195 } else if ((vco > 5400000 && vco < 6200000) ||
2196 (vco >= 4800000 && vco < 5400000)) {
2197 prop_coef = 5;
2198 int_coef = 11;
2199 gain_ctl = 3;
2200 targ_cnt = 9;
2201 } else if (vco == 5400000) {
2202 prop_coef = 3;
2203 int_coef = 8;
2204 gain_ctl = 1;
2205 targ_cnt = 9;
2206 } else {
2207 drm_err(&i915->drm, "Invalid VCO\n");
2208 return -EINVAL;
2209 }
2210
2211 if (clock > 270000)
2212 lanestagger = 0x18;
2213 else if (clock > 135000)
2214 lanestagger = 0x0d;
2215 else if (clock > 67000)
2216 lanestagger = 0x07;
2217 else if (clock > 33000)
2218 lanestagger = 0x04;
2219 else
2220 lanestagger = 0x02;
2221
2222 dpll_hw_state->ebb0 = PORT_PLL_P1(clk_div->p1) | PORT_PLL_P2(clk_div->p2);
2223 dpll_hw_state->pll0 = PORT_PLL_M2_INT(clk_div->m2 >> 22);
2224 dpll_hw_state->pll1 = PORT_PLL_N(clk_div->n);
2225 dpll_hw_state->pll2 = PORT_PLL_M2_FRAC(clk_div->m2 & 0x3fffff);
2226
2227 if (clk_div->m2 & 0x3fffff)
2228 dpll_hw_state->pll3 = PORT_PLL_M2_FRAC_ENABLE;
2229
2230 dpll_hw_state->pll6 = PORT_PLL_PROP_COEFF(prop_coef) |
2231 PORT_PLL_INT_COEFF(int_coef) |
2232 PORT_PLL_GAIN_CTL(gain_ctl);
2233
2234 dpll_hw_state->pll8 = PORT_PLL_TARGET_CNT(targ_cnt);
2235
2236 dpll_hw_state->pll9 = PORT_PLL_LOCK_THRESHOLD(5);
2237
2238 dpll_hw_state->pll10 = PORT_PLL_DCO_AMP(15) |
2239 PORT_PLL_DCO_AMP_OVR_EN_H;
2240
2241 dpll_hw_state->ebb4 = PORT_PLL_10BIT_CLK_ENABLE;
2242
2243 dpll_hw_state->pcsdw12 = LANESTAGGER_STRAP_OVRD | lanestagger;
2244
2245 return 0;
2246 }
2247
2248 static int
2249 bxt_ddi_dp_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
2250 {
2251 struct dpll clk_div = {};
2252
2253 bxt_ddi_dp_pll_dividers(crtc_state, &clk_div);
2254
2255 return bxt_ddi_set_dpll_hw_state(crtc_state, &clk_div);
2256 }
2257
2258 static int
2259 bxt_ddi_hdmi_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
2260 {
2261 struct dpll clk_div = {};
2262
2263 bxt_ddi_hdmi_pll_dividers(crtc_state, &clk_div);
2264
2265 return bxt_ddi_set_dpll_hw_state(crtc_state, &clk_div);
2266 }
2267
2268 static int bxt_ddi_pll_get_freq(struct drm_i915_private *i915,
2269 const struct intel_shared_dpll *pll,
2270 const struct intel_dpll_hw_state *pll_state)
2271 {
2272 struct dpll clock;
2273
2274 clock.m1 = 2;
2275 clock.m2 = REG_FIELD_GET(PORT_PLL_M2_INT_MASK, pll_state->pll0) << 22;
2276 if (pll_state->pll3 & PORT_PLL_M2_FRAC_ENABLE)
2277 clock.m2 |= REG_FIELD_GET(PORT_PLL_M2_FRAC_MASK, pll_state->pll2);
2278 clock.n = REG_FIELD_GET(PORT_PLL_N_MASK, pll_state->pll1);
2279 clock.p1 = REG_FIELD_GET(PORT_PLL_P1_MASK, pll_state->ebb0);
2280 clock.p2 = REG_FIELD_GET(PORT_PLL_P2_MASK, pll_state->ebb0);
2281
2282 return chv_calc_dpll_params(i915->dpll.ref_clks.nssc, &clock);
2283 }
2284
2285 static int bxt_compute_dpll(struct intel_atomic_state *state,
2286 struct intel_crtc *crtc,
2287 struct intel_encoder *encoder)
2288 {
2289 struct intel_crtc_state *crtc_state =
2290 intel_atomic_get_new_crtc_state(state, crtc);
2291
2292 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2293 return bxt_ddi_hdmi_set_dpll_hw_state(crtc_state);
2294 else if (intel_crtc_has_dp_encoder(crtc_state))
2295 return bxt_ddi_dp_set_dpll_hw_state(crtc_state);
2296 else
2297 return -EINVAL;
2298 }
2299
2300 static int bxt_get_dpll(struct intel_atomic_state *state,
2301 struct intel_crtc *crtc,
2302 struct intel_encoder *encoder)
2303 {
2304 struct intel_crtc_state *crtc_state =
2305 intel_atomic_get_new_crtc_state(state, crtc);
2306 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2307 struct intel_shared_dpll *pll;
2308 enum intel_dpll_id id;
2309
2310
2311 id = (enum intel_dpll_id) encoder->port;
2312 pll = intel_get_shared_dpll_by_id(dev_priv, id);
2313
2314 drm_dbg_kms(&dev_priv->drm, "[CRTC:%d:%s] using pre-allocated %s\n",
2315 crtc->base.base.id, crtc->base.name, pll->info->name);
2316
2317 intel_reference_shared_dpll(state, crtc,
2318 pll, &crtc_state->dpll_hw_state);
2319
2320 crtc_state->shared_dpll = pll;
2321
2322 return 0;
2323 }
2324
2325 static void bxt_update_dpll_ref_clks(struct drm_i915_private *i915)
2326 {
2327 i915->dpll.ref_clks.ssc = 100000;
2328 i915->dpll.ref_clks.nssc = 100000;
2329
2330 }
2331
2332 static void bxt_dump_hw_state(struct drm_i915_private *dev_priv,
2333 const struct intel_dpll_hw_state *hw_state)
2334 {
2335 drm_dbg_kms(&dev_priv->drm, "dpll_hw_state: ebb0: 0x%x, ebb4: 0x%x,"
2336 "pll0: 0x%x, pll1: 0x%x, pll2: 0x%x, pll3: 0x%x, "
2337 "pll6: 0x%x, pll8: 0x%x, pll9: 0x%x, pll10: 0x%x, pcsdw12: 0x%x\n",
2338 hw_state->ebb0,
2339 hw_state->ebb4,
2340 hw_state->pll0,
2341 hw_state->pll1,
2342 hw_state->pll2,
2343 hw_state->pll3,
2344 hw_state->pll6,
2345 hw_state->pll8,
2346 hw_state->pll9,
2347 hw_state->pll10,
2348 hw_state->pcsdw12);
2349 }
2350
2351 static const struct intel_shared_dpll_funcs bxt_ddi_pll_funcs = {
2352 .enable = bxt_ddi_pll_enable,
2353 .disable = bxt_ddi_pll_disable,
2354 .get_hw_state = bxt_ddi_pll_get_hw_state,
2355 .get_freq = bxt_ddi_pll_get_freq,
2356 };
2357
2358 static const struct dpll_info bxt_plls[] = {
2359 { "PORT PLL A", &bxt_ddi_pll_funcs, DPLL_ID_SKL_DPLL0, 0 },
2360 { "PORT PLL B", &bxt_ddi_pll_funcs, DPLL_ID_SKL_DPLL1, 0 },
2361 { "PORT PLL C", &bxt_ddi_pll_funcs, DPLL_ID_SKL_DPLL2, 0 },
2362 { },
2363 };
2364
2365 static const struct intel_dpll_mgr bxt_pll_mgr = {
2366 .dpll_info = bxt_plls,
2367 .compute_dplls = bxt_compute_dpll,
2368 .get_dplls = bxt_get_dpll,
2369 .put_dplls = intel_put_dpll,
2370 .update_ref_clks = bxt_update_dpll_ref_clks,
2371 .dump_hw_state = bxt_dump_hw_state,
2372 };
2373
2374 static void icl_wrpll_get_multipliers(int bestdiv, int *pdiv,
2375 int *qdiv, int *kdiv)
2376 {
2377
2378 if (bestdiv % 2 == 0) {
2379 if (bestdiv == 2) {
2380 *pdiv = 2;
2381 *qdiv = 1;
2382 *kdiv = 1;
2383 } else if (bestdiv % 4 == 0) {
2384 *pdiv = 2;
2385 *qdiv = bestdiv / 4;
2386 *kdiv = 2;
2387 } else if (bestdiv % 6 == 0) {
2388 *pdiv = 3;
2389 *qdiv = bestdiv / 6;
2390 *kdiv = 2;
2391 } else if (bestdiv % 5 == 0) {
2392 *pdiv = 5;
2393 *qdiv = bestdiv / 10;
2394 *kdiv = 2;
2395 } else if (bestdiv % 14 == 0) {
2396 *pdiv = 7;
2397 *qdiv = bestdiv / 14;
2398 *kdiv = 2;
2399 }
2400 } else {
2401 if (bestdiv == 3 || bestdiv == 5 || bestdiv == 7) {
2402 *pdiv = bestdiv;
2403 *qdiv = 1;
2404 *kdiv = 1;
2405 } else {
2406 *pdiv = bestdiv / 3;
2407 *qdiv = 1;
2408 *kdiv = 3;
2409 }
2410 }
2411 }
2412
2413 static void icl_wrpll_params_populate(struct skl_wrpll_params *params,
2414 u32 dco_freq, u32 ref_freq,
2415 int pdiv, int qdiv, int kdiv)
2416 {
2417 u32 dco;
2418
2419 switch (kdiv) {
2420 case 1:
2421 params->kdiv = 1;
2422 break;
2423 case 2:
2424 params->kdiv = 2;
2425 break;
2426 case 3:
2427 params->kdiv = 4;
2428 break;
2429 default:
2430 WARN(1, "Incorrect KDiv\n");
2431 }
2432
2433 switch (pdiv) {
2434 case 2:
2435 params->pdiv = 1;
2436 break;
2437 case 3:
2438 params->pdiv = 2;
2439 break;
2440 case 5:
2441 params->pdiv = 4;
2442 break;
2443 case 7:
2444 params->pdiv = 8;
2445 break;
2446 default:
2447 WARN(1, "Incorrect PDiv\n");
2448 }
2449
2450 WARN_ON(kdiv != 2 && qdiv != 1);
2451
2452 params->qdiv_ratio = qdiv;
2453 params->qdiv_mode = (qdiv == 1) ? 0 : 1;
2454
2455 dco = div_u64((u64)dco_freq << 15, ref_freq);
2456
2457 params->dco_integer = dco >> 15;
2458 params->dco_fraction = dco & 0x7fff;
2459 }
2460
2461
2462
2463
2464
2465 static bool
2466 ehl_combo_pll_div_frac_wa_needed(struct drm_i915_private *i915)
2467 {
2468 return ((IS_PLATFORM(i915, INTEL_ELKHARTLAKE) &&
2469 IS_JSL_EHL_DISPLAY_STEP(i915, STEP_B0, STEP_FOREVER)) ||
2470 IS_TIGERLAKE(i915) || IS_ALDERLAKE_S(i915) || IS_ALDERLAKE_P(i915)) &&
2471 i915->dpll.ref_clks.nssc == 38400;
2472 }
2473
2474 struct icl_combo_pll_params {
2475 int clock;
2476 struct skl_wrpll_params wrpll;
2477 };
2478
2479
2480
2481
2482
2483 static const struct icl_combo_pll_params icl_dp_combo_pll_24MHz_values[] = {
2484 { 540000,
2485 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2486 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2487 { 270000,
2488 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2489 .pdiv = 0x2 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2490 { 162000,
2491 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2492 .pdiv = 0x4 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2493 { 324000,
2494 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2495 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2496 { 216000,
2497 { .dco_integer = 0x168, .dco_fraction = 0x0000,
2498 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 1, .qdiv_ratio = 2, }, },
2499 { 432000,
2500 { .dco_integer = 0x168, .dco_fraction = 0x0000,
2501 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2502 { 648000,
2503 { .dco_integer = 0x195, .dco_fraction = 0x0000,
2504 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2505 { 810000,
2506 { .dco_integer = 0x151, .dco_fraction = 0x4000,
2507 .pdiv = 0x1 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2508 };
2509
2510
2511
2512 static const struct icl_combo_pll_params icl_dp_combo_pll_19_2MHz_values[] = {
2513 { 540000,
2514 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2515 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2516 { 270000,
2517 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2518 .pdiv = 0x2 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2519 { 162000,
2520 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2521 .pdiv = 0x4 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2522 { 324000,
2523 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2524 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2525 { 216000,
2526 { .dco_integer = 0x1C2, .dco_fraction = 0x0000,
2527 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 1, .qdiv_ratio = 2, }, },
2528 { 432000,
2529 { .dco_integer = 0x1C2, .dco_fraction = 0x0000,
2530 .pdiv = 0x1 , .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2531 { 648000,
2532 { .dco_integer = 0x1FA, .dco_fraction = 0x2000,
2533 .pdiv = 0x2 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2534 { 810000,
2535 { .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2536 .pdiv = 0x1 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
2537 };
2538
2539 static const struct skl_wrpll_params icl_tbt_pll_24MHz_values = {
2540 .dco_integer = 0x151, .dco_fraction = 0x4000,
2541 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0,
2542 };
2543
2544 static const struct skl_wrpll_params icl_tbt_pll_19_2MHz_values = {
2545 .dco_integer = 0x1A5, .dco_fraction = 0x7000,
2546 .pdiv = 0x4 , .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0,
2547 };
2548
2549 static const struct skl_wrpll_params tgl_tbt_pll_19_2MHz_values = {
2550 .dco_integer = 0x54, .dco_fraction = 0x3000,
2551
2552 .pdiv = 0, .kdiv = 0, .qdiv_mode = 0, .qdiv_ratio = 0,
2553 };
2554
2555 static const struct skl_wrpll_params tgl_tbt_pll_24MHz_values = {
2556 .dco_integer = 0x43, .dco_fraction = 0x4000,
2557
2558 };
2559
2560 static int icl_calc_dp_combo_pll(struct intel_crtc_state *crtc_state,
2561 struct skl_wrpll_params *pll_params)
2562 {
2563 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2564 const struct icl_combo_pll_params *params =
2565 dev_priv->dpll.ref_clks.nssc == 24000 ?
2566 icl_dp_combo_pll_24MHz_values :
2567 icl_dp_combo_pll_19_2MHz_values;
2568 int clock = crtc_state->port_clock;
2569 int i;
2570
2571 for (i = 0; i < ARRAY_SIZE(icl_dp_combo_pll_24MHz_values); i++) {
2572 if (clock == params[i].clock) {
2573 *pll_params = params[i].wrpll;
2574 return 0;
2575 }
2576 }
2577
2578 MISSING_CASE(clock);
2579 return -EINVAL;
2580 }
2581
2582 static int icl_calc_tbt_pll(struct intel_crtc_state *crtc_state,
2583 struct skl_wrpll_params *pll_params)
2584 {
2585 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2586
2587 if (DISPLAY_VER(dev_priv) >= 12) {
2588 switch (dev_priv->dpll.ref_clks.nssc) {
2589 default:
2590 MISSING_CASE(dev_priv->dpll.ref_clks.nssc);
2591 fallthrough;
2592 case 19200:
2593 case 38400:
2594 *pll_params = tgl_tbt_pll_19_2MHz_values;
2595 break;
2596 case 24000:
2597 *pll_params = tgl_tbt_pll_24MHz_values;
2598 break;
2599 }
2600 } else {
2601 switch (dev_priv->dpll.ref_clks.nssc) {
2602 default:
2603 MISSING_CASE(dev_priv->dpll.ref_clks.nssc);
2604 fallthrough;
2605 case 19200:
2606 case 38400:
2607 *pll_params = icl_tbt_pll_19_2MHz_values;
2608 break;
2609 case 24000:
2610 *pll_params = icl_tbt_pll_24MHz_values;
2611 break;
2612 }
2613 }
2614
2615 return 0;
2616 }
2617
2618 static int icl_ddi_tbt_pll_get_freq(struct drm_i915_private *i915,
2619 const struct intel_shared_dpll *pll,
2620 const struct intel_dpll_hw_state *pll_state)
2621 {
2622
2623
2624
2625
2626 drm_WARN_ON(&i915->drm, 1);
2627
2628 return 0;
2629 }
2630
2631 static int icl_wrpll_ref_clock(struct drm_i915_private *i915)
2632 {
2633 int ref_clock = i915->dpll.ref_clks.nssc;
2634
2635
2636
2637
2638
2639 if (ref_clock == 38400)
2640 ref_clock = 19200;
2641
2642 return ref_clock;
2643 }
2644
2645 static int
2646 icl_calc_wrpll(struct intel_crtc_state *crtc_state,
2647 struct skl_wrpll_params *wrpll_params)
2648 {
2649 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2650 int ref_clock = icl_wrpll_ref_clock(i915);
2651 u32 afe_clock = crtc_state->port_clock * 5;
2652 u32 dco_min = 7998000;
2653 u32 dco_max = 10000000;
2654 u32 dco_mid = (dco_min + dco_max) / 2;
2655 static const int dividers[] = { 2, 4, 6, 8, 10, 12, 14, 16,
2656 18, 20, 24, 28, 30, 32, 36, 40,
2657 42, 44, 48, 50, 52, 54, 56, 60,
2658 64, 66, 68, 70, 72, 76, 78, 80,
2659 84, 88, 90, 92, 96, 98, 100, 102,
2660 3, 5, 7, 9, 15, 21 };
2661 u32 dco, best_dco = 0, dco_centrality = 0;
2662 u32 best_dco_centrality = U32_MAX;
2663 int d, best_div = 0, pdiv = 0, qdiv = 0, kdiv = 0;
2664
2665 for (d = 0; d < ARRAY_SIZE(dividers); d++) {
2666 dco = afe_clock * dividers[d];
2667
2668 if (dco <= dco_max && dco >= dco_min) {
2669 dco_centrality = abs(dco - dco_mid);
2670
2671 if (dco_centrality < best_dco_centrality) {
2672 best_dco_centrality = dco_centrality;
2673 best_div = dividers[d];
2674 best_dco = dco;
2675 }
2676 }
2677 }
2678
2679 if (best_div == 0)
2680 return -EINVAL;
2681
2682 icl_wrpll_get_multipliers(best_div, &pdiv, &qdiv, &kdiv);
2683 icl_wrpll_params_populate(wrpll_params, best_dco, ref_clock,
2684 pdiv, qdiv, kdiv);
2685
2686 return 0;
2687 }
2688
2689 static int icl_ddi_combo_pll_get_freq(struct drm_i915_private *i915,
2690 const struct intel_shared_dpll *pll,
2691 const struct intel_dpll_hw_state *pll_state)
2692 {
2693 int ref_clock = icl_wrpll_ref_clock(i915);
2694 u32 dco_fraction;
2695 u32 p0, p1, p2, dco_freq;
2696
2697 p0 = pll_state->cfgcr1 & DPLL_CFGCR1_PDIV_MASK;
2698 p2 = pll_state->cfgcr1 & DPLL_CFGCR1_KDIV_MASK;
2699
2700 if (pll_state->cfgcr1 & DPLL_CFGCR1_QDIV_MODE(1))
2701 p1 = (pll_state->cfgcr1 & DPLL_CFGCR1_QDIV_RATIO_MASK) >>
2702 DPLL_CFGCR1_QDIV_RATIO_SHIFT;
2703 else
2704 p1 = 1;
2705
2706 switch (p0) {
2707 case DPLL_CFGCR1_PDIV_2:
2708 p0 = 2;
2709 break;
2710 case DPLL_CFGCR1_PDIV_3:
2711 p0 = 3;
2712 break;
2713 case DPLL_CFGCR1_PDIV_5:
2714 p0 = 5;
2715 break;
2716 case DPLL_CFGCR1_PDIV_7:
2717 p0 = 7;
2718 break;
2719 }
2720
2721 switch (p2) {
2722 case DPLL_CFGCR1_KDIV_1:
2723 p2 = 1;
2724 break;
2725 case DPLL_CFGCR1_KDIV_2:
2726 p2 = 2;
2727 break;
2728 case DPLL_CFGCR1_KDIV_3:
2729 p2 = 3;
2730 break;
2731 }
2732
2733 dco_freq = (pll_state->cfgcr0 & DPLL_CFGCR0_DCO_INTEGER_MASK) *
2734 ref_clock;
2735
2736 dco_fraction = (pll_state->cfgcr0 & DPLL_CFGCR0_DCO_FRACTION_MASK) >>
2737 DPLL_CFGCR0_DCO_FRACTION_SHIFT;
2738
2739 if (ehl_combo_pll_div_frac_wa_needed(i915))
2740 dco_fraction *= 2;
2741
2742 dco_freq += (dco_fraction * ref_clock) / 0x8000;
2743
2744 if (drm_WARN_ON(&i915->drm, p0 == 0 || p1 == 0 || p2 == 0))
2745 return 0;
2746
2747 return dco_freq / (p0 * p1 * p2 * 5);
2748 }
2749
2750 static void icl_calc_dpll_state(struct drm_i915_private *i915,
2751 const struct skl_wrpll_params *pll_params,
2752 struct intel_dpll_hw_state *pll_state)
2753 {
2754 u32 dco_fraction = pll_params->dco_fraction;
2755
2756 if (ehl_combo_pll_div_frac_wa_needed(i915))
2757 dco_fraction = DIV_ROUND_CLOSEST(dco_fraction, 2);
2758
2759 pll_state->cfgcr0 = DPLL_CFGCR0_DCO_FRACTION(dco_fraction) |
2760 pll_params->dco_integer;
2761
2762 pll_state->cfgcr1 = DPLL_CFGCR1_QDIV_RATIO(pll_params->qdiv_ratio) |
2763 DPLL_CFGCR1_QDIV_MODE(pll_params->qdiv_mode) |
2764 DPLL_CFGCR1_KDIV(pll_params->kdiv) |
2765 DPLL_CFGCR1_PDIV(pll_params->pdiv);
2766
2767 if (DISPLAY_VER(i915) >= 12)
2768 pll_state->cfgcr1 |= TGL_DPLL_CFGCR1_CFSELOVRD_NORMAL_XTAL;
2769 else
2770 pll_state->cfgcr1 |= DPLL_CFGCR1_CENTRAL_FREQ_8400;
2771
2772 if (i915->vbt.override_afc_startup)
2773 pll_state->div0 = TGL_DPLL0_DIV0_AFC_STARTUP(i915->vbt.override_afc_startup_val);
2774 }
2775
2776 static int icl_mg_pll_find_divisors(int clock_khz, bool is_dp, bool use_ssc,
2777 u32 *target_dco_khz,
2778 struct intel_dpll_hw_state *state,
2779 bool is_dkl)
2780 {
2781 static const u8 div1_vals[] = { 7, 5, 3, 2 };
2782 u32 dco_min_freq, dco_max_freq;
2783 unsigned int i;
2784 int div2;
2785
2786 dco_min_freq = is_dp ? 8100000 : use_ssc ? 8000000 : 7992000;
2787 dco_max_freq = is_dp ? 8100000 : 10000000;
2788
2789 for (i = 0; i < ARRAY_SIZE(div1_vals); i++) {
2790 int div1 = div1_vals[i];
2791
2792 for (div2 = 10; div2 > 0; div2--) {
2793 int dco = div1 * div2 * clock_khz * 5;
2794 int a_divratio, tlinedrv, inputsel;
2795 u32 hsdiv;
2796
2797 if (dco < dco_min_freq || dco > dco_max_freq)
2798 continue;
2799
2800 if (div2 >= 2) {
2801
2802
2803
2804
2805
2806 a_divratio = is_dp ? 10 : 5;
2807 tlinedrv = is_dkl ? 1 : 2;
2808 } else {
2809 a_divratio = 5;
2810 tlinedrv = 0;
2811 }
2812 inputsel = is_dp ? 0 : 1;
2813
2814 switch (div1) {
2815 default:
2816 MISSING_CASE(div1);
2817 fallthrough;
2818 case 2:
2819 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_2;
2820 break;
2821 case 3:
2822 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_3;
2823 break;
2824 case 5:
2825 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_5;
2826 break;
2827 case 7:
2828 hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_7;
2829 break;
2830 }
2831
2832 *target_dco_khz = dco;
2833
2834 state->mg_refclkin_ctl = MG_REFCLKIN_CTL_OD_2_MUX(1);
2835
2836 state->mg_clktop2_coreclkctl1 =
2837 MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO(a_divratio);
2838
2839 state->mg_clktop2_hsclkctl =
2840 MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL(tlinedrv) |
2841 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL(inputsel) |
2842 hsdiv |
2843 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO(div2);
2844
2845 return 0;
2846 }
2847 }
2848
2849 return -EINVAL;
2850 }
2851
2852
2853
2854
2855
2856 static int icl_calc_mg_pll_state(struct intel_crtc_state *crtc_state,
2857 struct intel_dpll_hw_state *pll_state)
2858 {
2859 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2860 int refclk_khz = dev_priv->dpll.ref_clks.nssc;
2861 int clock = crtc_state->port_clock;
2862 u32 dco_khz, m1div, m2div_int, m2div_rem, m2div_frac;
2863 u32 iref_ndiv, iref_trim, iref_pulse_w;
2864 u32 prop_coeff, int_coeff;
2865 u32 tdc_targetcnt, feedfwgain;
2866 u64 ssc_stepsize, ssc_steplen, ssc_steplog;
2867 u64 tmp;
2868 bool use_ssc = false;
2869 bool is_dp = !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI);
2870 bool is_dkl = DISPLAY_VER(dev_priv) >= 12;
2871 int ret;
2872
2873 ret = icl_mg_pll_find_divisors(clock, is_dp, use_ssc, &dco_khz,
2874 pll_state, is_dkl);
2875 if (ret)
2876 return ret;
2877
2878 m1div = 2;
2879 m2div_int = dco_khz / (refclk_khz * m1div);
2880 if (m2div_int > 255) {
2881 if (!is_dkl) {
2882 m1div = 4;
2883 m2div_int = dco_khz / (refclk_khz * m1div);
2884 }
2885
2886 if (m2div_int > 255)
2887 return -EINVAL;
2888 }
2889 m2div_rem = dco_khz % (refclk_khz * m1div);
2890
2891 tmp = (u64)m2div_rem * (1 << 22);
2892 do_div(tmp, refclk_khz * m1div);
2893 m2div_frac = tmp;
2894
2895 switch (refclk_khz) {
2896 case 19200:
2897 iref_ndiv = 1;
2898 iref_trim = 28;
2899 iref_pulse_w = 1;
2900 break;
2901 case 24000:
2902 iref_ndiv = 1;
2903 iref_trim = 25;
2904 iref_pulse_w = 2;
2905 break;
2906 case 38400:
2907 iref_ndiv = 2;
2908 iref_trim = 28;
2909 iref_pulse_w = 1;
2910 break;
2911 default:
2912 MISSING_CASE(refclk_khz);
2913 return -EINVAL;
2914 }
2915
2916
2917
2918
2919
2920
2921
2922
2923
2924
2925
2926
2927
2928
2929
2930
2931 tdc_targetcnt = (2 * 1000 * 100000 * 10 / (132 * refclk_khz) + 5) / 10;
2932
2933
2934
2935
2936
2937
2938 feedfwgain = (use_ssc || m2div_rem > 0) ?
2939 m1div * 1000000 * 100 / (dco_khz * 3 / 10) : 0;
2940
2941 if (dco_khz >= 9000000) {
2942 prop_coeff = 5;
2943 int_coeff = 10;
2944 } else {
2945 prop_coeff = 4;
2946 int_coeff = 8;
2947 }
2948
2949 if (use_ssc) {
2950 tmp = mul_u32_u32(dco_khz, 47 * 32);
2951 do_div(tmp, refclk_khz * m1div * 10000);
2952 ssc_stepsize = tmp;
2953
2954 tmp = mul_u32_u32(dco_khz, 1000);
2955 ssc_steplen = DIV_ROUND_UP_ULL(tmp, 32 * 2 * 32);
2956 } else {
2957 ssc_stepsize = 0;
2958 ssc_steplen = 0;
2959 }
2960 ssc_steplog = 4;
2961
2962
2963 if (is_dkl) {
2964 pll_state->mg_pll_div0 = DKL_PLL_DIV0_INTEG_COEFF(int_coeff) |
2965 DKL_PLL_DIV0_PROP_COEFF(prop_coeff) |
2966 DKL_PLL_DIV0_FBPREDIV(m1div) |
2967 DKL_PLL_DIV0_FBDIV_INT(m2div_int);
2968 if (dev_priv->vbt.override_afc_startup) {
2969 u8 val = dev_priv->vbt.override_afc_startup_val;
2970
2971 pll_state->mg_pll_div0 |= DKL_PLL_DIV0_AFC_STARTUP(val);
2972 }
2973
2974 pll_state->mg_pll_div1 = DKL_PLL_DIV1_IREF_TRIM(iref_trim) |
2975 DKL_PLL_DIV1_TDC_TARGET_CNT(tdc_targetcnt);
2976
2977 pll_state->mg_pll_ssc = DKL_PLL_SSC_IREF_NDIV_RATIO(iref_ndiv) |
2978 DKL_PLL_SSC_STEP_LEN(ssc_steplen) |
2979 DKL_PLL_SSC_STEP_NUM(ssc_steplog) |
2980 (use_ssc ? DKL_PLL_SSC_EN : 0);
2981
2982 pll_state->mg_pll_bias = (m2div_frac ? DKL_PLL_BIAS_FRAC_EN_H : 0) |
2983 DKL_PLL_BIAS_FBDIV_FRAC(m2div_frac);
2984
2985 pll_state->mg_pll_tdc_coldst_bias =
2986 DKL_PLL_TDC_SSC_STEP_SIZE(ssc_stepsize) |
2987 DKL_PLL_TDC_FEED_FWD_GAIN(feedfwgain);
2988
2989 } else {
2990 pll_state->mg_pll_div0 =
2991 (m2div_rem > 0 ? MG_PLL_DIV0_FRACNEN_H : 0) |
2992 MG_PLL_DIV0_FBDIV_FRAC(m2div_frac) |
2993 MG_PLL_DIV0_FBDIV_INT(m2div_int);
2994
2995 pll_state->mg_pll_div1 =
2996 MG_PLL_DIV1_IREF_NDIVRATIO(iref_ndiv) |
2997 MG_PLL_DIV1_DITHER_DIV_2 |
2998 MG_PLL_DIV1_NDIVRATIO(1) |
2999 MG_PLL_DIV1_FBPREDIV(m1div);
3000
3001 pll_state->mg_pll_lf =
3002 MG_PLL_LF_TDCTARGETCNT(tdc_targetcnt) |
3003 MG_PLL_LF_AFCCNTSEL_512 |
3004 MG_PLL_LF_GAINCTRL(1) |
3005 MG_PLL_LF_INT_COEFF(int_coeff) |
3006 MG_PLL_LF_PROP_COEFF(prop_coeff);
3007
3008 pll_state->mg_pll_frac_lock =
3009 MG_PLL_FRAC_LOCK_TRUELOCK_CRIT_32 |
3010 MG_PLL_FRAC_LOCK_EARLYLOCK_CRIT_32 |
3011 MG_PLL_FRAC_LOCK_LOCKTHRESH(10) |
3012 MG_PLL_FRAC_LOCK_DCODITHEREN |
3013 MG_PLL_FRAC_LOCK_FEEDFWRDGAIN(feedfwgain);
3014 if (use_ssc || m2div_rem > 0)
3015 pll_state->mg_pll_frac_lock |=
3016 MG_PLL_FRAC_LOCK_FEEDFWRDCAL_EN;
3017
3018 pll_state->mg_pll_ssc =
3019 (use_ssc ? MG_PLL_SSC_EN : 0) |
3020 MG_PLL_SSC_TYPE(2) |
3021 MG_PLL_SSC_STEPLENGTH(ssc_steplen) |
3022 MG_PLL_SSC_STEPNUM(ssc_steplog) |
3023 MG_PLL_SSC_FLLEN |
3024 MG_PLL_SSC_STEPSIZE(ssc_stepsize);
3025
3026 pll_state->mg_pll_tdc_coldst_bias =
3027 MG_PLL_TDC_COLDST_COLDSTART |
3028 MG_PLL_TDC_COLDST_IREFINT_EN |
3029 MG_PLL_TDC_COLDST_REFBIAS_START_PULSE_W(iref_pulse_w) |
3030 MG_PLL_TDC_TDCOVCCORR_EN |
3031 MG_PLL_TDC_TDCSEL(3);
3032
3033 pll_state->mg_pll_bias =
3034 MG_PLL_BIAS_BIAS_GB_SEL(3) |
3035 MG_PLL_BIAS_INIT_DCOAMP(0x3F) |
3036 MG_PLL_BIAS_BIAS_BONUS(10) |
3037 MG_PLL_BIAS_BIASCAL_EN |
3038 MG_PLL_BIAS_CTRIM(12) |
3039 MG_PLL_BIAS_VREF_RDAC(4) |
3040 MG_PLL_BIAS_IREFTRIM(iref_trim);
3041
3042 if (refclk_khz == 38400) {
3043 pll_state->mg_pll_tdc_coldst_bias_mask =
3044 MG_PLL_TDC_COLDST_COLDSTART;
3045 pll_state->mg_pll_bias_mask = 0;
3046 } else {
3047 pll_state->mg_pll_tdc_coldst_bias_mask = -1U;
3048 pll_state->mg_pll_bias_mask = -1U;
3049 }
3050
3051 pll_state->mg_pll_tdc_coldst_bias &=
3052 pll_state->mg_pll_tdc_coldst_bias_mask;
3053 pll_state->mg_pll_bias &= pll_state->mg_pll_bias_mask;
3054 }
3055
3056 return 0;
3057 }
3058
3059 static int icl_ddi_mg_pll_get_freq(struct drm_i915_private *dev_priv,
3060 const struct intel_shared_dpll *pll,
3061 const struct intel_dpll_hw_state *pll_state)
3062 {
3063 u32 m1, m2_int, m2_frac, div1, div2, ref_clock;
3064 u64 tmp;
3065
3066 ref_clock = dev_priv->dpll.ref_clks.nssc;
3067
3068 if (DISPLAY_VER(dev_priv) >= 12) {
3069 m1 = pll_state->mg_pll_div0 & DKL_PLL_DIV0_FBPREDIV_MASK;
3070 m1 = m1 >> DKL_PLL_DIV0_FBPREDIV_SHIFT;
3071 m2_int = pll_state->mg_pll_div0 & DKL_PLL_DIV0_FBDIV_INT_MASK;
3072
3073 if (pll_state->mg_pll_bias & DKL_PLL_BIAS_FRAC_EN_H) {
3074 m2_frac = pll_state->mg_pll_bias &
3075 DKL_PLL_BIAS_FBDIV_FRAC_MASK;
3076 m2_frac = m2_frac >> DKL_PLL_BIAS_FBDIV_SHIFT;
3077 } else {
3078 m2_frac = 0;
3079 }
3080 } else {
3081 m1 = pll_state->mg_pll_div1 & MG_PLL_DIV1_FBPREDIV_MASK;
3082 m2_int = pll_state->mg_pll_div0 & MG_PLL_DIV0_FBDIV_INT_MASK;
3083
3084 if (pll_state->mg_pll_div0 & MG_PLL_DIV0_FRACNEN_H) {
3085 m2_frac = pll_state->mg_pll_div0 &
3086 MG_PLL_DIV0_FBDIV_FRAC_MASK;
3087 m2_frac = m2_frac >> MG_PLL_DIV0_FBDIV_FRAC_SHIFT;
3088 } else {
3089 m2_frac = 0;
3090 }
3091 }
3092
3093 switch (pll_state->mg_clktop2_hsclkctl &
3094 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK) {
3095 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_2:
3096 div1 = 2;
3097 break;
3098 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_3:
3099 div1 = 3;
3100 break;
3101 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_5:
3102 div1 = 5;
3103 break;
3104 case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_7:
3105 div1 = 7;
3106 break;
3107 default:
3108 MISSING_CASE(pll_state->mg_clktop2_hsclkctl);
3109 return 0;
3110 }
3111
3112 div2 = (pll_state->mg_clktop2_hsclkctl &
3113 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK) >>
3114 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_SHIFT;
3115
3116
3117 if (div2 == 0)
3118 div2 = 1;
3119
3120
3121
3122
3123
3124 tmp = (u64)m1 * m2_int * ref_clock +
3125 (((u64)m1 * m2_frac * ref_clock) >> 22);
3126 tmp = div_u64(tmp, 5 * div1 * div2);
3127
3128 return tmp;
3129 }
3130
3131
3132
3133
3134
3135
3136
3137
3138
3139 void icl_set_active_port_dpll(struct intel_crtc_state *crtc_state,
3140 enum icl_port_dpll_id port_dpll_id)
3141 {
3142 struct icl_port_dpll *port_dpll =
3143 &crtc_state->icl_port_dplls[port_dpll_id];
3144
3145 crtc_state->shared_dpll = port_dpll->pll;
3146 crtc_state->dpll_hw_state = port_dpll->hw_state;
3147 }
3148
3149 static void icl_update_active_dpll(struct intel_atomic_state *state,
3150 struct intel_crtc *crtc,
3151 struct intel_encoder *encoder)
3152 {
3153 struct intel_crtc_state *crtc_state =
3154 intel_atomic_get_new_crtc_state(state, crtc);
3155 struct intel_digital_port *primary_port;
3156 enum icl_port_dpll_id port_dpll_id = ICL_PORT_DPLL_DEFAULT;
3157
3158 primary_port = encoder->type == INTEL_OUTPUT_DP_MST ?
3159 enc_to_mst(encoder)->primary :
3160 enc_to_dig_port(encoder);
3161
3162 if (primary_port &&
3163 (intel_tc_port_in_dp_alt_mode(primary_port) ||
3164 intel_tc_port_in_legacy_mode(primary_port)))
3165 port_dpll_id = ICL_PORT_DPLL_MG_PHY;
3166
3167 icl_set_active_port_dpll(crtc_state, port_dpll_id);
3168 }
3169
3170 static u32 intel_get_hti_plls(struct drm_i915_private *i915)
3171 {
3172 if (!(i915->hti_state & HDPORT_ENABLED))
3173 return 0;
3174
3175 return REG_FIELD_GET(HDPORT_DPLL_USED_MASK, i915->hti_state);
3176 }
3177
3178 static int icl_compute_combo_phy_dpll(struct intel_atomic_state *state,
3179 struct intel_crtc *crtc)
3180 {
3181 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3182 struct intel_crtc_state *crtc_state =
3183 intel_atomic_get_new_crtc_state(state, crtc);
3184 struct icl_port_dpll *port_dpll =
3185 &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3186 struct skl_wrpll_params pll_params = {};
3187 int ret;
3188
3189 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) ||
3190 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI))
3191 ret = icl_calc_wrpll(crtc_state, &pll_params);
3192 else
3193 ret = icl_calc_dp_combo_pll(crtc_state, &pll_params);
3194
3195 if (ret)
3196 return ret;
3197
3198 icl_calc_dpll_state(dev_priv, &pll_params, &port_dpll->hw_state);
3199
3200 return 0;
3201 }
3202
3203 static int icl_get_combo_phy_dpll(struct intel_atomic_state *state,
3204 struct intel_crtc *crtc,
3205 struct intel_encoder *encoder)
3206 {
3207 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3208 struct intel_crtc_state *crtc_state =
3209 intel_atomic_get_new_crtc_state(state, crtc);
3210 struct icl_port_dpll *port_dpll =
3211 &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3212 enum port port = encoder->port;
3213 unsigned long dpll_mask;
3214
3215 if (IS_ALDERLAKE_S(dev_priv)) {
3216 dpll_mask =
3217 BIT(DPLL_ID_DG1_DPLL3) |
3218 BIT(DPLL_ID_DG1_DPLL2) |
3219 BIT(DPLL_ID_ICL_DPLL1) |
3220 BIT(DPLL_ID_ICL_DPLL0);
3221 } else if (IS_DG1(dev_priv)) {
3222 if (port == PORT_D || port == PORT_E) {
3223 dpll_mask =
3224 BIT(DPLL_ID_DG1_DPLL2) |
3225 BIT(DPLL_ID_DG1_DPLL3);
3226 } else {
3227 dpll_mask =
3228 BIT(DPLL_ID_DG1_DPLL0) |
3229 BIT(DPLL_ID_DG1_DPLL1);
3230 }
3231 } else if (IS_ROCKETLAKE(dev_priv)) {
3232 dpll_mask =
3233 BIT(DPLL_ID_EHL_DPLL4) |
3234 BIT(DPLL_ID_ICL_DPLL1) |
3235 BIT(DPLL_ID_ICL_DPLL0);
3236 } else if (IS_JSL_EHL(dev_priv) && port != PORT_A) {
3237 dpll_mask =
3238 BIT(DPLL_ID_EHL_DPLL4) |
3239 BIT(DPLL_ID_ICL_DPLL1) |
3240 BIT(DPLL_ID_ICL_DPLL0);
3241 } else {
3242 dpll_mask = BIT(DPLL_ID_ICL_DPLL1) | BIT(DPLL_ID_ICL_DPLL0);
3243 }
3244
3245
3246 dpll_mask &= ~intel_get_hti_plls(dev_priv);
3247
3248 port_dpll->pll = intel_find_shared_dpll(state, crtc,
3249 &port_dpll->hw_state,
3250 dpll_mask);
3251 if (!port_dpll->pll)
3252 return -EINVAL;
3253
3254 intel_reference_shared_dpll(state, crtc,
3255 port_dpll->pll, &port_dpll->hw_state);
3256
3257 icl_update_active_dpll(state, crtc, encoder);
3258
3259 return 0;
3260 }
3261
3262 static int icl_compute_tc_phy_dplls(struct intel_atomic_state *state,
3263 struct intel_crtc *crtc)
3264 {
3265 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3266 struct intel_crtc_state *crtc_state =
3267 intel_atomic_get_new_crtc_state(state, crtc);
3268 struct icl_port_dpll *port_dpll =
3269 &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3270 struct skl_wrpll_params pll_params = {};
3271 int ret;
3272
3273 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3274 ret = icl_calc_tbt_pll(crtc_state, &pll_params);
3275 if (ret)
3276 return ret;
3277
3278 icl_calc_dpll_state(dev_priv, &pll_params, &port_dpll->hw_state);
3279
3280 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_MG_PHY];
3281 ret = icl_calc_mg_pll_state(crtc_state, &port_dpll->hw_state);
3282 if (ret)
3283 return ret;
3284
3285 return 0;
3286 }
3287
3288 static int icl_get_tc_phy_dplls(struct intel_atomic_state *state,
3289 struct intel_crtc *crtc,
3290 struct intel_encoder *encoder)
3291 {
3292 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3293 struct intel_crtc_state *crtc_state =
3294 intel_atomic_get_new_crtc_state(state, crtc);
3295 struct icl_port_dpll *port_dpll =
3296 &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3297 enum intel_dpll_id dpll_id;
3298 int ret;
3299
3300 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3301 port_dpll->pll = intel_find_shared_dpll(state, crtc,
3302 &port_dpll->hw_state,
3303 BIT(DPLL_ID_ICL_TBTPLL));
3304 if (!port_dpll->pll)
3305 return -EINVAL;
3306 intel_reference_shared_dpll(state, crtc,
3307 port_dpll->pll, &port_dpll->hw_state);
3308
3309
3310 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_MG_PHY];
3311 dpll_id = icl_tc_port_to_pll_id(intel_port_to_tc(dev_priv,
3312 encoder->port));
3313 port_dpll->pll = intel_find_shared_dpll(state, crtc,
3314 &port_dpll->hw_state,
3315 BIT(dpll_id));
3316 if (!port_dpll->pll) {
3317 ret = -EINVAL;
3318 goto err_unreference_tbt_pll;
3319 }
3320 intel_reference_shared_dpll(state, crtc,
3321 port_dpll->pll, &port_dpll->hw_state);
3322
3323 icl_update_active_dpll(state, crtc, encoder);
3324
3325 return 0;
3326
3327 err_unreference_tbt_pll:
3328 port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3329 intel_unreference_shared_dpll(state, crtc, port_dpll->pll);
3330
3331 return ret;
3332 }
3333
3334 static int icl_compute_dplls(struct intel_atomic_state *state,
3335 struct intel_crtc *crtc,
3336 struct intel_encoder *encoder)
3337 {
3338 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3339 enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
3340
3341 if (intel_phy_is_combo(dev_priv, phy))
3342 return icl_compute_combo_phy_dpll(state, crtc);
3343 else if (intel_phy_is_tc(dev_priv, phy))
3344 return icl_compute_tc_phy_dplls(state, crtc);
3345
3346 MISSING_CASE(phy);
3347
3348 return 0;
3349 }
3350
3351 static int icl_get_dplls(struct intel_atomic_state *state,
3352 struct intel_crtc *crtc,
3353 struct intel_encoder *encoder)
3354 {
3355 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3356 enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
3357
3358 if (intel_phy_is_combo(dev_priv, phy))
3359 return icl_get_combo_phy_dpll(state, crtc, encoder);
3360 else if (intel_phy_is_tc(dev_priv, phy))
3361 return icl_get_tc_phy_dplls(state, crtc, encoder);
3362
3363 MISSING_CASE(phy);
3364
3365 return -EINVAL;
3366 }
3367
3368 static void icl_put_dplls(struct intel_atomic_state *state,
3369 struct intel_crtc *crtc)
3370 {
3371 const struct intel_crtc_state *old_crtc_state =
3372 intel_atomic_get_old_crtc_state(state, crtc);
3373 struct intel_crtc_state *new_crtc_state =
3374 intel_atomic_get_new_crtc_state(state, crtc);
3375 enum icl_port_dpll_id id;
3376
3377 new_crtc_state->shared_dpll = NULL;
3378
3379 for (id = ICL_PORT_DPLL_DEFAULT; id < ICL_PORT_DPLL_COUNT; id++) {
3380 const struct icl_port_dpll *old_port_dpll =
3381 &old_crtc_state->icl_port_dplls[id];
3382 struct icl_port_dpll *new_port_dpll =
3383 &new_crtc_state->icl_port_dplls[id];
3384
3385 new_port_dpll->pll = NULL;
3386
3387 if (!old_port_dpll->pll)
3388 continue;
3389
3390 intel_unreference_shared_dpll(state, crtc, old_port_dpll->pll);
3391 }
3392 }
3393
3394 static bool mg_pll_get_hw_state(struct drm_i915_private *dev_priv,
3395 struct intel_shared_dpll *pll,
3396 struct intel_dpll_hw_state *hw_state)
3397 {
3398 const enum intel_dpll_id id = pll->info->id;
3399 enum tc_port tc_port = icl_pll_id_to_tc_port(id);
3400 intel_wakeref_t wakeref;
3401 bool ret = false;
3402 u32 val;
3403
3404 i915_reg_t enable_reg = intel_tc_pll_enable_reg(dev_priv, pll);
3405
3406 wakeref = intel_display_power_get_if_enabled(dev_priv,
3407 POWER_DOMAIN_DISPLAY_CORE);
3408 if (!wakeref)
3409 return false;
3410
3411 val = intel_de_read(dev_priv, enable_reg);
3412 if (!(val & PLL_ENABLE))
3413 goto out;
3414
3415 hw_state->mg_refclkin_ctl = intel_de_read(dev_priv,
3416 MG_REFCLKIN_CTL(tc_port));
3417 hw_state->mg_refclkin_ctl &= MG_REFCLKIN_CTL_OD_2_MUX_MASK;
3418
3419 hw_state->mg_clktop2_coreclkctl1 =
3420 intel_de_read(dev_priv, MG_CLKTOP2_CORECLKCTL1(tc_port));
3421 hw_state->mg_clktop2_coreclkctl1 &=
3422 MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
3423
3424 hw_state->mg_clktop2_hsclkctl =
3425 intel_de_read(dev_priv, MG_CLKTOP2_HSCLKCTL(tc_port));
3426 hw_state->mg_clktop2_hsclkctl &=
3427 MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
3428 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
3429 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
3430 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK;
3431
3432 hw_state->mg_pll_div0 = intel_de_read(dev_priv, MG_PLL_DIV0(tc_port));
3433 hw_state->mg_pll_div1 = intel_de_read(dev_priv, MG_PLL_DIV1(tc_port));
3434 hw_state->mg_pll_lf = intel_de_read(dev_priv, MG_PLL_LF(tc_port));
3435 hw_state->mg_pll_frac_lock = intel_de_read(dev_priv,
3436 MG_PLL_FRAC_LOCK(tc_port));
3437 hw_state->mg_pll_ssc = intel_de_read(dev_priv, MG_PLL_SSC(tc_port));
3438
3439 hw_state->mg_pll_bias = intel_de_read(dev_priv, MG_PLL_BIAS(tc_port));
3440 hw_state->mg_pll_tdc_coldst_bias =
3441 intel_de_read(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port));
3442
3443 if (dev_priv->dpll.ref_clks.nssc == 38400) {
3444 hw_state->mg_pll_tdc_coldst_bias_mask = MG_PLL_TDC_COLDST_COLDSTART;
3445 hw_state->mg_pll_bias_mask = 0;
3446 } else {
3447 hw_state->mg_pll_tdc_coldst_bias_mask = -1U;
3448 hw_state->mg_pll_bias_mask = -1U;
3449 }
3450
3451 hw_state->mg_pll_tdc_coldst_bias &= hw_state->mg_pll_tdc_coldst_bias_mask;
3452 hw_state->mg_pll_bias &= hw_state->mg_pll_bias_mask;
3453
3454 ret = true;
3455 out:
3456 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
3457 return ret;
3458 }
3459
3460 static bool dkl_pll_get_hw_state(struct drm_i915_private *dev_priv,
3461 struct intel_shared_dpll *pll,
3462 struct intel_dpll_hw_state *hw_state)
3463 {
3464 const enum intel_dpll_id id = pll->info->id;
3465 enum tc_port tc_port = icl_pll_id_to_tc_port(id);
3466 intel_wakeref_t wakeref;
3467 bool ret = false;
3468 u32 val;
3469
3470 wakeref = intel_display_power_get_if_enabled(dev_priv,
3471 POWER_DOMAIN_DISPLAY_CORE);
3472 if (!wakeref)
3473 return false;
3474
3475 val = intel_de_read(dev_priv, intel_tc_pll_enable_reg(dev_priv, pll));
3476 if (!(val & PLL_ENABLE))
3477 goto out;
3478
3479
3480
3481
3482
3483 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
3484 HIP_INDEX_VAL(tc_port, 0x2));
3485
3486 hw_state->mg_refclkin_ctl = intel_de_read(dev_priv,
3487 DKL_REFCLKIN_CTL(tc_port));
3488 hw_state->mg_refclkin_ctl &= MG_REFCLKIN_CTL_OD_2_MUX_MASK;
3489
3490 hw_state->mg_clktop2_hsclkctl =
3491 intel_de_read(dev_priv, DKL_CLKTOP2_HSCLKCTL(tc_port));
3492 hw_state->mg_clktop2_hsclkctl &=
3493 MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
3494 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
3495 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
3496 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK;
3497
3498 hw_state->mg_clktop2_coreclkctl1 =
3499 intel_de_read(dev_priv, DKL_CLKTOP2_CORECLKCTL1(tc_port));
3500 hw_state->mg_clktop2_coreclkctl1 &=
3501 MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
3502
3503 hw_state->mg_pll_div0 = intel_de_read(dev_priv, DKL_PLL_DIV0(tc_port));
3504 val = DKL_PLL_DIV0_MASK;
3505 if (dev_priv->vbt.override_afc_startup)
3506 val |= DKL_PLL_DIV0_AFC_STARTUP_MASK;
3507 hw_state->mg_pll_div0 &= val;
3508
3509 hw_state->mg_pll_div1 = intel_de_read(dev_priv, DKL_PLL_DIV1(tc_port));
3510 hw_state->mg_pll_div1 &= (DKL_PLL_DIV1_IREF_TRIM_MASK |
3511 DKL_PLL_DIV1_TDC_TARGET_CNT_MASK);
3512
3513 hw_state->mg_pll_ssc = intel_de_read(dev_priv, DKL_PLL_SSC(tc_port));
3514 hw_state->mg_pll_ssc &= (DKL_PLL_SSC_IREF_NDIV_RATIO_MASK |
3515 DKL_PLL_SSC_STEP_LEN_MASK |
3516 DKL_PLL_SSC_STEP_NUM_MASK |
3517 DKL_PLL_SSC_EN);
3518
3519 hw_state->mg_pll_bias = intel_de_read(dev_priv, DKL_PLL_BIAS(tc_port));
3520 hw_state->mg_pll_bias &= (DKL_PLL_BIAS_FRAC_EN_H |
3521 DKL_PLL_BIAS_FBDIV_FRAC_MASK);
3522
3523 hw_state->mg_pll_tdc_coldst_bias =
3524 intel_de_read(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port));
3525 hw_state->mg_pll_tdc_coldst_bias &= (DKL_PLL_TDC_SSC_STEP_SIZE_MASK |
3526 DKL_PLL_TDC_FEED_FWD_GAIN_MASK);
3527
3528 ret = true;
3529 out:
3530 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
3531 return ret;
3532 }
3533
3534 static bool icl_pll_get_hw_state(struct drm_i915_private *dev_priv,
3535 struct intel_shared_dpll *pll,
3536 struct intel_dpll_hw_state *hw_state,
3537 i915_reg_t enable_reg)
3538 {
3539 const enum intel_dpll_id id = pll->info->id;
3540 intel_wakeref_t wakeref;
3541 bool ret = false;
3542 u32 val;
3543
3544 wakeref = intel_display_power_get_if_enabled(dev_priv,
3545 POWER_DOMAIN_DISPLAY_CORE);
3546 if (!wakeref)
3547 return false;
3548
3549 val = intel_de_read(dev_priv, enable_reg);
3550 if (!(val & PLL_ENABLE))
3551 goto out;
3552
3553 if (IS_ALDERLAKE_S(dev_priv)) {
3554 hw_state->cfgcr0 = intel_de_read(dev_priv, ADLS_DPLL_CFGCR0(id));
3555 hw_state->cfgcr1 = intel_de_read(dev_priv, ADLS_DPLL_CFGCR1(id));
3556 } else if (IS_DG1(dev_priv)) {
3557 hw_state->cfgcr0 = intel_de_read(dev_priv, DG1_DPLL_CFGCR0(id));
3558 hw_state->cfgcr1 = intel_de_read(dev_priv, DG1_DPLL_CFGCR1(id));
3559 } else if (IS_ROCKETLAKE(dev_priv)) {
3560 hw_state->cfgcr0 = intel_de_read(dev_priv,
3561 RKL_DPLL_CFGCR0(id));
3562 hw_state->cfgcr1 = intel_de_read(dev_priv,
3563 RKL_DPLL_CFGCR1(id));
3564 } else if (DISPLAY_VER(dev_priv) >= 12) {
3565 hw_state->cfgcr0 = intel_de_read(dev_priv,
3566 TGL_DPLL_CFGCR0(id));
3567 hw_state->cfgcr1 = intel_de_read(dev_priv,
3568 TGL_DPLL_CFGCR1(id));
3569 if (dev_priv->vbt.override_afc_startup) {
3570 hw_state->div0 = intel_de_read(dev_priv, TGL_DPLL0_DIV0(id));
3571 hw_state->div0 &= TGL_DPLL0_DIV0_AFC_STARTUP_MASK;
3572 }
3573 } else {
3574 if (IS_JSL_EHL(dev_priv) && id == DPLL_ID_EHL_DPLL4) {
3575 hw_state->cfgcr0 = intel_de_read(dev_priv,
3576 ICL_DPLL_CFGCR0(4));
3577 hw_state->cfgcr1 = intel_de_read(dev_priv,
3578 ICL_DPLL_CFGCR1(4));
3579 } else {
3580 hw_state->cfgcr0 = intel_de_read(dev_priv,
3581 ICL_DPLL_CFGCR0(id));
3582 hw_state->cfgcr1 = intel_de_read(dev_priv,
3583 ICL_DPLL_CFGCR1(id));
3584 }
3585 }
3586
3587 ret = true;
3588 out:
3589 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
3590 return ret;
3591 }
3592
3593 static bool combo_pll_get_hw_state(struct drm_i915_private *dev_priv,
3594 struct intel_shared_dpll *pll,
3595 struct intel_dpll_hw_state *hw_state)
3596 {
3597 i915_reg_t enable_reg = intel_combo_pll_enable_reg(dev_priv, pll);
3598
3599 return icl_pll_get_hw_state(dev_priv, pll, hw_state, enable_reg);
3600 }
3601
3602 static bool tbt_pll_get_hw_state(struct drm_i915_private *dev_priv,
3603 struct intel_shared_dpll *pll,
3604 struct intel_dpll_hw_state *hw_state)
3605 {
3606 return icl_pll_get_hw_state(dev_priv, pll, hw_state, TBT_PLL_ENABLE);
3607 }
3608
3609 static void icl_dpll_write(struct drm_i915_private *dev_priv,
3610 struct intel_shared_dpll *pll)
3611 {
3612 struct intel_dpll_hw_state *hw_state = &pll->state.hw_state;
3613 const enum intel_dpll_id id = pll->info->id;
3614 i915_reg_t cfgcr0_reg, cfgcr1_reg, div0_reg = INVALID_MMIO_REG;
3615
3616 if (IS_ALDERLAKE_S(dev_priv)) {
3617 cfgcr0_reg = ADLS_DPLL_CFGCR0(id);
3618 cfgcr1_reg = ADLS_DPLL_CFGCR1(id);
3619 } else if (IS_DG1(dev_priv)) {
3620 cfgcr0_reg = DG1_DPLL_CFGCR0(id);
3621 cfgcr1_reg = DG1_DPLL_CFGCR1(id);
3622 } else if (IS_ROCKETLAKE(dev_priv)) {
3623 cfgcr0_reg = RKL_DPLL_CFGCR0(id);
3624 cfgcr1_reg = RKL_DPLL_CFGCR1(id);
3625 } else if (DISPLAY_VER(dev_priv) >= 12) {
3626 cfgcr0_reg = TGL_DPLL_CFGCR0(id);
3627 cfgcr1_reg = TGL_DPLL_CFGCR1(id);
3628 div0_reg = TGL_DPLL0_DIV0(id);
3629 } else {
3630 if (IS_JSL_EHL(dev_priv) && id == DPLL_ID_EHL_DPLL4) {
3631 cfgcr0_reg = ICL_DPLL_CFGCR0(4);
3632 cfgcr1_reg = ICL_DPLL_CFGCR1(4);
3633 } else {
3634 cfgcr0_reg = ICL_DPLL_CFGCR0(id);
3635 cfgcr1_reg = ICL_DPLL_CFGCR1(id);
3636 }
3637 }
3638
3639 intel_de_write(dev_priv, cfgcr0_reg, hw_state->cfgcr0);
3640 intel_de_write(dev_priv, cfgcr1_reg, hw_state->cfgcr1);
3641 drm_WARN_ON_ONCE(&dev_priv->drm, dev_priv->vbt.override_afc_startup &&
3642 !i915_mmio_reg_valid(div0_reg));
3643 if (dev_priv->vbt.override_afc_startup &&
3644 i915_mmio_reg_valid(div0_reg))
3645 intel_de_rmw(dev_priv, div0_reg, TGL_DPLL0_DIV0_AFC_STARTUP_MASK,
3646 hw_state->div0);
3647 intel_de_posting_read(dev_priv, cfgcr1_reg);
3648 }
3649
3650 static void icl_mg_pll_write(struct drm_i915_private *dev_priv,
3651 struct intel_shared_dpll *pll)
3652 {
3653 struct intel_dpll_hw_state *hw_state = &pll->state.hw_state;
3654 enum tc_port tc_port = icl_pll_id_to_tc_port(pll->info->id);
3655 u32 val;
3656
3657
3658
3659
3660
3661
3662
3663 val = intel_de_read(dev_priv, MG_REFCLKIN_CTL(tc_port));
3664 val &= ~MG_REFCLKIN_CTL_OD_2_MUX_MASK;
3665 val |= hw_state->mg_refclkin_ctl;
3666 intel_de_write(dev_priv, MG_REFCLKIN_CTL(tc_port), val);
3667
3668 val = intel_de_read(dev_priv, MG_CLKTOP2_CORECLKCTL1(tc_port));
3669 val &= ~MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
3670 val |= hw_state->mg_clktop2_coreclkctl1;
3671 intel_de_write(dev_priv, MG_CLKTOP2_CORECLKCTL1(tc_port), val);
3672
3673 val = intel_de_read(dev_priv, MG_CLKTOP2_HSCLKCTL(tc_port));
3674 val &= ~(MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
3675 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
3676 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
3677 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK);
3678 val |= hw_state->mg_clktop2_hsclkctl;
3679 intel_de_write(dev_priv, MG_CLKTOP2_HSCLKCTL(tc_port), val);
3680
3681 intel_de_write(dev_priv, MG_PLL_DIV0(tc_port), hw_state->mg_pll_div0);
3682 intel_de_write(dev_priv, MG_PLL_DIV1(tc_port), hw_state->mg_pll_div1);
3683 intel_de_write(dev_priv, MG_PLL_LF(tc_port), hw_state->mg_pll_lf);
3684 intel_de_write(dev_priv, MG_PLL_FRAC_LOCK(tc_port),
3685 hw_state->mg_pll_frac_lock);
3686 intel_de_write(dev_priv, MG_PLL_SSC(tc_port), hw_state->mg_pll_ssc);
3687
3688 val = intel_de_read(dev_priv, MG_PLL_BIAS(tc_port));
3689 val &= ~hw_state->mg_pll_bias_mask;
3690 val |= hw_state->mg_pll_bias;
3691 intel_de_write(dev_priv, MG_PLL_BIAS(tc_port), val);
3692
3693 val = intel_de_read(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port));
3694 val &= ~hw_state->mg_pll_tdc_coldst_bias_mask;
3695 val |= hw_state->mg_pll_tdc_coldst_bias;
3696 intel_de_write(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port), val);
3697
3698 intel_de_posting_read(dev_priv, MG_PLL_TDC_COLDST_BIAS(tc_port));
3699 }
3700
3701 static void dkl_pll_write(struct drm_i915_private *dev_priv,
3702 struct intel_shared_dpll *pll)
3703 {
3704 struct intel_dpll_hw_state *hw_state = &pll->state.hw_state;
3705 enum tc_port tc_port = icl_pll_id_to_tc_port(pll->info->id);
3706 u32 val;
3707
3708
3709
3710
3711
3712 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
3713 HIP_INDEX_VAL(tc_port, 0x2));
3714
3715
3716 val = intel_de_read(dev_priv, DKL_REFCLKIN_CTL(tc_port));
3717 val &= ~MG_REFCLKIN_CTL_OD_2_MUX_MASK;
3718 val |= hw_state->mg_refclkin_ctl;
3719 intel_de_write(dev_priv, DKL_REFCLKIN_CTL(tc_port), val);
3720
3721 val = intel_de_read(dev_priv, DKL_CLKTOP2_CORECLKCTL1(tc_port));
3722 val &= ~MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
3723 val |= hw_state->mg_clktop2_coreclkctl1;
3724 intel_de_write(dev_priv, DKL_CLKTOP2_CORECLKCTL1(tc_port), val);
3725
3726 val = intel_de_read(dev_priv, DKL_CLKTOP2_HSCLKCTL(tc_port));
3727 val &= ~(MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
3728 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
3729 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
3730 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK);
3731 val |= hw_state->mg_clktop2_hsclkctl;
3732 intel_de_write(dev_priv, DKL_CLKTOP2_HSCLKCTL(tc_port), val);
3733
3734 val = DKL_PLL_DIV0_MASK;
3735 if (dev_priv->vbt.override_afc_startup)
3736 val |= DKL_PLL_DIV0_AFC_STARTUP_MASK;
3737 intel_de_rmw(dev_priv, DKL_PLL_DIV0(tc_port), val,
3738 hw_state->mg_pll_div0);
3739
3740 val = intel_de_read(dev_priv, DKL_PLL_DIV1(tc_port));
3741 val &= ~(DKL_PLL_DIV1_IREF_TRIM_MASK |
3742 DKL_PLL_DIV1_TDC_TARGET_CNT_MASK);
3743 val |= hw_state->mg_pll_div1;
3744 intel_de_write(dev_priv, DKL_PLL_DIV1(tc_port), val);
3745
3746 val = intel_de_read(dev_priv, DKL_PLL_SSC(tc_port));
3747 val &= ~(DKL_PLL_SSC_IREF_NDIV_RATIO_MASK |
3748 DKL_PLL_SSC_STEP_LEN_MASK |
3749 DKL_PLL_SSC_STEP_NUM_MASK |
3750 DKL_PLL_SSC_EN);
3751 val |= hw_state->mg_pll_ssc;
3752 intel_de_write(dev_priv, DKL_PLL_SSC(tc_port), val);
3753
3754 val = intel_de_read(dev_priv, DKL_PLL_BIAS(tc_port));
3755 val &= ~(DKL_PLL_BIAS_FRAC_EN_H |
3756 DKL_PLL_BIAS_FBDIV_FRAC_MASK);
3757 val |= hw_state->mg_pll_bias;
3758 intel_de_write(dev_priv, DKL_PLL_BIAS(tc_port), val);
3759
3760 val = intel_de_read(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port));
3761 val &= ~(DKL_PLL_TDC_SSC_STEP_SIZE_MASK |
3762 DKL_PLL_TDC_FEED_FWD_GAIN_MASK);
3763 val |= hw_state->mg_pll_tdc_coldst_bias;
3764 intel_de_write(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port), val);
3765
3766 intel_de_posting_read(dev_priv, DKL_PLL_TDC_COLDST_BIAS(tc_port));
3767 }
3768
3769 static void icl_pll_power_enable(struct drm_i915_private *dev_priv,
3770 struct intel_shared_dpll *pll,
3771 i915_reg_t enable_reg)
3772 {
3773 u32 val;
3774
3775 val = intel_de_read(dev_priv, enable_reg);
3776 val |= PLL_POWER_ENABLE;
3777 intel_de_write(dev_priv, enable_reg, val);
3778
3779
3780
3781
3782
3783 if (intel_de_wait_for_set(dev_priv, enable_reg, PLL_POWER_STATE, 1))
3784 drm_err(&dev_priv->drm, "PLL %d Power not enabled\n",
3785 pll->info->id);
3786 }
3787
3788 static void icl_pll_enable(struct drm_i915_private *dev_priv,
3789 struct intel_shared_dpll *pll,
3790 i915_reg_t enable_reg)
3791 {
3792 u32 val;
3793
3794 val = intel_de_read(dev_priv, enable_reg);
3795 val |= PLL_ENABLE;
3796 intel_de_write(dev_priv, enable_reg, val);
3797
3798
3799 if (intel_de_wait_for_set(dev_priv, enable_reg, PLL_LOCK, 1))
3800 drm_err(&dev_priv->drm, "PLL %d not locked\n", pll->info->id);
3801 }
3802
3803 static void adlp_cmtg_clock_gating_wa(struct drm_i915_private *i915, struct intel_shared_dpll *pll)
3804 {
3805 u32 val;
3806
3807 if (!IS_ADLP_DISPLAY_STEP(i915, STEP_A0, STEP_B0) ||
3808 pll->info->id != DPLL_ID_ICL_DPLL0)
3809 return;
3810
3811
3812
3813
3814
3815
3816
3817
3818
3819
3820
3821 val = intel_de_read(i915, TRANS_CMTG_CHICKEN);
3822 val = intel_de_read(i915, TRANS_CMTG_CHICKEN);
3823 intel_de_write(i915, TRANS_CMTG_CHICKEN, DISABLE_DPT_CLK_GATING);
3824 if (drm_WARN_ON(&i915->drm, val & ~DISABLE_DPT_CLK_GATING))
3825 drm_dbg_kms(&i915->drm, "Unexpected flags in TRANS_CMTG_CHICKEN: %08x\n", val);
3826 }
3827
3828 static void combo_pll_enable(struct drm_i915_private *dev_priv,
3829 struct intel_shared_dpll *pll)
3830 {
3831 i915_reg_t enable_reg = intel_combo_pll_enable_reg(dev_priv, pll);
3832
3833 if (IS_JSL_EHL(dev_priv) &&
3834 pll->info->id == DPLL_ID_EHL_DPLL4) {
3835
3836
3837
3838
3839
3840
3841 pll->wakeref = intel_display_power_get(dev_priv,
3842 POWER_DOMAIN_DC_OFF);
3843 }
3844
3845 icl_pll_power_enable(dev_priv, pll, enable_reg);
3846
3847 icl_dpll_write(dev_priv, pll);
3848
3849
3850
3851
3852
3853
3854
3855 icl_pll_enable(dev_priv, pll, enable_reg);
3856
3857 adlp_cmtg_clock_gating_wa(dev_priv, pll);
3858
3859
3860 }
3861
3862 static void tbt_pll_enable(struct drm_i915_private *dev_priv,
3863 struct intel_shared_dpll *pll)
3864 {
3865 icl_pll_power_enable(dev_priv, pll, TBT_PLL_ENABLE);
3866
3867 icl_dpll_write(dev_priv, pll);
3868
3869
3870
3871
3872
3873
3874
3875 icl_pll_enable(dev_priv, pll, TBT_PLL_ENABLE);
3876
3877
3878 }
3879
3880 static void mg_pll_enable(struct drm_i915_private *dev_priv,
3881 struct intel_shared_dpll *pll)
3882 {
3883 i915_reg_t enable_reg = intel_tc_pll_enable_reg(dev_priv, pll);
3884
3885 icl_pll_power_enable(dev_priv, pll, enable_reg);
3886
3887 if (DISPLAY_VER(dev_priv) >= 12)
3888 dkl_pll_write(dev_priv, pll);
3889 else
3890 icl_mg_pll_write(dev_priv, pll);
3891
3892
3893
3894
3895
3896
3897
3898 icl_pll_enable(dev_priv, pll, enable_reg);
3899
3900
3901 }
3902
3903 static void icl_pll_disable(struct drm_i915_private *dev_priv,
3904 struct intel_shared_dpll *pll,
3905 i915_reg_t enable_reg)
3906 {
3907 u32 val;
3908
3909
3910
3911
3912
3913
3914
3915
3916
3917 val = intel_de_read(dev_priv, enable_reg);
3918 val &= ~PLL_ENABLE;
3919 intel_de_write(dev_priv, enable_reg, val);
3920
3921
3922 if (intel_de_wait_for_clear(dev_priv, enable_reg, PLL_LOCK, 1))
3923 drm_err(&dev_priv->drm, "PLL %d locked\n", pll->info->id);
3924
3925
3926
3927 val = intel_de_read(dev_priv, enable_reg);
3928 val &= ~PLL_POWER_ENABLE;
3929 intel_de_write(dev_priv, enable_reg, val);
3930
3931
3932
3933
3934
3935 if (intel_de_wait_for_clear(dev_priv, enable_reg, PLL_POWER_STATE, 1))
3936 drm_err(&dev_priv->drm, "PLL %d Power not disabled\n",
3937 pll->info->id);
3938 }
3939
3940 static void combo_pll_disable(struct drm_i915_private *dev_priv,
3941 struct intel_shared_dpll *pll)
3942 {
3943 i915_reg_t enable_reg = intel_combo_pll_enable_reg(dev_priv, pll);
3944
3945 icl_pll_disable(dev_priv, pll, enable_reg);
3946
3947 if (IS_JSL_EHL(dev_priv) &&
3948 pll->info->id == DPLL_ID_EHL_DPLL4)
3949 intel_display_power_put(dev_priv, POWER_DOMAIN_DC_OFF,
3950 pll->wakeref);
3951 }
3952
3953 static void tbt_pll_disable(struct drm_i915_private *dev_priv,
3954 struct intel_shared_dpll *pll)
3955 {
3956 icl_pll_disable(dev_priv, pll, TBT_PLL_ENABLE);
3957 }
3958
3959 static void mg_pll_disable(struct drm_i915_private *dev_priv,
3960 struct intel_shared_dpll *pll)
3961 {
3962 i915_reg_t enable_reg = intel_tc_pll_enable_reg(dev_priv, pll);
3963
3964 icl_pll_disable(dev_priv, pll, enable_reg);
3965 }
3966
3967 static void icl_update_dpll_ref_clks(struct drm_i915_private *i915)
3968 {
3969
3970 i915->dpll.ref_clks.nssc = i915->cdclk.hw.ref;
3971 }
3972
3973 static void icl_dump_hw_state(struct drm_i915_private *dev_priv,
3974 const struct intel_dpll_hw_state *hw_state)
3975 {
3976 drm_dbg_kms(&dev_priv->drm,
3977 "dpll_hw_state: cfgcr0: 0x%x, cfgcr1: 0x%x, div0: 0x%x, "
3978 "mg_refclkin_ctl: 0x%x, hg_clktop2_coreclkctl1: 0x%x, "
3979 "mg_clktop2_hsclkctl: 0x%x, mg_pll_div0: 0x%x, "
3980 "mg_pll_div2: 0x%x, mg_pll_lf: 0x%x, "
3981 "mg_pll_frac_lock: 0x%x, mg_pll_ssc: 0x%x, "
3982 "mg_pll_bias: 0x%x, mg_pll_tdc_coldst_bias: 0x%x\n",
3983 hw_state->cfgcr0, hw_state->cfgcr1,
3984 hw_state->div0,
3985 hw_state->mg_refclkin_ctl,
3986 hw_state->mg_clktop2_coreclkctl1,
3987 hw_state->mg_clktop2_hsclkctl,
3988 hw_state->mg_pll_div0,
3989 hw_state->mg_pll_div1,
3990 hw_state->mg_pll_lf,
3991 hw_state->mg_pll_frac_lock,
3992 hw_state->mg_pll_ssc,
3993 hw_state->mg_pll_bias,
3994 hw_state->mg_pll_tdc_coldst_bias);
3995 }
3996
3997 static const struct intel_shared_dpll_funcs combo_pll_funcs = {
3998 .enable = combo_pll_enable,
3999 .disable = combo_pll_disable,
4000 .get_hw_state = combo_pll_get_hw_state,
4001 .get_freq = icl_ddi_combo_pll_get_freq,
4002 };
4003
4004 static const struct intel_shared_dpll_funcs tbt_pll_funcs = {
4005 .enable = tbt_pll_enable,
4006 .disable = tbt_pll_disable,
4007 .get_hw_state = tbt_pll_get_hw_state,
4008 .get_freq = icl_ddi_tbt_pll_get_freq,
4009 };
4010
4011 static const struct intel_shared_dpll_funcs mg_pll_funcs = {
4012 .enable = mg_pll_enable,
4013 .disable = mg_pll_disable,
4014 .get_hw_state = mg_pll_get_hw_state,
4015 .get_freq = icl_ddi_mg_pll_get_freq,
4016 };
4017
4018 static const struct dpll_info icl_plls[] = {
4019 { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
4020 { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
4021 { "TBT PLL", &tbt_pll_funcs, DPLL_ID_ICL_TBTPLL, 0 },
4022 { "MG PLL 1", &mg_pll_funcs, DPLL_ID_ICL_MGPLL1, 0 },
4023 { "MG PLL 2", &mg_pll_funcs, DPLL_ID_ICL_MGPLL2, 0 },
4024 { "MG PLL 3", &mg_pll_funcs, DPLL_ID_ICL_MGPLL3, 0 },
4025 { "MG PLL 4", &mg_pll_funcs, DPLL_ID_ICL_MGPLL4, 0 },
4026 { },
4027 };
4028
4029 static const struct intel_dpll_mgr icl_pll_mgr = {
4030 .dpll_info = icl_plls,
4031 .compute_dplls = icl_compute_dplls,
4032 .get_dplls = icl_get_dplls,
4033 .put_dplls = icl_put_dplls,
4034 .update_active_dpll = icl_update_active_dpll,
4035 .update_ref_clks = icl_update_dpll_ref_clks,
4036 .dump_hw_state = icl_dump_hw_state,
4037 };
4038
4039 static const struct dpll_info ehl_plls[] = {
4040 { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
4041 { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
4042 { "DPLL 4", &combo_pll_funcs, DPLL_ID_EHL_DPLL4, 0 },
4043 { },
4044 };
4045
4046 static const struct intel_dpll_mgr ehl_pll_mgr = {
4047 .dpll_info = ehl_plls,
4048 .compute_dplls = icl_compute_dplls,
4049 .get_dplls = icl_get_dplls,
4050 .put_dplls = icl_put_dplls,
4051 .update_ref_clks = icl_update_dpll_ref_clks,
4052 .dump_hw_state = icl_dump_hw_state,
4053 };
4054
4055 static const struct intel_shared_dpll_funcs dkl_pll_funcs = {
4056 .enable = mg_pll_enable,
4057 .disable = mg_pll_disable,
4058 .get_hw_state = dkl_pll_get_hw_state,
4059 .get_freq = icl_ddi_mg_pll_get_freq,
4060 };
4061
4062 static const struct dpll_info tgl_plls[] = {
4063 { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
4064 { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
4065 { "TBT PLL", &tbt_pll_funcs, DPLL_ID_ICL_TBTPLL, 0 },
4066 { "TC PLL 1", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL1, 0 },
4067 { "TC PLL 2", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL2, 0 },
4068 { "TC PLL 3", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL3, 0 },
4069 { "TC PLL 4", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL4, 0 },
4070 { "TC PLL 5", &dkl_pll_funcs, DPLL_ID_TGL_MGPLL5, 0 },
4071 { "TC PLL 6", &dkl_pll_funcs, DPLL_ID_TGL_MGPLL6, 0 },
4072 { },
4073 };
4074
4075 static const struct intel_dpll_mgr tgl_pll_mgr = {
4076 .dpll_info = tgl_plls,
4077 .compute_dplls = icl_compute_dplls,
4078 .get_dplls = icl_get_dplls,
4079 .put_dplls = icl_put_dplls,
4080 .update_active_dpll = icl_update_active_dpll,
4081 .update_ref_clks = icl_update_dpll_ref_clks,
4082 .dump_hw_state = icl_dump_hw_state,
4083 };
4084
4085 static const struct dpll_info rkl_plls[] = {
4086 { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
4087 { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
4088 { "DPLL 4", &combo_pll_funcs, DPLL_ID_EHL_DPLL4, 0 },
4089 { },
4090 };
4091
4092 static const struct intel_dpll_mgr rkl_pll_mgr = {
4093 .dpll_info = rkl_plls,
4094 .compute_dplls = icl_compute_dplls,
4095 .get_dplls = icl_get_dplls,
4096 .put_dplls = icl_put_dplls,
4097 .update_ref_clks = icl_update_dpll_ref_clks,
4098 .dump_hw_state = icl_dump_hw_state,
4099 };
4100
4101 static const struct dpll_info dg1_plls[] = {
4102 { "DPLL 0", &combo_pll_funcs, DPLL_ID_DG1_DPLL0, 0 },
4103 { "DPLL 1", &combo_pll_funcs, DPLL_ID_DG1_DPLL1, 0 },
4104 { "DPLL 2", &combo_pll_funcs, DPLL_ID_DG1_DPLL2, 0 },
4105 { "DPLL 3", &combo_pll_funcs, DPLL_ID_DG1_DPLL3, 0 },
4106 { },
4107 };
4108
4109 static const struct intel_dpll_mgr dg1_pll_mgr = {
4110 .dpll_info = dg1_plls,
4111 .compute_dplls = icl_compute_dplls,
4112 .get_dplls = icl_get_dplls,
4113 .put_dplls = icl_put_dplls,
4114 .update_ref_clks = icl_update_dpll_ref_clks,
4115 .dump_hw_state = icl_dump_hw_state,
4116 };
4117
4118 static const struct dpll_info adls_plls[] = {
4119 { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
4120 { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
4121 { "DPLL 2", &combo_pll_funcs, DPLL_ID_DG1_DPLL2, 0 },
4122 { "DPLL 3", &combo_pll_funcs, DPLL_ID_DG1_DPLL3, 0 },
4123 { },
4124 };
4125
4126 static const struct intel_dpll_mgr adls_pll_mgr = {
4127 .dpll_info = adls_plls,
4128 .compute_dplls = icl_compute_dplls,
4129 .get_dplls = icl_get_dplls,
4130 .put_dplls = icl_put_dplls,
4131 .update_ref_clks = icl_update_dpll_ref_clks,
4132 .dump_hw_state = icl_dump_hw_state,
4133 };
4134
4135 static const struct dpll_info adlp_plls[] = {
4136 { "DPLL 0", &combo_pll_funcs, DPLL_ID_ICL_DPLL0, 0 },
4137 { "DPLL 1", &combo_pll_funcs, DPLL_ID_ICL_DPLL1, 0 },
4138 { "TBT PLL", &tbt_pll_funcs, DPLL_ID_ICL_TBTPLL, 0 },
4139 { "TC PLL 1", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL1, 0 },
4140 { "TC PLL 2", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL2, 0 },
4141 { "TC PLL 3", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL3, 0 },
4142 { "TC PLL 4", &dkl_pll_funcs, DPLL_ID_ICL_MGPLL4, 0 },
4143 { },
4144 };
4145
4146 static const struct intel_dpll_mgr adlp_pll_mgr = {
4147 .dpll_info = adlp_plls,
4148 .compute_dplls = icl_compute_dplls,
4149 .get_dplls = icl_get_dplls,
4150 .put_dplls = icl_put_dplls,
4151 .update_active_dpll = icl_update_active_dpll,
4152 .update_ref_clks = icl_update_dpll_ref_clks,
4153 .dump_hw_state = icl_dump_hw_state,
4154 };
4155
4156
4157
4158
4159
4160
4161
4162 void intel_shared_dpll_init(struct drm_i915_private *dev_priv)
4163 {
4164 const struct intel_dpll_mgr *dpll_mgr = NULL;
4165 const struct dpll_info *dpll_info;
4166 int i;
4167
4168 if (IS_DG2(dev_priv))
4169
4170 dpll_mgr = NULL;
4171 else if (IS_ALDERLAKE_P(dev_priv))
4172 dpll_mgr = &adlp_pll_mgr;
4173 else if (IS_ALDERLAKE_S(dev_priv))
4174 dpll_mgr = &adls_pll_mgr;
4175 else if (IS_DG1(dev_priv))
4176 dpll_mgr = &dg1_pll_mgr;
4177 else if (IS_ROCKETLAKE(dev_priv))
4178 dpll_mgr = &rkl_pll_mgr;
4179 else if (DISPLAY_VER(dev_priv) >= 12)
4180 dpll_mgr = &tgl_pll_mgr;
4181 else if (IS_JSL_EHL(dev_priv))
4182 dpll_mgr = &ehl_pll_mgr;
4183 else if (DISPLAY_VER(dev_priv) >= 11)
4184 dpll_mgr = &icl_pll_mgr;
4185 else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv))
4186 dpll_mgr = &bxt_pll_mgr;
4187 else if (DISPLAY_VER(dev_priv) == 9)
4188 dpll_mgr = &skl_pll_mgr;
4189 else if (HAS_DDI(dev_priv))
4190 dpll_mgr = &hsw_pll_mgr;
4191 else if (HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv))
4192 dpll_mgr = &pch_pll_mgr;
4193
4194 if (!dpll_mgr) {
4195 dev_priv->dpll.num_shared_dpll = 0;
4196 return;
4197 }
4198
4199 dpll_info = dpll_mgr->dpll_info;
4200
4201 for (i = 0; dpll_info[i].name; i++) {
4202 drm_WARN_ON(&dev_priv->drm, i != dpll_info[i].id);
4203 dev_priv->dpll.shared_dplls[i].info = &dpll_info[i];
4204 }
4205
4206 dev_priv->dpll.mgr = dpll_mgr;
4207 dev_priv->dpll.num_shared_dpll = i;
4208 mutex_init(&dev_priv->dpll.lock);
4209
4210 BUG_ON(dev_priv->dpll.num_shared_dpll > I915_NUM_PLLS);
4211 }
4212
4213
4214
4215
4216
4217
4218
4219
4220
4221
4222
4223
4224
4225
4226
4227 int intel_compute_shared_dplls(struct intel_atomic_state *state,
4228 struct intel_crtc *crtc,
4229 struct intel_encoder *encoder)
4230 {
4231 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
4232 const struct intel_dpll_mgr *dpll_mgr = dev_priv->dpll.mgr;
4233
4234 if (drm_WARN_ON(&dev_priv->drm, !dpll_mgr))
4235 return -EINVAL;
4236
4237 return dpll_mgr->compute_dplls(state, crtc, encoder);
4238 }
4239
4240
4241
4242
4243
4244
4245
4246
4247
4248
4249
4250
4251
4252
4253
4254
4255
4256
4257
4258
4259
4260 int intel_reserve_shared_dplls(struct intel_atomic_state *state,
4261 struct intel_crtc *crtc,
4262 struct intel_encoder *encoder)
4263 {
4264 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
4265 const struct intel_dpll_mgr *dpll_mgr = dev_priv->dpll.mgr;
4266
4267 if (drm_WARN_ON(&dev_priv->drm, !dpll_mgr))
4268 return -EINVAL;
4269
4270 return dpll_mgr->get_dplls(state, crtc, encoder);
4271 }
4272
4273
4274
4275
4276
4277
4278
4279
4280
4281
4282
4283
4284 void intel_release_shared_dplls(struct intel_atomic_state *state,
4285 struct intel_crtc *crtc)
4286 {
4287 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
4288 const struct intel_dpll_mgr *dpll_mgr = dev_priv->dpll.mgr;
4289
4290
4291
4292
4293
4294
4295
4296 if (!dpll_mgr)
4297 return;
4298
4299 dpll_mgr->put_dplls(state, crtc);
4300 }
4301
4302
4303
4304
4305
4306
4307
4308
4309
4310
4311
4312 void intel_update_active_dpll(struct intel_atomic_state *state,
4313 struct intel_crtc *crtc,
4314 struct intel_encoder *encoder)
4315 {
4316 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
4317 const struct intel_dpll_mgr *dpll_mgr = dev_priv->dpll.mgr;
4318
4319 if (drm_WARN_ON(&dev_priv->drm, !dpll_mgr))
4320 return;
4321
4322 dpll_mgr->update_active_dpll(state, crtc, encoder);
4323 }
4324
4325
4326
4327
4328
4329
4330
4331
4332
4333 int intel_dpll_get_freq(struct drm_i915_private *i915,
4334 const struct intel_shared_dpll *pll,
4335 const struct intel_dpll_hw_state *pll_state)
4336 {
4337 if (drm_WARN_ON(&i915->drm, !pll->info->funcs->get_freq))
4338 return 0;
4339
4340 return pll->info->funcs->get_freq(i915, pll, pll_state);
4341 }
4342
4343
4344
4345
4346
4347
4348
4349
4350
4351 bool intel_dpll_get_hw_state(struct drm_i915_private *i915,
4352 struct intel_shared_dpll *pll,
4353 struct intel_dpll_hw_state *hw_state)
4354 {
4355 return pll->info->funcs->get_hw_state(i915, pll, hw_state);
4356 }
4357
4358 static void readout_dpll_hw_state(struct drm_i915_private *i915,
4359 struct intel_shared_dpll *pll)
4360 {
4361 struct intel_crtc *crtc;
4362
4363 pll->on = intel_dpll_get_hw_state(i915, pll, &pll->state.hw_state);
4364
4365 if (IS_JSL_EHL(i915) && pll->on &&
4366 pll->info->id == DPLL_ID_EHL_DPLL4) {
4367 pll->wakeref = intel_display_power_get(i915,
4368 POWER_DOMAIN_DC_OFF);
4369 }
4370
4371 pll->state.pipe_mask = 0;
4372 for_each_intel_crtc(&i915->drm, crtc) {
4373 struct intel_crtc_state *crtc_state =
4374 to_intel_crtc_state(crtc->base.state);
4375
4376 if (crtc_state->hw.active && crtc_state->shared_dpll == pll)
4377 pll->state.pipe_mask |= BIT(crtc->pipe);
4378 }
4379 pll->active_mask = pll->state.pipe_mask;
4380
4381 drm_dbg_kms(&i915->drm,
4382 "%s hw state readout: pipe_mask 0x%x, on %i\n",
4383 pll->info->name, pll->state.pipe_mask, pll->on);
4384 }
4385
4386 void intel_dpll_update_ref_clks(struct drm_i915_private *i915)
4387 {
4388 if (i915->dpll.mgr && i915->dpll.mgr->update_ref_clks)
4389 i915->dpll.mgr->update_ref_clks(i915);
4390 }
4391
4392 void intel_dpll_readout_hw_state(struct drm_i915_private *i915)
4393 {
4394 int i;
4395
4396 for (i = 0; i < i915->dpll.num_shared_dpll; i++)
4397 readout_dpll_hw_state(i915, &i915->dpll.shared_dplls[i]);
4398 }
4399
4400 static void sanitize_dpll_state(struct drm_i915_private *i915,
4401 struct intel_shared_dpll *pll)
4402 {
4403 if (!pll->on)
4404 return;
4405
4406 adlp_cmtg_clock_gating_wa(i915, pll);
4407
4408 if (pll->active_mask)
4409 return;
4410
4411 drm_dbg_kms(&i915->drm,
4412 "%s enabled but not in use, disabling\n",
4413 pll->info->name);
4414
4415 pll->info->funcs->disable(i915, pll);
4416 pll->on = false;
4417 }
4418
4419 void intel_dpll_sanitize_state(struct drm_i915_private *i915)
4420 {
4421 int i;
4422
4423 for (i = 0; i < i915->dpll.num_shared_dpll; i++)
4424 sanitize_dpll_state(i915, &i915->dpll.shared_dplls[i]);
4425 }
4426
4427
4428
4429
4430
4431
4432
4433
4434 void intel_dpll_dump_hw_state(struct drm_i915_private *dev_priv,
4435 const struct intel_dpll_hw_state *hw_state)
4436 {
4437 if (dev_priv->dpll.mgr) {
4438 dev_priv->dpll.mgr->dump_hw_state(dev_priv, hw_state);
4439 } else {
4440
4441
4442
4443 drm_dbg_kms(&dev_priv->drm,
4444 "dpll_hw_state: dpll: 0x%x, dpll_md: 0x%x, "
4445 "fp0: 0x%x, fp1: 0x%x\n",
4446 hw_state->dpll,
4447 hw_state->dpll_md,
4448 hw_state->fp0,
4449 hw_state->fp1);
4450 }
4451 }
4452
4453 static void
4454 verify_single_dpll_state(struct drm_i915_private *dev_priv,
4455 struct intel_shared_dpll *pll,
4456 struct intel_crtc *crtc,
4457 struct intel_crtc_state *new_crtc_state)
4458 {
4459 struct intel_dpll_hw_state dpll_hw_state;
4460 u8 pipe_mask;
4461 bool active;
4462
4463 memset(&dpll_hw_state, 0, sizeof(dpll_hw_state));
4464
4465 drm_dbg_kms(&dev_priv->drm, "%s\n", pll->info->name);
4466
4467 active = intel_dpll_get_hw_state(dev_priv, pll, &dpll_hw_state);
4468
4469 if (!(pll->info->flags & INTEL_DPLL_ALWAYS_ON)) {
4470 I915_STATE_WARN(!pll->on && pll->active_mask,
4471 "pll in active use but not on in sw tracking\n");
4472 I915_STATE_WARN(pll->on && !pll->active_mask,
4473 "pll is on but not used by any active pipe\n");
4474 I915_STATE_WARN(pll->on != active,
4475 "pll on state mismatch (expected %i, found %i)\n",
4476 pll->on, active);
4477 }
4478
4479 if (!crtc) {
4480 I915_STATE_WARN(pll->active_mask & ~pll->state.pipe_mask,
4481 "more active pll users than references: 0x%x vs 0x%x\n",
4482 pll->active_mask, pll->state.pipe_mask);
4483
4484 return;
4485 }
4486
4487 pipe_mask = BIT(crtc->pipe);
4488
4489 if (new_crtc_state->hw.active)
4490 I915_STATE_WARN(!(pll->active_mask & pipe_mask),
4491 "pll active mismatch (expected pipe %c in active mask 0x%x)\n",
4492 pipe_name(crtc->pipe), pll->active_mask);
4493 else
4494 I915_STATE_WARN(pll->active_mask & pipe_mask,
4495 "pll active mismatch (didn't expect pipe %c in active mask 0x%x)\n",
4496 pipe_name(crtc->pipe), pll->active_mask);
4497
4498 I915_STATE_WARN(!(pll->state.pipe_mask & pipe_mask),
4499 "pll enabled crtcs mismatch (expected 0x%x in 0x%x)\n",
4500 pipe_mask, pll->state.pipe_mask);
4501
4502 I915_STATE_WARN(pll->on && memcmp(&pll->state.hw_state,
4503 &dpll_hw_state,
4504 sizeof(dpll_hw_state)),
4505 "pll hw state mismatch\n");
4506 }
4507
4508 void intel_shared_dpll_state_verify(struct intel_crtc *crtc,
4509 struct intel_crtc_state *old_crtc_state,
4510 struct intel_crtc_state *new_crtc_state)
4511 {
4512 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
4513
4514 if (new_crtc_state->shared_dpll)
4515 verify_single_dpll_state(dev_priv, new_crtc_state->shared_dpll,
4516 crtc, new_crtc_state);
4517
4518 if (old_crtc_state->shared_dpll &&
4519 old_crtc_state->shared_dpll != new_crtc_state->shared_dpll) {
4520 u8 pipe_mask = BIT(crtc->pipe);
4521 struct intel_shared_dpll *pll = old_crtc_state->shared_dpll;
4522
4523 I915_STATE_WARN(pll->active_mask & pipe_mask,
4524 "pll active mismatch (didn't expect pipe %c in active mask (0x%x))\n",
4525 pipe_name(crtc->pipe), pll->active_mask);
4526 I915_STATE_WARN(pll->state.pipe_mask & pipe_mask,
4527 "pll enabled crtcs mismatch (found %x in enabled mask (0x%x))\n",
4528 pipe_name(crtc->pipe), pll->state.pipe_mask);
4529 }
4530 }
4531
4532 void intel_shared_dpll_verify_disabled(struct drm_i915_private *i915)
4533 {
4534 int i;
4535
4536 for (i = 0; i < i915->dpll.num_shared_dpll; i++)
4537 verify_single_dpll_state(i915, &i915->dpll.shared_dplls[i],
4538 NULL, NULL);
4539 }