#include <linux/clk-provider.h>
#include <linux/delay.h>

#include "dsi_phy.h"
#include "dsi.xml.h"
#include "dsi_phy_28nm_8960.xml.h"
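
/*
 * DSI PLL 28nm (8960 family) clock topology, as programmed below:
 *
 *   dsi%dvco_clk (the PLL VCO registered in pll_28nm_register())
 *     -> POSTDIV1 (PLL_CTRL_8): bit clock divider, set up in
 *        dsi_pll_28nm_vco_prepare() and not exposed as a clock
 *     -> POSTDIV2 (PLL_CTRL_9): byte clock divider, exposed as "dsi%dpllbyte"
 *     -> POSTDIV3 (PLL_CTRL_10): dsi/pixel clock divider, exposed as "dsi%dpll"
 */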
#define POLL_MAX_READS          8000
#define POLL_TIMEOUT_US         1

#define VCO_REF_CLK_RATE        27000000
#define VCO_MIN_RATE            600000000
#define VCO_MAX_RATE            1200000000

#define VCO_PREF_DIV_RATIO      27

struct pll_28nm_cached_state {
        unsigned long vco_rate;
        u8 postdiv3;
        u8 postdiv2;
        u8 postdiv1;
};

struct clk_bytediv {
        struct clk_hw hw;
        void __iomem *reg;
};

struct dsi_pll_28nm {
        struct clk_hw clk_hw;

        struct msm_dsi_phy *phy;

        struct pll_28nm_cached_state cached_state;
};

#define to_pll_28nm(x) container_of(x, struct dsi_pll_28nm, clk_hw)

static bool pll_28nm_poll_for_ready(struct dsi_pll_28nm *pll_28nm,
                                    int nb_tries, int timeout_us)
{
        bool pll_locked = false;
        u32 val;

        while (nb_tries--) {
                val = dsi_phy_read(pll_28nm->phy->pll_base + REG_DSI_28nm_8960_PHY_PLL_RDY);
                pll_locked = !!(val & DSI_28nm_8960_PHY_PLL_RDY_PLL_RDY);

                if (pll_locked)
                        break;

                udelay(timeout_us);
        }
        DBG("DSI PLL is %slocked", pll_locked ? "" : "*not* ");

        return pll_locked;
}
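
/*
 * clk_ops for the PLL VCO clock (dsi%dvco_clk)
 */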
static int dsi_pll_28nm_clk_set_rate(struct clk_hw *hw, unsigned long rate,
                                     unsigned long parent_rate)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
        void __iomem *base = pll_28nm->phy->pll_base;
        u32 val, temp, fb_divider;

        DBG("rate=%lu, parent's=%lu", rate, parent_rate);

        /*
         * Program the feedback divider so that
         * vco_rate = 2 * (ref_clk / VCO_PREF_DIV_RATIO) * (fb_divider + 1)
         */
        temp = rate / 10;
        val = VCO_REF_CLK_RATE / 10;
        fb_divider = (temp * VCO_PREF_DIV_RATIO) / val;
        fb_divider = fb_divider / 2 - 1;
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_1,
                      fb_divider & 0xff);

        val = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_2);

        val |= (fb_divider >> 8) & 0x07;

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_2,
                      val);

        val = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_3);

        /* reference clock pre-divider */
        val |= (VCO_PREF_DIV_RATIO - 1) & 0x3f;

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_3,
                      val);

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_6,
                      0xf);

        val = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_8);
        val |= 0x7 << 4;
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_8,
                      val);

        return 0;
}

static int dsi_pll_28nm_clk_is_enabled(struct clk_hw *hw)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

        return pll_28nm_poll_for_ready(pll_28nm, POLL_MAX_READS,
                                       POLL_TIMEOUT_US);
}

static unsigned long dsi_pll_28nm_clk_recalc_rate(struct clk_hw *hw,
                                                  unsigned long parent_rate)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
        void __iomem *base = pll_28nm->phy->pll_base;
        unsigned long vco_rate;
        u32 status, fb_divider, temp, ref_divider;

        VERB("parent_rate=%lu", parent_rate);

        status = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_0);

        if (status & DSI_28nm_8960_PHY_PLL_CTRL_0_ENABLE) {
                fb_divider = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_1);
                fb_divider &= 0xff;
                temp = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_2) & 0x07;
                fb_divider = (temp << 8) | fb_divider;
                fb_divider += 1;

                ref_divider = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_3);
                ref_divider &= 0x3f;
                ref_divider += 1;

                /* mirror of dsi_pll_28nm_clk_set_rate(): 2 * (ref / ref_div) * fb_div */
                vco_rate = (parent_rate / ref_divider) * fb_divider * 2;
        } else {
                vco_rate = 0;
        }

        DBG("returning vco rate = %lu", vco_rate);

        return vco_rate;
}

static int dsi_pll_28nm_vco_prepare(struct clk_hw *hw)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
        struct device *dev = &pll_28nm->phy->pdev->dev;
        void __iomem *base = pll_28nm->phy->pll_base;
        bool locked;
        unsigned int bit_div, byte_div;
        int max_reads = 1000, timeout_us = 100;
        u32 val;

        DBG("id=%d", pll_28nm->phy->id);

        if (unlikely(pll_28nm->phy->pll_on))
                return 0;
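
        /*
         * Before enabling the PLL, derive the bit clock divider (POSTDIV1,
         * PLL_CTRL_8) from the byte clock divider already programmed in
         * PLL_CTRL_9: bit_div = byte_div / 8. The bit clock is not exposed
         * as a clock, so it is configured here directly.
         */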
        val = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_9);
        byte_div = val + 1;
        bit_div = byte_div / 8;

        val = dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_8);
        val &= ~0xf;
        val |= (bit_div - 1);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_8, val);

        /* enable the PLL */
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_0,
                      DSI_28nm_8960_PHY_PLL_CTRL_0_ENABLE);

        locked = pll_28nm_poll_for_ready(pll_28nm, max_reads, timeout_us);

        if (unlikely(!locked)) {
                DRM_DEV_ERROR(dev, "DSI PLL lock failed\n");
                return -EINVAL;
        }

        DBG("DSI PLL lock success");
        pll_28nm->phy->pll_on = true;

        return 0;
}

static void dsi_pll_28nm_vco_unprepare(struct clk_hw *hw)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

        DBG("id=%d", pll_28nm->phy->id);

        if (unlikely(!pll_28nm->phy->pll_on))
                return;

        dsi_phy_write(pll_28nm->phy->pll_base + REG_DSI_28nm_8960_PHY_PLL_CTRL_0, 0x00);

        pll_28nm->phy->pll_on = false;
}

static long dsi_pll_28nm_clk_round_rate(struct clk_hw *hw,
                unsigned long rate, unsigned long *parent_rate)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

        if (rate < pll_28nm->phy->cfg->min_pll_rate)
                return pll_28nm->phy->cfg->min_pll_rate;
        else if (rate > pll_28nm->phy->cfg->max_pll_rate)
                return pll_28nm->phy->cfg->max_pll_rate;
        else
                return rate;
}

static const struct clk_ops clk_ops_dsi_pll_28nm_vco = {
        .round_rate = dsi_pll_28nm_clk_round_rate,
        .set_rate = dsi_pll_28nm_clk_set_rate,
        .recalc_rate = dsi_pll_28nm_clk_recalc_rate,
        .prepare = dsi_pll_28nm_vco_prepare,
        .unprepare = dsi_pll_28nm_vco_unprepare,
        .is_enabled = dsi_pll_28nm_clk_is_enabled,
};
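
/*
 * Custom byte clock divider clk_ops
 *
 * The DSI host sets this clock to the desired byte clock rate. Because the
 * divider is registered with CLK_SET_RATE_PARENT, the request propagates up
 * and drives the VCO to a multiple of the byte clock (see
 * get_vco_mul_factor()), while the byte clock post divider (PLL_CTRL_9) is
 * programmed to divide back down to the requested rate.
 */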
#define to_clk_bytediv(_hw) container_of(_hw, struct clk_bytediv, hw)

static unsigned long clk_bytediv_recalc_rate(struct clk_hw *hw,
                                             unsigned long parent_rate)
{
        struct clk_bytediv *bytediv = to_clk_bytediv(hw);
        unsigned int div;

        div = dsi_phy_read(bytediv->reg) & 0xff;

        return parent_rate / (div + 1);
}

/* find the multiple of the byte clock at which the VCO should run */
static unsigned int get_vco_mul_factor(unsigned long byte_clk_rate)
{
        unsigned long bit_mhz;

        /* convert to the bit clock rate in MHz */
        bit_mhz = (byte_clk_rate * 8) / 1000000;

        if (bit_mhz < 125)
                return 64;
        else if (bit_mhz < 250)
                return 32;
        else if (bit_mhz < 600)
                return 16;
        else
                return 8;
}

static long clk_bytediv_round_rate(struct clk_hw *hw, unsigned long rate,
                                   unsigned long *prate)
{
        unsigned long best_parent;
        unsigned int factor;

        factor = get_vco_mul_factor(rate);

        best_parent = rate * factor;
        *prate = clk_hw_round_rate(clk_hw_get_parent(hw), best_parent);

        return *prate / factor;
}

static int clk_bytediv_set_rate(struct clk_hw *hw, unsigned long rate,
                                unsigned long parent_rate)
{
        struct clk_bytediv *bytediv = to_clk_bytediv(hw);
        u32 val;
        unsigned int factor;

        factor = get_vco_mul_factor(rate);

        val = dsi_phy_read(bytediv->reg);
        val |= (factor - 1) & 0xff;
        dsi_phy_write(bytediv->reg, val);

        return 0;
}

/* our special divider clock for the byte clock */
static const struct clk_ops clk_bytediv_ops = {
        .round_rate = clk_bytediv_round_rate,
        .set_rate = clk_bytediv_set_rate,
        .recalc_rate = clk_bytediv_recalc_rate,
};
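
/*
 * PLL state save/restore callbacks, wired into dsi_phy_28nm_8960_cfgs.ops
 * below. They cache the three post-divider registers and the VCO rate so
 * they can be reprogrammed later.
 */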
static void dsi_28nm_pll_save_state(struct msm_dsi_phy *phy)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(phy->vco_hw);
        struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;
        void __iomem *base = pll_28nm->phy->pll_base;

        cached_state->postdiv3 =
                        dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_10);
        cached_state->postdiv2 =
                        dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_9);
        cached_state->postdiv1 =
                        dsi_phy_read(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_8);

        cached_state->vco_rate = clk_hw_get_rate(phy->vco_hw);
}

static int dsi_28nm_pll_restore_state(struct msm_dsi_phy *phy)
{
        struct dsi_pll_28nm *pll_28nm = to_pll_28nm(phy->vco_hw);
        struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;
        void __iomem *base = pll_28nm->phy->pll_base;
        int ret;

        ret = dsi_pll_28nm_clk_set_rate(phy->vco_hw,
                                        cached_state->vco_rate, 0);
        if (ret) {
                DRM_DEV_ERROR(&pll_28nm->phy->pdev->dev,
                              "restore vco rate failed. ret=%d\n", ret);
                return ret;
        }

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_10,
                      cached_state->postdiv3);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_9,
                      cached_state->postdiv2);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_PLL_CTRL_8,
                      cached_state->postdiv1);

        return 0;
}

static int pll_28nm_register(struct dsi_pll_28nm *pll_28nm, struct clk_hw **provided_clocks)
{
        char *clk_name, *parent_name, *vco_name;
        struct clk_init_data vco_init = {
                .parent_data = &(const struct clk_parent_data) {
                        .fw_name = "ref",
                },
                .num_parents = 1,
                .flags = CLK_IGNORE_UNUSED,
                .ops = &clk_ops_dsi_pll_28nm_vco,
        };
        struct device *dev = &pll_28nm->phy->pdev->dev;
        struct clk_hw *hw;
        struct clk_bytediv *bytediv;
        struct clk_init_data bytediv_init = { };
        int ret;

        DBG("%d", pll_28nm->phy->id);

        bytediv = devm_kzalloc(dev, sizeof(*bytediv), GFP_KERNEL);
        if (!bytediv)
                return -ENOMEM;

        vco_name = devm_kzalloc(dev, 32, GFP_KERNEL);
        if (!vco_name)
                return -ENOMEM;

        parent_name = devm_kzalloc(dev, 32, GFP_KERNEL);
        if (!parent_name)
                return -ENOMEM;

        clk_name = devm_kzalloc(dev, 32, GFP_KERNEL);
        if (!clk_name)
                return -ENOMEM;

        snprintf(vco_name, 32, "dsi%dvco_clk", pll_28nm->phy->id);
        vco_init.name = vco_name;

        pll_28nm->clk_hw.init = &vco_init;

        ret = devm_clk_hw_register(dev, &pll_28nm->clk_hw);
        if (ret)
                return ret;

        /* prepare and register the byte clock divider */
        bytediv->hw.init = &bytediv_init;
        bytediv->reg = pll_28nm->phy->pll_base + REG_DSI_28nm_8960_PHY_PLL_CTRL_9;

        snprintf(parent_name, 32, "dsi%dvco_clk", pll_28nm->phy->id);
        snprintf(clk_name, 32, "dsi%dpllbyte", pll_28nm->phy->id + 1);

        bytediv_init.name = clk_name;
        bytediv_init.ops = &clk_bytediv_ops;
        bytediv_init.flags = CLK_SET_RATE_PARENT;
        bytediv_init.parent_names = (const char * const *) &parent_name;
        bytediv_init.num_parents = 1;

        ret = devm_clk_hw_register(dev, &bytediv->hw);
        if (ret)
                return ret;
        provided_clocks[DSI_BYTE_PLL_CLK] = &bytediv->hw;

        /* dsi/pixel clock post divider */
        snprintf(clk_name, 32, "dsi%dpll", pll_28nm->phy->id + 1);

        hw = devm_clk_hw_register_divider(dev, clk_name,
                        parent_name, 0, pll_28nm->phy->pll_base +
                        REG_DSI_28nm_8960_PHY_PLL_CTRL_10,
                        0, 8, 0, NULL);
        if (IS_ERR(hw))
                return PTR_ERR(hw);
        provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

        return 0;
}

static int dsi_pll_28nm_8960_init(struct msm_dsi_phy *phy)
{
        struct platform_device *pdev = phy->pdev;
        struct dsi_pll_28nm *pll_28nm;
        int ret;

        if (!pdev)
                return -ENODEV;

        pll_28nm = devm_kzalloc(&pdev->dev, sizeof(*pll_28nm), GFP_KERNEL);
        if (!pll_28nm)
                return -ENOMEM;

        pll_28nm->phy = phy;

        ret = pll_28nm_register(pll_28nm, phy->provided_clocks->hws);
        if (ret) {
                DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
                return ret;
        }

        phy->vco_hw = &pll_28nm->clk_hw;

        return 0;
}

static void dsi_28nm_dphy_set_timing(struct msm_dsi_phy *phy,
                                     struct msm_dsi_dphy_timing *timing)
{
        void __iomem *base = phy->base;

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_0,
                      DSI_28nm_8960_PHY_TIMING_CTRL_0_CLK_ZERO(timing->clk_zero));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_1,
                      DSI_28nm_8960_PHY_TIMING_CTRL_1_CLK_TRAIL(timing->clk_trail));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_2,
                      DSI_28nm_8960_PHY_TIMING_CTRL_2_CLK_PREPARE(timing->clk_prepare));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_3, 0x0);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_4,
                      DSI_28nm_8960_PHY_TIMING_CTRL_4_HS_EXIT(timing->hs_exit));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_5,
                      DSI_28nm_8960_PHY_TIMING_CTRL_5_HS_ZERO(timing->hs_zero));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_6,
                      DSI_28nm_8960_PHY_TIMING_CTRL_6_HS_PREPARE(timing->hs_prepare));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_7,
                      DSI_28nm_8960_PHY_TIMING_CTRL_7_HS_TRAIL(timing->hs_trail));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_8,
                      DSI_28nm_8960_PHY_TIMING_CTRL_8_HS_RQST(timing->hs_rqst));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_9,
                      DSI_28nm_8960_PHY_TIMING_CTRL_9_TA_GO(timing->ta_go) |
                      DSI_28nm_8960_PHY_TIMING_CTRL_9_TA_SURE(timing->ta_sure));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_10,
                      DSI_28nm_8960_PHY_TIMING_CTRL_10_TA_GET(timing->ta_get));
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_TIMING_CTRL_11,
                      DSI_28nm_8960_PHY_TIMING_CTRL_11_TRIG3_CMD(0));
}

static void dsi_28nm_phy_regulator_init(struct msm_dsi_phy *phy)
{
        void __iomem *base = phy->reg_base;

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_0, 0x3);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_1, 1);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_2, 1);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_3, 0);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_4,
                      0x100);
}

static void dsi_28nm_phy_regulator_ctrl(struct msm_dsi_phy *phy)
{
        void __iomem *base = phy->reg_base;

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_0, 0x3);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_1, 0xa);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_2, 0x4);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_3, 0x0);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CTRL_4, 0x20);
}

static void dsi_28nm_phy_calibration(struct msm_dsi_phy *phy)
{
        void __iomem *base = phy->reg_base;
        u32 status;
        int i = 5000;

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_REGULATOR_CAL_PWR_CFG,
                      0x3);

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_CAL_SW_CFG_2, 0x0);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_CAL_HW_CFG_1, 0x5a);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_CAL_HW_CFG_3, 0x10);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_CAL_HW_CFG_4, 0x1);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_CAL_HW_CFG_0, 0x1);

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_CAL_HW_TRIGGER, 0x1);
        usleep_range(5000, 6000);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_MISC_CAL_HW_TRIGGER, 0x0);

        /* poll until the calibration block reports it is no longer busy */
        do {
                status = dsi_phy_read(base +
                                      REG_DSI_28nm_8960_PHY_MISC_CAL_STATUS);

                if (!(status & DSI_28nm_8960_PHY_MISC_CAL_STATUS_CAL_BUSY))
                        break;

                udelay(1);
        } while (--i > 0);
}

static void dsi_28nm_phy_lane_config(struct msm_dsi_phy *phy)
{
        void __iomem *base = phy->base;
        int i;

        for (i = 0; i < 4; i++) {
                dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LN_CFG_0(i), 0x80);
                dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LN_CFG_1(i), 0x45);
                dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LN_CFG_2(i), 0x00);
                dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LN_TEST_DATAPATH(i),
                              0x00);
                dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LN_TEST_STR_0(i),
                              0x01);
                dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LN_TEST_STR_1(i),
                              0x66);
        }

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LNCK_CFG_0, 0x40);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LNCK_CFG_1, 0x67);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LNCK_CFG_2, 0x0);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LNCK_TEST_DATAPATH, 0x0);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LNCK_TEST_STR0, 0x1);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LNCK_TEST_STR1, 0x88);
}

static int dsi_28nm_phy_enable(struct msm_dsi_phy *phy,
                               struct msm_dsi_phy_clk_request *clk_req)
{
        struct msm_dsi_dphy_timing *timing = &phy->timing;
        void __iomem *base = phy->base;

        DBG("");

        if (msm_dsi_dphy_timing_calc(timing, clk_req)) {
                DRM_DEV_ERROR(&phy->pdev->dev,
                              "%s: D-PHY timing calculation failed\n", __func__);
                return -EINVAL;
        }

        dsi_28nm_phy_regulator_init(phy);

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_LDO_CTRL, 0x04);

        /* strength control */
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_STRENGTH_0, 0xff);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_STRENGTH_1, 0x00);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_STRENGTH_2, 0x06);

        /* phy ctrl */
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_CTRL_0, 0x5f);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_CTRL_1, 0x00);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_CTRL_2, 0x00);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_CTRL_3, 0x10);

        dsi_28nm_phy_regulator_ctrl(phy);

        dsi_28nm_phy_calibration(phy);

        dsi_28nm_phy_lane_config(phy);

        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_BIST_CTRL_4, 0x0f);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_BIST_CTRL_1, 0x03);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_BIST_CTRL_0, 0x03);
        dsi_phy_write(base + REG_DSI_28nm_8960_PHY_BIST_CTRL_4, 0x0);

        dsi_28nm_dphy_set_timing(phy, timing);

        return 0;
}

static void dsi_28nm_phy_disable(struct msm_dsi_phy *phy)
{
        dsi_phy_write(phy->base + REG_DSI_28nm_8960_PHY_CTRL_0, 0x0);
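
        /*
         * Ensure the register write above has completed before returning,
         * so the PHY is fully disabled.
         */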
        wmb();
}

const struct msm_dsi_phy_cfg dsi_phy_28nm_8960_cfgs = {
        .has_phy_regulator = true,
        .reg_cfg = {
                .num = 1,
                .regs = {
                        {"vddio", 100000, 100},
                },
        },
        .ops = {
                .enable = dsi_28nm_phy_enable,
                .disable = dsi_28nm_phy_disable,
                .pll_init = dsi_pll_28nm_8960_init,
                .save_pll_state = dsi_28nm_pll_save_state,
                .restore_pll_state = dsi_28nm_pll_restore_state,
        },
        .min_pll_rate = VCO_MIN_RATE,
        .max_pll_rate = VCO_MAX_RATE,
        .io_start = { 0x4700300, 0x5800300 },
        .num_dsi_phy = 2,
};