0001 // SPDX-License-Identifier: GPL-2.0
0002 /*
0003  * Copyright (c) 2014-2020, NVIDIA CORPORATION.  All rights reserved.
0004  */
0005 
0006 #include <linux/kernel.h>
0007 #include <linux/io.h>
0008 #include <linux/clk.h>
0009 #include <linux/delay.h>
0010 #include <linux/of.h>
0011 
0012 #include <soc/tegra/mc.h>
0013 
0014 #include "tegra210-emc.h"
0015 #include "tegra210-mc.h"
0016 
/*
 * Enable flags for specifying verbosity. These are OR-able bit flags that
 * callers pass to emc_dbg(); note that emc_dbg() currently routes everything
 * to dev_dbg() and ignores the flags argument.
 */
#define INFO            (1 << 0)
#define STEPS           (1 << 1)
#define SUB_STEPS       (1 << 2)
#define PRELOCK         (1 << 3)
#define PRELOCK_STEPS   (1 << 4)
#define ACTIVE_EN       (1 << 5)
#define PRAMP_UP        (1 << 6)
#define PRAMP_DN        (1 << 7)
#define EMA_WRITES      (1 << 10)
#define EMA_UPDATES     (1 << 11)
#define PER_TRAIN       (1 << 16)
#define CC_PRINT        (1 << 17)
#define CCFIFO          (1 << 29)
#define REGS            (1 << 30)
/* Use an unsigned constant: (1 << 31) shifts into the sign bit of int (UB). */
#define REG_LISTS       (1u << 31)

#define emc_dbg(emc, flags, ...) dev_dbg(emc->dev, __VA_ARGS__)

/* Version identifiers logged at the start of the respective sequences. */
#define DVFS_CLOCK_CHANGE_VERSION	21021
#define EMC_PRELOCK_VERSION		2101
0040 
/*
 * Sequence/update selectors passed to periodic_compensation_handler() and
 * update_clock_tree_delay(). The *_SEQUENCE values select a whole handler
 * flow; the remaining values select how a single sample is folded into the
 * per-timing moving averages (accumulate, average, or weighted update).
 */
enum {
	DVFS_SEQUENCE = 1,
	WRITE_TRAINING_SEQUENCE = 2,
	PERIODIC_TRAINING_SEQUENCE = 3,
	DVFS_PT1 = 10,
	DVFS_UPDATE = 11,
	TRAINING_PT1 = 12,
	TRAINING_UPDATE = 13,
	PERIODIC_TRAINING_UPDATE = 14
};
0051 
/*
 * PTFV defines - basically just indexes into the per table PTFV array.
 * The C<c>D<d>U<u> names appear to encode channel/device/rank coordinates
 * for each DQS oscillator moving average — TODO confirm against the TRM.
 * Note index 8 is intentionally unused (gap between U1 entries and
 * DVFS_SAMPLES).
 */
#define PTFV_DQSOSC_MOVAVG_C0D0U0_INDEX		0
#define PTFV_DQSOSC_MOVAVG_C0D0U1_INDEX		1
#define PTFV_DQSOSC_MOVAVG_C0D1U0_INDEX		2
#define PTFV_DQSOSC_MOVAVG_C0D1U1_INDEX		3
#define PTFV_DQSOSC_MOVAVG_C1D0U0_INDEX		4
#define PTFV_DQSOSC_MOVAVG_C1D0U1_INDEX		5
#define PTFV_DQSOSC_MOVAVG_C1D1U0_INDEX		6
#define PTFV_DQSOSC_MOVAVG_C1D1U1_INDEX		7
#define PTFV_DVFS_SAMPLES_INDEX			9
#define PTFV_MOVAVG_WEIGHT_INDEX		10
#define PTFV_CONFIG_CTRL_INDEX			11

/* When set in PTFV_CONFIG_CTRL, the previous frequency's EMA may be reused. */
#define PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA	(1 << 0)

/*
 * Do arithmetic in fixed point.
 */
#define MOVAVG_PRECISION_FACTOR		100

/*
 * The division portion of the average operation.
 * NOTE: these macros expect a variable named 'next' (the target timing) to
 * be in scope at the expansion site.
 */
#define __AVERAGE_PTFV(dev)						\
	({ next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] =	\
	   next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] /	\
	   next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })

/*
 * Convert val to fixed point and add it to the temporary average.
 */
#define __INCREMENT_PTFV(dev, val)					\
	({ next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] +=	\
	   ((val) * MOVAVG_PRECISION_FACTOR); })

/*
 * Convert a moving average back to integral form and return the value.
 */
#define __MOVAVG_AC(timing, dev)					\
	((timing)->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] /	\
	 MOVAVG_PRECISION_FACTOR)

/*
 * Weighted update: fold nval into the exponential moving average with the
 * table-provided weight. Expects 'next' and 'emc' in scope at the
 * expansion site.
 */
#define __WEIGHTED_UPDATE_PTFV(dev, nval)				\
	do {								\
		int w = PTFV_MOVAVG_WEIGHT_INDEX;			\
		int dqs = PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX;		\
									\
		next->ptfv_list[dqs] =					\
			((nval * MOVAVG_PRECISION_FACTOR) +		\
			 (next->ptfv_list[dqs] *			\
			  next->ptfv_list[w])) /			\
			(next->ptfv_list[w] + 1);			\
									\
		emc_dbg(emc, EMA_UPDATES, "%s: (s=%lu) EMA: %u\n",	\
			__stringify(dev), nval, next->ptfv_list[dqs]);	\
	} while (0)

/* Access a particular average. */
#define __MOVAVG(timing, dev)                      \
	((timing)->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX])
0115 
0116 static u32 update_clock_tree_delay(struct tegra210_emc *emc, int type)
0117 {
0118     bool periodic_training_update = type == PERIODIC_TRAINING_UPDATE;
0119     struct tegra210_emc_timing *last = emc->last;
0120     struct tegra210_emc_timing *next = emc->next;
0121     u32 last_timing_rate_mhz = last->rate / 1000;
0122     u32 next_timing_rate_mhz = next->rate / 1000;
0123     bool dvfs_update = type == DVFS_UPDATE;
0124     s32 tdel = 0, tmdel = 0, adel = 0;
0125     bool dvfs_pt1 = type == DVFS_PT1;
0126     unsigned long cval = 0;
0127     u32 temp[2][2], value;
0128     unsigned int i;
0129 
0130     /*
0131      * Dev0 MSB.
0132      */
0133     if (dvfs_pt1 || periodic_training_update) {
0134         value = tegra210_emc_mrr_read(emc, 2, 19);
0135 
0136         for (i = 0; i < emc->num_channels; i++) {
0137             temp[i][0] = (value & 0x00ff) << 8;
0138             temp[i][1] = (value & 0xff00) << 0;
0139             value >>= 16;
0140         }
0141 
0142         /*
0143          * Dev0 LSB.
0144          */
0145         value = tegra210_emc_mrr_read(emc, 2, 18);
0146 
0147         for (i = 0; i < emc->num_channels; i++) {
0148             temp[i][0] |= (value & 0x00ff) >> 0;
0149             temp[i][1] |= (value & 0xff00) >> 8;
0150             value >>= 16;
0151         }
0152     }
0153 
0154     if (dvfs_pt1 || periodic_training_update) {
0155         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0156         cval *= 1000000;
0157         cval /= last_timing_rate_mhz * 2 * temp[0][0];
0158     }
0159 
0160     if (dvfs_pt1)
0161         __INCREMENT_PTFV(C0D0U0, cval);
0162     else if (dvfs_update)
0163         __AVERAGE_PTFV(C0D0U0);
0164     else if (periodic_training_update)
0165         __WEIGHTED_UPDATE_PTFV(C0D0U0, cval);
0166 
0167     if (dvfs_update || periodic_training_update) {
0168         tdel = next->current_dram_clktree[C0D0U0] -
0169                 __MOVAVG_AC(next, C0D0U0);
0170         tmdel = (tdel < 0) ? -1 * tdel : tdel;
0171         adel = tmdel;
0172 
0173         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0174             next->tree_margin)
0175             next->current_dram_clktree[C0D0U0] =
0176                 __MOVAVG_AC(next, C0D0U0);
0177     }
0178 
0179     if (dvfs_pt1 || periodic_training_update) {
0180         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0181         cval *= 1000000;
0182         cval /= last_timing_rate_mhz * 2 * temp[0][1];
0183     }
0184 
0185     if (dvfs_pt1)
0186         __INCREMENT_PTFV(C0D0U1, cval);
0187     else if (dvfs_update)
0188         __AVERAGE_PTFV(C0D0U1);
0189     else if (periodic_training_update)
0190         __WEIGHTED_UPDATE_PTFV(C0D0U1, cval);
0191 
0192     if (dvfs_update || periodic_training_update) {
0193         tdel = next->current_dram_clktree[C0D0U1] -
0194                 __MOVAVG_AC(next, C0D0U1);
0195         tmdel = (tdel < 0) ? -1 * tdel : tdel;
0196 
0197         if (tmdel > adel)
0198             adel = tmdel;
0199 
0200         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0201             next->tree_margin)
0202             next->current_dram_clktree[C0D0U1] =
0203                 __MOVAVG_AC(next, C0D0U1);
0204     }
0205 
0206     if (emc->num_channels > 1) {
0207         if (dvfs_pt1 || periodic_training_update) {
0208             cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0209             cval *= 1000000;
0210             cval /= last_timing_rate_mhz * 2 * temp[1][0];
0211         }
0212 
0213         if (dvfs_pt1)
0214             __INCREMENT_PTFV(C1D0U0, cval);
0215         else if (dvfs_update)
0216             __AVERAGE_PTFV(C1D0U0);
0217         else if (periodic_training_update)
0218             __WEIGHTED_UPDATE_PTFV(C1D0U0, cval);
0219 
0220         if (dvfs_update || periodic_training_update) {
0221             tdel = next->current_dram_clktree[C1D0U0] -
0222                     __MOVAVG_AC(next, C1D0U0);
0223             tmdel = (tdel < 0) ? -1 * tdel : tdel;
0224 
0225             if (tmdel > adel)
0226                 adel = tmdel;
0227 
0228             if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0229                 next->tree_margin)
0230                 next->current_dram_clktree[C1D0U0] =
0231                     __MOVAVG_AC(next, C1D0U0);
0232         }
0233 
0234         if (dvfs_pt1 || periodic_training_update) {
0235             cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0236             cval *= 1000000;
0237             cval /= last_timing_rate_mhz * 2 * temp[1][1];
0238         }
0239 
0240         if (dvfs_pt1)
0241             __INCREMENT_PTFV(C1D0U1, cval);
0242         else if (dvfs_update)
0243             __AVERAGE_PTFV(C1D0U1);
0244         else if (periodic_training_update)
0245             __WEIGHTED_UPDATE_PTFV(C1D0U1, cval);
0246 
0247         if (dvfs_update || periodic_training_update) {
0248             tdel = next->current_dram_clktree[C1D0U1] -
0249                     __MOVAVG_AC(next, C1D0U1);
0250             tmdel = (tdel < 0) ? -1 * tdel : tdel;
0251 
0252             if (tmdel > adel)
0253                 adel = tmdel;
0254 
0255             if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0256                 next->tree_margin)
0257                 next->current_dram_clktree[C1D0U1] =
0258                     __MOVAVG_AC(next, C1D0U1);
0259         }
0260     }
0261 
0262     if (emc->num_devices < 2)
0263         goto done;
0264 
0265     /*
0266      * Dev1 MSB.
0267      */
0268     if (dvfs_pt1 || periodic_training_update) {
0269         value = tegra210_emc_mrr_read(emc, 1, 19);
0270 
0271         for (i = 0; i < emc->num_channels; i++) {
0272             temp[i][0] = (value & 0x00ff) << 8;
0273             temp[i][1] = (value & 0xff00) << 0;
0274             value >>= 16;
0275         }
0276 
0277         /*
0278          * Dev1 LSB.
0279          */
0280         value = tegra210_emc_mrr_read(emc, 2, 18);
0281 
0282         for (i = 0; i < emc->num_channels; i++) {
0283             temp[i][0] |= (value & 0x00ff) >> 0;
0284             temp[i][1] |= (value & 0xff00) >> 8;
0285             value >>= 16;
0286         }
0287     }
0288 
0289     if (dvfs_pt1 || periodic_training_update) {
0290         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0291         cval *= 1000000;
0292         cval /= last_timing_rate_mhz * 2 * temp[0][0];
0293     }
0294 
0295     if (dvfs_pt1)
0296         __INCREMENT_PTFV(C0D1U0, cval);
0297     else if (dvfs_update)
0298         __AVERAGE_PTFV(C0D1U0);
0299     else if (periodic_training_update)
0300         __WEIGHTED_UPDATE_PTFV(C0D1U0, cval);
0301 
0302     if (dvfs_update || periodic_training_update) {
0303         tdel = next->current_dram_clktree[C0D1U0] -
0304                 __MOVAVG_AC(next, C0D1U0);
0305         tmdel = (tdel < 0) ? -1 * tdel : tdel;
0306 
0307         if (tmdel > adel)
0308             adel = tmdel;
0309 
0310         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0311             next->tree_margin)
0312             next->current_dram_clktree[C0D1U0] =
0313                 __MOVAVG_AC(next, C0D1U0);
0314     }
0315 
0316     if (dvfs_pt1 || periodic_training_update) {
0317         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0318         cval *= 1000000;
0319         cval /= last_timing_rate_mhz * 2 * temp[0][1];
0320     }
0321 
0322     if (dvfs_pt1)
0323         __INCREMENT_PTFV(C0D1U1, cval);
0324     else if (dvfs_update)
0325         __AVERAGE_PTFV(C0D1U1);
0326     else if (periodic_training_update)
0327         __WEIGHTED_UPDATE_PTFV(C0D1U1, cval);
0328 
0329     if (dvfs_update || periodic_training_update) {
0330         tdel = next->current_dram_clktree[C0D1U1] -
0331                 __MOVAVG_AC(next, C0D1U1);
0332         tmdel = (tdel < 0) ? -1 * tdel : tdel;
0333 
0334         if (tmdel > adel)
0335             adel = tmdel;
0336 
0337         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0338             next->tree_margin)
0339             next->current_dram_clktree[C0D1U1] =
0340                 __MOVAVG_AC(next, C0D1U1);
0341     }
0342 
0343     if (emc->num_channels > 1) {
0344         if (dvfs_pt1 || periodic_training_update) {
0345             cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0346             cval *= 1000000;
0347             cval /= last_timing_rate_mhz * 2 * temp[1][0];
0348         }
0349 
0350         if (dvfs_pt1)
0351             __INCREMENT_PTFV(C1D1U0, cval);
0352         else if (dvfs_update)
0353             __AVERAGE_PTFV(C1D1U0);
0354         else if (periodic_training_update)
0355             __WEIGHTED_UPDATE_PTFV(C1D1U0, cval);
0356 
0357         if (dvfs_update || periodic_training_update) {
0358             tdel = next->current_dram_clktree[C1D1U0] -
0359                     __MOVAVG_AC(next, C1D1U0);
0360             tmdel = (tdel < 0) ? -1 * tdel : tdel;
0361 
0362             if (tmdel > adel)
0363                 adel = tmdel;
0364 
0365             if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0366                 next->tree_margin)
0367                 next->current_dram_clktree[C1D1U0] =
0368                     __MOVAVG_AC(next, C1D1U0);
0369         }
0370 
0371         if (dvfs_pt1 || periodic_training_update) {
0372             cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
0373             cval *= 1000000;
0374             cval /= last_timing_rate_mhz * 2 * temp[1][1];
0375         }
0376 
0377         if (dvfs_pt1)
0378             __INCREMENT_PTFV(C1D1U1, cval);
0379         else if (dvfs_update)
0380             __AVERAGE_PTFV(C1D1U1);
0381         else if (periodic_training_update)
0382             __WEIGHTED_UPDATE_PTFV(C1D1U1, cval);
0383 
0384         if (dvfs_update || periodic_training_update) {
0385             tdel = next->current_dram_clktree[C1D1U1] -
0386                     __MOVAVG_AC(next, C1D1U1);
0387             tmdel = (tdel < 0) ? -1 * tdel : tdel;
0388 
0389             if (tmdel > adel)
0390                 adel = tmdel;
0391 
0392             if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
0393                 next->tree_margin)
0394                 next->current_dram_clktree[C1D1U1] =
0395                     __MOVAVG_AC(next, C1D1U1);
0396         }
0397     }
0398 
0399 done:
0400     return adel;
0401 }
0402 
0403 static u32 periodic_compensation_handler(struct tegra210_emc *emc, u32 type,
0404                      struct tegra210_emc_timing *last,
0405                      struct tegra210_emc_timing *next)
0406 {
0407 #define __COPY_EMA(nt, lt, dev)                     \
0408     ({ __MOVAVG(nt, dev) = __MOVAVG(lt, dev) *          \
0409        (nt)->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })
0410 
0411     u32 i, adel = 0, samples = next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX];
0412     u32 delay;
0413 
0414     delay = tegra210_emc_actual_osc_clocks(last->run_clocks);
0415     delay *= 1000;
0416     delay = 2 + (delay / last->rate);
0417 
0418     if (!next->periodic_training)
0419         return 0;
0420 
0421     if (type == DVFS_SEQUENCE) {
0422         if (last->periodic_training &&
0423             (next->ptfv_list[PTFV_CONFIG_CTRL_INDEX] &
0424              PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA)) {
0425             /*
0426              * If the previous frequency was using periodic
0427              * calibration then we can reuse the previous
0428              * frequencies EMA data.
0429              */
0430             __COPY_EMA(next, last, C0D0U0);
0431             __COPY_EMA(next, last, C0D0U1);
0432             __COPY_EMA(next, last, C1D0U0);
0433             __COPY_EMA(next, last, C1D0U1);
0434             __COPY_EMA(next, last, C0D1U0);
0435             __COPY_EMA(next, last, C0D1U1);
0436             __COPY_EMA(next, last, C1D1U0);
0437             __COPY_EMA(next, last, C1D1U1);
0438         } else {
0439             /* Reset the EMA.*/
0440             __MOVAVG(next, C0D0U0) = 0;
0441             __MOVAVG(next, C0D0U1) = 0;
0442             __MOVAVG(next, C1D0U0) = 0;
0443             __MOVAVG(next, C1D0U1) = 0;
0444             __MOVAVG(next, C0D1U0) = 0;
0445             __MOVAVG(next, C0D1U1) = 0;
0446             __MOVAVG(next, C1D1U0) = 0;
0447             __MOVAVG(next, C1D1U1) = 0;
0448 
0449             for (i = 0; i < samples; i++) {
0450                 tegra210_emc_start_periodic_compensation(emc);
0451                 udelay(delay);
0452 
0453                 /*
0454                  * Generate next sample of data.
0455                  */
0456                 adel = update_clock_tree_delay(emc, DVFS_PT1);
0457             }
0458         }
0459 
0460         /*
0461          * Seems like it should be part of the
0462          * 'if (last_timing->periodic_training)' conditional
0463          * since is already done for the else clause.
0464          */
0465         adel = update_clock_tree_delay(emc, DVFS_UPDATE);
0466     }
0467 
0468     if (type == PERIODIC_TRAINING_SEQUENCE) {
0469         tegra210_emc_start_periodic_compensation(emc);
0470         udelay(delay);
0471 
0472         adel = update_clock_tree_delay(emc, PERIODIC_TRAINING_UPDATE);
0473     }
0474 
0475     return adel;
0476 }
0477 
/*
 * tegra210_emc_r21021_periodic_compensation() - periodically re-trim the
 * DRAM clock tree while running at a fixed frequency.
 * @emc: EMC instance
 *
 * Only acts when the current timing has periodic training enabled. The
 * sequence kicks off the DQS oscillators, measures the clock tree drift via
 * periodic_compensation_handler() and, when the drift exceeds the table's
 * tree_margin, rewrites the affected DDLL/BRLSHFT trimmer registers.
 * Power-saving features and the DLL are disabled for the duration and
 * restored afterwards. Always returns 0.
 */
static u32 tegra210_emc_r21021_periodic_compensation(struct tegra210_emc *emc)
{
	u32 emc_cfg, emc_cfg_o, emc_cfg_update, del, value;
	/* Trimmer registers recomputed when the clock tree has drifted. */
	static const u32 list[] = {
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3,
		EMC_DATA_BRLSHFT_0,
		EMC_DATA_BRLSHFT_1
	};
	struct tegra210_emc_timing *last = emc->last;
	unsigned int items = ARRAY_SIZE(list), i;
	unsigned long delay;

	if (last->periodic_training) {
		emc_dbg(emc, PER_TRAIN, "Periodic training starting\n");

		value = emc_readl(emc, EMC_DBG);
		emc_cfg_o = emc_readl(emc, EMC_CFG);
		/* Mask out the power-saving features for the training run. */
		emc_cfg = emc_cfg_o & ~(EMC_CFG_DYN_SELF_REF |
					EMC_CFG_DRAM_ACPD |
					EMC_CFG_DRAM_CLKSTOP_PD);

		/*
		 * 1. Power optimizations should be off.
		 */
		emc_writel(emc, emc_cfg, EMC_CFG);

		/* Does emc_timing_update() for above changes. */
		tegra210_emc_dll_disable(emc);

		/* Wait for the DRAM to leave powerdown and self-refresh. */
		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
						     0);

		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
						     0);

		/* Save EMC_CFG_UPDATE so it can be restored in step 6.1. */
		emc_cfg_update = value = emc_readl(emc, EMC_CFG_UPDATE);
		value &= ~EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_MASK;
		value |= (2 << EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_SHIFT);
		emc_writel(emc, value, EMC_CFG_UPDATE);

		/*
		 * 2. osc kick off - this assumes training and dvfs have set
		 *    correct MR23.
		 */
		tegra210_emc_start_periodic_compensation(emc);

		/*
		 * 3. Let dram capture its clock tree delays.
		 */
		delay = tegra210_emc_actual_osc_clocks(last->run_clocks);
		delay *= 1000;
		delay /= last->rate + 1;
		udelay(delay);

		/*
		 * 4. Check delta wrt previous values (save value if margin
		 *    exceeds what is set in table).
		 */
		del = periodic_compensation_handler(emc,
						    PERIODIC_TRAINING_SEQUENCE,
						    last, last);

		/*
		 * 5. Apply compensation w.r.t. trained values (if clock tree
		 *    has drifted more than the set margin).
		 */
		if (last->tree_margin < ((del * 128 * (last->rate / 1000)) / 1000000)) {
			for (i = 0; i < items; i++) {
				value = tegra210_emc_compensate(last, list[i]);
				emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
					list[i], value);
				emc_writel(emc, value, list[i]);
			}
		}

		/* Restore the original EMC_CFG (power features back on). */
		emc_writel(emc, emc_cfg_o, EMC_CFG);

		/*
		 * 6. Timing update actually applies the new trimmers.
		 */
		tegra210_emc_timing_update(emc);

		/* 6.1. Restore the UPDATE_DLL_IN_UPDATE field. */
		emc_writel(emc, emc_cfg_update, EMC_CFG_UPDATE);

		/* 6.2. Restore the DLL. */
		tegra210_emc_dll_enable(emc);
	}

	return 0;
}
0581 
0582 /*
0583  * Do the clock change sequence.
0584  */
0585 static void tegra210_emc_r21021_set_clock(struct tegra210_emc *emc, u32 clksrc)
0586 {
0587     /* state variables */
0588     static bool fsp_for_next_freq;
0589     /* constant configuration parameters */
0590     const bool save_restore_clkstop_pd = true;
0591     const u32 zqcal_before_cc_cutoff = 2400;
0592     const bool cya_allow_ref_cc = false;
0593     const bool cya_issue_pc_ref = false;
0594     const bool opt_cc_short_zcal = true;
0595     const bool ref_b4_sref_en = false;
0596     const u32 tZQCAL_lpddr4 = 1000000;
0597     const bool opt_short_zcal = true;
0598     const bool opt_do_sw_qrst = true;
0599     const u32 opt_dvfs_mode = MAN_SR;
0600     /*
0601      * This is the timing table for the source frequency. It does _not_
0602      * necessarily correspond to the actual timing values in the EMC at the
0603      * moment. If the boot BCT differs from the table then this can happen.
0604      * However, we need it for accessing the dram_timings (which are not
0605      * really registers) array for the current frequency.
0606      */
0607     struct tegra210_emc_timing *fake, *last = emc->last, *next = emc->next;
0608     u32 tRTM, RP_war, R2P_war, TRPab_war, deltaTWATM, W2P_war, tRPST;
0609     u32 mr13_flip_fspwr, mr13_flip_fspop, ramp_up_wait, ramp_down_wait;
0610     u32 zq_wait_long, zq_latch_dvfs_wait_time, tZQCAL_lpddr4_fc_adj;
0611     u32 emc_auto_cal_config, auto_cal_en, emc_cfg, emc_sel_dpd_ctrl;
0612     u32 tFC_lpddr4 = 1000 * next->dram_timings[T_FC_LPDDR4];
0613     u32 bg_reg_mode_change, enable_bglp_reg, enable_bg_reg;
0614     bool opt_zcal_en_cc = false, is_lpddr3 = false;
0615     bool compensate_trimmer_applicable = false;
0616     u32 emc_dbg, emc_cfg_pipe_clk, emc_pin;
0617     u32 src_clk_period, dst_clk_period; /* in picoseconds */
0618     bool shared_zq_resistor = false;
0619     u32 value, dram_type;
0620     u32 opt_dll_mode = 0;
0621     unsigned long delay;
0622     unsigned int i;
0623 
0624     emc_dbg(emc, INFO, "Running clock change.\n");
0625 
0626     /* XXX fake == last */
0627     fake = tegra210_emc_find_timing(emc, last->rate * 1000UL);
0628     fsp_for_next_freq = !fsp_for_next_freq;
0629 
0630     value = emc_readl(emc, EMC_FBIO_CFG5) & EMC_FBIO_CFG5_DRAM_TYPE_MASK;
0631     dram_type = value >> EMC_FBIO_CFG5_DRAM_TYPE_SHIFT;
0632 
0633     if (last->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX] & BIT(31))
0634         shared_zq_resistor = true;
0635 
0636     if ((next->burst_regs[EMC_ZCAL_INTERVAL_INDEX] != 0 &&
0637          last->burst_regs[EMC_ZCAL_INTERVAL_INDEX] == 0) ||
0638         dram_type == DRAM_TYPE_LPDDR4)
0639         opt_zcal_en_cc = true;
0640 
0641     if (dram_type == DRAM_TYPE_DDR3)
0642         opt_dll_mode = tegra210_emc_get_dll_state(next);
0643 
0644     if ((next->burst_regs[EMC_FBIO_CFG5_INDEX] & BIT(25)) &&
0645         (dram_type == DRAM_TYPE_LPDDR2))
0646         is_lpddr3 = true;
0647 
0648     emc_readl(emc, EMC_CFG);
0649     emc_readl(emc, EMC_AUTO_CAL_CONFIG);
0650 
0651     src_clk_period = 1000000000 / last->rate;
0652     dst_clk_period = 1000000000 / next->rate;
0653 
0654     if (dst_clk_period <= zqcal_before_cc_cutoff)
0655         tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4 - tFC_lpddr4;
0656     else
0657         tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4;
0658 
0659     tZQCAL_lpddr4_fc_adj /= dst_clk_period;
0660 
0661     emc_dbg = emc_readl(emc, EMC_DBG);
0662     emc_pin = emc_readl(emc, EMC_PIN);
0663     emc_cfg_pipe_clk = emc_readl(emc, EMC_CFG_PIPE_CLK);
0664 
0665     emc_cfg = next->burst_regs[EMC_CFG_INDEX];
0666     emc_cfg &= ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
0667              EMC_CFG_DRAM_CLKSTOP_SR | EMC_CFG_DRAM_CLKSTOP_PD);
0668     emc_sel_dpd_ctrl = next->emc_sel_dpd_ctrl;
0669     emc_sel_dpd_ctrl &= ~(EMC_SEL_DPD_CTRL_CLK_SEL_DPD_EN |
0670                   EMC_SEL_DPD_CTRL_CA_SEL_DPD_EN |
0671                   EMC_SEL_DPD_CTRL_RESET_SEL_DPD_EN |
0672                   EMC_SEL_DPD_CTRL_ODT_SEL_DPD_EN |
0673                   EMC_SEL_DPD_CTRL_DATA_SEL_DPD_EN);
0674 
0675     emc_dbg(emc, INFO, "Clock change version: %d\n",
0676         DVFS_CLOCK_CHANGE_VERSION);
0677     emc_dbg(emc, INFO, "DRAM type = %d\n", dram_type);
0678     emc_dbg(emc, INFO, "DRAM dev #: %u\n", emc->num_devices);
0679     emc_dbg(emc, INFO, "Next EMC clksrc: 0x%08x\n", clksrc);
0680     emc_dbg(emc, INFO, "DLL clksrc:      0x%08x\n", next->dll_clk_src);
0681     emc_dbg(emc, INFO, "last rate: %u, next rate %u\n", last->rate,
0682         next->rate);
0683     emc_dbg(emc, INFO, "last period: %u, next period: %u\n",
0684         src_clk_period, dst_clk_period);
0685     emc_dbg(emc, INFO, "  shared_zq_resistor: %d\n", !!shared_zq_resistor);
0686     emc_dbg(emc, INFO, "  num_channels: %u\n", emc->num_channels);
0687     emc_dbg(emc, INFO, "  opt_dll_mode: %d\n", opt_dll_mode);
0688 
0689     /*
0690      * Step 1:
0691      *   Pre DVFS SW sequence.
0692      */
0693     emc_dbg(emc, STEPS, "Step 1\n");
0694     emc_dbg(emc, STEPS, "Step 1.1: Disable DLL temporarily.\n");
0695 
0696     value = emc_readl(emc, EMC_CFG_DIG_DLL);
0697     value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
0698     emc_writel(emc, value, EMC_CFG_DIG_DLL);
0699 
0700     tegra210_emc_timing_update(emc);
0701 
0702     for (i = 0; i < emc->num_channels; i++)
0703         tegra210_emc_wait_for_update(emc, i, EMC_CFG_DIG_DLL,
0704                          EMC_CFG_DIG_DLL_CFG_DLL_EN, 0);
0705 
0706     emc_dbg(emc, STEPS, "Step 1.2: Disable AUTOCAL temporarily.\n");
0707 
0708     emc_auto_cal_config = next->emc_auto_cal_config;
0709     auto_cal_en = emc_auto_cal_config & EMC_AUTO_CAL_CONFIG_AUTO_CAL_ENABLE;
0710     emc_auto_cal_config &= ~EMC_AUTO_CAL_CONFIG_AUTO_CAL_START;
0711     emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_MEASURE_STALL;
0712     emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_UPDATE_STALL;
0713     emc_auto_cal_config |= auto_cal_en;
0714     emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
0715     emc_readl(emc, EMC_AUTO_CAL_CONFIG); /* Flush write. */
0716 
0717     emc_dbg(emc, STEPS, "Step 1.3: Disable other power features.\n");
0718 
0719     tegra210_emc_set_shadow_bypass(emc, ACTIVE);
0720     emc_writel(emc, emc_cfg, EMC_CFG);
0721     emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
0722     tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
0723 
0724     if (next->periodic_training) {
0725         tegra210_emc_reset_dram_clktree_values(next);
0726 
0727         for (i = 0; i < emc->num_channels; i++)
0728             tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
0729                              EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
0730                              0);
0731 
0732         for (i = 0; i < emc->num_channels; i++)
0733             tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
0734                              EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
0735                              0);
0736 
0737         tegra210_emc_start_periodic_compensation(emc);
0738 
0739         delay = 1000 * tegra210_emc_actual_osc_clocks(last->run_clocks);
0740         udelay((delay / last->rate) + 2);
0741 
0742         value = periodic_compensation_handler(emc, DVFS_SEQUENCE, fake,
0743                               next);
0744         value = (value * 128 * next->rate / 1000) / 1000000;
0745 
0746         if (next->periodic_training && value > next->tree_margin)
0747             compensate_trimmer_applicable = true;
0748     }
0749 
0750     emc_writel(emc, EMC_INTSTATUS_CLKCHANGE_COMPLETE, EMC_INTSTATUS);
0751     tegra210_emc_set_shadow_bypass(emc, ACTIVE);
0752     emc_writel(emc, emc_cfg, EMC_CFG);
0753     emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
0754     emc_writel(emc, emc_cfg_pipe_clk | EMC_CFG_PIPE_CLK_CLK_ALWAYS_ON,
0755            EMC_CFG_PIPE_CLK);
0756     emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp &
0757             ~EMC_FDPD_CTRL_CMD_NO_RAMP_CMD_DPD_NO_RAMP_ENABLE,
0758            EMC_FDPD_CTRL_CMD_NO_RAMP);
0759 
0760     bg_reg_mode_change =
0761         ((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0762           EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) ^
0763          (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0764           EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD)) ||
0765         ((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0766           EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) ^
0767          (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0768           EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD));
0769     enable_bglp_reg =
0770         (next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0771          EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) == 0;
0772     enable_bg_reg =
0773         (next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0774          EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) == 0;
0775 
0776     if (bg_reg_mode_change) {
0777         if (enable_bg_reg)
0778             emc_writel(emc, last->burst_regs
0779                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0780                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
0781                    EMC_PMACRO_BG_BIAS_CTRL_0);
0782 
0783         if (enable_bglp_reg)
0784             emc_writel(emc, last->burst_regs
0785                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
0786                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
0787                    EMC_PMACRO_BG_BIAS_CTRL_0);
0788     }
0789 
0790     /* Check if we need to turn on VREF generator. */
0791     if ((((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
0792            EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 0) &&
0793          ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
0794            EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 1)) ||
0795         (((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
0796            EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) == 0) &&
0797          ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
0798            EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) != 0))) {
0799         u32 pad_tx_ctrl =
0800             next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
0801         u32 last_pad_tx_ctrl =
0802             last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
0803         u32 next_dq_e_ivref, next_dqs_e_ivref;
0804 
0805         next_dqs_e_ivref = pad_tx_ctrl &
0806                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF;
0807         next_dq_e_ivref = pad_tx_ctrl &
0808                   EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF;
0809         value = (last_pad_tx_ctrl &
0810                 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF &
0811                 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) |
0812             next_dq_e_ivref | next_dqs_e_ivref;
0813         emc_writel(emc, value, EMC_PMACRO_DATA_PAD_TX_CTRL);
0814         udelay(1);
0815     } else if (bg_reg_mode_change) {
0816         udelay(1);
0817     }
0818 
0819     tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
0820 
0821     /*
0822      * Step 2:
0823      *   Prelock the DLL.
0824      */
0825     emc_dbg(emc, STEPS, "Step 2\n");
0826 
0827     if (next->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
0828         EMC_CFG_DIG_DLL_CFG_DLL_EN) {
0829         emc_dbg(emc, INFO, "Prelock enabled for target frequency.\n");
0830         value = tegra210_emc_dll_prelock(emc, clksrc);
0831         emc_dbg(emc, INFO, "DLL out: 0x%03x\n", value);
0832     } else {
0833         emc_dbg(emc, INFO, "Disabling DLL for target frequency.\n");
0834         tegra210_emc_dll_disable(emc);
0835     }
0836 
0837     /*
0838      * Step 3:
0839      *   Prepare autocal for the clock change.
0840      */
0841     emc_dbg(emc, STEPS, "Step 3\n");
0842 
0843     tegra210_emc_set_shadow_bypass(emc, ACTIVE);
0844     emc_writel(emc, next->emc_auto_cal_config2, EMC_AUTO_CAL_CONFIG2);
0845     emc_writel(emc, next->emc_auto_cal_config3, EMC_AUTO_CAL_CONFIG3);
0846     emc_writel(emc, next->emc_auto_cal_config4, EMC_AUTO_CAL_CONFIG4);
0847     emc_writel(emc, next->emc_auto_cal_config5, EMC_AUTO_CAL_CONFIG5);
0848     emc_writel(emc, next->emc_auto_cal_config6, EMC_AUTO_CAL_CONFIG6);
0849     emc_writel(emc, next->emc_auto_cal_config7, EMC_AUTO_CAL_CONFIG7);
0850     emc_writel(emc, next->emc_auto_cal_config8, EMC_AUTO_CAL_CONFIG8);
0851     tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
0852 
0853     emc_auto_cal_config |= (EMC_AUTO_CAL_CONFIG_AUTO_CAL_COMPUTE_START |
0854                 auto_cal_en);
0855     emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
0856 
0857     /*
0858      * Step 4:
0859      *   Update EMC_CFG. (??)
0860      */
0861     emc_dbg(emc, STEPS, "Step 4\n");
0862 
0863     if (src_clk_period > 50000 && dram_type == DRAM_TYPE_LPDDR4)
0864         ccfifo_writel(emc, 1, EMC_SELF_REF, 0);
0865     else
0866         emc_writel(emc, next->emc_cfg_2, EMC_CFG_2);
0867 
0868     /*
0869      * Step 5:
0870      *   Prepare reference variables for ZQCAL regs.
0871      */
0872     emc_dbg(emc, STEPS, "Step 5\n");
0873 
0874     if (dram_type == DRAM_TYPE_LPDDR4)
0875         zq_wait_long = max((u32)1, div_o3(1000000, dst_clk_period));
0876     else if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
0877         zq_wait_long = max(next->min_mrs_wait,
0878                    div_o3(360000, dst_clk_period)) + 4;
0879     else if (dram_type == DRAM_TYPE_DDR3)
0880         zq_wait_long = max((u32)256,
0881                    div_o3(320000, dst_clk_period) + 2);
0882     else
0883         zq_wait_long = 0;
0884 
0885     /*
0886      * Step 6:
0887      *   Training code - removed.
0888      */
0889     emc_dbg(emc, STEPS, "Step 6\n");
0890 
0891     /*
0892      * Step 7:
0893      *   Program FSP reference registers and send MRWs to new FSPWR.
0894      */
0895     emc_dbg(emc, STEPS, "Step 7\n");
0896     emc_dbg(emc, SUB_STEPS, "Step 7.1: Bug 200024907 - Patch RP R2P");
0897 
0898     /* WAR 200024907 */
0899     if (dram_type == DRAM_TYPE_LPDDR4) {
0900         u32 nRTP = 16;
0901 
0902         if (src_clk_period >= 1000000 / 1866) /* 535.91 ps */
0903             nRTP = 14;
0904 
0905         if (src_clk_period >= 1000000 / 1600) /* 625.00 ps */
0906             nRTP = 12;
0907 
0908         if (src_clk_period >= 1000000 / 1333) /* 750.19 ps */
0909             nRTP = 10;
0910 
0911         if (src_clk_period >= 1000000 / 1066) /* 938.09 ps */
0912             nRTP = 8;
0913 
0914         deltaTWATM = max_t(u32, div_o3(7500, src_clk_period), 8);
0915 
0916         /*
0917          * Originally there was a + .5 in the tRPST calculation.
0918          * However since we can't do FP in the kernel and the tRTM
0919          * computation was in a floating point ceiling function, adding
0920          * one to tRTP should be ok. There is no other source of non
0921          * integer values, so the result was always going to be
0922          * something for the form: f_ceil(N + .5) = N + 1;
0923          */
0924         tRPST = (last->emc_mrw & 0x80) >> 7;
0925         tRTM = fake->dram_timings[RL] + div_o3(3600, src_clk_period) +
0926             max_t(u32, div_o3(7500, src_clk_period), 8) + tRPST +
0927             1 + nRTP;
0928 
0929         emc_dbg(emc, INFO, "tRTM = %u, EMC_RP = %u\n", tRTM,
0930             next->burst_regs[EMC_RP_INDEX]);
0931 
0932         if (last->burst_regs[EMC_RP_INDEX] < tRTM) {
0933             if (tRTM > (last->burst_regs[EMC_R2P_INDEX] +
0934                     last->burst_regs[EMC_RP_INDEX])) {
0935                 R2P_war = tRTM - last->burst_regs[EMC_RP_INDEX];
0936                 RP_war = last->burst_regs[EMC_RP_INDEX];
0937                 TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];
0938 
0939                 if (R2P_war > 63) {
0940                     RP_war = R2P_war +
0941                          last->burst_regs[EMC_RP_INDEX] - 63;
0942 
0943                     if (TRPab_war < RP_war)
0944                         TRPab_war = RP_war;
0945 
0946                     R2P_war = 63;
0947                 }
0948             } else {
0949                 R2P_war = last->burst_regs[EMC_R2P_INDEX];
0950                 RP_war = last->burst_regs[EMC_RP_INDEX];
0951                 TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];
0952             }
0953 
0954             if (RP_war < deltaTWATM) {
0955                 W2P_war = last->burst_regs[EMC_W2P_INDEX]
0956                       + deltaTWATM - RP_war;
0957                 if (W2P_war > 63) {
0958                     RP_war = RP_war + W2P_war - 63;
0959                     if (TRPab_war < RP_war)
0960                         TRPab_war = RP_war;
0961                     W2P_war = 63;
0962                 }
0963             } else {
0964                 W2P_war = last->burst_regs[
0965                       EMC_W2P_INDEX];
0966             }
0967 
0968             if ((last->burst_regs[EMC_W2P_INDEX] ^ W2P_war) ||
0969                 (last->burst_regs[EMC_R2P_INDEX] ^ R2P_war) ||
0970                 (last->burst_regs[EMC_RP_INDEX] ^ RP_war) ||
0971                 (last->burst_regs[EMC_TRPAB_INDEX] ^ TRPab_war)) {
0972                 emc_writel(emc, RP_war, EMC_RP);
0973                 emc_writel(emc, R2P_war, EMC_R2P);
0974                 emc_writel(emc, W2P_war, EMC_W2P);
0975                 emc_writel(emc, TRPab_war, EMC_TRPAB);
0976             }
0977 
0978             tegra210_emc_timing_update(emc);
0979         } else {
0980             emc_dbg(emc, INFO, "Skipped WAR\n");
0981         }
0982     }
0983 
0984     if (!fsp_for_next_freq) {
0985         mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x80;
0986         mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0x00;
0987     } else {
0988         mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x40;
0989         mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0xc0;
0990     }
0991 
0992     if (dram_type == DRAM_TYPE_LPDDR4) {
0993         emc_writel(emc, mr13_flip_fspwr, EMC_MRW3);
0994         emc_writel(emc, next->emc_mrw, EMC_MRW);
0995         emc_writel(emc, next->emc_mrw2, EMC_MRW2);
0996     }
0997 
0998     /*
0999      * Step 8:
1000      *   Program the shadow registers.
1001      */
1002     emc_dbg(emc, STEPS, "Step 8\n");
1003     emc_dbg(emc, SUB_STEPS, "Writing burst_regs\n");
1004 
1005     for (i = 0; i < next->num_burst; i++) {
1006         const u16 *offsets = emc->offsets->burst;
1007         u16 offset;
1008 
1009         if (!offsets[i])
1010             continue;
1011 
1012         value = next->burst_regs[i];
1013         offset = offsets[i];
1014 
1015         if (dram_type != DRAM_TYPE_LPDDR4 &&
1016             (offset == EMC_MRW6 || offset == EMC_MRW7 ||
1017              offset == EMC_MRW8 || offset == EMC_MRW9 ||
1018              offset == EMC_MRW10 || offset == EMC_MRW11 ||
1019              offset == EMC_MRW12 || offset == EMC_MRW13 ||
1020              offset == EMC_MRW14 || offset == EMC_MRW15 ||
1021              offset == EMC_TRAINING_CTRL))
1022             continue;
1023 
1024         /* Pain... And suffering. */
1025         if (offset == EMC_CFG) {
1026             value &= ~EMC_CFG_DRAM_ACPD;
1027             value &= ~EMC_CFG_DYN_SELF_REF;
1028 
1029             if (dram_type == DRAM_TYPE_LPDDR4) {
1030                 value &= ~EMC_CFG_DRAM_CLKSTOP_SR;
1031                 value &= ~EMC_CFG_DRAM_CLKSTOP_PD;
1032             }
1033         } else if (offset == EMC_MRS_WAIT_CNT &&
1034                dram_type == DRAM_TYPE_LPDDR2 &&
1035                opt_zcal_en_cc && !opt_cc_short_zcal &&
1036                opt_short_zcal) {
1037             value = (value & ~(EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK <<
1038                        EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT)) |
1039                 ((zq_wait_long & EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK) <<
1040                          EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1041         } else if (offset == EMC_ZCAL_WAIT_CNT &&
1042                dram_type == DRAM_TYPE_DDR3 && opt_zcal_en_cc &&
1043                !opt_cc_short_zcal && opt_short_zcal) {
1044             value = (value & ~(EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK <<
1045                        EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_SHIFT)) |
1046                 ((zq_wait_long & EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK) <<
1047                          EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1048         } else if (offset == EMC_ZCAL_INTERVAL && opt_zcal_en_cc) {
1049             value = 0; /* EMC_ZCAL_INTERVAL reset value. */
1050         } else if (offset == EMC_PMACRO_AUTOCAL_CFG_COMMON) {
1051             value |= EMC_PMACRO_AUTOCAL_CFG_COMMON_E_CAL_BYPASS_DVFS;
1052         } else if (offset == EMC_PMACRO_DATA_PAD_TX_CTRL) {
1053             value &= ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
1054                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
1055                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
1056                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
1057         } else if (offset == EMC_PMACRO_CMD_PAD_TX_CTRL) {
1058             value |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
1059             value &= ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
1060                    EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
1061                    EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
1062                    EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
1063         } else if (offset == EMC_PMACRO_BRICK_CTRL_RFU1) {
1064             value &= 0xf800f800;
1065         } else if (offset == EMC_PMACRO_COMMON_PAD_TX_CTRL) {
1066             value &= 0xfffffff0;
1067         }
1068 
1069         emc_writel(emc, value, offset);
1070     }
1071 
1072     /* SW addition: do EMC refresh adjustment here. */
1073     tegra210_emc_adjust_timing(emc, next);
1074 
1075     if (dram_type == DRAM_TYPE_LPDDR4) {
1076         value = (23 << EMC_MRW_MRW_MA_SHIFT) |
1077             (next->run_clocks & EMC_MRW_MRW_OP_MASK);
1078         emc_writel(emc, value, EMC_MRW);
1079     }
1080 
1081     /* Per channel burst registers. */
1082     emc_dbg(emc, SUB_STEPS, "Writing burst_regs_per_ch\n");
1083 
1084     for (i = 0; i < next->num_burst_per_ch; i++) {
1085         const struct tegra210_emc_per_channel_regs *burst =
1086                 emc->offsets->burst_per_channel;
1087 
1088         if (!burst[i].offset)
1089             continue;
1090 
1091         if (dram_type != DRAM_TYPE_LPDDR4 &&
1092             (burst[i].offset == EMC_MRW6 ||
1093              burst[i].offset == EMC_MRW7 ||
1094              burst[i].offset == EMC_MRW8 ||
1095              burst[i].offset == EMC_MRW9 ||
1096              burst[i].offset == EMC_MRW10 ||
1097              burst[i].offset == EMC_MRW11 ||
1098              burst[i].offset == EMC_MRW12 ||
1099              burst[i].offset == EMC_MRW13 ||
1100              burst[i].offset == EMC_MRW14 ||
1101              burst[i].offset == EMC_MRW15))
1102             continue;
1103 
1104         /* Filter out second channel if not in DUAL_CHANNEL mode. */
1105         if (emc->num_channels < 2 && burst[i].bank >= 1)
1106             continue;
1107 
1108         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1109             next->burst_reg_per_ch[i], burst[i].offset);
1110         emc_channel_writel(emc, burst[i].bank,
1111                    next->burst_reg_per_ch[i],
1112                    burst[i].offset);
1113     }
1114 
1115     /* Vref regs. */
1116     emc_dbg(emc, SUB_STEPS, "Writing vref_regs\n");
1117 
1118     for (i = 0; i < next->vref_num; i++) {
1119         const struct tegra210_emc_per_channel_regs *vref =
1120                     emc->offsets->vref_per_channel;
1121 
1122         if (!vref[i].offset)
1123             continue;
1124 
1125         if (emc->num_channels < 2 && vref[i].bank >= 1)
1126             continue;
1127 
1128         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1129             next->vref_perch_regs[i], vref[i].offset);
1130         emc_channel_writel(emc, vref[i].bank, next->vref_perch_regs[i],
1131                    vref[i].offset);
1132     }
1133 
1134     /* Trimmers. */
1135     emc_dbg(emc, SUB_STEPS, "Writing trim_regs\n");
1136 
1137     for (i = 0; i < next->num_trim; i++) {
1138         const u16 *offsets = emc->offsets->trim;
1139 
1140         if (!offsets[i])
1141             continue;
1142 
1143         if (compensate_trimmer_applicable &&
1144             (offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1145              offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1146              offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1147              offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1148              offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1149              offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1150              offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1151              offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1152              offsets[i] == EMC_DATA_BRLSHFT_0 ||
1153              offsets[i] == EMC_DATA_BRLSHFT_1)) {
1154             value = tegra210_emc_compensate(next, offsets[i]);
1155             emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1156                 value, offsets[i]);
1157             emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
1158                 (u32)(u64)offsets[i], value);
1159             emc_writel(emc, value, offsets[i]);
1160         } else {
1161             emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1162                 next->trim_regs[i], offsets[i]);
1163             emc_writel(emc, next->trim_regs[i], offsets[i]);
1164         }
1165     }
1166 
1167     /* Per channel trimmers. */
1168     emc_dbg(emc, SUB_STEPS, "Writing trim_regs_per_ch\n");
1169 
1170     for (i = 0; i < next->num_trim_per_ch; i++) {
1171         const struct tegra210_emc_per_channel_regs *trim =
1172                 &emc->offsets->trim_per_channel[0];
1173         unsigned int offset;
1174 
1175         if (!trim[i].offset)
1176             continue;
1177 
1178         if (emc->num_channels < 2 && trim[i].bank >= 1)
1179             continue;
1180 
1181         offset = trim[i].offset;
1182 
1183         if (compensate_trimmer_applicable &&
1184             (offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1185              offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1186              offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1187              offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1188              offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1189              offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1190              offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1191              offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1192              offset == EMC_DATA_BRLSHFT_0 ||
1193              offset == EMC_DATA_BRLSHFT_1)) {
1194             value = tegra210_emc_compensate(next, offset);
1195             emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1196                 value, offset);
1197             emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n", offset,
1198                 value);
1199             emc_channel_writel(emc, trim[i].bank, value, offset);
1200         } else {
1201             emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1202                 next->trim_perch_regs[i], offset);
1203             emc_channel_writel(emc, trim[i].bank,
1204                        next->trim_perch_regs[i], offset);
1205         }
1206     }
1207 
1208     emc_dbg(emc, SUB_STEPS, "Writing burst_mc_regs\n");
1209 
1210     for (i = 0; i < next->num_mc_regs; i++) {
1211         const u16 *offsets = emc->offsets->burst_mc;
1212         u32 *values = next->burst_mc_regs;
1213 
1214         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1215             values[i], offsets[i]);
1216         mc_writel(emc->mc, values[i], offsets[i]);
1217     }
1218 
1219     /* Registers to be programmed on the faster clock. */
1220     if (next->rate < last->rate) {
1221         const u16 *la = emc->offsets->la_scale;
1222 
1223         emc_dbg(emc, SUB_STEPS, "Writing la_scale_regs\n");
1224 
1225         for (i = 0; i < next->num_up_down; i++) {
1226             emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1227                 next->la_scale_regs[i], la[i]);
1228             mc_writel(emc->mc, next->la_scale_regs[i], la[i]);
1229         }
1230     }
1231 
1232     /* Flush all the burst register writes. */
1233     mc_readl(emc->mc, MC_EMEM_ADR_CFG);
1234 
1235     /*
1236      * Step 9:
1237      *   LPDDR4 section A.
1238      */
1239     emc_dbg(emc, STEPS, "Step 9\n");
1240 
1241     value = next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
1242     value &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;
1243 
1244     if (dram_type == DRAM_TYPE_LPDDR4) {
1245         emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
1246         emc_writel(emc, value, EMC_ZCAL_WAIT_CNT);
1247 
1248         value = emc_dbg | (EMC_DBG_WRITE_MUX_ACTIVE |
1249                    EMC_DBG_WRITE_ACTIVE_ONLY);
1250 
1251         emc_writel(emc, value, EMC_DBG);
1252         emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
1253         emc_writel(emc, emc_dbg, EMC_DBG);
1254     }
1255 
1256     /*
1257      * Step 10:
1258      *   LPDDR4 and DDR3 common section.
1259      */
1260     emc_dbg(emc, STEPS, "Step 10\n");
1261 
1262     if (opt_dvfs_mode == MAN_SR || dram_type == DRAM_TYPE_LPDDR4) {
1263         if (dram_type == DRAM_TYPE_LPDDR4)
1264             ccfifo_writel(emc, 0x101, EMC_SELF_REF, 0);
1265         else
1266             ccfifo_writel(emc, 0x1, EMC_SELF_REF, 0);
1267 
1268         if (dram_type == DRAM_TYPE_LPDDR4 &&
1269             dst_clk_period <= zqcal_before_cc_cutoff) {
1270             ccfifo_writel(emc, mr13_flip_fspwr ^ 0x40, EMC_MRW3, 0);
1271             ccfifo_writel(emc, (next->burst_regs[EMC_MRW6_INDEX] &
1272                         0xFFFF3F3F) |
1273                        (last->burst_regs[EMC_MRW6_INDEX] &
1274                         0x0000C0C0), EMC_MRW6, 0);
1275             ccfifo_writel(emc, (next->burst_regs[EMC_MRW14_INDEX] &
1276                         0xFFFF0707) |
1277                        (last->burst_regs[EMC_MRW14_INDEX] &
1278                         0x00003838), EMC_MRW14, 0);
1279 
1280             if (emc->num_devices > 1) {
1281                 ccfifo_writel(emc,
1282                       (next->burst_regs[EMC_MRW7_INDEX] &
1283                        0xFFFF3F3F) |
1284                       (last->burst_regs[EMC_MRW7_INDEX] &
1285                        0x0000C0C0), EMC_MRW7, 0);
1286                 ccfifo_writel(emc,
1287                      (next->burst_regs[EMC_MRW15_INDEX] &
1288                       0xFFFF0707) |
1289                      (last->burst_regs[EMC_MRW15_INDEX] &
1290                       0x00003838), EMC_MRW15, 0);
1291             }
1292 
1293             if (opt_zcal_en_cc) {
1294                 if (emc->num_devices < 2)
1295                     ccfifo_writel(emc,
1296                         2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
1297                         | EMC_ZQ_CAL_ZQ_CAL_CMD,
1298                         EMC_ZQ_CAL, 0);
1299                 else if (shared_zq_resistor)
1300                     ccfifo_writel(emc,
1301                         2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
1302                         | EMC_ZQ_CAL_ZQ_CAL_CMD,
1303                         EMC_ZQ_CAL, 0);
1304                 else
1305                     ccfifo_writel(emc,
1306                               EMC_ZQ_CAL_ZQ_CAL_CMD,
1307                               EMC_ZQ_CAL, 0);
1308             }
1309         }
1310     }
1311 
1312     if (dram_type == DRAM_TYPE_LPDDR4) {
1313         value = (1000 * fake->dram_timings[T_RP]) / src_clk_period;
1314         ccfifo_writel(emc, mr13_flip_fspop | 0x8, EMC_MRW3, value);
1315         ccfifo_writel(emc, 0, 0, tFC_lpddr4 / src_clk_period);
1316     }
1317 
1318     if (dram_type == DRAM_TYPE_LPDDR4 || opt_dvfs_mode != MAN_SR) {
1319         delay = 30;
1320 
1321         if (cya_allow_ref_cc) {
1322             delay += (1000 * fake->dram_timings[T_RP]) /
1323                     src_clk_period;
1324             delay += 4000 * fake->dram_timings[T_RFC];
1325         }
1326 
1327         ccfifo_writel(emc, emc_pin & ~(EMC_PIN_PIN_CKE_PER_DEV |
1328                            EMC_PIN_PIN_CKEB |
1329                            EMC_PIN_PIN_CKE),
1330                   EMC_PIN, delay);
1331     }
1332 
1333     /* calculate reference delay multiplier */
1334     value = 1;
1335 
1336     if (ref_b4_sref_en)
1337         value++;
1338 
1339     if (cya_allow_ref_cc)
1340         value++;
1341 
1342     if (cya_issue_pc_ref)
1343         value++;
1344 
1345     if (dram_type != DRAM_TYPE_LPDDR4) {
1346         delay = ((1000 * fake->dram_timings[T_RP] / src_clk_period) +
1347              (1000 * fake->dram_timings[T_RFC] / src_clk_period));
1348         delay = value * delay + 20;
1349     } else {
1350         delay = 0;
1351     }
1352 
1353     /*
1354      * Step 11:
1355      *   Ramp down.
1356      */
1357     emc_dbg(emc, STEPS, "Step 11\n");
1358 
1359     ccfifo_writel(emc, 0x0, EMC_CFG_SYNC, delay);
1360 
1361     value = emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE | EMC_DBG_WRITE_ACTIVE_ONLY;
1362     ccfifo_writel(emc, value, EMC_DBG, 0);
1363 
1364     ramp_down_wait = tegra210_emc_dvfs_power_ramp_down(emc, src_clk_period,
1365                                0);
1366 
1367     /*
1368      * Step 12:
1369      *   And finally - trigger the clock change.
1370      */
1371     emc_dbg(emc, STEPS, "Step 12\n");
1372 
1373     ccfifo_writel(emc, 1, EMC_STALL_THEN_EXE_AFTER_CLKCHANGE, 0);
1374     value &= ~EMC_DBG_WRITE_ACTIVE_ONLY;
1375     ccfifo_writel(emc, value, EMC_DBG, 0);
1376 
1377     /*
1378      * Step 13:
1379      *   Ramp up.
1380      */
1381     emc_dbg(emc, STEPS, "Step 13\n");
1382 
1383     ramp_up_wait = tegra210_emc_dvfs_power_ramp_up(emc, dst_clk_period, 0);
1384     ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
1385 
1386     /*
1387      * Step 14:
1388      *   Bringup CKE pins.
1389      */
1390     emc_dbg(emc, STEPS, "Step 14\n");
1391 
1392     if (dram_type == DRAM_TYPE_LPDDR4) {
1393         value = emc_pin | EMC_PIN_PIN_CKE;
1394 
1395         if (emc->num_devices <= 1)
1396             value &= ~(EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV);
1397         else
1398             value |= EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV;
1399 
1400         ccfifo_writel(emc, value, EMC_PIN, 0);
1401     }
1402 
1403     /*
1404      * Step 15: (two step 15s ??)
1405      *   Calculate zqlatch wait time; has dependency on ramping times.
1406      */
1407     emc_dbg(emc, STEPS, "Step 15\n");
1408 
1409     if (dst_clk_period <= zqcal_before_cc_cutoff) {
1410         s32 t = (s32)(ramp_up_wait + ramp_down_wait) /
1411             (s32)dst_clk_period;
1412         zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - t;
1413     } else {
1414         zq_latch_dvfs_wait_time = tZQCAL_lpddr4_fc_adj -
1415             div_o3(1000 * next->dram_timings[T_PDEX],
1416                    dst_clk_period);
1417     }
1418 
1419     emc_dbg(emc, INFO, "tZQCAL_lpddr4_fc_adj = %u\n", tZQCAL_lpddr4_fc_adj);
1420     emc_dbg(emc, INFO, "dst_clk_period = %u\n",
1421         dst_clk_period);
1422     emc_dbg(emc, INFO, "next->dram_timings[T_PDEX] = %u\n",
1423         next->dram_timings[T_PDEX]);
1424     emc_dbg(emc, INFO, "zq_latch_dvfs_wait_time = %d\n",
1425         max_t(s32, 0, zq_latch_dvfs_wait_time));
1426 
1427     if (dram_type == DRAM_TYPE_LPDDR4 && opt_zcal_en_cc) {
1428         delay = div_o3(1000 * next->dram_timings[T_PDEX],
1429                    dst_clk_period);
1430 
1431         if (emc->num_devices < 2) {
1432             if (dst_clk_period > zqcal_before_cc_cutoff)
1433                 ccfifo_writel(emc,
1434                           2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1435                           EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
1436                           delay);
1437 
1438             value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
1439             ccfifo_writel(emc, value, EMC_MRW3, delay);
1440             ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1441             ccfifo_writel(emc, 0, EMC_REF, 0);
1442             ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1443                       EMC_ZQ_CAL_ZQ_LATCH_CMD,
1444                       EMC_ZQ_CAL,
1445                       max_t(s32, 0, zq_latch_dvfs_wait_time));
1446         } else if (shared_zq_resistor) {
1447             if (dst_clk_period > zqcal_before_cc_cutoff)
1448                 ccfifo_writel(emc,
1449                           2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1450                           EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
1451                           delay);
1452 
1453             ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1454                       EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
1455                       max_t(s32, 0, zq_latch_dvfs_wait_time) +
1456                     delay);
1457             ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1458                       EMC_ZQ_CAL_ZQ_LATCH_CMD,
1459                       EMC_ZQ_CAL, 0);
1460 
1461             value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
1462             ccfifo_writel(emc, value, EMC_MRW3, 0);
1463             ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1464             ccfifo_writel(emc, 0, EMC_REF, 0);
1465 
1466             ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1467                       EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
1468                       tZQCAL_lpddr4 / dst_clk_period);
1469         } else {
1470             if (dst_clk_period > zqcal_before_cc_cutoff)
1471                 ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_CAL_CMD,
1472                           EMC_ZQ_CAL, delay);
1473 
1474             value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
1475             ccfifo_writel(emc, value, EMC_MRW3, delay);
1476             ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1477             ccfifo_writel(emc, 0, EMC_REF, 0);
1478 
1479             ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
1480                       max_t(s32, 0, zq_latch_dvfs_wait_time));
1481         }
1482     }
1483 
1484     /* WAR: delay for zqlatch */
1485     ccfifo_writel(emc, 0, 0, 10);
1486 
1487     /*
1488      * Step 16:
1489      *   LPDDR4 Conditional Training Kickoff. Removed.
1490      */
1491 
1492     /*
1493      * Step 17:
1494      *   MANSR exit self refresh.
1495      */
1496     emc_dbg(emc, STEPS, "Step 17\n");
1497 
1498     if (opt_dvfs_mode == MAN_SR && dram_type != DRAM_TYPE_LPDDR4)
1499         ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1500 
1501     /*
1502      * Step 18:
1503      *   Send MRWs to LPDDR3/DDR3.
1504      */
1505     emc_dbg(emc, STEPS, "Step 18\n");
1506 
1507     if (dram_type == DRAM_TYPE_LPDDR2) {
1508         ccfifo_writel(emc, next->emc_mrw2, EMC_MRW2, 0);
1509         ccfifo_writel(emc, next->emc_mrw,  EMC_MRW,  0);
1510         if (is_lpddr3)
1511             ccfifo_writel(emc, next->emc_mrw4, EMC_MRW4, 0);
1512     } else if (dram_type == DRAM_TYPE_DDR3) {
1513         if (opt_dll_mode)
1514             ccfifo_writel(emc, next->emc_emrs &
1515                       ~EMC_EMRS_USE_EMRS_LONG_CNT, EMC_EMRS, 0);
1516         ccfifo_writel(emc, next->emc_emrs2 &
1517                   ~EMC_EMRS2_USE_EMRS2_LONG_CNT, EMC_EMRS2, 0);
1518         ccfifo_writel(emc, next->emc_mrs |
1519                   EMC_EMRS_USE_EMRS_LONG_CNT, EMC_MRS, 0);
1520     }
1521 
1522     /*
1523      * Step 19:
1524      *   ZQCAL for LPDDR3/DDR3
1525      */
1526     emc_dbg(emc, STEPS, "Step 19\n");
1527 
1528     if (opt_zcal_en_cc) {
1529         if (dram_type == DRAM_TYPE_LPDDR2) {
1530             value = opt_cc_short_zcal ? 90000 : 360000;
1531             value = div_o3(value, dst_clk_period);
1532             value = value <<
1533                 EMC_MRS_WAIT_CNT2_MRS_EXT2_WAIT_CNT_SHIFT |
1534                 value <<
1535                 EMC_MRS_WAIT_CNT2_MRS_EXT1_WAIT_CNT_SHIFT;
1536             ccfifo_writel(emc, value, EMC_MRS_WAIT_CNT2, 0);
1537 
1538             value = opt_cc_short_zcal ? 0x56 : 0xab;
1539             ccfifo_writel(emc, 2 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
1540                        EMC_MRW_USE_MRW_EXT_CNT |
1541                        10 << EMC_MRW_MRW_MA_SHIFT |
1542                        value << EMC_MRW_MRW_OP_SHIFT,
1543                       EMC_MRW, 0);
1544 
1545             if (emc->num_devices > 1) {
1546                 value = 1 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
1547                     EMC_MRW_USE_MRW_EXT_CNT |
1548                     10 << EMC_MRW_MRW_MA_SHIFT |
1549                     value << EMC_MRW_MRW_OP_SHIFT;
1550                 ccfifo_writel(emc, value, EMC_MRW, 0);
1551             }
1552         } else if (dram_type == DRAM_TYPE_DDR3) {
1553             value = opt_cc_short_zcal ? 0 : EMC_ZQ_CAL_LONG;
1554 
1555             ccfifo_writel(emc, value |
1556                        2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1557                        EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
1558                        0);
1559 
1560             if (emc->num_devices > 1) {
1561                 value = value | 1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1562                         EMC_ZQ_CAL_ZQ_CAL_CMD;
1563                 ccfifo_writel(emc, value, EMC_ZQ_CAL, 0);
1564             }
1565         }
1566     }
1567 
1568     if (bg_reg_mode_change) {
1569         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1570 
1571         if (ramp_up_wait <= 1250000)
1572             delay = (1250000 - ramp_up_wait) / dst_clk_period;
1573         else
1574             delay = 0;
1575 
1576         ccfifo_writel(emc,
1577                   next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX],
1578                   EMC_PMACRO_BG_BIAS_CTRL_0, delay);
1579         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1580     }
1581 
1582     /*
1583      * Step 20:
1584      *   Issue ref and optional QRST.
1585      */
1586     emc_dbg(emc, STEPS, "Step 20\n");
1587 
1588     if (dram_type != DRAM_TYPE_LPDDR4)
1589         ccfifo_writel(emc, 0, EMC_REF, 0);
1590 
1591     if (opt_do_sw_qrst) {
1592         ccfifo_writel(emc, 1, EMC_ISSUE_QRST, 0);
1593         ccfifo_writel(emc, 0, EMC_ISSUE_QRST, 2);
1594     }
1595 
1596     /*
1597      * Step 21:
1598      *   Restore ZCAL and ZCAL interval.
1599      */
1600     emc_dbg(emc, STEPS, "Step 21\n");
1601 
1602     if (save_restore_clkstop_pd || opt_zcal_en_cc) {
1603         ccfifo_writel(emc, emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE,
1604                   EMC_DBG, 0);
1605         if (opt_zcal_en_cc && dram_type != DRAM_TYPE_LPDDR4)
1606             ccfifo_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
1607                       EMC_ZCAL_INTERVAL, 0);
1608 
1609         if (save_restore_clkstop_pd)
1610             ccfifo_writel(emc, next->burst_regs[EMC_CFG_INDEX] &
1611                         ~EMC_CFG_DYN_SELF_REF,
1612                       EMC_CFG, 0);
1613         ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
1614     }
1615 
1616     /*
1617      * Step 22:
1618      *   Restore EMC_CFG_PIPE_CLK.
1619      */
1620     emc_dbg(emc, STEPS, "Step 22\n");
1621 
1622     ccfifo_writel(emc, emc_cfg_pipe_clk, EMC_CFG_PIPE_CLK, 0);
1623 
1624     if (bg_reg_mode_change) {
1625         if (enable_bg_reg)
1626             emc_writel(emc,
1627                    next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1628                     ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
1629                    EMC_PMACRO_BG_BIAS_CTRL_0);
1630         else
1631             emc_writel(emc,
1632                    next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1633                     ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
1634                    EMC_PMACRO_BG_BIAS_CTRL_0);
1635     }
1636 
1637     /*
1638      * Step 23:
1639      */
1640     emc_dbg(emc, STEPS, "Step 23\n");
1641 
1642     value = emc_readl(emc, EMC_CFG_DIG_DLL);
1643     value |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
1644     value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
1645     value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
1646     value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
1647     value = (value & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
1648         (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
1649     emc_writel(emc, value, EMC_CFG_DIG_DLL);
1650 
1651     tegra210_emc_do_clock_change(emc, clksrc);
1652 
1653     /*
1654      * Step 24:
1655      *   Save training results. Removed.
1656      */
1657 
1658     /*
1659      * Step 25:
1660      *   Program MC updown registers.
1661      */
1662     emc_dbg(emc, STEPS, "Step 25\n");
1663 
1664     if (next->rate > last->rate) {
1665         for (i = 0; i < next->num_up_down; i++)
1666             mc_writel(emc->mc, next->la_scale_regs[i],
1667                   emc->offsets->la_scale[i]);
1668 
1669         tegra210_emc_timing_update(emc);
1670     }
1671 
1672     /*
1673      * Step 26:
1674      *   Restore ZCAL registers.
1675      */
1676     emc_dbg(emc, STEPS, "Step 26\n");
1677 
1678     if (dram_type == DRAM_TYPE_LPDDR4) {
1679         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1680         emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
1681                EMC_ZCAL_WAIT_CNT);
1682         emc_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
1683                EMC_ZCAL_INTERVAL);
1684         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1685     }
1686 
1687     if (dram_type != DRAM_TYPE_LPDDR4 && opt_zcal_en_cc &&
1688         !opt_short_zcal && opt_cc_short_zcal) {
1689         udelay(2);
1690 
1691         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1692         if (dram_type == DRAM_TYPE_LPDDR2)
1693             emc_writel(emc, next->burst_regs[EMC_MRS_WAIT_CNT_INDEX],
1694                    EMC_MRS_WAIT_CNT);
1695         else if (dram_type == DRAM_TYPE_DDR3)
1696             emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
1697                    EMC_ZCAL_WAIT_CNT);
1698         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1699     }
1700 
1701     /*
1702      * Step 27:
1703      *   Restore EMC_CFG, FDPD registers.
1704      */
1705     emc_dbg(emc, STEPS, "Step 27\n");
1706 
1707     tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1708     emc_writel(emc, next->burst_regs[EMC_CFG_INDEX], EMC_CFG);
1709     tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1710     emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp,
1711            EMC_FDPD_CTRL_CMD_NO_RAMP);
1712     emc_writel(emc, next->emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
1713 
1714     /*
1715      * Step 28:
1716      *   Training recover. Removed.
1717      */
1718     emc_dbg(emc, STEPS, "Step 28\n");
1719 
1720     tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1721     emc_writel(emc,
1722            next->burst_regs[EMC_PMACRO_AUTOCAL_CFG_COMMON_INDEX],
1723            EMC_PMACRO_AUTOCAL_CFG_COMMON);
1724     tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1725 
1726     /*
1727      * Step 29:
1728      *   Power fix WAR.
1729      */
1730     emc_dbg(emc, STEPS, "Step 29\n");
1731 
1732     emc_writel(emc, EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE0 |
1733            EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE1 |
1734            EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE2 |
1735            EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE3 |
1736            EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE4 |
1737            EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE5 |
1738            EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE6 |
1739            EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE7,
1740            EMC_PMACRO_CFG_PM_GLOBAL_0);
1741     emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_0_CH0_TRAINING_E_WRPTR,
1742            EMC_PMACRO_TRAINING_CTRL_0);
1743     emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_1_CH1_TRAINING_E_WRPTR,
1744            EMC_PMACRO_TRAINING_CTRL_1);
1745     emc_writel(emc, 0, EMC_PMACRO_CFG_PM_GLOBAL_0);
1746 
1747     /*
1748      * Step 30:
1749      *   Re-enable autocal.
1750      */
1751     emc_dbg(emc, STEPS, "Step 30: Re-enable DLL and AUTOCAL\n");
1752 
1753     if (next->burst_regs[EMC_CFG_DIG_DLL_INDEX] & EMC_CFG_DIG_DLL_CFG_DLL_EN) {
1754         value = emc_readl(emc, EMC_CFG_DIG_DLL);
1755         value |=  EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
1756         value |=  EMC_CFG_DIG_DLL_CFG_DLL_EN;
1757         value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
1758         value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
1759         value = (value & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
1760             (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
1761         emc_writel(emc, value, EMC_CFG_DIG_DLL);
1762         tegra210_emc_timing_update(emc);
1763     }
1764 
1765     emc_writel(emc, next->emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
1766 
1767     /* Done! Yay. */
1768 }
1769 
/*
 * Sequence descriptor exported for the r21021 EMC clock-change
 * implementation (DVFS_CLOCK_CHANGE_VERSION 21021, defined at the top of
 * this file).  Bundles the set_clock and periodic_compensation entry
 * points implemented here.
 *
 * NOTE(review): .revision = 0x7 is presumably matched by the EMC core
 * driver against the revision field of the platform's EMC frequency
 * tables when picking a sequence — confirm against the table-selection
 * logic in the tegra210-emc core.
 */
const struct tegra210_emc_sequence tegra210_emc_r21021 = {
	.revision = 0x7,
	.set_clock = tegra210_emc_r21021_set_clock,
	.periodic_compensation = tegra210_emc_r21021_periodic_compensation,
};