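// SPDX-License-Identifier: GPL-2.0
/*
 * Exynos5422 Dynamic Memory Controller (DMC) frequency and voltage scaling
 * driver.
 *
 * Author: Lukasz Luba
 */
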
#include <linux/clk.h>
#include <linux/devfreq.h>
#include <linux/devfreq-event.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/mfd/syscon.h>
#include <linux/module.h>
#include <linux/moduleparam.h>
#include <linux/of_device.h>
#include <linux/pm_opp.h>
#include <linux/platform_device.h>
#include <linux/regmap.h>
#include <linux/regulator/consumer.h>
#include <linux/slab.h>
#include "../jedec_ddr.h"
#include "../of_memory.h"

static int irqmode;
module_param(irqmode, int, 0644);
MODULE_PARM_DESC(irqmode, "Enable IRQ mode (0=off [default], 1=on)");

/* DREX timing and CMU register offsets used for frequency switching */
#define EXYNOS5_DREXI_TIMINGAREF	(0x0030)
#define EXYNOS5_DREXI_TIMINGROW0	(0x0034)
#define EXYNOS5_DREXI_TIMINGDATA0	(0x0038)
#define EXYNOS5_DREXI_TIMINGPOWER0	(0x003C)
#define EXYNOS5_DREXI_TIMINGROW1	(0x00E4)
#define EXYNOS5_DREXI_TIMINGDATA1	(0x00E8)
#define EXYNOS5_DREXI_TIMINGPOWER1	(0x00EC)
#define CDREX_PAUSE			(0x2091c)
#define CDREX_LPDDR3PHY_CON3		(0x20a20)
#define CDREX_LPDDR3PHY_CLKM_SRC	(0x20700)
#define EXYNOS5_TIMING_SET_SWI		BIT(28)
#define USE_MX_MSPLL_TIMINGS		(1)
#define USE_BPLL_TIMINGS		(0)
#define EXYNOS5_AREF_NORMAL		(0x2e)

/* Performance counter (PPC) register offsets in the DREX block */
#define DREX_PPCCLKCON		(0x0130)
#define DREX_PEREV2CONFIG	(0x013c)
#define DREX_PMNC_PPC		(0xE000)
#define DREX_CNTENS_PPC		(0xE010)
#define DREX_CNTENC_PPC		(0xE020)
#define DREX_INTENS_PPC		(0xE030)
#define DREX_INTENC_PPC		(0xE040)
#define DREX_FLAG_PPC		(0xE050)
#define DREX_PMCNT2_PPC		(0xE130)

/* Written to DREX_PMNC_PPC to reset the cycle counter (CCNT) */
#define CC_RESET		BIT(2)

/* Written to DREX_PMNC_PPC to reset all performance counters */
#define PPC_COUNTER_RESET	BIT(1)

/* Written to DREX_PMNC_PPC to enable all configured counters */
#define PPC_ENABLE		BIT(0)

/* Written to DREX_PPCCLKCON to enable the performance events clock */
#define PEREV_CLK_EN		BIT(0)

/*
 * Bits selecting performance counter 2 and the cycle counter in the
 * enable, disable, interrupt and flag registers.
 */
#define PERF_CNT2		BIT(2)
#define PERF_CCNT		BIT(31)

/*
 * Performance event types written to DREX_PEREV2CONFIG, selecting read
 * transfers on channel 0 and channel 1 respectively.
 */
#define READ_TRANSFER_CH0	(0x6d)
#define READ_TRANSFER_CH1	(0x6f)

/* Counter 2 start value and the overflow-interval threshold (in ns) */
#define PERF_COUNTER_START_VALUE	0xff000000
#define PERF_EVENT_UP_DOWN_THRESHOLD	900000000ULL
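
/**
 * struct dmc_opp_table - operating point of the memory controller
 * @freq_hz:	frequency in Hz
 * @volt_uv:	voltage in uV
 */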
struct dmc_opp_table {
	u32 freq_hz;
	u32 volt_uv;
};
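
/*
 * struct exynos5_dmc - main driver state: register bases, clocks, regulator,
 * per-OPP DRAM timings, performance-counter bookkeeping and the devfreq
 * device itself.
 */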
struct exynos5_dmc {
	struct device *dev;
	struct devfreq *df;
	struct devfreq_simple_ondemand_data gov_data;
	void __iomem *base_drexi0;
	void __iomem *base_drexi1;
	struct regmap *clk_regmap;
	/* Protects curr_rate/curr_volt and the frequency change sequence */
	struct mutex lock;
	unsigned long curr_rate;
	unsigned long curr_volt;
	struct dmc_opp_table *opp;
	int opp_count;
	u32 timings_arr_size;
	u32 *timing_row;
	u32 *timing_data;
	u32 *timing_power;
	const struct lpddr3_timings *timings;
	const struct lpddr3_min_tck *min_tck;
	u32 bypass_timing_row;
	u32 bypass_timing_data;
	u32 bypass_timing_power;
	struct regulator *vdd_mif;
	struct clk *fout_spll;
	struct clk *fout_bpll;
	struct clk *mout_spll;
	struct clk *mout_bpll;
	struct clk *mout_mclk_cdrex;
	struct clk *mout_mx_mspll_ccore;
	struct devfreq_event_dev **counter;
	int num_counters;
	u64 last_overflow_ts[2];
	unsigned long load;
	unsigned long total;
	bool in_irq_mode;
};

#define TIMING_FIELD(t_name, t_bit_beg, t_bit_end) \
	{ .name = t_name, .bit_beg = t_bit_beg, .bit_end = t_bit_end }

#define TIMING_VAL2REG(timing, t_val)			\
({							\
	u32 __val;					\
	__val = (t_val) << (timing)->bit_beg;		\
	__val;						\
})

struct timing_reg {
	char *name;
	int bit_beg;
	int bit_end;
	unsigned int val;
};

static const struct timing_reg timing_row_reg_fields[] = {
	TIMING_FIELD("tRFC", 24, 31),
	TIMING_FIELD("tRRD", 20, 23),
	TIMING_FIELD("tRP", 16, 19),
	TIMING_FIELD("tRCD", 12, 15),
	TIMING_FIELD("tRC", 6, 11),
	TIMING_FIELD("tRAS", 0, 5),
};

static const struct timing_reg timing_data_reg_fields[] = {
	TIMING_FIELD("tWTR", 28, 31),
	TIMING_FIELD("tWR", 24, 27),
	TIMING_FIELD("tRTP", 20, 23),
	TIMING_FIELD("tW2W-C2C", 14, 14),
	TIMING_FIELD("tR2R-C2C", 12, 12),
	TIMING_FIELD("WL", 8, 11),
	TIMING_FIELD("tDQSCK", 4, 7),
	TIMING_FIELD("RL", 0, 3),
};

static const struct timing_reg timing_power_reg_fields[] = {
	TIMING_FIELD("tFAW", 26, 31),
	TIMING_FIELD("tXSR", 16, 25),
	TIMING_FIELD("tXP", 8, 15),
	TIMING_FIELD("tCKE", 4, 7),
	TIMING_FIELD("tMRD", 0, 3),
};

#define TIMING_COUNT (ARRAY_SIZE(timing_row_reg_fields) + \
		      ARRAY_SIZE(timing_data_reg_fields) + \
		      ARRAY_SIZE(timing_power_reg_fields))

/* Program the configured event on every available PPMU counter. */
static int exynos5_counters_set_event(struct exynos5_dmc *dmc)
{
	int i, ret;

	for (i = 0; i < dmc->num_counters; i++) {
		if (!dmc->counter[i])
			continue;
		ret = devfreq_event_set_event(dmc->counter[i]);
		if (ret < 0)
			return ret;
	}
	return 0;
}

/* Enable all available PPMU counter devices. */
static int exynos5_counters_enable_edev(struct exynos5_dmc *dmc)
{
	int i, ret;

	for (i = 0; i < dmc->num_counters; i++) {
		if (!dmc->counter[i])
			continue;
		ret = devfreq_event_enable_edev(dmc->counter[i]);
		if (ret < 0)
			return ret;
	}
	return 0;
}

/* Disable all available PPMU counter devices. */
static int exynos5_counters_disable_edev(struct exynos5_dmc *dmc)
{
	int i, ret;

	for (i = 0; i < dmc->num_counters; i++) {
		if (!dmc->counter[i])
			continue;
		ret = devfreq_event_disable_edev(dmc->counter[i]);
		if (ret < 0)
			return ret;
	}
	return 0;
}
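
/*
 * find_target_freq_idx() - Find the OPP index matching @target_rate, i.e. the
 * highest OPP whose frequency does not exceed it. Returns -EINVAL if none fits.
 */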
static int find_target_freq_idx(struct exynos5_dmc *dmc,
				unsigned long target_rate)
{
	int i;

	for (i = dmc->opp_count - 1; i >= 0; i--)
		if (dmc->opp[i].freq_hz <= target_rate)
			return i;

	return -EINVAL;
}
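
/*
 * exynos5_switch_timing_regs() - Choose which of the two DRAM timing register
 * sets the controller uses: the alternative set (@set true, used while running
 * from the MX_MSPLL bypass clock) or the default BPLL set (@set false).
 */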
static int exynos5_switch_timing_regs(struct exynos5_dmc *dmc, bool set)
{
	unsigned int reg;
	int ret;

	ret = regmap_read(dmc->clk_regmap, CDREX_LPDDR3PHY_CON3, &reg);
	if (ret)
		return ret;

	if (set)
		reg |= EXYNOS5_TIMING_SET_SWI;
	else
		reg &= ~EXYNOS5_TIMING_SET_SWI;

	regmap_write(dmc->clk_regmap, CDREX_LPDDR3PHY_CON3, reg);

	return 0;
}
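
/*
 * exynos5_init_freq_table() - Read the OPP table from devicetree and cache the
 * available frequency/voltage pairs in @dmc->opp, ordered from lowest to
 * highest frequency.
 */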
static int exynos5_init_freq_table(struct exynos5_dmc *dmc,
				   struct devfreq_dev_profile *profile)
{
	int i, ret;
	int idx;
	unsigned long freq;

	ret = devm_pm_opp_of_add_table(dmc->dev);
	if (ret < 0) {
		dev_err(dmc->dev, "Failed to get OPP table\n");
		return ret;
	}

	dmc->opp_count = dev_pm_opp_get_opp_count(dmc->dev);

	dmc->opp = devm_kmalloc_array(dmc->dev, dmc->opp_count,
				      sizeof(struct dmc_opp_table), GFP_KERNEL);
	if (!dmc->opp)
		return -ENOMEM;

	idx = dmc->opp_count - 1;
	for (i = 0, freq = ULONG_MAX; i < dmc->opp_count; i++, freq--) {
		struct dev_pm_opp *opp;

		opp = dev_pm_opp_find_freq_floor(dmc->dev, &freq);
		if (IS_ERR(opp))
			return PTR_ERR(opp);

		dmc->opp[idx - i].freq_hz = freq;
		dmc->opp[idx - i].volt_uv = dev_pm_opp_get_voltage(opp);

		dev_pm_opp_put(opp);
	}

	return 0;
}
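
/*
 * exynos5_set_bypass_dram_timings() - Program the alternative (set 1) timing
 * registers in both DREX channels with values safe for the bypass clock.
 */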
static void exynos5_set_bypass_dram_timings(struct exynos5_dmc *dmc)
{
	writel(EXYNOS5_AREF_NORMAL,
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGAREF);

	writel(dmc->bypass_timing_row,
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGROW1);
	writel(dmc->bypass_timing_row,
	       dmc->base_drexi1 + EXYNOS5_DREXI_TIMINGROW1);
	writel(dmc->bypass_timing_data,
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGDATA1);
	writel(dmc->bypass_timing_data,
	       dmc->base_drexi1 + EXYNOS5_DREXI_TIMINGDATA1);
	writel(dmc->bypass_timing_power,
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGPOWER1);
	writel(dmc->bypass_timing_power,
	       dmc->base_drexi1 + EXYNOS5_DREXI_TIMINGPOWER1);
}
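
/*
 * exynos5_dram_change_timings() - Program the default (set 0) timing registers
 * in both DREX channels with the values matching @target_rate.
 */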
static int exynos5_dram_change_timings(struct exynos5_dmc *dmc,
				       unsigned long target_rate)
{
	int idx;

	for (idx = dmc->opp_count - 1; idx >= 0; idx--)
		if (dmc->opp[idx].freq_hz <= target_rate)
			break;

	if (idx < 0)
		return -EINVAL;

	writel(EXYNOS5_AREF_NORMAL,
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGAREF);

	writel(dmc->timing_row[idx],
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGROW0);
	writel(dmc->timing_row[idx],
	       dmc->base_drexi1 + EXYNOS5_DREXI_TIMINGROW0);
	writel(dmc->timing_data[idx],
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGDATA0);
	writel(dmc->timing_data[idx],
	       dmc->base_drexi1 + EXYNOS5_DREXI_TIMINGDATA0);
	writel(dmc->timing_power[idx],
	       dmc->base_drexi0 + EXYNOS5_DREXI_TIMINGPOWER0);
	writel(dmc->timing_power[idx],
	       dmc->base_drexi1 + EXYNOS5_DREXI_TIMINGPOWER0);

	return 0;
}
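
/*
 * exynos5_dmc_align_target_voltage() - Lower the MIF voltage to @target_volt
 * once the frequency has been decreased; does nothing when scaling up.
 */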
static int exynos5_dmc_align_target_voltage(struct exynos5_dmc *dmc,
					    unsigned long target_volt)
{
	int ret = 0;

	if (dmc->curr_volt <= target_volt)
		return 0;

	ret = regulator_set_voltage(dmc->vdd_mif, target_volt,
				    target_volt);
	if (!ret)
		dmc->curr_volt = target_volt;

	return ret;
}
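
/*
 * exynos5_dmc_align_bypass_voltage() - Raise the MIF voltage to @target_volt
 * before the frequency is increased; does nothing when scaling down.
 */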
static int exynos5_dmc_align_bypass_voltage(struct exynos5_dmc *dmc,
					    unsigned long target_volt)
{
	int ret = 0;

	if (dmc->curr_volt >= target_volt)
		return 0;

	ret = regulator_set_voltage(dmc->vdd_mif, target_volt,
				    target_volt);
	if (!ret)
		dmc->curr_volt = target_volt;

	return ret;
}
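
/*
 * exynos5_dmc_align_bypass_dram_timings() - Validate @target_rate against the
 * OPP table and program the bypass (set 1) DRAM timings.
 */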
static int exynos5_dmc_align_bypass_dram_timings(struct exynos5_dmc *dmc,
						 unsigned long target_rate)
{
	int idx = find_target_freq_idx(dmc, target_rate);

	if (idx < 0)
		return -EINVAL;

	exynos5_set_bypass_dram_timings(dmc);

	return 0;
}
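
/*
 * exynos5_dmc_switch_to_bypass_configuration() - Prepare the controller for a
 * frequency change: raise the voltage if needed, program the bypass timings
 * and switch the controller to the alternative timing register set.
 */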
static int
exynos5_dmc_switch_to_bypass_configuration(struct exynos5_dmc *dmc,
					   unsigned long target_rate,
					   unsigned long target_volt)
{
	int ret;

	/*
	 * Running at a higher voltage than strictly needed is safe, so raise
	 * it first if the target frequency requires it.
	 */
	ret = exynos5_dmc_align_bypass_voltage(dmc, target_volt);
	if (ret)
		return ret;

	/* Program DRAM timings suitable for the bypass clock. */
	ret = exynos5_dmc_align_bypass_dram_timings(dmc, target_rate);
	if (ret)
		return ret;

	/* Tell the controller to use the alternative timing register set. */
	ret = exynos5_switch_timing_regs(dmc, USE_MX_MSPLL_TIMINGS);

	return ret;
}
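
/*
 * exynos5_dmc_change_freq_and_volt() - Perform the full frequency transition:
 * run temporarily from the MX_MSPLL bypass clock, reprogram BPLL and the DRAM
 * timings for @target_rate, switch back to BPLL and finally align the voltage.
 */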
static int
exynos5_dmc_change_freq_and_volt(struct exynos5_dmc *dmc,
				 unsigned long target_rate,
				 unsigned long target_volt)
{
	int ret;

	ret = exynos5_dmc_switch_to_bypass_configuration(dmc, target_rate,
							 target_volt);
	if (ret)
		return ret;

	/*
	 * The voltage is now at least at the level needed for the target
	 * frequency, so switching the clock source is safe.
	 */
	clk_prepare_enable(dmc->fout_spll);
	clk_prepare_enable(dmc->mout_spll);
	clk_prepare_enable(dmc->mout_mx_mspll_ccore);

	ret = clk_set_parent(dmc->mout_mclk_cdrex, dmc->mout_mx_mspll_ccore);
	if (ret)
		goto disable_clocks;

	/*
	 * While running from the bypass clock, program the default timing set
	 * and BPLL for the target frequency, so everything is in place before
	 * switching back.
	 */
	exynos5_dram_change_timings(dmc, target_rate);

	clk_set_rate(dmc->fout_bpll, target_rate);

	ret = exynos5_switch_timing_regs(dmc, USE_BPLL_TIMINGS);
	if (ret)
		goto disable_clocks;

	ret = clk_set_parent(dmc->mout_mclk_cdrex, dmc->mout_bpll);
	if (ret)
		goto disable_clocks;

	/* Lower the voltage again if the new frequency allows it. */
	ret = exynos5_dmc_align_target_voltage(dmc, target_volt);

disable_clocks:
	clk_disable_unprepare(dmc->mout_mx_mspll_ccore);
	clk_disable_unprepare(dmc->mout_spll);
	clk_disable_unprepare(dmc->fout_spll);

	return ret;
}
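
/*
 * exynos5_dmc_get_volt_freq() - Translate a requested frequency into the
 * matching OPP frequency and voltage using the devfreq OPP helpers.
 */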
static int exynos5_dmc_get_volt_freq(struct exynos5_dmc *dmc,
				     unsigned long *freq,
				     unsigned long *target_rate,
				     unsigned long *target_volt, u32 flags)
{
	struct dev_pm_opp *opp;

	opp = devfreq_recommended_opp(dmc->dev, freq, flags);
	if (IS_ERR(opp))
		return PTR_ERR(opp);

	*target_rate = dev_pm_opp_get_freq(opp);
	*target_volt = dev_pm_opp_get_voltage(opp);
	dev_pm_opp_put(opp);

	return 0;
}
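
/*
 * exynos5_dmc_target() - devfreq target callback: change the DMC frequency
 * and voltage to the closest supported operating point for @freq.
 */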
static int exynos5_dmc_target(struct device *dev, unsigned long *freq,
			      u32 flags)
{
	struct exynos5_dmc *dmc = dev_get_drvdata(dev);
	unsigned long target_rate = 0;
	unsigned long target_volt = 0;
	int ret;

	ret = exynos5_dmc_get_volt_freq(dmc, freq, &target_rate, &target_volt,
					flags);
	if (ret)
		return ret;

	if (target_rate == dmc->curr_rate)
		return 0;

	mutex_lock(&dmc->lock);

	ret = exynos5_dmc_change_freq_and_volt(dmc, target_rate, target_volt);
	if (ret) {
		mutex_unlock(&dmc->lock);
		return ret;
	}

	dmc->curr_rate = target_rate;

	mutex_unlock(&dmc->lock);
	return 0;
}
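
/*
 * exynos5_counters_get() - Sum the load reported by all PPMU counters and use
 * the largest total count as the time base for the devfreq governor.
 */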
static int exynos5_counters_get(struct exynos5_dmc *dmc,
				unsigned long *load_count,
				unsigned long *total_count)
{
	unsigned long total = 0;
	struct devfreq_event_data event;
	int ret, i;

	*load_count = 0;

	/* Take into account all available counters */
	for (i = 0; i < dmc->num_counters; i++) {
		if (!dmc->counter[i])
			continue;

		ret = devfreq_event_get_event(dmc->counter[i], &event);
		if (ret < 0)
			return ret;

		*load_count += event.load_count;

		if (total < event.total_count)
			total = event.total_count;
	}

	*total_count = total;

	return 0;
}
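
/*
 * exynos5_dmc_start_perf_events() - Arm the DREX internal performance
 * counters: enable the counter-2 overflow interrupt, enable counter 2 and the
 * cycle counter, clear stale state and start counting from @beg_value.
 */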
static void exynos5_dmc_start_perf_events(struct exynos5_dmc *dmc,
					  u32 beg_value)
{
	/* Enable interrupts for counter 2 */
	writel(PERF_CNT2, dmc->base_drexi0 + DREX_INTENS_PPC);
	writel(PERF_CNT2, dmc->base_drexi1 + DREX_INTENS_PPC);

	/* Enable counter 2 and the cycle counter */
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi0 + DREX_CNTENS_PPC);
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi1 + DREX_CNTENS_PPC);

	/* Clear overflow flags */
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi0 + DREX_FLAG_PPC);
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi1 + DREX_FLAG_PPC);

	/* Reset all counters */
	writel(CC_RESET | PPC_COUNTER_RESET, dmc->base_drexi0 + DREX_PMNC_PPC);
	writel(CC_RESET | PPC_COUNTER_RESET, dmc->base_drexi1 + DREX_PMNC_PPC);

	/*
	 * Set the start value of counter 2 close to the overflow point, so the
	 * next overflow interrupt arrives after a known number of events.
	 */
	writel(beg_value, dmc->base_drexi0 + DREX_PMCNT2_PPC);
	writel(beg_value, dmc->base_drexi1 + DREX_PMCNT2_PPC);

	/* Start all counters */
	writel(PPC_ENABLE, dmc->base_drexi0 + DREX_PMNC_PPC);
	writel(PPC_ENABLE, dmc->base_drexi1 + DREX_PMNC_PPC);
}
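
/*
 * exynos5_dmc_perf_events_calc() - Derive a coarse bus load estimate from the
 * time between two counter overflow interrupts and store it for the governor.
 */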
static void exynos5_dmc_perf_events_calc(struct exynos5_dmc *dmc, u64 diff_ts)
{
	/*
	 * The counter overflows after a fixed number of read transfers, so the
	 * time between overflows is inversely proportional to the bus traffic.
	 * A short interval means high utilization, a long one means the bus is
	 * mostly idle; the reported load steers the simple_ondemand governor
	 * up or down accordingly.
	 */
	if (diff_ts < PERF_EVENT_UP_DOWN_THRESHOLD) {
		/* Overflow came quickly: report high utilization. */
		dmc->load = 70;
		dmc->total = 100;
	} else {
		/* Overflow took long: report low utilization. */
		dmc->load = 35;
		dmc->total = 100;
	}

	dev_dbg(dmc->dev, "diff_ts=%llu\n", diff_ts);
}
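
/*
 * exynos5_dmc_perf_events_check() - Interrupt-path handler: stop the counters,
 * find which channel overflowed, update the load estimate and re-arm the
 * counters.
 */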
static void exynos5_dmc_perf_events_check(struct exynos5_dmc *dmc)
{
	u32 val;
	u64 diff_ts, ts;

	ts = ktime_get_ns();

	/* Stop all counters */
	writel(0, dmc->base_drexi0 + DREX_PMNC_PPC);
	writel(0, dmc->base_drexi1 + DREX_PMNC_PPC);

	/* Check which channel raised the overflow */
	val = readl(dmc->base_drexi0 + DREX_FLAG_PPC);
	if (val) {
		diff_ts = ts - dmc->last_overflow_ts[0];
		dmc->last_overflow_ts[0] = ts;
		dev_dbg(dmc->dev, "drex0 0xE050 val= 0x%08x\n", val);
	} else {
		val = readl(dmc->base_drexi1 + DREX_FLAG_PPC);
		diff_ts = ts - dmc->last_overflow_ts[1];
		dmc->last_overflow_ts[1] = ts;
		dev_dbg(dmc->dev, "drex1 0xE050 val= 0x%08x\n", val);
	}

	exynos5_dmc_perf_events_calc(dmc, diff_ts);

	exynos5_dmc_start_perf_events(dmc, PERF_COUNTER_START_VALUE);
}
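
/*
 * exynos5_dmc_enable_perf_events() - Enable the performance event clock,
 * select the read-transfer events to count and initialize the bookkeeping
 * used by the overflow handler.
 */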
static void exynos5_dmc_enable_perf_events(struct exynos5_dmc *dmc)
{
	u64 ts;

	/* Enable the performance events clock */
	writel(PEREV_CLK_EN, dmc->base_drexi0 + DREX_PPCCLKCON);
	writel(PEREV_CLK_EN, dmc->base_drexi1 + DREX_PPCCLKCON);

	/* Select read transfers as the event counted by counter 2 */
	writel(READ_TRANSFER_CH0, dmc->base_drexi0 + DREX_PEREV2CONFIG);
	writel(READ_TRANSFER_CH1, dmc->base_drexi1 + DREX_PEREV2CONFIG);

	ts = ktime_get_ns();
	dmc->last_overflow_ts[0] = ts;
	dmc->last_overflow_ts[1] = ts;

	/* Start with a conservative (high) load estimate. */
	dmc->load = 99;
	dmc->total = 100;
}
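
/*
 * exynos5_dmc_disable_perf_events() - Stop the counters, mask the overflow
 * interrupt and clear any pending counter state.
 */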
static void exynos5_dmc_disable_perf_events(struct exynos5_dmc *dmc)
{
	/* Stop all counters */
	writel(0, dmc->base_drexi0 + DREX_PMNC_PPC);
	writel(0, dmc->base_drexi1 + DREX_PMNC_PPC);

	/* Disable interrupts for counter 2 */
	writel(PERF_CNT2, dmc->base_drexi0 + DREX_INTENC_PPC);
	writel(PERF_CNT2, dmc->base_drexi1 + DREX_INTENC_PPC);

	/* Disable counter 2 and the cycle counter */
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi0 + DREX_CNTENC_PPC);
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi1 + DREX_CNTENC_PPC);

	/* Clear overflow flags */
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi0 + DREX_FLAG_PPC);
	writel(PERF_CNT2 | PERF_CCNT, dmc->base_drexi1 + DREX_FLAG_PPC);
}
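
/*
 * exynos5_dmc_get_status() - devfreq get_dev_status callback. In IRQ mode the
 * load estimate maintained by the overflow handler is returned; otherwise the
 * PPMU counters are read and re-armed.
 */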
static int exynos5_dmc_get_status(struct device *dev,
				  struct devfreq_dev_status *stat)
{
	struct exynos5_dmc *dmc = dev_get_drvdata(dev);
	unsigned long load, total;
	int ret;

	if (dmc->in_irq_mode) {
		mutex_lock(&dmc->lock);
		stat->current_frequency = dmc->curr_rate;
		mutex_unlock(&dmc->lock);

		stat->busy_time = dmc->load;
		stat->total_time = dmc->total;
	} else {
		ret = exynos5_counters_get(dmc, &load, &total);
		if (ret < 0)
			return -EINVAL;

		/* To protect from overflow, divide by 1024 */
		stat->busy_time = load >> 10;
		stat->total_time = total >> 10;

		ret = exynos5_counters_set_event(dmc);
		if (ret < 0) {
			dev_err(dev, "could not set event counter\n");
			return ret;
		}
	}

	return 0;
}
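
/*
 * exynos5_dmc_get_cur_freq() - devfreq get_cur_freq callback returning the
 * currently programmed DMC frequency.
 */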
static int exynos5_dmc_get_cur_freq(struct device *dev, unsigned long *freq)
{
	struct exynos5_dmc *dmc = dev_get_drvdata(dev);

	mutex_lock(&dmc->lock);
	*freq = dmc->curr_rate;
	mutex_unlock(&dmc->lock);

	return 0;
}
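
/*
 * exynos5_dmc_df_profile - devfreq device profile. The initial frequency and
 * the polling interval are filled in during initialization, depending on
 * whether the driver runs in IRQ mode or in polling mode.
 */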
static struct devfreq_dev_profile exynos5_dmc_df_profile = {
	.timer = DEVFREQ_TIMER_DELAYED,
	.target = exynos5_dmc_target,
	.get_dev_status = exynos5_dmc_get_status,
	.get_cur_freq = exynos5_dmc_get_cur_freq,
};
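
/*
 * exynos5_dmc_align_init_freq() - Map the frequency left by the bootloader to
 * a known OPP; fall back to the highest OPP when it does not match any entry.
 */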
static unsigned long
exynos5_dmc_align_init_freq(struct exynos5_dmc *dmc,
			    unsigned long bootloader_init_freq)
{
	unsigned long aligned_freq;
	int idx;

	idx = find_target_freq_idx(dmc, bootloader_init_freq);
	if (idx >= 0)
		aligned_freq = dmc->opp[idx].freq_hz;
	else
		aligned_freq = dmc->opp[dmc->opp_count - 1].freq_hz;

	return aligned_freq;
}
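
/*
 * create_timings_aligned() - Convert the LPDDR3 timings (in ps) into register
 * values for one clock period: each timing is rounded up to whole cycles,
 * clamped to the minimum tCK value and packed into its bit field.
 */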
static int create_timings_aligned(struct exynos5_dmc *dmc, u32 *reg_timing_row,
				  u32 *reg_timing_data, u32 *reg_timing_power,
				  u32 clk_period_ps)
{
	u32 val;
	const struct timing_reg *reg;

	if (clk_period_ps == 0)
		return -EINVAL;

	*reg_timing_row = 0;
	*reg_timing_data = 0;
	*reg_timing_power = 0;

	val = dmc->timings->tRFC / clk_period_ps;
	val += dmc->timings->tRFC % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRFC);
	reg = &timing_row_reg_fields[0];
	*reg_timing_row |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tRRD / clk_period_ps;
	val += dmc->timings->tRRD % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRRD);
	reg = &timing_row_reg_fields[1];
	*reg_timing_row |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tRPab / clk_period_ps;
	val += dmc->timings->tRPab % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRPab);
	reg = &timing_row_reg_fields[2];
	*reg_timing_row |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tRCD / clk_period_ps;
	val += dmc->timings->tRCD % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRCD);
	reg = &timing_row_reg_fields[3];
	*reg_timing_row |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tRC / clk_period_ps;
	val += dmc->timings->tRC % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRC);
	reg = &timing_row_reg_fields[4];
	*reg_timing_row |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tRAS / clk_period_ps;
	val += dmc->timings->tRAS % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRAS);
	reg = &timing_row_reg_fields[5];
	*reg_timing_row |= TIMING_VAL2REG(reg, val);

	/* data related timings */
	val = dmc->timings->tWTR / clk_period_ps;
	val += dmc->timings->tWTR % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tWTR);
	reg = &timing_data_reg_fields[0];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tWR / clk_period_ps;
	val += dmc->timings->tWR % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tWR);
	reg = &timing_data_reg_fields[1];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tRTP / clk_period_ps;
	val += dmc->timings->tRTP % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRTP);
	reg = &timing_data_reg_fields[2];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tW2W_C2C / clk_period_ps;
	val += dmc->timings->tW2W_C2C % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tW2W_C2C);
	reg = &timing_data_reg_fields[3];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tR2R_C2C / clk_period_ps;
	val += dmc->timings->tR2R_C2C % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tR2R_C2C);
	reg = &timing_data_reg_fields[4];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tWL / clk_period_ps;
	val += dmc->timings->tWL % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tWL);
	reg = &timing_data_reg_fields[5];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tDQSCK / clk_period_ps;
	val += dmc->timings->tDQSCK % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tDQSCK);
	reg = &timing_data_reg_fields[6];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tRL / clk_period_ps;
	val += dmc->timings->tRL % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tRL);
	reg = &timing_data_reg_fields[7];
	*reg_timing_data |= TIMING_VAL2REG(reg, val);

	/* power related timings */
	val = dmc->timings->tFAW / clk_period_ps;
	val += dmc->timings->tFAW % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tFAW);
	reg = &timing_power_reg_fields[0];
	*reg_timing_power |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tXSR / clk_period_ps;
	val += dmc->timings->tXSR % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tXSR);
	reg = &timing_power_reg_fields[1];
	*reg_timing_power |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tXP / clk_period_ps;
	val += dmc->timings->tXP % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tXP);
	reg = &timing_power_reg_fields[2];
	*reg_timing_power |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tCKE / clk_period_ps;
	val += dmc->timings->tCKE % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tCKE);
	reg = &timing_power_reg_fields[3];
	*reg_timing_power |= TIMING_VAL2REG(reg, val);

	val = dmc->timings->tMRD / clk_period_ps;
	val += dmc->timings->tMRD % clk_period_ps ? 1 : 0;
	val = max(val, dmc->min_tck->tMRD);
	reg = &timing_power_reg_fields[4];
	*reg_timing_power |= TIMING_VAL2REG(reg, val);

	return 0;
}
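
/*
 * of_get_dram_timings() - Parse the LPDDR3 timings from the memory device
 * node referenced by 'device-handle' and precompute the per-OPP timing
 * register values together with the bypass set.
 */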
static int of_get_dram_timings(struct exynos5_dmc *dmc)
{
	int ret = 0;
	int idx;
	struct device_node *np_ddr;
	u32 freq_mhz, clk_period_ps;

	np_ddr = of_parse_phandle(dmc->dev->of_node, "device-handle", 0);
	if (!np_ddr) {
		dev_warn(dmc->dev, "could not find 'device-handle' in DT\n");
		return -EINVAL;
	}

	dmc->timing_row = devm_kmalloc_array(dmc->dev, TIMING_COUNT,
					     sizeof(u32), GFP_KERNEL);
	if (!dmc->timing_row) {
		ret = -ENOMEM;
		goto put_node;
	}

	dmc->timing_data = devm_kmalloc_array(dmc->dev, TIMING_COUNT,
					      sizeof(u32), GFP_KERNEL);
	if (!dmc->timing_data) {
		ret = -ENOMEM;
		goto put_node;
	}

	dmc->timing_power = devm_kmalloc_array(dmc->dev, TIMING_COUNT,
					       sizeof(u32), GFP_KERNEL);
	if (!dmc->timing_power) {
		ret = -ENOMEM;
		goto put_node;
	}

	dmc->timings = of_lpddr3_get_ddr_timings(np_ddr, dmc->dev,
						 DDR_TYPE_LPDDR3,
						 &dmc->timings_arr_size);
	if (!dmc->timings) {
		dev_warn(dmc->dev, "could not get timings from DT\n");
		ret = -EINVAL;
		goto put_node;
	}

	dmc->min_tck = of_lpddr3_get_min_tck(np_ddr, dmc->dev);
	if (!dmc->min_tck) {
		dev_warn(dmc->dev, "could not get tck from DT\n");
		ret = -EINVAL;
		goto put_node;
	}

	/* Calculate the timing register values for each OPP */
	for (idx = 0; idx < dmc->opp_count; idx++) {
		freq_mhz = dmc->opp[idx].freq_hz / 1000000;
		clk_period_ps = 1000000 / freq_mhz;

		ret = create_timings_aligned(dmc, &dmc->timing_row[idx],
					     &dmc->timing_data[idx],
					     &dmc->timing_power[idx],
					     clk_period_ps);
	}

	/*
	 * Use the highest frequency's timings as the safe set programmed
	 * while running from the bypass clock.
	 */
	dmc->bypass_timing_row = dmc->timing_row[idx - 1];
	dmc->bypass_timing_data = dmc->timing_data[idx - 1];
	dmc->bypass_timing_power = dmc->timing_power[idx - 1];

put_node:
	of_node_put(np_ddr);
	return ret;
}
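
/*
 * exynos5_dmc_init_clks() - Acquire the CDREX clocks, determine the initial
 * frequency and voltage, and route MX_MSPLL so it can serve as the temporary
 * (bypass) clock source during frequency changes.
 */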
static int exynos5_dmc_init_clks(struct exynos5_dmc *dmc)
{
	int ret;
	unsigned long target_volt = 0;
	unsigned long target_rate = 0;
	unsigned int tmp;

	dmc->fout_spll = devm_clk_get(dmc->dev, "fout_spll");
	if (IS_ERR(dmc->fout_spll))
		return PTR_ERR(dmc->fout_spll);

	dmc->fout_bpll = devm_clk_get(dmc->dev, "fout_bpll");
	if (IS_ERR(dmc->fout_bpll))
		return PTR_ERR(dmc->fout_bpll);

	dmc->mout_mclk_cdrex = devm_clk_get(dmc->dev, "mout_mclk_cdrex");
	if (IS_ERR(dmc->mout_mclk_cdrex))
		return PTR_ERR(dmc->mout_mclk_cdrex);

	dmc->mout_bpll = devm_clk_get(dmc->dev, "mout_bpll");
	if (IS_ERR(dmc->mout_bpll))
		return PTR_ERR(dmc->mout_bpll);

	dmc->mout_mx_mspll_ccore = devm_clk_get(dmc->dev,
						"mout_mx_mspll_ccore");
	if (IS_ERR(dmc->mout_mx_mspll_ccore))
		return PTR_ERR(dmc->mout_mx_mspll_ccore);

	dmc->mout_spll = devm_clk_get(dmc->dev, "ff_dout_spll2");
	if (IS_ERR(dmc->mout_spll)) {
		dmc->mout_spll = devm_clk_get(dmc->dev, "mout_sclk_spll");
		if (IS_ERR(dmc->mout_spll))
			return PTR_ERR(dmc->mout_spll);
	}

	/*
	 * Map the frequency left by the bootloader to a known OPP and use it
	 * as the devfreq starting point.
	 */
	dmc->curr_rate = clk_get_rate(dmc->mout_mclk_cdrex);
	dmc->curr_rate = exynos5_dmc_align_init_freq(dmc, dmc->curr_rate);
	exynos5_dmc_df_profile.initial_freq = dmc->curr_rate;

	ret = exynos5_dmc_get_volt_freq(dmc, &dmc->curr_rate, &target_rate,
					&target_volt, 0);
	if (ret)
		return ret;

	dmc->curr_volt = target_volt;

	ret = clk_set_parent(dmc->mout_mx_mspll_ccore, dmc->mout_spll);
	if (ret)
		return ret;

	clk_prepare_enable(dmc->fout_bpll);
	clk_prepare_enable(dmc->mout_bpll);

	/*
	 * Clear the LPDDR3 PHY clock source selection bits in case the
	 * bootloader left them set.
	 */
	regmap_read(dmc->clk_regmap, CDREX_LPDDR3PHY_CLKM_SRC, &tmp);
	tmp &= ~(BIT(1) | BIT(0));
	regmap_write(dmc->clk_regmap, CDREX_LPDDR3PHY_CLKM_SRC, tmp);

	return 0;
}
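
/*
 * exynos5_performance_counters_init() - Look up the external PPMU counters
 * referenced by 'devfreq-events', then enable them and program the event type.
 */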
static int exynos5_performance_counters_init(struct exynos5_dmc *dmc)
{
	int ret, i;

	dmc->num_counters = devfreq_event_get_edev_count(dmc->dev,
							 "devfreq-events");
	if (dmc->num_counters < 0) {
		dev_err(dmc->dev, "could not get devfreq-event counters\n");
		return dmc->num_counters;
	}

	dmc->counter = devm_kcalloc(dmc->dev, dmc->num_counters,
				    sizeof(*dmc->counter), GFP_KERNEL);
	if (!dmc->counter)
		return -ENOMEM;

	for (i = 0; i < dmc->num_counters; i++) {
		dmc->counter[i] =
			devfreq_event_get_edev_by_phandle(dmc->dev,
							  "devfreq-events", i);
		if (IS_ERR_OR_NULL(dmc->counter[i]))
			return -EPROBE_DEFER;
	}

	ret = exynos5_counters_enable_edev(dmc);
	if (ret < 0) {
		dev_err(dmc->dev, "could not enable event counter\n");
		return ret;
	}

	ret = exynos5_counters_set_event(dmc);
	if (ret < 0) {
		exynos5_counters_disable_edev(dmc);
		dev_err(dmc->dev, "could not set event counter\n");
		return ret;
	}

	return 0;
}
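
/*
 * exynos5_dmc_set_pause_on_switching() - Set the enable bit in the CDREX
 * PAUSE register, which is required so the memory clock source can be
 * switched safely at runtime.
 */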
static inline int exynos5_dmc_set_pause_on_switching(struct exynos5_dmc *dmc)
{
	unsigned int val;
	int ret;

	ret = regmap_read(dmc->clk_regmap, CDREX_PAUSE, &val);
	if (ret)
		return ret;

	val |= 1UL;
	regmap_write(dmc->clk_regmap, CDREX_PAUSE, val);

	return 0;
}

/* Threaded handler for the DREX counter-overflow interrupts. */
static irqreturn_t dmc_irq_thread(int irq, void *priv)
{
	int res;
	struct exynos5_dmc *dmc = priv;

	mutex_lock(&dmc->df->lock);
	exynos5_dmc_perf_events_check(dmc);
	res = update_devfreq(dmc->df);
	mutex_unlock(&dmc->df->lock);

	if (res)
		dev_warn(dmc->dev, "devfreq failed with %d\n", res);

	return IRQ_HANDLED;
}
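
/*
 * exynos5_dmc_probe() - Map the DREX register banks, parse OPPs and DRAM
 * timings, set up clocks and either the IRQ-driven or the polling monitoring
 * path, then register the devfreq device.
 */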
static int exynos5_dmc_probe(struct platform_device *pdev)
{
	int ret = 0;
	struct device *dev = &pdev->dev;
	struct device_node *np = dev->of_node;
	struct exynos5_dmc *dmc;
	int irq[2];

	dmc = devm_kzalloc(dev, sizeof(*dmc), GFP_KERNEL);
	if (!dmc)
		return -ENOMEM;

	mutex_init(&dmc->lock);

	dmc->dev = dev;
	platform_set_drvdata(pdev, dmc);

	dmc->base_drexi0 = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(dmc->base_drexi0))
		return PTR_ERR(dmc->base_drexi0);

	dmc->base_drexi1 = devm_platform_ioremap_resource(pdev, 1);
	if (IS_ERR(dmc->base_drexi1))
		return PTR_ERR(dmc->base_drexi1);

	dmc->clk_regmap = syscon_regmap_lookup_by_phandle(np,
							  "samsung,syscon-clk");
	if (IS_ERR(dmc->clk_regmap))
		return PTR_ERR(dmc->clk_regmap);

	ret = exynos5_init_freq_table(dmc, &exynos5_dmc_df_profile);
	if (ret) {
		dev_warn(dev, "couldn't initialize frequency settings\n");
		return ret;
	}

	dmc->vdd_mif = devm_regulator_get(dev, "vdd");
	if (IS_ERR(dmc->vdd_mif)) {
		ret = PTR_ERR(dmc->vdd_mif);
		return ret;
	}

	ret = exynos5_dmc_init_clks(dmc);
	if (ret)
		return ret;

	ret = of_get_dram_timings(dmc);
	if (ret) {
		dev_warn(dev, "couldn't initialize timings settings\n");
		goto remove_clocks;
	}

	ret = exynos5_dmc_set_pause_on_switching(dmc);
	if (ret) {
		dev_warn(dev, "couldn't get access to PAUSE register\n");
		goto remove_clocks;
	}

	/*
	 * IRQ mode (requested via the 'irqmode' parameter, when both DREX
	 * interrupts are available) uses the internal performance counters;
	 * otherwise the driver polls the external PPMU counters.
	 */
	irq[0] = platform_get_irq_byname(pdev, "drex_0");
	irq[1] = platform_get_irq_byname(pdev, "drex_1");
	if (irq[0] > 0 && irq[1] > 0 && irqmode) {
		ret = devm_request_threaded_irq(dev, irq[0], NULL,
						dmc_irq_thread, IRQF_ONESHOT,
						dev_name(dev), dmc);
		if (ret) {
			dev_err(dev, "couldn't grab IRQ\n");
			goto remove_clocks;
		}

		ret = devm_request_threaded_irq(dev, irq[1], NULL,
						dmc_irq_thread, IRQF_ONESHOT,
						dev_name(dev), dmc);
		if (ret) {
			dev_err(dev, "couldn't grab IRQ\n");
			goto remove_clocks;
		}

		/*
		 * Governor thresholds for the coarse load values reported
		 * in IRQ mode.
		 */
		dmc->gov_data.upthreshold = 55;
		dmc->gov_data.downdifferential = 5;

		exynos5_dmc_enable_perf_events(dmc);

		dmc->in_irq_mode = 1;
	} else {
		ret = exynos5_performance_counters_init(dmc);
		if (ret) {
			dev_warn(dev, "couldn't probe performance counters\n");
			goto remove_clocks;
		}

		/*
		 * Governor thresholds for the PPMU-based load values used
		 * in polling mode.
		 */
		dmc->gov_data.upthreshold = 10;
		dmc->gov_data.downdifferential = 5;

		exynos5_dmc_df_profile.polling_ms = 100;
	}

	dmc->df = devm_devfreq_add_device(dev, &exynos5_dmc_df_profile,
					  DEVFREQ_GOV_SIMPLE_ONDEMAND,
					  &dmc->gov_data);
	if (IS_ERR(dmc->df)) {
		ret = PTR_ERR(dmc->df);
		goto err_devfreq_add;
	}

	if (dmc->in_irq_mode)
		exynos5_dmc_start_perf_events(dmc, PERF_COUNTER_START_VALUE);

	dev_info(dev, "DMC initialized, in irq mode: %d\n", dmc->in_irq_mode);

	return 0;

err_devfreq_add:
	if (dmc->in_irq_mode)
		exynos5_dmc_disable_perf_events(dmc);
	else
		exynos5_counters_disable_edev(dmc);
remove_clocks:
	clk_disable_unprepare(dmc->mout_bpll);
	clk_disable_unprepare(dmc->fout_bpll);

	return ret;
}
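
/*
 * exynos5_dmc_remove() - Tear down the monitoring path and release the clocks
 * enabled at probe time.
 */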
static int exynos5_dmc_remove(struct platform_device *pdev)
{
	struct exynos5_dmc *dmc = dev_get_drvdata(&pdev->dev);

	if (dmc->in_irq_mode)
		exynos5_dmc_disable_perf_events(dmc);
	else
		exynos5_counters_disable_edev(dmc);

	clk_disable_unprepare(dmc->mout_bpll);
	clk_disable_unprepare(dmc->fout_bpll);

	return 0;
}

static const struct of_device_id exynos5_dmc_of_match[] = {
	{ .compatible = "samsung,exynos5422-dmc", },
	{ },
};
MODULE_DEVICE_TABLE(of, exynos5_dmc_of_match);

static struct platform_driver exynos5_dmc_platdrv = {
	.probe	= exynos5_dmc_probe,
	.remove	= exynos5_dmc_remove,
	.driver = {
		.name	= "exynos5-dmc",
		.of_match_table = exynos5_dmc_of_match,
	},
};
module_platform_driver(exynos5_dmc_platdrv);
MODULE_DESCRIPTION("Driver for Exynos5422 Dynamic Memory Controller dynamic frequency and voltage change");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Lukasz Luba");