// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/rational.h>
#include <linux/regmap.h>
#include <linux/math64.h>
#include <linux/minmax.h>
#include <linux/slab.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)
#define CFG_HW_CLK_CTRL_MASK	BIT(20)

#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10

#define RCG_CFG_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
#define RCG_M_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
#define RCG_N_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
#define RCG_D_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)
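/* Dynamic Frequency Scaling */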
#define MAX_PERF_LEVEL		8
#define SE_CMD_DFSR_OFFSET	0x14
#define SE_CMD_DFS_EN		BIT(0)
#define SE_PERF_DFSR(level)	(0x1c + 0x4 * (level))
#define SE_PERF_M_DFSR(level)	(0x5c + 0x4 * (level))
#define SE_PERF_N_DFSR(level)	(0x9c + 0x4 * (level))

enum freq_policy {
	FLOOR,
	CEIL,
};

static int clk_rcg2_is_enabled(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cmd;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
	if (ret)
		return ret;

	return (cmd & CMD_ROOT_OFF) == 0;
}

static u8 __clk_rcg2_get_parent(struct clk_hw *hw, u32 cfg)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	int i;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
	if (ret) {
		pr_debug("%s: Unable to read CFG register for %s\n",
			 __func__, clk_hw_get_name(hw));
		return 0;
	}

	return __clk_rcg2_get_parent(hw, cfg);
}

static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

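	/* Wait for update to take effect */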
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return -EBUSY;
}

static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

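/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 */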
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}

static unsigned long
__clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate, u32 cfg)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 hid_div, m = 0, n = 0, mode = 0, mask;

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &n);
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}

static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;

	regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);

	return __clk_rcg2_recalc_rate(hw, parent_rate, cfg);
}

static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
				    struct clk_rate_request *req,
				    enum freq_policy policy)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(f, rate);
		break;
	case CEIL:
		f = qcom_find_freq(f, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (!p)
		return -EINVAL;

	if (clk_flags & CLK_SET_RATE_PARENT) {
		rate = f->freq;
		if (f->pre_div) {
			if (!rate)
				rate = req->rate;
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}

static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}

static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f,
				u32 *_cfg)
{
	u32 cfg, mask, d_val, not2d_val, n_minus_m;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
					 RCG_M_OFFSET(rcg), mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 RCG_N_OFFSET(rcg), mask, ~(f->n - f->m));
		if (ret)
			return ret;

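		/* Calculate 2d value */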
		d_val = f->n;

		n_minus_m = f->n - f->m;
		n_minus_m *= 2;

		d_val = clamp_t(u32, d_val, f->m, n_minus_m);
		not2d_val = ~d_val & mask;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 RCG_D_OFFSET(rcg), mask, not2d_val);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;

	*_cfg &= ~mask;
	*_cfg |= cfg;

	return 0;
}

static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
	if (ret)
		return ret;

	ret = __clk_rcg2_configure(rcg, f, &cfg);
	if (ret)
		return ret;

	ret = regmap_write(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			       enum freq_policy policy)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(rcg->freq_tbl, rate);
		break;
	case CEIL:
		f = qcom_find_freq(rcg->freq_tbl, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	return clk_rcg2_configure(rcg, f);
}

static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

static int clk_rcg2_get_duty_cycle(struct clk_hw *hw, struct clk_duty *duty)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 notn_m, n, m, d, not2d, mask;

	if (!rcg->mnd_width) {
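		/* 50 % duty-cycle for Non-MND RCGs */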
		duty->num = 1;
		duty->den = 2;
		return 0;
	}

	regmap_read(rcg->clkr.regmap, RCG_D_OFFSET(rcg), &not2d);
	regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
	regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &notn_m);

	if (!not2d && !m && !notn_m) {
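		/* 50 % duty-cycle always */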
		duty->num = 1;
		duty->den = 2;
		return 0;
	}

	mask = BIT(rcg->mnd_width) - 1;

	d = ~(not2d) & mask;
	d = DIV_ROUND_CLOSEST(d, 2);

	n = (~(notn_m) + m) & mask;

	duty->num = d;
	duty->den = n;

	return 0;
}

static int clk_rcg2_set_duty_cycle(struct clk_hw *hw, struct clk_duty *duty)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 notn_m, n, m, d, not2d, mask, duty_per, cfg;
	int ret;

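	/* Duty-cycle cannot be modified for non-MND RCGs */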
	if (!rcg->mnd_width)
		return -EINVAL;

	mask = BIT(rcg->mnd_width) - 1;

	regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &notn_m);
	regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
	regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);

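	/* Duty-cycle cannot be modified if MND divider is in bypass mode. */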
	if (!(cfg & CFG_MODE_MASK))
		return -EINVAL;

	n = (~(notn_m) + m) & mask;

	duty_per = (duty->num * 100) / duty->den;

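	/* Calculate 2d value */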
	d = DIV_ROUND_CLOSEST(n * duty_per * 2, 100);

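	/*
	 * Check bit widths of 2d. If D is too big reduce duty cycle.
	 * Also make sure it is never zero.
	 */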
	d = clamp_val(d, 1, mask);

	if ((d / 2) > (n - m))
		d = (n - m) * 2;
	else if ((d / 2) < (m / 2))
		d = m;

	not2d = ~d & mask;

	ret = regmap_update_bits(rcg->clkr.regmap, RCG_D_OFFSET(rcg), mask,
				 not2d);
	if (ret)
		return ret;

	return update_config(rcg);
}

const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
	.get_duty_cycle = clk_rcg2_get_duty_cycle,
	.set_duty_cycle = clk_rcg2_set_duty_cycle,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);

const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
	.get_duty_cycle = clk_rcg2_get_duty_cycle,
	.set_duty_cycle = clk_rcg2_set_duty_cycle,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);

struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry frac_table_675m[] = {
	{ 52, 295 },
	{ 11, 57 },
	{ 63, 307 },
	{ 11, 50 },
	{ 47, 206 },
	{ 31, 100 },
	{ 107, 269 },
	{ },
};

static const struct frac_entry frac_table_810m[] = {
	{ 31, 211 },
	{ 32, 199 },
	{ 63, 307 },
	{ 11, 60 },
	{ 50, 263 },
	{ 31, 120 },
	{ 119, 359 },
	{ },
};

static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
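	/* Parent index is set statically in frequency table */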
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}

static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

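	/* Force the correct parent */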
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}

const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);

static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	unsigned long div;
	u32 mask = BIT(rcg->hid_width) - 1;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
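	/* Parent index is set statically in frequency table */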
	return clk_byte_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);

static int clk_byte2_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;
	unsigned long rate = req->rate;

	if (rate == 0)
		return -EINVAL;

	p = req->best_parent_hw;
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}

static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
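	/* Parent index is set statically in frequency table */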
	return clk_byte2_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);

static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ 2, 3 },
	{ }
};

static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	unsigned long request, src_rate;
	int delta = 100000;
	const struct frac_entry *frac = frac_table_pixel;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
					 unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);

static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { .min_rate = 0, .max_rate = ULONG_MAX };
	struct clk_rcg2_gfx3d *cgfx = to_clk_rcg2_gfx3d(hw);
	struct clk_hw *xo, *p0, *p1, *p2;
	unsigned long p0_rate;
	u8 mux_div = cgfx->div;
	int ret;

	p0 = cgfx->hws[0];
	p1 = cgfx->hws[1];
	p2 = cgfx->hws[2];
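	/*
	 * Rate switching ping-pongs this RCG between the two dedicated PLLs
	 * (p1, p2), with p0 as a fixed-rate option, so all three parent
	 * handles must have been provided.
	 */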
	if (WARN_ON(!p0 || !p1 || !p2))
		return -EINVAL;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	if (mux_div == 0)
		mux_div = 1;

	parent_req.rate = req->rate * mux_div;

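	/* This has to be a fixed rate PLL */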
	p0_rate = clk_hw_get_rate(p0);

	if (parent_req.rate == p0_rate) {
		req->rate = req->best_parent_rate = p0_rate;
		req->best_parent_hw = p0;
		return 0;
	}

	if (req->best_parent_hw == p0) {
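		/* Are we going back to a previously used rate? */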
		if (clk_hw_get_rate(p2) == parent_req.rate)
			req->best_parent_hw = p2;
		else
			req->best_parent_hw = p1;
	} else if (req->best_parent_hw == p2) {
		req->best_parent_hw = p1;
	} else {
		req->best_parent_hw = p2;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;
	req->rate /= mux_div;

	return 0;
}

static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
					 unsigned long parent_rate, u8 index)
{
	struct clk_rcg2_gfx3d *cgfx = to_clk_rcg2_gfx3d(hw);
	struct clk_rcg2 *rcg = &cgfx->rcg;
	u32 cfg;
	int ret;

	cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

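	/* On some targets, the GFX3D RCG may need to divide PLL frequency */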
	if (cgfx->div > 1)
		cfg |= ((2 * cgfx->div) - 1) << CFG_SRC_DIV_SHIFT;

	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
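	/*
	 * We should never get here; clk_gfx3d_determine_rate() always
	 * switches us onto a different parent, so rate changes arrive via
	 * clk_gfx3d_set_rate_and_parent(). Keep this as a no-op just in case.
	 */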
	return 0;
}

const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);

static int clk_rcg2_set_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const char *name = clk_hw_get_name(hw);
	int ret, count;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_ROOT_EN, CMD_ROOT_EN);
	if (ret)
		return ret;

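	/* wait for RCG to turn ON */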
	for (count = 500; count > 0; count--) {
		if (clk_rcg2_is_enabled(hw))
			return 0;

		udelay(1);
	}

	pr_err("%s: RCG did not turn on\n", name);
	return -ETIMEDOUT;
}

static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  CMD_ROOT_EN, 0);
}

static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}

static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

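	/*
	 * In case clock is disabled, update the M, N and D registers, cache
	 * the CFG value in parked_cfg and don't hit the update bit of CMD
	 * register.
	 */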
	if (!clk_hw_is_enabled(hw))
		return __clk_rcg2_configure(rcg, f, &rcg->parked_cfg);

	return clk_rcg2_shared_force_enable_clear(hw, f);
}

static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
}

static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

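	/*
	 * Set the update bit because required configuration has already
	 * been written in clk_rcg2_shared_set_rate()
	 */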
	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

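	/* Write back the stored configuration corresponding to current rate */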
	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, rcg->parked_cfg);
	if (ret)
		return ret;

	ret = update_config(rcg);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}

static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

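	/*
	 * Store current configuration as switching to safe source would clear
	 * the SRC and DIV of CFG register
	 */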
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &rcg->parked_cfg);

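	/*
	 * Park the RCG at a safe configuration - sourced off of safe source.
	 * Force enable and disable the RCG while configuring it to safeguard
	 * against any update signal coming from the downstream clock.
	 * The current parent is still prepared and enabled at this point, and
	 * the safe source is always on while application processor subsystem
	 * is online. Therefore, the RCG can safely switch its parent.
	 */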
	clk_rcg2_set_force_enable(hw);

	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
		     rcg->safe_src_index << CFG_SRC_SEL_SHIFT);

	update_config(rcg);

	clk_rcg2_clear_force_enable(hw);
}

static u8 clk_rcg2_shared_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

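	/* If the shared rcg is parked use the cached cfg instead */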
	if (!clk_hw_is_enabled(hw))
		return __clk_rcg2_get_parent(hw, rcg->parked_cfg);

	return clk_rcg2_get_parent(hw);
}

static int clk_rcg2_shared_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

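	/* If the shared rcg is parked only update the cached cfg */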
	if (!clk_hw_is_enabled(hw)) {
		rcg->parked_cfg &= ~CFG_SRC_SEL_MASK;
		rcg->parked_cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

		return 0;
	}

	return clk_rcg2_set_parent(hw, index);
}

static unsigned long
clk_rcg2_shared_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

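	/* If the shared rcg is parked use the cached cfg instead */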
	if (!clk_hw_is_enabled(hw))
		return __clk_rcg2_recalc_rate(hw, parent_rate, rcg->parked_cfg);

	return clk_rcg2_recalc_rate(hw, parent_rate);
}

const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_shared_get_parent,
	.set_parent = clk_rcg2_shared_set_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);

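/* Common APIs to be used for DFS based RCGR */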
static void clk_rcg2_dfs_populate_freq(struct clk_hw *hw, unsigned int l,
				       struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct clk_hw *p;
	unsigned long prate = 0;
	u32 val, mask, cfg, mode, src;
	int i, num_parents;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(l), &cfg);

	mask = BIT(rcg->hid_width) - 1;
	f->pre_div = 1;
	if (cfg & mask)
		f->pre_div = cfg & mask;

	src = cfg & CFG_SRC_SEL_MASK;
	src >>= CFG_SRC_SEL_SHIFT;

	num_parents = clk_hw_get_num_parents(hw);
	for (i = 0; i < num_parents; i++) {
		if (src == rcg->parent_map[i].cfg) {
			f->src = rcg->parent_map[i].src;
			p = clk_hw_get_parent_by_index(&rcg->clkr.hw, i);
			prate = clk_hw_get_rate(p);
		}
	}

	mode = cfg & CFG_MODE_MASK;
	mode >>= CFG_MODE_SHIFT;
	if (mode) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_M_DFSR(l),
			    &val);
		val &= mask;
		f->m = val;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_N_DFSR(l),
			    &val);
		val = ~val;
		val &= mask;
		val += f->m;
		f->n = val;
	}

	f->freq = calc_rate(prate, f->m, f->n, mode, f->pre_div);
}

static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2 *rcg)
{
	struct freq_tbl *freq_tbl;
	int i;

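	/* Allocate space for 1 extra since table is NULL terminated */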
	freq_tbl = kcalloc(MAX_PERF_LEVEL + 1, sizeof(*freq_tbl), GFP_KERNEL);
	if (!freq_tbl)
		return -ENOMEM;
	rcg->freq_tbl = freq_tbl;

	for (i = 0; i < MAX_PERF_LEVEL; i++)
		clk_rcg2_dfs_populate_freq(&rcg->clkr.hw, i, freq_tbl + i);

	return 0;
}

static int clk_rcg2_dfs_determine_rate(struct clk_hw *hw,
				       struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	if (!rcg->freq_tbl) {
		ret = clk_rcg2_dfs_populate_freq_table(rcg);
		if (ret) {
			pr_err("Failed to update DFS tables for %s\n",
			       clk_hw_get_name(hw));
			return ret;
		}
	}

	return clk_rcg2_determine_rate(hw, req);
}

static unsigned long
clk_rcg2_dfs_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 level, mask, cfg, m = 0, n = 0, mode, pre_div;

	regmap_read(rcg->clkr.regmap,
		    rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &level);
	level &= GENMASK(4, 1);
	level >>= 1;

	if (rcg->freq_tbl)
		return rcg->freq_tbl[level].freq;

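	/*
	 * Assume that parent_rate is actually the parent because
	 * we can't do any better at figuring it out when the table
	 * hasn't been populated yet. We only populate the table
	 * in determine_rate because we can't guarantee the parents
	 * will be registered with the framework until then.
	 */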
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(level),
		    &cfg);

	mask = BIT(rcg->hid_width) - 1;
	pre_div = 1;
	if (cfg & mask)
		pre_div = cfg & mask;

	mode = cfg & CFG_MODE_MASK;
	mode >>= CFG_MODE_SHIFT;
	if (mode) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_M_DFSR(level), &m);
		m &= mask;

		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_N_DFSR(level), &n);
		n = ~n;
		n &= mask;
		n += m;
	}

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

static const struct clk_ops clk_rcg2_dfs_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.determine_rate = clk_rcg2_dfs_determine_rate,
	.recalc_rate = clk_rcg2_dfs_recalc_rate,
};

static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data *data,
			       struct regmap *regmap)
{
	struct clk_rcg2 *rcg = data->rcg;
	struct clk_init_data *init = data->init;
	u32 val;
	int ret;

	ret = regmap_read(regmap, rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &val);
	if (ret)
		return -EINVAL;

	if (!(val & SE_CMD_DFS_EN))
		return 0;

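	/*
	 * Rate changes with consumer writing a register in
	 * their own I/O region
	 */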
	init->flags |= CLK_GET_RATE_NOCACHE;
	init->ops = &clk_rcg2_dfs_ops;

	rcg->freq_tbl = NULL;

	return 0;
}

int qcom_cc_register_rcg_dfs(struct regmap *regmap,
			     const struct clk_rcg_dfs_data *rcgs, size_t len)
{
	int i, ret;

	for (i = 0; i < len; i++) {
		ret = clk_rcg2_enable_dfs(&rcgs[i], regmap);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs);

static int clk_rcg2_dp_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);
	unsigned long num, den;

	rational_best_approximation(parent_rate, rate,
				    GENMASK(rcg->mnd_width - 1, 0),
				    GENMASK(rcg->mnd_width - 1, 0), &den, &num);

	if (!num || !den)
		return -EINVAL;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	hid_div = cfg;
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}
	}

	f.pre_div = hid_div;
	f.pre_div >>= CFG_SRC_DIV_SHIFT;
	f.pre_div &= mask;

	if (num != den) {
		f.m = num;
		f.n = den;
	} else {
		f.m = 0;
		f.n = 0;
	}

	return clk_rcg2_configure(rcg, &f);
}

static int clk_rcg2_dp_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_dp_set_rate(hw, rate, parent_rate);
}

static int clk_rcg2_dp_determine_rate(struct clk_hw *hw,
				      struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long num, den;
	u64 tmp;

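	/* Parent rate is a fixed phy link rate */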
	rational_best_approximation(req->best_parent_rate, req->rate,
				    GENMASK(rcg->mnd_width - 1, 0),
				    GENMASK(rcg->mnd_width - 1, 0), &den, &num);

	if (!num || !den)
		return -EINVAL;

	tmp = req->best_parent_rate * num;
	do_div(tmp, den);
	req->rate = tmp;

	return 0;
}

const struct clk_ops clk_dp_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_rcg2_dp_set_rate,
	.set_rate_and_parent = clk_rcg2_dp_set_rate_and_parent,
	.determine_rate = clk_rcg2_dp_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_dp_ops);