// SPDX-License-Identifier: GPL-2.0-only
/*
 * lpass-cpu.c -- ALSA SoC CPU DAI driver for QTi LPASS
 */

0008 #include <linux/clk.h>
0009 #include <linux/kernel.h>
0010 #include <linux/module.h>
0011 #include <linux/of.h>
0012 #include <linux/of_device.h>
0013 #include <linux/platform_device.h>
0014 #include <sound/pcm.h>
0015 #include <sound/pcm_params.h>
0016 #include <linux/regmap.h>
0017 #include <sound/soc.h>
0018 #include <sound/soc-dai.h>
0019 #include "lpass-lpaif-reg.h"
0020 #include "lpass.h"
0021
0022 #define LPASS_CPU_MAX_MI2S_LINES 4
0023 #define LPASS_CPU_I2S_SD0_MASK BIT(0)
0024 #define LPASS_CPU_I2S_SD1_MASK BIT(1)
0025 #define LPASS_CPU_I2S_SD2_MASK BIT(2)
0026 #define LPASS_CPU_I2S_SD3_MASK BIT(3)
0027 #define LPASS_CPU_I2S_SD0_1_MASK GENMASK(1, 0)
0028 #define LPASS_CPU_I2S_SD2_3_MASK GENMASK(3, 2)
0029 #define LPASS_CPU_I2S_SD0_1_2_MASK GENMASK(2, 0)
0030 #define LPASS_CPU_I2S_SD0_1_2_3_MASK GENMASK(3, 0)
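
/*
 * Access-direction flags passed to the shared __lpass_*_regmap_accessible()
 * helpers so one register table can serve both the readable_reg and
 * writeable_reg regmap callbacks.
 */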
0031 #define LPASS_REG_READ 1
0032 #define LPASS_REG_WRITE 0
0033
/* Channel map exposed to userspace for quad-channel (QUAD01) playback */
0037 static struct snd_pcm_chmap_elem lpass_quad_chmaps[] = {
0038 { .channels = 4,
0039 .map = { SNDRV_CHMAP_FL, SNDRV_CHMAP_RL,
0040 SNDRV_CHMAP_FR, SNDRV_CHMAP_RR } },
0041 { }
0042 };
0043 static int lpass_cpu_init_i2sctl_bitfields(struct device *dev,
0044 struct lpaif_i2sctl *i2sctl, struct regmap *map)
0045 {
0046 struct lpass_data *drvdata = dev_get_drvdata(dev);
0047 struct lpass_variant *v = drvdata->variant;
0048
0049 i2sctl->loopback = devm_regmap_field_alloc(dev, map, v->loopback);
0050 i2sctl->spken = devm_regmap_field_alloc(dev, map, v->spken);
0051 i2sctl->spkmode = devm_regmap_field_alloc(dev, map, v->spkmode);
0052 i2sctl->spkmono = devm_regmap_field_alloc(dev, map, v->spkmono);
0053 i2sctl->micen = devm_regmap_field_alloc(dev, map, v->micen);
0054 i2sctl->micmode = devm_regmap_field_alloc(dev, map, v->micmode);
0055 i2sctl->micmono = devm_regmap_field_alloc(dev, map, v->micmono);
0056 i2sctl->wssrc = devm_regmap_field_alloc(dev, map, v->wssrc);
0057 i2sctl->bitwidth = devm_regmap_field_alloc(dev, map, v->bitwidth);
0058
0059 if (IS_ERR(i2sctl->loopback) || IS_ERR(i2sctl->spken) ||
0060 IS_ERR(i2sctl->spkmode) || IS_ERR(i2sctl->spkmono) ||
0061 IS_ERR(i2sctl->micen) || IS_ERR(i2sctl->micmode) ||
0062 IS_ERR(i2sctl->micmono) || IS_ERR(i2sctl->wssrc) ||
0063 IS_ERR(i2sctl->bitwidth))
0064 return -EINVAL;
0065
0066 return 0;
0067 }
0068
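/*
 * DAI op: set the MI2S OSR (oversample) clock to the rate requested by the
 * machine driver through snd_soc_dai_set_sysclk().
 */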
0069 static int lpass_cpu_daiops_set_sysclk(struct snd_soc_dai *dai, int clk_id,
0070 unsigned int freq, int dir)
0071 {
0072 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0073 int ret;
0074
0075 ret = clk_set_rate(drvdata->mi2s_osr_clk[dai->driver->id], freq);
0076 if (ret)
0077 dev_err(dai->dev, "error setting mi2s osrclk to %u: %d\n",
0078 freq, ret);
0079
0080 return ret;
0081 }
0082
0083 static int lpass_cpu_daiops_startup(struct snd_pcm_substream *substream,
0084 struct snd_soc_dai *dai)
0085 {
0086 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0087 int ret;
0088
0089 ret = clk_prepare_enable(drvdata->mi2s_osr_clk[dai->driver->id]);
0090 if (ret) {
0091 dev_err(dai->dev, "error in enabling mi2s osr clk: %d\n", ret);
0092 return ret;
0093 }
0094 ret = clk_prepare(drvdata->mi2s_bit_clk[dai->driver->id]);
0095 if (ret) {
dev_err(dai->dev, "error in preparing mi2s bit clk: %d\n", ret);
0097 clk_disable_unprepare(drvdata->mi2s_osr_clk[dai->driver->id]);
0098 return ret;
0099 }
0100 return 0;
0101 }
0102
0103 static void lpass_cpu_daiops_shutdown(struct snd_pcm_substream *substream,
0104 struct snd_soc_dai *dai)
0105 {
0106 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0107 struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
0108 unsigned int id = dai->driver->id;
0109
0110 clk_disable_unprepare(drvdata->mi2s_osr_clk[dai->driver->id]);
/*
 * Make sure the I2S port is disabled on shutdown as well; this is
 * harmless if the trigger stop/suspend path already disabled it.
 */
0116 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
0117 regmap_fields_write(i2sctl->spken, id, LPAIF_I2SCTL_SPKEN_DISABLE);
0118 else
0119 regmap_fields_write(i2sctl->micen, id, LPAIF_I2SCTL_MICEN_DISABLE);
0120
/*
 * The bit clock is only enabled once lpass_cpu_daiops_prepare() has run;
 * balance that clk_enable() here before unpreparing the clock.
 */
0126 if (drvdata->mi2s_was_prepared[dai->driver->id]) {
0127 drvdata->mi2s_was_prepared[dai->driver->id] = false;
0128 clk_disable(drvdata->mi2s_bit_clk[dai->driver->id]);
0129 }
0130
0131 clk_unprepare(drvdata->mi2s_bit_clk[dai->driver->id]);
0132 }
0133
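/*
 * hw_params: disable loopback, select the internal word-select source,
 * program bit width, SD-line mode and mono/stereo packing, then set the
 * MI2S bit clock to rate * bit width * 2, i.e. one stereo I2S frame per
 * LRCLK period (e.g. 48000 Hz * 16 bit * 2 = 1.536 MHz).
 */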
0134 static int lpass_cpu_daiops_hw_params(struct snd_pcm_substream *substream,
0135 struct snd_pcm_hw_params *params, struct snd_soc_dai *dai)
0136 {
0137 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0138 struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
0139 unsigned int id = dai->driver->id;
0140 snd_pcm_format_t format = params_format(params);
0141 unsigned int channels = params_channels(params);
0142 unsigned int rate = params_rate(params);
0143 unsigned int mode;
0144 unsigned int regval;
0145 int bitwidth, ret;
0146
0147 bitwidth = snd_pcm_format_width(format);
0148 if (bitwidth < 0) {
0149 dev_err(dai->dev, "invalid bit width given: %d\n", bitwidth);
0150 return bitwidth;
0151 }
0152
0153 ret = regmap_fields_write(i2sctl->loopback, id,
0154 LPAIF_I2SCTL_LOOPBACK_DISABLE);
0155 if (ret) {
0156 dev_err(dai->dev, "error updating loopback field: %d\n", ret);
0157 return ret;
0158 }
0159
0160 ret = regmap_fields_write(i2sctl->wssrc, id,
0161 LPAIF_I2SCTL_WSSRC_INTERNAL);
0162 if (ret) {
0163 dev_err(dai->dev, "error updating wssrc field: %d\n", ret);
0164 return ret;
0165 }
0166
0167 switch (bitwidth) {
0168 case 16:
0169 regval = LPAIF_I2SCTL_BITWIDTH_16;
0170 break;
0171 case 24:
0172 regval = LPAIF_I2SCTL_BITWIDTH_24;
0173 break;
0174 case 32:
0175 regval = LPAIF_I2SCTL_BITWIDTH_32;
0176 break;
0177 default:
0178 dev_err(dai->dev, "invalid bitwidth given: %d\n", bitwidth);
0179 return -EINVAL;
0180 }
0181
0182 ret = regmap_fields_write(i2sctl->bitwidth, id, regval);
0183 if (ret) {
0184 dev_err(dai->dev, "error updating bitwidth field: %d\n", ret);
0185 return ret;
0186 }
0187
0188 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
0189 mode = drvdata->mi2s_playback_sd_mode[id];
0190 else
0191 mode = drvdata->mi2s_capture_sd_mode[id];
0192
0193 if (!mode) {
0194 dev_err(dai->dev, "no line is assigned\n");
0195 return -EINVAL;
0196 }
0197
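/*
 * The DT may wire more SD lines than this stream needs; narrow the mode so
 * only the lines required for the requested channel count are used (e.g. an
 * 8-channel capable 4-line setup falls back to SD0 for mono/stereo).
 */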
0198 switch (channels) {
0199 case 1:
0200 case 2:
0201 switch (mode) {
0202 case LPAIF_I2SCTL_MODE_QUAD01:
0203 case LPAIF_I2SCTL_MODE_6CH:
0204 case LPAIF_I2SCTL_MODE_8CH:
0205 mode = LPAIF_I2SCTL_MODE_SD0;
0206 break;
0207 case LPAIF_I2SCTL_MODE_QUAD23:
0208 mode = LPAIF_I2SCTL_MODE_SD2;
0209 break;
0210 }
0211
0212 break;
0213 case 4:
0214 if (mode < LPAIF_I2SCTL_MODE_QUAD01) {
0215 dev_err(dai->dev, "cannot configure 4 channels with mode %d\n",
0216 mode);
0217 return -EINVAL;
0218 }
0219
0220 switch (mode) {
0221 case LPAIF_I2SCTL_MODE_6CH:
0222 case LPAIF_I2SCTL_MODE_8CH:
0223 mode = LPAIF_I2SCTL_MODE_QUAD01;
0224 break;
0225 }
0226 break;
0227 case 6:
0228 if (mode < LPAIF_I2SCTL_MODE_6CH) {
0229 dev_err(dai->dev, "cannot configure 6 channels with mode %d\n",
0230 mode);
0231 return -EINVAL;
0232 }
0233
0234 switch (mode) {
0235 case LPAIF_I2SCTL_MODE_8CH:
0236 mode = LPAIF_I2SCTL_MODE_6CH;
0237 break;
0238 }
0239 break;
0240 case 8:
0241 if (mode < LPAIF_I2SCTL_MODE_8CH) {
0242 dev_err(dai->dev, "cannot configure 8 channels with mode %d\n",
0243 mode);
0244 return -EINVAL;
0245 }
0246 break;
0247 default:
0248 dev_err(dai->dev, "invalid channels given: %u\n", channels);
0249 return -EINVAL;
0250 }
0251
0252 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
0253 ret = regmap_fields_write(i2sctl->spkmode, id,
0254 LPAIF_I2SCTL_SPKMODE(mode));
0255 if (ret) {
0256 dev_err(dai->dev, "error writing to i2sctl spkr mode: %d\n",
0257 ret);
0258 return ret;
0259 }
0260 if (channels >= 2)
0261 ret = regmap_fields_write(i2sctl->spkmono, id,
0262 LPAIF_I2SCTL_SPKMONO_STEREO);
0263 else
0264 ret = regmap_fields_write(i2sctl->spkmono, id,
0265 LPAIF_I2SCTL_SPKMONO_MONO);
0266 } else {
0267 ret = regmap_fields_write(i2sctl->micmode, id,
0268 LPAIF_I2SCTL_MICMODE(mode));
0269 if (ret) {
0270 dev_err(dai->dev, "error writing to i2sctl mic mode: %d\n",
0271 ret);
0272 return ret;
0273 }
0274 if (channels >= 2)
0275 ret = regmap_fields_write(i2sctl->micmono, id,
0276 LPAIF_I2SCTL_MICMONO_STEREO);
0277 else
0278 ret = regmap_fields_write(i2sctl->micmono, id,
0279 LPAIF_I2SCTL_MICMONO_MONO);
0280 }
0281
0282 if (ret) {
0283 dev_err(dai->dev, "error writing to i2sctl channels mode: %d\n",
0284 ret);
0285 return ret;
0286 }
0287
0288 ret = clk_set_rate(drvdata->mi2s_bit_clk[id],
0289 rate * bitwidth * 2);
0290 if (ret) {
0291 dev_err(dai->dev, "error setting mi2s bitclk to %u: %d\n",
0292 rate * bitwidth * 2, ret);
0293 return ret;
0294 }
0295
0296 return 0;
0297 }
0298
0299 static int lpass_cpu_daiops_trigger(struct snd_pcm_substream *substream,
0300 int cmd, struct snd_soc_dai *dai)
0301 {
0302 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0303 struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
0304 unsigned int id = dai->driver->id;
0305 int ret = -EINVAL;
0306
0307 switch (cmd) {
0308 case SNDRV_PCM_TRIGGER_START:
0309 case SNDRV_PCM_TRIGGER_RESUME:
0310 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
/*
 * On start/resume, make sure the I2S port (SPKEN for playback, MICEN for
 * capture) and the MI2S bit clock are enabled; the stop/suspend path
 * below turns both of them off again.
 */
0323 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
0324 ret = regmap_fields_write(i2sctl->spken, id,
0325 LPAIF_I2SCTL_SPKEN_ENABLE);
0326 } else {
0327 ret = regmap_fields_write(i2sctl->micen, id,
0328 LPAIF_I2SCTL_MICEN_ENABLE);
0329 }
0330 if (ret)
0331 dev_err(dai->dev, "error writing to i2sctl reg: %d\n",
0332 ret);
0333
0334 ret = clk_enable(drvdata->mi2s_bit_clk[id]);
0335 if (ret) {
0336 dev_err(dai->dev, "error in enabling mi2s bit clk: %d\n", ret);
0337 clk_disable(drvdata->mi2s_osr_clk[id]);
0338 return ret;
0339 }
0340 break;
0341 case SNDRV_PCM_TRIGGER_STOP:
0342 case SNDRV_PCM_TRIGGER_SUSPEND:
0343 case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
/*
 * On stop/suspend, disable the I2S port and gate the MI2S bit clock
 * until the stream is started or resumed again.
 */
0348 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
0349 ret = regmap_fields_write(i2sctl->spken, id,
0350 LPAIF_I2SCTL_SPKEN_DISABLE);
0351 } else {
0352 ret = regmap_fields_write(i2sctl->micen, id,
0353 LPAIF_I2SCTL_MICEN_DISABLE);
0354 }
0355 if (ret)
0356 dev_err(dai->dev, "error writing to i2sctl reg: %d\n",
0357 ret);
0358
0359 clk_disable(drvdata->mi2s_bit_clk[dai->driver->id]);
0360
0361 break;
0362 }
0363
0364 return ret;
0365 }
0366
0367 static int lpass_cpu_daiops_prepare(struct snd_pcm_substream *substream,
0368 struct snd_soc_dai *dai)
0369 {
0370 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0371 struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
0372 unsigned int id = dai->driver->id;
0373 int ret;
0374
/*
 * Enable the I2S port (SPKEN for playback, MICEN for capture) at prepare
 * time so the interface is already running when the stream is triggered.
 */
0381 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
0382 ret = regmap_fields_write(i2sctl->spken, id, LPAIF_I2SCTL_SPKEN_ENABLE);
0383 else
0384 ret = regmap_fields_write(i2sctl->micen, id, LPAIF_I2SCTL_MICEN_ENABLE);
0385
0386 if (ret) {
0387 dev_err(dai->dev, "error writing to i2sctl reg: %d\n", ret);
0388 return ret;
0389 }
0390
/*
 * Enable the bit clock here no more than once per prepared stream;
 * mi2s_was_prepared records it so shutdown() only balances a
 * clk_enable() that actually happened.
 */
0396 if (!drvdata->mi2s_was_prepared[dai->driver->id]) {
0397 ret = clk_enable(drvdata->mi2s_bit_clk[id]);
0398 if (ret) {
0399 dev_err(dai->dev, "error in enabling mi2s bit clk: %d\n", ret);
0400 return ret;
0401 }
0402 drvdata->mi2s_was_prepared[dai->driver->id] = true;
0403 }
0404 return 0;
0405 }
0406
0407 const struct snd_soc_dai_ops asoc_qcom_lpass_cpu_dai_ops = {
0408 .set_sysclk = lpass_cpu_daiops_set_sysclk,
0409 .startup = lpass_cpu_daiops_startup,
0410 .shutdown = lpass_cpu_daiops_shutdown,
0411 .hw_params = lpass_cpu_daiops_hw_params,
0412 .trigger = lpass_cpu_daiops_trigger,
0413 .prepare = lpass_cpu_daiops_prepare,
0414 };
0415 EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_dai_ops);
0416
0417 int lpass_cpu_pcm_new(struct snd_soc_pcm_runtime *rtd,
0418 struct snd_soc_dai *dai)
0419 {
0420 int ret;
0421 struct snd_soc_dai_driver *drv = dai->driver;
0422 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0423
0424 if (drvdata->mi2s_playback_sd_mode[dai->id] == LPAIF_I2SCTL_MODE_QUAD01) {
0425 ret = snd_pcm_add_chmap_ctls(rtd->pcm, SNDRV_PCM_STREAM_PLAYBACK,
0426 lpass_quad_chmaps, drv->playback.channels_max, 0,
0427 NULL);
0428 if (ret < 0)
0429 return ret;
0430 }
0431
0432 return 0;
0433 }
0434 EXPORT_SYMBOL_GPL(lpass_cpu_pcm_new);
0435
0436 int asoc_qcom_lpass_cpu_dai_probe(struct snd_soc_dai *dai)
0437 {
0438 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
0439 int ret;
0440
/* ensure the I2S port starts out disabled */
0442 ret = regmap_write(drvdata->lpaif_map,
0443 LPAIF_I2SCTL_REG(drvdata->variant, dai->driver->id), 0);
0444 if (ret)
0445 dev_err(dai->dev, "error writing to i2sctl reg: %d\n", ret);
0446
0447 return ret;
0448 }
0449 EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_dai_probe);
0450
0451 static int asoc_qcom_of_xlate_dai_name(struct snd_soc_component *component,
0452 const struct of_phandle_args *args,
0453 const char **dai_name)
0454 {
0455 struct lpass_data *drvdata = snd_soc_component_get_drvdata(component);
0456 struct lpass_variant *variant = drvdata->variant;
0457 int id = args->args[0];
0458 int ret = -EINVAL;
0459 int i;
0460
0461 for (i = 0; i < variant->num_dai; i++) {
0462 if (variant->dai_driver[i].id == id) {
0463 *dai_name = variant->dai_driver[i].name;
0464 ret = 0;
0465 break;
0466 }
0467 }
0468
0469 return ret;
0470 }
0471
0472 static const struct snd_soc_component_driver lpass_cpu_comp_driver = {
0473 .name = "lpass-cpu",
0474 .of_xlate_dai_name = asoc_qcom_of_xlate_dai_name,
0475 .legacy_dai_naming = 1,
0476 };
0477
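/*
 * regmap access tables for the LPAIF block: only the variant's I2S control,
 * IRQ and DMA registers are read/write, and the IRQ status/clear and DMA
 * "current address" registers are marked volatile because the hardware
 * updates them behind the register cache.
 */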
0478 static bool lpass_cpu_regmap_writeable(struct device *dev, unsigned int reg)
0479 {
0480 struct lpass_data *drvdata = dev_get_drvdata(dev);
0481 struct lpass_variant *v = drvdata->variant;
0482 int i;
0483
0484 for (i = 0; i < v->i2s_ports; ++i)
0485 if (reg == LPAIF_I2SCTL_REG(v, i))
0486 return true;
0487
0488 for (i = 0; i < v->irq_ports; ++i) {
0489 if (reg == LPAIF_IRQEN_REG(v, i))
0490 return true;
0491 if (reg == LPAIF_IRQCLEAR_REG(v, i))
0492 return true;
0493 }
0494
0495 for (i = 0; i < v->rdma_channels; ++i) {
0496 if (reg == LPAIF_RDMACTL_REG(v, i))
0497 return true;
0498 if (reg == LPAIF_RDMABASE_REG(v, i))
0499 return true;
0500 if (reg == LPAIF_RDMABUFF_REG(v, i))
0501 return true;
0502 if (reg == LPAIF_RDMAPER_REG(v, i))
0503 return true;
0504 }
0505
0506 for (i = 0; i < v->wrdma_channels; ++i) {
0507 if (reg == LPAIF_WRDMACTL_REG(v, i + v->wrdma_channel_start))
0508 return true;
0509 if (reg == LPAIF_WRDMABASE_REG(v, i + v->wrdma_channel_start))
0510 return true;
0511 if (reg == LPAIF_WRDMABUFF_REG(v, i + v->wrdma_channel_start))
0512 return true;
0513 if (reg == LPAIF_WRDMAPER_REG(v, i + v->wrdma_channel_start))
0514 return true;
0515 }
0516
0517 return false;
0518 }
0519
0520 static bool lpass_cpu_regmap_readable(struct device *dev, unsigned int reg)
0521 {
0522 struct lpass_data *drvdata = dev_get_drvdata(dev);
0523 struct lpass_variant *v = drvdata->variant;
0524 int i;
0525
0526 for (i = 0; i < v->i2s_ports; ++i)
0527 if (reg == LPAIF_I2SCTL_REG(v, i))
0528 return true;
0529
0530 for (i = 0; i < v->irq_ports; ++i) {
0531 if (reg == LPAIF_IRQCLEAR_REG(v, i))
0532 return true;
0533 if (reg == LPAIF_IRQEN_REG(v, i))
0534 return true;
0535 if (reg == LPAIF_IRQSTAT_REG(v, i))
0536 return true;
0537 }
0538
0539 for (i = 0; i < v->rdma_channels; ++i) {
0540 if (reg == LPAIF_RDMACTL_REG(v, i))
0541 return true;
0542 if (reg == LPAIF_RDMABASE_REG(v, i))
0543 return true;
0544 if (reg == LPAIF_RDMABUFF_REG(v, i))
0545 return true;
0546 if (reg == LPAIF_RDMACURR_REG(v, i))
0547 return true;
0548 if (reg == LPAIF_RDMAPER_REG(v, i))
0549 return true;
0550 }
0551
0552 for (i = 0; i < v->wrdma_channels; ++i) {
0553 if (reg == LPAIF_WRDMACTL_REG(v, i + v->wrdma_channel_start))
0554 return true;
0555 if (reg == LPAIF_WRDMABASE_REG(v, i + v->wrdma_channel_start))
0556 return true;
0557 if (reg == LPAIF_WRDMABUFF_REG(v, i + v->wrdma_channel_start))
0558 return true;
0559 if (reg == LPAIF_WRDMACURR_REG(v, i + v->wrdma_channel_start))
0560 return true;
0561 if (reg == LPAIF_WRDMAPER_REG(v, i + v->wrdma_channel_start))
0562 return true;
0563 }
0564
0565 return false;
0566 }
0567
0568 static bool lpass_cpu_regmap_volatile(struct device *dev, unsigned int reg)
0569 {
0570 struct lpass_data *drvdata = dev_get_drvdata(dev);
0571 struct lpass_variant *v = drvdata->variant;
0572 int i;
0573
0574 for (i = 0; i < v->irq_ports; ++i) {
0575 if (reg == LPAIF_IRQCLEAR_REG(v, i))
0576 return true;
0577 if (reg == LPAIF_IRQSTAT_REG(v, i))
0578 return true;
0579 }
0580
0581 for (i = 0; i < v->rdma_channels; ++i)
0582 if (reg == LPAIF_RDMACURR_REG(v, i))
0583 return true;
0584
0585 for (i = 0; i < v->wrdma_channels; ++i)
0586 if (reg == LPAIF_WRDMACURR_REG(v, i + v->wrdma_channel_start))
0587 return true;
0588
0589 return false;
0590 }
0591
0592 static struct regmap_config lpass_cpu_regmap_config = {
0593 .name = "lpass_cpu",
0594 .reg_bits = 32,
0595 .reg_stride = 4,
0596 .val_bits = 32,
0597 .writeable_reg = lpass_cpu_regmap_writeable,
0598 .readable_reg = lpass_cpu_regmap_readable,
0599 .volatile_reg = lpass_cpu_regmap_volatile,
0600 .cache_type = REGCACHE_FLAT,
0601 };
0602
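/*
 * Allocate regmap_fields for the HDMI TX control, vbit, parity, DP metadata,
 * sstream and per-channel DMA controls described by the variant data.
 */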
0603 static int lpass_hdmi_init_bitfields(struct device *dev, struct regmap *map)
0604 {
0605 struct lpass_data *drvdata = dev_get_drvdata(dev);
0606 struct lpass_variant *v = drvdata->variant;
0607 unsigned int i;
0608 struct lpass_hdmi_tx_ctl *tx_ctl;
0609 struct regmap_field *legacy_en;
0610 struct lpass_vbit_ctrl *vbit_ctl;
0611 struct regmap_field *tx_parity;
0612 struct lpass_dp_metadata_ctl *meta_ctl;
0613 struct lpass_sstream_ctl *sstream_ctl;
0614 struct regmap_field *ch_msb;
0615 struct regmap_field *ch_lsb;
0616 struct lpass_hdmitx_dmactl *tx_dmactl;
0617 int rval;
0618
0619 tx_ctl = devm_kzalloc(dev, sizeof(*tx_ctl), GFP_KERNEL);
0620 if (!tx_ctl)
0621 return -ENOMEM;
0622
0623 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->soft_reset, tx_ctl->soft_reset);
0624 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->force_reset, tx_ctl->force_reset);
0625 drvdata->tx_ctl = tx_ctl;
0626
0627 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->legacy_en, legacy_en);
0628 drvdata->hdmitx_legacy_en = legacy_en;
0629
0630 vbit_ctl = devm_kzalloc(dev, sizeof(*vbit_ctl), GFP_KERNEL);
0631 if (!vbit_ctl)
0632 return -ENOMEM;
0633
0634 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->replace_vbit, vbit_ctl->replace_vbit);
0635 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->vbit_stream, vbit_ctl->vbit_stream);
0636 drvdata->vbit_ctl = vbit_ctl;
0637
0638
0639 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->calc_en, tx_parity);
0640 drvdata->hdmitx_parity_calc_en = tx_parity;
0641
0642 meta_ctl = devm_kzalloc(dev, sizeof(*meta_ctl), GFP_KERNEL);
0643 if (!meta_ctl)
0644 return -ENOMEM;
0645
0646 rval = devm_regmap_field_bulk_alloc(dev, map, &meta_ctl->mute, &v->mute, 7);
0647 if (rval)
0648 return rval;
0649 drvdata->meta_ctl = meta_ctl;
0650
0651 sstream_ctl = devm_kzalloc(dev, sizeof(*sstream_ctl), GFP_KERNEL);
0652 if (!sstream_ctl)
0653 return -ENOMEM;
0654
0655 rval = devm_regmap_field_bulk_alloc(dev, map, &sstream_ctl->sstream_en, &v->sstream_en, 9);
0656 if (rval)
0657 return rval;
0658
0659 drvdata->sstream_ctl = sstream_ctl;
0660
0661 for (i = 0; i < LPASS_MAX_HDMI_DMA_CHANNELS; i++) {
0662 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->msb_bits, ch_msb);
0663 drvdata->hdmitx_ch_msb[i] = ch_msb;
0664
0665 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->lsb_bits, ch_lsb);
0666 drvdata->hdmitx_ch_lsb[i] = ch_lsb;
0667
0668 tx_dmactl = devm_kzalloc(dev, sizeof(*tx_dmactl), GFP_KERNEL);
0669 if (!tx_dmactl)
0670 return -ENOMEM;
0671
0672 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->use_hw_chs, tx_dmactl->use_hw_chs);
0673 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->use_hw_usr, tx_dmactl->use_hw_usr);
0674 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->hw_chs_sel, tx_dmactl->hw_chs_sel);
0675 QCOM_REGMAP_FIELD_ALLOC(dev, map, v->hw_usr_sel, tx_dmactl->hw_usr_sel);
0676 drvdata->hdmi_tx_dmactl[i] = tx_dmactl;
0677 }
0678 return 0;
0679 }
0680
0681 static bool lpass_hdmi_regmap_writeable(struct device *dev, unsigned int reg)
0682 {
0683 struct lpass_data *drvdata = dev_get_drvdata(dev);
0684 struct lpass_variant *v = drvdata->variant;
0685 int i;
0686
0687 if (reg == LPASS_HDMI_TX_CTL_ADDR(v))
0688 return true;
0689 if (reg == LPASS_HDMI_TX_LEGACY_ADDR(v))
0690 return true;
0691 if (reg == LPASS_HDMI_TX_VBIT_CTL_ADDR(v))
0692 return true;
0693 if (reg == LPASS_HDMI_TX_PARITY_ADDR(v))
0694 return true;
0695 if (reg == LPASS_HDMI_TX_DP_ADDR(v))
0696 return true;
0697 if (reg == LPASS_HDMI_TX_SSTREAM_ADDR(v))
0698 return true;
0699 if (reg == LPASS_HDMITX_APP_IRQEN_REG(v))
0700 return true;
0701 if (reg == LPASS_HDMITX_APP_IRQCLEAR_REG(v))
0702 return true;
0703
0704 for (i = 0; i < v->hdmi_rdma_channels; i++) {
0705 if (reg == LPASS_HDMI_TX_CH_LSB_ADDR(v, i))
0706 return true;
0707 if (reg == LPASS_HDMI_TX_CH_MSB_ADDR(v, i))
0708 return true;
0709 if (reg == LPASS_HDMI_TX_DMA_ADDR(v, i))
0710 return true;
0711 }
0712
0713 for (i = 0; i < v->hdmi_rdma_channels; ++i) {
0714 if (reg == LPAIF_HDMI_RDMACTL_REG(v, i))
0715 return true;
0716 if (reg == LPAIF_HDMI_RDMABASE_REG(v, i))
0717 return true;
0718 if (reg == LPAIF_HDMI_RDMABUFF_REG(v, i))
0719 return true;
0720 if (reg == LPAIF_HDMI_RDMAPER_REG(v, i))
0721 return true;
0722 }
0723 return false;
0724 }
0725
0726 static bool lpass_hdmi_regmap_readable(struct device *dev, unsigned int reg)
0727 {
0728 struct lpass_data *drvdata = dev_get_drvdata(dev);
0729 struct lpass_variant *v = drvdata->variant;
0730 int i;
0731
0732 if (reg == LPASS_HDMI_TX_CTL_ADDR(v))
0733 return true;
0734 if (reg == LPASS_HDMI_TX_LEGACY_ADDR(v))
0735 return true;
0736 if (reg == LPASS_HDMI_TX_VBIT_CTL_ADDR(v))
0737 return true;
0738
0739 for (i = 0; i < v->hdmi_rdma_channels; i++) {
0740 if (reg == LPASS_HDMI_TX_CH_LSB_ADDR(v, i))
0741 return true;
0742 if (reg == LPASS_HDMI_TX_CH_MSB_ADDR(v, i))
0743 return true;
0744 if (reg == LPASS_HDMI_TX_DMA_ADDR(v, i))
0745 return true;
0746 }
0747
0748 if (reg == LPASS_HDMI_TX_PARITY_ADDR(v))
0749 return true;
0750 if (reg == LPASS_HDMI_TX_DP_ADDR(v))
0751 return true;
0752 if (reg == LPASS_HDMI_TX_SSTREAM_ADDR(v))
0753 return true;
0754 if (reg == LPASS_HDMITX_APP_IRQEN_REG(v))
0755 return true;
0756 if (reg == LPASS_HDMITX_APP_IRQSTAT_REG(v))
0757 return true;
0758
0759 for (i = 0; i < v->hdmi_rdma_channels; ++i) {
0760 if (reg == LPAIF_HDMI_RDMACTL_REG(v, i))
0761 return true;
0762 if (reg == LPAIF_HDMI_RDMABASE_REG(v, i))
0763 return true;
0764 if (reg == LPAIF_HDMI_RDMABUFF_REG(v, i))
0765 return true;
0766 if (reg == LPAIF_HDMI_RDMAPER_REG(v, i))
0767 return true;
0768 if (reg == LPAIF_HDMI_RDMACURR_REG(v, i))
0769 return true;
0770 }
0771
0772 return false;
0773 }
0774
0775 static bool lpass_hdmi_regmap_volatile(struct device *dev, unsigned int reg)
0776 {
0777 struct lpass_data *drvdata = dev_get_drvdata(dev);
0778 struct lpass_variant *v = drvdata->variant;
0779 int i;
0780
0781 if (reg == LPASS_HDMITX_APP_IRQSTAT_REG(v))
0782 return true;
0783 if (reg == LPASS_HDMI_TX_LEGACY_ADDR(v))
0784 return true;
0785
0786 for (i = 0; i < v->hdmi_rdma_channels; ++i) {
0787 if (reg == LPAIF_HDMI_RDMACURR_REG(v, i))
0788 return true;
0789 }
0790 return false;
0791 }
0792
0793 static struct regmap_config lpass_hdmi_regmap_config = {
0794 .name = "lpass_hdmi",
0795 .reg_bits = 32,
0796 .reg_stride = 4,
0797 .val_bits = 32,
0798 .writeable_reg = lpass_hdmi_regmap_writeable,
0799 .readable_reg = lpass_hdmi_regmap_readable,
0800 .volatile_reg = lpass_hdmi_regmap_volatile,
0801 .cache_type = REGCACHE_FLAT,
0802 };
0803
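/*
 * Shared access table for the codec RX/TX DMA register block; rw selects
 * whether the read-only DMA "current address" registers are included, so one
 * table serves both the readable_reg and writeable_reg callbacks.
 */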
0804 static bool __lpass_rxtx_regmap_accessible(struct device *dev, unsigned int reg, bool rw)
0805 {
0806 struct lpass_data *drvdata = dev_get_drvdata(dev);
0807 struct lpass_variant *v = drvdata->variant;
0808 int i;
0809
0810 for (i = 0; i < v->rxtx_irq_ports; ++i) {
0811 if (reg == LPAIF_RXTX_IRQCLEAR_REG(v, i))
0812 return true;
0813 if (reg == LPAIF_RXTX_IRQEN_REG(v, i))
0814 return true;
0815 if (reg == LPAIF_RXTX_IRQSTAT_REG(v, i))
0816 return true;
0817 }
0818
0819 for (i = 0; i < v->rxtx_rdma_channels; ++i) {
0820 if (reg == LPAIF_CDC_RXTX_RDMACTL_REG(v, i, LPASS_CDC_DMA_RX0))
0821 return true;
0822 if (reg == LPAIF_CDC_RXTX_RDMABASE_REG(v, i, LPASS_CDC_DMA_RX0))
0823 return true;
0824 if (reg == LPAIF_CDC_RXTX_RDMABUFF_REG(v, i, LPASS_CDC_DMA_RX0))
0825 return true;
0826 if (rw == LPASS_REG_READ) {
0827 if (reg == LPAIF_CDC_RXTX_RDMACURR_REG(v, i, LPASS_CDC_DMA_RX0))
0828 return true;
0829 }
0830 if (reg == LPAIF_CDC_RXTX_RDMAPER_REG(v, i, LPASS_CDC_DMA_RX0))
0831 return true;
0832 if (reg == LPAIF_CDC_RXTX_RDMA_INTF_REG(v, i, LPASS_CDC_DMA_RX0))
0833 return true;
0834 }
0835
0836 for (i = 0; i < v->rxtx_wrdma_channels; ++i) {
0837 if (reg == LPAIF_CDC_RXTX_WRDMACTL_REG(v, i + v->rxtx_wrdma_channel_start,
0838 LPASS_CDC_DMA_TX3))
0839 return true;
0840 if (reg == LPAIF_CDC_RXTX_WRDMABASE_REG(v, i + v->rxtx_wrdma_channel_start,
0841 LPASS_CDC_DMA_TX3))
0842 return true;
0843 if (reg == LPAIF_CDC_RXTX_WRDMABUFF_REG(v, i + v->rxtx_wrdma_channel_start,
0844 LPASS_CDC_DMA_TX3))
0845 return true;
0846 if (rw == LPASS_REG_READ) {
if (reg == LPAIF_CDC_RXTX_WRDMACURR_REG(v, i + v->rxtx_wrdma_channel_start,
LPASS_CDC_DMA_TX3))
0848 return true;
0849 }
0850 if (reg == LPAIF_CDC_RXTX_WRDMAPER_REG(v, i + v->rxtx_wrdma_channel_start,
0851 LPASS_CDC_DMA_TX3))
0852 return true;
0853 if (reg == LPAIF_CDC_RXTX_WRDMA_INTF_REG(v, i + v->rxtx_wrdma_channel_start,
0854 LPASS_CDC_DMA_TX3))
0855 return true;
0856 }
0857 return false;
0858 }
0859
0860 static bool lpass_rxtx_regmap_writeable(struct device *dev, unsigned int reg)
0861 {
0862 return __lpass_rxtx_regmap_accessible(dev, reg, LPASS_REG_WRITE);
0863 }
0864
0865 static bool lpass_rxtx_regmap_readable(struct device *dev, unsigned int reg)
0866 {
0867 return __lpass_rxtx_regmap_accessible(dev, reg, LPASS_REG_READ);
0868 }
0869
0870 static bool lpass_rxtx_regmap_volatile(struct device *dev, unsigned int reg)
0871 {
0872 struct lpass_data *drvdata = dev_get_drvdata(dev);
0873 struct lpass_variant *v = drvdata->variant;
0874 int i;
0875
0876 for (i = 0; i < v->rxtx_irq_ports; ++i) {
0877 if (reg == LPAIF_RXTX_IRQCLEAR_REG(v, i))
0878 return true;
0879 if (reg == LPAIF_RXTX_IRQSTAT_REG(v, i))
0880 return true;
0881 }
0882
0883 for (i = 0; i < v->rxtx_rdma_channels; ++i)
0884 if (reg == LPAIF_CDC_RXTX_RDMACURR_REG(v, i, LPASS_CDC_DMA_RX0))
0885 return true;
0886
0887 for (i = 0; i < v->rxtx_wrdma_channels; ++i)
0888 if (reg == LPAIF_CDC_RXTX_WRDMACURR_REG(v, i + v->rxtx_wrdma_channel_start,
0889 LPASS_CDC_DMA_TX3))
0890 return true;
0891
0892 return false;
0893 }
0894
0895 static bool __lpass_va_regmap_accessible(struct device *dev, unsigned int reg, bool rw)
0896 {
0897 struct lpass_data *drvdata = dev_get_drvdata(dev);
0898 struct lpass_variant *v = drvdata->variant;
0899 int i;
0900
0901 for (i = 0; i < v->va_irq_ports; ++i) {
0902 if (reg == LPAIF_VA_IRQCLEAR_REG(v, i))
0903 return true;
0904 if (reg == LPAIF_VA_IRQEN_REG(v, i))
0905 return true;
0906 if (reg == LPAIF_VA_IRQSTAT_REG(v, i))
0907 return true;
0908 }
0909
0910 for (i = 0; i < v->va_wrdma_channels; ++i) {
0911 if (reg == LPAIF_CDC_VA_WRDMACTL_REG(v, i + v->va_wrdma_channel_start,
0912 LPASS_CDC_DMA_VA_TX0))
0913 return true;
0914 if (reg == LPAIF_CDC_VA_WRDMABASE_REG(v, i + v->va_wrdma_channel_start,
0915 LPASS_CDC_DMA_VA_TX0))
0916 return true;
0917 if (reg == LPAIF_CDC_VA_WRDMABUFF_REG(v, i + v->va_wrdma_channel_start,
0918 LPASS_CDC_DMA_VA_TX0))
0919 return true;
0920 if (rw == LPASS_REG_READ) {
0921 if (reg == LPAIF_CDC_VA_WRDMACURR_REG(v, i + v->va_wrdma_channel_start,
0922 LPASS_CDC_DMA_VA_TX0))
0923 return true;
0924 }
0925 if (reg == LPAIF_CDC_VA_WRDMAPER_REG(v, i + v->va_wrdma_channel_start,
0926 LPASS_CDC_DMA_VA_TX0))
0927 return true;
0928 if (reg == LPAIF_CDC_VA_WRDMA_INTF_REG(v, i + v->va_wrdma_channel_start,
0929 LPASS_CDC_DMA_VA_TX0))
0930 return true;
0931 }
0932 return false;
0933 }
0934
0935 static bool lpass_va_regmap_writeable(struct device *dev, unsigned int reg)
0936 {
0937 return __lpass_va_regmap_accessible(dev, reg, LPASS_REG_WRITE);
0938 }
0939
0940 static bool lpass_va_regmap_readable(struct device *dev, unsigned int reg)
0941 {
0942 return __lpass_va_regmap_accessible(dev, reg, LPASS_REG_READ);
0943 }
0944
0945 static bool lpass_va_regmap_volatile(struct device *dev, unsigned int reg)
0946 {
0947 struct lpass_data *drvdata = dev_get_drvdata(dev);
0948 struct lpass_variant *v = drvdata->variant;
0949 int i;
0950
0951 for (i = 0; i < v->va_irq_ports; ++i) {
0952 if (reg == LPAIF_VA_IRQCLEAR_REG(v, i))
0953 return true;
0954 if (reg == LPAIF_VA_IRQSTAT_REG(v, i))
0955 return true;
0956 }
0957
0958 for (i = 0; i < v->va_wrdma_channels; ++i) {
0959 if (reg == LPAIF_CDC_VA_WRDMACURR_REG(v, i + v->va_wrdma_channel_start,
0960 LPASS_CDC_DMA_VA_TX0))
0961 return true;
0962 }
0963
0964 return false;
0965 }
0966
0967 static struct regmap_config lpass_rxtx_regmap_config = {
0968 .reg_bits = 32,
0969 .reg_stride = 4,
0970 .val_bits = 32,
0971 .writeable_reg = lpass_rxtx_regmap_writeable,
0972 .readable_reg = lpass_rxtx_regmap_readable,
0973 .volatile_reg = lpass_rxtx_regmap_volatile,
0974 .cache_type = REGCACHE_FLAT,
0975 };
0976
0977 static struct regmap_config lpass_va_regmap_config = {
0978 .reg_bits = 32,
0979 .reg_stride = 4,
0980 .val_bits = 32,
0981 .writeable_reg = lpass_va_regmap_writeable,
0982 .readable_reg = lpass_va_regmap_readable,
0983 .volatile_reg = lpass_va_regmap_volatile,
0984 .cache_type = REGCACHE_FLAT,
0985 };
0986
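/*
 * Translate a "qcom,playback-sd-lines"/"qcom,capture-sd-lines" DT property
 * into an I2SCTL SD-line mode. A hypothetical DAI subnode using SD lines 0
 * and 1 for quad playback might look like:
 *
 *	dai@0 {
 *		reg = <0>;
 *		qcom,playback-sd-lines = <0 1>;
 *	};
 *
 * Unsupported line combinations map to LPAIF_I2SCTL_MODE_NONE.
 */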
0987 static unsigned int of_lpass_cpu_parse_sd_lines(struct device *dev,
0988 struct device_node *node,
0989 const char *name)
0990 {
0991 unsigned int lines[LPASS_CPU_MAX_MI2S_LINES];
0992 unsigned int sd_line_mask = 0;
0993 int num_lines, i;
0994
0995 num_lines = of_property_read_variable_u32_array(node, name, lines, 0,
0996 LPASS_CPU_MAX_MI2S_LINES);
0997 if (num_lines < 0)
0998 return LPAIF_I2SCTL_MODE_NONE;
0999
1000 for (i = 0; i < num_lines; i++)
1001 sd_line_mask |= BIT(lines[i]);
1002
1003 switch (sd_line_mask) {
1004 case LPASS_CPU_I2S_SD0_MASK:
1005 return LPAIF_I2SCTL_MODE_SD0;
1006 case LPASS_CPU_I2S_SD1_MASK:
1007 return LPAIF_I2SCTL_MODE_SD1;
1008 case LPASS_CPU_I2S_SD2_MASK:
1009 return LPAIF_I2SCTL_MODE_SD2;
1010 case LPASS_CPU_I2S_SD3_MASK:
1011 return LPAIF_I2SCTL_MODE_SD3;
1012 case LPASS_CPU_I2S_SD0_1_MASK:
1013 return LPAIF_I2SCTL_MODE_QUAD01;
1014 case LPASS_CPU_I2S_SD2_3_MASK:
1015 return LPAIF_I2SCTL_MODE_QUAD23;
1016 case LPASS_CPU_I2S_SD0_1_2_MASK:
1017 return LPAIF_I2SCTL_MODE_6CH;
1018 case LPASS_CPU_I2S_SD0_1_2_3_MASK:
1019 return LPAIF_I2SCTL_MODE_8CH;
1020 default:
1021 dev_err(dev, "Unsupported SD line mask: %#x\n", sd_line_mask);
1022 return LPAIF_I2SCTL_MODE_NONE;
1023 }
1024 }
1025
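/*
 * Walk the DAI subnodes: the DisplayPort and CDC DMA ports just set feature
 * flags, while MI2S ports get their SD-line modes from the DT properties.
 */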
1026 static void of_lpass_cpu_parse_dai_data(struct device *dev,
1027 struct lpass_data *data)
1028 {
1029 struct device_node *node;
1030 int ret, id;
1031
/* Allow all channels by default for backwards compatibility */
1033 for (id = 0; id < data->variant->num_dai; id++) {
1034 data->mi2s_playback_sd_mode[id] = LPAIF_I2SCTL_MODE_8CH;
1035 data->mi2s_capture_sd_mode[id] = LPAIF_I2SCTL_MODE_8CH;
1036 }
1037
1038 for_each_child_of_node(dev->of_node, node) {
1039 ret = of_property_read_u32(node, "reg", &id);
1040 if (ret || id < 0) {
1041 dev_err(dev, "valid dai id not found: %d\n", ret);
1042 continue;
1043 }
1044 if (id == LPASS_DP_RX) {
1045 data->hdmi_port_enable = 1;
1046 } else if (is_cdc_dma_port(id)) {
1047 data->codec_dma_enable = 1;
1048 } else {
1049 data->mi2s_playback_sd_mode[id] =
1050 of_lpass_cpu_parse_sd_lines(dev, node,
1051 "qcom,playback-sd-lines");
1052 data->mi2s_capture_sd_mode[id] =
1053 of_lpass_cpu_parse_sd_lines(dev, node,
1054 "qcom,capture-sd-lines");
1055 }
1056 }
1057 }
1058
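/* Memory clocks needed only when a codec (CDC) DMA port is in use */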
1059 static int of_lpass_cdc_dma_clks_parse(struct device *dev,
1060 struct lpass_data *data)
1061 {
1062 data->codec_mem0 = devm_clk_get(dev, "audio_cc_codec_mem0");
1063 if (IS_ERR(data->codec_mem0))
1064 return PTR_ERR(data->codec_mem0);
1065
1066 data->codec_mem1 = devm_clk_get(dev, "audio_cc_codec_mem1");
1067 if (IS_ERR(data->codec_mem1))
1068 return PTR_ERR(data->codec_mem1);
1069
1070 data->codec_mem2 = devm_clk_get(dev, "audio_cc_codec_mem2");
1071 if (IS_ERR(data->codec_mem2))
1072 return PTR_ERR(data->codec_mem2);
1073
1074 data->va_mem0 = devm_clk_get(dev, "aon_cc_va_mem0");
1075 if (IS_ERR(data->va_mem0))
1076 return PTR_ERR(data->va_mem0);
1077
1078 return 0;
1079 }
1080
1081 int asoc_qcom_lpass_cpu_platform_probe(struct platform_device *pdev)
1082 {
1083 struct lpass_data *drvdata;
1084 struct device_node *dsp_of_node;
1085 struct resource *res;
1086 struct lpass_variant *variant;
1087 struct device *dev = &pdev->dev;
1088 const struct of_device_id *match;
1089 int ret, i, dai_id;
1090
1091 dsp_of_node = of_parse_phandle(pdev->dev.of_node, "qcom,adsp", 0);
1092 if (dsp_of_node) {
1093 dev_err(dev, "DSP exists and holds audio resources\n");
1094 of_node_put(dsp_of_node);
1095 return -EBUSY;
1096 }
1097
1098 drvdata = devm_kzalloc(dev, sizeof(struct lpass_data), GFP_KERNEL);
1099 if (!drvdata)
1100 return -ENOMEM;
1101 platform_set_drvdata(pdev, drvdata);
1102
1103 match = of_match_device(dev->driver->of_match_table, dev);
1104 if (!match || !match->data)
1105 return -EINVAL;
1106
1107 if (of_device_is_compatible(dev->of_node, "qcom,lpass-cpu-apq8016")) {
1108 dev_warn(dev, "%s compatible is deprecated\n",
1109 match->compatible);
1110 }
1111
1112 drvdata->variant = (struct lpass_variant *)match->data;
1113 variant = drvdata->variant;
1114
1115 of_lpass_cpu_parse_dai_data(dev, drvdata);
1116
1117 if (drvdata->codec_dma_enable) {
1118 drvdata->rxtx_lpaif =
1119 devm_platform_ioremap_resource_byname(pdev, "lpass-rxtx-lpaif");
1120 if (IS_ERR(drvdata->rxtx_lpaif))
1121 return PTR_ERR(drvdata->rxtx_lpaif);
1122
1123 drvdata->va_lpaif = devm_platform_ioremap_resource_byname(pdev, "lpass-va-lpaif");
1124 if (IS_ERR(drvdata->va_lpaif))
1125 return PTR_ERR(drvdata->va_lpaif);
1126
1127 lpass_rxtx_regmap_config.max_register = LPAIF_CDC_RXTX_WRDMAPER_REG(variant,
1128 variant->rxtx_wrdma_channels +
1129 variant->rxtx_wrdma_channel_start, LPASS_CDC_DMA_TX3);
1130
1131 drvdata->rxtx_lpaif_map = devm_regmap_init_mmio(dev, drvdata->rxtx_lpaif,
1132 &lpass_rxtx_regmap_config);
1133 if (IS_ERR(drvdata->rxtx_lpaif_map))
1134 return PTR_ERR(drvdata->rxtx_lpaif_map);
1135
1136 lpass_va_regmap_config.max_register = LPAIF_CDC_VA_WRDMAPER_REG(variant,
1137 variant->va_wrdma_channels +
1138 variant->va_wrdma_channel_start, LPASS_CDC_DMA_VA_TX0);
1139
1140 drvdata->va_lpaif_map = devm_regmap_init_mmio(dev, drvdata->va_lpaif,
1141 &lpass_va_regmap_config);
1142 if (IS_ERR(drvdata->va_lpaif_map))
1143 return PTR_ERR(drvdata->va_lpaif_map);
1144
1145 ret = of_lpass_cdc_dma_clks_parse(dev, drvdata);
1146 if (ret) {
1147 dev_err(dev, "failed to get cdc dma clocks %d\n", ret);
1148 return ret;
1149 }
1150
res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "lpass-rxtx-cdc-dma-lpm");
if (!res)
return -ENOENT;
drvdata->rxtx_cdc_dma_lpm_buf = res->start;

res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "lpass-va-cdc-dma-lpm");
if (!res)
return -ENOENT;
drvdata->va_cdc_dma_lpm_buf = res->start;
1156 }
1157
1158 drvdata->lpaif = devm_platform_ioremap_resource_byname(pdev, "lpass-lpaif");
1159 if (IS_ERR(drvdata->lpaif))
1160 return PTR_ERR(drvdata->lpaif);
1161
1162 lpass_cpu_regmap_config.max_register = LPAIF_WRDMAPER_REG(variant,
1163 variant->wrdma_channels +
1164 variant->wrdma_channel_start);
1165
1166 drvdata->lpaif_map = devm_regmap_init_mmio(dev, drvdata->lpaif,
1167 &lpass_cpu_regmap_config);
1168 if (IS_ERR(drvdata->lpaif_map)) {
1169 dev_err(dev, "error initializing regmap: %ld\n",
1170 PTR_ERR(drvdata->lpaif_map));
1171 return PTR_ERR(drvdata->lpaif_map);
1172 }
1173
1174 if (drvdata->hdmi_port_enable) {
1175 drvdata->hdmiif = devm_platform_ioremap_resource_byname(pdev, "lpass-hdmiif");
1176 if (IS_ERR(drvdata->hdmiif))
1177 return PTR_ERR(drvdata->hdmiif);
1178
1179 lpass_hdmi_regmap_config.max_register = LPAIF_HDMI_RDMAPER_REG(variant,
1180 variant->hdmi_rdma_channels - 1);
1181 drvdata->hdmiif_map = devm_regmap_init_mmio(dev, drvdata->hdmiif,
1182 &lpass_hdmi_regmap_config);
1183 if (IS_ERR(drvdata->hdmiif_map)) {
1184 dev_err(dev, "error initializing regmap: %ld\n",
1185 PTR_ERR(drvdata->hdmiif_map));
1186 return PTR_ERR(drvdata->hdmiif_map);
1187 }
1188 }
1189
1190 if (variant->init) {
1191 ret = variant->init(pdev);
1192 if (ret) {
1193 dev_err(dev, "error initializing variant: %d\n", ret);
1194 return ret;
1195 }
1196 }
1197
1198 for (i = 0; i < variant->num_dai; i++) {
1199 dai_id = variant->dai_driver[i].id;
1200 if (dai_id == LPASS_DP_RX || is_cdc_dma_port(dai_id))
1201 continue;
1202
1203 drvdata->mi2s_osr_clk[dai_id] = devm_clk_get_optional(dev,
1204 variant->dai_osr_clk_names[i]);
1205 drvdata->mi2s_bit_clk[dai_id] = devm_clk_get(dev,
1206 variant->dai_bit_clk_names[i]);
1207 if (IS_ERR(drvdata->mi2s_bit_clk[dai_id])) {
1208 dev_err(dev,
1209 "error getting %s: %ld\n",
1210 variant->dai_bit_clk_names[i],
1211 PTR_ERR(drvdata->mi2s_bit_clk[dai_id]));
1212 return PTR_ERR(drvdata->mi2s_bit_clk[dai_id]);
1213 }
1214 if (drvdata->mi2s_playback_sd_mode[dai_id] ==
1215 LPAIF_I2SCTL_MODE_QUAD01) {
1216 variant->dai_driver[dai_id].playback.channels_min = 4;
1217 variant->dai_driver[dai_id].playback.channels_max = 4;
1218 }
1219 }
1220
/* Allocation for i2sctl regmap fields */
drvdata->i2sctl = devm_kzalloc(&pdev->dev, sizeof(struct lpaif_i2sctl),
GFP_KERNEL);
if (!drvdata->i2sctl)
return -ENOMEM;

/* Initialize bitfields for dai I2SCTL register */
ret = lpass_cpu_init_i2sctl_bitfields(dev, drvdata->i2sctl,
1227 drvdata->lpaif_map);
1228 if (ret) {
1229 dev_err(dev, "error init i2sctl field: %d\n", ret);
1230 return ret;
1231 }
1232
1233 if (drvdata->hdmi_port_enable) {
1234 ret = lpass_hdmi_init_bitfields(dev, drvdata->hdmiif_map);
1235 if (ret) {
1236 dev_err(dev, "%s error hdmi init failed\n", __func__);
1237 return ret;
1238 }
1239 }
1240 ret = devm_snd_soc_register_component(dev,
1241 &lpass_cpu_comp_driver,
1242 variant->dai_driver,
1243 variant->num_dai);
1244 if (ret) {
1245 dev_err(dev, "error registering cpu driver: %d\n", ret);
1246 goto err;
1247 }
1248
1249 ret = asoc_qcom_lpass_platform_register(pdev);
1250 if (ret) {
1251 dev_err(dev, "error registering platform driver: %d\n", ret);
1252 goto err;
1253 }
1254
1255 err:
1256 return ret;
1257 }
1258 EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_platform_probe);
1259
1260 int asoc_qcom_lpass_cpu_platform_remove(struct platform_device *pdev)
1261 {
1262 struct lpass_data *drvdata = platform_get_drvdata(pdev);
1263
1264 if (drvdata->variant->exit)
1265 drvdata->variant->exit(pdev);
1266
1267
1268 return 0;
1269 }
1270 EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_platform_remove);
1271
1272 void asoc_qcom_lpass_cpu_platform_shutdown(struct platform_device *pdev)
1273 {
1274 struct lpass_data *drvdata = platform_get_drvdata(pdev);
1275
1276 if (drvdata->variant->exit)
1277 drvdata->variant->exit(pdev);
1278
1279 }
1280 EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_platform_shutdown);
1281
1282 MODULE_DESCRIPTION("QTi LPASS CPU Driver");
1283 MODULE_LICENSE("GPL v2");