/*
 * Copyright (C) 2017 Spreadtrum Communications Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dma/sprd-dma.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define SPRD_DMA_CHN_REG_OFFSET		0x1000
#define SPRD_DMA_CHN_REG_LENGTH		0x40
#define SPRD_DMA_MEMCPY_MIN_SIZE	64
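
/*
 * Each DMA channel owns a SPRD_DMA_CHN_REG_LENGTH bytes wide register
 * window, and the windows start SPRD_DMA_CHN_REG_OFFSET bytes above the
 * global registers, i.e. channel i is programmed through
 * glb_base + SPRD_DMA_CHN_REG_OFFSET + i * SPRD_DMA_CHN_REG_LENGTH.
 */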

/* DMA global registers definition */
#define SPRD_DMA_GLB_PAUSE		0x0
#define SPRD_DMA_GLB_FRAG_WAIT		0x4
#define SPRD_DMA_GLB_REQ_PEND0_EN	0x8
#define SPRD_DMA_GLB_REQ_PEND1_EN	0xc
#define SPRD_DMA_GLB_INT_RAW_STS	0x10
#define SPRD_DMA_GLB_INT_MSK_STS	0x14
#define SPRD_DMA_GLB_REQ_STS		0x18
#define SPRD_DMA_GLB_CHN_EN_STS		0x1c
#define SPRD_DMA_GLB_DEBUG_STS		0x20
#define SPRD_DMA_GLB_ARB_SEL_STS	0x24
#define SPRD_DMA_GLB_2STAGE_GRP1	0x28
#define SPRD_DMA_GLB_2STAGE_GRP2	0x2c
#define SPRD_DMA_GLB_REQ_UID(uid)	(0x4 * ((uid) - 1))
#define SPRD_DMA_GLB_REQ_UID_OFFSET	0x2000

/* DMA channel registers definition */
#define SPRD_DMA_CHN_PAUSE		0x0
#define SPRD_DMA_CHN_REQ		0x4
#define SPRD_DMA_CHN_CFG		0x8
#define SPRD_DMA_CHN_INTC		0xc
#define SPRD_DMA_CHN_SRC_ADDR		0x10
#define SPRD_DMA_CHN_DES_ADDR		0x14
#define SPRD_DMA_CHN_FRG_LEN		0x18
#define SPRD_DMA_CHN_BLK_LEN		0x1c
#define SPRD_DMA_CHN_TRSC_LEN		0x20
#define SPRD_DMA_CHN_TRSF_STEP		0x24
#define SPRD_DMA_CHN_WARP_PTR		0x28
#define SPRD_DMA_CHN_WARP_TO		0x2c
#define SPRD_DMA_CHN_LLIST_PTR		0x30
#define SPRD_DMA_CHN_FRAG_STEP		0x34
#define SPRD_DMA_CHN_SRC_BLK_STEP	0x38
#define SPRD_DMA_CHN_DES_BLK_STEP	0x3c

/* SPRD_DMA_GLB_2STAGE_GRP register definition */
#define SPRD_DMA_GLB_2STAGE_EN		BIT(24)
#define SPRD_DMA_GLB_CHN_INT_MASK	GENMASK(23, 20)
#define SPRD_DMA_GLB_DEST_INT		BIT(22)
#define SPRD_DMA_GLB_SRC_INT		BIT(20)
#define SPRD_DMA_GLB_LIST_DONE_TRG	BIT(19)
#define SPRD_DMA_GLB_TRANS_DONE_TRG	BIT(18)
#define SPRD_DMA_GLB_BLOCK_DONE_TRG	BIT(17)
#define SPRD_DMA_GLB_FRAG_DONE_TRG	BIT(16)
#define SPRD_DMA_GLB_TRG_OFFSET		16
#define SPRD_DMA_GLB_DEST_CHN_MASK	GENMASK(13, 8)
#define SPRD_DMA_GLB_DEST_CHN_OFFSET	8
#define SPRD_DMA_GLB_SRC_CHN_MASK	GENMASK(5, 0)

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_INT_MASK		GENMASK(4, 0)
#define SPRD_DMA_INT_CLR_OFFSET		24
#define SPRD_DMA_FRAG_INT_EN		BIT(0)
#define SPRD_DMA_BLK_INT_EN		BIT(1)
#define SPRD_DMA_TRANS_INT_EN		BIT(2)
#define SPRD_DMA_LIST_INT_EN		BIT(3)
#define SPRD_DMA_CFG_ERR_INT_EN		BIT(4)

/* SPRD_DMA_CHN_CFG register definition */
#define SPRD_DMA_CHN_EN			BIT(0)
#define SPRD_DMA_LINKLIST_EN		BIT(4)
#define SPRD_DMA_WAIT_BDONE_OFFSET	24
#define SPRD_DMA_DONOT_WAIT_BDONE	1

/* SPRD_DMA_CHN_REQ register definition */
#define SPRD_DMA_REQ_EN			BIT(0)

/* SPRD_DMA_CHN_PAUSE register definition */
#define SPRD_DMA_PAUSE_EN		BIT(0)
#define SPRD_DMA_PAUSE_STS		BIT(2)
#define SPRD_DMA_PAUSE_CNT		0x2000

/* SPRD_DMA_CHN_WARP_PTR/SPRD_DMA_CHN_WARP_TO register definition */
#define SPRD_DMA_HIGH_ADDR_MASK		GENMASK(31, 28)
#define SPRD_DMA_LOW_ADDR_MASK		GENMASK(31, 0)
#define SPRD_DMA_WRAP_ADDR_MASK		GENMASK(27, 0)
#define SPRD_DMA_HIGH_ADDR_OFFSET	4

/* SPRD_DMA_CHN_INTC interrupt status bits */
#define SPRD_DMA_FRAG_INT_STS		BIT(16)
#define SPRD_DMA_BLK_INT_STS		BIT(17)
#define SPRD_DMA_TRSC_INT_STS		BIT(18)
#define SPRD_DMA_LIST_INT_STS		BIT(19)
#define SPRD_DMA_CFGERR_INT_STS		BIT(20)
#define SPRD_DMA_CHN_INT_STS					\
	(SPRD_DMA_FRAG_INT_STS | SPRD_DMA_BLK_INT_STS |		\
	 SPRD_DMA_TRSC_INT_STS | SPRD_DMA_LIST_INT_STS |	\
	 SPRD_DMA_CFGERR_INT_STS)

/* SPRD_DMA_CHN_FRG_LEN register definition */
#define SPRD_DMA_SRC_DATAWIDTH_OFFSET	30
#define SPRD_DMA_DES_DATAWIDTH_OFFSET	28
#define SPRD_DMA_SWT_MODE_OFFSET	26
#define SPRD_DMA_REQ_MODE_OFFSET	24
#define SPRD_DMA_REQ_MODE_MASK		GENMASK(1, 0)
#define SPRD_DMA_WRAP_SEL_DEST		BIT(23)
#define SPRD_DMA_WRAP_EN		BIT(22)
#define SPRD_DMA_FIX_SEL_OFFSET		21
#define SPRD_DMA_FIX_EN_OFFSET		20
#define SPRD_DMA_LLIST_END		BIT(19)
#define SPRD_DMA_FRG_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_BLK_LEN register definition */
#define SPRD_DMA_BLK_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_TRSC_LEN register definition */
#define SPRD_DMA_TRSC_LEN_MASK		GENMASK(27, 0)

/* SPRD_DMA_CHN_TRSF_STEP register definition */
#define SPRD_DMA_DEST_TRSF_STEP_OFFSET	16
#define SPRD_DMA_SRC_TRSF_STEP_OFFSET	0
#define SPRD_DMA_TRSF_STEP_MASK		GENMASK(15, 0)

/* SPRD_DMA_CHN_LLIST_PTR register definition */
#define SPRD_DMA_LLIST_HIGH_MASK	GENMASK(31, 28)
#define SPRD_DMA_LLIST_HIGH_SHIFT	28

/* DMA channel mode, trigger mode and interrupt type masks */
#define SPRD_DMA_CHN_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_TRG_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_INT_TYPE_MASK		GENMASK(7, 0)

/* DMA transfer step values */
#define SPRD_DMA_NONE_STEP		0
#define SPRD_DMA_BYTE_STEP		1
#define SPRD_DMA_SHORT_STEP		2
#define SPRD_DMA_WORD_STEP		4
#define SPRD_DMA_DWORD_STEP		8

#define SPRD_DMA_SOFTWARE_UID		0

/* dma data width values */
enum sprd_dma_datawidth {
	SPRD_DMA_DATAWIDTH_1_BYTE,
	SPRD_DMA_DATAWIDTH_2_BYTES,
	SPRD_DMA_DATAWIDTH_4_BYTES,
	SPRD_DMA_DATAWIDTH_8_BYTES,
};

/* dma channel hardware configuration */
struct sprd_dma_chn_hw {
	u32 pause;
	u32 req;
	u32 cfg;
	u32 intc;
	u32 src_addr;
	u32 des_addr;
	u32 frg_len;
	u32 blk_len;
	u32 trsc_len;
	u32 trsf_step;
	u32 wrap_ptr;
	u32 wrap_to;
	u32 llist_ptr;
	u32 frg_step;
	u32 src_blk_step;
	u32 des_blk_step;
};

/* dma request description */
struct sprd_dma_desc {
	struct virt_dma_desc	vd;
	struct sprd_dma_chn_hw	chn_hw;
	enum dma_transfer_direction dir;
};

/* dma channel description */
struct sprd_dma_chn {
	struct virt_dma_chan	vc;
	void __iomem		*chn_base;
	struct sprd_dma_linklist	linklist;
	struct dma_slave_config	slave_cfg;
	u32			chn_num;
	u32			dev_id;
	enum sprd_dma_chn_mode	chn_mode;
	enum sprd_dma_trg_mode	trg_mode;
	enum sprd_dma_int_type	int_type;
	struct sprd_dma_desc	*cur_desc;
};

/* SPRD dma device */
struct sprd_dma_dev {
	struct dma_device	dma_dev;
	void __iomem		*glb_base;
	struct clk		*clk;
	struct clk		*ashb_clk;
	int			irq;
	u32			total_chns;
	struct sprd_dma_chn	channels[];
};

static void sprd_dma_free_desc(struct virt_dma_desc *vd);
static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param);
static struct of_dma_filter_info sprd_dma_info = {
	.filter_fn = sprd_dma_filter_fn,
};

static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct sprd_dma_chn, vc.chan);
}

static inline struct sprd_dma_dev *to_sprd_dma_dev(struct dma_chan *c)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(c);

	return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]);
}

static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
{
	return container_of(vd, struct sprd_dma_desc, vd);
}
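
/*
 * Both register update helpers below do a read-modify-write on a global or
 * channel register: the bits in @mask are cleared and @val is OR-ed in, so
 * callers must pass @val already shifted into the @mask field (see e.g.
 * sprd_dma_set_pending(), which passes "enable ? val : 0").
 */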
static void sprd_dma_glb_update(struct sprd_dma_dev *sdev, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(sdev->glb_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, sdev->glb_base + reg);
}

static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(schan->chn_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, schan->chn_base + reg);
}

static int sprd_dma_enable(struct sprd_dma_dev *sdev)
{
	int ret;

	ret = clk_prepare_enable(sdev->clk);
	if (ret)
		return ret;

	/*
	 * The ashb_clk is optional and only used by the AGCP DMA controller,
	 * so only enable it when it was actually provided.
	 */
	if (!IS_ERR(sdev->ashb_clk))
		ret = clk_prepare_enable(sdev->ashb_clk);

	return ret;
}

static void sprd_dma_disable(struct sprd_dma_dev *sdev)
{
	clk_disable_unprepare(sdev->clk);

	/* The optional ashb_clk for the AGCP DMA needs disabling too. */
	if (!IS_ERR(sdev->ashb_clk))
		clk_disable_unprepare(sdev->ashb_clk);
}

static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_unset_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(0, sdev->glb_base + uid_offset);
	}
}
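
/*
 * The per-channel interrupt clear bits sit SPRD_DMA_INT_CLR_OFFSET bits
 * above the enable bits in SPRD_DMA_CHN_INTC, so setting all of them
 * acknowledges every interrupt type of this channel at once.
 */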
static void sprd_dma_clear_int(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_INTC,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET);
}

static void sprd_dma_enable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN,
			    SPRD_DMA_CHN_EN);
}

static void sprd_dma_disable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN, 0);
}

static void sprd_dma_soft_request(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_REQ, SPRD_DMA_REQ_EN,
			    SPRD_DMA_REQ_EN);
}

static void sprd_dma_pause_resume(struct sprd_dma_chn *schan, bool enable)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 pause, timeout = SPRD_DMA_PAUSE_CNT;

	if (enable) {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);

		do {
			pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
			if (pause & SPRD_DMA_PAUSE_STS)
				break;

			cpu_relax();
		} while (--timeout > 0);

		if (!timeout)
			dev_warn(sdev->dma_dev.dev,
				 "pause dma controller timeout\n");
	} else {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, 0);
	}
}

static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
{
	u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG);

	if (!(cfg & SPRD_DMA_CHN_EN))
		return;

	sprd_dma_pause_resume(schan, true);
	sprd_dma_disable_chn(schan);
}
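
/*
 * The controller uses 36-bit addresses: the low 32 bits sit in the
 * SRC/DES address registers, while bits 35:32 are kept in bits 31:28 of
 * the WARP_PTR/WARP_TO registers, hence the shift by
 * SPRD_DMA_HIGH_ADDR_OFFSET when reassembling the full address below.
 */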
0376
0377 static unsigned long sprd_dma_get_src_addr(struct sprd_dma_chn *schan)
0378 {
0379 unsigned long addr, addr_high;
0380
0381 addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
0382 addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) &
0383 SPRD_DMA_HIGH_ADDR_MASK;
0384
0385 return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
0386 }
0387
0388 static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
0389 {
0390 unsigned long addr, addr_high;
0391
0392 addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
0393 addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) &
0394 SPRD_DMA_HIGH_ADDR_MASK;
0395
0396 return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
0397 }

static enum sprd_dma_int_type sprd_dma_get_int_type(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) &
		       SPRD_DMA_CHN_INT_STS;

	switch (intc_sts) {
	case SPRD_DMA_CFGERR_INT_STS:
		return SPRD_DMA_CFGERR_INT;

	case SPRD_DMA_LIST_INT_STS:
		return SPRD_DMA_LIST_INT;

	case SPRD_DMA_TRSC_INT_STS:
		return SPRD_DMA_TRANS_INT;

	case SPRD_DMA_BLK_INT_STS:
		return SPRD_DMA_BLK_INT;

	case SPRD_DMA_FRAG_INT_STS:
		return SPRD_DMA_FRAG_INT;

	default:
		dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n");
		return SPRD_DMA_NO_INT;
	}
}

static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
{
	u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN);

	return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
}
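
/*
 * For a 2-stage transfer, two channels are chained through one of the two
 * group registers (SPRD_DMA_GLB_2STAGE_GRP1/GRP2): the source channel
 * number goes into bits 5:0, the destination channel number into bits
 * 13:8, the trigger condition (fragment/block/transfer/list done) into
 * bits 19:16, and bit 24 enables the group. Channel numbers are programmed
 * as chn_num + 1, a register value of 0 presumably meaning "no channel".
 */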
static int sprd_dma_set_2stage_config(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 val, chn = schan->chn_num + 1;

	switch (schan->chn_mode) {
	case SPRD_DMA_SRC_CHN0:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_SRC_CHN1:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	case SPRD_DMA_DST_CHN0:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_DST_CHN1:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	default:
		dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n",
			schan->chn_mode);
		return -EINVAL;
	}

	return 0;
}

static void sprd_dma_set_pending(struct sprd_dma_chn *schan, bool enable)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 reg, val, req_id;

	if (schan->dev_id == SPRD_DMA_SOFTWARE_UID)
		return;

	/*
	 * The hardware request id starts from 0, while dev_id 0 is reserved
	 * for software requests, hence the "dev_id - 1" here.
	 */
	req_id = schan->dev_id - 1;

	if (req_id < 32) {
		reg = SPRD_DMA_GLB_REQ_PEND0_EN;
		val = BIT(req_id);
	} else {
		reg = SPRD_DMA_GLB_REQ_PEND1_EN;
		val = BIT(req_id - 32);
	}

	sprd_dma_glb_update(sdev, reg, val, enable ? val : 0);
}

static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
				    struct sprd_dma_desc *sdesc)
{
	struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;

	writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE);
	writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG);
	writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC);
	writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
	writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN);
	writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN);
	writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP);
	writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR);
	writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO);
	writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR);
	writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP);
	writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP);
	writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP);
	writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ);
}

static void sprd_dma_start(struct sprd_dma_chn *schan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);

	if (!vd)
		return;

	list_del(&vd->node);
	schan->cur_desc = to_sprd_dma_desc(vd);

	/*
	 * Set the 2-stage configuration if the channel starts a 2-stage
	 * transfer.
	 */
	if (schan->chn_mode && sprd_dma_set_2stage_config(schan))
		return;

	/*
	 * Copy the DMA configuration from the DMA descriptor to this
	 * hardware channel.
	 */
	sprd_dma_set_chn_config(schan, schan->cur_desc);
	sprd_dma_set_uid(schan);
	sprd_dma_set_pending(schan, true);
	sprd_dma_enable_chn(schan);

	if (schan->dev_id == SPRD_DMA_SOFTWARE_UID &&
	    schan->chn_mode != SPRD_DMA_DST_CHN0 &&
	    schan->chn_mode != SPRD_DMA_DST_CHN1)
		sprd_dma_soft_request(schan);
}

static void sprd_dma_stop(struct sprd_dma_chn *schan)
{
	sprd_dma_stop_and_disable(schan);
	sprd_dma_set_pending(schan, false);
	sprd_dma_unset_uid(schan);
	sprd_dma_clear_int(schan);
	schan->cur_desc = NULL;
}
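
/*
 * Decide whether the current request descriptor is complete. This relies
 * on the ordering of the sprd_dma_int_type and sprd_dma_req_mode enums in
 * <linux/dma/sprd-dma.h>: an interrupt type at or above the level matching
 * the configured request granularity (fragment, block, transfer or
 * link-list) means the requested unit has finished.
 */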
static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
				      enum sprd_dma_int_type int_type,
				      enum sprd_dma_req_mode req_mode)
{
	if (int_type == SPRD_DMA_NO_INT)
		return false;

	return int_type >= req_mode + 1;
}
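
/*
 * Interrupt handler: SPRD_DMA_GLB_INT_MSK_STS holds one pending bit per
 * channel, so the handler walks the set bits lowest-first with __ffs() and
 * services each channel under its virt-dma channel lock.
 */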
static irqreturn_t dma_irq_handle(int irq, void *dev_id)
{
	struct sprd_dma_dev *sdev = (struct sprd_dma_dev *)dev_id;
	u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS);
	struct sprd_dma_chn *schan;
	struct sprd_dma_desc *sdesc;
	enum sprd_dma_req_mode req_type;
	enum sprd_dma_int_type int_type;
	bool trans_done = false, cyclic = false;
	u32 i;

	while (irq_status) {
		i = __ffs(irq_status);
		irq_status &= (irq_status - 1);
		schan = &sdev->channels[i];

		spin_lock(&schan->vc.lock);

		sdesc = schan->cur_desc;
		if (!sdesc) {
			spin_unlock(&schan->vc.lock);
			return IRQ_HANDLED;
		}

		int_type = sprd_dma_get_int_type(schan);
		req_type = sprd_dma_get_req_type(schan);
		sprd_dma_clear_int(schan);

		/* cyclic mode schedules the callback */
		cyclic = schan->linklist.phy_addr ? true : false;
		if (cyclic) {
			vchan_cyclic_callback(&sdesc->vd);
		} else {
			/* Check if the dma request descriptor is done. */
			trans_done = sprd_dma_check_trans_done(sdesc, int_type,
							       req_type);
			if (trans_done) {
				vchan_cookie_complete(&sdesc->vd);
				schan->cur_desc = NULL;
				sprd_dma_start(schan);
			}
		}
		spin_unlock(&schan->vc.lock);
	}

	return IRQ_HANDLED;
}

static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
{
	return pm_runtime_get_sync(chan->device->dev);
}

static void sprd_dma_free_chan_resources(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *cur_vd = NULL;
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (schan->cur_desc)
		cur_vd = &schan->cur_desc->vd;

	sprd_dma_stop(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	if (cur_vd)
		sprd_dma_free_desc(cur_vd);

	vchan_free_chan_resources(&schan->vc);
	pm_runtime_put(chan->device->dev);
}

static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
					  dma_cookie_t cookie,
					  struct dma_tx_state *txstate)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *vd;
	unsigned long flags;
	enum dma_status ret;
	u32 pos;

	ret = dma_cookie_status(chan, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&schan->vc.lock, flags);
	vd = vchan_find_desc(&schan->vc, cookie);
	if (vd) {
		struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
		struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;

		if (hw->trsc_len > 0)
			pos = hw->trsc_len;
		else if (hw->blk_len > 0)
			pos = hw->blk_len;
		else if (hw->frg_len > 0)
			pos = hw->frg_len;
		else
			pos = 0;
	} else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
		struct sprd_dma_desc *sdesc = schan->cur_desc;

		if (sdesc->dir == DMA_DEV_TO_MEM)
			pos = sprd_dma_get_dst_addr(schan);
		else
			pos = sprd_dma_get_src_addr(schan);
	} else {
		pos = 0;
	}
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	dma_set_residue(txstate, pos);
	return ret;
}

static void sprd_dma_issue_pending(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
		sprd_dma_start(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);
}
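
/*
 * Map a dmaengine bus width to the controller's encodings: the datawidth
 * field is log2 of the width (1 byte -> 0, 2 -> 1, 4 -> 2, 8 -> 3, i.e.
 * ffs(buswidth) - 1), while the address step is simply the width in bytes.
 */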
static int sprd_dma_get_datawidth(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(buswidth) - 1;

	default:
		return -EINVAL;
	}
}

static int sprd_dma_get_step(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return buswidth;

	default:
		return -EINVAL;
	}
}

static int sprd_dma_fill_desc(struct dma_chan *chan,
			      struct sprd_dma_chn_hw *hw,
			      unsigned int sglen, int sg_index,
			      dma_addr_t src, dma_addr_t dst, u32 len,
			      enum dma_transfer_direction dir,
			      unsigned long flags,
			      struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	enum sprd_dma_chn_mode chn_mode = schan->chn_mode;
	u32 req_mode = (flags >> SPRD_DMA_REQ_SHIFT) & SPRD_DMA_REQ_MODE_MASK;
	u32 int_mode = flags & SPRD_DMA_INT_MASK;
	int src_datawidth, dst_datawidth, src_step, dst_step;
	u32 temp, fix_mode = 0, fix_en = 0;
	phys_addr_t llist_ptr;

	if (dir == DMA_MEM_TO_DEV) {
		src_step = sprd_dma_get_step(slave_cfg->src_addr_width);
		if (src_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid source step\n");
			return src_step;
		}

		/*
		 * For a 2-stage transfer, the destination channel step can
		 * not be 0, since the destination device is AON IRAM.
		 */
		if (chn_mode == SPRD_DMA_DST_CHN0 ||
		    chn_mode == SPRD_DMA_DST_CHN1)
			dst_step = src_step;
		else
			dst_step = SPRD_DMA_NONE_STEP;
	} else {
		dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width);
		if (dst_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid destination step\n");
			return dst_step;
		}
		src_step = SPRD_DMA_NONE_STEP;
	}

	src_datawidth = sprd_dma_get_datawidth(slave_cfg->src_addr_width);
	if (src_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid source datawidth\n");
		return src_datawidth;
	}

	dst_datawidth = sprd_dma_get_datawidth(slave_cfg->dst_addr_width);
	if (dst_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n");
		return dst_datawidth;
	}

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;

	/*
	 * wrap_ptr and wrap_to will save the high 4 bits of the source and
	 * destination address.
	 */
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK;

	/*
	 * If the src step and dst step both are 0 or both are not 0, that
	 * means we can not enable the fix mode. If one is 0 and another one
	 * is not, we can enable the fix mode.
	 */
	if ((src_step != 0 && dst_step != 0) || (src_step | dst_step) == 0) {
		fix_en = 0;
	} else {
		fix_en = 1;
		if (src_step)
			fix_mode = 1;
		else
			fix_mode = 0;
	}

	hw->intc = int_mode | SPRD_DMA_CFG_ERR_INT_EN;

	temp = src_datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= dst_datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= req_mode << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= fix_mode << SPRD_DMA_FIX_SEL_OFFSET;
	temp |= fix_en << SPRD_DMA_FIX_EN_OFFSET;
	temp |= schan->linklist.wrap_addr ?
		SPRD_DMA_WRAP_EN | SPRD_DMA_WRAP_SEL_DEST : 0;
	temp |= slave_cfg->src_maxburst & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = slave_cfg->src_maxburst & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (dst_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (src_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	/* link-list configuration */
	if (schan->linklist.phy_addr) {
		hw->cfg |= SPRD_DMA_LINKLIST_EN;

		/* link-list index */
		temp = sglen ? (sg_index + 1) % sglen : 0;

		/* Next link-list configuration's physical address offset */
		temp = temp * sizeof(*hw) + SPRD_DMA_CHN_SRC_ADDR;

		/*
		 * Set the link-list pointer to point to the next link-list
		 * configuration's physical address.
		 */
		llist_ptr = schan->linklist.phy_addr + temp;
		hw->llist_ptr = lower_32_bits(llist_ptr);
		hw->src_blk_step = (upper_32_bits(llist_ptr) << SPRD_DMA_LLIST_HIGH_SHIFT) &
			SPRD_DMA_LLIST_HIGH_MASK;

		if (schan->linklist.wrap_addr) {
			hw->wrap_ptr |= schan->linklist.wrap_addr &
				SPRD_DMA_WRAP_ADDR_MASK;
			hw->wrap_to |= dst & SPRD_DMA_WRAP_ADDR_MASK;
		}
	} else {
		hw->llist_ptr = 0;
		hw->src_blk_step = 0;
	}

	hw->frg_step = 0;
	hw->des_blk_step = 0;
	return 0;
}
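
/*
 * In link-list mode every scatterlist entry gets its own sprd_dma_chn_hw
 * image in the client-provided linklist buffer. The next-node pointer
 * computed above is offset by SPRD_DMA_CHN_SRC_ADDR, which suggests the
 * hardware reloads a node starting at its SRC_ADDR field, and bits 35:32
 * of the node's physical address are stashed in the otherwise-unused
 * src_blk_step field.
 */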
static int sprd_dma_fill_linklist_desc(struct dma_chan *chan,
				       unsigned int sglen, int sg_index,
				       dma_addr_t src, dma_addr_t dst, u32 len,
				       enum dma_transfer_direction dir,
				       unsigned long flags,
				       struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_chn_hw *hw;

	if (!schan->linklist.virt_addr)
		return -EINVAL;

	hw = (struct sprd_dma_chn_hw *)(schan->linklist.virt_addr +
					sg_index * sizeof(*hw));

	return sprd_dma_fill_desc(chan, hw, sglen, sg_index, src, dst, len,
				  dir, flags, slave_cfg);
}
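
/*
 * Memcpy descriptors pick the data width and address step from the
 * length's alignment. Note that the length is simply masked to the 17-bit
 * fragment/block and 28-bit transfer length fields, so callers are
 * expected to stay within those limits.
 */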
static struct dma_async_tx_descriptor *
sprd_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
			 size_t len, unsigned long flags)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_desc *sdesc;
	struct sprd_dma_chn_hw *hw;
	enum sprd_dma_datawidth datawidth;
	u32 step, temp;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	hw = &sdesc->chn_hw;

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
	hw->intc = SPRD_DMA_TRANS_INT | SPRD_DMA_CFG_ERR_INT_EN;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dest & SPRD_DMA_LOW_ADDR_MASK;
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		       SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dest >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		      SPRD_DMA_HIGH_ADDR_MASK;

	if (IS_ALIGNED(len, 8)) {
		datawidth = SPRD_DMA_DATAWIDTH_8_BYTES;
		step = SPRD_DMA_DWORD_STEP;
	} else if (IS_ALIGNED(len, 4)) {
		datawidth = SPRD_DMA_DATAWIDTH_4_BYTES;
		step = SPRD_DMA_WORD_STEP;
	} else if (IS_ALIGNED(len, 2)) {
		datawidth = SPRD_DMA_DATAWIDTH_2_BYTES;
		step = SPRD_DMA_SHORT_STEP;
	} else {
		datawidth = SPRD_DMA_DATAWIDTH_1_BYTE;
		step = SPRD_DMA_BYTE_STEP;
	}

	temp = datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= SPRD_DMA_TRANS_REQ << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= len & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}
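
/*
 * The dmaengine "flags" word carries Spreadtrum-specific configuration:
 * the interrupt type in the low byte, and the request mode, channel mode
 * and trigger mode at SPRD_DMA_REQ_SHIFT, SPRD_DMA_CHN_MODE_SHIFT and
 * SPRD_DMA_TRG_MODE_SHIFT respectively (the shifts and helper macros for
 * composing it live in <linux/dma/sprd-dma.h>).
 */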
static struct dma_async_tx_descriptor *
sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
		       unsigned int sglen, enum dma_transfer_direction dir,
		       unsigned long flags, void *context)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;
	dma_addr_t src = 0, dst = 0;
	dma_addr_t start_src = 0, start_dst = 0;
	struct sprd_dma_desc *sdesc;
	struct scatterlist *sg;
	u32 len = 0;
	int ret, i;

	if (!is_slave_direction(dir))
		return NULL;

	if (context) {
		struct sprd_dma_linklist *ll_cfg =
			(struct sprd_dma_linklist *)context;

		schan->linklist.phy_addr = ll_cfg->phy_addr;
		schan->linklist.virt_addr = ll_cfg->virt_addr;
		schan->linklist.wrap_addr = ll_cfg->wrap_addr;
	} else {
		schan->linklist.phy_addr = 0;
		schan->linklist.virt_addr = 0;
		schan->linklist.wrap_addr = 0;
	}

	/*
	 * Set the channel mode, interrupt mode and trigger mode for a
	 * 2-stage transfer.
	 */
	schan->chn_mode =
		(flags >> SPRD_DMA_CHN_MODE_SHIFT) & SPRD_DMA_CHN_MODE_MASK;
	schan->trg_mode =
		(flags >> SPRD_DMA_TRG_MODE_SHIFT) & SPRD_DMA_TRG_MODE_MASK;
	schan->int_type = flags & SPRD_DMA_INT_TYPE_MASK;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	sdesc->dir = dir;

	for_each_sg(sgl, sg, sglen, i) {
		len = sg_dma_len(sg);

		if (dir == DMA_MEM_TO_DEV) {
			src = sg_dma_address(sg);
			dst = slave_cfg->dst_addr;
		} else {
			src = slave_cfg->src_addr;
			dst = sg_dma_address(sg);
		}

		if (!i) {
			start_src = src;
			start_dst = dst;
		}

		/*
		 * The link-list mode needs at least 2 link-list
		 * configurations. If there is only one sg, it doesn't
		 * need to fill the link-list configuration.
		 */
		if (sglen < 2)
			break;

		ret = sprd_dma_fill_linklist_desc(chan, sglen, i, src, dst, len,
						  dir, flags, slave_cfg);
		if (ret) {
			kfree(sdesc);
			return NULL;
		}
	}

	ret = sprd_dma_fill_desc(chan, &sdesc->chn_hw, 0, 0, start_src,
				 start_dst, len, dir, flags, slave_cfg);
	if (ret) {
		kfree(sdesc);
		return NULL;
	}

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

static int sprd_dma_slave_config(struct dma_chan *chan,
				 struct dma_slave_config *config)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;

	memcpy(slave_cfg, config, sizeof(*config));
	return 0;
}

static int sprd_dma_pause(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, true);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_resume(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, false);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_terminate_all(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *cur_vd = NULL;
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (schan->cur_desc)
		cur_vd = &schan->cur_desc->vd;

	sprd_dma_stop(schan);

	vchan_get_all_descriptors(&schan->vc, &head);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	if (cur_vd)
		sprd_dma_free_desc(cur_vd);

	vchan_dma_desc_free_list(&schan->vc, &head);
	return 0;
}

static void sprd_dma_free_desc(struct virt_dma_desc *vd)
{
	struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);

	kfree(sdesc);
}
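
/*
 * Channels are handed out through of_dma_simple_xlate() with sprd_dma_info
 * as the filter data (see probe below), so the slave id from the client's
 * DT "dmas" phandle arrives here and is stored as the channel's hardware
 * request uid.
 */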
static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	u32 slave_id = *(u32 *)param;

	schan->dev_id = slave_id;
	return true;
}

static int sprd_dma_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct sprd_dma_dev *sdev;
	struct sprd_dma_chn *dma_chn;
	u32 chn_count;
	int ret, i;

	/* Parse the new and deprecated dma-channels properties */
	ret = device_property_read_u32(&pdev->dev, "dma-channels", &chn_count);
	if (ret)
		ret = device_property_read_u32(&pdev->dev, "#dma-channels",
					       &chn_count);
	if (ret) {
		dev_err(&pdev->dev, "get dma channels count failed\n");
		return ret;
	}

	sdev = devm_kzalloc(&pdev->dev,
			    struct_size(sdev, channels, chn_count),
			    GFP_KERNEL);
	if (!sdev)
		return -ENOMEM;

	sdev->clk = devm_clk_get(&pdev->dev, "enable");
	if (IS_ERR(sdev->clk)) {
		dev_err(&pdev->dev, "get enable clock failed\n");
		return PTR_ERR(sdev->clk);
	}

	/* The ashb clock is optional and only used by the AGCP DMA */
	sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb");
	if (IS_ERR(sdev->ashb_clk))
		dev_warn(&pdev->dev, "no optional ashb eb clock\n");

	/*
	 * The irq is optional: a DMA controller that does not request one
	 * will not wake up the system through DMA interrupts, which saves
	 * power.
	 */
	sdev->irq = platform_get_irq(pdev, 0);
	if (sdev->irq > 0) {
		ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle,
				       0, "sprd_dma", (void *)sdev);
		if (ret < 0) {
			dev_err(&pdev->dev, "request dma irq failed\n");
			return ret;
		}
	} else {
		dev_warn(&pdev->dev, "no interrupts for the dma controller\n");
	}

	sdev->glb_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(sdev->glb_base))
		return PTR_ERR(sdev->glb_base);

	dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask);
	sdev->total_chns = chn_count;
	sdev->dma_dev.chancnt = chn_count;
	INIT_LIST_HEAD(&sdev->dma_dev.channels);
	INIT_LIST_HEAD(&sdev->dma_dev.global_node);
	sdev->dma_dev.dev = &pdev->dev;
	sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources;
	sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources;
	sdev->dma_dev.device_tx_status = sprd_dma_tx_status;
	sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending;
	sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy;
	sdev->dma_dev.device_prep_slave_sg = sprd_dma_prep_slave_sg;
	sdev->dma_dev.device_config = sprd_dma_slave_config;
	sdev->dma_dev.device_pause = sprd_dma_pause;
	sdev->dma_dev.device_resume = sprd_dma_resume;
	sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all;

	for (i = 0; i < chn_count; i++) {
		dma_chn = &sdev->channels[i];
		dma_chn->chn_num = i;
		dma_chn->cur_desc = NULL;
		/* get each channel's registers base address. */
		dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET +
				    SPRD_DMA_CHN_REG_LENGTH * i;

		dma_chn->vc.desc_free = sprd_dma_free_desc;
		vchan_init(&dma_chn->vc, &sdev->dma_dev);
	}

	platform_set_drvdata(pdev, sdev);
	ret = sprd_dma_enable(sdev);
	if (ret)
		return ret;

	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		goto err_rpm;

	ret = dma_async_device_register(&sdev->dma_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "register dma device failed:%d\n", ret);
		goto err_register;
	}

	sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask;
	ret = of_dma_controller_register(np, of_dma_simple_xlate,
					 &sprd_dma_info);
	if (ret)
		goto err_of_register;

	pm_runtime_put(&pdev->dev);
	return 0;

err_of_register:
	dma_async_device_unregister(&sdev->dma_dev);
err_register:
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
err_rpm:
	sprd_dma_disable(sdev);
	return ret;
}

static int sprd_dma_remove(struct platform_device *pdev)
{
	struct sprd_dma_dev *sdev = platform_get_drvdata(pdev);
	struct sprd_dma_chn *c, *cn;

	pm_runtime_get_sync(&pdev->dev);

	/* explicitly free the irq */
	if (sdev->irq > 0)
		devm_free_irq(&pdev->dev, sdev->irq, sdev);

	list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels,
				 vc.chan.device_node) {
		list_del(&c->vc.chan.device_node);
		tasklet_kill(&c->vc.task);
	}

	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&sdev->dma_dev);
	sprd_dma_disable(sdev);

	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	return 0;
}

static const struct of_device_id sprd_dma_match[] = {
	{ .compatible = "sprd,sc9860-dma", },
	{},
};
MODULE_DEVICE_TABLE(of, sprd_dma_match);

static int __maybe_unused sprd_dma_runtime_suspend(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);

	sprd_dma_disable(sdev);
	return 0;
}

static int __maybe_unused sprd_dma_runtime_resume(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
	int ret;

	ret = sprd_dma_enable(sdev);
	if (ret)
		dev_err(sdev->dma_dev.dev, "enable dma failed\n");

	return ret;
}

static const struct dev_pm_ops sprd_dma_pm_ops = {
	SET_RUNTIME_PM_OPS(sprd_dma_runtime_suspend,
			   sprd_dma_runtime_resume,
			   NULL)
};

static struct platform_driver sprd_dma_driver = {
	.probe = sprd_dma_probe,
	.remove = sprd_dma_remove,
	.driver = {
		.name = "sprd-dma",
		.of_match_table = sprd_dma_match,
		.pm = &sprd_dma_pm_ops,
	},
};
module_platform_driver(sprd_dma_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("DMA driver for Spreadtrum");
MODULE_AUTHOR("Baolin Wang <baolin.wang@spreadtrum.com>");
MODULE_AUTHOR("Eric Long <eric.long@spreadtrum.com>");
MODULE_ALIAS("platform:sprd-dma");