// SPDX-License-Identifier: GPL-2.0-only
/*
 * DMA engine driver for the STM32 DMA controller.
 */
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/sched.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define STM32_DMA_LISR			0x0000 /* DMA Low Int Status Reg */
#define STM32_DMA_HISR			0x0004 /* DMA High Int Status Reg */
#define STM32_DMA_LIFCR			0x0008 /* DMA Low Int Flag Clear Reg */
#define STM32_DMA_HIFCR			0x000c /* DMA High Int Flag Clear Reg */
#define STM32_DMA_TCI			BIT(5) /* Transfer Complete Interrupt */
#define STM32_DMA_HTI			BIT(4) /* Half Transfer Interrupt */
#define STM32_DMA_TEI			BIT(3) /* Transfer Error Interrupt */
#define STM32_DMA_DMEI			BIT(2) /* Direct Mode Error Interrupt */
#define STM32_DMA_FEI			BIT(0) /* FIFO Error Interrupt */
#define STM32_DMA_MASKI			(STM32_DMA_TCI \
					 | STM32_DMA_TEI \
					 | STM32_DMA_DMEI \
					 | STM32_DMA_FEI)

/* DMA Stream x Configuration Register */
#define STM32_DMA_SCR(x)		(0x0010 + 0x18 * (x)) /* x = 0..7 */
#define STM32_DMA_SCR_REQ(n)		((n & 0x7) << 25)
#define STM32_DMA_SCR_MBURST_MASK	GENMASK(24, 23)
#define STM32_DMA_SCR_MBURST(n)		((n & 0x3) << 23)
#define STM32_DMA_SCR_PBURST_MASK	GENMASK(22, 21)
#define STM32_DMA_SCR_PBURST(n)		((n & 0x3) << 21)
#define STM32_DMA_SCR_PL_MASK		GENMASK(17, 16)
#define STM32_DMA_SCR_PL(n)		((n & 0x3) << 16)
#define STM32_DMA_SCR_MSIZE_MASK	GENMASK(14, 13)
#define STM32_DMA_SCR_MSIZE(n)		((n & 0x3) << 13)
#define STM32_DMA_SCR_PSIZE_MASK	GENMASK(12, 11)
#define STM32_DMA_SCR_PSIZE(n)		((n & 0x3) << 11)
#define STM32_DMA_SCR_PSIZE_GET(n)	((n & STM32_DMA_SCR_PSIZE_MASK) >> 11)
#define STM32_DMA_SCR_DIR_MASK		GENMASK(7, 6)
#define STM32_DMA_SCR_DIR(n)		((n & 0x3) << 6)
#define STM32_DMA_SCR_TRBUFF		BIT(20) /* Bufferable transfer */
#define STM32_DMA_SCR_CT		BIT(19) /* Current target in double buffer mode */
#define STM32_DMA_SCR_DBM		BIT(18) /* Double Buffer Mode */
#define STM32_DMA_SCR_PINCOS		BIT(15) /* Peripheral increment offset size */
#define STM32_DMA_SCR_MINC		BIT(10) /* Memory increment mode */
#define STM32_DMA_SCR_PINC		BIT(9)  /* Peripheral increment mode */
#define STM32_DMA_SCR_CIRC		BIT(8)  /* Circular mode */
#define STM32_DMA_SCR_PFCTRL		BIT(5)  /* Peripheral flow controller */
#define STM32_DMA_SCR_TCIE		BIT(4)  /* Transfer complete int enable */

#define STM32_DMA_SCR_TEIE		BIT(2)  /* Transfer error int enable */
#define STM32_DMA_SCR_DMEIE		BIT(1)  /* Direct mode error int enable */
#define STM32_DMA_SCR_EN		BIT(0)  /* Stream enable */
#define STM32_DMA_SCR_CFG_MASK		(STM32_DMA_SCR_PINC \
					 | STM32_DMA_SCR_MINC \
					 | STM32_DMA_SCR_PINCOS \
					 | STM32_DMA_SCR_PL_MASK)
#define STM32_DMA_SCR_IRQ_MASK		(STM32_DMA_SCR_TCIE \
					 | STM32_DMA_SCR_TEIE \
					 | STM32_DMA_SCR_DMEIE)

/* DMA Stream x Number of Data Register */
#define STM32_DMA_SNDTR(x)		(0x0014 + 0x18 * (x))

/* DMA Stream x Peripheral Address Register */
#define STM32_DMA_SPAR(x)		(0x0018 + 0x18 * (x))

/* DMA Stream x Memory 0 Address Register */
#define STM32_DMA_SM0AR(x)		(0x001c + 0x18 * (x))

/* DMA Stream x Memory 1 Address Register */
#define STM32_DMA_SM1AR(x)		(0x0020 + 0x18 * (x))

/* DMA Stream x FIFO Control Register */
#define STM32_DMA_SFCR(x)		(0x0024 + 0x18 * (x))
#define STM32_DMA_SFCR_FTH_MASK		GENMASK(1, 0)
#define STM32_DMA_SFCR_FTH(n)		(n & STM32_DMA_SFCR_FTH_MASK)
#define STM32_DMA_SFCR_FEIE		BIT(7) /* FIFO error interrupt enable */
#define STM32_DMA_SFCR_DMDIS		BIT(2) /* Direct mode disable */
#define STM32_DMA_SFCR_MASK		(STM32_DMA_SFCR_FEIE \
					 | STM32_DMA_SFCR_DMDIS)

/* DMA direction */
#define STM32_DMA_DEV_TO_MEM		0x00
#define STM32_DMA_MEM_TO_DEV		0x01
#define STM32_DMA_MEM_TO_MEM		0x02

/* DMA priority level */
#define STM32_DMA_PRIORITY_LOW		0x00
#define STM32_DMA_PRIORITY_MEDIUM	0x01
#define STM32_DMA_PRIORITY_HIGH		0x02
#define STM32_DMA_PRIORITY_VERY_HIGH	0x03

/* DMA FIFO threshold selection */
#define STM32_DMA_FIFO_THRESHOLD_1QUARTERFULL	0x00
#define STM32_DMA_FIFO_THRESHOLD_HALFFULL	0x01
#define STM32_DMA_FIFO_THRESHOLD_3QUARTERSFULL	0x02
#define STM32_DMA_FIFO_THRESHOLD_FULL		0x03
#define STM32_DMA_FIFO_THRESHOLD_NONE		0x04

#define STM32_DMA_MAX_DATA_ITEMS	0xffff
/*
 * Transfers are split in chunks of at most STM32_DMA_MAX_DATA_ITEMS data
 * items; keep the chunk size a multiple of the FIFO size (16) so that chunk
 * boundaries stay aligned.
 */
#define STM32_DMA_ALIGNED_MAX_DATA_ITEMS	\
	ALIGN_DOWN(STM32_DMA_MAX_DATA_ITEMS, 16)
#define STM32_DMA_MAX_CHANNELS		0x08
#define STM32_DMA_MAX_REQUEST_ID	0x08
#define STM32_DMA_MAX_DATA_PARAM	0x03
#define STM32_DMA_FIFO_SIZE		16	/* FIFO is 16 bytes deep */
#define STM32_DMA_MIN_BURST		4
#define STM32_DMA_MAX_BURST		16

/* DMA features: bit layout of the "features" value (see stm32_dma_set_config()) */
#define STM32_DMA_THRESHOLD_FTR_MASK	GENMASK(1, 0)
#define STM32_DMA_THRESHOLD_FTR_GET(n)	((n) & STM32_DMA_THRESHOLD_FTR_MASK)
#define STM32_DMA_DIRECT_MODE_MASK	BIT(2)
#define STM32_DMA_DIRECT_MODE_GET(n)	(((n) & STM32_DMA_DIRECT_MODE_MASK) >> 2)
#define STM32_DMA_ALT_ACK_MODE_MASK	BIT(4)
#define STM32_DMA_ALT_ACK_MODE_GET(n)	(((n) & STM32_DMA_ALT_ACK_MODE_MASK) >> 4)
enum stm32_dma_width {
	STM32_DMA_BYTE,
	STM32_DMA_HALF_WORD,
	STM32_DMA_WORD,
};

enum stm32_dma_burst_size {
	STM32_DMA_BURST_SINGLE,
	STM32_DMA_BURST_INCR4,
	STM32_DMA_BURST_INCR8,
	STM32_DMA_BURST_INCR16,
};

/**
 * struct stm32_dma_cfg - STM32 DMA custom configuration
 * @channel_id: channel ID
 * @request_line: DMA request line
 * @stream_config: 32-bit mask specifying the DMA channel configuration
 * @features: 32-bit bitfield specifying the DMA features
 */
struct stm32_dma_cfg {
	u32 channel_id;
	u32 request_line;
	u32 stream_config;
	u32 features;
};

struct stm32_dma_chan_reg {
	u32 dma_lisr;
	u32 dma_hisr;
	u32 dma_lifcr;
	u32 dma_hifcr;
	u32 dma_scr;
	u32 dma_sndtr;
	u32 dma_spar;
	u32 dma_sm0ar;
	u32 dma_sm1ar;
	u32 dma_sfcr;
};

struct stm32_dma_sg_req {
	u32 len;
	struct stm32_dma_chan_reg chan_reg;
};

struct stm32_dma_desc {
	struct virt_dma_desc vdesc;
	bool cyclic;
	u32 num_sgs;
	struct stm32_dma_sg_req sg_req[];
};

struct stm32_dma_chan {
	struct virt_dma_chan vchan;
	bool config_init;
	bool busy;
	u32 id;
	u32 irq;
	struct stm32_dma_desc *desc;
	u32 next_sg;
	struct dma_slave_config dma_sconfig;
	struct stm32_dma_chan_reg chan_reg;
	u32 threshold;
	u32 mem_burst;
	u32 mem_width;
	enum dma_status status;
};

struct stm32_dma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	bool mem2mem;
	struct stm32_dma_chan chan[STM32_DMA_MAX_CHANNELS];
};

static struct stm32_dma_device *stm32_dma_get_dev(struct stm32_dma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_dma_device,
			    ddev);
}

static struct stm32_dma_chan *to_stm32_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_dma_chan, vchan.chan);
}

static struct stm32_dma_desc *to_stm32_dma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_dma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_dma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static u32 stm32_dma_read(struct stm32_dma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_dma_write(struct stm32_dma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static int stm32_dma_get_width(struct stm32_dma_chan *chan,
			       enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
		return STM32_DMA_BYTE;
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		return STM32_DMA_HALF_WORD;
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		return STM32_DMA_WORD;
	default:
		dev_err(chan2dev(chan), "Dma bus width not supported\n");
		return -EINVAL;
	}
}

static enum dma_slave_buswidth stm32_dma_get_max_width(u32 buf_len,
							dma_addr_t buf_addr,
							u32 threshold)
{
	enum dma_slave_buswidth max_width;

	if (threshold == STM32_DMA_FIFO_THRESHOLD_FULL)
		max_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	else
		max_width = DMA_SLAVE_BUSWIDTH_2_BYTES;

	while ((buf_len < max_width || buf_len % max_width) &&
	       max_width > DMA_SLAVE_BUSWIDTH_1_BYTE)
		max_width = max_width >> 1;

	if (buf_addr & (max_width - 1))
		max_width = DMA_SLAVE_BUSWIDTH_1_BYTE;

	return max_width;
}

static bool stm32_dma_fifo_threshold_is_allowed(u32 burst, u32 threshold,
						enum dma_slave_buswidth width)
{
	u32 remaining;

	if (threshold == STM32_DMA_FIFO_THRESHOLD_NONE)
		return false;

	if (width != DMA_SLAVE_BUSWIDTH_UNDEFINED) {
		if (burst != 0) {
			/*
			 * If the number of beats held in the FIFO at the
			 * selected threshold is a whole number of bursts,
			 * this configuration is allowed.
			 */
			remaining = ((STM32_DMA_FIFO_SIZE / width) *
				     (threshold + 1) / 4) % burst;

			if (remaining == 0)
				return true;
		} else {
			return true;
		}
	}

	return false;
}
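
/*
 * Worked example for stm32_dma_fifo_threshold_is_allowed() above: with a
 * 4-byte bus width and threshold FULL (3), the FIFO holds
 * (16 / 4) * (3 + 1) / 4 = 4 beats at the threshold, so a burst of 4 is
 * allowed (4 % 4 == 0) while bursts of 8 or 16 are rejected.
 */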

static bool stm32_dma_is_burst_possible(u32 buf_len, u32 threshold)
{
	/* If FIFO direct mode is used, bursts are not possible */
	if (threshold == STM32_DMA_FIFO_THRESHOLD_NONE)
		return false;

	/*
	 * The buffer or period length has to be aligned on the FIFO depth at
	 * the selected threshold, otherwise bytes may be left stuck in the
	 * FIFO at the end of the transfer.
	 */
	return ((buf_len % ((threshold + 1) * 4)) == 0);
}

static u32 stm32_dma_get_best_burst(u32 buf_len, u32 max_burst, u32 threshold,
				    enum dma_slave_buswidth width)
{
	u32 best_burst = max_burst;

	if (best_burst == 1 || !stm32_dma_is_burst_possible(buf_len, threshold))
		return 0;

	while ((buf_len < best_burst * width && best_burst > 1) ||
	       !stm32_dma_fifo_threshold_is_allowed(best_burst, threshold,
						    width)) {
		if (best_burst > STM32_DMA_MIN_BURST)
			best_burst = best_burst >> 1;
		else
			best_burst = 0;
	}

	return best_burst;
}

static int stm32_dma_get_burst(struct stm32_dma_chan *chan, u32 maxburst)
{
	switch (maxburst) {
	case 0:
	case 1:
		return STM32_DMA_BURST_SINGLE;
	case 4:
		return STM32_DMA_BURST_INCR4;
	case 8:
		return STM32_DMA_BURST_INCR8;
	case 16:
		return STM32_DMA_BURST_INCR16;
	default:
		dev_err(chan2dev(chan), "Dma burst size not supported\n");
		return -EINVAL;
	}
}

static void stm32_dma_set_fifo_config(struct stm32_dma_chan *chan,
				      u32 src_burst, u32 dst_burst)
{
	chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_MASK;
	chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_DMEIE;

	if (!src_burst && !dst_burst) {
		/* Using direct mode */
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_DMEIE;
	} else {
		/* Using FIFO mode */
		chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_MASK;
	}
}

static int stm32_dma_slave_config(struct dma_chan *c,
				  struct dma_slave_config *config)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);

	memcpy(&chan->dma_sconfig, config, sizeof(*config));

	chan->config_init = true;

	return 0;
}

static u32 stm32_dma_irq_status(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 flags, dma_isr;

	/*
	 * Read the flags from the DMA_xISR register covering this channel,
	 * shifting them down from the bit offset used for this stream.
	 */
	if (chan->id & 4)
		dma_isr = stm32_dma_read(dmadev, STM32_DMA_HISR);
	else
		dma_isr = stm32_dma_read(dmadev, STM32_DMA_LISR);

	flags = dma_isr >> (((chan->id & 2) << 3) | ((chan->id & 1) * 6));

	return flags & STM32_DMA_MASKI;
}

static void stm32_dma_irq_clear(struct stm32_dma_chan *chan, u32 flags)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_ifcr;

	/*
	 * Write the flags to the DMA_xIFCR register covering this channel,
	 * shifting them up to the bit offset used for this stream.
	 */
	flags &= STM32_DMA_MASKI;
	dma_ifcr = flags << (((chan->id & 2) << 3) | ((chan->id & 1) * 6));

	if (chan->id & 4)
		stm32_dma_write(dmadev, STM32_DMA_HIFCR, dma_ifcr);
	else
		stm32_dma_write(dmadev, STM32_DMA_LIFCR, dma_ifcr);
}
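
/*
 * Example of the bit-position arithmetic used in the two helpers above:
 * stream 5 uses the high registers (bit 2 of its id is set) and its flags sit
 * at offset ((5 & 2) << 3) | ((5 & 1) * 6) = 6 within HISR/HIFCR, while
 * stream 2 uses LISR/LIFCR at offset ((2 & 2) << 3) = 16.
 */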

static int stm32_dma_disable_chan(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_scr, id, reg;

	id = chan->id;
	reg = STM32_DMA_SCR(id);
	dma_scr = stm32_dma_read(dmadev, reg);

	if (dma_scr & STM32_DMA_SCR_EN) {
		dma_scr &= ~STM32_DMA_SCR_EN;
		stm32_dma_write(dmadev, reg, dma_scr);

		return readl_relaxed_poll_timeout_atomic(dmadev->base + reg,
					dma_scr, !(dma_scr & STM32_DMA_SCR_EN),
					10, 1000000);
	}

	return 0;
}

static void stm32_dma_stop(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_scr, dma_sfcr, status;
	int ret;

	/* Disable interrupts */
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	dma_scr &= ~STM32_DMA_SCR_IRQ_MASK;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);
	dma_sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));
	dma_sfcr &= ~STM32_DMA_SFCR_FEIE;
	stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), dma_sfcr);

	/* Disable DMA */
	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_dma_irq_status(chan);
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_dma_irq_clear(chan, status);
	}

	chan->busy = false;
	chan->status = DMA_COMPLETE;
}

static int stm32_dma_terminate_all(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (chan->desc) {
		dma_cookie_complete(&chan->desc->vdesc.tx);
		vchan_terminate_vdesc(&chan->desc->vdesc);
		if (chan->busy)
			stm32_dma_stop(chan);
		chan->desc = NULL;
	}

	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_dma_synchronize(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static void stm32_dma_dump_reg(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	u32 ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));
	u32 spar = stm32_dma_read(dmadev, STM32_DMA_SPAR(chan->id));
	u32 sm0ar = stm32_dma_read(dmadev, STM32_DMA_SM0AR(chan->id));
	u32 sm1ar = stm32_dma_read(dmadev, STM32_DMA_SM1AR(chan->id));
	u32 sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));

	dev_dbg(chan2dev(chan), "SCR: 0x%08x\n", scr);
	dev_dbg(chan2dev(chan), "NDTR: 0x%08x\n", ndtr);
	dev_dbg(chan2dev(chan), "SPAR: 0x%08x\n", spar);
	dev_dbg(chan2dev(chan), "SM0AR: 0x%08x\n", sm0ar);
	dev_dbg(chan2dev(chan), "SM1AR: 0x%08x\n", sm1ar);
	dev_dbg(chan2dev(chan), "SFCR: 0x%08x\n", sfcr);
}

static void stm32_dma_sg_inc(struct stm32_dma_chan *chan)
{
	chan->next_sg++;
	if (chan->desc->cyclic && (chan->next_sg == chan->desc->num_sgs))
		chan->next_sg = 0;
}

static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan);

static void stm32_dma_start_transfer(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_dma_sg_req *sg_req;
	struct stm32_dma_chan_reg *reg;
	u32 status;
	int ret;

	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		return;

	if (!chan->desc) {
		vdesc = vchan_next_desc(&chan->vchan);
		if (!vdesc)
			return;

		list_del(&vdesc->node);

		chan->desc = to_stm32_dma_desc(vdesc);
		chan->next_sg = 0;
	}

	if (chan->next_sg == chan->desc->num_sgs)
		chan->next_sg = 0;

	sg_req = &chan->desc->sg_req[chan->next_sg];
	reg = &sg_req->chan_reg;

	reg->dma_scr &= ~STM32_DMA_SCR_EN;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);
	stm32_dma_write(dmadev, STM32_DMA_SPAR(chan->id), reg->dma_spar);
	stm32_dma_write(dmadev, STM32_DMA_SM0AR(chan->id), reg->dma_sm0ar);
	stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), reg->dma_sfcr);
	stm32_dma_write(dmadev, STM32_DMA_SM1AR(chan->id), reg->dma_sm1ar);
	stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), reg->dma_sndtr);

	stm32_dma_sg_inc(chan);

	/* Clear interrupt status if it is there */
	status = stm32_dma_irq_status(chan);
	if (status)
		stm32_dma_irq_clear(chan, status);

	if (chan->desc->cyclic)
		stm32_dma_configure_next_sg(chan);

	stm32_dma_dump_reg(chan);

	/* Start DMA */
	chan->busy = true;
	chan->status = DMA_IN_PROGRESS;
	reg->dma_scr |= STM32_DMA_SCR_EN;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct stm32_dma_sg_req *sg_req;
	u32 dma_scr, dma_sm0ar, dma_sm1ar, id;

	id = chan->id;
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));

	sg_req = &chan->desc->sg_req[chan->next_sg];

	if (dma_scr & STM32_DMA_SCR_CT) {
		dma_sm0ar = sg_req->chan_reg.dma_sm0ar;
		stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), dma_sm0ar);
		dev_dbg(chan2dev(chan), "CT=1 <=> SM0AR: 0x%08x\n",
			stm32_dma_read(dmadev, STM32_DMA_SM0AR(id)));
	} else {
		dma_sm1ar = sg_req->chan_reg.dma_sm1ar;
		stm32_dma_write(dmadev, STM32_DMA_SM1AR(id), dma_sm1ar);
		dev_dbg(chan2dev(chan), "CT=0 <=> SM1AR: 0x%08x\n",
			stm32_dma_read(dmadev, STM32_DMA_SM1AR(id)));
	}
}

static void stm32_dma_handle_chan_paused(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_scr;

	/*
	 * Read and store the current SCR and remaining data items so that
	 * they can be restored on resume.
	 */
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));

	/*
	 * The transfer may have been paused between a previous resume and the
	 * reconfiguration done on transfer complete. If the transfer is
	 * cyclic, make sure CIRC or DBM is set again in the SCR backup so
	 * that the next resume fully reconfigures the stream.
	 */
	if (chan->desc && chan->desc->cyclic) {
		if (chan->desc->num_sgs == 1)
			dma_scr |= STM32_DMA_SCR_CIRC;
		else
			dma_scr |= STM32_DMA_SCR_DBM;
	}
	chan->chan_reg.dma_scr = dma_scr;

	/*
	 * Temporarily deactivate CIRC/DBM in hardware until the next transfer
	 * complete interrupt, otherwise NDTR would be auto-reloaded with a
	 * wrong value on resume.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dma_scr &= ~(STM32_DMA_SCR_DBM | STM32_DMA_SCR_CIRC);
		stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);
	}

	chan->chan_reg.dma_sndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));

	dev_dbg(chan2dev(chan), "vchan %pK: paused\n", &chan->vchan);
}

static void stm32_dma_post_resume_reconfigure(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct stm32_dma_sg_req *sg_req;
	u32 dma_scr, status, id;

	id = chan->id;
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));

	/* Clear interrupt status if it is there */
	status = stm32_dma_irq_status(chan);
	if (status)
		stm32_dma_irq_clear(chan, status);

	if (!chan->next_sg)
		sg_req = &chan->desc->sg_req[chan->desc->num_sgs - 1];
	else
		sg_req = &chan->desc->sg_req[chan->next_sg - 1];

	/* Reconfigure NDTR with the initial value */
	stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), sg_req->chan_reg.dma_sndtr);

	/* Restore SPAR */
	stm32_dma_write(dmadev, STM32_DMA_SPAR(id), sg_req->chan_reg.dma_spar);

	/* Restore SM0AR/SM1AR whatever DBM/CT, as they may have been modified */
	stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), sg_req->chan_reg.dma_sm0ar);
	stm32_dma_write(dmadev, STM32_DMA_SM1AR(id), sg_req->chan_reg.dma_sm1ar);

	/* Reactivate CIRC/DBM if needed */
	if (chan->chan_reg.dma_scr & STM32_DMA_SCR_DBM) {
		dma_scr |= STM32_DMA_SCR_DBM;

		if (chan->chan_reg.dma_scr & STM32_DMA_SCR_CT)
			dma_scr &= ~STM32_DMA_SCR_CT;
		else
			dma_scr |= STM32_DMA_SCR_CT;
	} else if (chan->chan_reg.dma_scr & STM32_DMA_SCR_CIRC) {
		dma_scr |= STM32_DMA_SCR_CIRC;
	}
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);

	stm32_dma_configure_next_sg(chan);

	stm32_dma_dump_reg(chan);

	dma_scr |= STM32_DMA_SCR_EN;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);

	dev_dbg(chan2dev(chan), "vchan %pK: reconfigured after pause/resume\n", &chan->vchan);
}

static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan, u32 scr)
{
	if (!chan->desc)
		return;

	if (chan->desc->cyclic) {
		vchan_cyclic_callback(&chan->desc->vdesc);
		stm32_dma_sg_inc(chan);
		/* cyclic with CIRC/DBM disabled => post resume reconfiguration needed */
		if (!(scr & (STM32_DMA_SCR_CIRC | STM32_DMA_SCR_DBM)))
			stm32_dma_post_resume_reconfigure(chan);
		else if (scr & STM32_DMA_SCR_DBM)
			stm32_dma_configure_next_sg(chan);
	} else {
		chan->busy = false;
		chan->status = DMA_COMPLETE;
		if (chan->next_sg == chan->desc->num_sgs) {
			vchan_cookie_complete(&chan->desc->vdesc);
			chan->desc = NULL;
		}
		stm32_dma_start_transfer(chan);
	}
}

static irqreturn_t stm32_dma_chan_irq(int irq, void *devid)
{
	struct stm32_dma_chan *chan = devid;
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 status, scr, sfcr;

	spin_lock(&chan->vchan.lock);

	status = stm32_dma_irq_status(chan);
	scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));

	if (status & STM32_DMA_FEI) {
		stm32_dma_irq_clear(chan, STM32_DMA_FEI);
		status &= ~STM32_DMA_FEI;
		if (sfcr & STM32_DMA_SFCR_FEIE) {
			if (!(scr & STM32_DMA_SCR_EN) &&
			    !(status & STM32_DMA_TCI))
				dev_err(chan2dev(chan), "FIFO Error\n");
			else
				dev_dbg(chan2dev(chan), "FIFO over/underrun\n");
		}
	}
	if (status & STM32_DMA_DMEI) {
		stm32_dma_irq_clear(chan, STM32_DMA_DMEI);
		status &= ~STM32_DMA_DMEI;
		if (sfcr & STM32_DMA_SCR_DMEIE)
			dev_dbg(chan2dev(chan), "Direct mode overrun\n");
	}

	if (status & STM32_DMA_TCI) {
		stm32_dma_irq_clear(chan, STM32_DMA_TCI);
		if (scr & STM32_DMA_SCR_TCIE) {
			if (chan->status == DMA_PAUSED && !(scr & STM32_DMA_SCR_EN))
				stm32_dma_handle_chan_paused(chan);
			else
				stm32_dma_handle_chan_done(chan, scr);
		}
		status &= ~STM32_DMA_TCI;
	}

	if (status & STM32_DMA_HTI) {
		stm32_dma_irq_clear(chan, STM32_DMA_HTI);
		status &= ~STM32_DMA_HTI;
	}

	if (status) {
		stm32_dma_irq_clear(chan, status);
		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
		if (!(scr & STM32_DMA_SCR_EN))
			dev_err(chan2dev(chan), "chan disabled by HW\n");
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

static void stm32_dma_issue_pending(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (vchan_issue_pending(&chan->vchan) && !chan->desc && !chan->busy) {
		dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
		stm32_dma_start_transfer(chan);
	}
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_dma_pause(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	unsigned long flags;
	int ret;

	if (chan->status != DMA_IN_PROGRESS)
		return -EPERM;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	ret = stm32_dma_disable_chan(chan);
	/*
	 * Disabling the stream raises a transfer complete flag to signal the
	 * interruption, so the paused state is finalized in the interrupt
	 * handler.
	 */
	if (!ret)
		chan->status = DMA_PAUSED;

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return ret;
}

static int stm32_dma_resume(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct stm32_dma_chan_reg chan_reg = chan->chan_reg;
	u32 id = chan->id, scr, ndtr, offset, spar, sm0ar, sm1ar;
	struct stm32_dma_sg_req *sg_req;
	unsigned long flags;

	if (chan->status != DMA_PAUSED)
		return -EPERM;

	scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));
	if (WARN_ON(scr & STM32_DMA_SCR_EN))
		return -EPERM;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/*
	 * The previous sg_req holds the NDTR/SPAR/SM0AR/SM1AR values that
	 * were initially programmed for the period that got paused.
	 */
	if (!chan->next_sg)
		sg_req = &chan->desc->sg_req[chan->desc->num_sgs - 1];
	else
		sg_req = &chan->desc->sg_req[chan->next_sg - 1];

	ndtr = sg_req->chan_reg.dma_sndtr;
	offset = (ndtr - chan_reg.dma_sndtr) << STM32_DMA_SCR_PSIZE_GET(chan_reg.dma_scr);
	spar = sg_req->chan_reg.dma_spar;
	sm0ar = sg_req->chan_reg.dma_sm0ar;
	sm1ar = sg_req->chan_reg.dma_sm1ar;

	/*
	 * Move the peripheral and memory pointers forward by the amount
	 * already transferred in the current period, but only when the
	 * corresponding increment mode is enabled.
	 */
	if (chan_reg.dma_scr & STM32_DMA_SCR_PINC)
		stm32_dma_write(dmadev, STM32_DMA_SPAR(id), spar + offset);
	else
		stm32_dma_write(dmadev, STM32_DMA_SPAR(id), spar);

	if (!(chan_reg.dma_scr & STM32_DMA_SCR_MINC))
		offset = 0;

	/*
	 * Update the memory target that was in use when the transfer was
	 * paused: SM1AR in double buffer mode with CT set, SM0AR otherwise.
	 */
	if ((chan_reg.dma_scr & STM32_DMA_SCR_DBM) && (chan_reg.dma_scr & STM32_DMA_SCR_CT))
		stm32_dma_write(dmadev, STM32_DMA_SM1AR(id), sm1ar + offset);
	else
		stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), sm0ar + offset);

	/* NDTR must be restored, otherwise the internal HW counter is not correctly reset */
	stm32_dma_write(dmadev, STM32_DMA_SNDTR(id), chan_reg.dma_sndtr);

	/*
	 * Temporarily deactivate CIRC/DBM until the next transfer complete
	 * interrupt, otherwise the NDTR autoreload value would be lower than
	 * the initial period length.
	 */
	if (chan_reg.dma_scr & (STM32_DMA_SCR_CIRC | STM32_DMA_SCR_DBM))
		chan_reg.dma_scr &= ~(STM32_DMA_SCR_CIRC | STM32_DMA_SCR_DBM);

	if (chan_reg.dma_scr & STM32_DMA_SCR_DBM)
		stm32_dma_configure_next_sg(chan);

	stm32_dma_dump_reg(chan);

	/* Re-enable the stream to resume the transfer */
	chan->status = DMA_IN_PROGRESS;
	chan_reg.dma_scr |= STM32_DMA_SCR_EN;
	stm32_dma_write(dmadev, STM32_DMA_SCR(id), chan_reg.dma_scr);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resumed\n", &chan->vchan);

	return 0;
}

static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan,
				    enum dma_transfer_direction direction,
				    enum dma_slave_buswidth *buswidth,
				    u32 buf_len, dma_addr_t buf_addr)
{
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	int src_bus_width, dst_bus_width;
	int src_burst_size, dst_burst_size;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 dma_scr, fifoth;

	src_addr_width = chan->dma_sconfig.src_addr_width;
	dst_addr_width = chan->dma_sconfig.dst_addr_width;
	src_maxburst = chan->dma_sconfig.src_maxburst;
	dst_maxburst = chan->dma_sconfig.dst_maxburst;
	fifoth = chan->threshold;

	switch (direction) {
	case DMA_MEM_TO_DEV:
		/* Set device data size */
		dst_bus_width = stm32_dma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;

		/* Set device burst size */
		dst_best_burst = stm32_dma_get_best_burst(buf_len,
							  dst_maxburst,
							  fifoth,
							  dst_addr_width);

		dst_burst_size = stm32_dma_get_burst(chan, dst_best_burst);
		if (dst_burst_size < 0)
			return dst_burst_size;

		/* Set memory data size */
		src_addr_width = stm32_dma_get_max_width(buf_len, buf_addr,
							 fifoth);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_dma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;

		/*
		 * Set memory burst size - a burst is not possible if the
		 * address is not aligned on a boundary equal to the size of
		 * the transfer.
		 */
		if (buf_addr & (buf_len - 1))
			src_maxburst = 1;
		else
			src_maxburst = STM32_DMA_MAX_BURST;
		src_best_burst = stm32_dma_get_best_burst(buf_len,
							  src_maxburst,
							  fifoth,
							  src_addr_width);
		src_burst_size = stm32_dma_get_burst(chan, src_best_burst);
		if (src_burst_size < 0)
			return src_burst_size;

		dma_scr = STM32_DMA_SCR_DIR(STM32_DMA_MEM_TO_DEV) |
			STM32_DMA_SCR_PSIZE(dst_bus_width) |
			STM32_DMA_SCR_MSIZE(src_bus_width) |
			STM32_DMA_SCR_PBURST(dst_burst_size) |
			STM32_DMA_SCR_MBURST(src_burst_size);

		/* Set FIFO threshold */
		chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_FTH_MASK;
		if (fifoth != STM32_DMA_FIFO_THRESHOLD_NONE)
			chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_FTH(fifoth);

		/* Set peripheral address */
		chan->chan_reg.dma_spar = chan->dma_sconfig.dst_addr;
		*buswidth = dst_addr_width;
		break;

	case DMA_DEV_TO_MEM:
		/* Set device data size */
		src_bus_width = stm32_dma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;

		/* Set device burst size */
		src_best_burst = stm32_dma_get_best_burst(buf_len,
							  src_maxburst,
							  fifoth,
							  src_addr_width);
		chan->mem_burst = src_best_burst;
		src_burst_size = stm32_dma_get_burst(chan, src_best_burst);
		if (src_burst_size < 0)
			return src_burst_size;

		/* Set memory data size */
		dst_addr_width = stm32_dma_get_max_width(buf_len, buf_addr,
							 fifoth);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_dma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;

		/*
		 * Set memory burst size - a burst is not possible if the
		 * address is not aligned on a boundary equal to the size of
		 * the transfer.
		 */
		if (buf_addr & (buf_len - 1))
			dst_maxburst = 1;
		else
			dst_maxburst = STM32_DMA_MAX_BURST;
		dst_best_burst = stm32_dma_get_best_burst(buf_len,
							  dst_maxburst,
							  fifoth,
							  dst_addr_width);
		chan->mem_burst = dst_best_burst;
		dst_burst_size = stm32_dma_get_burst(chan, dst_best_burst);
		if (dst_burst_size < 0)
			return dst_burst_size;

		dma_scr = STM32_DMA_SCR_DIR(STM32_DMA_DEV_TO_MEM) |
			STM32_DMA_SCR_PSIZE(src_bus_width) |
			STM32_DMA_SCR_MSIZE(dst_bus_width) |
			STM32_DMA_SCR_PBURST(src_burst_size) |
			STM32_DMA_SCR_MBURST(dst_burst_size);

		/* Set FIFO threshold */
		chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_FTH_MASK;
		if (fifoth != STM32_DMA_FIFO_THRESHOLD_NONE)
			chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_FTH(fifoth);

		/* Set peripheral address */
		chan->chan_reg.dma_spar = chan->dma_sconfig.src_addr;
		*buswidth = chan->dma_sconfig.src_addr_width;
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	stm32_dma_set_fifo_config(chan, src_best_burst, dst_best_burst);

	/* Set DMA control register */
	chan->chan_reg.dma_scr &= ~(STM32_DMA_SCR_DIR_MASK |
			STM32_DMA_SCR_PSIZE_MASK | STM32_DMA_SCR_MSIZE_MASK |
			STM32_DMA_SCR_PBURST_MASK | STM32_DMA_SCR_MBURST_MASK);
	chan->chan_reg.dma_scr |= dma_scr;

	return 0;
}

static void stm32_dma_clear_reg(struct stm32_dma_chan_reg *regs)
{
	memset(regs, 0, sizeof(struct stm32_dma_chan_reg));
}

static struct dma_async_tx_descriptor *stm32_dma_prep_slave_sg(
	struct dma_chan *c, struct scatterlist *sgl,
	u32 sg_len, enum dma_transfer_direction direction,
	unsigned long flags, void *context)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_desc *desc;
	struct scatterlist *sg;
	enum dma_slave_buswidth buswidth;
	u32 nb_data_items;
	int i, ret;

	if (!chan->config_init) {
		dev_err(chan2dev(chan), "dma channel is not configured\n");
		return NULL;
	}

	if (sg_len < 1) {
		dev_err(chan2dev(chan), "Invalid segment length %d\n", sg_len);
		return NULL;
	}

	desc = kzalloc(struct_size(desc, sg_req, sg_len), GFP_NOWAIT);
	if (!desc)
		return NULL;

	/* Set peripheral flow controller */
	if (chan->dma_sconfig.device_fc)
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_PFCTRL;
	else
		chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;

	for_each_sg(sgl, sg, sg_len, i) {
		ret = stm32_dma_set_xfer_param(chan, direction, &buswidth,
					       sg_dma_len(sg),
					       sg_dma_address(sg));
		if (ret < 0)
			goto err;

		desc->sg_req[i].len = sg_dma_len(sg);

		nb_data_items = desc->sg_req[i].len / buswidth;
		if (nb_data_items > STM32_DMA_ALIGNED_MAX_DATA_ITEMS) {
			dev_err(chan2dev(chan), "nb items not supported\n");
			goto err;
		}

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr;
		desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr;
		desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
		desc->sg_req[i].chan_reg.dma_sm0ar = sg_dma_address(sg);
		desc->sg_req[i].chan_reg.dma_sm1ar = sg_dma_address(sg);
		desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
	}

	desc->num_sgs = sg_len;
	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

err:
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *stm32_dma_prep_dma_cyclic(
	struct dma_chan *c, dma_addr_t buf_addr, size_t buf_len,
	size_t period_len, enum dma_transfer_direction direction,
	unsigned long flags)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_desc *desc;
	enum dma_slave_buswidth buswidth;
	u32 num_periods, nb_data_items;
	int i, ret;

	if (!buf_len || !period_len) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (!chan->config_init) {
		dev_err(chan2dev(chan), "dma channel is not configured\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	/*
	 * A new cyclic transfer can only be requested while the channel is
	 * idle; an ongoing transfer must be terminated first.
	 */
	if (chan->busy) {
		dev_err(chan2dev(chan), "Request not allowed when dma busy\n");
		return NULL;
	}

	ret = stm32_dma_set_xfer_param(chan, direction, &buswidth, period_len,
				       buf_addr);
	if (ret < 0)
		return NULL;

	nb_data_items = period_len / buswidth;
	if (nb_data_items > STM32_DMA_ALIGNED_MAX_DATA_ITEMS) {
		dev_err(chan2dev(chan), "number of items not supported\n");
		return NULL;
	}

	/* Enable circular mode or double buffer mode */
	if (buf_len == period_len) {
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_CIRC;
	} else {
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM;
		chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_CT;
	}

	/* Clear peripheral flow controller if the client set it */
	chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;

	num_periods = buf_len / period_len;

	desc = kzalloc(struct_size(desc, sg_req, num_periods), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < num_periods; i++) {
		desc->sg_req[i].len = period_len;

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr;
		desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr;
		desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
		desc->sg_req[i].chan_reg.dma_sm0ar = buf_addr;
		desc->sg_req[i].chan_reg.dma_sm1ar = buf_addr;
		desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
		buf_addr += period_len;
	}

	desc->num_sgs = num_periods;
	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static struct dma_async_tx_descriptor *stm32_dma_prep_dma_memcpy(
	struct dma_chan *c, dma_addr_t dest,
	dma_addr_t src, size_t len, unsigned long flags)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	enum dma_slave_buswidth max_width;
	struct stm32_dma_desc *desc;
	size_t xfer_count, offset;
	u32 num_sgs, best_burst, dma_burst, threshold;
	int i;

	num_sgs = DIV_ROUND_UP(len, STM32_DMA_ALIGNED_MAX_DATA_ITEMS);
	desc = kzalloc(struct_size(desc, sg_req, num_sgs), GFP_NOWAIT);
	if (!desc)
		return NULL;

	threshold = chan->threshold;

	for (offset = 0, i = 0; offset < len; offset += xfer_count, i++) {
		xfer_count = min_t(size_t, len - offset,
				   STM32_DMA_ALIGNED_MAX_DATA_ITEMS);

		/* Compute best burst size */
		max_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		best_burst = stm32_dma_get_best_burst(len, STM32_DMA_MAX_BURST,
						      threshold, max_width);
		dma_burst = stm32_dma_get_burst(chan, best_burst);

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr =
			STM32_DMA_SCR_DIR(STM32_DMA_MEM_TO_MEM) |
			STM32_DMA_SCR_PBURST(dma_burst) |
			STM32_DMA_SCR_MBURST(dma_burst) |
			STM32_DMA_SCR_MINC |
			STM32_DMA_SCR_PINC |
			STM32_DMA_SCR_TCIE |
			STM32_DMA_SCR_TEIE;
		desc->sg_req[i].chan_reg.dma_sfcr |= STM32_DMA_SFCR_MASK;
		desc->sg_req[i].chan_reg.dma_sfcr |=
			STM32_DMA_SFCR_FTH(threshold);
		desc->sg_req[i].chan_reg.dma_spar = src + offset;
		desc->sg_req[i].chan_reg.dma_sm0ar = dest + offset;
		desc->sg_req[i].chan_reg.dma_sndtr = xfer_count;
		desc->sg_req[i].len = xfer_count;
	}

	desc->num_sgs = num_sgs;
	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static u32 stm32_dma_get_remaining_bytes(struct stm32_dma_chan *chan)
{
	u32 dma_scr, width, ndtr;
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);

	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	width = STM32_DMA_SCR_PSIZE_GET(dma_scr);
	ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));

	return ndtr << width;
}
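
/*
 * Example: if DMA_SxCR.PSIZE is half-word (encoding 1) and NDTR reads 10, the
 * helper above reports 10 << 1 = 20 bytes still to be transferred.
 */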

/*
 * stm32_dma_is_current_sg - check that the expected sg_req is the one being
 * transferred.
 *
 * Called with the channel lock held. In double buffer mode the hardware may
 * have switched to the next memory target behind our back, so compare the
 * memory address currently programmed (SM0AR or SM1AR depending on CT) with
 * the expected sg_req. Returns true if the expected transfer is still
 * running, or if double buffer mode is not used.
 */
static bool stm32_dma_is_current_sg(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct stm32_dma_sg_req *sg_req;
	u32 dma_scr, dma_smar, id, period_len;

	id = chan->id;
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));

	/* If double buffer mode is not used, the current sg is always the expected one */
	if (!(dma_scr & STM32_DMA_SCR_DBM))
		return true;

	sg_req = &chan->desc->sg_req[chan->next_sg];
	period_len = sg_req->len;

	/* DBM is used: check which memory target (CT) is programmed */
	if (dma_scr & STM32_DMA_SCR_CT) {
		dma_smar = stm32_dma_read(dmadev, STM32_DMA_SM0AR(id));
		/*
		 * After a pause/resume the address may have been advanced by
		 * an offset, so accept any value within the expected period.
		 */
		return (dma_smar >= sg_req->chan_reg.dma_sm0ar &&
			dma_smar < sg_req->chan_reg.dma_sm0ar + period_len);
	}

	dma_smar = stm32_dma_read(dmadev, STM32_DMA_SM1AR(id));
	/*
	 * After a pause/resume the address may have been advanced by
	 * an offset, so accept any value within the expected period.
	 */
	return (dma_smar >= sg_req->chan_reg.dma_sm1ar &&
		dma_smar < sg_req->chan_reg.dma_sm1ar + period_len);
}

static size_t stm32_dma_desc_residue(struct stm32_dma_chan *chan,
				     struct stm32_dma_desc *desc,
				     u32 next_sg)
{
	u32 modulo, burst_size;
	u32 residue;
	u32 n_sg = next_sg;
	struct stm32_dma_sg_req *sg_req = &chan->desc->sg_req[chan->next_sg];
	int i;

	/*
	 * The residue is derived from the sg_req expected to be in flight
	 * (chan->next_sg) and the hardware position inside it (NDTR).
	 *
	 * In cyclic/double buffer mode the stream registers are reloaded
	 * automatically at each period end, so the hardware may already have
	 * switched period by the time NDTR is read. stm32_dma_is_current_sg()
	 * detects that race, and the computation below then falls back to
	 * whole period lengths.
	 */
	residue = stm32_dma_get_remaining_bytes(chan);

	if (chan->desc->cyclic && !stm32_dma_is_current_sg(chan)) {
		n_sg++;
		if (n_sg == chan->desc->num_sgs)
			n_sg = 0;
		residue = sg_req->len;
	}

	/*
	 * In cyclic mode, for the last period, the residue is just the bytes
	 * remaining from NDTR; for all other periods the lengths of the
	 * following sg_reqs up to the end of the buffer are added. In
	 * non-cyclic mode the lengths of all not-yet-transferred sg_reqs are
	 * always added.
	 */
	if (!chan->desc->cyclic || n_sg != 0)
		for (i = n_sg; i < desc->num_sgs; i++)
			residue += desc->sg_req[i].len;

	if (!chan->mem_burst)
		return residue;

	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}

static enum dma_status stm32_dma_tx_status(struct dma_chan *c,
					   dma_cookie_t cookie,
					   struct dma_tx_state *state)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if (status == DMA_COMPLETE)
		return status;

	status = chan->status;

	if (!state)
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_dma_desc_residue(chan, chan->desc,
						 chan->next_sg);
	else if (vdesc)
		residue = stm32_dma_desc_residue(chan,
						 to_stm32_dma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static int stm32_dma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	int ret;

	chan->config_init = false;

	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
	if (ret < 0)
		return ret;

	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		pm_runtime_put(dmadev->ddev.dev);

	return ret;
}

static void stm32_dma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_dma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	pm_runtime_put(dmadev->ddev.dev);

	vchan_free_chan_resources(to_virt_chan(c));
	stm32_dma_clear_reg(&chan->chan_reg);
	chan->threshold = 0;
}

static void stm32_dma_desc_free(struct virt_dma_desc *vdesc)
{
	kfree(container_of(vdesc, struct stm32_dma_desc, vdesc));
}

static void stm32_dma_set_config(struct stm32_dma_chan *chan,
				 struct stm32_dma_cfg *cfg)
{
	stm32_dma_clear_reg(&chan->chan_reg);

	chan->chan_reg.dma_scr = cfg->stream_config & STM32_DMA_SCR_CFG_MASK;
	chan->chan_reg.dma_scr |= STM32_DMA_SCR_REQ(cfg->request_line);

	/* Enable interrupts */
	chan->chan_reg.dma_scr |= STM32_DMA_SCR_TEIE | STM32_DMA_SCR_TCIE;

	chan->threshold = STM32_DMA_THRESHOLD_FTR_GET(cfg->features);
	if (STM32_DMA_DIRECT_MODE_GET(cfg->features))
		chan->threshold = STM32_DMA_FIFO_THRESHOLD_NONE;
	if (STM32_DMA_ALT_ACK_MODE_GET(cfg->features))
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_TRBUFF;
}

static struct dma_chan *stm32_dma_of_xlate(struct of_phandle_args *dma_spec,
					   struct of_dma *ofdma)
{
	struct stm32_dma_device *dmadev = ofdma->of_dma_data;
	struct device *dev = dmadev->ddev.dev;
	struct stm32_dma_cfg cfg;
	struct stm32_dma_chan *chan;
	struct dma_chan *c;

	if (dma_spec->args_count < 4) {
		dev_err(dev, "Bad number of cells\n");
		return NULL;
	}

	cfg.channel_id = dma_spec->args[0];
	cfg.request_line = dma_spec->args[1];
	cfg.stream_config = dma_spec->args[2];
	cfg.features = dma_spec->args[3];

	if (cfg.channel_id >= STM32_DMA_MAX_CHANNELS ||
	    cfg.request_line >= STM32_DMA_MAX_REQUEST_ID) {
		dev_err(dev, "Bad channel and/or request id\n");
		return NULL;
	}

	chan = &dmadev->chan[cfg.channel_id];

	c = dma_get_slave_channel(&chan->vchan.chan);
	if (!c) {
		dev_err(dev, "No more channels available\n");
		return NULL;
	}

	stm32_dma_set_config(chan, &cfg);

	return c;
}

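/*
 * Illustrative shape of the dma specifier consumed above (the cell names here
 * are descriptive only, not taken from the binding document):
 *
 *	dmas = <&dma_ctrl channel_id request_line stream_config features>;
 *
 * channel_id selects one of the eight streams, request_line selects the
 * peripheral request (0..7), stream_config provides the PINC/MINC/PINCOS/
 * priority part of DMA_SxCR (masked with STM32_DMA_SCR_CFG_MASK), and
 * features carries the FIFO threshold, direct mode and alternative ACK
 * (TRBUFF) bits handled by stm32_dma_set_config().
 */
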
static const struct of_device_id stm32_dma_of_match[] = {
	{ .compatible = "st,stm32-dma", },
	{ },
};
MODULE_DEVICE_TABLE(of, stm32_dma_of_match);

static int stm32_dma_probe(struct platform_device *pdev)
{
	struct stm32_dma_chan *chan;
	struct stm32_dma_device *dmadev;
	struct dma_device *dd;
	const struct of_device_id *match;
	struct resource *res;
	struct reset_control *rst;
	int i, ret;

	match = of_match_device(stm32_dma_of_match, &pdev->dev);
	if (!match) {
		dev_err(&pdev->dev, "Error: No device match found\n");
		return -ENODEV;
	}

	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev), GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dd = &dmadev->ddev;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk), "Can't get clock\n");

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
		return ret;
	}

	dmadev->mem2mem = of_property_read_bool(pdev->dev.of_node,
						"st,mem2mem");

	rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto clk_free;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	dma_set_max_seg_size(&pdev->dev, STM32_DMA_ALIGNED_MAX_DATA_ITEMS);

	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_dma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_dma_free_chan_resources;
	dd->device_tx_status = stm32_dma_tx_status;
	dd->device_issue_pending = stm32_dma_issue_pending;
	dd->device_prep_slave_sg = stm32_dma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_dma_prep_dma_cyclic;
	dd->device_config = stm32_dma_slave_config;
	dd->device_pause = stm32_dma_pause;
	dd->device_resume = stm32_dma_resume;
	dd->device_terminate_all = stm32_dma_terminate_all;
	dd->device_synchronize = stm32_dma_synchronize;
	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->copy_align = DMAENGINE_ALIGN_32_BYTES;
	dd->max_burst = STM32_DMA_MAX_BURST;
	dd->max_sg_burst = STM32_DMA_ALIGNED_MAX_DATA_ITEMS;
	dd->descriptor_reuse = true;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	if (dmadev->mem2mem) {
		dma_cap_set(DMA_MEMCPY, dd->cap_mask);
		dd->device_prep_dma_memcpy = stm32_dma_prep_dma_memcpy;
		dd->directions |= BIT(DMA_MEM_TO_MEM);
	}

	for (i = 0; i < STM32_DMA_MAX_CHANNELS; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;
		chan->vchan.desc_free = stm32_dma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	ret = dma_async_device_register(dd);
	if (ret)
		goto clk_free;

	for (i = 0; i < STM32_DMA_MAX_CHANNELS; i++) {
		chan = &dmadev->chan[i];
		ret = platform_get_irq(pdev, i);
		if (ret < 0)
			goto err_unregister;
		chan->irq = ret;

		ret = devm_request_irq(&pdev->dev, chan->irq,
				       stm32_dma_chan_irq, 0,
				       dev_name(chan2dev(chan)), chan);
		if (ret) {
			dev_err(&pdev->dev,
				"request_irq failed with err %d channel %d\n",
				ret, i);
			goto err_unregister;
		}
	}

	ret = of_dma_controller_register(pdev->dev.of_node,
					 stm32_dma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 DMA OF registration failed %d\n", ret);
		goto err_unregister;
	}

	platform_set_drvdata(pdev, dmadev);

	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_put(&pdev->dev);

	dev_info(&pdev->dev, "STM32 DMA driver registered\n");

	return 0;

err_unregister:
	dma_async_device_unregister(dd);
clk_free:
	clk_disable_unprepare(dmadev->clk);

	return ret;
}

#ifdef CONFIG_PM
static int stm32_dma_runtime_suspend(struct device *dev)
{
	struct stm32_dma_device *dmadev = dev_get_drvdata(dev);

	clk_disable_unprepare(dmadev->clk);

	return 0;
}

static int stm32_dma_runtime_resume(struct device *dev)
{
	struct stm32_dma_device *dmadev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(dmadev->clk);
	if (ret) {
		dev_err(dev, "failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

#ifdef CONFIG_PM_SLEEP
static int stm32_dma_pm_suspend(struct device *dev)
{
	struct stm32_dma_device *dmadev = dev_get_drvdata(dev);
	int id, ret, scr;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	for (id = 0; id < STM32_DMA_MAX_CHANNELS; id++) {
		scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));
		if (scr & STM32_DMA_SCR_EN) {
			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
			return -EBUSY;
		}
	}

	pm_runtime_put_sync(dev);

	pm_runtime_force_suspend(dev);

	return 0;
}

static int stm32_dma_pm_resume(struct device *dev)
{
	return pm_runtime_force_resume(dev);
}
#endif

static const struct dev_pm_ops stm32_dma_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_dma_pm_suspend, stm32_dma_pm_resume)
	SET_RUNTIME_PM_OPS(stm32_dma_runtime_suspend,
			   stm32_dma_runtime_resume, NULL)
};

static struct platform_driver stm32_dma_driver = {
	.driver = {
		.name = "stm32-dma",
		.of_match_table = stm32_dma_of_match,
		.pm = &stm32_dma_pm_ops,
	},
	.probe = stm32_dma_probe,
};

static int __init stm32_dma_init(void)
{
	return platform_driver_register(&stm32_dma_driver);
}
subsys_initcall(stm32_dma_init);