// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 */

#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"

/* MDMA global interrupt/status register, one bit per channel (0..31) */
#define STM32_MDMA_GISR0		0x0000

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x))
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					| STM32_MDMA_CIFCR_CBTIF \
					| STM32_MDMA_CIFCR_CBRTIF \
					| STM32_MDMA_CIFCR_CCTCIF \
					| STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA Channel x configuration register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_SM		BIT(8)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		FIELD_PREP(STM32_MDMA_CCR_PL_MASK, (n))
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					| STM32_MDMA_CCR_BTIE \
					| STM32_MDMA_CCR_BRTIE \
					| STM32_MDMA_CCR_CTCIE \
					| STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		FIELD_PREP(STM32_MDMA_CTCR_TRGM_MSK, (n))
#define STM32_MDMA_CTCR_TRGM_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TRGM_MSK, (n))
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		FIELD_PREP(STM32_MDMA_CTCR_PAM_MASK, (n))
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		FIELD_PREP(STM32_MDMA_CTCR_TLEN_MSK, (n))
#define STM32_MDMA_CTCR_TLEN_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TLEN_MSK, (n))
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		FIELD_PREP(STM32_MDMA_CTCR_LEN2_MSK, (n))
#define STM32_MDMA_CTCR_LEN2_GET(n)	FIELD_GET(STM32_MDMA_CTCR_LEN2_MSK, (n))
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_DBURST_MASK, (n))
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_SBURST_MASK, (n))
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_DINCOS_MASK, (n))
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_SINCOS_MASK, (n))
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_DSIZE_MASK, (n))
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_SSIZE_MASK, (n))
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		FIELD_PREP(STM32_MDMA_CTCR_DINC_MASK, (n))
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		FIELD_PREP(STM32_MDMA_CTCR_SINC_MASK, (n))
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					| STM32_MDMA_CTCR_DINC_MASK \
					| STM32_MDMA_CTCR_SINCOS_MASK \
					| STM32_MDMA_CTCR_DINCOS_MASK \
					| STM32_MDMA_CTCR_LEN2_MSK \
					| STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BRC_MK, (n))
#define STM32_MDMA_CBNDTR_BRC_GET(n)	FIELD_GET(STM32_MDMA_CBNDTR_BRC_MK, (n))
#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BNDT_MASK, (n))

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_DUV_MASK, (n))
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_SUV_MASK, (n))

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
#define STM32_MDMA_CTBR_TSEL(n)		FIELD_PREP(STM32_MDMA_CTBR_TSEL_MASK, (n))

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		32
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x3

enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
};

struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy;
	u32 cmar;
	u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	u32 chan_reserved;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];
};

static struct stm32_mdma_device *stm32_mdma_get_dev(
	struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(struct_size(desc, node, count), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	desc->count = count;

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}

static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}

static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}
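
/*
 * Worked example (illustrative, not from the original source): with
 * buf_len = 96, tlen = 128, max_burst = 8 and a 4-byte bus width,
 * __ffs(128 | 96) = 5, so best_burst = min(1 << 5, 8 * 4) / 4 = 8 beats.
 * Each burst then moves 32 bytes and stays aligned on both the buffer
 * length and the transfer length.
 */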

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
	}

	chan->busy = false;
}

static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN), we have to set
	 * the number of bytes - 1 in the CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst size */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			  STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst size */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc: %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR:   0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR:   0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR:   0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR:  0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR:   0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR:   0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR:   0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR:   0x%08x\n\n", node->hwdesc->cmdr);
}

static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM |
			STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the channel has been set up in cyclic mode, it cannot service
	 * another request: it must first be aborted or terminated.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the channel has been set up in cyclic mode, it cannot service
	 * another request: it must first be aborted or terminated.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel has been set up in cyclic mode, it cannot service
	 * another request: it must first be aborted or terminated.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
		    STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}

static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_mdma_issue_pending(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (!vchan_issue_pending(&chan->vchan))
		goto end;

	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

	if (!chan->desc && !chan->busy)
		stm32_mdma_start_transfer(chan);

end:
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_mdma_pause(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	ret = stm32_mdma_disable_chan(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	if (!ret)
		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

	return ret;
}

static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}

static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc) {
		vchan_terminate_vdesc(&chan->desc->vdesc);
		if (chan->busy)
			stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_mdma_synchronize(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static int stm32_mdma_slave_config(struct dma_chan *c,
				   struct dma_slave_config *config)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	memcpy(&chan->dma_config, config, sizeof(*config));

	return 0;
}

static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
				      struct stm32_mdma_desc *desc,
				      u32 curr_hwdesc)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	u32 cbndtr, residue, modulo, burst_size;
	int i;

	residue = 0;
	for (i = curr_hwdesc + 1; i < desc->count; i++) {
		hwdesc = desc->node[i].hwdesc;
		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
	}
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	if (!chan->mem_burst)
		return residue;

	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}
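
/*
 * Worked example (illustrative, not from the original source): a residue
 * of 100 bytes on a channel using 8-beat bursts of 4 bytes
 * (burst_size = 32) gives modulo = 100 % 32 = 4 and is rounded up to
 * 100 - 4 + 32 = 128 bytes. This matches the
 * DMA_RESIDUE_GRANULARITY_BURST granularity advertised at probe time,
 * which only guarantees burst-level accuracy.
 */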

static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if ((status == DMA_COMPLETE) || (!state))
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_mdma_desc_residue(chan, chan->desc,
						  chan->curr_hwdesc);
	else if (vdesc)
		residue = stm32_mdma_desc_residue(chan,
						  to_stm32_mdma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

	/* Start the next transfer if this driver has a next desc */
	stm32_mdma_start_transfer(chan);
}

static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
{
	struct stm32_mdma_device *dmadev = devid;
	struct stm32_mdma_chan *chan;
	u32 reg, id, ccr, ien, status;

	/* Find out which channel generated the interrupt */
	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
	if (!status) {
		dev_dbg(mdma2dev(dmadev), "spurious it\n");
		return IRQ_NONE;
	}
	id = __ffs(status);
	chan = &dmadev->chan[id];

	/* Handle interrupt for the channel */
	spin_lock(&chan->vchan.lock);
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
	status &= ~STM32_MDMA_CISR_CRQA;
	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
	/* CCR interrupt-enable bits (5..1) align with CISR flag bits (4..0) */
	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;

	if (!(status & ien)) {
		spin_unlock(&chan->vchan.lock);
		if (chan->busy)
			dev_warn(chan2dev(chan),
				 "spurious it (status=0x%04x, ien=0x%04x)\n",
				 status, ien);
		else
			dev_dbg(chan2dev(chan),
				"spurious it (status=0x%04x, ien=0x%04x)\n",
				status, ien);
		return IRQ_NONE;
	}

	reg = STM32_MDMA_CIFCR(id);

	if (status & STM32_MDMA_CISR_TEIF) {
		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
		status &= ~STM32_MDMA_CISR_TEIF;
	}

	if (status & STM32_MDMA_CISR_CTCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
		status &= ~STM32_MDMA_CISR_CTCIF;
		stm32_mdma_xfer_end(chan);
	}

	if (status & STM32_MDMA_CISR_BRTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
		status &= ~STM32_MDMA_CISR_BRTIF;
	}

	if (status & STM32_MDMA_CISR_BTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
		status &= ~STM32_MDMA_CISR_BTIF;
		chan->curr_hwdesc++;
		if (chan->desc && chan->desc->cyclic) {
			if (chan->curr_hwdesc == chan->desc->count)
				chan->curr_hwdesc = 0;
			vchan_cyclic_callback(&chan->desc->vdesc);
		}
	}

	if (status & STM32_MDMA_CISR_TCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
		status &= ~STM32_MDMA_CISR_TCIF;
	}

	if (status) {
		stm32_mdma_set_bits(dmadev, reg, status);
		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
		if (!(ccr & STM32_MDMA_CCR_EN))
			dev_err(chan2dev(chan), "chan disabled by HW\n");
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
					   c->device->dev,
					   sizeof(struct stm32_mdma_hwdesc),
					   __alignof__(struct stm32_mdma_hwdesc),
					   0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
		return -ENOMEM;
	}

	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
	if (ret < 0)
		return ret;

	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		pm_runtime_put(dmadev->ddev.dev);

	return ret;
}

static void stm32_mdma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_mdma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	pm_runtime_put(dmadev->ddev.dev);
	vchan_free_chan_resources(to_virt_chan(c));
	dmam_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}

static bool stm32_mdma_filter_fn(struct dma_chan *c, void *fn_param)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	/* Reject channels reserved at probe time (e.g. marked secure) */
	if (dmadev->chan_reserved & BIT(chan->id))
		return false;

	return true;
}
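
/*
 * Translate a 5-cell DT dma-spec into a channel configuration. A consumer
 * node would look like this (illustrative values, not from the original
 * source):
 *
 *	dmas = <&mdma1 0x16 0x2 0x1000002 0x0 0x0>;
 *
 * where the cells are, in order: request line, priority level, CTCR
 * transfer configuration, mask address and mask data.
 */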
static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
	dma_cap_mask_t mask = dmadev->ddev.cap_mask;
	struct stm32_mdma_chan *chan;
	struct dma_chan *c;
	struct stm32_mdma_chan_config config;

	if (dma_spec->args_count < 5) {
		dev_err(mdma2dev(dmadev), "Bad number of args\n");
		return NULL;
	}

	config.request = dma_spec->args[0];
	config.priority_level = dma_spec->args[1];
	config.transfer_config = dma_spec->args[2];
	config.mask_addr = dma_spec->args[3];
	config.mask_data = dma_spec->args[4];

	if (config.request >= dmadev->nr_requests) {
		dev_err(mdma2dev(dmadev), "Bad request line\n");
		return NULL;
	}

	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
		return NULL;
	}

	c = __dma_request_channel(&mask, stm32_mdma_filter_fn, &config,
				  ofdma->of_node);
	if (!c) {
		dev_err(mdma2dev(dmadev), "No more channels available\n");
		return NULL;
	}

	chan = to_stm32_mdma_chan(c);
	chan->chan_config = config;

	return c;
}

static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);

static int stm32_mdma_probe(struct platform_device *pdev)
{
	struct stm32_mdma_chan *chan;
	struct stm32_mdma_device *dmadev;
	struct dma_device *dd;
	struct device_node *of_node;
	struct resource *res;
	struct reset_control *rst;
	u32 nr_channels, nr_requests;
	int i, count, ret;

	of_node = pdev->dev.of_node;
	if (!of_node)
		return -ENODEV;

	ret = device_property_read_u32(&pdev->dev, "dma-channels",
				       &nr_channels);
	if (ret) {
		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
			 nr_channels);
	}

	ret = device_property_read_u32(&pdev->dev, "dma-requests",
				       &nr_requests);
	if (ret) {
		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
			 nr_requests);
	}

	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
	if (count < 0)
		count = 0;

	dmadev = devm_kzalloc(&pdev->dev,
			      struct_size(dmadev, ahb_addr_masks, count),
			      GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;
	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
				       dmadev->ahb_addr_masks,
				       count);
	dmadev->nr_ahb_addr_masks = count;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
				     "Missing clock controller\n");

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "clk_prepare_enable error: %d\n", ret);
		return ret;
	}

	rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_clk;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	dd = &dmadev->ddev;
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
	dd->device_tx_status = stm32_mdma_tx_status;
	dd->device_issue_pending = stm32_mdma_issue_pending;
	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
	dd->device_config = stm32_mdma_slave_config;
	dd->device_pause = stm32_mdma_pause;
	dd->device_resume = stm32_mdma_resume;
	dd->device_terminate_all = stm32_mdma_terminate_all;
	dd->device_synchronize = stm32_mdma_synchronize;
	dd->descriptor_reuse = true;

	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
		BIT(DMA_MEM_TO_MEM);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_MDMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	for (i = 0; i < dmadev->nr_channels; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;

		/* Reserve channels with the CCR SM bit already set */
		if (stm32_mdma_read(dmadev, STM32_MDMA_CCR(i)) & STM32_MDMA_CCR_SM)
			dmadev->chan_reserved |= BIT(i);

		chan->vchan.desc_free = stm32_mdma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	dmadev->irq = platform_get_irq(pdev, 0);
	if (dmadev->irq < 0) {
		ret = dmadev->irq;
		goto err_clk;
	}

	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
			       0, dev_name(&pdev->dev), dmadev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		goto err_clk;
	}

	ret = dmaenginem_async_device_register(dd);
	if (ret)
		goto err_clk;

	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 MDMA DMA OF registration failed %d\n", ret);
		goto err_clk;
	}

	platform_set_drvdata(pdev, dmadev);
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_put(&pdev->dev);

	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");

	return 0;

err_clk:
	clk_disable_unprepare(dmadev->clk);

	return ret;
}

#ifdef CONFIG_PM
static int stm32_mdma_runtime_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);

	clk_disable_unprepare(dmadev->clk);

	return 0;
}

static int stm32_mdma_runtime_resume(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(dmadev->clk);
	if (ret) {
		dev_err(dev, "failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

#ifdef CONFIG_PM_SLEEP
static int stm32_mdma_pm_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	u32 ccr, id;
	int ret;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	for (id = 0; id < dmadev->nr_channels; id++) {
		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
		if (ccr & STM32_MDMA_CCR_EN) {
			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
			/* Drop the reference taken above before bailing out */
			pm_runtime_put_sync(dev);
			return -EBUSY;
		}
	}

	pm_runtime_put_sync(dev);

	return pm_runtime_force_suspend(dev);
}

static int stm32_mdma_pm_resume(struct device *dev)
{
	return pm_runtime_force_resume(dev);
}
#endif

static const struct dev_pm_ops stm32_mdma_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
			   stm32_mdma_runtime_resume, NULL)
};

static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
		.pm = &stm32_mdma_pm_ops,
	},
};

static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}

subsys_initcall(stm32_mdma_init);

MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
MODULE_LICENSE("GPL v2");