#include <linux/init.h>
#include <linux/platform_device.h>
#include <linux/io.h>
#include <linux/dma-mapping.h>
#include <linux/export.h>
#include <linux/spinlock.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/of.h>

#include <lantiq_soc.h>
#include <xway_dma.h>

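/*
 * Register offsets of the central DMA core. The per-channel registers
 * (LTQ_DMA_CCTRL, LTQ_DMA_CDBA, LTQ_DMA_CDLEN, LTQ_DMA_CIS, LTQ_DMA_CIE)
 * are banked: software first writes the channel number to LTQ_DMA_CS and
 * then accesses the registers of the selected channel.
 */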
#define LTQ_DMA_ID		0x08
#define LTQ_DMA_CTRL		0x10
#define LTQ_DMA_CPOLL		0x14
#define LTQ_DMA_CS		0x18
#define LTQ_DMA_CCTRL		0x1C
#define LTQ_DMA_CDBA		0x20
#define LTQ_DMA_CDLEN		0x24
#define LTQ_DMA_CIS		0x28
#define LTQ_DMA_CIE		0x2C
#define LTQ_DMA_PS		0x40
#define LTQ_DMA_PCTRL		0x44
#define LTQ_DMA_IRNEN		0xf4

#define DMA_ID_CHNR		GENMASK(26, 20)	/* channel count field */
#define DMA_DESCPT		BIT(3)		/* descriptor complete irq */
#define DMA_TX			BIT(8)		/* TX channel direction */
#define DMA_CHAN_ON		BIT(0)		/* channel on / off bit */
#define DMA_PDEN		BIT(6)		/* enable packet drop */
#define DMA_CHAN_RST		BIT(1)		/* channel reset */
#define DMA_RESET		BIT(0)		/* global engine reset */
#define DMA_IRQ_ACK		0x7e		/* ack all channel irq bits */
#define DMA_POLL		BIT(31)		/* turn on channel polling */
#define DMA_CLK_DIV4		BIT(6)		/* polling clock divider */
#define DMA_PCTRL_2W_BURST	0x1		/* 2 word burst length */
#define DMA_PCTRL_4W_BURST	0x2		/* 4 word burst length */
#define DMA_PCTRL_8W_BURST	0x3		/* 8 word burst length */
#define DMA_TX_BURST_SHIFT	4		/* tx burst shift */
#define DMA_RX_BURST_SHIFT	2		/* rx burst shift */
#define DMA_ETOP_ENDIANNESS	(0xf << 8)	/* endianness swap etop channels */
#define DMA_WEIGHT	(BIT(17) | BIT(16))	/* default channel weight */

#define ltq_dma_r32(x)			ltq_r32(ltq_dma_membase + (x))
#define ltq_dma_w32(x, y)		ltq_w32(x, ltq_dma_membase + (y))
#define ltq_dma_w32_mask(x, y, z)	ltq_w32_mask(x, y, \
						ltq_dma_membase + (z))

static void __iomem *ltq_dma_membase;
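/*
 * Accesses to the banked per-channel registers must be serialized: a
 * concurrent caller could otherwise move LTQ_DMA_CS between the channel
 * select and the following register access.
 */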
static DEFINE_SPINLOCK(ltq_dma_lock);

void
ltq_dma_enable_irq(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(0, 1 << ch->nr, LTQ_DMA_IRNEN);
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_enable_irq);

void
ltq_dma_disable_irq(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(1 << ch->nr, 0, LTQ_DMA_IRNEN);
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_disable_irq);

void
ltq_dma_ack_irq(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32(DMA_IRQ_ACK, LTQ_DMA_CIS);
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_ack_irq);
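
/*
 * A client's interrupt handler typically acknowledges the channel before
 * processing its descriptors. Illustrative sketch only - the irq number,
 * handler registration and descriptor processing are up to the consumer:
 *
 *	static irqreturn_t client_dma_irq(int irq, void *priv)
 *	{
 *		struct ltq_dma_channel *ch = priv;
 *
 *		ltq_dma_disable_irq(ch);
 *		ltq_dma_ack_irq(ch);
 *		// ... process completed descriptors ...
 *		ltq_dma_enable_irq(ch);
 *		return IRQ_HANDLED;
 *	}
 */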

void
ltq_dma_open(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(0, DMA_CHAN_ON, LTQ_DMA_CCTRL);
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_open);

void
ltq_dma_close(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(DMA_CHAN_ON, 0, LTQ_DMA_CCTRL);
	ltq_dma_w32_mask(1 << ch->nr, 0, LTQ_DMA_IRNEN);
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_close);

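/*
 * Allocate a coherent descriptor ring of LTQ_DESC_NUM entries for the
 * channel, point the hardware at it and reset the channel. The ring
 * stays owned by the caller until ltq_dma_free().
 */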
static void
ltq_dma_alloc(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	ch->desc = 0;
	ch->desc_base = dma_alloc_coherent(ch->dev,
					   LTQ_DESC_NUM * LTQ_DESC_SIZE,
					   &ch->phys, GFP_ATOMIC);

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32(ch->phys, LTQ_DMA_CDBA);
	ltq_dma_w32(LTQ_DESC_NUM, LTQ_DMA_CDLEN);
	ltq_dma_w32_mask(DMA_CHAN_ON, 0, LTQ_DMA_CCTRL);
	wmb();
	ltq_dma_w32_mask(0, DMA_CHAN_RST, LTQ_DMA_CCTRL);
	while (ltq_dma_r32(LTQ_DMA_CCTRL) & DMA_CHAN_RST)
		;
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}

void
ltq_dma_alloc_tx(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	ltq_dma_alloc(ch);

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(DMA_DESCPT, LTQ_DMA_CIE);
	ltq_dma_w32_mask(0, 1 << ch->nr, LTQ_DMA_IRNEN);
	ltq_dma_w32(DMA_WEIGHT | DMA_TX, LTQ_DMA_CCTRL);
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_alloc_tx);

void
ltq_dma_alloc_rx(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	ltq_dma_alloc(ch);

	spin_lock_irqsave(&ltq_dma_lock, flags);
	ltq_dma_w32(DMA_DESCPT, LTQ_DMA_CIE);
	ltq_dma_w32_mask(0, 1 << ch->nr, LTQ_DMA_IRNEN);
	ltq_dma_w32(DMA_WEIGHT, LTQ_DMA_CCTRL);
	spin_unlock_irqrestore(&ltq_dma_lock, flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_alloc_rx);
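
/*
 * Typical lifetime of a channel as seen by a client driver (illustrative
 * sketch, not taken from a real consumer; the channel number and device
 * below are placeholders):
 *
 *	struct ltq_dma_channel ch = { 0 };
 *
 *	ch.nr = 6;			// hypothetical channel number
 *	ch.dev = &pdev->dev;		// device for coherent allocations
 *
 *	ltq_dma_alloc_rx(&ch);		// ring + per-channel setup
 *	ltq_dma_open(&ch);		// start the channel
 *	ltq_dma_enable_irq(&ch);
 *	// ... traffic flows, descriptors are serviced ...
 *	ltq_dma_free(&ch);		// closes the channel, frees the ring
 */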

void
ltq_dma_free(struct ltq_dma_channel *ch)
{
	if (!ch->desc_base)
		return;
	ltq_dma_close(ch);
	dma_free_coherent(ch->dev, LTQ_DESC_NUM * LTQ_DESC_SIZE,
			  ch->desc_base, ch->phys);
}
EXPORT_SYMBOL_GPL(ltq_dma_free);

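/*
 * Configure a DMA port and its burst lengths. The RX burst field occupies
 * bits 3:2 of LTQ_DMA_PCTRL (hence the 0x0c clear mask) and the TX burst
 * field bits 5:4 (0x30); both take the DMA_PCTRL_*W_BURST encodings.
 */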
void
ltq_dma_init_port(int p, int tx_burst, int rx_burst)
{
	ltq_dma_w32(p, LTQ_DMA_PS);
	switch (p) {
	case DMA_PORT_ETOP:
		/*
		 * Tell the DMA engine to swap the endianness of data frames
		 * and drop packets if the channel arbitration is stuck.
		 */
		ltq_dma_w32_mask(0, (DMA_ETOP_ENDIANNESS | DMA_PDEN),
				 LTQ_DMA_PCTRL);
		break;

	default:
		break;
	}

	switch (rx_burst) {
	case 8:
		ltq_dma_w32_mask(0x0c, (DMA_PCTRL_8W_BURST << DMA_RX_BURST_SHIFT),
				 LTQ_DMA_PCTRL);
		break;
	case 4:
		ltq_dma_w32_mask(0x0c, (DMA_PCTRL_4W_BURST << DMA_RX_BURST_SHIFT),
				 LTQ_DMA_PCTRL);
		break;
	case 2:
		ltq_dma_w32_mask(0x0c, (DMA_PCTRL_2W_BURST << DMA_RX_BURST_SHIFT),
				 LTQ_DMA_PCTRL);
		break;
	default:
		break;
	}

	switch (tx_burst) {
	case 8:
		ltq_dma_w32_mask(0x30, (DMA_PCTRL_8W_BURST << DMA_TX_BURST_SHIFT),
				 LTQ_DMA_PCTRL);
		break;
	case 4:
		ltq_dma_w32_mask(0x30, (DMA_PCTRL_4W_BURST << DMA_TX_BURST_SHIFT),
				 LTQ_DMA_PCTRL);
		break;
	case 2:
		ltq_dma_w32_mask(0x30, (DMA_PCTRL_2W_BURST << DMA_TX_BURST_SHIFT),
				 LTQ_DMA_PCTRL);
		break;
	default:
		break;
	}
}
EXPORT_SYMBOL_GPL(ltq_dma_init_port);

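/*
 * Probe: map the register window, enable the module clock, reset the
 * engine and bring every channel into a known (masked, polled, stopped)
 * state before clients attach.
 */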
static int
ltq_dma_init(struct platform_device *pdev)
{
	struct clk *clk;
	struct resource *res;
	unsigned int id, nchannels;
	int i;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ltq_dma_membase = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ltq_dma_membase))
		panic("Failed to remap dma resource");

	/* power up and reset the dma engine */
	clk = clk_get(&pdev->dev, NULL);
	if (IS_ERR(clk))
		panic("Failed to get dma clock");

	clk_enable(clk);
	ltq_dma_w32_mask(0, DMA_RESET, LTQ_DMA_CTRL);

	usleep_range(1, 10);

	/* disable all interrupts */
	ltq_dma_w32(0, LTQ_DMA_IRNEN);

	/* reset/configure each channel */
	id = ltq_dma_r32(LTQ_DMA_ID);
	nchannels = (id & DMA_ID_CHNR) >> 20;
	for (i = 0; i < nchannels; i++) {
		ltq_dma_w32(i, LTQ_DMA_CS);
		ltq_dma_w32(DMA_CHAN_RST, LTQ_DMA_CCTRL);
		ltq_dma_w32(DMA_POLL | DMA_CLK_DIV4, LTQ_DMA_CPOLL);
		ltq_dma_w32_mask(DMA_CHAN_ON, 0, LTQ_DMA_CCTRL);
	}

	dev_info(&pdev->dev,
		 "Init done - hw rev: %X, ports: %d, channels: %d\n",
		 id & 0x1f, (id >> 16) & 0xf, nchannels);

	return 0;
}

static const struct of_device_id dma_match[] = {
	{ .compatible = "lantiq,dma-xway" },
	{},
};

static struct platform_driver dma_driver = {
	.probe = ltq_dma_init,
	.driver = {
		.name = "dma-xway",
		.of_match_table = dma_match,
	},
};

int __init
dma_init(void)
{
	return platform_driver_register(&dma_driver);
}

postcore_initcall(dma_init);