/*
 * Defines for using and allocating DMA channels on the Alchemy
 * Au1x00 MIPS processors.
 */
#ifndef __ASM_AU1000_DMA_H
#define __ASM_AU1000_DMA_H

#include <linux/io.h>		/* need byte IO */
#include <linux/spinlock.h>	/* And spinlocks */
#include <linux/delay.h>
#include <linux/interrupt.h>	/* irq_handler_t */

#define NUM_AU1000_DMA_CHANNELS	8

/* DMA Channel Register Offsets */
#define DMA_MODE_SET		0x00000000
#define DMA_MODE_READ		DMA_MODE_SET
#define DMA_MODE_CLEAR		0x00000004
/* DMA Mode register bits follow */
#define DMA_DAH_MASK		(0x0f << 20)
#define DMA_DID_BIT		16
#define DMA_DID_MASK		(0x0f << DMA_DID_BIT)	/* device ID */
#define DMA_DS			(1 << 15)	/* second bank of device IDs */
#define DMA_BE			(1 << 13)	/* endianness */
#define DMA_DR			(1 << 12)	/* transfer direction */
#define DMA_TS8			(1 << 11)	/* transfer size */
#define DMA_DW_BIT		9
#define DMA_DW_MASK		(0x03 << DMA_DW_BIT)
#define DMA_DW8			(0 << DMA_DW_BIT)	/* 8-bit FIFO width */
#define DMA_DW16		(1 << DMA_DW_BIT)	/* 16-bit FIFO width */
#define DMA_DW32		(2 << DMA_DW_BIT)	/* 32-bit FIFO width */
#define DMA_NC			(1 << 8)	/* non-coherent */
#define DMA_IE			(1 << 7)	/* interrupt enable */
#define DMA_HALT		(1 << 6)	/* channel halted (status) */
#define DMA_GO			(1 << 5)	/* start/continue transfers */
#define DMA_AB			(1 << 4)	/* active buffer (status) */
#define DMA_D1			(1 << 3)	/* buffer 1 done */
#define DMA_BE1			(1 << 2)	/* buffer 1 enable */
#define DMA_D0			(1 << 1)	/* buffer 0 done */
#define DMA_BE0			(1 << 0)	/* buffer 0 enable */

#define DMA_PERIPHERAL_ADDR	0x00000008
#define DMA_BUFFER0_START	0x0000000C
#define DMA_BUFFER1_START	0x00000014
#define DMA_BUFFER0_COUNT	0x00000010
#define DMA_BUFFER1_COUNT	0x00000018
#define DMA_BAH_BIT		16
#define DMA_BAH_MASK		(0x0f << DMA_BAH_BIT)
#define DMA_COUNT_BIT		0
#define DMA_COUNT_MASK		(0xffff << DMA_COUNT_BIT)
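
/*
 * Quick decoding example (added illustration, not part of the original
 * header): the *_BIT/*_MASK pairs above extract fields from a mode word
 * read back through DMA_MODE_READ, e.g.:
 *
 *	u32 mode  = __raw_readl(chan->io + DMA_MODE_READ);
 *	int dev   = (mode & DMA_DID_MASK) >> DMA_DID_BIT;
 *	int width = (mode & DMA_DW_MASK) >> DMA_DW_BIT;  (0=8, 1=16, 2=32 bit)
 */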

/* DMA Device IDs follow */
enum {
	DMA_ID_UART0_TX = 0,
	DMA_ID_UART0_RX,
	DMA_ID_GP04,
	DMA_ID_GP05,
	DMA_ID_AC97C_TX,
	DMA_ID_AC97C_RX,
	DMA_ID_UART3_TX,
	DMA_ID_UART3_RX,
	DMA_ID_USBDEV_EP0_RX,
	DMA_ID_USBDEV_EP0_TX,
	DMA_ID_USBDEV_EP2_TX,
	DMA_ID_USBDEV_EP3_TX,
	DMA_ID_USBDEV_EP4_RX,
	DMA_ID_USBDEV_EP5_RX,
	DMA_ID_I2S_TX,
	DMA_ID_I2S_RX,
	DMA_NUM_DEV
};

/* DMA Device IDs for the second bank (Au1100) follow */
enum {
	DMA_ID_SD0_TX = 0,
	DMA_ID_SD0_RX,
	DMA_ID_SD1_TX,
	DMA_ID_SD1_RX,
	DMA_NUM_DEV_BANK2
};

struct dma_chan {
	int dev_id;		/* this channel is allocated if >= 0, */
				/* free otherwise */
	void __iomem *io;
	const char *dev_str;
	int irq;
	void *irq_dev;
	unsigned int fifo_addr;
	unsigned int mode;
};

/* These are defined in the platform DMA support code (dma.c) */
extern struct dma_chan au1000_dma_table[];
extern int request_au1000_dma(int dev_id,
			      const char *dev_str,
			      irq_handler_t irqhandler,
			      unsigned long irqflags,
			      void *irq_dev_id);
extern void free_au1000_dma(unsigned int dmanr);
extern int au1000_dma_read_proc(char *buf, char **start, off_t fpos,
				int length, int *eof, void *data);
extern void dump_au1000_dma_channel(unsigned int dmanr);
extern spinlock_t au1000_dma_spin_lock;

static inline struct dma_chan *get_dma_chan(unsigned int dmanr)
{
	if (dmanr >= NUM_AU1000_DMA_CHANNELS ||
	    au1000_dma_table[dmanr].dev_id < 0)
		return NULL;
	return &au1000_dma_table[dmanr];
}

static inline unsigned long claim_dma_lock(void)
{
	unsigned long flags;

	spin_lock_irqsave(&au1000_dma_spin_lock, flags);
	return flags;
}

static inline void release_dma_lock(unsigned long flags)
{
	spin_unlock_irqrestore(&au1000_dma_spin_lock, flags);
}
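
/*
 * Illustrative sketch (not from the original header): reprogramming a
 * channel is bracketed by the helpers above so that process and interrupt
 * context serialize on au1000_dma_spin_lock. "ch", "buf_phys" and
 * "nunits" are hypothetical values obtained elsewhere:
 *
 *	unsigned long flags = claim_dma_lock();
 *	set_dma_addr0(ch, buf_phys);
 *	set_dma_count0(ch, nunits);
 *	enable_dma_buffer0(ch);
 *	release_dma_lock(flags);
 */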

/*
 * Set the DMA buffer enable bits in the mode register: writing a bit to
 * DMA_MODE_SET sets it, writing it to DMA_MODE_CLEAR clears it.
 */
static inline void enable_dma_buffer0(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(DMA_BE0, chan->io + DMA_MODE_SET);
}

static inline void enable_dma_buffer1(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(DMA_BE1, chan->io + DMA_MODE_SET);
}

static inline void enable_dma_buffers(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(DMA_BE0 | DMA_BE1, chan->io + DMA_MODE_SET);
}

static inline void start_dma(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(DMA_GO, chan->io + DMA_MODE_SET);
}

#define DMA_HALT_POLL	0x5000

static inline void halt_dma(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);
	int i;

	if (!chan)
		return;
	__raw_writel(DMA_GO, chan->io + DMA_MODE_CLEAR);

	/* Poll the halt bit */
	for (i = 0; i < DMA_HALT_POLL; i++)
		if (__raw_readl(chan->io + DMA_MODE_READ) & DMA_HALT)
			break;
	if (i == DMA_HALT_POLL)
		printk(KERN_INFO "halt_dma: HALT poll expired!\n");
}

static inline void disable_dma(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;

	halt_dma(dmanr);

	/* Now we can disable the buffers */
	__raw_writel(~DMA_GO, chan->io + DMA_MODE_CLEAR);
}

static inline int dma_halted(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return 1;
	return (__raw_readl(chan->io + DMA_MODE_READ) & DMA_HALT) ? 1 : 0;
}

/* Initialize a channel from its dma_chan table entry. */
static inline void init_dma(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);
	u32 mode;

	if (!chan)
		return;

	disable_dma(dmanr);

	/* Set the device FIFO address */
	__raw_writel(CPHYSADDR(chan->fifo_addr), chan->io + DMA_PERIPHERAL_ADDR);

	mode = chan->mode | (chan->dev_id << DMA_DID_BIT);
	if (chan->irq)
		mode |= DMA_IE;

	__raw_writel(~mode, chan->io + DMA_MODE_CLEAR);
	__raw_writel(mode, chan->io + DMA_MODE_SET);
}
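
/*
 * Putting the helpers together - a minimal, hypothetical setup sequence
 * for one channel. dma_cb and my_dev are placeholder names, and error
 * handling is elided; treat this as a sketch, not driver code:
 *
 *	int ch = request_au1000_dma(DMA_ID_AC97C_TX, "ac97-tx",
 *				    dma_cb, 0, my_dev);
 *	if (ch < 0)
 *		return ch;
 *	set_dma_mode(ch, DMA_DW16 | DMA_NC);	(see set_dma_mode() below)
 *	init_dma(ch);				(programs FIFO addr and mode)
 *	...program buffer addresses/counts, then start_dma(ch)...
 *	When done: disable_dma(ch); free_au1000_dma(ch);
 */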

/*
 * set_dma_mode() only changes the software copy of the channel mode; the
 * hardware register is rewritten by the next init_dma().
 */
static inline void set_dma_mode(unsigned int dmanr, unsigned int mode)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	/*
	 * set_dma_mode is only allowed to change the endianness, direction,
	 * transfer size, device FIFO width, and coherency settings; the
	 * device ID and bank are fixed by the channel allocation.
	 */
	mode &= (DMA_BE | DMA_DR | DMA_TS8 | DMA_DW_MASK | DMA_NC);
	chan->mode &= ~(DMA_BE | DMA_DR | DMA_TS8 | DMA_DW_MASK | DMA_NC);
	chan->mode |= mode;
}

static inline unsigned int get_dma_mode(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return 0;
	return chan->mode;
}

/* Returns 0 or 1 for the buffer currently in use, or -1 on a bad channel. */
static inline int get_dma_active_buffer(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return -1;
	return (__raw_readl(chan->io + DMA_MODE_READ) & DMA_AB) ? 1 : 0;
}

/*
 * Set the device FIFO address for a specific DMA channel - only the
 * general-purpose GP04 and GP05 channels in the first device bank have
 * a programmable FIFO address.
 */
static inline void set_dma_fifo_addr(unsigned int dmanr, unsigned int a)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;

	if (chan->mode & DMA_DS)	/* second bank of device IDs */
		return;

	if (chan->dev_id != DMA_ID_GP04 && chan->dev_id != DMA_ID_GP05)
		return;

	__raw_writel(CPHYSADDR(a), chan->io + DMA_PERIPHERAL_ADDR);
}

/* Clear the buffer done bits in the mode register. */
static inline void clear_dma_done0(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(DMA_D0, chan->io + DMA_MODE_CLEAR);
}

static inline void clear_dma_done1(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(DMA_D1, chan->io + DMA_MODE_CLEAR);
}

/* This does nothing - the Au1000 has no DMA page registers. */
static inline void set_dma_page(unsigned int dmanr, char pagenr)
{
}

/* Set Buffer 0 transfer address for a specific DMA channel. */
static inline void set_dma_addr0(unsigned int dmanr, unsigned int a)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(a, chan->io + DMA_BUFFER0_START);
}

/* Set Buffer 1 transfer address for a specific DMA channel. */
static inline void set_dma_addr1(unsigned int dmanr, unsigned int a)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	__raw_writel(a, chan->io + DMA_BUFFER1_START);
}

/* Set Buffer 0 transfer size (max 64k units) for a specific DMA channel. */
static inline void set_dma_count0(unsigned int dmanr, unsigned int count)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	count &= DMA_COUNT_MASK;
	__raw_writel(count, chan->io + DMA_BUFFER0_COUNT);
}

/* Set Buffer 1 transfer size (max 64k units) for a specific DMA channel. */
static inline void set_dma_count1(unsigned int dmanr, unsigned int count)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	count &= DMA_COUNT_MASK;
	__raw_writel(count, chan->io + DMA_BUFFER1_COUNT);
}

/* Set both buffers' transfer sizes for a specific DMA channel. */
static inline void set_dma_count(unsigned int dmanr, unsigned int count)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return;
	count &= DMA_COUNT_MASK;
	__raw_writel(count, chan->io + DMA_BUFFER0_COUNT);
	__raw_writel(count, chan->io + DMA_BUFFER1_COUNT);
}
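
/*
 * Double-buffering sketch (added illustration): the two buffer slots can
 * be used ping-pong style. buf0_phys/buf1_phys are hypothetical physical
 * addresses, and "n" is a count in FIFO-width units, max 0xffff (see
 * get_dma_residue() below for the unit scaling):
 *
 *	unsigned long flags = claim_dma_lock();
 *	set_dma_addr0(ch, buf0_phys);
 *	set_dma_addr1(ch, buf1_phys);
 *	set_dma_count(ch, n);
 *	enable_dma_buffers(ch);
 *	start_dma(ch);
 *	release_dma_lock(flags);
 */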

/*
 * Returns which buffer(s) have completed: the raw DMA_D0/DMA_D1 bits from
 * the mode register, or 0 for neither (or a bad channel).
 */
static inline unsigned int get_dma_buffer_done(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return 0;
	return __raw_readl(chan->io + DMA_MODE_READ) & (DMA_D0 | DMA_D1);
}
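
/*
 * A hypothetical interrupt-handler fragment showing one way the done bits
 * can be consumed: read them, refill the finished buffer, then clear its
 * done bit and re-arm it. refill0()/refill1() are placeholder helpers
 * standing in for reprogramming that buffer's address and count:
 *
 *	unsigned int done = get_dma_buffer_done(ch);
 *	if (done & DMA_D0) {
 *		refill0(ch);
 *		clear_dma_done0(ch);
 *		enable_dma_buffer0(ch);
 *	}
 *	if (done & DMA_D1) {
 *		refill1(ch);
 *		clear_dma_done1(ch);
 *		enable_dma_buffer1(ch);
 *	}
 */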

/* Returns the DMA done IRQ for the channel, or -1 on a bad channel. */
static inline int get_dma_done_irq(unsigned int dmanr)
{
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return -1;
	return chan->irq;
}

/*
 * Get the current transfer residue, in bytes, of the buffer that is
 * currently in use. The hardware count is kept in FIFO-width units, so
 * scale it up by the channel's transfer width.
 */
static inline int get_dma_residue(unsigned int dmanr)
{
	int curBufCntReg, count;
	struct dma_chan *chan = get_dma_chan(dmanr);

	if (!chan)
		return 0;

	curBufCntReg = (__raw_readl(chan->io + DMA_MODE_READ) & DMA_AB) ?
	    DMA_BUFFER1_COUNT : DMA_BUFFER0_COUNT;

	count = __raw_readl(chan->io + curBufCntReg) & DMA_COUNT_MASK;

	if ((chan->mode & DMA_DW_MASK) == DMA_DW16)
		count <<= 1;
	else if ((chan->mode & DMA_DW_MASK) == DMA_DW32)
		count <<= 2;

	return count;
}
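
/*
 * Example use of the residue helper (added illustration): the value is
 * already in bytes, so progress through the active buffer's programmed
 * transfer of total_bytes (hypothetical) is simply:
 *
 *	int transferred = total_bytes - get_dma_residue(ch);
 */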

#endif /* __ASM_AU1000_DMA_H */