0001
0002
0003
0004
0005
0006
0007
0008
0009 #include <linux/string.h>
0010 #include <linux/io.h>
0011 #include <linux/pci_regs.h>
0012 #include <linux/spinlock.h>
0013
0014 #include "cobalt-driver.h"
0015 #include "cobalt-omnitek.h"
0016
0017
0018 #define END_OF_CHAIN (1 << 1)
0019 #define INTERRUPT_ENABLE (1 << 2)
0020 #define WRITE_TO_PCI (1 << 3)
0021 #define READ_FROM_PCI (0 << 3)
0022 #define DESCRIPTOR_FLAG_MSK (END_OF_CHAIN | INTERRUPT_ENABLE | WRITE_TO_PCI)
0023 #define NEXT_ADRS_MSK 0xffffffe0
0024
0025
0026 #define ENABLE (1 << 0)
0027 #define START (1 << 1)
0028 #define ABORT (1 << 2)
0029 #define DONE (1 << 4)
0030 #define SG_INTERRUPT (1 << 5)
0031 #define EVENT_INTERRUPT (1 << 6)
0032 #define SCATTER_GATHER_MODE (1 << 8)
0033 #define DISABLE_VIDEO_RESYNC (1 << 9)
0034 #define EVENT_INTERRUPT_ENABLE (1 << 10)
0035 #define DIRECTIONAL_MSK (3 << 16)
0036 #define INPUT_ONLY (0 << 16)
0037 #define OUTPUT_ONLY (1 << 16)
0038 #define BIDIRECTIONAL (2 << 16)
0039 #define DMA_TYPE_MEMORY (0 << 18)
0040 #define DMA_TYPE_FIFO (1 << 18)
0041
0042 #define BASE (cobalt->bar0)
0043 #define CAPABILITY_HEADER (BASE)
0044 #define CAPABILITY_REGISTER (BASE + 0x04)
0045 #define PCI_64BIT (1 << 8)
0046 #define LOCAL_64BIT (1 << 9)
0047 #define INTERRUPT_STATUS (BASE + 0x08)
0048 #define PCI(c) (BASE + 0x40 + ((c) * 0x40))
0049 #define SIZE(c) (BASE + 0x58 + ((c) * 0x40))
0050 #define DESCRIPTOR(c) (BASE + 0x50 + ((c) * 0x40))
0051 #define CS_REG(c) (BASE + 0x60 + ((c) * 0x40))
0052 #define BYTES_TRANSFERRED(c) (BASE + 0x64 + ((c) * 0x40))
0053
0054
0055 static char *get_dma_direction(u32 status)
0056 {
0057 switch (status & DIRECTIONAL_MSK) {
0058 case INPUT_ONLY: return "Input";
0059 case OUTPUT_ONLY: return "Output";
0060 case BIDIRECTIONAL: return "Bidirectional";
0061 }
0062 return "";
0063 }
0064
0065 static void show_dma_capability(struct cobalt *cobalt)
0066 {
0067 u32 header = ioread32(CAPABILITY_HEADER);
0068 u32 capa = ioread32(CAPABILITY_REGISTER);
0069 u32 i;
0070
0071 cobalt_info("Omnitek DMA capability: ID 0x%02x Version 0x%02x Next 0x%x Size 0x%x\n",
0072 header & 0xff, (header >> 8) & 0xff,
0073 (header >> 16) & 0xffff, (capa >> 24) & 0xff);
0074
0075 switch ((capa >> 8) & 0x3) {
0076 case 0:
0077 cobalt_info("Omnitek DMA: 32 bits PCIe and Local\n");
0078 break;
0079 case 1:
0080 cobalt_info("Omnitek DMA: 64 bits PCIe, 32 bits Local\n");
0081 break;
0082 case 3:
0083 cobalt_info("Omnitek DMA: 64 bits PCIe and Local\n");
0084 break;
0085 }
0086
0087 for (i = 0; i < (capa & 0xf); i++) {
0088 u32 status = ioread32(CS_REG(i));
0089
0090 cobalt_info("Omnitek DMA channel #%d: %s %s\n", i,
0091 status & DMA_TYPE_FIFO ? "FIFO" : "MEMORY",
0092 get_dma_direction(status));
0093 }
0094 }
0095
0096 void omni_sg_dma_start(struct cobalt_stream *s, struct sg_dma_desc_info *desc)
0097 {
0098 struct cobalt *cobalt = s->cobalt;
0099
0100 iowrite32((u32)((u64)desc->bus >> 32), DESCRIPTOR(s->dma_channel) + 4);
0101 iowrite32((u32)desc->bus & NEXT_ADRS_MSK, DESCRIPTOR(s->dma_channel));
0102 iowrite32(ENABLE | SCATTER_GATHER_MODE | START, CS_REG(s->dma_channel));
0103 }
0104
0105 bool is_dma_done(struct cobalt_stream *s)
0106 {
0107 struct cobalt *cobalt = s->cobalt;
0108
0109 if (ioread32(CS_REG(s->dma_channel)) & DONE)
0110 return true;
0111
0112 return false;
0113 }
0114
0115 void omni_sg_dma_abort_channel(struct cobalt_stream *s)
0116 {
0117 struct cobalt *cobalt = s->cobalt;
0118
0119 if (!is_dma_done(s))
0120 iowrite32(ABORT, CS_REG(s->dma_channel));
0121 }
0122
0123 int omni_sg_dma_init(struct cobalt *cobalt)
0124 {
0125 u32 capa = ioread32(CAPABILITY_REGISTER);
0126 int i;
0127
0128 cobalt->first_fifo_channel = 0;
0129 cobalt->dma_channels = capa & 0xf;
0130 if (capa & PCI_64BIT)
0131 cobalt->pci_32_bit = false;
0132 else
0133 cobalt->pci_32_bit = true;
0134
0135 for (i = 0; i < cobalt->dma_channels; i++) {
0136 u32 status = ioread32(CS_REG(i));
0137 u32 ctrl = ioread32(CS_REG(i));
0138
0139 if (!(ctrl & DONE))
0140 iowrite32(ABORT, CS_REG(i));
0141
0142 if (!(status & DMA_TYPE_FIFO))
0143 cobalt->first_fifo_channel++;
0144 }
0145 show_dma_capability(cobalt);
0146 return 0;
0147 }
0148
/*
 * descriptor_list_create - build an Omnitek scatter-gather descriptor chain
 *
 * Fills the coherent buffer at desc->virt (bus address desc->bus) with
 * hardware descriptors covering @size bytes of @scatter_list.  @to_pci
 * selects the transfer direction (WRITE_TO_PCI flag set on each link).
 * @width and @stride describe a padded video line: only @width bytes of
 * every @stride-byte line are transferred, the trailing @stride - @width
 * bytes of each line are skipped.  When @width >= @stride there is no
 * padding and the whole @size is treated as one run.
 *
 * The last descriptor links back to the head of the chain with
 * INTERRUPT_ENABLE set and is recorded in desc->last_desc_virt so the
 * descriptor_list_* helpers below can retarget it.
 *
 * Returns 0 on success or -EFAULT on an unusable DMA address.
 */
int descriptor_list_create(struct cobalt *cobalt,
		struct scatterlist *scatter_list, bool to_pci, unsigned sglen,
		unsigned size, unsigned width, unsigned stride,
		struct sg_dma_desc_info *desc)
{
	struct sg_dma_descriptor *d = (struct sg_dma_descriptor *)desc->virt;
	/* Bus address of the descriptor currently being filled in. */
	dma_addr_t next = desc->bus;
	unsigned offset = 0;	/* position within the current sg entry */
	unsigned copy_bytes = width;
	unsigned copied = 0;	/* bytes copied of the current line */
	bool first = true;

	/* Engine requires 32-bit alignment of addresses and sizes. */
	WARN_ON(sg_dma_address(scatter_list) & 3);
	WARN_ON(size & 3);
	WARN_ON(next & 3);
	WARN_ON(stride & 3);
	WARN_ON(stride < width);
	/* No line padding: copy everything as a single run. */
	if (width >= stride)
		copy_bytes = stride = size;

	while (size) {
		dma_addr_t addr = sg_dma_address(scatter_list) + offset;
		unsigned bytes;

		if (addr == 0)
			return -EFAULT;
		if (cobalt->pci_32_bit) {
			/* A 64-bit bus address is unusable here. */
			WARN_ON((u64)addr >> 32);
			if ((u64)addr >> 32)
				return -EFAULT;
		}

		/* PCIe address, split across two 32-bit fields. */
		d->pci_l = addr & 0xffffffff;
		d->pci_h = (u64)addr >> 32;

		d->local = 0;
		d->reserved0 = 0;

		/* Take no more than what remains in this sg entry and in
		   the current line. */
		bytes = min(sg_dma_len(scatter_list) - offset,
			copy_bytes - copied);

		if (first) {
			if (to_pci)
				/* NOTE(review): magic marker written to the
				   local-address field of the first write
				   descriptor; presumably consumed by the
				   FPGA — confirm its meaning. */
				d->local = 0x11111111;
			first = false;
			if (sglen == 1) {
				/* A single-entry scatterlist is split into
				   two descriptors: first half here, rounded
				   down to a 4-byte multiple; presumably the
				   engine wants a chain of at least two
				   descriptors — TODO confirm. */
				d->bytes = (bytes / 2) & ~3;
				d->reserved1 = 0;
				size -= d->bytes;
				copied += d->bytes;
				offset += d->bytes;
				addr += d->bytes;
				next += sizeof(struct sg_dma_descriptor);
				/* Link to the second descriptor. */
				d->next_h = (u32)((u64)next >> 32);
				d->next_l = (u32)next |
					(to_pci ? WRITE_TO_PCI : 0);
				bytes -= d->bytes;
				d++;
				/* Second descriptor picks up where the
				   first one stopped. */
				d->pci_l = addr & 0xffffffff;
				d->pci_h = (u64)addr >> 32;
				d->local = 0;
				d->reserved0 = 0;
			}
		}

		d->bytes = bytes;
		d->reserved1 = 0;
		size -= bytes;
		copied += bytes;
		offset += bytes;

		if (copied == copy_bytes) {
			/* End of a line: skip padding up to the stride
			   boundary, advancing through sg entries as
			   needed. */
			while (copied < stride) {
				bytes = min(sg_dma_len(scatter_list) - offset,
					stride - copied);
				copied += bytes;
				offset += bytes;
				size -= bytes;
				if (sg_dma_len(scatter_list) == offset) {
					offset = 0;
					scatter_list = sg_next(scatter_list);
				}
			}
			copied = 0;
		} else {
			/* sg entry exhausted mid-line: move to the next. */
			offset = 0;
			scatter_list = sg_next(scatter_list);
		}

		next += sizeof(struct sg_dma_descriptor);
		if (size == 0) {
			/* Last descriptor: loop back to the head of the
			   chain and interrupt on completion. */
			d->next_h = (u32)((u64)desc->bus >> 32);
			d->next_l = (u32)desc->bus |
				(to_pci ? WRITE_TO_PCI : 0) | INTERRUPT_ENABLE;
			if (!to_pci)
				/* NOTE(review): magic marker on the last
				   read descriptor, counterpart of
				   0x11111111 above — confirm semantics. */
				d->local = 0x22222222;
			desc->last_desc_virt = d;
		} else {
			/* Link to the next descriptor in the buffer. */
			d->next_h = (u32)((u64)next >> 32);
			d->next_l = (u32)next | (to_pci ? WRITE_TO_PCI : 0);
		}
		d++;
	}
	return 0;
}
0270
0271 void descriptor_list_chain(struct sg_dma_desc_info *this,
0272 struct sg_dma_desc_info *next)
0273 {
0274 struct sg_dma_descriptor *d = this->last_desc_virt;
0275 u32 direction = d->next_l & WRITE_TO_PCI;
0276
0277 if (next == NULL) {
0278 d->next_h = 0;
0279 d->next_l = direction | INTERRUPT_ENABLE | END_OF_CHAIN;
0280 } else {
0281 d->next_h = (u32)((u64)next->bus >> 32);
0282 d->next_l = (u32)next->bus | direction | INTERRUPT_ENABLE;
0283 }
0284 }
0285
0286 void *descriptor_list_allocate(struct sg_dma_desc_info *desc, size_t bytes)
0287 {
0288 desc->size = bytes;
0289 desc->virt = dma_alloc_coherent(desc->dev, bytes,
0290 &desc->bus, GFP_KERNEL);
0291 return desc->virt;
0292 }
0293
0294 void descriptor_list_free(struct sg_dma_desc_info *desc)
0295 {
0296 if (desc->virt)
0297 dma_free_coherent(desc->dev, desc->size,
0298 desc->virt, desc->bus);
0299 desc->virt = NULL;
0300 }
0301
0302 void descriptor_list_interrupt_enable(struct sg_dma_desc_info *desc)
0303 {
0304 struct sg_dma_descriptor *d = desc->last_desc_virt;
0305
0306 d->next_l |= INTERRUPT_ENABLE;
0307 }
0308
0309 void descriptor_list_interrupt_disable(struct sg_dma_desc_info *desc)
0310 {
0311 struct sg_dma_descriptor *d = desc->last_desc_virt;
0312
0313 d->next_l &= ~INTERRUPT_ENABLE;
0314 }
0315
0316 void descriptor_list_loopback(struct sg_dma_desc_info *desc)
0317 {
0318 struct sg_dma_descriptor *d = desc->last_desc_virt;
0319
0320 d->next_h = (u32)((u64)desc->bus >> 32);
0321 d->next_l = (u32)desc->bus | (d->next_l & DESCRIPTOR_FLAG_MSK);
0322 }
0323
0324 void descriptor_list_end_of_chain(struct sg_dma_desc_info *desc)
0325 {
0326 struct sg_dma_descriptor *d = desc->last_desc_virt;
0327
0328 d->next_l |= END_OF_CHAIN;
0329 }