0001
0002
0003
0004
0005
0006
0007 #include <linux/dma-mapping.h>
0008 #include <linux/kernel.h>
0009 #include <linux/pci.h>
0010
0011 #include "pt3.h"
0012
/* unit of TS data handed to the demux in one go: 128 TS packets */
#define PT3_ACCESS_UNIT (TS_PACKET_SZ * 128)
/*
 * Marker byte written at the head of each access unit by
 * pt3_init_dmabuf(); the DMA engine overwriting it is how
 * pt3_proc_dma() detects that the unit now holds fresh TS data.
 */
#define PT3_BUF_CANARY (0x74)
0015
0016 static u32 get_dma_base(int idx)
0017 {
0018 int i;
0019
0020 i = (idx == 1 || idx == 2) ? 3 - idx : idx;
0021 return REG_DMA_BASE + 0x18 * i;
0022 }
0023
0024 int pt3_stop_dma(struct pt3_adapter *adap)
0025 {
0026 struct pt3_board *pt3 = adap->dvb_adap.priv;
0027 u32 base;
0028 u32 stat;
0029 int retry;
0030
0031 base = get_dma_base(adap->adap_idx);
0032 stat = ioread32(pt3->regs[0] + base + OFST_STATUS);
0033 if (!(stat & 0x01))
0034 return 0;
0035
0036 iowrite32(0x02, pt3->regs[0] + base + OFST_DMA_CTL);
0037 for (retry = 0; retry < 5; retry++) {
0038 stat = ioread32(pt3->regs[0] + base + OFST_STATUS);
0039 if (!(stat & 0x01))
0040 return 0;
0041 msleep(50);
0042 }
0043 return -EIO;
0044 }
0045
/*
 * pt3_start_dma() - point the DMA engine at the descriptor ring and start it.
 *
 * The write order matters: the engine is first told to stop (0x02 to
 * DMA_CTL, the same value pt3_stop_dma() uses), then the 64-bit bus
 * address of the first descriptor page is programmed, and only then is
 * the engine started (0x01 to DMA_CTL).
 *
 * Always returns 0.
 */
int pt3_start_dma(struct pt3_adapter *adap)
{
	struct pt3_board *pt3 = adap->dvb_adap.priv;
	u32 base = get_dma_base(adap->adap_idx);

	/* stop the engine before touching its descriptor pointer */
	iowrite32(0x02, pt3->regs[0] + base + OFST_DMA_CTL);
	/* bus address of desc_buf[0]: head of the descriptor chain */
	iowrite32(lower_32_bits(adap->desc_buf[0].b_addr),
		  pt3->regs[0] + base + OFST_DMA_DESC_L);
	iowrite32(upper_32_bits(adap->desc_buf[0].b_addr),
		  pt3->regs[0] + base + OFST_DMA_DESC_H);
	/* go */
	iowrite32(0x01, pt3->regs[0] + base + OFST_DMA_CTL);
	return 0;
}
0059
0060
0061 static u8 *next_unit(struct pt3_adapter *adap, int *idx, int *ofs)
0062 {
0063 *ofs += PT3_ACCESS_UNIT;
0064 if (*ofs >= DATA_BUF_SZ) {
0065 *ofs -= DATA_BUF_SZ;
0066 (*idx)++;
0067 if (*idx == adap->num_bufs)
0068 *idx = 0;
0069 }
0070 return &adap->buffer[*idx].data[*ofs];
0071 }
0072
/*
 * pt3_proc_dma() - feed TS data filled in by the DMA engine to the demux.
 *
 * Every PT3_ACCESS_UNIT-sized unit starts life with PT3_BUF_CANARY in
 * its first byte (pt3_init_dmabuf()); incoming TS data overwrites it,
 * so a surviving canary marks a unit the hardware has not filled yet.
 *
 * The walk deliberately looks one unit ahead: the unit at
 * (buf_idx, buf_ofs) is consumed only once the FOLLOWING unit has lost
 * its canary too — presumably so a unit the engine may still be
 * writing is never handed to the demux (NOTE(review): confirm against
 * hardware behavior).
 *
 * Always returns 0.
 */
int pt3_proc_dma(struct pt3_adapter *adap)
{
	int idx, ofs;

	idx = adap->buf_idx;
	ofs = adap->buf_ofs;

	/* current unit still has its canary: no new data at all */
	if (adap->buffer[idx].data[ofs] == PT3_BUF_CANARY)
		return 0;

	/* next_unit() advances idx/ofs to the unit AFTER the one consumed */
	while (*next_unit(adap, &idx, &ofs) != PT3_BUF_CANARY) {
		u8 *p;

		/* head of the unit being consumed this iteration */
		p = &adap->buffer[adap->buf_idx].data[adap->buf_ofs];
		if (adap->num_discard > 0)
			/* drop the first num_discard units without demuxing */
			adap->num_discard--;
		else if (adap->buf_ofs + PT3_ACCESS_UNIT > DATA_BUF_SZ) {
			/*
			 * Unit straddles a buffer boundary: demux the tail
			 * of this buffer, then the remainder from the start
			 * of the next one (idx/ofs already point there, and
			 * ofs equals the remainder's length).
			 */
			dvb_dmx_swfilter_packets(&adap->demux, p,
				(DATA_BUF_SZ - adap->buf_ofs) / TS_PACKET_SZ);
			dvb_dmx_swfilter_packets(&adap->demux,
				adap->buffer[idx].data, ofs / TS_PACKET_SZ);
		} else
			dvb_dmx_swfilter_packets(&adap->demux, p,
				PT3_ACCESS_UNIT / TS_PACKET_SZ);

		/* re-arm the canary so this unit is seen as empty again */
		*p = PT3_BUF_CANARY;
		adap->buf_idx = idx;
		adap->buf_ofs = ofs;
	}
	return 0;
}
0104
0105 void pt3_init_dmabuf(struct pt3_adapter *adap)
0106 {
0107 int idx, ofs;
0108 u8 *p;
0109
0110 idx = 0;
0111 ofs = 0;
0112 p = adap->buffer[0].data;
0113
0114 while (idx < adap->num_bufs) {
0115 p[ofs] = PT3_BUF_CANARY;
0116 ofs += PT3_ACCESS_UNIT;
0117 if (ofs >= DATA_BUF_SZ) {
0118 ofs -= DATA_BUF_SZ;
0119 idx++;
0120 p = adap->buffer[idx].data;
0121 }
0122 }
0123 adap->buf_idx = 0;
0124 adap->buf_ofs = 0;
0125 }
0126
0127 void pt3_free_dmabuf(struct pt3_adapter *adap)
0128 {
0129 struct pt3_board *pt3;
0130 int i;
0131
0132 pt3 = adap->dvb_adap.priv;
0133 for (i = 0; i < adap->num_bufs; i++)
0134 dma_free_coherent(&pt3->pdev->dev, DATA_BUF_SZ,
0135 adap->buffer[i].data, adap->buffer[i].b_addr);
0136 adap->num_bufs = 0;
0137
0138 for (i = 0; i < adap->num_desc_bufs; i++)
0139 dma_free_coherent(&pt3->pdev->dev, PAGE_SIZE,
0140 adap->desc_buf[i].descs, adap->desc_buf[i].b_addr);
0141 adap->num_desc_bufs = 0;
0142 }
0143
0144
/*
 * pt3_alloc_dmabuf() - allocate TS data buffers and build the hardware
 * descriptor ring for one adapter.
 *
 * First allocates pt3->num_bufs coherent data buffers of DATA_BUF_SZ
 * bytes each and seeds them with canaries (pt3_init_dmabuf()).  Then
 * allocates PAGE_SIZE descriptor pages and fills them with xfer_desc
 * entries: each entry covers one DATA_XFER_SZ slice of a data buffer
 * and carries the bus address of the next entry; pages are stitched
 * together through the last entry of the previous page, and the final
 * entry is linked back to the first page, closing a circular chain the
 * DMA engine can follow indefinitely.
 *
 * Returns 0 on success, -ENOMEM on any allocation failure (everything
 * already allocated is released via pt3_free_dmabuf()).
 */
int pt3_alloc_dmabuf(struct pt3_adapter *adap)
{
	struct pt3_board *pt3;
	void *p;
	int i, j;
	int idx, ofs;	/* (data buffer, byte offset) cursor */
	int num_desc_bufs;
	dma_addr_t data_addr, desc_addr;
	struct xfer_desc *d;

	pt3 = adap->dvb_adap.priv;
	adap->num_bufs = 0;
	adap->num_desc_bufs = 0;
	for (i = 0; i < pt3->num_bufs; i++) {
		p = dma_alloc_coherent(&pt3->pdev->dev, DATA_BUF_SZ,
				       &adap->buffer[i].b_addr, GFP_KERNEL);
		if (p == NULL)
			goto failed;
		adap->buffer[i].data = p;
		/* count as we go so 'failed' frees only what we own */
		adap->num_bufs++;
	}
	pt3_init_dmabuf(adap);

	/* build the circular descriptor chain over the data buffers */
	idx = 0;
	ofs = 0;
	num_desc_bufs =
		DIV_ROUND_UP(adap->num_bufs * DATA_BUF_XFERS, DESCS_IN_PAGE);
	for (i = 0; i < num_desc_bufs; i++) {
		p = dma_alloc_coherent(&pt3->pdev->dev, PAGE_SIZE,
				       &desc_addr, GFP_KERNEL);
		if (p == NULL)
			goto failed;
		adap->num_desc_bufs++;
		adap->desc_buf[i].descs = p;
		adap->desc_buf[i].b_addr = desc_addr;

		/* chain the previous page's last descriptor to this page */
		if (i > 0) {
			d = &adap->desc_buf[i - 1].descs[DESCS_IN_PAGE - 1];
			d->next_l = lower_32_bits(desc_addr);
			d->next_h = upper_32_bits(desc_addr);
		}
		for (j = 0; j < DESCS_IN_PAGE; j++) {
			/* this descriptor: one DATA_XFER_SZ slice of data */
			data_addr = adap->buffer[idx].b_addr + ofs;
			d = &adap->desc_buf[i].descs[j];
			d->addr_l = lower_32_bits(data_addr);
			d->addr_h = upper_32_bits(data_addr);
			d->size = DATA_XFER_SZ;

			/* tentatively link to the next in-page descriptor */
			desc_addr += sizeof(struct xfer_desc);
			d->next_l = lower_32_bits(desc_addr);
			d->next_h = upper_32_bits(desc_addr);

			ofs += DATA_XFER_SZ;
			if (ofs >= DATA_BUF_SZ) {
				ofs -= DATA_BUF_SZ;
				idx++;
				if (idx >= adap->num_bufs) {
					/*
					 * All data covered: close the ring
					 * by pointing the last descriptor
					 * back at the first page and stop.
					 */
					desc_addr = adap->desc_buf[0].b_addr;
					d->next_l = lower_32_bits(desc_addr);
					d->next_h = upper_32_bits(desc_addr);
					return 0;
				}
			}
		}
	}
	return 0;

failed:
	pt3_free_dmabuf(adap);
	return -ENOMEM;
}