
0001 // SPDX-License-Identifier: GPL-2.0-or-later
0002 /* interrupt handling
0003     Copyright (C) 2003-2004  Kevin Thayer <nufan_wfk at yahoo.com>
0004     Copyright (C) 2004  Chris Kennedy <c@groovy.org>
0005     Copyright (C) 2005-2007  Hans Verkuil <hverkuil@xs4all.nl>
0006 
0007  */
0008 
0009 #include "ivtv-driver.h"
0010 #include "ivtv-queue.h"
0011 #include "ivtv-udma.h"
0012 #include "ivtv-irq.h"
0013 #include "ivtv-mailbox.h"
0014 #include "ivtv-vbi.h"
0015 #include "ivtv-yuv.h"
0016 #include <media/v4l2-event.h>
0017 
0018 #define DMA_MAGIC_COOKIE 0x000001fe
0019 
0020 static void ivtv_dma_dec_start(struct ivtv_stream *s);
0021 
0022 static const int ivtv_stream_map[] = {
0023     IVTV_ENC_STREAM_TYPE_MPG,
0024     IVTV_ENC_STREAM_TYPE_YUV,
0025     IVTV_ENC_STREAM_TYPE_PCM,
0026     IVTV_ENC_STREAM_TYPE_VBI,
0027 };
0028 
0029 static void ivtv_pcm_work_handler(struct ivtv *itv)
0030 {
0031     struct ivtv_stream *s = &itv->streams[IVTV_ENC_STREAM_TYPE_PCM];
0032     struct ivtv_buffer *buf;
0033 
0034     /* Pass the PCM data to ivtv-alsa */
0035 
0036     while (1) {
0037         /*
0038          * Users should not be using both the ALSA and V4L2 PCM audio
0039          * capture interfaces at the same time.  If the user is doing
0040          * this, there maybe a buffer in q_io to grab, use, and put
0041          * back in rotation.
0042          */
0043         buf = ivtv_dequeue(s, &s->q_io);
0044         if (buf == NULL)
0045             buf = ivtv_dequeue(s, &s->q_full);
0046         if (buf == NULL)
0047             break;
0048 
0049         if (buf->readpos < buf->bytesused)
0050             itv->pcm_announce_callback(itv->alsa,
0051                 (u8 *)(buf->buf + buf->readpos),
0052                 (size_t)(buf->bytesused - buf->readpos));
0053 
0054         ivtv_enqueue(s, buf, &s->q_free);
0055     }
0056 }
0057 
0058 static void ivtv_pio_work_handler(struct ivtv *itv)
0059 {
0060     struct ivtv_stream *s = &itv->streams[itv->cur_pio_stream];
0061     struct ivtv_buffer *buf;
0062     int i = 0;
0063 
0064     IVTV_DEBUG_HI_DMA("ivtv_pio_work_handler\n");
0065     if (itv->cur_pio_stream < 0 || itv->cur_pio_stream >= IVTV_MAX_STREAMS ||
0066             s->vdev.v4l2_dev == NULL || !ivtv_use_pio(s)) {
0067         itv->cur_pio_stream = -1;
0068         /* trigger PIO complete user interrupt */
0069         write_reg(IVTV_IRQ_ENC_PIO_COMPLETE, 0x44);
0070         return;
0071     }
0072     IVTV_DEBUG_HI_DMA("Process PIO %s\n", s->name);
0073     list_for_each_entry(buf, &s->q_dma.list, list) {
0074         u32 size = s->sg_processing[i].size & 0x3ffff;
0075 
0076         /* Copy the data from the card to the buffer */
0077         if (s->type == IVTV_DEC_STREAM_TYPE_VBI) {
0078             memcpy_fromio(buf->buf, itv->dec_mem + s->sg_processing[i].src - IVTV_DECODER_OFFSET, size);
0079         }
0080         else {
0081             memcpy_fromio(buf->buf, itv->enc_mem + s->sg_processing[i].src, size);
0082         }
0083         i++;
0084         if (i == s->sg_processing_size)
0085             break;
0086     }
0087     write_reg(IVTV_IRQ_ENC_PIO_COMPLETE, 0x44);
0088 }
0089 
0090 void ivtv_irq_work_handler(struct kthread_work *work)
0091 {
0092     struct ivtv *itv = container_of(work, struct ivtv, irq_work);
0093 
0094     if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_PIO, &itv->i_flags))
0095         ivtv_pio_work_handler(itv);
0096 
0097     if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_VBI, &itv->i_flags))
0098         ivtv_vbi_work_handler(itv);
0099 
0100     if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_YUV, &itv->i_flags))
0101         ivtv_yuv_work_handler(itv);
0102 
0103     if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_PCM, &itv->i_flags))
0104         ivtv_pcm_work_handler(itv);
0105 }
0106 
0107 /* Determine the required DMA size, setup enough buffers in the predma queue and
0108    actually copy the data from the card to the buffers in case a PIO transfer is
0109    required for this stream.
0110  */
static int stream_enc_dma_append(struct ivtv_stream *s, u32 data[CX2341X_MBOX_MAX_DATA])
{
    struct ivtv *itv = s->itv;
    struct ivtv_buffer *buf;
    u32 bytes_needed = 0;
    u32 offset, size;
    u32 UVoffset = 0, UVsize = 0;
    int skip_bufs = s->q_predma.buffers;    /* buffers already staged earlier; skip them below */
    int idx = s->sg_pending_size;           /* append after any already-pending SG entries */
    int rc;

    /* sanity checks */
    if (s->vdev.v4l2_dev == NULL) {
        IVTV_DEBUG_WARN("Stream %s not started\n", s->name);
        return -1;
    }
    if (!test_bit(IVTV_F_S_CLAIMED, &s->s_flags)) {
        IVTV_DEBUG_WARN("Stream %s not open\n", s->name);
        return -1;
    }

    /* determine offset, size and PTS for the various streams */
    switch (s->type) {
        case IVTV_ENC_STREAM_TYPE_MPG:
            offset = data[1];
            size = data[2];
            s->pending_pts = 0;
            break;

        case IVTV_ENC_STREAM_TYPE_YUV:
            /* YUV has separate Y and UV planes in card memory. */
            offset = data[1];
            size = data[2];
            UVoffset = data[3];
            UVsize = data[4];
            s->pending_pts = ((u64) data[5] << 32) | data[6];
            break;

        case IVTV_ENC_STREAM_TYPE_PCM:
            /* Skip the 12-byte header; the PTS words sit just before
               the sample data (read back via read_dec). */
            offset = data[1] + 12;
            size = data[2] - 12;
            s->pending_pts = read_dec(offset - 8) |
                ((u64)(read_dec(offset - 12)) << 32);
            if (itv->has_cx23415)
                offset += IVTV_DECODER_OFFSET;
            break;

        case IVTV_ENC_STREAM_TYPE_VBI:
            /* VBI offset/size come from card registers, not from data[]. */
            size = itv->vbi.enc_size * itv->vbi.fpi;
            offset = read_enc(itv->vbi.enc_start - 4) + 12;
            if (offset == 12) {
                /* read_enc returned 0: nothing captured yet */
                IVTV_DEBUG_INFO("VBI offset == 0\n");
                return -1;
            }
            s->pending_pts = read_enc(offset - 4) | ((u64)read_enc(offset - 8) << 32);
            break;

        case IVTV_DEC_STREAM_TYPE_VBI:
            size = read_dec(itv->vbi.dec_start + 4) + 8;
            offset = read_dec(itv->vbi.dec_start) + itv->vbi.dec_start;
            s->pending_pts = 0;
            offset += IVTV_DECODER_OFFSET;
            break;
        default:
            /* shouldn't happen */
            return -1;
    }

    /* if this is the start of the DMA then fill in the magic cookie */
    if (s->sg_pending_size == 0 && ivtv_use_dma(s)) {
        /* Save the first word and overwrite it with the cookie so that
           dma_post() can later locate the true start of the data and
           restore the saved word. */
        if (itv->has_cx23415 && (s->type == IVTV_ENC_STREAM_TYPE_PCM ||
            s->type == IVTV_DEC_STREAM_TYPE_VBI)) {
            s->pending_backup = read_dec(offset - IVTV_DECODER_OFFSET);
            write_dec_sync(DMA_MAGIC_COOKIE, offset - IVTV_DECODER_OFFSET);
        }
        else {
            s->pending_backup = read_enc(offset);
            write_enc_sync(DMA_MAGIC_COOKIE, offset);
        }
        s->pending_offset = offset;
    }

    bytes_needed = size;
    if (s->type == IVTV_ENC_STREAM_TYPE_YUV) {
        /* The size for the Y samples needs to be rounded upwards to a
           multiple of the buf_size. The UV samples then start in the
           next buffer. */
        bytes_needed = s->buf_size * ((bytes_needed + s->buf_size - 1) / s->buf_size);
        bytes_needed += UVsize;
    }

    IVTV_DEBUG_HI_DMA("%s %s: 0x%08x bytes at 0x%08x\n",
        ivtv_use_pio(s) ? "PIO" : "DMA", s->name, bytes_needed, offset);

    /* Reserve enough buffers, stealing from q_full if q_free runs dry. */
    rc = ivtv_queue_move(s, &s->q_free, &s->q_full, &s->q_predma, bytes_needed);
    if (rc < 0) { /* Insufficient buffers */
        IVTV_DEBUG_WARN("Cannot obtain %d bytes for %s data transfer\n",
                bytes_needed, s->name);
        return -1;
    }
    if (rc && !s->buffers_stolen && test_bit(IVTV_F_S_APPL_IO, &s->s_flags)) {
        IVTV_WARN("All %s stream buffers are full. Dropping data.\n", s->name);
        IVTV_WARN("Cause: the application is not reading fast enough.\n");
    }
    s->buffers_stolen = rc;

    /* got the buffers, now fill in sg_pending */
    buf = list_entry(s->q_predma.list.next, struct ivtv_buffer, list);
    /* Clear the head of the first buffer — presumably so stale data is
       not mistaken for the magic cookie by dma_post(); NOTE(review):
       dma_post() scans the first 256 bytes but only 128 are cleared —
       confirm intent. */
    memset(buf->buf, 0, 128);
    list_for_each_entry(buf, &s->q_predma.list, list) {
        if (skip_bufs-- > 0)
            continue;   /* staged by a previous append; SG entry exists */
        s->sg_pending[idx].dst = buf->dma_handle;
        s->sg_pending[idx].src = offset;
        s->sg_pending[idx].size = s->buf_size;
        buf->bytesused = min(size, s->buf_size);
        buf->dma_xfer_cnt = s->dma_xfer_cnt;

        s->q_predma.bytesused += buf->bytesused;
        size -= buf->bytesused;
        offset += s->buf_size;

        /* Sync SG buffers */
        ivtv_buf_sync_for_device(s, buf);

        if (size == 0) {    /* YUV */
            /* process the UV section */
            offset = UVoffset;
            size = UVsize;
        }
        idx++;
    }
    s->sg_pending_size = idx;
    return 0;
}
0245 
0246 static void dma_post(struct ivtv_stream *s)
0247 {
0248     struct ivtv *itv = s->itv;
0249     struct ivtv_buffer *buf = NULL;
0250     struct list_head *p;
0251     u32 offset;
0252     __le32 *u32buf;
0253     int x = 0;
0254 
0255     IVTV_DEBUG_HI_DMA("%s %s completed (%x)\n", ivtv_use_pio(s) ? "PIO" : "DMA",
0256             s->name, s->dma_offset);
0257     list_for_each(p, &s->q_dma.list) {
0258         buf = list_entry(p, struct ivtv_buffer, list);
0259         u32buf = (__le32 *)buf->buf;
0260 
0261         /* Sync Buffer */
0262         ivtv_buf_sync_for_cpu(s, buf);
0263 
0264         if (x == 0 && ivtv_use_dma(s)) {
0265             offset = s->dma_last_offset;
0266             if (le32_to_cpu(u32buf[offset / 4]) != DMA_MAGIC_COOKIE)
0267             {
0268                 for (offset = 0; offset < 64; offset++)
0269                     if (le32_to_cpu(u32buf[offset]) == DMA_MAGIC_COOKIE)
0270                         break;
0271                 offset *= 4;
0272                 if (offset == 256) {
0273                     IVTV_DEBUG_WARN("%s: Couldn't find start of buffer within the first 256 bytes\n", s->name);
0274                     offset = s->dma_last_offset;
0275                 }
0276                 if (s->dma_last_offset != offset)
0277                     IVTV_DEBUG_WARN("%s: offset %d -> %d\n", s->name, s->dma_last_offset, offset);
0278                 s->dma_last_offset = offset;
0279             }
0280             if (itv->has_cx23415 && (s->type == IVTV_ENC_STREAM_TYPE_PCM ||
0281                         s->type == IVTV_DEC_STREAM_TYPE_VBI)) {
0282                 write_dec_sync(0, s->dma_offset - IVTV_DECODER_OFFSET);
0283             }
0284             else {
0285                 write_enc_sync(0, s->dma_offset);
0286             }
0287             if (offset) {
0288                 buf->bytesused -= offset;
0289                 memcpy(buf->buf, buf->buf + offset, buf->bytesused + offset);
0290             }
0291             *u32buf = cpu_to_le32(s->dma_backup);
0292         }
0293         x++;
0294         /* flag byteswap ABCD -> DCBA for MPG & VBI data outside irq */
0295         if (s->type == IVTV_ENC_STREAM_TYPE_MPG ||
0296             s->type == IVTV_ENC_STREAM_TYPE_VBI)
0297             buf->b_flags |= IVTV_F_B_NEED_BUF_SWAP;
0298     }
0299     if (buf)
0300         buf->bytesused += s->dma_last_offset;
0301     if (buf && s->type == IVTV_DEC_STREAM_TYPE_VBI) {
0302         list_for_each_entry(buf, &s->q_dma.list, list) {
0303             /* Parse and Groom VBI Data */
0304             s->q_dma.bytesused -= buf->bytesused;
0305             ivtv_process_vbi_data(itv, buf, 0, s->type);
0306             s->q_dma.bytesused += buf->bytesused;
0307         }
0308         if (s->fh == NULL) {
0309             ivtv_queue_move(s, &s->q_dma, NULL, &s->q_free, 0);
0310             return;
0311         }
0312     }
0313 
0314     ivtv_queue_move(s, &s->q_dma, NULL, &s->q_full, s->q_dma.bytesused);
0315 
0316     if (s->type == IVTV_ENC_STREAM_TYPE_PCM &&
0317         itv->pcm_announce_callback != NULL) {
0318         /*
0319          * Set up the work handler to pass the data to ivtv-alsa.
0320          *
0321          * We just use q_full and let the work handler race with users
0322          * making ivtv-fileops.c calls on the PCM device node.
0323          *
0324          * Users should not be using both the ALSA and V4L2 PCM audio
0325          * capture interfaces at the same time.  If the user does this,
0326          * fragments of data will just go out each interface as they
0327          * race for PCM data.
0328          */
0329         set_bit(IVTV_F_I_WORK_HANDLER_PCM, &itv->i_flags);
0330         set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
0331     }
0332 
0333     if (s->fh)
0334         wake_up(&s->waitq);
0335 }
0336 
/* Build the SG list for a decoder DMA transfer of the buffers staged in
 * q_predma, then start the DMA immediately or mark it pending if another
 * DMA is in flight.  'offset' is the destination in card memory; 'lock'
 * selects whether dma_reg_lock must be taken here or is already held by
 * the caller.
 */
void ivtv_dma_stream_dec_prepare(struct ivtv_stream *s, u32 offset, int lock)
{
    struct ivtv *itv = s->itv;
    struct yuv_playback_info *yi = &itv->yuv_info;
    u8 frame = yi->draw_frame;
    struct yuv_frame_info *f = &yi->new_frame_info[frame];
    struct ivtv_buffer *buf;
    u32 y_size = 720 * ((f->src_h + 31) & ~31);  /* Y plane size, height rounded up to 32 lines */
    u32 uv_offset = offset + IVTV_YUV_BUFFER_UV_OFFSET;
    int y_done = 0;             /* set once the Y plane has been fully covered */
    int bytes_written = 0;
    int idx = 0;

    IVTV_DEBUG_HI_DMA("DEC PREPARE DMA %s: %08x %08x\n", s->name, s->q_predma.bytesused, offset);

    /* Insert buffer block for YUV if needed */
    if (s->type == IVTV_DEC_STREAM_TYPE_YUV && f->offset_y) {
        if (yi->blanking_dmaptr) {
            /* Prepend a 16-line blanking block before the frame data. */
            s->sg_pending[idx].src = yi->blanking_dmaptr;
            s->sg_pending[idx].dst = offset;
            s->sg_pending[idx].size = 720 * 16;
        }
        /* NOTE(review): offset/idx advance even when blanking_dmaptr is
           0, leaving sg_pending[0] unset in that case — confirm that
           blanking_dmaptr can only be 0 when its allocation failed. */
        offset += 720 * 16;
        idx++;
    }

    list_for_each_entry(buf, &s->q_predma.list, list) {
        /* YUV UV Offset from Y Buffer */
        if (s->type == IVTV_DEC_STREAM_TYPE_YUV && !y_done &&
                (bytes_written + buf->bytesused) >= y_size) {
            /* This buffer straddles the Y/UV boundary: first segment
               finishes the Y plane... */
            s->sg_pending[idx].src = buf->dma_handle;
            s->sg_pending[idx].dst = offset;
            s->sg_pending[idx].size = y_size - bytes_written;
            offset = uv_offset;
            if (s->sg_pending[idx].size != buf->bytesused) {
                /* ...and a second segment starts the UV plane. */
                idx++;
                s->sg_pending[idx].src =
                  buf->dma_handle + s->sg_pending[idx - 1].size;
                s->sg_pending[idx].dst = offset;
                s->sg_pending[idx].size =
                   buf->bytesused - s->sg_pending[idx - 1].size;
                offset += s->sg_pending[idx].size;
            }
            y_done = 1;
        } else {
            /* Plain contiguous segment. */
            s->sg_pending[idx].src = buf->dma_handle;
            s->sg_pending[idx].dst = offset;
            s->sg_pending[idx].size = buf->bytesused;
            offset += buf->bytesused;
        }
        bytes_written += buf->bytesused;

        /* Sync SG buffers */
        ivtv_buf_sync_for_device(s, buf);
        idx++;
    }
    s->sg_pending_size = idx;

    /* Sync Hardware SG List of buffers */
    ivtv_stream_sync_for_device(s);
    if (lock) {
        unsigned long flags = 0;

        spin_lock_irqsave(&itv->dma_reg_lock, flags);
        if (!test_bit(IVTV_F_I_DMA, &itv->i_flags))
            ivtv_dma_dec_start(s);
        else
            set_bit(IVTV_F_S_DMA_PENDING, &s->s_flags);
        spin_unlock_irqrestore(&itv->dma_reg_lock, flags);
    } else {
        if (!test_bit(IVTV_F_I_DMA, &itv->i_flags))
            ivtv_dma_dec_start(s);
        else
            set_bit(IVTV_F_S_DMA_PENDING, &s->s_flags);
    }
}
0413 
0414 static void ivtv_dma_enc_start_xfer(struct ivtv_stream *s)
0415 {
0416     struct ivtv *itv = s->itv;
0417 
0418     s->sg_dma->src = cpu_to_le32(s->sg_processing[s->sg_processed].src);
0419     s->sg_dma->dst = cpu_to_le32(s->sg_processing[s->sg_processed].dst);
0420     s->sg_dma->size = cpu_to_le32(s->sg_processing[s->sg_processed].size | 0x80000000);
0421     s->sg_processed++;
0422     /* Sync Hardware SG List of buffers */
0423     ivtv_stream_sync_for_device(s);
0424     write_reg(s->sg_handle, IVTV_REG_ENCDMAADDR);
0425     write_reg_sync(read_reg(IVTV_REG_DMAXFER) | 0x02, IVTV_REG_DMAXFER);
0426     itv->dma_timer.expires = jiffies + msecs_to_jiffies(300);
0427     add_timer(&itv->dma_timer);
0428 }
0429 
0430 static void ivtv_dma_dec_start_xfer(struct ivtv_stream *s)
0431 {
0432     struct ivtv *itv = s->itv;
0433 
0434     s->sg_dma->src = cpu_to_le32(s->sg_processing[s->sg_processed].src);
0435     s->sg_dma->dst = cpu_to_le32(s->sg_processing[s->sg_processed].dst);
0436     s->sg_dma->size = cpu_to_le32(s->sg_processing[s->sg_processed].size | 0x80000000);
0437     s->sg_processed++;
0438     /* Sync Hardware SG List of buffers */
0439     ivtv_stream_sync_for_device(s);
0440     write_reg(s->sg_handle, IVTV_REG_DECDMAADDR);
0441     write_reg_sync(read_reg(IVTV_REG_DMAXFER) | 0x01, IVTV_REG_DMAXFER);
0442     itv->dma_timer.expires = jiffies + msecs_to_jiffies(300);
0443     add_timer(&itv->dma_timer);
0444 }
0445 
0446 /* start the encoder DMA */
/* start the encoder DMA */
static void ivtv_dma_enc_start(struct ivtv_stream *s)
{
    struct ivtv *itv = s->itv;
    struct ivtv_stream *s_vbi = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];
    int i;

    IVTV_DEBUG_HI_DMA("start %s for %s\n", ivtv_use_dma(s) ? "DMA" : "PIO", s->name);

    /* Move staged buffers into the active DMA queue. */
    if (s->q_predma.bytesused)
        ivtv_queue_move(s, &s->q_predma, NULL, &s->q_dma, s->q_predma.bytesused);

    /* Extend the last segment by 256 bytes — presumably so the region
       that dma_post() scans for the magic cookie is transferred too;
       NOTE(review): confirm. */
    if (ivtv_use_dma(s))
        s->sg_pending[s->sg_pending_size - 1].size += 256;

    /* If this is an MPEG stream, and VBI data is also pending, then append the
       VBI DMA to the MPEG DMA and transfer both sets of data at once.

       VBI DMA is a second class citizen compared to MPEG and mixing them together
       will confuse the firmware (the end of a VBI DMA is seen as the end of a
       MPEG DMA, thus effectively dropping an MPEG frame). So instead we make
       sure we only use the MPEG DMA to transfer the VBI DMA if both are in
       use. This way no conflicts occur. */
    clear_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags);
    if (s->type == IVTV_ENC_STREAM_TYPE_MPG && s_vbi->sg_pending_size &&
            s->sg_pending_size + s_vbi->sg_pending_size <= s->buffers) {
        ivtv_queue_move(s_vbi, &s_vbi->q_predma, NULL, &s_vbi->q_dma, s_vbi->q_predma.bytesused);
        if (ivtv_use_dma(s_vbi))
            s_vbi->sg_pending[s_vbi->sg_pending_size - 1].size += 256;
        /* Append the VBI SG entries to the MPEG SG list. */
        for (i = 0; i < s_vbi->sg_pending_size; i++) {
            s->sg_pending[s->sg_pending_size++] = s_vbi->sg_pending[i];
        }
        s_vbi->dma_offset = s_vbi->pending_offset;
        s_vbi->sg_pending_size = 0;
        s_vbi->dma_xfer_cnt++;
        set_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags);
        IVTV_DEBUG_HI_DMA("include DMA for %s\n", s_vbi->name);
    }

    /* Promote the pending SG list/state to the processing state. */
    s->dma_xfer_cnt++;
    memcpy(s->sg_processing, s->sg_pending, sizeof(struct ivtv_sg_host_element) * s->sg_pending_size);
    s->sg_processing_size = s->sg_pending_size;
    s->sg_pending_size = 0;
    s->sg_processed = 0;
    s->dma_offset = s->pending_offset;
    s->dma_backup = s->pending_backup;
    s->dma_pts = s->pending_pts;

    if (ivtv_use_pio(s)) {
        /* PIO: defer the actual copy to ivtv_pio_work_handler(). */
        set_bit(IVTV_F_I_WORK_HANDLER_PIO, &itv->i_flags);
        set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
        set_bit(IVTV_F_I_PIO, &itv->i_flags);
        itv->cur_pio_stream = s->type;
    }
    else {
        /* DMA: start the first segment transfer immediately. */
        itv->dma_retries = 0;
        ivtv_dma_enc_start_xfer(s);
        set_bit(IVTV_F_I_DMA, &itv->i_flags);
        itv->cur_dma_stream = s->type;
    }
}
0507 
/* Start a decoder DMA: promote staged buffers and the pending SG list to
 * the processing state, then kick off the first segment transfer.
 */
static void ivtv_dma_dec_start(struct ivtv_stream *s)
{
    struct ivtv *itv = s->itv;

    /* Move staged buffers into the active DMA queue. */
    if (s->q_predma.bytesused)
        ivtv_queue_move(s, &s->q_predma, NULL, &s->q_dma, s->q_predma.bytesused);
    s->dma_xfer_cnt++;
    /* Snapshot the pending SG list as the list now being processed. */
    memcpy(s->sg_processing, s->sg_pending, sizeof(struct ivtv_sg_host_element) * s->sg_pending_size);
    s->sg_processing_size = s->sg_pending_size;
    s->sg_pending_size = 0;
    s->sg_processed = 0;

    IVTV_DEBUG_HI_DMA("start DMA for %s\n", s->name);
    itv->dma_retries = 0;
    ivtv_dma_dec_start_xfer(s);
    set_bit(IVTV_F_I_DMA, &itv->i_flags);
    itv->cur_dma_stream = s->type;
}
0526 
/* Handle the "decoder DMA read done" interrupt: check for DMA errors,
 * retry or advance to the next SG segment, and on completion notify the
 * firmware and recycle the transferred buffers.
 */
static void ivtv_irq_dma_read(struct ivtv *itv)
{
    struct ivtv_stream *s = NULL;
    struct ivtv_buffer *buf;
    int hw_stream_type = 0;

    IVTV_DEBUG_HI_IRQ("DEC DMA READ\n");

    /* The transfer finished; stop the watchdog timer. */
    del_timer(&itv->dma_timer);

    if (!test_bit(IVTV_F_I_UDMA, &itv->i_flags) && itv->cur_dma_stream < 0)
        return;

    if (!test_bit(IVTV_F_I_UDMA, &itv->i_flags)) {
        s = &itv->streams[itv->cur_dma_stream];
        ivtv_stream_sync_for_cpu(s);

        /* Bits 0x14 in DMASTATUS indicate a DMA error condition. */
        if (read_reg(IVTV_REG_DMASTATUS) & 0x14) {
            IVTV_DEBUG_WARN("DEC DMA ERROR %x (xfer %d of %d, retry %d)\n",
                    read_reg(IVTV_REG_DMASTATUS),
                    s->sg_processed, s->sg_processing_size, itv->dma_retries);
            write_reg(read_reg(IVTV_REG_DMASTATUS) & 3, IVTV_REG_DMASTATUS);
            if (itv->dma_retries == 3) {
                /* Too many retries, give up on this frame */
                itv->dma_retries = 0;
                s->sg_processed = s->sg_processing_size;
            }
            else {
                /* Retry, starting with the first xfer segment.
                   Just retrying the current segment is not sufficient. */
                s->sg_processed = 0;
                itv->dma_retries++;
            }
        }
        if (s->sg_processed < s->sg_processing_size) {
            /* DMA next buffer */
            ivtv_dma_dec_start_xfer(s);
            return;
        }
        /* hw_stream_type: 0 = MPG, 2 = YUV (firmware encoding). */
        if (s->type == IVTV_DEC_STREAM_TYPE_YUV)
            hw_stream_type = 2;
        IVTV_DEBUG_HI_DMA("DEC DATA READ %s: %d\n", s->name, s->q_dma.bytesused);

        /* For some reason must kick the firmware, like PIO mode,
           I think this tells the firmware we are done and the size
           of the xfer so it can calculate what we need next.
           I think we can do this part ourselves but would have to
           fully calculate xfer info ourselves and not use interrupts
         */
        ivtv_vapi(itv, CX2341X_DEC_SCHED_DMA_FROM_HOST, 3, 0, s->q_dma.bytesused,
                hw_stream_type);

        /* Free last DMA call */
        while ((buf = ivtv_dequeue(s, &s->q_dma)) != NULL) {
            ivtv_buf_sync_for_cpu(s, buf);
            ivtv_enqueue(s, buf, &s->q_free);
        }
        wake_up(&s->waitq);
    }
    /* Clear the DMA-in-progress state and wake anyone waiting on it. */
    clear_bit(IVTV_F_I_UDMA, &itv->i_flags);
    clear_bit(IVTV_F_I_DMA, &itv->i_flags);
    itv->cur_dma_stream = -1;
    wake_up(&itv->dma_waitq);
}
0591 
/* Handle the "encoder DMA complete" interrupt: check for errors, retry or
 * advance to the next SG segment, and on completion post-process the data
 * (including any VBI data appended to an MPEG transfer).
 */
static void ivtv_irq_enc_dma_complete(struct ivtv *itv)
{
    u32 data[CX2341X_MBOX_MAX_DATA];
    struct ivtv_stream *s;

    /* data[0] = status flags, data[1] = transfer size (per mailbox). */
    ivtv_api_get_data(&itv->enc_mbox, IVTV_MBOX_DMA_END, 2, data);
    IVTV_DEBUG_HI_IRQ("ENC DMA COMPLETE %x %d (%d)\n", data[0], data[1], itv->cur_dma_stream);

    /* The transfer finished; stop the watchdog timer. */
    del_timer(&itv->dma_timer);

    if (itv->cur_dma_stream < 0)
        return;

    s = &itv->streams[itv->cur_dma_stream];
    ivtv_stream_sync_for_cpu(s);

    /* Bits 0x18 in the mailbox status indicate a DMA error. */
    if (data[0] & 0x18) {
        IVTV_DEBUG_WARN("ENC DMA ERROR %x (offset %08x, xfer %d of %d, retry %d)\n", data[0],
            s->dma_offset, s->sg_processed, s->sg_processing_size, itv->dma_retries);
        write_reg(read_reg(IVTV_REG_DMASTATUS) & 3, IVTV_REG_DMASTATUS);
        if (itv->dma_retries == 3) {
            /* Too many retries, give up on this frame */
            itv->dma_retries = 0;
            s->sg_processed = s->sg_processing_size;
        }
        else {
            /* Retry, starting with the first xfer segment.
               Just retrying the current segment is not sufficient. */
            s->sg_processed = 0;
            itv->dma_retries++;
        }
    }
    if (s->sg_processed < s->sg_processing_size) {
        /* DMA next buffer */
        ivtv_dma_enc_start_xfer(s);
        return;
    }
    clear_bit(IVTV_F_I_DMA, &itv->i_flags);
    itv->cur_dma_stream = -1;
    dma_post(s);
    /* If VBI data rode along with this MPEG DMA, post-process it too. */
    if (test_and_clear_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags)) {
        s = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];
        dma_post(s);
    }
    s->sg_processing_size = 0;
    s->sg_processed = 0;
    wake_up(&itv->dma_waitq);
}
0640 
0641 static void ivtv_irq_enc_pio_complete(struct ivtv *itv)
0642 {
0643     struct ivtv_stream *s;
0644 
0645     if (itv->cur_pio_stream < 0 || itv->cur_pio_stream >= IVTV_MAX_STREAMS) {
0646         itv->cur_pio_stream = -1;
0647         return;
0648     }
0649     s = &itv->streams[itv->cur_pio_stream];
0650     IVTV_DEBUG_HI_IRQ("ENC PIO COMPLETE %s\n", s->name);
0651     clear_bit(IVTV_F_I_PIO, &itv->i_flags);
0652     itv->cur_pio_stream = -1;
0653     dma_post(s);
0654     if (s->type == IVTV_ENC_STREAM_TYPE_MPG)
0655         ivtv_vapi(itv, CX2341X_ENC_SCHED_DMA_TO_HOST, 3, 0, 0, 0);
0656     else if (s->type == IVTV_ENC_STREAM_TYPE_YUV)
0657         ivtv_vapi(itv, CX2341X_ENC_SCHED_DMA_TO_HOST, 3, 0, 0, 1);
0658     else if (s->type == IVTV_ENC_STREAM_TYPE_PCM)
0659         ivtv_vapi(itv, CX2341X_ENC_SCHED_DMA_TO_HOST, 3, 0, 0, 2);
0660     clear_bit(IVTV_F_I_PIO, &itv->i_flags);
0661     if (test_and_clear_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags)) {
0662         s = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];
0663         dma_post(s);
0664     }
0665     wake_up(&itv->dma_waitq);
0666 }
0667 
/* Handle the DMA error interrupt / watchdog timeout: carefully clear the
 * error status (avoiding a race with the DMA engine), then retry the
 * transfer where possible or abandon it.
 */
static void ivtv_irq_dma_err(struct ivtv *itv)
{
    u32 data[CX2341X_MBOX_MAX_DATA];
    u32 status;

    /* Stop the watchdog; we are handling the error now. */
    del_timer(&itv->dma_timer);

    ivtv_api_get_data(&itv->enc_mbox, IVTV_MBOX_DMA_END, 2, data);
    status = read_reg(IVTV_REG_DMASTATUS);
    IVTV_DEBUG_WARN("DMA ERROR %08x %08x %08x %d\n", data[0], data[1],
                status, itv->cur_dma_stream);
    /*
     * We do *not* write back to the IVTV_REG_DMASTATUS register to
     * clear the error status, if either the encoder write (0x02) or
     * decoder read (0x01) bus master DMA operation do not indicate
     * completed.  We can race with the DMA engine, which may have
     * transitioned to completed status *after* we read the register.
     * Setting a IVTV_REG_DMASTATUS flag back to "busy" status, after the
     * DMA engine has completed, will cause the DMA engine to stop working.
     */
    status &= 0x3;
    if (status == 0x3)
        write_reg(status, IVTV_REG_DMASTATUS);

    if (!test_bit(IVTV_F_I_UDMA, &itv->i_flags) &&
        itv->cur_dma_stream >= 0 && itv->cur_dma_stream < IVTV_MAX_STREAMS) {
        struct ivtv_stream *s = &itv->streams[itv->cur_dma_stream];

        /* Decoder stream types are numbered above the encoder types. */
        if (s->type >= IVTV_DEC_STREAM_TYPE_MPG) {
            /* retry */
            /*
             * FIXME - handle cases of DMA error similar to
             * encoder below, except conditioned on status & 0x1
             */
            ivtv_dma_dec_start(s);
            return;
        } else {
            if ((status & 0x2) == 0) {
                /*
                 * CX2341x Bus Master DMA write is ongoing.
                 * Reset the timer and let it complete.
                 */
                itv->dma_timer.expires =
                        jiffies + msecs_to_jiffies(600);
                add_timer(&itv->dma_timer);
                return;
            }

            if (itv->dma_retries < 3) {
                /*
                 * CX2341x Bus Master DMA write has ended.
                 * Retry the write, starting with the first
                 * xfer segment. Just retrying the current
                 * segment is not sufficient.
                 */
                s->sg_processed = 0;
                itv->dma_retries++;
                ivtv_dma_enc_start_xfer(s);
                return;
            }
            /* Too many retries, give up on this one */
        }

    }
    /* User-space DMA: restart it and let its own completion path run. */
    if (test_bit(IVTV_F_I_UDMA, &itv->i_flags)) {
        ivtv_udma_start(itv);
        return;
    }
    /* Abandon the transfer: clear DMA state and wake any waiters. */
    clear_bit(IVTV_F_I_UDMA, &itv->i_flags);
    clear_bit(IVTV_F_I_DMA, &itv->i_flags);
    itv->cur_dma_stream = -1;
    wake_up(&itv->dma_waitq);
}
0741 
0742 static void ivtv_irq_enc_start_cap(struct ivtv *itv)
0743 {
0744     u32 data[CX2341X_MBOX_MAX_DATA];
0745     struct ivtv_stream *s;
0746 
0747     /* Get DMA destination and size arguments from card */
0748     ivtv_api_get_data(&itv->enc_mbox, IVTV_MBOX_DMA, 7, data);
0749     IVTV_DEBUG_HI_IRQ("ENC START CAP %d: %08x %08x\n", data[0], data[1], data[2]);
0750 
0751     if (data[0] > 2 || data[1] == 0 || data[2] == 0) {
0752         IVTV_DEBUG_WARN("Unknown input: %08x %08x %08x\n",
0753                 data[0], data[1], data[2]);
0754         return;
0755     }
0756     s = &itv->streams[ivtv_stream_map[data[0]]];
0757     if (!stream_enc_dma_append(s, data)) {
0758         set_bit(ivtv_use_pio(s) ? IVTV_F_S_PIO_PENDING : IVTV_F_S_DMA_PENDING, &s->s_flags);
0759     }
0760 }
0761 
0762 static void ivtv_irq_enc_vbi_cap(struct ivtv *itv)
0763 {
0764     u32 data[CX2341X_MBOX_MAX_DATA];
0765     struct ivtv_stream *s;
0766 
0767     IVTV_DEBUG_HI_IRQ("ENC START VBI CAP\n");
0768     s = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];
0769 
0770     if (!stream_enc_dma_append(s, data))
0771         set_bit(ivtv_use_pio(s) ? IVTV_F_S_PIO_PENDING : IVTV_F_S_DMA_PENDING, &s->s_flags);
0772 }
0773 
0774 static void ivtv_irq_dec_vbi_reinsert(struct ivtv *itv)
0775 {
0776     u32 data[CX2341X_MBOX_MAX_DATA];
0777     struct ivtv_stream *s = &itv->streams[IVTV_DEC_STREAM_TYPE_VBI];
0778 
0779     IVTV_DEBUG_HI_IRQ("DEC VBI REINSERT\n");
0780     if (test_bit(IVTV_F_S_CLAIMED, &s->s_flags) &&
0781             !stream_enc_dma_append(s, data)) {
0782         set_bit(IVTV_F_S_PIO_PENDING, &s->s_flags);
0783     }
0784 }
0785 
/* Handle the "decoder data request" interrupt: the firmware asks for the
 * next chunk of YUV or MPEG data.  If enough data is queued, stage it and
 * start the decoder DMA; otherwise flag that the stream needs data.
 */
static void ivtv_irq_dec_data_req(struct ivtv *itv)
{
    u32 data[CX2341X_MBOX_MAX_DATA];
    struct ivtv_stream *s;

    /* YUV or MPG */

    if (test_bit(IVTV_F_I_DEC_YUV, &itv->i_flags)) {
        ivtv_api_get_data(&itv->dec_mbox, IVTV_MBOX_DMA, 2, data);
        /* Y plane + UV plane = 1.5 * 720 bytes per line, height
           rounded up to a multiple of 32 lines. */
        itv->dma_data_req_size =
                 1080 * ((itv->yuv_info.v4l2_src_h + 31) & ~31);
        itv->dma_data_req_offset = data[1];
        if (atomic_read(&itv->yuv_info.next_dma_frame) >= 0)
            ivtv_yuv_frame_complete(itv);
        s = &itv->streams[IVTV_DEC_STREAM_TYPE_YUV];
    }
    else {
        ivtv_api_get_data(&itv->dec_mbox, IVTV_MBOX_DMA, 3, data);
        /* Cap the requested size at 64 KiB. */
        itv->dma_data_req_size = min_t(u32, data[2], 0x10000);
        itv->dma_data_req_offset = data[1];
        s = &itv->streams[IVTV_DEC_STREAM_TYPE_MPG];
    }
    IVTV_DEBUG_HI_IRQ("DEC DATA REQ %s: %d %08x %u\n", s->name, s->q_full.bytesused,
               itv->dma_data_req_offset, itv->dma_data_req_size);
    if (itv->dma_data_req_size == 0 || s->q_full.bytesused < itv->dma_data_req_size) {
        /* Not enough queued data yet; the write path will restart us. */
        set_bit(IVTV_F_S_NEEDS_DATA, &s->s_flags);
    }
    else {
        if (test_bit(IVTV_F_I_DEC_YUV, &itv->i_flags))
            ivtv_yuv_setup_stream_frame(itv);
        clear_bit(IVTV_F_S_NEEDS_DATA, &s->s_flags);
        /* Stage the requested amount and start the decoder DMA (lock=0:
           we are already in interrupt context holding the reg lock path). */
        ivtv_queue_move(s, &s->q_full, NULL, &s->q_predma, itv->dma_data_req_size);
        ivtv_dma_stream_dec_prepare(s, itv->dma_data_req_offset + IVTV_DECODER_OFFSET, 0);
    }
}
0821 
/* Decoder vertical sync interrupt: advance the YUV output frame, emit
 * V4L2 VSYNC events, and schedule deferred VBI/YUV work as needed. */
static void ivtv_irq_vsync(struct ivtv *itv)
{
    /* The vsync interrupt is unusual in that it won't clear until
     * the end of the first line for the current field, at which
     * point it clears itself. This can result in repeated vsync
     * interrupts, or a missed vsync. Read some of the registers
     * to determine the line being displayed and ensure we handle
     * one vsync per frame.
     */
    unsigned int frame = read_reg(IVTV_REG_DEC_LINE_FIELD) & 1;
    struct yuv_playback_info *yi = &itv->yuv_info;
    int last_dma_frame = atomic_read(&yi->next_dma_frame);
    /* NOTE(review): next_dma_frame can apparently be -1 (see the >= 0
     * check below); in that case f points one element before
     * new_frame_info[] and the f->sync_field/f->interlaced reads below
     * are out of bounds — confirm against the YUV setup code. */
    struct yuv_frame_info *f = &yi->new_frame_info[last_dma_frame];

    if (0) IVTV_DEBUG_IRQ("DEC VSYNC\n");

    /* Advance to the next YUV frame when this field matches the frame's
     * sync field, or on any field change for non-interlaced content. */
    if (((frame ^ f->sync_field) == 0 &&
        ((itv->last_vsync_field & 1) ^ f->sync_field)) ||
            (frame != (itv->last_vsync_field & 1) && !f->interlaced)) {
        int next_dma_frame = last_dma_frame;

        if (!(f->interlaced && f->delay && yi->fields_lapsed < 1)) {
            if (next_dma_frame >= 0 && next_dma_frame != atomic_read(&yi->next_fill_frame)) {
                /* Program the next frame's luma and chroma buffer
                 * offsets — presumably the display address registers;
                 * register meaning taken on faith from ivtv-yuv. */
                write_reg(yuv_offset[next_dma_frame] >> 4, 0x82c);
                write_reg((yuv_offset[next_dma_frame] + IVTV_YUV_BUFFER_UV_OFFSET) >> 4, 0x830);
                write_reg(yuv_offset[next_dma_frame] >> 4, 0x834);
                write_reg((yuv_offset[next_dma_frame] + IVTV_YUV_BUFFER_UV_OFFSET) >> 4, 0x838);
                next_dma_frame = (next_dma_frame + 1) % IVTV_YUV_BUFFERS;
                atomic_set(&yi->next_dma_frame, next_dma_frame);
                yi->fields_lapsed = -1;
                yi->running = 1;
            }
        }
    }
    /* A new field started since the last vsync we handled. */
    if (frame != (itv->last_vsync_field & 1)) {
        static const struct v4l2_event evtop = {
            .type = V4L2_EVENT_VSYNC,
            .u.vsync.field = V4L2_FIELD_TOP,
        };
        static const struct v4l2_event evbottom = {
            .type = V4L2_EVENT_VSYNC,
            .u.vsync.field = V4L2_FIELD_BOTTOM,
        };
        struct ivtv_stream *s = ivtv_get_output_stream(itv);

        itv->last_vsync_field += 1;
        if (frame == 0) {
            clear_bit(IVTV_F_I_VALID_DEC_TIMINGS, &itv->i_flags);
            clear_bit(IVTV_F_I_EV_VSYNC_FIELD, &itv->i_flags);
        }
        else {
            set_bit(IVTV_F_I_EV_VSYNC_FIELD, &itv->i_flags);
        }
        /* Wake any process sleeping on the VSYNC event, and queue the
         * corresponding V4L2 event on the output stream's device node. */
        if (test_bit(IVTV_F_I_EV_VSYNC_ENABLED, &itv->i_flags)) {
            set_bit(IVTV_F_I_EV_VSYNC, &itv->i_flags);
            wake_up(&itv->event_waitq);
            if (s)
                wake_up(&s->waitq);
        }
        if (s && s->vdev.v4l2_dev)
            v4l2_event_queue(&s->vdev, frame ? &evtop : &evbottom);
        wake_up(&itv->vsync_waitq);

        /* Send VBI to saa7127 */
        if (frame && (itv->output_mode == OUT_PASSTHROUGH ||
            test_bit(IVTV_F_I_UPDATE_WSS, &itv->i_flags) ||
            test_bit(IVTV_F_I_UPDATE_VPS, &itv->i_flags) ||
            test_bit(IVTV_F_I_UPDATE_CC, &itv->i_flags))) {
            set_bit(IVTV_F_I_WORK_HANDLER_VBI, &itv->i_flags);
            set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
        }

        /* Check if we need to update the yuv registers */
        if (yi->running && (yi->yuv_forced_update || f->update)) {
            if (!f->update) {
                /* Forced update: fall back to the most recently
                 * DMA'd frame's info. */
                last_dma_frame =
                    (u8)(atomic_read(&yi->next_dma_frame) -
                         1) % IVTV_YUV_BUFFERS;
                f = &yi->new_frame_info[last_dma_frame];
            }

            if (f->src_w) {
                yi->update_frame = last_dma_frame;
                f->update = 0;
                yi->yuv_forced_update = 0;
                /* Defer the actual register update to the work handler. */
                set_bit(IVTV_F_I_WORK_HANDLER_YUV, &itv->i_flags);
                set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
            }
        }

        yi->fields_lapsed++;
    }
}
0915 
/* IRQ bits that may require a (new) DMA or PIO transfer to be scheduled;
 * used below to decide whether to run the round-robin stream scheduler. */
#define IVTV_IRQ_DMA (IVTV_IRQ_DMA_READ | IVTV_IRQ_ENC_DMA_COMPLETE | IVTV_IRQ_DMA_ERR | IVTV_IRQ_ENC_START_CAP | IVTV_IRQ_ENC_VBI_CAP | IVTV_IRQ_DEC_DATA_REQ | IVTV_IRQ_DEC_VBI_RE_INSERT)

/* Main interrupt handler: acknowledges and dispatches each pending,
 * unmasked IRQ bit to its helper, then starts any pending DMA/PIO
 * transfers round-robin across the streams, and finally queues the
 * deferred work handler if any work was flagged. */
irqreturn_t ivtv_irq_handler(int irq, void *dev_id)
{
    struct ivtv *itv = (struct ivtv *)dev_id;
    u32 combo;
    u32 stat;
    int i;
    u8 vsync_force = 0;

    spin_lock(&itv->dma_reg_lock);
    /* get contents of irq status register */
    stat = read_reg(IVTV_REG_IRQSTATUS);

    /* Only consider interrupt sources that are not masked off. */
    combo = ~itv->irqmask & stat;

    /* Clear out IRQ */
    if (combo) write_reg(combo, IVTV_REG_IRQSTATUS);

    if (0 == combo) {
        /* The vsync interrupt is unusual and clears itself. If we
         * took too long, we may have missed it. Do some checks
         */
        if (~itv->irqmask & IVTV_IRQ_DEC_VSYNC) {
            /* vsync is enabled, see if we're in a new field */
            if ((itv->last_vsync_field & 1) !=
                (read_reg(IVTV_REG_DEC_LINE_FIELD) & 1)) {
                /* New field, looks like we missed it */
                IVTV_DEBUG_YUV("VSync interrupt missed %d\n",
                       read_reg(IVTV_REG_DEC_LINE_FIELD) >> 16);
                vsync_force = 1;
            }
        }

        if (!vsync_force) {
            /* No Vsync expected, wasn't for us */
            spin_unlock(&itv->dma_reg_lock);
            return IRQ_NONE;
        }
    }

    /* Exclude interrupts noted below from the output, otherwise the log is flooded with
       these messages */
    if (combo & ~0xff6d0400)
        IVTV_DEBUG_HI_IRQ("======= valid IRQ bits: 0x%08x ======\n", combo);

    /* Dispatch each asserted interrupt source to its handler. */
    if (combo & IVTV_IRQ_DEC_DMA_COMPLETE) {
        IVTV_DEBUG_HI_IRQ("DEC DMA COMPLETE\n");
    }

    if (combo & IVTV_IRQ_DMA_READ) {
        ivtv_irq_dma_read(itv);
    }

    if (combo & IVTV_IRQ_ENC_DMA_COMPLETE) {
        ivtv_irq_enc_dma_complete(itv);
    }

    if (combo & IVTV_IRQ_ENC_PIO_COMPLETE) {
        ivtv_irq_enc_pio_complete(itv);
    }

    if (combo & IVTV_IRQ_DMA_ERR) {
        ivtv_irq_dma_err(itv);
    }

    if (combo & IVTV_IRQ_ENC_START_CAP) {
        ivtv_irq_enc_start_cap(itv);
    }

    if (combo & IVTV_IRQ_ENC_VBI_CAP) {
        ivtv_irq_enc_vbi_cap(itv);
    }

    if (combo & IVTV_IRQ_DEC_VBI_RE_INSERT) {
        ivtv_irq_dec_vbi_reinsert(itv);
    }

    /* End of stream: flag it and wake processes waiting for EOS. */
    if (combo & IVTV_IRQ_ENC_EOS) {
        IVTV_DEBUG_IRQ("ENC EOS\n");
        set_bit(IVTV_F_I_EOS, &itv->i_flags);
        wake_up(&itv->eos_waitq);
    }

    if (combo & IVTV_IRQ_DEC_DATA_REQ) {
        ivtv_irq_dec_data_req(itv);
    }

    /* Decoder Vertical Sync - We can't rely on 'combo', so check if vsync enabled */
    if (~itv->irqmask & IVTV_IRQ_DEC_VSYNC) {
        ivtv_irq_vsync(itv);
    }

    if (combo & IVTV_IRQ_ENC_VIM_RST) {
        IVTV_DEBUG_IRQ("VIM RST\n");
        /*ivtv_vapi(itv, CX2341X_ENC_REFRESH_INPUT, 0); */
    }

    if (combo & IVTV_IRQ_DEC_AUD_MODE_CHG) {
        IVTV_DEBUG_INFO("Stereo mode changed\n");
    }

    /* A DMA-related IRQ fired and no DMA is in flight: start the next
     * pending stream transfer, round-robin so no stream is starved. */
    if ((combo & IVTV_IRQ_DMA) && !test_bit(IVTV_F_I_DMA, &itv->i_flags)) {
        itv->irq_rr_idx++;
        for (i = 0; i < IVTV_MAX_STREAMS; i++) {
            int idx = (i + itv->irq_rr_idx) % IVTV_MAX_STREAMS;
            struct ivtv_stream *s = &itv->streams[idx];

            if (!test_and_clear_bit(IVTV_F_S_DMA_PENDING, &s->s_flags))
                continue;
            /* Decoder streams go through the decoder DMA path. */
            if (s->type >= IVTV_DEC_STREAM_TYPE_MPG)
                ivtv_dma_dec_start(s);
            else
                ivtv_dma_enc_start(s);
            break;
        }

        /* No stream DMA was pending: a user-space DMA may be waiting. */
        if (i == IVTV_MAX_STREAMS &&
            test_bit(IVTV_F_I_UDMA_PENDING, &itv->i_flags))
            ivtv_udma_start(itv);
    }

    /* Same round-robin scheduling for PIO-mode transfers. */
    if ((combo & IVTV_IRQ_DMA) && !test_bit(IVTV_F_I_PIO, &itv->i_flags)) {
        itv->irq_rr_idx++;
        for (i = 0; i < IVTV_MAX_STREAMS; i++) {
            int idx = (i + itv->irq_rr_idx) % IVTV_MAX_STREAMS;
            struct ivtv_stream *s = &itv->streams[idx];

            if (!test_and_clear_bit(IVTV_F_S_PIO_PENDING, &s->s_flags))
                continue;
            if (s->type == IVTV_DEC_STREAM_TYPE_VBI || s->type < IVTV_DEC_STREAM_TYPE_MPG)
                ivtv_dma_enc_start(s);
            break;
        }
    }

    /* Hand any flagged deferred work to the per-device worker thread. */
    if (test_and_clear_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags)) {
        kthread_queue_work(&itv->irq_worker, &itv->irq_work);
    }

    spin_unlock(&itv->dma_reg_lock);

    /* If we've just handled a 'forced' vsync, it's safest to say it
     * wasn't ours. Another device may have triggered it at just
     * the right time.
     */
    return vsync_force ? IRQ_NONE : IRQ_HANDLED;
}
1064 
/* DMA watchdog timer callback: fires when a DMA transfer did not complete
 * within the expected time. Clears the card's DMA status and the driver's
 * in-flight state so the driver can recover, then wakes up any waiters. */
void ivtv_unfinished_dma(struct timer_list *t)
{
    struct ivtv *itv = from_timer(itv, t, dma_timer);

    /* DMA already completed normally: nothing to clean up. */
    if (!test_bit(IVTV_F_I_DMA, &itv->i_flags))
        return;
    IVTV_ERR("DMA TIMEOUT %08x %d\n", read_reg(IVTV_REG_DMASTATUS), itv->cur_dma_stream);

    /* Ack the low two DMA status bits on the card before clearing the
     * driver-side flags, so no stale completion is left behind. */
    write_reg(read_reg(IVTV_REG_DMASTATUS) & 3, IVTV_REG_DMASTATUS);
    clear_bit(IVTV_F_I_UDMA, &itv->i_flags);
    clear_bit(IVTV_F_I_DMA, &itv->i_flags);
    itv->cur_dma_stream = -1;
    wake_up(&itv->dma_waitq);
}