// SPDX-License-Identifier: GPL-2.0-or-later
/* interrupt handling
    Copyright (C) 2003-2004  Kevin Thayer <nufan_wfk at yahoo.com>
    Copyright (C) 2004  Chris Kennedy <c@groovy.org>
    Copyright (C) 2005-2007  Hans Verkuil <hverkuil@xs4all.nl>

 */

#include "ivtv-driver.h"
#include "ivtv-queue.h"
#include "ivtv-udma.h"
#include "ivtv-irq.h"
#include "ivtv-mailbox.h"
#include "ivtv-vbi.h"
#include "ivtv-yuv.h"
#include <media/v4l2-event.h>

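/* Written into the stream data area before a DMA transfer is scheduled;
   dma_post() later searches for this cookie to find where the transferred
   data actually starts (see stream_enc_dma_append() and dma_post()). */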
#define DMA_MAGIC_COOKIE 0x000001fe

static void ivtv_dma_dec_start(struct ivtv_stream *s);

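/* Maps the capture type reported by the firmware in the DMA mailbox
   (data[0] in ivtv_irq_enc_start_cap()) to the corresponding encoder
   stream. */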
static const int ivtv_stream_map[] = {
	IVTV_ENC_STREAM_TYPE_MPG,
	IVTV_ENC_STREAM_TYPE_YUV,
	IVTV_ENC_STREAM_TYPE_PCM,
	IVTV_ENC_STREAM_TYPE_VBI,
};

static void ivtv_pcm_work_handler(struct ivtv *itv)
{
	struct ivtv_stream *s = &itv->streams[IVTV_ENC_STREAM_TYPE_PCM];
	struct ivtv_buffer *buf;

	/* Pass the PCM data to ivtv-alsa */

	while (1) {
		/*
		 * Users should not be using both the ALSA and V4L2 PCM audio
		 * capture interfaces at the same time.  If the user is doing
		 * this, there may be a buffer in q_io to grab, use, and put
		 * back in rotation.
		 */
		buf = ivtv_dequeue(s, &s->q_io);
		if (buf == NULL)
			buf = ivtv_dequeue(s, &s->q_full);
		if (buf == NULL)
			break;

		if (buf->readpos < buf->bytesused)
			itv->pcm_announce_callback(itv->alsa,
				(u8 *)(buf->buf + buf->readpos),
				(size_t)(buf->bytesused - buf->readpos));

		ivtv_enqueue(s, buf, &s->q_free);
	}
}

static void ivtv_pio_work_handler(struct ivtv *itv)
{
	struct ivtv_stream *s = &itv->streams[itv->cur_pio_stream];
	struct ivtv_buffer *buf;
	int i = 0;

	IVTV_DEBUG_HI_DMA("ivtv_pio_work_handler\n");
	if (itv->cur_pio_stream < 0 || itv->cur_pio_stream >= IVTV_MAX_STREAMS ||
			s->vdev.v4l2_dev == NULL || !ivtv_use_pio(s)) {
		itv->cur_pio_stream = -1;
		/* trigger PIO complete user interrupt */
		write_reg(IVTV_IRQ_ENC_PIO_COMPLETE, 0x44);
		return;
	}
	IVTV_DEBUG_HI_DMA("Process PIO %s\n", s->name);
	list_for_each_entry(buf, &s->q_dma.list, list) {
		u32 size = s->sg_processing[i].size & 0x3ffff;

		/* Copy the data from the card to the buffer */
		if (s->type == IVTV_DEC_STREAM_TYPE_VBI) {
			memcpy_fromio(buf->buf, itv->dec_mem + s->sg_processing[i].src - IVTV_DECODER_OFFSET, size);
		}
		else {
			memcpy_fromio(buf->buf, itv->enc_mem + s->sg_processing[i].src, size);
		}
		i++;
		if (i == s->sg_processing_size)
			break;
	}
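	/* trigger PIO complete user interrupt */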
	write_reg(IVTV_IRQ_ENC_PIO_COMPLETE, 0x44);
}

void ivtv_irq_work_handler(struct kthread_work *work)
{
	struct ivtv *itv = container_of(work, struct ivtv, irq_work);

	if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_PIO, &itv->i_flags))
		ivtv_pio_work_handler(itv);

	if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_VBI, &itv->i_flags))
		ivtv_vbi_work_handler(itv);

	if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_YUV, &itv->i_flags))
		ivtv_yuv_work_handler(itv);

	if (test_and_clear_bit(IVTV_F_I_WORK_HANDLER_PCM, &itv->i_flags))
		ivtv_pcm_work_handler(itv);
}

/* Determine the required DMA size, set up enough buffers in the predma queue and
   actually copy the data from the card to the buffers in case a PIO transfer is
   required for this stream.
 */
static int stream_enc_dma_append(struct ivtv_stream *s, u32 data[CX2341X_MBOX_MAX_DATA])
{
	struct ivtv *itv = s->itv;
	struct ivtv_buffer *buf;
	u32 bytes_needed = 0;
	u32 offset, size;
	u32 UVoffset = 0, UVsize = 0;
	int skip_bufs = s->q_predma.buffers;
	int idx = s->sg_pending_size;
	int rc;

	/* sanity checks */
	if (s->vdev.v4l2_dev == NULL) {
		IVTV_DEBUG_WARN("Stream %s not started\n", s->name);
		return -1;
	}
	if (!test_bit(IVTV_F_S_CLAIMED, &s->s_flags)) {
		IVTV_DEBUG_WARN("Stream %s not open\n", s->name);
		return -1;
	}

	/* determine offset, size and PTS for the various streams */
	switch (s->type) {
		case IVTV_ENC_STREAM_TYPE_MPG:
			offset = data[1];
			size = data[2];
			s->pending_pts = 0;
			break;

		case IVTV_ENC_STREAM_TYPE_YUV:
			offset = data[1];
			size = data[2];
			UVoffset = data[3];
			UVsize = data[4];
			s->pending_pts = ((u64) data[5] << 32) | data[6];
			break;

		case IVTV_ENC_STREAM_TYPE_PCM:
			offset = data[1] + 12;
			size = data[2] - 12;
			s->pending_pts = read_dec(offset - 8) |
				((u64)(read_dec(offset - 12)) << 32);
			if (itv->has_cx23415)
				offset += IVTV_DECODER_OFFSET;
			break;

		case IVTV_ENC_STREAM_TYPE_VBI:
			size = itv->vbi.enc_size * itv->vbi.fpi;
			offset = read_enc(itv->vbi.enc_start - 4) + 12;
			if (offset == 12) {
				IVTV_DEBUG_INFO("VBI offset == 0\n");
				return -1;
			}
			s->pending_pts = read_enc(offset - 4) | ((u64)read_enc(offset - 8) << 32);
			break;

		case IVTV_DEC_STREAM_TYPE_VBI:
			size = read_dec(itv->vbi.dec_start + 4) + 8;
			offset = read_dec(itv->vbi.dec_start) + itv->vbi.dec_start;
			s->pending_pts = 0;
			offset += IVTV_DECODER_OFFSET;
			break;
		default:
			/* shouldn't happen */
			return -1;
	}

	/* if this is the start of the DMA then fill in the magic cookie */
	if (s->sg_pending_size == 0 && ivtv_use_dma(s)) {
		if (itv->has_cx23415 && (s->type == IVTV_ENC_STREAM_TYPE_PCM ||
		    s->type == IVTV_DEC_STREAM_TYPE_VBI)) {
			s->pending_backup = read_dec(offset - IVTV_DECODER_OFFSET);
			write_dec_sync(DMA_MAGIC_COOKIE, offset - IVTV_DECODER_OFFSET);
		}
		else {
			s->pending_backup = read_enc(offset);
			write_enc_sync(DMA_MAGIC_COOKIE, offset);
		}
		s->pending_offset = offset;
	}

	bytes_needed = size;
	if (s->type == IVTV_ENC_STREAM_TYPE_YUV) {
		/* The size for the Y samples needs to be rounded upwards to a
		   multiple of the buf_size. The UV samples then start in the
		   next buffer. */
		bytes_needed = s->buf_size * ((bytes_needed + s->buf_size - 1) / s->buf_size);
		bytes_needed += UVsize;
	}

	IVTV_DEBUG_HI_DMA("%s %s: 0x%08x bytes at 0x%08x\n",
		ivtv_use_pio(s) ? "PIO" : "DMA", s->name, bytes_needed, offset);

	rc = ivtv_queue_move(s, &s->q_free, &s->q_full, &s->q_predma, bytes_needed);
	if (rc < 0) { /* Insufficient buffers */
		IVTV_DEBUG_WARN("Cannot obtain %d bytes for %s data transfer\n",
				bytes_needed, s->name);
		return -1;
	}
	if (rc && !s->buffers_stolen && test_bit(IVTV_F_S_APPL_IO, &s->s_flags)) {
		IVTV_WARN("All %s stream buffers are full. Dropping data.\n", s->name);
		IVTV_WARN("Cause: the application is not reading fast enough.\n");
	}
	s->buffers_stolen = rc;

	/* got the buffers, now fill in sg_pending */
	buf = list_entry(s->q_predma.list.next, struct ivtv_buffer, list);
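	/* Zero the first 128 bytes of the first buffer, presumably so a stale
	   DMA magic cookie from an earlier transfer cannot be mistaken for the
	   start of this one when dma_post() scans for it (assumed rationale). */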
	memset(buf->buf, 0, 128);
	list_for_each_entry(buf, &s->q_predma.list, list) {
		if (skip_bufs-- > 0)
			continue;
		s->sg_pending[idx].dst = buf->dma_handle;
		s->sg_pending[idx].src = offset;
		s->sg_pending[idx].size = s->buf_size;
		buf->bytesused = min(size, s->buf_size);
		buf->dma_xfer_cnt = s->dma_xfer_cnt;

		s->q_predma.bytesused += buf->bytesused;
		size -= buf->bytesused;
		offset += s->buf_size;

		/* Sync SG buffers */
		ivtv_buf_sync_for_device(s, buf);

		if (size == 0) {	/* YUV */
			/* process the UV section */
			offset = UVoffset;
			size = UVsize;
		}
		idx++;
	}
	s->sg_pending_size = idx;
	return 0;
}

static void dma_post(struct ivtv_stream *s)
{
	struct ivtv *itv = s->itv;
	struct ivtv_buffer *buf = NULL;
	struct list_head *p;
	u32 offset;
	__le32 *u32buf;
	int x = 0;

	IVTV_DEBUG_HI_DMA("%s %s completed (%x)\n", ivtv_use_pio(s) ? "PIO" : "DMA",
			s->name, s->dma_offset);
	list_for_each(p, &s->q_dma.list) {
		buf = list_entry(p, struct ivtv_buffer, list);
		u32buf = (__le32 *)buf->buf;

		/* Sync Buffer */
		ivtv_buf_sync_for_cpu(s, buf);

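		/* The first transferred word should be the magic cookie that
		   stream_enc_dma_append() wrote at the source address. Locate
		   it to find where the data actually starts in the buffer (it
		   can shift by up to 256 bytes), then restore the word the
		   cookie replaced from the saved backup. */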
		if (x == 0 && ivtv_use_dma(s)) {
			offset = s->dma_last_offset;
			if (le32_to_cpu(u32buf[offset / 4]) != DMA_MAGIC_COOKIE)
			{
				for (offset = 0; offset < 64; offset++)
					if (le32_to_cpu(u32buf[offset]) == DMA_MAGIC_COOKIE)
						break;
				offset *= 4;
				if (offset == 256) {
					IVTV_DEBUG_WARN("%s: Couldn't find start of buffer within the first 256 bytes\n", s->name);
					offset = s->dma_last_offset;
				}
				if (s->dma_last_offset != offset)
					IVTV_DEBUG_WARN("%s: offset %d -> %d\n", s->name, s->dma_last_offset, offset);
				s->dma_last_offset = offset;
			}
			if (itv->has_cx23415 && (s->type == IVTV_ENC_STREAM_TYPE_PCM ||
						s->type == IVTV_DEC_STREAM_TYPE_VBI)) {
				write_dec_sync(0, s->dma_offset - IVTV_DECODER_OFFSET);
			}
			else {
				write_enc_sync(0, s->dma_offset);
			}
			if (offset) {
				buf->bytesused -= offset;
				memcpy(buf->buf, buf->buf + offset, buf->bytesused + offset);
			}
			*u32buf = cpu_to_le32(s->dma_backup);
		}
		x++;
		/* flag byteswap ABCD -> DCBA for MPG & VBI data outside irq */
		if (s->type == IVTV_ENC_STREAM_TYPE_MPG ||
		    s->type == IVTV_ENC_STREAM_TYPE_VBI)
			buf->b_flags |= IVTV_F_B_NEED_BUF_SWAP;
	}
	if (buf)
		buf->bytesused += s->dma_last_offset;
	if (buf && s->type == IVTV_DEC_STREAM_TYPE_VBI) {
		list_for_each_entry(buf, &s->q_dma.list, list) {
			/* Parse and Groom VBI Data */
			s->q_dma.bytesused -= buf->bytesused;
			ivtv_process_vbi_data(itv, buf, 0, s->type);
			s->q_dma.bytesused += buf->bytesused;
		}
		if (s->fh == NULL) {
			ivtv_queue_move(s, &s->q_dma, NULL, &s->q_free, 0);
			return;
		}
	}

	ivtv_queue_move(s, &s->q_dma, NULL, &s->q_full, s->q_dma.bytesused);

	if (s->type == IVTV_ENC_STREAM_TYPE_PCM &&
	    itv->pcm_announce_callback != NULL) {
		/*
		 * Set up the work handler to pass the data to ivtv-alsa.
		 *
		 * We just use q_full and let the work handler race with users
		 * making ivtv-fileops.c calls on the PCM device node.
		 *
		 * Users should not be using both the ALSA and V4L2 PCM audio
		 * capture interfaces at the same time.  If the user does this,
		 * fragments of data will just go out each interface as they
		 * race for PCM data.
		 */
		set_bit(IVTV_F_I_WORK_HANDLER_PCM, &itv->i_flags);
		set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
	}

	if (s->fh)
		wake_up(&s->waitq);
}

void ivtv_dma_stream_dec_prepare(struct ivtv_stream *s, u32 offset, int lock)
{
	struct ivtv *itv = s->itv;
	struct yuv_playback_info *yi = &itv->yuv_info;
	u8 frame = yi->draw_frame;
	struct yuv_frame_info *f = &yi->new_frame_info[frame];
	struct ivtv_buffer *buf;
	u32 y_size = 720 * ((f->src_h + 31) & ~31);
	u32 uv_offset = offset + IVTV_YUV_BUFFER_UV_OFFSET;
	int y_done = 0;
	int bytes_written = 0;
	int idx = 0;

	IVTV_DEBUG_HI_DMA("DEC PREPARE DMA %s: %08x %08x\n", s->name, s->q_predma.bytesused, offset);

	/* Insert buffer block for YUV if needed */
	if (s->type == IVTV_DEC_STREAM_TYPE_YUV && f->offset_y) {
		if (yi->blanking_dmaptr) {
			s->sg_pending[idx].src = yi->blanking_dmaptr;
			s->sg_pending[idx].dst = offset;
			s->sg_pending[idx].size = 720 * 16;
		}
		offset += 720 * 16;
		idx++;
	}

	list_for_each_entry(buf, &s->q_predma.list, list) {
		/* YUV UV Offset from Y Buffer */
		if (s->type == IVTV_DEC_STREAM_TYPE_YUV && !y_done &&
				(bytes_written + buf->bytesused) >= y_size) {
			s->sg_pending[idx].src = buf->dma_handle;
			s->sg_pending[idx].dst = offset;
			s->sg_pending[idx].size = y_size - bytes_written;
			offset = uv_offset;
			if (s->sg_pending[idx].size != buf->bytesused) {
				idx++;
				s->sg_pending[idx].src =
				  buf->dma_handle + s->sg_pending[idx - 1].size;
				s->sg_pending[idx].dst = offset;
				s->sg_pending[idx].size =
				   buf->bytesused - s->sg_pending[idx - 1].size;
				offset += s->sg_pending[idx].size;
			}
			y_done = 1;
		} else {
			s->sg_pending[idx].src = buf->dma_handle;
			s->sg_pending[idx].dst = offset;
			s->sg_pending[idx].size = buf->bytesused;
			offset += buf->bytesused;
		}
		bytes_written += buf->bytesused;

		/* Sync SG buffers */
		ivtv_buf_sync_for_device(s, buf);
		idx++;
	}
	s->sg_pending_size = idx;

	/* Sync Hardware SG List of buffers */
	ivtv_stream_sync_for_device(s);
	if (lock) {
		unsigned long flags = 0;

		spin_lock_irqsave(&itv->dma_reg_lock, flags);
		if (!test_bit(IVTV_F_I_DMA, &itv->i_flags))
			ivtv_dma_dec_start(s);
		else
			set_bit(IVTV_F_S_DMA_PENDING, &s->s_flags);
		spin_unlock_irqrestore(&itv->dma_reg_lock, flags);
	} else {
		if (!test_bit(IVTV_F_I_DMA, &itv->i_flags))
			ivtv_dma_dec_start(s);
		else
			set_bit(IVTV_F_S_DMA_PENDING, &s->s_flags);
	}
}

static void ivtv_dma_enc_start_xfer(struct ivtv_stream *s)
{
	struct ivtv *itv = s->itv;

	s->sg_dma->src = cpu_to_le32(s->sg_processing[s->sg_processed].src);
	s->sg_dma->dst = cpu_to_le32(s->sg_processing[s->sg_processed].dst);
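	/* Bit 31 of the size field appears to mark this as the sole/final
	   descriptor of the transfer; the exact meaning is an assumption,
	   the hardware documentation is not available here. */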
	s->sg_dma->size = cpu_to_le32(s->sg_processing[s->sg_processed].size | 0x80000000);
	s->sg_processed++;
	/* Sync Hardware SG List of buffers */
	ivtv_stream_sync_for_device(s);
	write_reg(s->sg_handle, IVTV_REG_ENCDMAADDR);
	write_reg_sync(read_reg(IVTV_REG_DMAXFER) | 0x02, IVTV_REG_DMAXFER);
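	/* Arm the DMA watchdog: ivtv_unfinished_dma() fires if this transfer
	   does not complete within 300 ms (the decoder path below mirrors
	   this). */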
	itv->dma_timer.expires = jiffies + msecs_to_jiffies(300);
	add_timer(&itv->dma_timer);
}

static void ivtv_dma_dec_start_xfer(struct ivtv_stream *s)
{
	struct ivtv *itv = s->itv;

	s->sg_dma->src = cpu_to_le32(s->sg_processing[s->sg_processed].src);
	s->sg_dma->dst = cpu_to_le32(s->sg_processing[s->sg_processed].dst);
	s->sg_dma->size = cpu_to_le32(s->sg_processing[s->sg_processed].size | 0x80000000);
	s->sg_processed++;
	/* Sync Hardware SG List of buffers */
	ivtv_stream_sync_for_device(s);
	write_reg(s->sg_handle, IVTV_REG_DECDMAADDR);
	write_reg_sync(read_reg(IVTV_REG_DMAXFER) | 0x01, IVTV_REG_DMAXFER);
	itv->dma_timer.expires = jiffies + msecs_to_jiffies(300);
	add_timer(&itv->dma_timer);
}

/* start the encoder DMA */
static void ivtv_dma_enc_start(struct ivtv_stream *s)
{
	struct ivtv *itv = s->itv;
	struct ivtv_stream *s_vbi = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];
	int i;

	IVTV_DEBUG_HI_DMA("start %s for %s\n", ivtv_use_dma(s) ? "DMA" : "PIO", s->name);

	if (s->q_predma.bytesused)
		ivtv_queue_move(s, &s->q_predma, NULL, &s->q_dma, s->q_predma.bytesused);

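	/* Transfer an extra 256 bytes on the last S/G element: dma_post() may
	   find the data shifted by up to 256 bytes when it scans for the magic
	   cookie, so this padding presumably keeps the tail of the data from
	   being lost (assumed rationale). */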
	if (ivtv_use_dma(s))
		s->sg_pending[s->sg_pending_size - 1].size += 256;

	/* If this is an MPEG stream, and VBI data is also pending, then append the
	   VBI DMA to the MPEG DMA and transfer both sets of data at once.

	   VBI DMA is a second class citizen compared to MPEG and mixing them together
	   will confuse the firmware (the end of a VBI DMA is seen as the end of an
	   MPEG DMA, thus effectively dropping an MPEG frame). So instead we make
	   sure we only use the MPEG DMA to transfer the VBI DMA if both are in
	   use. This way no conflicts occur. */
	clear_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags);
	if (s->type == IVTV_ENC_STREAM_TYPE_MPG && s_vbi->sg_pending_size &&
			s->sg_pending_size + s_vbi->sg_pending_size <= s->buffers) {
		ivtv_queue_move(s_vbi, &s_vbi->q_predma, NULL, &s_vbi->q_dma, s_vbi->q_predma.bytesused);
		if (ivtv_use_dma(s_vbi))
			s_vbi->sg_pending[s_vbi->sg_pending_size - 1].size += 256;
		for (i = 0; i < s_vbi->sg_pending_size; i++) {
			s->sg_pending[s->sg_pending_size++] = s_vbi->sg_pending[i];
		}
		s_vbi->dma_offset = s_vbi->pending_offset;
		s_vbi->sg_pending_size = 0;
		s_vbi->dma_xfer_cnt++;
		set_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags);
		IVTV_DEBUG_HI_DMA("include DMA for %s\n", s_vbi->name);
	}

	s->dma_xfer_cnt++;
	memcpy(s->sg_processing, s->sg_pending, sizeof(struct ivtv_sg_host_element) * s->sg_pending_size);
	s->sg_processing_size = s->sg_pending_size;
	s->sg_pending_size = 0;
	s->sg_processed = 0;
	s->dma_offset = s->pending_offset;
	s->dma_backup = s->pending_backup;
	s->dma_pts = s->pending_pts;

	if (ivtv_use_pio(s)) {
		set_bit(IVTV_F_I_WORK_HANDLER_PIO, &itv->i_flags);
		set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
		set_bit(IVTV_F_I_PIO, &itv->i_flags);
		itv->cur_pio_stream = s->type;
	}
	else {
		itv->dma_retries = 0;
		ivtv_dma_enc_start_xfer(s);
		set_bit(IVTV_F_I_DMA, &itv->i_flags);
		itv->cur_dma_stream = s->type;
	}
}

static void ivtv_dma_dec_start(struct ivtv_stream *s)
{
	struct ivtv *itv = s->itv;

	if (s->q_predma.bytesused)
		ivtv_queue_move(s, &s->q_predma, NULL, &s->q_dma, s->q_predma.bytesused);
	s->dma_xfer_cnt++;
	memcpy(s->sg_processing, s->sg_pending, sizeof(struct ivtv_sg_host_element) * s->sg_pending_size);
	s->sg_processing_size = s->sg_pending_size;
	s->sg_pending_size = 0;
	s->sg_processed = 0;

	IVTV_DEBUG_HI_DMA("start DMA for %s\n", s->name);
	itv->dma_retries = 0;
	ivtv_dma_dec_start_xfer(s);
	set_bit(IVTV_F_I_DMA, &itv->i_flags);
	itv->cur_dma_stream = s->type;
}

static void ivtv_irq_dma_read(struct ivtv *itv)
{
	struct ivtv_stream *s = NULL;
	struct ivtv_buffer *buf;
	int hw_stream_type = 0;

	IVTV_DEBUG_HI_IRQ("DEC DMA READ\n");

	del_timer(&itv->dma_timer);

	if (!test_bit(IVTV_F_I_UDMA, &itv->i_flags) && itv->cur_dma_stream < 0)
		return;

	if (!test_bit(IVTV_F_I_UDMA, &itv->i_flags)) {
		s = &itv->streams[itv->cur_dma_stream];
		ivtv_stream_sync_for_cpu(s);

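		/* 0x14 is taken here to be the read-DMA error bits of
		   IVTV_REG_DMASTATUS (assumption; the register layout is not
		   documented in this file). */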
		if (read_reg(IVTV_REG_DMASTATUS) & 0x14) {
			IVTV_DEBUG_WARN("DEC DMA ERROR %x (xfer %d of %d, retry %d)\n",
					read_reg(IVTV_REG_DMASTATUS),
					s->sg_processed, s->sg_processing_size, itv->dma_retries);
			write_reg(read_reg(IVTV_REG_DMASTATUS) & 3, IVTV_REG_DMASTATUS);
			if (itv->dma_retries == 3) {
				/* Too many retries, give up on this frame */
				itv->dma_retries = 0;
				s->sg_processed = s->sg_processing_size;
			}
			else {
				/* Retry, starting with the first xfer segment.
				   Just retrying the current segment is not sufficient. */
				s->sg_processed = 0;
				itv->dma_retries++;
			}
		}
		if (s->sg_processed < s->sg_processing_size) {
			/* DMA next buffer */
			ivtv_dma_dec_start_xfer(s);
			return;
		}
		if (s->type == IVTV_DEC_STREAM_TYPE_YUV)
			hw_stream_type = 2;
		IVTV_DEBUG_HI_DMA("DEC DATA READ %s: %d\n", s->name, s->q_dma.bytesused);

		/* For some reason we must kick the firmware, as in PIO mode.
		   I think this tells the firmware we are done and gives it the
		   size of the transfer so it can calculate what we need next.
		   We could probably do this part ourselves, but we would have
		   to fully calculate the transfer info ourselves and not rely
		   on interrupts. */
		ivtv_vapi(itv, CX2341X_DEC_SCHED_DMA_FROM_HOST, 3, 0, s->q_dma.bytesused,
				hw_stream_type);

		/* Free last DMA call */
		while ((buf = ivtv_dequeue(s, &s->q_dma)) != NULL) {
			ivtv_buf_sync_for_cpu(s, buf);
			ivtv_enqueue(s, buf, &s->q_free);
		}
		wake_up(&s->waitq);
	}
	clear_bit(IVTV_F_I_UDMA, &itv->i_flags);
	clear_bit(IVTV_F_I_DMA, &itv->i_flags);
	itv->cur_dma_stream = -1;
	wake_up(&itv->dma_waitq);
}

static void ivtv_irq_enc_dma_complete(struct ivtv *itv)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	struct ivtv_stream *s;

	ivtv_api_get_data(&itv->enc_mbox, IVTV_MBOX_DMA_END, 2, data);
	IVTV_DEBUG_HI_IRQ("ENC DMA COMPLETE %x %d (%d)\n", data[0], data[1], itv->cur_dma_stream);

	del_timer(&itv->dma_timer);

	if (itv->cur_dma_stream < 0)
		return;

	s = &itv->streams[itv->cur_dma_stream];
	ivtv_stream_sync_for_cpu(s);

	if (data[0] & 0x18) {
		IVTV_DEBUG_WARN("ENC DMA ERROR %x (offset %08x, xfer %d of %d, retry %d)\n", data[0],
			s->dma_offset, s->sg_processed, s->sg_processing_size, itv->dma_retries);
		write_reg(read_reg(IVTV_REG_DMASTATUS) & 3, IVTV_REG_DMASTATUS);
		if (itv->dma_retries == 3) {
			/* Too many retries, give up on this frame */
			itv->dma_retries = 0;
			s->sg_processed = s->sg_processing_size;
		}
		else {
			/* Retry, starting with the first xfer segment.
			   Just retrying the current segment is not sufficient. */
			s->sg_processed = 0;
			itv->dma_retries++;
		}
	}
	if (s->sg_processed < s->sg_processing_size) {
		/* DMA next buffer */
		ivtv_dma_enc_start_xfer(s);
		return;
	}
	clear_bit(IVTV_F_I_DMA, &itv->i_flags);
	itv->cur_dma_stream = -1;
	dma_post(s);
	if (test_and_clear_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags)) {
		s = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];
		dma_post(s);
	}
	s->sg_processing_size = 0;
	s->sg_processed = 0;
	wake_up(&itv->dma_waitq);
}

static void ivtv_irq_enc_pio_complete(struct ivtv *itv)
{
	struct ivtv_stream *s;

	if (itv->cur_pio_stream < 0 || itv->cur_pio_stream >= IVTV_MAX_STREAMS) {
		itv->cur_pio_stream = -1;
		return;
	}
	s = &itv->streams[itv->cur_pio_stream];
	IVTV_DEBUG_HI_IRQ("ENC PIO COMPLETE %s\n", s->name);
	clear_bit(IVTV_F_I_PIO, &itv->i_flags);
	itv->cur_pio_stream = -1;
	dma_post(s);
	if (s->type == IVTV_ENC_STREAM_TYPE_MPG)
		ivtv_vapi(itv, CX2341X_ENC_SCHED_DMA_TO_HOST, 3, 0, 0, 0);
	else if (s->type == IVTV_ENC_STREAM_TYPE_YUV)
		ivtv_vapi(itv, CX2341X_ENC_SCHED_DMA_TO_HOST, 3, 0, 0, 1);
	else if (s->type == IVTV_ENC_STREAM_TYPE_PCM)
		ivtv_vapi(itv, CX2341X_ENC_SCHED_DMA_TO_HOST, 3, 0, 0, 2);
	clear_bit(IVTV_F_I_PIO, &itv->i_flags);
	if (test_and_clear_bit(IVTV_F_S_DMA_HAS_VBI, &s->s_flags)) {
		s = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];
		dma_post(s);
	}
	wake_up(&itv->dma_waitq);
}

static void ivtv_irq_dma_err(struct ivtv *itv)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	u32 status;

	del_timer(&itv->dma_timer);

	ivtv_api_get_data(&itv->enc_mbox, IVTV_MBOX_DMA_END, 2, data);
	status = read_reg(IVTV_REG_DMASTATUS);
	IVTV_DEBUG_WARN("DMA ERROR %08x %08x %08x %d\n", data[0], data[1],
				status, itv->cur_dma_stream);
	/*
	 * We do *not* write back to the IVTV_REG_DMASTATUS register to
	 * clear the error status, if either the encoder write (0x02) or
	 * decoder read (0x01) bus master DMA operation does not indicate
	 * completion.  We can race with the DMA engine, which may have
	 * transitioned to completed status *after* we read the register.
	 * Setting an IVTV_REG_DMASTATUS flag back to "busy" status, after the
	 * DMA engine has completed, will cause the DMA engine to stop working.
	 */
	status &= 0x3;
	if (status == 0x3)
		write_reg(status, IVTV_REG_DMASTATUS);

	if (!test_bit(IVTV_F_I_UDMA, &itv->i_flags) &&
	    itv->cur_dma_stream >= 0 && itv->cur_dma_stream < IVTV_MAX_STREAMS) {
		struct ivtv_stream *s = &itv->streams[itv->cur_dma_stream];

		if (s->type >= IVTV_DEC_STREAM_TYPE_MPG) {
			/* retry */
			/*
			 * FIXME - handle cases of DMA error similar to
			 * encoder below, except conditioned on status & 0x1
			 */
			ivtv_dma_dec_start(s);
			return;
		} else {
			if ((status & 0x2) == 0) {
				/*
				 * CX2341x Bus Master DMA write is ongoing.
				 * Reset the timer and let it complete.
				 */
				itv->dma_timer.expires =
						jiffies + msecs_to_jiffies(600);
				add_timer(&itv->dma_timer);
				return;
			}

			if (itv->dma_retries < 3) {
				/*
				 * CX2341x Bus Master DMA write has ended.
				 * Retry the write, starting with the first
				 * xfer segment. Just retrying the current
				 * segment is not sufficient.
				 */
				s->sg_processed = 0;
				itv->dma_retries++;
				ivtv_dma_enc_start_xfer(s);
				return;
			}
			/* Too many retries, give up on this one */
		}

	}
	if (test_bit(IVTV_F_I_UDMA, &itv->i_flags)) {
		ivtv_udma_start(itv);
		return;
	}
	clear_bit(IVTV_F_I_UDMA, &itv->i_flags);
	clear_bit(IVTV_F_I_DMA, &itv->i_flags);
	itv->cur_dma_stream = -1;
	wake_up(&itv->dma_waitq);
}

static void ivtv_irq_enc_start_cap(struct ivtv *itv)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	struct ivtv_stream *s;

	/* Get DMA destination and size arguments from card */
	ivtv_api_get_data(&itv->enc_mbox, IVTV_MBOX_DMA, 7, data);
	IVTV_DEBUG_HI_IRQ("ENC START CAP %d: %08x %08x\n", data[0], data[1], data[2]);

	if (data[0] > 2 || data[1] == 0 || data[2] == 0) {
		IVTV_DEBUG_WARN("Unknown input: %08x %08x %08x\n",
				data[0], data[1], data[2]);
		return;
	}
	s = &itv->streams[ivtv_stream_map[data[0]]];
	if (!stream_enc_dma_append(s, data)) {
		set_bit(ivtv_use_pio(s) ? IVTV_F_S_PIO_PENDING : IVTV_F_S_DMA_PENDING, &s->s_flags);
	}
}

static void ivtv_irq_enc_vbi_cap(struct ivtv *itv)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	struct ivtv_stream *s;

	IVTV_DEBUG_HI_IRQ("ENC START VBI CAP\n");
	s = &itv->streams[IVTV_ENC_STREAM_TYPE_VBI];

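	/* data[] is passed uninitialized: stream_enc_dma_append() does not
	   read it for the VBI stream type, the offsets and sizes come from
	   the VBI registers instead. */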
	if (!stream_enc_dma_append(s, data))
		set_bit(ivtv_use_pio(s) ? IVTV_F_S_PIO_PENDING : IVTV_F_S_DMA_PENDING, &s->s_flags);
}

static void ivtv_irq_dec_vbi_reinsert(struct ivtv *itv)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	struct ivtv_stream *s = &itv->streams[IVTV_DEC_STREAM_TYPE_VBI];

	IVTV_DEBUG_HI_IRQ("DEC VBI REINSERT\n");
	if (test_bit(IVTV_F_S_CLAIMED, &s->s_flags) &&
			!stream_enc_dma_append(s, data)) {
		set_bit(IVTV_F_S_PIO_PENDING, &s->s_flags);
	}
}

static void ivtv_irq_dec_data_req(struct ivtv *itv)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	struct ivtv_stream *s;

	/* YUV or MPG */

	if (test_bit(IVTV_F_I_DEC_YUV, &itv->i_flags)) {
		ivtv_api_get_data(&itv->dec_mbox, IVTV_MBOX_DMA, 2, data);
		itv->dma_data_req_size =
				 1080 * ((itv->yuv_info.v4l2_src_h + 31) & ~31);
		itv->dma_data_req_offset = data[1];
		if (atomic_read(&itv->yuv_info.next_dma_frame) >= 0)
			ivtv_yuv_frame_complete(itv);
		s = &itv->streams[IVTV_DEC_STREAM_TYPE_YUV];
	}
	else {
		ivtv_api_get_data(&itv->dec_mbox, IVTV_MBOX_DMA, 3, data);
		itv->dma_data_req_size = min_t(u32, data[2], 0x10000);
		itv->dma_data_req_offset = data[1];
		s = &itv->streams[IVTV_DEC_STREAM_TYPE_MPG];
	}
	IVTV_DEBUG_HI_IRQ("DEC DATA REQ %s: %d %08x %u\n", s->name, s->q_full.bytesused,
		       itv->dma_data_req_offset, itv->dma_data_req_size);
	if (itv->dma_data_req_size == 0 || s->q_full.bytesused < itv->dma_data_req_size) {
		set_bit(IVTV_F_S_NEEDS_DATA, &s->s_flags);
	}
	else {
		if (test_bit(IVTV_F_I_DEC_YUV, &itv->i_flags))
			ivtv_yuv_setup_stream_frame(itv);
		clear_bit(IVTV_F_S_NEEDS_DATA, &s->s_flags);
		ivtv_queue_move(s, &s->q_full, NULL, &s->q_predma, itv->dma_data_req_size);
		ivtv_dma_stream_dec_prepare(s, itv->dma_data_req_offset + IVTV_DECODER_OFFSET, 0);
	}
}

static void ivtv_irq_vsync(struct ivtv *itv)
{
	/* The vsync interrupt is unusual in that it won't clear until
	 * the end of the first line for the current field, at which
	 * point it clears itself. This can result in repeated vsync
	 * interrupts, or a missed vsync. Read some of the registers
	 * to determine the line being displayed and ensure we handle
	 * one vsync per frame.
	 */
	unsigned int frame = read_reg(IVTV_REG_DEC_LINE_FIELD) & 1;
	struct yuv_playback_info *yi = &itv->yuv_info;
	int last_dma_frame = atomic_read(&yi->next_dma_frame);
	struct yuv_frame_info *f = &yi->new_frame_info[last_dma_frame];

	if (0) IVTV_DEBUG_IRQ("DEC VSYNC\n");

	if (((frame ^ f->sync_field) == 0 &&
		((itv->last_vsync_field & 1) ^ f->sync_field)) ||
			(frame != (itv->last_vsync_field & 1) && !f->interlaced)) {
		int next_dma_frame = last_dma_frame;

		if (!(f->interlaced && f->delay && yi->fields_lapsed < 1)) {
			if (next_dma_frame >= 0 && next_dma_frame != atomic_read(&yi->next_fill_frame)) {
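				/* Point the display at the next frame:
				   registers 0x82c-0x838 are taken to be the
				   Y/UV base address registers for the two
				   fields (values are byte offsets shifted
				   right by 4); this is an assumption based on
				   the yuv_offset usage, not documentation. */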
				write_reg(yuv_offset[next_dma_frame] >> 4, 0x82c);
				write_reg((yuv_offset[next_dma_frame] + IVTV_YUV_BUFFER_UV_OFFSET) >> 4, 0x830);
				write_reg(yuv_offset[next_dma_frame] >> 4, 0x834);
				write_reg((yuv_offset[next_dma_frame] + IVTV_YUV_BUFFER_UV_OFFSET) >> 4, 0x838);
				next_dma_frame = (next_dma_frame + 1) % IVTV_YUV_BUFFERS;
				atomic_set(&yi->next_dma_frame, next_dma_frame);
				yi->fields_lapsed = -1;
				yi->running = 1;
			}
		}
	}
	if (frame != (itv->last_vsync_field & 1)) {
		static const struct v4l2_event evtop = {
			.type = V4L2_EVENT_VSYNC,
			.u.vsync.field = V4L2_FIELD_TOP,
		};
		static const struct v4l2_event evbottom = {
			.type = V4L2_EVENT_VSYNC,
			.u.vsync.field = V4L2_FIELD_BOTTOM,
		};
		struct ivtv_stream *s = ivtv_get_output_stream(itv);

		itv->last_vsync_field += 1;
		if (frame == 0) {
			clear_bit(IVTV_F_I_VALID_DEC_TIMINGS, &itv->i_flags);
			clear_bit(IVTV_F_I_EV_VSYNC_FIELD, &itv->i_flags);
		}
		else {
			set_bit(IVTV_F_I_EV_VSYNC_FIELD, &itv->i_flags);
		}
		if (test_bit(IVTV_F_I_EV_VSYNC_ENABLED, &itv->i_flags)) {
			set_bit(IVTV_F_I_EV_VSYNC, &itv->i_flags);
			wake_up(&itv->event_waitq);
			if (s)
				wake_up(&s->waitq);
		}
		if (s && s->vdev.v4l2_dev)
			v4l2_event_queue(&s->vdev, frame ? &evtop : &evbottom);
		wake_up(&itv->vsync_waitq);

		/* Send VBI to saa7127 */
		if (frame && (itv->output_mode == OUT_PASSTHROUGH ||
			test_bit(IVTV_F_I_UPDATE_WSS, &itv->i_flags) ||
			test_bit(IVTV_F_I_UPDATE_VPS, &itv->i_flags) ||
			test_bit(IVTV_F_I_UPDATE_CC, &itv->i_flags))) {
			set_bit(IVTV_F_I_WORK_HANDLER_VBI, &itv->i_flags);
			set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
		}

		/* Check if we need to update the yuv registers */
		if (yi->running && (yi->yuv_forced_update || f->update)) {
			if (!f->update) {
				last_dma_frame =
					(u8)(atomic_read(&yi->next_dma_frame) -
						 1) % IVTV_YUV_BUFFERS;
				f = &yi->new_frame_info[last_dma_frame];
			}

			if (f->src_w) {
				yi->update_frame = last_dma_frame;
				f->update = 0;
				yi->yuv_forced_update = 0;
				set_bit(IVTV_F_I_WORK_HANDLER_YUV, &itv->i_flags);
				set_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags);
			}
		}

		yi->fields_lapsed++;
	}
}

#define IVTV_IRQ_DMA (IVTV_IRQ_DMA_READ | IVTV_IRQ_ENC_DMA_COMPLETE | IVTV_IRQ_DMA_ERR | IVTV_IRQ_ENC_START_CAP | IVTV_IRQ_ENC_VBI_CAP | IVTV_IRQ_DEC_DATA_REQ | IVTV_IRQ_DEC_VBI_RE_INSERT)

irqreturn_t ivtv_irq_handler(int irq, void *dev_id)
{
	struct ivtv *itv = (struct ivtv *)dev_id;
	u32 combo;
	u32 stat;
	int i;
	u8 vsync_force = 0;

	spin_lock(&itv->dma_reg_lock);
	/* get contents of irq status register */
	stat = read_reg(IVTV_REG_IRQSTATUS);

	combo = ~itv->irqmask & stat;

	/* Clear out IRQ */
	if (combo) write_reg(combo, IVTV_REG_IRQSTATUS);

	if (0 == combo) {
		/* The vsync interrupt is unusual and clears itself. If we
		 * took too long, we may have missed it. Do some checks
		 */
		if (~itv->irqmask & IVTV_IRQ_DEC_VSYNC) {
			/* vsync is enabled, see if we're in a new field */
			if ((itv->last_vsync_field & 1) !=
			    (read_reg(IVTV_REG_DEC_LINE_FIELD) & 1)) {
				/* New field, looks like we missed it */
				IVTV_DEBUG_YUV("VSync interrupt missed %d\n",
				       read_reg(IVTV_REG_DEC_LINE_FIELD) >> 16);
				vsync_force = 1;
			}
		}

		if (!vsync_force) {
			/* No Vsync expected, wasn't for us */
			spin_unlock(&itv->dma_reg_lock);
			return IRQ_NONE;
		}
	}

	/* Exclude interrupts noted below from the output, otherwise the log is flooded with
	   these messages */
	if (combo & ~0xff6d0400)
		IVTV_DEBUG_HI_IRQ("======= valid IRQ bits: 0x%08x ======\n", combo);

	if (combo & IVTV_IRQ_DEC_DMA_COMPLETE) {
		IVTV_DEBUG_HI_IRQ("DEC DMA COMPLETE\n");
	}

	if (combo & IVTV_IRQ_DMA_READ) {
		ivtv_irq_dma_read(itv);
	}

	if (combo & IVTV_IRQ_ENC_DMA_COMPLETE) {
		ivtv_irq_enc_dma_complete(itv);
	}

	if (combo & IVTV_IRQ_ENC_PIO_COMPLETE) {
		ivtv_irq_enc_pio_complete(itv);
	}

	if (combo & IVTV_IRQ_DMA_ERR) {
		ivtv_irq_dma_err(itv);
	}

	if (combo & IVTV_IRQ_ENC_START_CAP) {
		ivtv_irq_enc_start_cap(itv);
	}

	if (combo & IVTV_IRQ_ENC_VBI_CAP) {
		ivtv_irq_enc_vbi_cap(itv);
	}

	if (combo & IVTV_IRQ_DEC_VBI_RE_INSERT) {
		ivtv_irq_dec_vbi_reinsert(itv);
	}

	if (combo & IVTV_IRQ_ENC_EOS) {
		IVTV_DEBUG_IRQ("ENC EOS\n");
		set_bit(IVTV_F_I_EOS, &itv->i_flags);
		wake_up(&itv->eos_waitq);
	}

	if (combo & IVTV_IRQ_DEC_DATA_REQ) {
		ivtv_irq_dec_data_req(itv);
	}

	/* Decoder Vertical Sync - We can't rely on 'combo', so check if vsync enabled */
	if (~itv->irqmask & IVTV_IRQ_DEC_VSYNC) {
		ivtv_irq_vsync(itv);
	}

	if (combo & IVTV_IRQ_ENC_VIM_RST) {
		IVTV_DEBUG_IRQ("VIM RST\n");
		/*ivtv_vapi(itv, CX2341X_ENC_REFRESH_INPUT, 0); */
	}

	if (combo & IVTV_IRQ_DEC_AUD_MODE_CHG) {
		IVTV_DEBUG_INFO("Stereo mode changed\n");
	}

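	/* Schedule the next pending DMA/PIO transfer. irq_rr_idx rotates the
	   starting stream so that one busy stream cannot starve the others. */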
	if ((combo & IVTV_IRQ_DMA) && !test_bit(IVTV_F_I_DMA, &itv->i_flags)) {
		itv->irq_rr_idx++;
		for (i = 0; i < IVTV_MAX_STREAMS; i++) {
			int idx = (i + itv->irq_rr_idx) % IVTV_MAX_STREAMS;
			struct ivtv_stream *s = &itv->streams[idx];

			if (!test_and_clear_bit(IVTV_F_S_DMA_PENDING, &s->s_flags))
				continue;
			if (s->type >= IVTV_DEC_STREAM_TYPE_MPG)
				ivtv_dma_dec_start(s);
			else
				ivtv_dma_enc_start(s);
			break;
		}

		if (i == IVTV_MAX_STREAMS &&
		    test_bit(IVTV_F_I_UDMA_PENDING, &itv->i_flags))
			ivtv_udma_start(itv);
	}

	if ((combo & IVTV_IRQ_DMA) && !test_bit(IVTV_F_I_PIO, &itv->i_flags)) {
		itv->irq_rr_idx++;
		for (i = 0; i < IVTV_MAX_STREAMS; i++) {
			int idx = (i + itv->irq_rr_idx) % IVTV_MAX_STREAMS;
			struct ivtv_stream *s = &itv->streams[idx];

			if (!test_and_clear_bit(IVTV_F_S_PIO_PENDING, &s->s_flags))
				continue;
			if (s->type == IVTV_DEC_STREAM_TYPE_VBI || s->type < IVTV_DEC_STREAM_TYPE_MPG)
				ivtv_dma_enc_start(s);
			break;
		}
	}

	if (test_and_clear_bit(IVTV_F_I_HAVE_WORK, &itv->i_flags)) {
		kthread_queue_work(&itv->irq_worker, &itv->irq_work);
	}

	spin_unlock(&itv->dma_reg_lock);

	/* If we've just handled a 'forced' vsync, it's safest to say it
	 * wasn't ours. Another device may have triggered it at just
	 * the right time.
	 */
	return vsync_force ? IRQ_NONE : IRQ_HANDLED;
}

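/* DMA watchdog, armed by the *_start_xfer() functions above; runs when a
   scheduled transfer has not raised its completion interrupt in time. */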
void ivtv_unfinished_dma(struct timer_list *t)
{
	struct ivtv *itv = from_timer(itv, t, dma_timer);

	if (!test_bit(IVTV_F_I_DMA, &itv->i_flags))
		return;
	IVTV_ERR("DMA TIMEOUT %08x %d\n", read_reg(IVTV_REG_DMASTATUS), itv->cur_dma_stream);

	write_reg(read_reg(IVTV_REG_DMASTATUS) & 3, IVTV_REG_DMASTATUS);
	clear_bit(IVTV_F_I_UDMA, &itv->i_flags);
	clear_bit(IVTV_F_I_DMA, &itv->i_flags);
	itv->cur_dma_stream = -1;
	wake_up(&itv->dma_waitq);
}