1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22#include <linux/init.h>
23#include <linux/list.h>
24#include <linux/module.h>
25#include <linux/kernel.h>
26#include <linux/bitmap.h>
27#include <linux/usb.h>
28#include <linux/i2c.h>
29#include <linux/mm.h>
30#include <linux/mutex.h>
31
32#include <media/v4l2-common.h>
33#include <media/v4l2-ioctl.h>
34#include <media/v4l2-chip-ident.h>
35#include <media/msp3400.h>
36#include <media/tuner.h>
37
38#include "cx231xx.h"
39#include "cx231xx-vbi.h"
40
41static inline void print_err_status(struct cx231xx *dev, int packet, int status)
42{
43 char *errmsg = "Unknown";
44
45 switch (status) {
46 case -ENOENT:
47 errmsg = "unlinked synchronuously";
48 break;
49 case -ECONNRESET:
50 errmsg = "unlinked asynchronuously";
51 break;
52 case -ENOSR:
53 errmsg = "Buffer error (overrun)";
54 break;
55 case -EPIPE:
56 errmsg = "Stalled (device not responding)";
57 break;
58 case -EOVERFLOW:
59 errmsg = "Babble (bad cable?)";
60 break;
61 case -EPROTO:
62 errmsg = "Bit-stuff error (bad cable?)";
63 break;
64 case -EILSEQ:
65 errmsg = "CRC/Timeout (could be anything)";
66 break;
67 case -ETIME:
68 errmsg = "Device does not respond";
69 break;
70 }
71 if (packet < 0) {
72 cx231xx_err(DRIVER_NAME "URB status %d [%s].\n", status,
73 errmsg);
74 } else {
75 cx231xx_err(DRIVER_NAME "URB packet %d, status %d [%s].\n",
76 packet, status, errmsg);
77 }
78}
79
80
81
82
/*
 * cx231xx_isoc_vbi_copy() - parse one completed URB worth of VBI data.
 *
 * Installed as isoc_ctl.isoc_copy and invoked from the URB completion
 * callback with vbi_mode.slock held.  Scans the transfer buffer for
 * SAV/EAV boundary codes and feeds each line fragment to
 * cx231xx_get_vbi_line().
 *
 * Returns 0 when the URB should be ignored (no device, disconnected or
 * misconfigured state, URB unlinked), otherwise rc (1).
 */
static inline int cx231xx_isoc_vbi_copy(struct cx231xx *dev, struct urb *urb)
{
	struct cx231xx_buffer *buf;
	struct cx231xx_dmaqueue *dma_q = urb->context;
	int rc = 1;
	unsigned char *p_buffer;
	u32 bytes_parsed = 0, buffer_size = 0;
	u8 sav_eav = 0;

	if (!dev)
		return 0;

	if ((dev->state & DEV_DISCONNECTED) || (dev->state & DEV_MISCONFIGURED))
		return 0;

	if (urb->status < 0) {
		print_err_status(dev, -1, urb->status);
		if (urb->status == -ENOENT)
			return 0;
	}

	buf = dev->vbi_mode.isoc_ctl.buf;

	/* get buffer pointer and length from the completed transfer */
	p_buffer = urb->transfer_buffer;
	buffer_size = urb->actual_length;

	if (buffer_size > 0) {
		bytes_parsed = 0;

		if (dma_q->is_partial_line) {
			/* Continue a line started in the previous URB. */
			sav_eav = dma_q->last_sav;
		} else {
			/*
			 * Look for boundary SAV/EAV.  The code may straddle
			 * two URBs, so the last bytes of the previous
			 * transfer (partial_buf) are consulted as well.
			 */
			sav_eav = cx231xx_find_boundary_SAV_EAV(p_buffer,
							dma_q->partial_buf,
							&bytes_parsed);
		}

		sav_eav &= 0xF0;

		/* Get the first line if a valid SAV/EAV code was found. */
		if (sav_eav) {
			bytes_parsed += cx231xx_get_vbi_line(dev, dma_q,
							sav_eav,
							p_buffer + bytes_parsed,
							buffer_size - bytes_parsed);
		}

		/* The above consumed any carried-over partial line. */
		dma_q->is_partial_line = 0;

		/* Now parse data that is on SAV/EAV boundaries. */
		while (bytes_parsed < buffer_size) {
			u32 bytes_used = 0;

			sav_eav = cx231xx_find_next_SAV_EAV(
					p_buffer + bytes_parsed,
					buffer_size - bytes_parsed,
					&bytes_used);

			bytes_parsed += bytes_used;

			sav_eav &= 0xF0;
			if (sav_eav && (bytes_parsed < buffer_size)) {
				bytes_parsed += cx231xx_get_vbi_line(dev,
						dma_q, sav_eav,
						p_buffer+bytes_parsed,
						buffer_size-bytes_parsed);
			}
		}

		/*
		 * Save the last four bytes of the buffer so a SAV/EAV code
		 * split across URBs can be reassembled next time.
		 */
		memcpy(dma_q->partial_buf, p_buffer + buffer_size - 4, 4);
		bytes_parsed = 0;
	}

	return rc;
}
166
167
168
169
170
171static int
172vbi_buffer_setup(struct videobuf_queue *vq, unsigned int *count,
173 unsigned int *size)
174{
175 struct cx231xx_fh *fh = vq->priv_data;
176 struct cx231xx *dev = fh->dev;
177 u32 height = 0;
178
179 height = ((dev->norm & V4L2_STD_625_50) ?
180 PAL_VBI_LINES : NTSC_VBI_LINES);
181
182 *size = (dev->width * height * 2);
183 if (0 == *count)
184 *count = CX231XX_DEF_VBI_BUF;
185
186 if (*count < CX231XX_MIN_BUF)
187 *count = CX231XX_MIN_BUF;
188
189 return 0;
190}
191
192
193static void free_buffer(struct videobuf_queue *vq, struct cx231xx_buffer *buf)
194{
195 struct cx231xx_fh *fh = vq->priv_data;
196 struct cx231xx *dev = fh->dev;
197 unsigned long flags = 0;
198 if (in_interrupt())
199 BUG();
200
201
202
203
204
205
206
207
208
209
210 spin_lock_irqsave(&dev->vbi_mode.slock, flags);
211 if (dev->vbi_mode.isoc_ctl.buf == buf)
212 dev->vbi_mode.isoc_ctl.buf = NULL;
213 spin_unlock_irqrestore(&dev->vbi_mode.slock, flags);
214
215 videobuf_vmalloc_free(&buf->vb);
216 buf->vb.state = VIDEOBUF_NEEDS_INIT;
217}
218
219static int
220vbi_buffer_prepare(struct videobuf_queue *vq, struct videobuf_buffer *vb,
221 enum v4l2_field field)
222{
223 struct cx231xx_fh *fh = vq->priv_data;
224 struct cx231xx_buffer *buf =
225 container_of(vb, struct cx231xx_buffer, vb);
226 struct cx231xx *dev = fh->dev;
227 int rc = 0, urb_init = 0;
228 u32 height = 0;
229
230 height = ((dev->norm & V4L2_STD_625_50) ?
231 PAL_VBI_LINES : NTSC_VBI_LINES);
232 buf->vb.size = ((dev->width << 1) * height);
233
234 if (0 != buf->vb.baddr && buf->vb.bsize < buf->vb.size)
235 return -EINVAL;
236
237 buf->vb.width = dev->width;
238 buf->vb.height = height;
239 buf->vb.field = field;
240 buf->vb.field = V4L2_FIELD_SEQ_TB;
241
242 if (VIDEOBUF_NEEDS_INIT == buf->vb.state) {
243 rc = videobuf_iolock(vq, &buf->vb, NULL);
244 if (rc < 0)
245 goto fail;
246 }
247
248 if (!dev->vbi_mode.isoc_ctl.num_bufs)
249 urb_init = 1;
250
251 if (urb_init) {
252 rc = cx231xx_init_vbi_isoc(dev, CX231XX_NUM_VBI_PACKETS,
253 CX231XX_NUM_VBI_BUFS,
254 dev->vbi_mode.alt_max_pkt_size[0],
255 cx231xx_isoc_vbi_copy);
256 if (rc < 0)
257 goto fail;
258 }
259
260 buf->vb.state = VIDEOBUF_PREPARED;
261 return 0;
262
263fail:
264 free_buffer(vq, buf);
265 return rc;
266}
267
268static void
269vbi_buffer_queue(struct videobuf_queue *vq, struct videobuf_buffer *vb)
270{
271 struct cx231xx_buffer *buf =
272 container_of(vb, struct cx231xx_buffer, vb);
273 struct cx231xx_fh *fh = vq->priv_data;
274 struct cx231xx *dev = fh->dev;
275 struct cx231xx_dmaqueue *vidq = &dev->vbi_mode.vidq;
276
277 buf->vb.state = VIDEOBUF_QUEUED;
278 list_add_tail(&buf->vb.queue, &vidq->active);
279
280}
281
282static void vbi_buffer_release(struct videobuf_queue *vq,
283 struct videobuf_buffer *vb)
284{
285 struct cx231xx_buffer *buf =
286 container_of(vb, struct cx231xx_buffer, vb);
287
288
289 free_buffer(vq, buf);
290}
291
/* videobuf operations table for the VBI capture queue */
struct videobuf_queue_ops cx231xx_vbi_qops = {
	.buf_setup = vbi_buffer_setup,
	.buf_prepare = vbi_buffer_prepare,
	.buf_queue = vbi_buffer_queue,
	.buf_release = vbi_buffer_release,
};
298
299
300
301
302
303
304
305
306static void cx231xx_irq_vbi_callback(struct urb *urb)
307{
308 struct cx231xx_dmaqueue *dma_q = urb->context;
309 struct cx231xx_video_mode *vmode =
310 container_of(dma_q, struct cx231xx_video_mode, vidq);
311 struct cx231xx *dev = container_of(vmode, struct cx231xx, vbi_mode);
312 int rc;
313
314 switch (urb->status) {
315 case 0:
316 case -ETIMEDOUT:
317 break;
318 case -ECONNRESET:
319 case -ENOENT:
320 case -ESHUTDOWN:
321 return;
322 default:
323 cx231xx_err(DRIVER_NAME "urb completition error %d.\n",
324 urb->status);
325 break;
326 }
327
328
329 spin_lock(&dev->vbi_mode.slock);
330 rc = dev->vbi_mode.isoc_ctl.isoc_copy(dev, urb);
331 spin_unlock(&dev->vbi_mode.slock);
332
333
334 urb->status = 0;
335
336 urb->status = usb_submit_urb(urb, GFP_ATOMIC);
337 if (urb->status) {
338 cx231xx_err(DRIVER_NAME "urb resubmit failed (error=%i)\n",
339 urb->status);
340 }
341}
342
343
344
345
/*
 * cx231xx_uninit_vbi_isoc() - stop and free the VBI URB machinery.
 *
 * Kills (or, when IRQs are disabled, unlinks) every in-flight URB,
 * frees the per-URB transfer buffers and the pointer arrays, then
 * tells the hardware to stop VBI capture.
 */
void cx231xx_uninit_vbi_isoc(struct cx231xx *dev)
{
	struct urb *urb;
	int i;

	cx231xx_info(DRIVER_NAME "cx231xx: called cx231xx_uninit_vbi_isoc\n");

	dev->vbi_mode.isoc_ctl.nfields = -1;
	for (i = 0; i < dev->vbi_mode.isoc_ctl.num_bufs; i++) {
		urb = dev->vbi_mode.isoc_ctl.urb[i];
		if (urb) {
			/*
			 * usb_kill_urb() may sleep; fall back to the
			 * non-blocking unlink when IRQs are disabled.
			 */
			if (!irqs_disabled())
				usb_kill_urb(urb);
			else
				usb_unlink_urb(urb);

			if (dev->vbi_mode.isoc_ctl.transfer_buffer[i]) {
				/* free the URB's DMA bounce buffer */
				kfree(dev->vbi_mode.isoc_ctl.
				      transfer_buffer[i]);
				dev->vbi_mode.isoc_ctl.transfer_buffer[i] =
				    NULL;
			}
			usb_free_urb(urb);
			dev->vbi_mode.isoc_ctl.urb[i] = NULL;
		}
		dev->vbi_mode.isoc_ctl.transfer_buffer[i] = NULL;
	}

	kfree(dev->vbi_mode.isoc_ctl.urb);
	kfree(dev->vbi_mode.isoc_ctl.transfer_buffer);

	/* Reset bookkeeping so a later init starts from a clean state. */
	dev->vbi_mode.isoc_ctl.urb = NULL;
	dev->vbi_mode.isoc_ctl.transfer_buffer = NULL;
	dev->vbi_mode.isoc_ctl.num_bufs = 0;

	cx231xx_capture_start(dev, 0, Vbi);
}
EXPORT_SYMBOL_GPL(cx231xx_uninit_vbi_isoc);
385
386
387
388
389int cx231xx_init_vbi_isoc(struct cx231xx *dev, int max_packets,
390 int num_bufs, int max_pkt_size,
391 int (*isoc_copy) (struct cx231xx *dev,
392 struct urb *urb))
393{
394 struct cx231xx_dmaqueue *dma_q = &dev->vbi_mode.vidq;
395 int i;
396 int sb_size, pipe;
397 struct urb *urb;
398 int rc;
399
400 cx231xx_info(DRIVER_NAME "cx231xx: called cx231xx_prepare_isoc\n");
401
402
403 cx231xx_uninit_vbi_isoc(dev);
404
405
406 usb_clear_halt(dev->udev,
407 usb_rcvbulkpipe(dev->udev,
408 dev->vbi_mode.end_point_addr));
409
410 dev->vbi_mode.isoc_ctl.isoc_copy = isoc_copy;
411 dev->vbi_mode.isoc_ctl.num_bufs = num_bufs;
412 dma_q->pos = 0;
413 dma_q->is_partial_line = 0;
414 dma_q->last_sav = 0;
415 dma_q->current_field = -1;
416 dma_q->bytes_left_in_line = dev->width << 1;
417 dma_q->lines_per_field = ((dev->norm & V4L2_STD_625_50) ?
418 PAL_VBI_LINES : NTSC_VBI_LINES);
419 dma_q->lines_completed = 0;
420 for (i = 0; i < 8; i++)
421 dma_q->partial_buf[i] = 0;
422
423 dev->vbi_mode.isoc_ctl.urb = kzalloc(sizeof(void *) * num_bufs,
424 GFP_KERNEL);
425 if (!dev->vbi_mode.isoc_ctl.urb) {
426 cx231xx_errdev("cannot alloc memory for usb buffers\n");
427 return -ENOMEM;
428 }
429
430 dev->vbi_mode.isoc_ctl.transfer_buffer =
431 kzalloc(sizeof(void *) * num_bufs, GFP_KERNEL);
432 if (!dev->vbi_mode.isoc_ctl.transfer_buffer) {
433 cx231xx_errdev("cannot allocate memory for usbtransfer\n");
434 kfree(dev->vbi_mode.isoc_ctl.urb);
435 return -ENOMEM;
436 }
437
438 dev->vbi_mode.isoc_ctl.max_pkt_size = max_pkt_size;
439 dev->vbi_mode.isoc_ctl.buf = NULL;
440
441 sb_size = max_packets * dev->vbi_mode.isoc_ctl.max_pkt_size;
442
443
444 for (i = 0; i < dev->vbi_mode.isoc_ctl.num_bufs; i++) {
445
446 urb = usb_alloc_urb(0, GFP_KERNEL);
447 if (!urb) {
448 cx231xx_err(DRIVER_NAME
449 ": cannot alloc isoc_ctl.urb %i\n", i);
450 cx231xx_uninit_vbi_isoc(dev);
451 return -ENOMEM;
452 }
453 dev->vbi_mode.isoc_ctl.urb[i] = urb;
454 urb->transfer_flags = 0;
455
456 dev->vbi_mode.isoc_ctl.transfer_buffer[i] =
457 kzalloc(sb_size, GFP_KERNEL);
458 if (!dev->vbi_mode.isoc_ctl.transfer_buffer[i]) {
459 cx231xx_err(DRIVER_NAME
460 ": unable to allocate %i bytes for transfer"
461 " buffer %i%s\n", sb_size, i,
462 in_interrupt() ? " while in int" : "");
463 cx231xx_uninit_vbi_isoc(dev);
464 return -ENOMEM;
465 }
466
467 pipe = usb_rcvbulkpipe(dev->udev, dev->vbi_mode.end_point_addr);
468 usb_fill_bulk_urb(urb, dev->udev, pipe,
469 dev->vbi_mode.isoc_ctl.transfer_buffer[i],
470 sb_size, cx231xx_irq_vbi_callback, dma_q);
471 }
472
473 init_waitqueue_head(&dma_q->wq);
474
475
476 for (i = 0; i < dev->vbi_mode.isoc_ctl.num_bufs; i++) {
477 rc = usb_submit_urb(dev->vbi_mode.isoc_ctl.urb[i], GFP_ATOMIC);
478 if (rc) {
479 cx231xx_err(DRIVER_NAME
480 ": submit of urb %i failed (error=%i)\n", i,
481 rc);
482 cx231xx_uninit_vbi_isoc(dev);
483 return rc;
484 }
485 }
486
487 cx231xx_capture_start(dev, 1, Vbi);
488
489 return 0;
490}
491EXPORT_SYMBOL_GPL(cx231xx_init_vbi_isoc);
492
493u32 cx231xx_get_vbi_line(struct cx231xx *dev, struct cx231xx_dmaqueue *dma_q,
494 u8 sav_eav, u8 *p_buffer, u32 buffer_size)
495{
496 u32 bytes_copied = 0;
497 int current_field = -1;
498
499 switch (sav_eav) {
500
501 case SAV_VBI_FIELD1:
502 current_field = 1;
503 break;
504
505 case SAV_VBI_FIELD2:
506 current_field = 2;
507 break;
508 default:
509 break;
510 }
511
512 if (current_field < 0)
513 return bytes_copied;
514
515 dma_q->last_sav = sav_eav;
516
517 bytes_copied =
518 cx231xx_copy_vbi_line(dev, dma_q, p_buffer, buffer_size,
519 current_field);
520
521 return bytes_copied;
522}
523
524
525
526
/*
 * Mark a VBI buffer as complete and wake anyone waiting on it.
 *
 * Called from the URB data path (which runs under vbi_mode.slock —
 * NOTE(review): confirm every caller chain holds it).
 */
static inline void vbi_buffer_filled(struct cx231xx *dev,
				     struct cx231xx_dmaqueue *dma_q,
				     struct cx231xx_buffer *buf)
{
	/* Advance buffer state and timestamp before handing it back. */
	buf->vb.state = VIDEOBUF_DONE;
	buf->vb.field_count++;
	do_gettimeofday(&buf->vb.ts);

	/* Detach from the working pointer; next line picks a new buffer. */
	dev->vbi_mode.isoc_ctl.buf = NULL;

	list_del(&buf->vb.queue);
	wake_up(&buf->vb.done);
}
543
/*
 * cx231xx_copy_vbi_line() - copy (part of) one VBI line into the
 * current capture buffer.
 *
 * @p_line:       source bytes from the USB transfer
 * @length:       number of source bytes available
 * @field_number: field this line belongs to (1 or 2)
 *
 * Tracks partial lines across calls via dma_q state (bytes_left_in_line,
 * lines_completed, is_partial_line).  Returns the number of source bytes
 * consumed.
 */
u32 cx231xx_copy_vbi_line(struct cx231xx *dev, struct cx231xx_dmaqueue *dma_q,
			  u8 *p_line, u32 length, int field_number)
{
	u32 bytes_to_copy;
	struct cx231xx_buffer *buf;
	u32 _line_size = dev->width * 2;

	/* A field change starts a fresh bookkeeping cycle (and may
	 * pull a new buffer off the active queue). */
	if (dma_q->current_field != field_number)
		cx231xx_reset_vbi_buffer(dev, dma_q);

	/* Current working buffer; may be NULL if none is queued. */
	buf = dev->vbi_mode.isoc_ctl.buf;

	dma_q->current_field = field_number;

	bytes_to_copy = dma_q->bytes_left_in_line;
	if (bytes_to_copy > length)
		bytes_to_copy = length;

	/* Field already complete: swallow the bytes without copying. */
	if (dma_q->lines_completed >= dma_q->lines_per_field) {
		dma_q->bytes_left_in_line -= bytes_to_copy;
		dma_q->is_partial_line =
		    (dma_q->bytes_left_in_line == 0) ? 0 : 1;
		return 0;
	}

	dma_q->is_partial_line = 1;

	/* No destination buffer: keep the parser in sync by accounting
	 * for the consumed bytes, but drop the data. */
	if (!buf) {
		dma_q->bytes_left_in_line -= bytes_to_copy;
		dma_q->is_partial_line =
		    (dma_q->bytes_left_in_line == 0) ? 0 : 1;
		return bytes_to_copy;
	}

	/* Copy the data into the buffer at the current line offset. */
	cx231xx_do_vbi_copy(dev, dma_q, p_line, bytes_to_copy);

	dma_q->pos += bytes_to_copy;
	dma_q->bytes_left_in_line -= bytes_to_copy;

	if (dma_q->bytes_left_in_line == 0) {
		/* Line complete: advance line bookkeeping. */
		dma_q->bytes_left_in_line = _line_size;
		dma_q->lines_completed++;
		dma_q->is_partial_line = 0;

		if (cx231xx_is_vbi_buffer_done(dev, dma_q) && buf) {
			/* Whole buffer filled: hand it back to videobuf. */
			vbi_buffer_filled(dev, dma_q, buf);

			dma_q->pos = 0;
			buf = NULL;
			dma_q->lines_completed = 0;
		}
	}

	return bytes_to_copy;
}
606
607
608
609
610static inline void get_next_vbi_buf(struct cx231xx_dmaqueue *dma_q,
611 struct cx231xx_buffer **buf)
612{
613 struct cx231xx_video_mode *vmode =
614 container_of(dma_q, struct cx231xx_video_mode, vidq);
615 struct cx231xx *dev = container_of(vmode, struct cx231xx, vbi_mode);
616 char *outp;
617
618 if (list_empty(&dma_q->active)) {
619 cx231xx_err(DRIVER_NAME ": No active queue to serve\n");
620 dev->vbi_mode.isoc_ctl.buf = NULL;
621 *buf = NULL;
622 return;
623 }
624
625
626 *buf = list_entry(dma_q->active.next, struct cx231xx_buffer, vb.queue);
627
628
629 outp = videobuf_to_vmalloc(&(*buf)->vb);
630 memset(outp, 0, (*buf)->vb.size);
631
632 dev->vbi_mode.isoc_ctl.buf = *buf;
633
634 return;
635}
636
637void cx231xx_reset_vbi_buffer(struct cx231xx *dev,
638 struct cx231xx_dmaqueue *dma_q)
639{
640 struct cx231xx_buffer *buf;
641
642 buf = dev->vbi_mode.isoc_ctl.buf;
643
644 if (buf == NULL) {
645
646 get_next_vbi_buf(dma_q, &buf);
647
648 dma_q->pos = 0;
649 dma_q->current_field = -1;
650 }
651
652 dma_q->bytes_left_in_line = dev->width << 1;
653 dma_q->lines_completed = 0;
654}
655
656int cx231xx_do_vbi_copy(struct cx231xx *dev, struct cx231xx_dmaqueue *dma_q,
657 u8 *p_buffer, u32 bytes_to_copy)
658{
659 u8 *p_out_buffer = NULL;
660 u32 current_line_bytes_copied = 0;
661 struct cx231xx_buffer *buf;
662 u32 _line_size = dev->width << 1;
663 void *startwrite;
664 int offset, lencopy;
665
666 buf = dev->vbi_mode.isoc_ctl.buf;
667
668 if (buf == NULL)
669 return -EINVAL;
670
671 p_out_buffer = videobuf_to_vmalloc(&buf->vb);
672
673 if (dma_q->bytes_left_in_line != _line_size) {
674 current_line_bytes_copied =
675 _line_size - dma_q->bytes_left_in_line;
676 }
677
678 offset = (dma_q->lines_completed * _line_size) +
679 current_line_bytes_copied;
680
681
682 startwrite = p_out_buffer + offset;
683
684 lencopy = dma_q->bytes_left_in_line > bytes_to_copy ?
685 bytes_to_copy : dma_q->bytes_left_in_line;
686
687 memcpy(startwrite, p_buffer, lencopy);
688
689 return 0;
690}
691
692u8 cx231xx_is_vbi_buffer_done(struct cx231xx *dev,
693 struct cx231xx_dmaqueue *dma_q)
694{
695 u32 height = 0;
696
697 height = ((dev->norm & V4L2_STD_625_50) ?
698 PAL_VBI_LINES : NTSC_VBI_LINES);
699 return (dma_q->lines_completed == height) ? 1 : 0;
700}
701