xref: /linux/drivers/media/pci/cx88/cx88-vbi.c (revision 4949009eb8d40a441dcddcd96e101e77d31cf1b2)
/*
 * cx88-vbi.c - VBI (vertical blanking interval) capture support for
 * the Conexant CX2388x-based PCI TV cards (cx8800 driver).
 */
3 #include <linux/kernel.h>
4 #include <linux/module.h>
5 #include <linux/init.h>
6 
7 #include "cx88.h"
8 
/* Module parameter: debug verbosity for VBI messages (0 = silent). */
static unsigned int vbi_debug;
module_param(vbi_debug,int,0644);
MODULE_PARM_DESC(vbi_debug,"enable debug messages [vbi]");
12 
/*
 * Emit a KERN_DEBUG message when vbi_debug is at least @level.
 * Relies on a variable named "dev" (struct cx8800_dev *) being in
 * scope at the call site.  Wrapped in do { } while (0) so the macro
 * expands to exactly one statement — the original bare "if" form is
 * unsafe inside an unbraced if/else (dangling-else hazard).
 */
#define dprintk(level, fmt, arg...) \
	do { \
		if (vbi_debug >= level) \
			printk(KERN_DEBUG "%s: " fmt, \
			       dev->core->name, ## arg); \
	} while (0)
15 
16 /* ------------------------------------------------------------------ */
17 
18 int cx8800_vbi_fmt (struct file *file, void *priv,
19 					struct v4l2_format *f)
20 {
21 	struct cx8800_dev *dev = video_drvdata(file);
22 
23 	f->fmt.vbi.samples_per_line = VBI_LINE_LENGTH;
24 	f->fmt.vbi.sample_format = V4L2_PIX_FMT_GREY;
25 	f->fmt.vbi.offset = 244;
26 
27 	if (dev->core->tvnorm & V4L2_STD_525_60) {
28 		/* ntsc */
29 		f->fmt.vbi.sampling_rate = 28636363;
30 		f->fmt.vbi.start[0] = 10;
31 		f->fmt.vbi.start[1] = 273;
32 		f->fmt.vbi.count[0] = VBI_LINE_NTSC_COUNT;
33 		f->fmt.vbi.count[1] = VBI_LINE_NTSC_COUNT;
34 
35 	} else if (dev->core->tvnorm & V4L2_STD_625_50) {
36 		/* pal */
37 		f->fmt.vbi.sampling_rate = 35468950;
38 		f->fmt.vbi.start[0] = V4L2_VBI_ITU_625_F1_START + 5;
39 		f->fmt.vbi.start[1] = V4L2_VBI_ITU_625_F2_START + 5;
40 		f->fmt.vbi.count[0] = VBI_LINE_PAL_COUNT;
41 		f->fmt.vbi.count[1] = VBI_LINE_PAL_COUNT;
42 	}
43 	return 0;
44 }
45 
/*
 * Program the hardware to start VBI DMA from @buf's RISC program.
 *
 * Configures the SRAM FIFO for channel 24, enables VBI capture,
 * resets the hardware buffer counter (and the software mirror
 * q->count), unmasks the interrupts used for VBI completion, and
 * finally kicks the capture and DMA engines.  The register write
 * order is deliberate.  Always returns 0.
 */
static int cx8800_start_vbi_dma(struct cx8800_dev    *dev,
			 struct cx88_dmaqueue *q,
			 struct cx88_buffer   *buf)
{
	struct cx88_core *core = dev->core;

	/* setup fifo + format */
	cx88_sram_channel_setup(dev->core, &cx88_sram_channels[SRAM_CH24],
				VBI_LINE_LENGTH, buf->risc.dma);

	cx_write(MO_VBOS_CONTROL, ( (1 << 18) |  // comb filter delay fixup
				    (1 << 15) |  // enable vbi capture
				    (1 << 11) ));

	/* reset counter */
	cx_write(MO_VBI_GPCNTRL, GP_COUNT_CONTROL_RESET);
	q->count = 1;

	/* enable irqs */
	cx_set(MO_PCI_INTMSK, core->pci_irqmask | PCI_INT_VIDINT);
	cx_set(MO_VID_INTMSK, 0x0f0088);

	/* enable capture */
	cx_set(VID_CAPTURE_CONTROL,0x18);

	/* start dma */
	cx_set(MO_DEV_CNTRL2, (1<<5));
	cx_set(MO_VID_DMACNTRL, 0x88);

	return 0;
}
77 
/*
 * Undo cx8800_start_vbi_dma(): halt the DMA engine, disable VBI
 * capture, and mask the interrupts that were enabled for it
 * (reverse order of the start sequence).
 */
void cx8800_stop_vbi_dma(struct cx8800_dev *dev)
{
	struct cx88_core *core = dev->core;

	/* stop dma */
	cx_clear(MO_VID_DMACNTRL, 0x88);

	/* disable capture */
	cx_clear(VID_CAPTURE_CONTROL,0x18);

	/* disable irqs */
	cx_clear(MO_PCI_INTMSK, PCI_INT_VIDINT);
	cx_clear(MO_VID_INTMSK, 0x0f0088);
}
92 
93 int cx8800_restart_vbi_queue(struct cx8800_dev    *dev,
94 			     struct cx88_dmaqueue *q)
95 {
96 	struct cx88_buffer *buf;
97 
98 	if (list_empty(&q->active))
99 		return 0;
100 
101 	buf = list_entry(q->active.next, struct cx88_buffer, list);
102 	dprintk(2,"restart_queue [%p/%d]: restart dma\n",
103 		buf, buf->vb.v4l2_buf.index);
104 	cx8800_start_vbi_dma(dev, q, buf);
105 	list_for_each_entry(buf, &q->active, list)
106 		buf->count = q->count++;
107 	return 0;
108 }
109 
110 /* ------------------------------------------------------------------ */
111 
112 static int queue_setup(struct vb2_queue *q, const struct v4l2_format *fmt,
113 			   unsigned int *num_buffers, unsigned int *num_planes,
114 			   unsigned int sizes[], void *alloc_ctxs[])
115 {
116 	struct cx8800_dev *dev = q->drv_priv;
117 
118 	*num_planes = 1;
119 	if (dev->core->tvnorm & V4L2_STD_525_60)
120 		sizes[0] = VBI_LINE_NTSC_COUNT * VBI_LINE_LENGTH * 2;
121 	else
122 		sizes[0] = VBI_LINE_PAL_COUNT * VBI_LINE_LENGTH * 2;
123 	alloc_ctxs[0] = dev->alloc_ctx;
124 	return 0;
125 }
126 
127 
128 static int buffer_prepare(struct vb2_buffer *vb)
129 {
130 	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
131 	struct cx88_buffer *buf = container_of(vb, struct cx88_buffer, vb);
132 	struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0);
133 	unsigned int lines;
134 	unsigned int size;
135 
136 	if (dev->core->tvnorm & V4L2_STD_525_60)
137 		lines = VBI_LINE_NTSC_COUNT;
138 	else
139 		lines = VBI_LINE_PAL_COUNT;
140 	size = lines * VBI_LINE_LENGTH * 2;
141 	if (vb2_plane_size(vb, 0) < size)
142 		return -EINVAL;
143 	vb2_set_plane_payload(vb, 0, size);
144 
145 	cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl,
146 			 0, VBI_LINE_LENGTH * lines,
147 			 VBI_LINE_LENGTH, 0,
148 			 lines);
149 	return 0;
150 }
151 
152 static void buffer_finish(struct vb2_buffer *vb)
153 {
154 	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
155 	struct cx88_buffer *buf = container_of(vb, struct cx88_buffer, vb);
156 	struct cx88_riscmem *risc = &buf->risc;
157 
158 	if (risc->cpu)
159 		pci_free_consistent(dev->pci, risc->size, risc->cpu, risc->dma);
160 	memset(risc, 0, sizeof(*risc));
161 }
162 
/*
 * vb2 buf_queue: add a prepared buffer to the active DMA chain.
 *
 * The buffer's RISC program is first made self-looping: both the
 * initial jump target (cpu[1]) and the trailing jump (jmp[0..1])
 * point back at risc.dma + 8, i.e. just past the first two 32-bit
 * RISC words.  If the queue is empty, DMA is started on this buffer
 * immediately; otherwise the previous tail buffer's trailing jump is
 * re-pointed at this buffer so the engine chains into it, with
 * RISC_IRQ1 requested at the hand-over.  The statement order in the
 * else branch matters: the new program must be fully set up before
 * prev's jump is redirected.
 */
static void buffer_queue(struct vb2_buffer *vb)
{
	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
	struct cx88_buffer    *buf = container_of(vb, struct cx88_buffer, vb);
	struct cx88_buffer    *prev;
	struct cx88_dmaqueue  *q    = &dev->vbiq;

	/* add jump to start */
	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8);
	buf->risc.jmp[0] = cpu_to_le32(RISC_JUMP | RISC_CNT_INC);
	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8);

	if (list_empty(&q->active)) {
		/* first buffer: start the hardware on it right away */
		list_add_tail(&buf->list, &q->active);
		cx8800_start_vbi_dma(dev, q, buf);
		buf->count    = q->count++;
		dprintk(2,"[%p/%d] vbi_queue - first active\n",
			buf, buf->vb.v4l2_buf.index);

	} else {
		/* chain behind the current tail buffer */
		buf->risc.cpu[0] |= cpu_to_le32(RISC_IRQ1);
		prev = list_entry(q->active.prev, struct cx88_buffer, list);
		list_add_tail(&buf->list, &q->active);
		buf->count    = q->count++;
		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
		dprintk(2,"[%p/%d] buffer_queue - append to active\n",
			buf, buf->vb.v4l2_buf.index);
	}
}
192 
/*
 * vb2 start_streaming: kick off DMA on the first queued buffer.
 *
 * NOTE(review): list_entry() is used on dmaq->active without an
 * emptiness check; on an empty list it would yield a bogus pointer.
 * This assumes the vb2 core guarantees at least one queued buffer
 * before start_streaming is called — confirm the queue's minimum
 * buffer configuration upholds that.
 */
static int start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct cx8800_dev *dev = q->drv_priv;
	struct cx88_dmaqueue *dmaq = &dev->vbiq;
	struct cx88_buffer *buf = list_entry(dmaq->active.next,
			struct cx88_buffer, list);

	cx8800_start_vbi_dma(dev, dmaq, buf);
	return 0;
}
203 
/*
 * vb2 stop_streaming: halt the hardware, then hand every buffer still
 * on the active list back to the vb2 core in the ERROR state.
 */
static void stop_streaming(struct vb2_queue *q)
{
	struct cx8800_dev *dev = q->drv_priv;
	struct cx88_core *core = dev->core;
	struct cx88_dmaqueue *dmaq = &dev->vbiq;
	unsigned long flags;

	/* clear extra DMA/capture enable bits, then full VBI shutdown */
	cx_clear(MO_VID_DMACNTRL, 0x11);
	cx_clear(VID_CAPTURE_CONTROL, 0x06);
	cx8800_stop_vbi_dma(dev);
	/* slock taken irq-safe — presumably serializes the active list
	 * against the interrupt handler; verify against cx88-video. */
	spin_lock_irqsave(&dev->slock, flags);
	while (!list_empty(&dmaq->active)) {
		struct cx88_buffer *buf = list_entry(dmaq->active.next,
			struct cx88_buffer, list);

		list_del(&buf->list);
		vb2_buffer_done(&buf->vb, VB2_BUF_STATE_ERROR);
	}
	spin_unlock_irqrestore(&dev->slock, flags);
}
224 
225 const struct vb2_ops cx8800_vbi_qops = {
226 	.queue_setup    = queue_setup,
227 	.buf_prepare  = buffer_prepare,
228 	.buf_finish = buffer_finish,
229 	.buf_queue    = buffer_queue,
230 	.wait_prepare = vb2_ops_wait_prepare,
231 	.wait_finish = vb2_ops_wait_finish,
232 	.start_streaming = start_streaming,
233 	.stop_streaming = stop_streaming,
234 };
235