/*
 * DirectShow capture services (QCAP.DLL)
 *
 * Copyright 2005 Maarten Lankhorst
 *
 * This file contains the part of the vfw capture interface that
 * does the actual Video4Linux(1/2) work required for capturing
 * and setting/getting the media format.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#define NONAMELESSSTRUCT
#define NONAMELESSUNION

#include "wine/debug.h"

#include "qcap_main.h"

#ifdef HAVE_SYS_IOCTL_H
#include <sys/ioctl.h>
#endif
#ifdef HAVE_SYS_MMAN_H
#include <sys/mman.h>
#endif
#ifdef HAVE_SYS_ERRNO_H
#include <sys/errno.h>
#endif
#ifdef HAVE_SYS_TIME_H
#include <sys/time.h>
#endif
#ifdef HAVE_ASM_TYPES_H
#include <asm/types.h>
#endif
#ifdef HAVE_LINUX_VIDEODEV_H
#include <linux/videodev.h>
#endif
WINE_DEFAULT_DEBUG_CHANNEL(qcap_v4l);

#ifdef HAVE_LINUX_VIDEODEV_H

typedef void (* Renderer)(Capture *, LPBYTE bufferin, LPBYTE stream);

    UINT width, height, bitDepth, fps, outputwidth, outputheight;
    CRITICAL_SECTION CritSect;
    int iscommitted, stopped;
    struct video_picture pict;
    int dbrightness, dhue, dcolour, dcontrast;
    struct video_mmap *grab_buf;
    struct video_mbuf gb_buffers;

static void renderer_RGB(Capture *capBox, LPBYTE bufferin, LPBYTE stream);
static void renderer_YUV(Capture *capBox, LPBYTE bufferin, LPBYTE stream);
static const struct renderlist renderlist_V4l[] = {
    {  0, "NULL renderer",         NULL },
    {  8, "Gray scales",           NULL },         /*  1, Don't support */
    {  0, "High 240 cube (BT848)", NULL },         /*  2, Don't support */
    { 16, "16 bit RGB (565)",      NULL },         /*  3, Don't support */
    { 24, "24 bit RGB values",     renderer_RGB }, /*  4, Supported */
    { 32, "32 bit RGB values",     renderer_RGB }, /*  5, Supported */
    { 16, "15 bit RGB (555)",      NULL },         /*  6, Don't support */
    { 16, "YUV 422 (Not P)",       renderer_YUV }, /*  7, Supported */
    { 16, "YUYV (Not P)",          renderer_YUV }, /*  8, Supported */
    { 16, "UYVY (Not P)",          renderer_YUV }, /*  9, Supported */
    { 16, "YUV 420 (Not P)",       NULL },         /* 10, Not supported, if I had to guess it's YYUYYV */
    { 12, "YUV 411 (Not P)",       renderer_YUV }, /* 11, Supported */
    {  0, "Raw capturing (BT848)", NULL },         /* 12, Don't support */
    { 16, "YUV 422 (Planar)",      renderer_YUV }, /* 13, Supported */
    { 12, "YUV 411 (Planar)",      renderer_YUV }, /* 14, Supported */
    { 12, "YUV 420 (Planar)",      renderer_YUV }, /* 15, Supported */
    { 10, "YUV 410 (Planar)",      renderer_YUV }, /* 16, Supported */
    /* FIXME: add YUV420 support */
};
const int fallback_V4l[] = { 4, 5, 7, 8, 9, 13, 15, 14, 16, 11, -1 };
/* Fallback order: try the RGB formats first (though perhaps YUV should be preferred), then the YUV formats */
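/* Each entry is an index into renderlist_V4l above; the list is terminated by -1. */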
/* static const Capture defbox; */
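/* ioctl() wrapper that retries the request when the call is interrupted by a signal (EINTR). */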
static int xioctl(int fd, int request, void * arg)
{
    int r;
    do { r = ioctl (fd, request, arg); } while (-1 == r && EINTR == errno);
    return r;
}
/* Prepare the capture buffers */
static HRESULT V4l_Prepare(Capture *capBox)
    TRACE("%p: Preparing for %dx%d resolution\n", capBox, capBox->width, capBox->height);

    if (xioctl(capBox->fd, VIDIOCGMBUF, &capBox->gb_buffers) != -1 &&
        capBox->gb_buffers.frames)
        capBox->buffers = capBox->gb_buffers.frames;
        if (capBox->gb_buffers.frames > 1)

        TRACE("%p: Using %d/%d buffers\n", capBox,
              capBox->buffers, capBox->gb_buffers.frames);

        capBox->pmap = mmap( 0, capBox->gb_buffers.size, PROT_READ|PROT_WRITE,
                             MAP_SHARED, capBox->fd, 0 );
        if (capBox->pmap != MAP_FAILED)
            capBox->grab_buf = CoTaskMemAlloc(sizeof(struct video_mmap) * capBox->buffers);
            if (!capBox->grab_buf)
                munmap(capBox->pmap, capBox->gb_buffers.size);
                return E_OUTOFMEMORY;

            /* Setup mmap capture buffers. */
            for (i = 0; i < capBox->buffers; i++)
                capBox->grab_buf[i].format = capBox->pict.palette;
                capBox->grab_buf[i].frame = i;
                capBox->grab_buf[i].width = capBox->width;
                capBox->grab_buf[i].height = capBox->height;

    capBox->imagesize = renderlist_V4l[capBox->pict.palette].depth *
                        capBox->height * capBox->width / 8;
    capBox->grab_data = CoTaskMemAlloc(capBox->imagesize);
    if (!capBox->grab_data)
        return E_OUTOFMEMORY;

    TRACE("Using mmap: %d\n", capBox->mmap);
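/* Release what V4l_Prepare set up: wait for any queued frames with VIDIOCSYNC,
 * unmap the capture buffer and free the per-frame bookkeeping and read() scratch buffer. */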
static void V4l_Unprepare(Capture *capBox)
    for (capBox->curframe = 0; capBox->curframe < capBox->buffers; capBox->curframe++)
        xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]);
    munmap(capBox->pmap, capBox->gb_buffers.size);
    CoTaskMemFree(capBox->grab_buf);
    CoTaskMemFree(capBox->grab_data);
HRESULT qcap_driver_destroy(Capture *capBox)
{
    TRACE("%p\n", capBox);

    if( capBox->fd != -1 )
        close(capBox->fd);
    DeleteCriticalSection(&capBox->CritSect);
    CoTaskMemFree(capBox);
    return S_OK;
}
HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
    int newheight, newwidth;
    struct video_window window;
    VIDEOINFOHEADER *format;

    TRACE("%p\n", capBox);

    format = (VIDEOINFOHEADER *) mT->pbFormat;
    if (format->bmiHeader.biBitCount != 24 ||
        format->bmiHeader.biCompression != BI_RGB)
        FIXME("unsupported media type %d %ld\n", format->bmiHeader.biBitCount,
              format->bmiHeader.biCompression );
        return VFW_E_INVALIDMEDIATYPE;

    newwidth = format->bmiHeader.biWidth;
    newheight = format->bmiHeader.biHeight;

    TRACE("%p -> (%p) - %d %d\n", capBox, mT, newwidth, newheight);

    if (capBox->height == newheight && capBox->width == newwidth)

    if(-1 == xioctl(capBox->fd, VIDIOCGWIN, &window))
        ERR("ioctl(VIDIOCGWIN) failed (%d)\n", errno);

    window.width = newwidth;
    window.height = newheight;
    if (xioctl(capBox->fd, VIDIOCSWIN, &window) == -1)
        TRACE("using software resize: %dx%d -> %dx%d\n",
              window.width, window.height, capBox->width, capBox->height);
        capBox->swresize = TRUE;

    capBox->height = window.height;
    capBox->width = window.width;
    capBox->swresize = FALSE;

    capBox->outputwidth = window.width;
    capBox->outputheight = window.height;
HRESULT qcap_driver_get_format(Capture *capBox, AM_MEDIA_TYPE ** mT)
    VIDEOINFOHEADER *vi;

    mT[0] = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
    if (!mT[0])
        return E_OUTOFMEMORY;
    vi = CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
    mT[0]->cbFormat = sizeof(VIDEOINFOHEADER);
    if (!vi)
        CoTaskMemFree(mT[0]);
        return E_OUTOFMEMORY;

    memcpy(&mT[0]->majortype, &MEDIATYPE_Video, sizeof(GUID));
    memcpy(&mT[0]->subtype, &MEDIASUBTYPE_RGB24, sizeof(GUID));
    memcpy(&mT[0]->formattype, &FORMAT_VideoInfo, sizeof(GUID));
    mT[0]->bFixedSizeSamples = TRUE;
    mT[0]->bTemporalCompression = FALSE;

    mT[0]->lSampleSize = capBox->outputwidth * capBox->outputheight * capBox->bitDepth / 8;
    TRACE("Output format: %dx%d - %d bits = %lu KB\n", capBox->outputwidth,
          capBox->outputheight, capBox->bitDepth, mT[0]->lSampleSize/1024);
    vi->rcSource.left = 0; vi->rcSource.top = 0;
    vi->rcTarget.left = 0; vi->rcTarget.top = 0;
    vi->rcSource.right = capBox->width; vi->rcSource.bottom = capBox->height;
    vi->rcTarget.right = capBox->outputwidth; vi->rcTarget.bottom = capBox->outputheight;
    vi->dwBitRate = capBox->fps * mT[0]->lSampleSize;
    vi->dwBitErrorRate = 0;
    vi->AvgTimePerFrame = (LONGLONG)10000000.0 / (LONGLONG)capBox->fps;
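    /* AvgTimePerFrame is a REFERENCE_TIME in 100-nanosecond units, e.g. 10000000 / 25 fps = 400000. */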
    vi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    vi->bmiHeader.biWidth = capBox->outputwidth;
    vi->bmiHeader.biHeight = capBox->outputheight;
    vi->bmiHeader.biPlanes = 1;
    vi->bmiHeader.biBitCount = 24;
    vi->bmiHeader.biCompression = BI_RGB;
    vi->bmiHeader.biSizeImage = mT[0]->lSampleSize;
    vi->bmiHeader.biClrUsed = vi->bmiHeader.biClrImportant = 0;
    vi->bmiHeader.biXPelsPerMeter = 100;
    vi->bmiHeader.biYPelsPerMeter = 100;
    mT[0]->pbFormat = (void *)vi;
    dump_AM_MEDIA_TYPE(mT[0]);
    return S_OK;
HRESULT qcap_driver_get_prop_range( Capture *capBox, long Property, long *pMin,
        long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags )
    TRACE("%p -> %ld %p %p %p %p %p\n", capBox, Property,
          pMin, pMax, pSteppingDelta, pDefault, pCapsFlags);
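    /* V4L1 exposes brightness, hue, colour and contrast in struct video_picture as
     * 16-bit values (0-65535); the defaults reported here were cached at init time. */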
    case VideoProcAmp_Brightness:
        *pDefault = capBox->dbrightness;
    case VideoProcAmp_Contrast:
        *pDefault = capBox->dcontrast;
    case VideoProcAmp_Hue:
        *pDefault = capBox->dhue;
    case VideoProcAmp_Saturation:
        *pDefault = capBox->dcolour;

        FIXME("Not implemented %ld\n", Property);

    *pSteppingDelta = 65536/256;
    *pCapsFlags = VideoProcAmp_Flags_Manual;
HRESULT qcap_driver_get_prop( Capture *capBox, long Property, long *lValue, long *Flags )
    TRACE("%p -> %ld %p %p\n", capBox, Property, lValue, Flags);

    case VideoProcAmp_Brightness:
        *lValue = capBox->pict.brightness;
    case VideoProcAmp_Contrast:
        *lValue = capBox->pict.contrast;
    case VideoProcAmp_Hue:
        *lValue = capBox->pict.hue;
    case VideoProcAmp_Saturation:
        *lValue = capBox->pict.colour;

        FIXME("Not implemented %ld\n", Property);

    *Flags = VideoProcAmp_Flags_Manual;
HRESULT qcap_driver_set_prop(Capture *capBox, long Property, long lValue, long Flags)
    TRACE("%p -> %ld %ld %ld\n", capBox, Property, lValue, Flags);

    case VideoProcAmp_Brightness:
        capBox->pict.brightness = lValue;
    case VideoProcAmp_Contrast:
        capBox->pict.contrast = lValue;
    case VideoProcAmp_Hue:
        capBox->pict.hue = lValue;
    case VideoProcAmp_Saturation:
        capBox->pict.colour = lValue;

        FIXME("Not implemented %ld\n", Property);

    if (xioctl(capBox->fd, VIDIOCSPICT, &capBox->pict) == -1)
        ERR("ioctl(VIDIOCSPICT) failed (%d)\n",errno);
static void renderer_RGB(Capture *capBox, LPBYTE bufferin, LPBYTE stream)
    int depth = renderlist_V4l[capBox->pict.palette].depth;
    int size = capBox->height * capBox->width * depth / 8;
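    /* A 24 bpp frame can be copied as-is; for 32 bpp the loop below appears to repack the
     * pixels to 24 bpp by copying three bytes per pixel and skipping the padding byte. */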
        memcpy(bufferin, stream, size);

        while (pointer + offset <= size)
            bufferin[pointer] = stream[pointer + offset];
            pointer++;
            bufferin[pointer] = stream[pointer + offset];
            pointer++;
            bufferin[pointer] = stream[pointer + offset];
            pointer++;
            offset++;

        ERR("Unknown bit depth %d\n", depth);
static void renderer_YUV(Capture *capBox, LPBYTE bufferin, LPBYTE stream)
    enum YUV_Format format;

    switch (capBox->pict.palette)
    case 7:  /* YUV422 - same as YUYV */
    case 11: /* YUV411 */
    case 13: /* YUV422P */
    case 14: /* YUV411P */
    case 15: /* YUV420P */
    case 16: /* YUV410P */

        ERR("Unknown palette %d\n", capBox->pict.palette);

    YUV_To_RGB24(format, bufferin, stream, capBox->width, capBox->height);
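/* Copy the converted RGB frame into the output buffer, flipping it vertically on the way
 * (Windows DIBs are stored bottom-up). When the requested output size differs from the
 * capture size, the frame is additionally scaled with GDI (StretchBlt) before the flip. */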
static void Resize(Capture * capBox, LPBYTE output, LPBYTE input)
    /* the whole image needs to be reversed,
       because DIBs are stored bottom-up in Windows */
    if (!capBox->swresize)
        int depth = capBox->bitDepth / 8;
        int inoffset = 0, outoffset = capBox->height * capBox->width * depth;
        int ow = capBox->width * depth;
        while (outoffset > 0)
            for (x = 0; x < ow; x++)
                output[outoffset + x] = input[inoffset + x];

        HBITMAP bmp_s, bmp_d;
        int depth = capBox->bitDepth / 8;
        int inoffset = 0, outoffset = (capBox->outputheight) * capBox->outputwidth * depth;
        int ow = capBox->outputwidth * depth;

        /* FIXME: Improve software resizing: add error checks and optimize */
        myarray = CoTaskMemAlloc(capBox->outputwidth * capBox->outputheight * depth);
        dc_s = CreateCompatibleDC(NULL);
        dc_d = CreateCompatibleDC(NULL);
        bmp_s = CreateBitmap(capBox->width, capBox->height, 1, capBox->bitDepth, input);
        bmp_d = CreateBitmap(capBox->outputwidth, capBox->outputheight, 1, capBox->bitDepth, NULL);
        SelectObject(dc_s, bmp_s);
        SelectObject(dc_d, bmp_d);
        StretchBlt(dc_d, 0, 0, capBox->outputwidth, capBox->outputheight,
                   dc_s, 0, 0, capBox->width, capBox->height, SRCCOPY);
        GetBitmapBits(bmp_d, capBox->outputwidth * capBox->outputheight * depth, myarray);
        while (outoffset > 0)
            for (i = 0; i < ow; i++)
                output[outoffset + i] = myarray[inoffset + i];

        CoTaskMemFree(myarray);
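/* Fetch the next frame. With mmap capture, VIDIOCSYNC waits until the driver has finished
 * filling the current buffer; otherwise the frame is read() from the non-blocking device,
 * retrying while the call reports EAGAIN. */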
static void V4l_GetFrame(Capture * capBox, unsigned char ** pInput)
    if (xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]) == -1)
        WARN("Syncing ioctl failed: %d\n", errno);

    *pInput = capBox->pmap + capBox->gb_buffers.offsets[capBox->curframe];

    while ((retval = read(capBox->fd, capBox->grab_data, capBox->imagesize)) == -1)
        if (errno != EAGAIN) break;

    WARN("Error occurred while reading from device: %s\n", strerror(errno));
    *pInput = capBox->grab_data;
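/* Hand the current buffer back to the driver by queueing a new VIDIOCMCAPTURE request for it,
 * then advance curframe to the next capture buffer. */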
static void V4l_FreeFrame(Capture * capBox)
    if (xioctl(capBox->fd, VIDIOCMCAPTURE, &capBox->grab_buf[capBox->curframe]) == -1)
        ERR("Freeing frame for capture failed: %s\n", strerror(errno));

    if (++capBox->curframe == capBox->buffers)
        capBox->curframe = 0;
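/* Capture thread: primes the capture buffers, then repeatedly grabs a frame, converts it to
 * 24 bpp RGB, resizes/flips it into a media sample and delivers the sample downstream through
 * the output pin until capture is stopped or delivery fails. */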
static DWORD WINAPI ReadThread(LPVOID lParam)
    Capture * capBox = (Capture *)lParam;
    IMediaSample *pSample = NULL;
    unsigned long framecount = 0;
    unsigned char *pTarget, *pInput, *pOutput;

    hr = V4l_Prepare(capBox);

    pOutput = CoTaskMemAlloc(capBox->width * capBox->height * capBox->bitDepth / 8);
    capBox->curframe = 0;
    do {
        V4l_FreeFrame(capBox);
    } while (capBox->curframe != 0);

    EnterCriticalSection(&capBox->CritSect);
    hr = OutputPin_GetDeliveryBuffer((OutputPin *)capBox->pOut, &pSample, NULL, NULL, 0);

    if (!capBox->swresize)
        len = capBox->height * capBox->width * capBox->bitDepth / 8;
    else
        len = capBox->outputheight * capBox->outputwidth * capBox->bitDepth / 8;
    IMediaSample_SetActualDataLength(pSample, len);

    len = IMediaSample_GetActualDataLength(pSample);
    TRACE("Data length: %d KB\n", len / 1024);

    IMediaSample_GetPointer(pSample, &pTarget);
    /* FIXME: Check return values. */
    V4l_GetFrame(capBox, &pInput);
    capBox->renderer(capBox, pOutput, pInput);
    Resize(capBox, pTarget, pOutput);
    hr = OutputPin_SendSample((OutputPin *)capBox->pOut, pSample);
    TRACE("%p -> Frame %lu: %lx\n", capBox, ++framecount, hr);
    IMediaSample_Release(pSample);
    V4l_FreeFrame(capBox);

    LeaveCriticalSection(&capBox->CritSect);
    if (FAILED(hr) && hr != VFW_E_NOT_CONNECTED)
        ERR("Received error: %lx\n", hr);

    LeaveCriticalSection(&capBox->CritSect);
    CoTaskMemFree(pOutput);

    CoTaskMemFree(pOutput);
    V4l_Unprepare(capBox);
    LeaveCriticalSection(&capBox->CritSect);

    capBox->thread = 0; capBox->stopped = 1;
    FIXME("Stop IFilterGraph\n");
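/* Transition the filter to State_Running. On the first run the downstream allocator is
 * committed with buffers large enough for one output frame; the capture thread is then
 * created (or resumed if the filter was merely paused). */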
HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)
    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Running) return S_OK;

    EnterCriticalSection(&capBox->CritSect);

    if (*state == State_Stopped)
        *state = State_Running;
        if (!capBox->iscommitted++)
            IMemAllocator * pAlloc = NULL;
            ALLOCATOR_PROPERTIES ap, actual;

            if (!capBox->swresize)
                ap.cbBuffer = capBox->width * capBox->height;
            else
                ap.cbBuffer = capBox->outputwidth * capBox->outputheight;
            ap.cbBuffer = (ap.cbBuffer * capBox->bitDepth) / 8;

            out = (OutputPin *)capBox->pOut;
            hr = IMemInputPin_GetAllocator(out->pMemInputPin, &pAlloc);

            hr = IMemAllocator_SetProperties(pAlloc, &ap, &actual);

            hr = IMemAllocator_Commit(pAlloc);

            IMemAllocator_Release(pAlloc);

            TRACE("Committing allocator: %lx\n", hr);

        thread = CreateThread(NULL, 0, ReadThread, capBox, 0, NULL);

        capBox->thread = thread;
        SetThreadPriority(thread, THREAD_PRIORITY_LOWEST);
        LeaveCriticalSection(&capBox->CritSect);

        ERR("Creating thread failed.. %lx\n", GetLastError());
        LeaveCriticalSection(&capBox->CritSect);

    ResumeThread(capBox->thread);
    *state = State_Running;
    LeaveCriticalSection(&capBox->CritSect);
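/* Pausing suspends the capture thread in place; a stopped filter is first started so that
 * there is a thread to suspend. */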
HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)
    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Paused)

    if (*state == State_Stopped)
        qcap_driver_run(capBox, state);

    EnterCriticalSection(&capBox->CritSect);
    *state = State_Paused;
    SuspendThread(capBox->thread);
    LeaveCriticalSection(&capBox->CritSect);
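/* Stopping resumes a paused capture thread so it can exit, decommits the downstream
 * allocator (if it was committed) and releases the capture buffers. */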
HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)
    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Stopped)

    EnterCriticalSection(&capBox->CritSect);

    if (*state == State_Paused)
        ResumeThread(capBox->thread);

    if (capBox->iscommitted)
        IMemInputPin *pMem = NULL;
        IMemAllocator * pAlloc = NULL;
        IPin *pConnect = NULL;

        capBox->iscommitted = 0;

        hr = IPin_ConnectedTo(capBox->pOut, &pConnect);

        hr = IPin_QueryInterface(pConnect, &IID_IMemInputPin, (void **) &pMem);

        hr = IMemInputPin_GetAllocator(pMem, &pAlloc);

        hr = IMemAllocator_Decommit(pAlloc);

        IMemAllocator_Release(pAlloc);

        IMemInputPin_Release(pMem);

        IPin_Release(pConnect);

        if (hr != S_OK && hr != VFW_E_NOT_COMMITTED)
            WARN("Decommitting allocator: %lx\n", hr);

    V4l_Unprepare(capBox);

    *state = State_Stopped;
    LeaveCriticalSection(&capBox->CritSect);
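/* Open /dev/video<card>, check that it is a V4L1 capture device, read the current picture
 * format and window, and pick a renderer for the reported palette, falling back through
 * fallback_V4l when the current palette cannot be rendered. */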
Capture * qcap_driver_init( IPin *pOut, USHORT card )
    Capture * capBox = NULL;
    struct video_capability capa;
    struct video_picture pict;
    struct video_window window;

    capBox = CoTaskMemAlloc(sizeof(Capture));

    /* capBox->vtbl = &defboxVtbl; */

    InitializeCriticalSection( &capBox->CritSect );

    sprintf(device, "/dev/video%i", card);
    TRACE("opening %s\n", device);
    capBox->fd = open(device, O_RDWR | O_NONBLOCK);
    if (capBox->fd == -1)
        WARN("open failed (%d)\n", errno);

    memset(&capa, 0, sizeof(capa));

    if (xioctl(capBox->fd, VIDIOCGCAP, &capa) == -1)
        WARN("ioctl(VIDIOCGCAP) failed (%d)\n", errno);

    if (!(capa.type & VID_TYPE_CAPTURE))
        WARN("not a video capture device\n");

    TRACE("%d inputs on %s\n", capa.channels, capa.name );

    if (xioctl(capBox->fd, VIDIOCGPICT, &pict) == -1)
        ERR("ioctl(VIDIOCGPICT) failed (%d)\n", errno );

    TRACE("depth %d palette %d (%s) hue %d color %d contrast %d\n",
          pict.depth, pict.palette, renderlist_V4l[pict.palette].name,
          pict.hue, pict.colour, pict.contrast );

    capBox->dbrightness = pict.brightness;
    capBox->dcolour = pict.colour;
    capBox->dhue = pict.hue;
    capBox->dcontrast = pict.contrast;

    if (!renderlist_V4l[pict.palette].renderer)
        int palet = pict.palette, i;

        TRACE("No renderer available for %s, falling back to defaults\n",
              renderlist_V4l[pict.palette].name);
        capBox->renderer = NULL;
        for (i = 0; fallback_V4l[i] >= 0; i++)
            int n = fallback_V4l[i];

            if (renderlist_V4l[n].renderer == NULL)

            pict.depth = renderlist_V4l[n].depth;

            if (xioctl(capBox->fd, VIDIOCSPICT, &pict) == -1)
                TRACE("Could not render with %s (%d)\n",
                      renderlist_V4l[n].name, n);

            TRACE("using renderer %s (%d)\n",
                  renderlist_V4l[n].name, n);
            capBox->renderer = renderlist_V4l[n].renderer;

        if (!capBox->renderer)
            ERR("video format %s isn't available\n",
                renderlist_V4l[palet].name);

    TRACE("Using the suggested format\n");
    capBox->renderer = renderlist_V4l[pict.palette].renderer;

    memcpy(&capBox->pict, &pict, sizeof(struct video_picture));

    memset(&window, 0, sizeof(window));
    if (xioctl(capBox->fd, VIDIOCGWIN, &window) == -1)
        WARN("VIDIOCGWIN failed (%d)\n", errno);

    capBox->height = capBox->outputheight = window.height;
    capBox->width = capBox->outputwidth = window.width;
    capBox->swresize = FALSE;
    capBox->bitDepth = 24;

    capBox->curframe = 0;
    capBox->iscommitted = 0;

    TRACE("format: %d bits - %d x %d\n", capBox->bitDepth, capBox->width, capBox->height);

    return (Capture*) capBox;
    qcap_driver_destroy( (Capture*) capBox );

#else /* HAVE_LINUX_VIDEODEV_H */

Capture * qcap_driver_init( IPin *pOut, USHORT card )
        "The v4l headers were not available at compile time,\n"
        "so video capture support is not available.\n";

#define FAIL_WITH_ERR \
    ERR("v4l absent: shouldn't be called\n"); \

HRESULT qcap_driver_destroy(Capture *capBox)

HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)

HRESULT qcap_driver_get_format(Capture *capBox, AM_MEDIA_TYPE ** mT)

HRESULT qcap_driver_get_prop_range( Capture *capBox, long Property, long *pMin,
        long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags )

HRESULT qcap_driver_get_prop(Capture *capBox, long Property, long *lValue, long *Flags)

HRESULT qcap_driver_set_prop(Capture *capBox, long Property, long lValue, long Flags)

HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)

HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)

HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)

#endif /* HAVE_LINUX_VIDEODEV_H */