/*
 * DirectShow capture services (QCAP.DLL)
 *
 * Copyright 2005 Maarten Lankhorst
 *
 * This file contains the part of the vfw capture interface that
 * does the actual Video4Linux(1/2) work required for capturing
 * and for getting/setting the media format.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#include "config.h"
#include "wine/port.h"

#define NONAMELESSSTRUCT
#define NONAMELESSUNION
#define COBJMACROS

#include <stdarg.h>
#include <stdio.h>
#include <string.h>
#include <fcntl.h>

#include "windef.h"
#include "winbase.h"
#include "wingdi.h"
#include "dshow.h"
#include "vfwmsgs.h"
#include "amvideo.h"
#include "wine/debug.h"

#include "qcap_main.h"

#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif
#ifdef HAVE_SYS_IOCTL_H
#include <sys/ioctl.h>
#endif
#ifdef HAVE_SYS_MMAN_H
#include <sys/mman.h>
#endif
#ifdef HAVE_SYS_ERRNO_H
#include <sys/errno.h>
#endif
#ifdef HAVE_SYS_TIME_H
#include <sys/time.h>
#endif
#ifdef HAVE_ASM_TYPES_H
#include <asm/types.h>
#endif
#ifdef HAVE_LINUX_VIDEODEV_H
#include <linux/videodev.h>
#endif

WINE_DEFAULT_DEBUG_CHANNEL(qcap_v4l);

#ifdef HAVE_LINUX_VIDEODEV_H

typedef void (* Renderer)(const Capture *, LPBYTE bufferin, const BYTE *stream);

struct _Capture
{
    UINT width, height, bitDepth, fps, outputwidth, outputheight;
    BOOL swresize;
    CRITICAL_SECTION CritSect;
    IPin *pOut;
    HANDLE thread;
    Renderer renderer;
    int fd, mmap, buffers, curframe, imagesize;
    int iscommitted, stopped;
    struct video_picture pict;
    int dbrightness, dhue, dcolour, dcontrast;
    struct video_mmap *grab_buf;
    struct video_mbuf gb_buffers;
    unsigned char *pmap;
    LPBYTE grab_data;
};
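
/* Table of V4L1 palette numbers (VIDEO_PALETTE_*), their bit depths and the
 * renderer used to convert each format to the RGB24 frames we deliver.
 * Entries with a NULL renderer are formats this driver does not handle. */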
struct renderlist
{
    int depth;
    const char *name;
    Renderer renderer;
};

static void renderer_RGB(const Capture *capBox, LPBYTE bufferin, const BYTE *stream);
static void renderer_YUV(const Capture *capBox, LPBYTE bufferin, const BYTE *stream);

static const struct renderlist renderlist_V4l[] = {
    {  0, "NULL renderer",          NULL },
    {  8, "Gray scales",            NULL },         /*  1, Not supported */
    {  0, "High 240 cube (BT848)",  NULL },         /*  2, Not supported */
    { 16, "16 bit RGB (565)",       NULL },         /*  3, Not supported */
    { 24, "24 bit RGB values",      renderer_RGB }, /*  4, Supported */
    { 32, "32 bit RGB values",      renderer_RGB }, /*  5, Supported */
    { 16, "15 bit RGB (555)",       NULL },         /*  6, Not supported */
    { 16, "YUV 422 (Not P)",        renderer_YUV }, /*  7, Supported */
    { 16, "YUYV (Not P)",           renderer_YUV }, /*  8, Supported */
    { 16, "UYVY (Not P)",           renderer_YUV }, /*  9, Supported */
    { 16, "YUV 420 (Not P)",        NULL },         /* 10, Not supported, if I had to guess it's YYUYYV */
    { 12, "YUV 411 (Not P)",        renderer_YUV }, /* 11, Supported */
    {  0, "Raw capturing (BT848)",  NULL },         /* 12, Not supported */
    { 16, "YUV 422 (Planar)",       renderer_YUV }, /* 13, Supported */
    { 12, "YUV 411 (Planar)",       renderer_YUV }, /* 14, Supported */
    { 12, "YUV 420 (Planar)",       renderer_YUV }, /* 15, Supported */
    { 10, "YUV 410 (Planar)",       renderer_YUV }, /* 16, Supported */
    /* FIXME: add YUV420 support */
};

/* Fallback order when the driver's default palette has no renderer:
 * try the RGB formats first, then the YUV formats
 * (perhaps YUV should be tried first?). */
static const int fallback_V4l[] = { 4, 5, 7, 8, 9, 13, 15, 14, 16, 11, -1 };

/* static const Capture defbox; */
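
/* ioctl() wrapper that retries the request when it is interrupted by a signal (EINTR). */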
static int xioctl(int fd, int request, void * arg)
{
    int r;

    do {
        r = ioctl (fd, request, arg);
    } while (-1 == r && EINTR == errno);

    return r;
}

/* Prepare the capture buffers */
static HRESULT V4l_Prepare(Capture *capBox)
{
    TRACE("%p: Preparing for %dx%d resolution\n", capBox, capBox->width, capBox->height);

    /* Try mmap-based capture first */
    capBox->mmap = 0;
    if (xioctl(capBox->fd, VIDIOCGMBUF, &capBox->gb_buffers) != -1 &&
        capBox->gb_buffers.frames)
    {
        capBox->buffers = capBox->gb_buffers.frames;
        if (capBox->gb_buffers.frames > 1)
            capBox->buffers = 1;
        TRACE("%p: Using %d/%d buffers\n", capBox,
              capBox->buffers, capBox->gb_buffers.frames);

        capBox->pmap = mmap( 0, capBox->gb_buffers.size, PROT_READ|PROT_WRITE,
                             MAP_SHARED, capBox->fd, 0 );
        if (capBox->pmap != MAP_FAILED)
        {
            int i;

            capBox->grab_buf = CoTaskMemAlloc(sizeof(struct video_mmap) * capBox->buffers);
            if (!capBox->grab_buf)
            {
                munmap(capBox->pmap, capBox->gb_buffers.size);
                return E_OUTOFMEMORY;
            }

            /* Setup mmap capture buffers. */
            for (i = 0; i < capBox->buffers; i++)
            {
                capBox->grab_buf[i].format = capBox->pict.palette;
                capBox->grab_buf[i].frame = i;
                capBox->grab_buf[i].width = capBox->width;
                capBox->grab_buf[i].height = capBox->height;
            }
            capBox->mmap = 1;
        }
    }

    if (!capBox->mmap)
    {
        /* Fall back to read()-based capture into a single buffer */
        capBox->buffers = 1;
        capBox->imagesize = renderlist_V4l[capBox->pict.palette].depth *
                            capBox->height * capBox->width / 8;
        capBox->grab_data = CoTaskMemAlloc(capBox->imagesize);
        if (!capBox->grab_data)
            return E_OUTOFMEMORY;
    }

    TRACE("Using mmap: %d\n", capBox->mmap);
    return S_OK;
}
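
/* Undo V4l_Prepare: sync any outstanding mmap'ed frames and release the
 * capture buffers (or the read() buffer when mmap is not used). */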
static void V4l_Unprepare(Capture *capBox)
{
    if (capBox->mmap)
    {
        for (capBox->curframe = 0; capBox->curframe < capBox->buffers; capBox->curframe++)
            xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]);
        munmap(capBox->pmap, capBox->gb_buffers.size);
        CoTaskMemFree(capBox->grab_buf);
    }
    else
        CoTaskMemFree(capBox->grab_data);
}
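
/* Close the video device and free the Capture structure. */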
HRESULT qcap_driver_destroy(Capture *capBox)
{
    TRACE("%p\n", capBox);

    if( capBox->fd != -1 )
        close(capBox->fd);

    capBox->CritSect.DebugInfo->Spare[0] = 0;
    DeleteCriticalSection(&capBox->CritSect);
    CoTaskMemFree(capBox);
    return S_OK;
}
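
/* Ask the driver to capture at the size requested in the media type (RGB24
 * only); if VIDIOCSWIN is refused, keep the current capture size and resize
 * the frames in software instead. */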
HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
{
    int newheight, newwidth;
    struct video_window window;
    VIDEOINFOHEADER *format;

    TRACE("%p\n", capBox);

    format = (VIDEOINFOHEADER *) mT->pbFormat;
    if (format->bmiHeader.biBitCount != 24 ||
        format->bmiHeader.biCompression != BI_RGB)
    {
        FIXME("unsupported media type %d %d\n", format->bmiHeader.biBitCount,
              format->bmiHeader.biCompression );
        return VFW_E_INVALIDMEDIATYPE;
    }

    newwidth = format->bmiHeader.biWidth;
    newheight = format->bmiHeader.biHeight;

    TRACE("%p -> (%p) - %d %d\n", capBox, mT, newwidth, newheight);

    if (capBox->height == newheight && capBox->width == newwidth)
        return S_OK;

    if(-1 == xioctl(capBox->fd, VIDIOCGWIN, &window))
    {
        ERR("ioctl(VIDIOCGWIN) failed (%d)\n", errno);
        return E_FAIL;
    }

    window.width = newwidth;
    window.height = newheight;
    if (xioctl(capBox->fd, VIDIOCSWIN, &window) == -1)
    {
        TRACE("using software resize: %dx%d -> %dx%d\n",
              capBox->width, capBox->height, window.width, window.height);
        capBox->swresize = TRUE;
    }
    else
    {
        capBox->height = window.height;
        capBox->width = window.width;
        capBox->swresize = FALSE;
    }
    capBox->outputwidth = window.width;
    capBox->outputheight = window.height;
    return S_OK;
}
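
/* Build an AM_MEDIA_TYPE (RGB24 VIDEOINFOHEADER) describing the frames this
 * driver currently delivers; the caller owns the returned allocations. */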
HRESULT qcap_driver_get_format(const Capture *capBox, AM_MEDIA_TYPE ** mT)
{
    VIDEOINFOHEADER *vi;

    mT[0] = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
    if (!mT[0])
        return E_OUTOFMEMORY;

    vi = CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
    mT[0]->cbFormat = sizeof(VIDEOINFOHEADER);
    if (!vi)
    {
        CoTaskMemFree(mT[0]);
        return E_OUTOFMEMORY;
    }

    mT[0]->majortype = MEDIATYPE_Video;
    mT[0]->subtype = MEDIASUBTYPE_RGB24;
    mT[0]->formattype = FORMAT_VideoInfo;
    mT[0]->bFixedSizeSamples = TRUE;
    mT[0]->bTemporalCompression = FALSE;
    mT[0]->pUnk = NULL;
    mT[0]->lSampleSize = capBox->outputwidth * capBox->outputheight * capBox->bitDepth / 8;
    TRACE("Output format: %dx%d - %d bits = %u KB\n", capBox->outputwidth,
          capBox->outputheight, capBox->bitDepth, mT[0]->lSampleSize/1024);

    vi->rcSource.left = 0; vi->rcSource.top = 0;
    vi->rcTarget.left = 0; vi->rcTarget.top = 0;
    vi->rcSource.right = capBox->width; vi->rcSource.bottom = capBox->height;
    vi->rcTarget.right = capBox->outputwidth; vi->rcTarget.bottom = capBox->outputheight;
    vi->dwBitRate = capBox->fps * mT[0]->lSampleSize;
    vi->dwBitErrorRate = 0;
    vi->AvgTimePerFrame = (LONGLONG)10000000.0 / (LONGLONG)capBox->fps;
    vi->bmiHeader.biSize = sizeof(vi->bmiHeader);
    vi->bmiHeader.biWidth = capBox->outputwidth;
    vi->bmiHeader.biHeight = capBox->outputheight;
    vi->bmiHeader.biPlanes = 1;
    vi->bmiHeader.biBitCount = 24;
    vi->bmiHeader.biCompression = BI_RGB;
    vi->bmiHeader.biSizeImage = mT[0]->lSampleSize;
    vi->bmiHeader.biClrUsed = vi->bmiHeader.biClrImportant = 0;
    vi->bmiHeader.biXPelsPerMeter = 100;
    vi->bmiHeader.biYPelsPerMeter = 100;
    mT[0]->pbFormat = (void *)vi;
    dump_AM_MEDIA_TYPE(mT[0]);
    return S_OK;
}
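
/* IAMVideoProcAmp-style property support: V4L1 stores brightness, contrast,
 * hue and saturation as 16-bit values, so the reported range is 0-65535. */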
HRESULT qcap_driver_get_prop_range( Capture *capBox, long Property, long *pMin,
        long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags )
{
    TRACE("%p -> %ld %p %p %p %p %p\n", capBox, Property,
          pMin, pMax, pSteppingDelta, pDefault, pCapsFlags);

    switch (Property)
    {
    case VideoProcAmp_Brightness:
        *pDefault = capBox->dbrightness;
        break;
    case VideoProcAmp_Contrast:
        *pDefault = capBox->dcontrast;
        break;
    case VideoProcAmp_Hue:
        *pDefault = capBox->dhue;
        break;
    case VideoProcAmp_Saturation:
        *pDefault = capBox->dcolour;
        break;
    default:
        FIXME("Not implemented %ld\n", Property);
        return E_NOTIMPL;
    }

    *pMin = 0;
    *pMax = 65535;
    *pSteppingDelta = 65536/256;
    *pCapsFlags = VideoProcAmp_Flags_Manual;
    return S_OK;
}

HRESULT qcap_driver_get_prop( Capture *capBox, long Property, long *lValue, long *Flags )
{
    TRACE("%p -> %ld %p %p\n", capBox, Property, lValue, Flags);

    switch (Property)
    {
    case VideoProcAmp_Brightness:
        *lValue = capBox->pict.brightness;
        break;
    case VideoProcAmp_Contrast:
        *lValue = capBox->pict.contrast;
        break;
    case VideoProcAmp_Hue:
        *lValue = capBox->pict.hue;
        break;
    case VideoProcAmp_Saturation:
        *lValue = capBox->pict.colour;
        break;
    default:
        FIXME("Not implemented %ld\n", Property);
        return E_NOTIMPL;
    }

    *Flags = VideoProcAmp_Flags_Manual;
    return S_OK;
}

HRESULT qcap_driver_set_prop(Capture *capBox, long Property, long lValue, long Flags)
{
    TRACE("%p -> %ld %ld %ld\n", capBox, Property, lValue, Flags);

    switch (Property)
    {
    case VideoProcAmp_Brightness:
        capBox->pict.brightness = lValue;
        break;
    case VideoProcAmp_Contrast:
        capBox->pict.contrast = lValue;
        break;
    case VideoProcAmp_Hue:
        capBox->pict.hue = lValue;
        break;
    case VideoProcAmp_Saturation:
        capBox->pict.colour = lValue;
        break;
    default:
        FIXME("Not implemented %ld\n", Property);
        return E_NOTIMPL;
    }

    if (xioctl(capBox->fd, VIDIOCSPICT, &capBox->pict) == -1)
    {
        ERR("ioctl(VIDIOCSPICT) failed (%d)\n", errno);
        return E_FAIL;
    }

    return S_OK;
}
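
/* Copy an RGB frame from the device into the intermediate buffer, dropping
 * one byte per pixel when the source is 32 bits per pixel so that the output
 * is packed 24-bit RGB. */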
static void renderer_RGB(const Capture *capBox, LPBYTE bufferin, const BYTE *stream)
{
    int depth = renderlist_V4l[capBox->pict.palette].depth;
    int size = capBox->height * capBox->width * depth / 8;

    switch (depth)
    {
    case 24:
        memcpy(bufferin, stream, size);
        break;
    case 32:
        {
            int pointer = 0, offset = 1;
            while (pointer + offset <= size)
            {
                bufferin[pointer] = stream[pointer + offset]; pointer++;
                bufferin[pointer] = stream[pointer + offset]; pointer++;
                bufferin[pointer] = stream[pointer + offset]; pointer++;
                offset++;
            }
            break;
        }
    default:
        ERR("Unknown bit depth %d\n", depth);
        return;
    }
}

static void renderer_YUV(const Capture *capBox, LPBYTE bufferin, const BYTE *stream)
{
    enum YUV_Format format;

    switch (capBox->pict.palette)
    {
    case 7:  /* YUV422 - same as YUYV */
    case 11: /* YUV411 */
    case 13: /* YUV422P */
    case 14: /* YUV411P */
    case 15: /* YUV420P */
    case 16: /* YUV410P */
        /* ... each supported palette selects the matching YUV_Format value here ... */
        break;
    default:
        ERR("Unknown palette %d\n", capBox->pict.palette);
        return;
    }

    YUV_To_RGB24(format, bufferin, stream, capBox->width, capBox->height);
}
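
/* Copy a rendered frame into the output sample: the image is always flipped
 * vertically (DIBs are stored bottom-up), and when the requested output size
 * differs from the capture size (swresize) it is scaled with GDI StretchBlt. */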
static void Resize(const Capture * capBox, LPBYTE output, const BYTE *input)
{
    /* the whole image needs to be reversed,
       because DIBs are stored bottom-up in Windows */
    if (!capBox->swresize)
    {
        int depth = capBox->bitDepth / 8;
        int inoffset = 0, outoffset = capBox->height * capBox->width * depth;
        int ow = capBox->width * depth;

        while (outoffset > 0)
        {
            int x;

            outoffset -= ow;
            for (x = 0; x < ow; x++)
                output[outoffset + x] = input[inoffset + x];
            inoffset += ow;
        }
    }
    else
    {
        HDC dc_s, dc_d;
        HBITMAP bmp_s, bmp_d;
        int depth = capBox->bitDepth / 8;
        int inoffset = 0, outoffset = capBox->outputheight * capBox->outputwidth * depth;
        int ow = capBox->outputwidth * depth;
        LPBYTE myarray;

        /* FIXME: Improve software resizing: add error checks and optimize */
        myarray = CoTaskMemAlloc(capBox->outputwidth * capBox->outputheight * depth);
        dc_s = CreateCompatibleDC(NULL);
        dc_d = CreateCompatibleDC(NULL);
        bmp_s = CreateBitmap(capBox->width, capBox->height, 1, capBox->bitDepth, input);
        bmp_d = CreateBitmap(capBox->outputwidth, capBox->outputheight, 1, capBox->bitDepth, NULL);
        SelectObject(dc_s, bmp_s);
        SelectObject(dc_d, bmp_d);
        StretchBlt(dc_d, 0, 0, capBox->outputwidth, capBox->outputheight,
                   dc_s, 0, 0, capBox->width, capBox->height, SRCCOPY);
        GetBitmapBits(bmp_d, capBox->outputwidth * capBox->outputheight * depth, myarray);

        while (outoffset > 0)
        {
            int i;

            outoffset -= ow;
            for (i = 0; i < ow; i++)
                output[outoffset + i] = myarray[inoffset + i];
            inoffset += ow;
        }

        CoTaskMemFree(myarray);
        DeleteDC(dc_s);
        DeleteDC(dc_d);
        DeleteObject(bmp_s);
        DeleteObject(bmp_d);
    }
}
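
/* Fetch the next captured frame: for mmap capture, wait for the current
 * buffer with VIDIOCSYNC and return a pointer into the mapping; otherwise
 * read() the frame into grab_data. */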
static void V4l_GetFrame(Capture * capBox, unsigned char ** pInput)
{
    if (capBox->mmap)
    {
        if (xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]) == -1)
            WARN("Syncing ioctl failed: %d\n", errno);

        *pInput = capBox->pmap + capBox->gb_buffers.offsets[capBox->curframe];
    }
    else
    {
        int retval;

        while ((retval = read(capBox->fd, capBox->grab_data, capBox->imagesize)) == -1)
            if (errno != EAGAIN) break;
        if (retval == -1)
            WARN("Error occurred while reading from device: %s\n", strerror(errno));
        *pInput = (unsigned char*) capBox->grab_data;
    }
}

/* Hand the current buffer back to the driver (queue it for the next capture)
 * and advance to the next buffer. */
static void V4l_FreeFrame(Capture * capBox)
{
    if (capBox->mmap)
    {
        if (xioctl(capBox->fd, VIDIOCMCAPTURE, &capBox->grab_buf[capBox->curframe]) == -1)
            ERR("Freeing frame for capture failed: %s\n", strerror(errno));
    }
    if (++capBox->curframe == capBox->buffers)
        capBox->curframe = 0;
}
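
/* Capture thread: prepares the device, then repeatedly grabs a frame,
 * converts it to RGB24, flips/resizes it into a delivery buffer obtained
 * from the output pin and sends it downstream until stopped. */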
static DWORD WINAPI ReadThread(LPVOID lParam)
{
    Capture * capBox = lParam;
    HRESULT hr;
    IMediaSample *pSample = NULL;
    unsigned long framecount = 0;
    unsigned char *pTarget, *pInput, *pOutput;

    hr = V4l_Prepare(capBox);
    if (FAILED(hr))
        goto fail;

    pOutput = CoTaskMemAlloc(capBox->width * capBox->height * capBox->bitDepth / 8);
    capBox->curframe = 0;
    do {
        V4l_FreeFrame(capBox);
    } while (capBox->curframe != 0);

    while (1)
    {
        EnterCriticalSection(&capBox->CritSect);
        if (capBox->stopped)
            break;

        hr = OutputPin_GetDeliveryBuffer((OutputPin *)capBox->pOut, &pSample, NULL, NULL, 0);
        if (SUCCEEDED(hr))
        {
            int len;

            if (!capBox->swresize)
                len = capBox->height * capBox->width * capBox->bitDepth / 8;
            else
                len = capBox->outputheight * capBox->outputwidth * capBox->bitDepth / 8;
            IMediaSample_SetActualDataLength(pSample, len);

            len = IMediaSample_GetActualDataLength(pSample);
            TRACE("Data length: %d KB\n", len / 1024);

            IMediaSample_GetPointer(pSample, &pTarget);
            /* FIXME: Check return values.. */
            V4l_GetFrame(capBox, &pInput);
            capBox->renderer(capBox, pOutput, pInput);
            Resize(capBox, pTarget, pOutput);
            hr = OutputPin_SendSample((OutputPin *)capBox->pOut, pSample);
            TRACE("%p -> Frame %lu: %x\n", capBox, ++framecount, hr);
            IMediaSample_Release(pSample);
            V4l_FreeFrame(capBox);
        }
        LeaveCriticalSection(&capBox->CritSect);

        if (FAILED(hr) && hr != VFW_E_NOT_CONNECTED)
        {
            ERR("Received error: %x\n", hr);
            goto capture_fail;
        }
    }
    LeaveCriticalSection(&capBox->CritSect);
    CoTaskMemFree(pOutput);
    return 0;

capture_fail:
    CoTaskMemFree(pOutput);
    V4l_Unprepare(capBox);
    LeaveCriticalSection(&capBox->CritSect);

fail:
    capBox->thread = 0; capBox->stopped = 1;
    FIXME("Stop IFilterGraph\n");
    return 0;
}
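
/* Start (or resume) capturing: on the first run, commit the downstream
 * allocator with buffers large enough for one output frame and spawn the
 * capture thread; a paused thread is simply resumed. */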
HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)
{
    HANDLE thread;
    HRESULT hr;

    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Running) return S_OK;

    EnterCriticalSection(&capBox->CritSect);

    capBox->stopped = 0;

    if (*state == State_Stopped)
    {
        *state = State_Running;
        if (!capBox->iscommitted++)
        {
            IMemAllocator * pAlloc = NULL;
            ALLOCATOR_PROPERTIES ap, actual;
            OutputPin *out;

            ap.cBuffers = 3;
            if (!capBox->swresize)
                ap.cbBuffer = capBox->width * capBox->height;
            else
                ap.cbBuffer = capBox->outputwidth * capBox->outputheight;
            ap.cbBuffer = (ap.cbBuffer * capBox->bitDepth) / 8;
            ap.cbAlign = 1;
            ap.cbPrefix = 0;

            out = (OutputPin *)capBox->pOut;
            hr = IMemInputPin_GetAllocator(out->pMemInputPin, &pAlloc);

            if (SUCCEEDED(hr))
                hr = IMemAllocator_SetProperties(pAlloc, &ap, &actual);

            if (SUCCEEDED(hr))
                hr = IMemAllocator_Commit(pAlloc);

            if (pAlloc)
                IMemAllocator_Release(pAlloc);

            TRACE("Committing allocator: %x\n", hr);
        }

        thread = CreateThread(NULL, 0, ReadThread, capBox, 0, NULL);
        if (thread)
        {
            capBox->thread = thread;
            SetThreadPriority(thread, THREAD_PRIORITY_LOWEST);
            LeaveCriticalSection(&capBox->CritSect);
            return S_OK;
        }

        ERR("Creating thread failed.. %u\n", GetLastError());
        LeaveCriticalSection(&capBox->CritSect);
        return E_FAIL;
    }

    ResumeThread(capBox->thread);
    *state = State_Running;
    LeaveCriticalSection(&capBox->CritSect);
    return S_OK;
}
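
/* Pause is implemented by suspending the capture thread; a stopped graph is
 * started first so that there is a thread to suspend. */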
HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)
{
    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Paused)
        return S_OK;
    if (*state == State_Stopped)
        qcap_driver_run(capBox, state);

    EnterCriticalSection(&capBox->CritSect);
    *state = State_Paused;
    SuspendThread(capBox->thread);
    LeaveCriticalSection(&capBox->CritSect);

    return S_OK;
}

HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)
{
    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Stopped)
        return S_OK;

    EnterCriticalSection(&capBox->CritSect);

    if (capBox->thread)
    {
        if (*state == State_Paused)
            ResumeThread(capBox->thread);
        capBox->stopped = 1;
        capBox->thread = 0;

        if (capBox->iscommitted)
        {
            IMemInputPin *pMem = NULL;
            IMemAllocator * pAlloc = NULL;
            IPin *pConnect = NULL;
            HRESULT hr;

            capBox->iscommitted = 0;

            hr = IPin_ConnectedTo(capBox->pOut, &pConnect);
            if (SUCCEEDED(hr))
                hr = IPin_QueryInterface(pConnect, &IID_IMemInputPin, (void **) &pMem);
            if (SUCCEEDED(hr))
                hr = IMemInputPin_GetAllocator(pMem, &pAlloc);
            if (SUCCEEDED(hr))
                hr = IMemAllocator_Decommit(pAlloc);

            if (pAlloc)
                IMemAllocator_Release(pAlloc);
            if (pMem)
                IMemInputPin_Release(pMem);
            if (pConnect)
                IPin_Release(pConnect);

            if (hr != S_OK && hr != VFW_E_NOT_COMMITTED)
                WARN("Decommitting allocator: %x\n", hr);
        }
        V4l_Unprepare(capBox);
    }

    *state = State_Stopped;
    LeaveCriticalSection(&capBox->CritSect);
    return S_OK;
}
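
/* Open /dev/video<card>, check that it is a V4L1 capture device, pick a
 * palette we can render (falling back through fallback_V4l if needed) and
 * record the default picture settings and capture window size. */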
Capture * qcap_driver_init( IPin *pOut, USHORT card )
{
    Capture * capBox = NULL;
    char device[20];
    struct video_capability capa;
    struct video_picture pict;
    struct video_window window;

    capBox = CoTaskMemAlloc(sizeof(Capture));
    if (!capBox)
        goto error;

    /* capBox->vtbl = &defboxVtbl; */

    InitializeCriticalSection( &capBox->CritSect );
    capBox->CritSect.DebugInfo->Spare[0] = (DWORD_PTR)(__FILE__ ": Capture.CritSect");

    sprintf(device, "/dev/video%i", card);
    TRACE("opening %s\n", device);
    capBox->fd = open(device, O_RDWR | O_NONBLOCK);
    if (capBox->fd == -1)
    {
        WARN("open failed (%d)\n", errno);
        goto error;
    }

    memset(&capa, 0, sizeof(capa));

    if (xioctl(capBox->fd, VIDIOCGCAP, &capa) == -1)
    {
        WARN("ioctl(VIDIOCGCAP) failed (%d)\n", errno);
        goto error;
    }

    if (!(capa.type & VID_TYPE_CAPTURE))
    {
        WARN("not a video capture device\n");
        goto error;
    }

    TRACE("%d inputs on %s\n", capa.channels, capa.name );

    if (xioctl(capBox->fd, VIDIOCGPICT, &pict) == -1)
    {
        ERR("ioctl(VIDIOCGPICT) failed (%d)\n", errno );
        goto error;
    }

    TRACE("depth %d palette %d (%s) hue %d color %d contrast %d\n",
          pict.depth, pict.palette, renderlist_V4l[pict.palette].name,
          pict.hue, pict.colour, pict.contrast );

    capBox->dbrightness = pict.brightness;
    capBox->dcolour = pict.colour;
    capBox->dhue = pict.hue;
    capBox->dcontrast = pict.contrast;

    if (!renderlist_V4l[pict.palette].renderer)
    {
        int palet = pict.palette, i;

        TRACE("No renderer available for %s, falling back to defaults\n",
              renderlist_V4l[pict.palette].name);
        capBox->renderer = NULL;
        for (i = 0; fallback_V4l[i] >= 0; i++)
        {
            int n = fallback_V4l[i];

            if (renderlist_V4l[n].renderer == NULL)
                continue;

            pict.depth = renderlist_V4l[n].depth;
            pict.palette = n;
            if (xioctl(capBox->fd, VIDIOCSPICT, &pict) == -1)
            {
                TRACE("Could not render with %s (%d)\n",
                      renderlist_V4l[n].name, n);
                continue;
            }
            TRACE("using renderer %s (%d)\n",
                  renderlist_V4l[n].name, n);
            capBox->renderer = renderlist_V4l[n].renderer;
            break;
        }

        if (!capBox->renderer)
        {
            ERR("video format %s isn't available\n",
                renderlist_V4l[palet].name);
            goto error;
        }
    }
    else
    {
        TRACE("Using the suggested format\n");
        capBox->renderer = renderlist_V4l[pict.palette].renderer;
    }
    memcpy(&capBox->pict, &pict, sizeof(struct video_picture));

    memset(&window, 0, sizeof(window));
    if (xioctl(capBox->fd, VIDIOCGWIN, &window) == -1)
    {
        WARN("VIDIOCGWIN failed (%d)\n", errno);
        goto error;
    }

    capBox->height = capBox->outputheight = window.height;
    capBox->width = capBox->outputwidth = window.width;
    capBox->swresize = FALSE;
    capBox->bitDepth = 24;
    capBox->pOut = pOut;
    capBox->fps = 3;
    capBox->stopped = 0;
    capBox->curframe = 0;
    capBox->iscommitted = 0;
    capBox->thread = 0;

    TRACE("format: %d bits - %d x %d\n", capBox->bitDepth, capBox->width, capBox->height);

    return capBox;

error:
    if (capBox)
        qcap_driver_destroy( capBox );

    return NULL;
}

#else

Capture * qcap_driver_init( IPin *pOut, USHORT card )
{
    static const char msg[] =
        "The v4l headers were not available at compile time,\n"
        "so video capture support is not available.\n";
    MESSAGE(msg);
    return NULL;
}
#define FAIL_WITH_ERR \
    ERR("v4l absent: shouldn't be called\n"); \
    return E_NOTIMPL

HRESULT qcap_driver_destroy(Capture *capBox)
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_get_format(const Capture *capBox, AM_MEDIA_TYPE ** mT)
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_get_prop_range( Capture *capBox, long Property, long *pMin,
        long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags )
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_get_prop(Capture *capBox, long Property, long *lValue, long *Flags)
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_set_prop(Capture *capBox, long Property, long lValue, long Flags)
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)
{ FAIL_WITH_ERR; }

HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)
{ FAIL_WITH_ERR; }

#endif /* HAVE_LINUX_VIDEODEV_H */