/*
 * GStreamer wrapper filter
 *
 * Copyright 2010 Maarten Lankhorst for CodeWeavers
 * Copyright 2010 Aric Stewart for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>

#include "gst_private.h"
#include "gst_guids.h"

#include "wine/unicode.h"
#include "wine/debug.h"

DEFINE_GUID(WMMEDIASUBTYPE_MP3, 0x00000055, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);

WINE_DEFAULT_DEBUG_CHANNEL(gstreamer);

static const IBaseFilterVtbl GSTTf_Vtbl;

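/* Registry filter used by Gstreamer_FindMatch(): keep only element factories
 * of the requested class (e.g. "Decoder") whose sink pad templates are
 * compatible with the caps being searched for.  The search parameters are
 * passed in a small helper struct; its fields follow from the usage below. */
struct typeinfo {
    GstCaps *caps;
    const char *type;
};
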
static gboolean match_element(GstPluginFeature *feature, gpointer gdata) {
    struct typeinfo *data = (struct typeinfo*)gdata;
    GstElementFactory *factory;
    const GList *list;

    if (!GST_IS_ELEMENT_FACTORY(feature))
        return FALSE;
    factory = GST_ELEMENT_FACTORY(feature);
    if (!strstr(gst_element_factory_get_klass(factory), data->type))
        return FALSE;
    for (list = gst_element_factory_get_static_pad_templates(factory); list; list = list->next) {
        GstStaticPadTemplate *pad = (GstStaticPadTemplate*)list->data;
        GstCaps *caps;
        gboolean ret;
        if (pad->direction != GST_PAD_SINK)
            continue;
        caps = gst_static_caps_get(&pad->static_caps);
        ret = gst_caps_is_always_compatible(caps, data->caps);
        gst_caps_unref(caps);
        if (ret)
            return TRUE;
    }
    return FALSE;
}

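/* Pick the highest-ranked decoder element that matched above and return its
 * factory name, or NULL when nothing in the registry can handle the caps. */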
static const char *Gstreamer_FindMatch(const char *strcaps)
{
    struct typeinfo data;
    GList *list, *copy;
    guint bestrank = 0;
    GstElementFactory *bestfactory = NULL;
    GstCaps *caps = gst_caps_from_string(strcaps);

    data.caps = caps;
    data.type = "Decoder";
    copy = gst_default_registry_feature_filter(match_element, 0, &data);
    for (list = copy; list; list = list->next) {
        GstElementFactory *factory = (GstElementFactory*)list->data;
        guint rank = gst_plugin_feature_get_rank(GST_PLUGIN_FEATURE(factory));
        if (rank > bestrank || !bestrank) {
            bestrank = rank;
            bestfactory = factory;
        }
    }
    gst_caps_unref(caps);
    g_list_free(copy);

    if (!bestfactory) {
        FIXME("Could not find plugin for %s\n", strcaps);
        return NULL;
    }

    return gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(bestfactory));
}

typedef struct GstTfImpl {
    TransformFilter tf;
    IUnknown *seekthru_unk;
    const char *gstreamer_name;
    GstElement *filter;
    GstPad *my_src, *my_sink, *their_src, *their_sink;
    LONG cbBuffer;
} GstTfImpl;

static HRESULT WINAPI Gstreamer_transform_ProcessBegin(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;
    int ret;

    ret = gst_element_set_state(This->filter, GST_STATE_PLAYING);
    TRACE("Returned: %i\n", ret);
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_DecideBufferSize(TransformFilter *tf, IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    ALLOCATOR_PROPERTIES actual;

    if (!ppropInputRequest->cbAlign)
        ppropInputRequest->cbAlign = 1;

    ppropInputRequest->cbBuffer = This->cbBuffer;

    if (ppropInputRequest->cBuffers < 2)
        ppropInputRequest->cBuffers = 2;

    return IMemAllocator_SetProperties(pAlloc, ppropInputRequest, &actual);
}

static void release_sample(void *data) {
    TRACE("Releasing %p\n", data);
    IMediaSample_Release((IMediaSample *)data);
}

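/* Chain function for our sink pad: the decoded GstBuffer is backed by a
 * DirectShow IMediaSample (see request_buffer), so copy the timing and flag
 * information across and deliver the sample on the output pin. */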
static GstFlowReturn got_data(GstPad *pad, GstBuffer *buf) {
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample = GST_APP_BUFFER(buf)->priv;
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;

    if (GST_BUFFER_TIMESTAMP_IS_VALID(buf) &&
        GST_BUFFER_DURATION_IS_VALID(buf)) {
        tStart = buf->timestamp / 100;
        tStop = tStart + buf->duration / 100;
        IMediaSample_SetTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetTime(sample, NULL, NULL);
    if (GST_BUFFER_OFFSET_IS_VALID(buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID(buf)) {
        tStart = buf->offset / 100;
        tStop = buf->offset_end / 100;
        IMediaSample_SetMediaTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetMediaTime(sample, NULL, NULL);

    IMediaSample_SetDiscontinuity(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT));
    IMediaSample_SetPreroll(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_PREROLL));
    IMediaSample_SetSyncPoint(sample, !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT));
    IMediaSample_SetActualDataLength(sample, GST_BUFFER_SIZE(buf));

    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], sample);
    gst_buffer_unref(buf);
    if (FAILED(hr))
        return GST_FLOW_WRONG_STATE;
    if (hr != S_OK)
        return GST_FLOW_RESEND;
    return GST_FLOW_OK;
}

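/* Buffer-alloc function for our sink pad: back every buffer the wrapped
 * element writes into with an IMediaSample from the output pin's allocator,
 * so decoded data lands directly in the sample that will be delivered. */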
static GstFlowReturn request_buffer(GstPad *pad, guint64 ofs, guint size, GstCaps *caps, GstBuffer **buf) {
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample;
    BYTE *ptr;
    HRESULT hr;

    TRACE("Requesting buffer\n");

    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &sample, NULL, NULL, 0);
    if (FAILED(hr)) {
        ERR("Could not get output buffer: %08x\n", hr);
        return GST_FLOW_WRONG_STATE;
    }
    IMediaSample_SetActualDataLength(sample, size);
    IMediaSample_GetPointer(sample, &ptr);
    *buf = gst_app_buffer_new(ptr, size, release_sample, sample);
    if (!*buf) {
        IMediaSample_Release(sample);
        ERR("Out of memory\n");
        return GST_FLOW_ERROR;
    }
    if (!caps)
        caps = gst_pad_get_caps_reffed(This->my_sink);
    gst_buffer_set_caps(*buf, caps);
    return GST_FLOW_OK;
}

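/* Wrap the incoming IMediaSample in a GstBuffer without copying; the sample
 * is AddRef'd here and released by release_sample once GStreamer is done
 * with the buffer, then the buffer is pushed into the wrapped element. */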
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample) {
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    int ret;

    TRACE("Reading %p\n", sample);

    EnterCriticalSection(&This->tf.filter.csFilter);
    IMediaSample_GetPointer(sample, &data);
    buf = gst_app_buffer_new(data, IMediaSample_GetActualDataLength(sample), release_sample, sample);
    if (!buf) {
        LeaveCriticalSection(&This->tf.filter.csFilter);
        return S_FALSE;
    }
    gst_buffer_set_caps(buf, gst_pad_get_caps_reffed(This->my_src));
    IMediaSample_AddRef(sample);
    buf->duration = buf->timestamp = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr)) {
        buf->timestamp = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart)*100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK) {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_PREROLL);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.filter.csFilter);
    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_ERROR)
        return E_FAIL;
    if (ret == GST_FLOW_WRONG_STATE)
        return VFW_E_WRONG_STATE;
    if (ret == GST_FLOW_RESEND)
        return S_FALSE;
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_ProcessEnd(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;
    int ret;

    LeaveCriticalSection(&This->tf.filter.csFilter);
    ret = gst_element_set_state(This->filter, GST_STATE_READY);
    EnterCriticalSection(&This->tf.filter.csFilter);
    TRACE("Returned: %i\n", ret);
    return S_OK;
}

static void Gstreamer_transform_pad_added(GstElement *filter, GstPad *pad, GstTfImpl *This)
{
    int ret;

    if (!GST_PAD_IS_SRC(pad))
        return;
    ret = gst_pad_link(pad, This->my_sink);
    if (ret < 0)
        WARN("Failed to link with %i\n", ret);
    This->their_src = pad;

    gst_pad_set_active(pad, TRUE);
    gst_pad_set_active(This->my_sink, TRUE);
}

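/* Instantiate the wrapped GStreamer element and wire our own source/sink
 * pads to its sink/source pads; elements that only create their source pad
 * dynamically are handled through the "pad-added" signal above. */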
static HRESULT Gstreamer_transform_ConnectInput(GstTfImpl *This, const AM_MEDIA_TYPE *amt, GstCaps *capsin, GstCaps *capsout) {
    GstIterator *it;
    int done = 0, found = 0, ret;

    This->filter = gst_element_factory_make(This->gstreamer_name, NULL);
    if (!This->filter) {
        FIXME("Could not make %s filter\n", This->gstreamer_name);
        return E_FAIL;
    }
    This->my_src = gst_pad_new(NULL, GST_PAD_SRC);
    gst_pad_set_element_private (This->my_src, This);

    This->my_sink = gst_pad_new(NULL, GST_PAD_SINK);
    gst_pad_set_chain_function(This->my_sink, got_data);
    gst_pad_set_bufferalloc_function(This->my_sink, request_buffer);
    gst_pad_set_element_private (This->my_sink, This);

    ret = gst_pad_set_caps(This->my_src, capsin);
    if (!ret) {
        WARN("Failed to set caps on own source with %i\n", ret);
        return E_FAIL;
    }
    ret = gst_pad_set_caps(This->my_sink, capsout);
    if (!ret) {
        WARN("Failed to set caps on own sink with %i\n", ret);
        return E_FAIL;
    }

    it = gst_element_iterate_sink_pads(This->filter);
    while (!done) {
        gpointer item;
        switch (gst_iterator_next(it, &item)) {
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync (it);
            break;
        case GST_ITERATOR_OK:
            This->their_sink = item;
            /* fall through */
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = 1;
            break;
        }
    }
    gst_iterator_free(it);
    if (!This->their_sink) {
        ERR("Could not find sink on filter %s\n", This->gstreamer_name);
        return E_FAIL;
    }

    it = gst_element_iterate_src_pads(This->filter);
    gst_iterator_resync(it);
    done = 0;
    while (!done) {
        gpointer item;
        switch (gst_iterator_next(it, &item)) {
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync (it);
            break;
        case GST_ITERATOR_OK:
            This->their_src = item;
            /* fall through */
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = 1;
            break;
        }
    }
    gst_iterator_free(it);
    found = !!This->their_src;

    g_signal_connect(This->filter, "pad-added", G_CALLBACK(Gstreamer_transform_pad_added), This);
    ret = gst_pad_link(This->my_src, This->their_sink);
    if (ret < 0)
        WARN("Failed to link with %i\n", ret);

    if (found)
        Gstreamer_transform_pad_added(This->filter, This->their_src, This);

    if (!gst_pad_is_linked(This->my_sink))
        return E_FAIL;

    TRACE("Connected\n");
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_Cleanup(TransformFilter *tf, PIN_DIRECTION dir) {
    GstTfImpl *This = (GstTfImpl*)tf;

    if (dir == PINDIR_INPUT) {
        if (This->filter) {
            gst_element_set_state(This->filter, GST_STATE_NULL);
            gst_object_unref(This->filter);
            This->filter = NULL;
        }
        if (This->my_src) {
            gst_pad_unlink(This->my_src, This->their_sink);
            gst_object_unref(This->my_src);
        }
        if (This->my_sink) {
            gst_pad_unlink(This->their_src, This->my_sink);
            gst_object_unref(This->my_sink);
        }
        This->my_sink = This->my_src = This->their_sink = This->their_src = NULL;
        FIXME("%p stub\n", This);
    }
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_EndOfStream(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_eos());
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_BeginFlush(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_flush_start());
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_EndFlush(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_flush_stop());
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_NewSegment(TransformFilter *iface, REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_new_segment_full(1,
        1.0, dRate, GST_FORMAT_TIME, 0, tStop <= tStart ? -1 : tStop * 100, tStart*100));
    return S_OK;
}

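/* Forward DirectShow quality-control messages upstream to the wrapped
 * element as a GStreamer QoS event, clamping the lateness so that
 * timestamp + lateness never goes negative. */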
static HRESULT WINAPI Gstreamer_transform_QOS(TransformFilter *iface, IBaseFilter *sender, Quality qm) {
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME late = qm.Late;
    if (qm.Late < 0 && -qm.Late > qm.TimeStamp)
        late = -qm.TimeStamp;
    gst_pad_push_event(This->my_sink, gst_event_new_qos(1000. / qm.Proportion, late * 100, qm.TimeStamp * 100));
    return QualityControlImpl_Notify((IQualityControl*)&iface->qcimpl, sender, qm);
}

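/* Common constructor for the wrappers below: build the base TransformFilter,
 * aggregate a SeekingPassThru object so IMediaSeeking is serviced through the
 * input pin, and remember which GStreamer element to instantiate later. */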
static HRESULT Gstreamer_transform_create(IUnknown *punkout, const CLSID *clsid, const char *name, const TransformFilterFuncTable *vtbl, void **obj)
{
    GstTfImpl *This;
    ISeekingPassThru *passthru;

    if (FAILED(TransformFilter_Construct(&GSTTf_Vtbl, sizeof(GstTfImpl), clsid, vtbl, (IBaseFilter**)&This)))
        return E_OUTOFMEMORY;

    CoCreateInstance(&CLSID_SeekingPassThru, (IUnknown*)This, CLSCTX_INPROC_SERVER, &IID_IUnknown, (void**)&This->seekthru_unk);
    IUnknown_QueryInterface(This->seekthru_unk, &IID_ISeekingPassThru, (void**)&passthru);
    ISeekingPassThru_Init(passthru, FALSE, (IPin*)This->tf.ppPins[0]);
    ISeekingPassThru_Release(passthru);

    This->gstreamer_name = name;
    *obj = This;

    return S_OK;
}

static HRESULT WINAPI Gstreamer_Mp3_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if ( (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) &&
          !IsEqualGUID(&amt->majortype, &MEDIATYPE_Stream)) ||
         (!IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_MPEG1AudioPayload) &&
          !IsEqualGUID(&amt->subtype, &WMMEDIASUBTYPE_MP3)) ||
         !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx))
        return S_FALSE;

    return S_OK;
}

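/* Build audio/mpeg input caps and 16-bit PCM output caps from the connecting
 * media type, rewrite the output media type accordingly and hook up the
 * decoder element. */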
static HRESULT WINAPI Gstreamer_Mp3_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *wfx, *wfxin;
    HRESULT hr;
    int layer;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_Mp3_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return VFW_E_TYPE_NOT_ACCEPTED;

    wfxin = (WAVEFORMATEX*)amt->pbFormat;
    switch (wfxin->wFormatTag) {
    case WAVE_FORMAT_MPEGLAYER3:
        layer = 3;
        break;
    case WAVE_FORMAT_MPEG: {
        MPEG1WAVEFORMAT *mpgformat = (MPEG1WAVEFORMAT*)wfxin;
        layer = mpgformat->fwHeadLayer;
        break;
    }
    default:
        FIXME("Unhandled tag %x\n", wfxin->wFormatTag);
        return E_FAIL;
    }

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    outpmt->subtype = MEDIASUBTYPE_PCM;
    outpmt->formattype = FORMAT_WaveFormatEx;
    outpmt->cbFormat = sizeof(*wfx);
    CoTaskMemFree(outpmt->pbFormat);
    wfx = CoTaskMemAlloc(outpmt->cbFormat);
    outpmt->pbFormat = (BYTE*)wfx;
    wfx->wFormatTag = WAVE_FORMAT_PCM;
    wfx->wBitsPerSample = 16;
    wfx->nSamplesPerSec = wfxin->nSamplesPerSec;
    wfx->nChannels = wfxin->nChannels;
    wfx->nBlockAlign = wfx->wBitsPerSample * wfx->nChannels / 8;
    wfx->cbSize = 0;
    wfx->nAvgBytesPerSec = wfx->nSamplesPerSec * wfx->nBlockAlign;

    capsin = gst_caps_new_simple("audio/mpeg",
                                 "mpegversion", G_TYPE_INT, 1,
                                 "layer", G_TYPE_INT, layer,
                                 "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                 "channels", G_TYPE_INT, wfx->nChannels,
                                 NULL);
    capsout = gst_caps_new_simple("audio/x-raw-int",
                                  "endianness", G_TYPE_INT, 1234,
                                  "signed", G_TYPE_BOOLEAN, 1,
                                  "width", G_TYPE_INT, 16,
                                  "depth", G_TYPE_INT, 16,
                                  "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                  "channels", G_TYPE_INT, wfx->nChannels,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = wfx->nAvgBytesPerSec / 4;
    return hr;
}

static HRESULT WINAPI Gstreamer_Mp3_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    return S_OK;
}

static const TransformFilterFuncTable Gstreamer_Mp3_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_Mp3_QueryConnect,
    Gstreamer_Mp3_SetMediaType,
    Gstreamer_Mp3_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};

IUnknown * CALLBACK Gstreamer_Mp3_create(IUnknown *punkout, HRESULT *phr)
{
    const char *plugin;
    IUnknown *obj = NULL;

    *phr = E_FAIL;
    if (!Gstreamer_init())
        return NULL;
    plugin = Gstreamer_FindMatch("audio/mpeg, mpegversion=(int) 1");
    if (!plugin)
        return NULL;
    *phr = Gstreamer_transform_create(punkout, &CLSID_Gstreamer_Mp3, plugin, &Gstreamer_Mp3_vtbl, (LPVOID*)&obj);
    return obj;
}

static HRESULT WINAPI Gstreamer_YUV_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Video) ||
        (!IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo) &&
         !IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo2)))
        return S_FALSE;
    if (memcmp(&amt->subtype.Data2, &MEDIATYPE_Video.Data2, sizeof(GUID) - sizeof(amt->subtype.Data1)))
        return S_FALSE;
    switch (amt->subtype.Data1) {
    case mmioFOURCC('I','4','2','0'):
    case mmioFOURCC('Y','V','1','2'):
    case mmioFOURCC('N','V','1','2'):
    case mmioFOURCC('N','V','2','1'):
    case mmioFOURCC('Y','U','Y','2'):
    case mmioFOURCC('Y','V','Y','U'):
        return S_OK;
    default:
        WARN("Unhandled fourcc %s\n", debugstr_an((char*)&amt->subtype.Data1, 4));
        return S_FALSE;
    }
}

static HRESULT WINAPI Gstreamer_YUV_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    return S_OK;
}

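/* Describe the negotiated YUV format as video/x-raw-yuv caps and request
 * top-down 24-bit RGB from ffmpegcolorspace on the output side. */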
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if ((LONG)vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if ((LONG)vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    capsin = gst_caps_new_simple("video/x-raw-yuv",
                                 "format", GST_TYPE_FOURCC, amt->subtype.Data1,
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw-rgb",
                                  "endianness", G_TYPE_INT, 4321,
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  "bpp", G_TYPE_INT, 24,
                                  "depth", G_TYPE_INT, 24,
                                  "red_mask", G_TYPE_INT, 0xff,
                                  "green_mask", G_TYPE_INT, 0xff00,
                                  "blue_mask", G_TYPE_INT, 0xff0000,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;
    return hr;
}

static const TransformFilterFuncTable Gstreamer_YUV_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_YUV_QueryConnect,
    Gstreamer_YUV_SetMediaType,
    Gstreamer_YUV_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};

IUnknown * CALLBACK Gstreamer_YUV_create(IUnknown *punkout, HRESULT *phr)
{
    IUnknown *obj = NULL;

    *phr = E_FAIL;
    if (!Gstreamer_init())
        return NULL;
    *phr = Gstreamer_transform_create(punkout, &CLSID_Gstreamer_YUV, "ffmpegcolorspace", &Gstreamer_YUV_vtbl, (LPVOID*)&obj);
    return obj;
}

static HRESULT WINAPI Gstreamer_AudioConvert_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) ||
        !IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_PCM) ||
        !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx))
        return S_FALSE;

    return S_OK;
}

static HRESULT WINAPI Gstreamer_AudioConvert_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    return S_OK;
}

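/* Describe the incoming PCM (integer or IEEE float, possibly extensible) as
 * raw audio caps and ask the audioconvert element for 16-bit stereo PCM,
 * advertised downstream as WAVE_FORMAT_EXTENSIBLE. */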
static HRESULT WINAPI Gstreamer_AudioConvert_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *inwfe;
    WAVEFORMATEX *outwfe;
    WAVEFORMATEXTENSIBLE *outwfx;
    HRESULT hr;
    int inisfloat = 0, indepth;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_AudioConvert_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    *outpmt = *amt;
    outpmt->pUnk = NULL;
    outpmt->cbFormat = sizeof(WAVEFORMATEXTENSIBLE);
    outpmt->pbFormat = CoTaskMemAlloc(outpmt->cbFormat);

    inwfe = (WAVEFORMATEX*)amt->pbFormat;
    indepth = inwfe->wBitsPerSample;
    if (inwfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
        WAVEFORMATEXTENSIBLE *inwfx = (WAVEFORMATEXTENSIBLE*)inwfe;
        inisfloat = IsEqualGUID(&inwfx->SubFormat, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT);
        if (inwfx->Samples.wValidBitsPerSample)
            indepth = inwfx->Samples.wValidBitsPerSample;
    }

    capsin = gst_caps_new_simple(inisfloat ? "audio/x-raw-float" : "audio/x-raw-int",
                                 "endianness", G_TYPE_INT, 1234,
                                 "width", G_TYPE_INT, inwfe->wBitsPerSample,
                                 "depth", G_TYPE_INT, indepth,
                                 "channels", G_TYPE_INT, inwfe->nChannels,
                                 "rate", G_TYPE_INT, inwfe->nSamplesPerSec,
                                 NULL);

    outwfe = (WAVEFORMATEX*)outpmt->pbFormat;
    outwfx = (WAVEFORMATEXTENSIBLE*)outwfe;
    outwfe->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    outwfe->nChannels = 2;
    outwfe->nSamplesPerSec = inwfe->nSamplesPerSec;
    outwfe->wBitsPerSample = 16;
    outwfe->nBlockAlign = outwfe->nChannels * outwfe->wBitsPerSample / 8;
    outwfe->nAvgBytesPerSec = outwfe->nBlockAlign * outwfe->nSamplesPerSec;
    outwfe->cbSize = sizeof(*outwfx) - sizeof(*outwfe);
    outwfx->Samples.wValidBitsPerSample = outwfe->wBitsPerSample;
    outwfx->dwChannelMask = SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT;
    outwfx->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

    capsout = gst_caps_new_simple("audio/x-raw-int",
                                  "endianness", G_TYPE_INT, 1234,
                                  "width", G_TYPE_INT, outwfe->wBitsPerSample,
                                  "depth", G_TYPE_INT, outwfx->Samples.wValidBitsPerSample,
                                  "channels", G_TYPE_INT, outwfe->nChannels,
                                  "rate", G_TYPE_INT, outwfe->nSamplesPerSec,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = inwfe->nAvgBytesPerSec;
    return hr;
}

static const TransformFilterFuncTable Gstreamer_AudioConvert_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_AudioConvert_QueryConnect,
    Gstreamer_AudioConvert_SetMediaType,
    Gstreamer_AudioConvert_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};

IUnknown * CALLBACK Gstreamer_AudioConvert_create(IUnknown *punkout, HRESULT *phr)
{
    IUnknown *obj = NULL;

    *phr = E_FAIL;
    if (!Gstreamer_init())
        return NULL;
    *phr = Gstreamer_transform_create(punkout, &CLSID_Gstreamer_AudioConvert, "audioconvert", &Gstreamer_AudioConvert_vtbl, (LPVOID*)&obj);
    return obj;
}

static HRESULT WINAPI GSTTf_QueryInterface(IBaseFilter * iface, REFIID riid, LPVOID * ppv)
{
    HRESULT hr;
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("(%p/%p)->(%s, %p)\n", This, iface, debugstr_guid(riid), ppv);

    if (IsEqualIID(riid, &IID_IMediaSeeking))
        return IUnknown_QueryInterface(This->seekthru_unk, riid, ppv);

    hr = TransformFilterImpl_QueryInterface(iface, riid, ppv);
    return hr;
}

static const IBaseFilterVtbl GSTTf_Vtbl =
{
    GSTTf_QueryInterface,
    BaseFilterImpl_AddRef,
    TransformFilterImpl_Release,
    BaseFilterImpl_GetClassID,
    TransformFilterImpl_Stop,
    TransformFilterImpl_Pause,
    TransformFilterImpl_Run,
    BaseFilterImpl_GetState,
    BaseFilterImpl_SetSyncSource,
    BaseFilterImpl_GetSyncSource,
    BaseFilterImpl_EnumPins,
    TransformFilterImpl_FindPin,
    BaseFilterImpl_QueryFilterInfo,
    BaseFilterImpl_JoinFilterGraph,
    BaseFilterImpl_QueryVendorInfo
};