/*
 * GStreamer wrapper filter
 *
 * Copyright 2010 Maarten Lankhorst for CodeWeavers
 * Copyright 2010 Aric Stewart for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#include <gst/video/video.h>
#include <gst/audio/audio.h>

#include "gst_private.h"
#include "gst_guids.h"

#include "wine/unicode.h"
#include "wine/debug.h"

DEFINE_GUID(WMMEDIASUBTYPE_MP3, 0x00000055, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);

WINE_DEFAULT_DEBUG_CHANNEL(gstreamer);

static const IBaseFilterVtbl GSTTf_Vtbl;
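/* Each filter below wraps a single GStreamer element behind Wine's
 * TransformFilter base class: samples received on the DirectShow input pin
 * are pushed into the element through a private source pad, and the
 * element's output is delivered on the DirectShow output pin through a
 * private sink pad (see got_data below). */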
struct typeinfo {
    GstCaps *caps;
    const char *type;
};

static gboolean match_element(GstPluginFeature *feature, gpointer gdata)
{
    struct typeinfo *data = (struct typeinfo *)gdata;
    GstElementFactory *factory;
    const GList *list;

    if (!GST_IS_ELEMENT_FACTORY(feature))
        return FALSE;
    factory = GST_ELEMENT_FACTORY(feature);
    if (!strstr(gst_element_factory_get_klass(factory), data->type))
        return FALSE;
    for (list = gst_element_factory_get_static_pad_templates(factory); list; list = list->next) {
        GstStaticPadTemplate *pad = (GstStaticPadTemplate *)list->data;
        GstCaps *caps;
        gboolean ret;

        if (pad->direction != GST_PAD_SINK)
            continue;
        caps = gst_static_caps_get(&pad->static_caps);
        ret = gst_caps_is_always_compatible(caps, data->caps);
        gst_caps_unref(caps);
        if (ret)
            return TRUE;
    }
    return FALSE;
}
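/* Return the name of the highest-ranked GStreamer decoder element whose
 * sink pad template accepts the given caps string, or NULL if none does. */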
static const char *Gstreamer_FindMatch(const char *strcaps)
{
    struct typeinfo data;
    GList *list, *copy;
    guint bestrank = 0;
    GstElementFactory *bestfactory = NULL;
    GstCaps *caps = gst_caps_from_string(strcaps);

    TRACE("%s\n", strcaps);

    data.caps = caps;
    data.type = "Decoder";
    copy = gst_registry_feature_filter(gst_registry_get(), match_element, 0, &data);
    for (list = copy; list; list = list->next) {
        GstElementFactory *factory = (GstElementFactory *)list->data;
        guint rank;

        rank = gst_plugin_feature_get_rank(GST_PLUGIN_FEATURE(factory));
        if (rank > bestrank || !bestrank) {
            bestrank = rank;
            bestfactory = factory;
        }
    }
    gst_caps_unref(caps);
    g_list_free(copy);

    if (!bestfactory) {
        FIXME("Could not find plugin for %s\n", strcaps);
        return NULL;
    }

    return gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(bestfactory));
}
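/* Per-filter state: my_src and my_sink are the pads this filter owns; they
 * are linked to the wrapped element's sink and source pads (their_sink and
 * their_src). cbBuffer is the output buffer size requested from the
 * downstream allocator. */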
typedef struct GstTfImpl {
    TransformFilter tf;
    const char *gstreamer_name;
    GstElement *filter;
    GstPad *my_src, *my_sink, *their_src, *their_sink;
    LONG cbBuffer;
} GstTfImpl;
static HRESULT WINAPI Gstreamer_transform_ProcessBegin(TransformFilter *iface)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    int ret;

    ret = gst_element_set_state(This->filter, GST_STATE_PLAYING);
    TRACE("Returned: %i\n", ret);
    return S_OK;
}
static HRESULT WINAPI Gstreamer_transform_DecideBufferSize(TransformFilter *tf, IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
    GstTfImpl *This = (GstTfImpl *)tf;
    ALLOCATOR_PROPERTIES actual;

    TRACE("%p, %p, %p\n", This, pAlloc, ppropInputRequest);

    if (!ppropInputRequest->cbAlign)
        ppropInputRequest->cbAlign = 1;

    ppropInputRequest->cbBuffer = This->cbBuffer;

    if (ppropInputRequest->cBuffers < 2)
        ppropInputRequest->cBuffers = 2;

    return IMemAllocator_SetProperties(pAlloc, ppropInputRequest, &actual);
}
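/* Chain function for my_sink: receives buffers produced by the wrapped
 * element. If the buffer does not already carry the original media sample
 * as qdata, its contents are copied into a freshly requested output sample.
 * GStreamer timestamps are nanoseconds while DirectShow uses 100 ns units,
 * hence the divisions by 100 below. */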
GstFlowReturn got_data(GstPad *pad, GstObject *parent, GstBuffer *buf)
{
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample = (IMediaSample *) gst_mini_object_get_qdata(GST_MINI_OBJECT(buf), g_quark_from_static_string(media_quark_string));
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;

    TRACE("%p, %p\n", pad, buf);

    if (!sample) {
        GstMapInfo info;
        BYTE *ptr;

        gst_buffer_map(buf, &info, GST_MAP_READ);

        hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &sample, NULL, NULL, 0);
        if (FAILED(hr)) {
            ERR("Could not get output buffer: %08x\n", hr);
            return GST_FLOW_FLUSHING;
        }

        IMediaSample_SetActualDataLength(sample, info.size);

        IMediaSample_GetPointer(sample, &ptr);

        memcpy(ptr, info.data, info.size);

        gst_buffer_unmap(buf, &info);
    }

    if (GST_BUFFER_PTS_IS_VALID(buf) &&
        GST_BUFFER_DURATION_IS_VALID(buf)) {
        tStart = buf->pts / 100;
        tStop = tStart + buf->duration / 100;
        IMediaSample_SetTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetTime(sample, NULL, NULL);
    if (GST_BUFFER_OFFSET_IS_VALID(buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID(buf)) {
        tStart = buf->offset / 100;
        tStop = buf->offset_end / 100;
        IMediaSample_SetMediaTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetMediaTime(sample, NULL, NULL);

    IMediaSample_SetDiscontinuity(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT));
    IMediaSample_SetPreroll(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_LIVE));
    IMediaSample_SetSyncPoint(sample, !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT));
    IMediaSample_SetActualDataLength(sample, gst_buffer_get_size(buf));

    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], sample);
    IMediaSample_Release(sample);
    gst_buffer_unref(buf);
    if (FAILED(hr))
        return GST_FLOW_FLUSHING;
    return GST_FLOW_OK;
}
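/* Receive callback for the DirectShow input pin: wraps the sample data in a
 * GstBuffer without copying, attaches the sample as qdata so got_data can
 * recognise pass-through buffers, translates timestamps and flags, and
 * pushes the buffer into the wrapped element. */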
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    DWORD bufsize;
    HRESULT hr;
    int ret;

    TRACE("%p, %p\n", This, sample);

    EnterCriticalSection(&This->tf.csReceive);
    IMediaSample_GetPointer(sample, &data);

    IMediaSample_AddRef(sample);
    bufsize = IMediaSample_GetActualDataLength(sample);
    buf = gst_buffer_new_wrapped_full(0, data, bufsize, 0, bufsize, sample, release_sample_wrapper);
    if (!buf) {
        IMediaSample_Release(sample);
        LeaveCriticalSection(&This->tf.csReceive);
        return E_FAIL;
    }

    IMediaSample_AddRef(sample);
    gst_mini_object_set_qdata(GST_MINI_OBJECT(buf), g_quark_from_static_string(media_quark_string), sample, release_sample_wrapper);

    buf->duration = buf->pts = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr)) {
        buf->pts = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart)*100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK) {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.csReceive);
    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_FLUSHING)
        return VFW_E_WRONG_STATE;
    return S_OK;
}
static HRESULT WINAPI Gstreamer_transform_ProcessEnd(TransformFilter *iface)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    int ret;

    LeaveCriticalSection(&This->tf.csReceive);
    ret = gst_element_set_state(This->filter, GST_STATE_READY);
    EnterCriticalSection(&This->tf.csReceive);
    TRACE("Returned: %i\n", ret);
    return S_OK;
}
void Gstreamer_transform_pad_added(GstElement *filter, GstPad *pad, gpointer user)
{
    GstTfImpl *This = (GstTfImpl *)user;
    int ret;

    TRACE("%p %p %p\n", This, filter, pad);

    if (!GST_PAD_IS_SRC(pad))
        return;

    ret = gst_pad_link(pad, This->my_sink);
    if (ret < 0)
        WARN("Failed to link with %i\n", ret);
    This->their_src = pad;
}
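/* Create the wrapped element and wire it up: my_src feeds the element's sink
 * pad, and the element's source pad (found directly or delivered later via
 * the "pad-added" signal) is linked back to my_sink. */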
static HRESULT Gstreamer_transform_ConnectInput(GstTfImpl *This, const AM_MEDIA_TYPE *amt, GstCaps *capsin, GstCaps *capsout)
{
    GstIterator *it;
    BOOL done = FALSE, found = FALSE;
    int ret;

    TRACE("%p %p %p %p\n", This, amt, capsin, capsout);

    This->filter = gst_element_factory_make(This->gstreamer_name, NULL);
    if (!This->filter) {
        FIXME("Could not make %s filter\n", This->gstreamer_name);
        return E_FAIL;
    }

    This->my_src = gst_pad_new("yuvsrc", GST_PAD_SRC);
    gst_pad_set_element_private (This->my_src, This);
    gst_pad_set_active(This->my_src, 1);

    This->my_sink = gst_pad_new("yuvsink", GST_PAD_SINK);
    gst_pad_set_chain_function(This->my_sink, got_data_wrapper);
    gst_pad_set_element_private (This->my_sink, This);
    gst_pad_set_active(This->my_sink, 1);

    it = gst_element_iterate_sink_pads(This->filter);
    while (!done) {
        GValue item = {0};

        switch (gst_iterator_next(it, &item)) {
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync (it);
            break;
        case GST_ITERATOR_OK:
            This->their_sink = g_value_get_object(&item);
            gst_object_ref(This->their_sink);
            g_value_reset(&item);
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }
    gst_iterator_free(it);
    if (!This->their_sink) {
        ERR("Could not find sink on filter %s\n", This->gstreamer_name);
        return E_FAIL;
    }

    it = gst_element_iterate_src_pads(This->filter);
    gst_iterator_resync(it);
    done = FALSE;
    while (!done) {
        GValue item = {0};

        switch (gst_iterator_next(it, &item)) {
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync (it);
            break;
        case GST_ITERATOR_OK:
            This->their_src = g_value_get_object(&item);
            gst_object_ref(This->their_src);
            g_value_reset(&item);
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }
    gst_iterator_free(it);
    found = !!This->their_src;

    g_signal_connect(This->filter, "pad-added", G_CALLBACK(Gstreamer_transform_pad_added_wrapper), This);
    ret = gst_pad_link(This->my_src, This->their_sink);
    if (ret < 0) {
        WARN("Failed to link with %i\n", ret);
        return E_FAIL;
    }

    ret = gst_pad_set_caps(This->my_src, capsin);
    if (ret < 0) {
        WARN("Failed to set caps on own source with %i\n", ret);
        return E_FAIL;
    }

    if (found)
        Gstreamer_transform_pad_added(This->filter, This->their_src, This);

    if (!gst_pad_is_linked(This->my_sink))
        return E_FAIL;

    ret = gst_pad_set_caps(This->my_sink, capsout);
    if (ret < 0) {
        WARN("Failed to set caps on own sink with %i\n", ret);
        return E_FAIL;
    }

    TRACE("Connected\n");
    return S_OK;
}
static HRESULT WINAPI Gstreamer_transform_Cleanup(TransformFilter *tf, PIN_DIRECTION dir)
{
    GstTfImpl *This = (GstTfImpl *)tf;

    TRACE("%p 0x%x\n", This, dir);

    if (dir == PINDIR_INPUT)
    {
        if (This->filter) {
            gst_element_set_state(This->filter, GST_STATE_NULL);
            gst_object_unref(This->filter);
        }
        This->filter = NULL;
        if (This->my_src) {
            gst_pad_unlink(This->my_src, This->their_sink);
            gst_object_unref(This->my_src);
            gst_object_unref(This->their_sink);
        }
        if (This->my_sink) {
            gst_pad_unlink(This->their_src, This->my_sink);
            gst_object_unref(This->my_sink);
            gst_object_unref(This->their_src);
        }
        This->my_sink = This->my_src = This->their_sink = This->their_src = NULL;
    }
    return S_OK;
}
static HRESULT WINAPI Gstreamer_transform_EndOfStream(TransformFilter *iface)
{
    GstTfImpl *This = (GstTfImpl *)iface;

    gst_pad_push_event(This->my_src, gst_event_new_eos());
    return S_OK;
}
static HRESULT WINAPI Gstreamer_transform_BeginFlush(TransformFilter *iface)
{
    GstTfImpl *This = (GstTfImpl *)iface;

    gst_pad_push_event(This->my_src, gst_event_new_flush_start());
    return S_OK;
}
static HRESULT WINAPI Gstreamer_transform_EndFlush(TransformFilter *iface)
{
    GstTfImpl *This = (GstTfImpl *)iface;

    gst_pad_push_event(This->my_src, gst_event_new_flush_stop(TRUE));
    return S_OK;
}
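/* Translate a DirectShow NewSegment call into a GStreamer segment event.
 * DirectShow times (100 ns units) are scaled by 100 to nanoseconds; a stop
 * time at or before the start is mapped to -1 (unknown). */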
static HRESULT WINAPI Gstreamer_transform_NewSegment(TransformFilter *iface, REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    const GstSegment segment = { GST_SEGMENT_FLAG_NONE, 1.0, dRate, GST_FORMAT_TIME, 0, 0, 0, tStop <= tStart ? -1 : tStop * 100, 0, tStart*100, -1 };

    gst_pad_push_event(This->my_src, gst_event_new_segment(&segment));
    return S_OK;
}
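/* Forward DirectShow quality-control notifications upstream as GStreamer QoS
 * events, clamping a negative Late value so it never exceeds the timestamp. */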
static HRESULT WINAPI Gstreamer_transform_QOS(TransformFilter *iface, IBaseFilter *sender, Quality qm)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    REFERENCE_TIME late = qm.Late;

    TRACE("%p %p { 0x%x %u %s %s }\n", This, sender,
          qm.Type, qm.Proportion,
          wine_dbgstr_longlong(qm.Late),
          wine_dbgstr_longlong(qm.TimeStamp));

    if (qm.Late < 0 && -qm.Late > qm.TimeStamp)
        late = -qm.TimeStamp;
    gst_pad_push_event(This->my_sink, gst_event_new_qos(late <= 0 ? GST_QOS_TYPE_OVERFLOW : GST_QOS_TYPE_UNDERFLOW, 1000. / qm.Proportion, late * 100, qm.TimeStamp * 100));
    return TransformFilterImpl_Notify(iface, sender, qm);
}
static HRESULT Gstreamer_transform_create(IUnknown *punkouter, const CLSID *clsid, const char *name, const TransformFilterFuncTable *vtbl, void **obj)
{
    GstTfImpl *This;

    TRACE("%p, %p, %p, %p, %p\n", punkouter, clsid, name, vtbl, obj);

    if (FAILED(TransformFilter_Construct(&GSTTf_Vtbl, sizeof(GstTfImpl), clsid, vtbl, (IBaseFilter**)&This)))
        return E_OUTOFMEMORY;

    This->gstreamer_name = name;
    *obj = This;

    TRACE("returning %p\n", This);

    return S_OK;
}
static HRESULT WINAPI Gstreamer_Mp3_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if ( (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) &&
          !IsEqualGUID(&amt->majortype, &MEDIATYPE_Stream)) ||
         (!IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_MPEG1AudioPayload) &&
          !IsEqualGUID(&amt->subtype, &WMMEDIASUBTYPE_MP3))
         || !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx)) {
        return S_FALSE;
    }

    return S_OK;
}
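/* Build the decoder's input caps (audio/mpeg, with the layer taken from the
 * wave format tag) and advertise a 16-bit PCM output type downstream. */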
static HRESULT WINAPI Gstreamer_Mp3_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl *)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *wfx, *wfxin;
    HRESULT hr;
    int layer;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_Mp3_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return VFW_E_TYPE_NOT_ACCEPTED;

    wfxin = (WAVEFORMATEX*)amt->pbFormat;
    switch (wfxin->wFormatTag) {
        case WAVE_FORMAT_MPEGLAYER3:
            layer = 3;
            break;
        case WAVE_FORMAT_MPEG: {
            MPEG1WAVEFORMAT *mpgformat = (MPEG1WAVEFORMAT*)wfxin;
            layer = mpgformat->fwHeadLayer;
            break;
        }
        default:
            FIXME("Unhandled tag %x\n", wfxin->wFormatTag);
            return E_FAIL;
    }

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    outpmt->subtype = MEDIASUBTYPE_PCM;
    outpmt->formattype = FORMAT_WaveFormatEx;
    outpmt->cbFormat = sizeof(*wfx);
    CoTaskMemFree(outpmt->pbFormat);
    wfx = CoTaskMemAlloc(outpmt->cbFormat);
    outpmt->pbFormat = (BYTE*)wfx;
    wfx->wFormatTag = WAVE_FORMAT_PCM;
    wfx->wBitsPerSample = 16;
    wfx->nSamplesPerSec = wfxin->nSamplesPerSec;
    wfx->nChannels = wfxin->nChannels;
    wfx->nBlockAlign = wfx->wBitsPerSample * wfx->nChannels / 8;
    wfx->cbSize = 0;
    wfx->nAvgBytesPerSec = wfx->nSamplesPerSec * wfx->nBlockAlign;

    capsin = gst_caps_new_simple("audio/mpeg",
                                 "mpegversion", G_TYPE_INT, 1,
                                 "layer", G_TYPE_INT, layer,
                                 "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                 "channels", G_TYPE_INT, wfx->nChannels,
                                 NULL);
    capsout = gst_caps_new_simple("audio/x-raw",
                                  "format", G_TYPE_STRING, "S16LE",
                                  "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                  "channels", G_TYPE_INT, wfx->nChannels,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = wfx->nAvgBytesPerSec / 4;

    return hr;
}
static HRESULT WINAPI Gstreamer_Mp3_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    TRACE("%p 0x%x %p\n", tf, dir, pin);
    return S_OK;
}
static const TransformFilterFuncTable Gstreamer_Mp3_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_Mp3_QueryConnect,
    Gstreamer_Mp3_SetMediaType,
    Gstreamer_Mp3_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};
IUnknown * CALLBACK Gstreamer_Mp3_create(IUnknown *punkouter, HRESULT *phr)
{
    const char *plugin;
    IUnknown *obj = NULL;

    TRACE("%p %p\n", punkouter, phr);

    if (!Gstreamer_init())
    {
        *phr = E_FAIL;
        return NULL;
    }

    plugin = Gstreamer_FindMatch("audio/mpeg, mpegversion=(int) 1");
    if (!plugin)
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkouter, &CLSID_Gstreamer_Mp3, plugin, &Gstreamer_Mp3_vtbl, (LPVOID*)&obj);

    TRACE("returning %p\n", obj);

    return obj;
}
static HRESULT WINAPI Gstreamer_YUV_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Video) ||
        (!IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo) &&
         !IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo2)))
        return S_FALSE;
    if (memcmp(&amt->subtype.Data2, &MEDIATYPE_Video.Data2, sizeof(GUID) - sizeof(amt->subtype.Data1)))
        return S_FALSE;
    switch (amt->subtype.Data1) {
        case mmioFOURCC('I','4','2','0'):
        case mmioFOURCC('Y','V','1','2'):
        case mmioFOURCC('N','V','1','2'):
        case mmioFOURCC('N','V','2','1'):
        case mmioFOURCC('Y','U','Y','2'):
        case mmioFOURCC('Y','V','Y','U'):
            return S_OK;
        default:
            WARN("Unhandled fourcc %s\n", debugstr_an((char*)&amt->subtype.Data1, 4));
            return S_FALSE;
    }
}
static HRESULT WINAPI Gstreamer_YUV_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    TRACE("%p 0x%x %p\n", tf, dir, pin);
    return S_OK;
}
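/* Map the negotiated YUV fourcc to the corresponding GStreamer video format
 * and request 24-bit BGR output, which matches the in-memory byte order of
 * MEDIASUBTYPE_RGB24. */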
static HRESULT WINAPI Gstreamer_YUV2RGB_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl *)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    capsin = gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING,
                                   gst_video_format_to_string(
                                     gst_video_format_from_fourcc(amt->subtype.Data1)),
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw",
                                  "format", G_TYPE_STRING, "BGR",
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;
    return hr;
}
static const TransformFilterFuncTable Gstreamer_YUV2RGB_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_YUV_QueryConnect,
    Gstreamer_YUV2RGB_SetMediaType,
    Gstreamer_YUV_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};
IUnknown * CALLBACK Gstreamer_YUV2RGB_create(IUnknown *punkouter, HRESULT *phr)
{
    IUnknown *obj = NULL;

    TRACE("%p %p\n", punkouter, phr);

    if (!Gstreamer_init())
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkouter, &CLSID_Gstreamer_YUV2RGB, "videoconvert", &Gstreamer_YUV2RGB_vtbl, (LPVOID*)&obj);

    TRACE("returning %p\n", obj);

    return obj;
}
static HRESULT WINAPI Gstreamer_YUV2ARGB_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl *)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 32;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 4;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 32;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 4;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_ARGB32;

    capsin = gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING,
                                   gst_video_format_to_string(
                                     gst_video_format_from_fourcc(amt->subtype.Data1)),
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw",
                                  "format", G_TYPE_STRING, "BGRA",
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;
    return hr;
}
static const TransformFilterFuncTable Gstreamer_YUV2ARGB_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_YUV_QueryConnect,
    Gstreamer_YUV2ARGB_SetMediaType,
    Gstreamer_YUV_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};
IUnknown * CALLBACK Gstreamer_YUV2ARGB_create(IUnknown *punkouter, HRESULT *phr)
{
    IUnknown *obj = NULL;

    TRACE("%p %p\n", punkouter, phr);

    if (!Gstreamer_init())
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkouter, &CLSID_Gstreamer_YUV2ARGB, "videoconvert", &Gstreamer_YUV2ARGB_vtbl, (LPVOID*)&obj);

    TRACE("returning %p\n", obj);

    return obj;
}
static HRESULT WINAPI Gstreamer_AudioConvert_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl *)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) ||
        !IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_PCM) ||
        !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx))
        return S_FALSE;

    return S_OK;
}
static HRESULT WINAPI Gstreamer_AudioConvert_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    TRACE("%p 0x%x %p\n", tf, dir, pin);
    return S_OK;
}
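/* Work out the exact input sample format (integer or IEEE float, valid bits
 * per sample) and always convert to stereo 16-bit little-endian PCM. */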
static HRESULT WINAPI Gstreamer_AudioConvert_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl *)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *inwfe;
    WAVEFORMATEX *outwfe;
    WAVEFORMATEXTENSIBLE *outwfx;
    GstAudioFormat format;
    HRESULT hr;
    BOOL inisfloat = FALSE;
    int indepth;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_AudioConvert_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    *outpmt = *amt;
    outpmt->pUnk = NULL;
    outpmt->cbFormat = sizeof(WAVEFORMATEXTENSIBLE);
    outpmt->pbFormat = CoTaskMemAlloc(outpmt->cbFormat);

    inwfe = (WAVEFORMATEX*)amt->pbFormat;
    indepth = inwfe->wBitsPerSample;
    if (inwfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
        WAVEFORMATEXTENSIBLE *inwfx = (WAVEFORMATEXTENSIBLE*)inwfe;
        inisfloat = IsEqualGUID(&inwfx->SubFormat, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT);
        if (inwfx->Samples.wValidBitsPerSample)
            indepth = inwfx->Samples.wValidBitsPerSample;
    } else if (inwfe->wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
        inisfloat = TRUE;

    if (inisfloat)
        format = inwfe->wBitsPerSample == 64 ? GST_AUDIO_FORMAT_F64LE : GST_AUDIO_FORMAT_F32LE;
    else
        format = gst_audio_format_build_integer(inwfe->wBitsPerSample != 8, G_LITTLE_ENDIAN,
                                                inwfe->wBitsPerSample, indepth);

    capsin = gst_caps_new_simple("audio/x-raw",
                                 "format", G_TYPE_STRING, gst_audio_format_to_string(format),
                                 "channels", G_TYPE_INT, inwfe->nChannels,
                                 "rate", G_TYPE_INT, inwfe->nSamplesPerSec,
                                 NULL);

    outwfe = (WAVEFORMATEX*)outpmt->pbFormat;
    outwfx = (WAVEFORMATEXTENSIBLE*)outwfe;
    outwfe->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    outwfe->nChannels = 2;
    outwfe->nSamplesPerSec = inwfe->nSamplesPerSec;
    outwfe->wBitsPerSample = 16;
    outwfe->nBlockAlign = outwfe->nChannels * outwfe->wBitsPerSample / 8;
    outwfe->nAvgBytesPerSec = outwfe->nBlockAlign * outwfe->nSamplesPerSec;
    outwfe->cbSize = sizeof(*outwfx) - sizeof(*outwfe);
    outwfx->Samples.wValidBitsPerSample = outwfe->wBitsPerSample;
    outwfx->dwChannelMask = SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT;
    outwfx->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

    capsout = gst_caps_new_simple("audio/x-raw",
                                  "format", G_TYPE_STRING, "S16LE",
                                  "channels", G_TYPE_INT, outwfe->nChannels,
                                  "rate", G_TYPE_INT, outwfe->nSamplesPerSec,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = inwfe->nAvgBytesPerSec;

    return hr;
}
static const TransformFilterFuncTable Gstreamer_AudioConvert_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_AudioConvert_QueryConnect,
    Gstreamer_AudioConvert_SetMediaType,
    Gstreamer_AudioConvert_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};
IUnknown * CALLBACK Gstreamer_AudioConvert_create(IUnknown *punkouter, HRESULT *phr)
{
    IUnknown *obj = NULL;

    TRACE("%p %p\n", punkouter, phr);

    if (!Gstreamer_init())
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkouter, &CLSID_Gstreamer_AudioConvert, "audioconvert", &Gstreamer_AudioConvert_vtbl, (LPVOID*)&obj);

    TRACE("returning %p\n", obj);

    return obj;
}
static const IBaseFilterVtbl GSTTf_Vtbl =
{
    TransformFilterImpl_QueryInterface,
    BaseFilterImpl_AddRef,
    TransformFilterImpl_Release,
    BaseFilterImpl_GetClassID,
    TransformFilterImpl_Stop,
    TransformFilterImpl_Pause,
    TransformFilterImpl_Run,
    BaseFilterImpl_GetState,
    BaseFilterImpl_SetSyncSource,
    BaseFilterImpl_GetSyncSource,
    BaseFilterImpl_EnumPins,
    TransformFilterImpl_FindPin,
    BaseFilterImpl_QueryFilterInfo,
    BaseFilterImpl_JoinFilterGraph,
    BaseFilterImpl_QueryVendorInfo
};