/*
 * GStreamer wrapper filter
 *
 * Copyright 2010 Maarten Lankhorst for CodeWeavers
 * Copyright 2010 Aric Stewart for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>

#include "gst_private.h"
#include "gst_guids.h"

#include "wine/unicode.h"
#include "wine/debug.h"

DEFINE_GUID(WMMEDIASUBTYPE_MP3, 0x00000055, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);

WINE_DEFAULT_DEBUG_CHANNEL(gstreamer);

static const IBaseFilterVtbl GSTTf_Vtbl;

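/* Registry matching helpers: struct typeinfo carries the caps we want to feed
 * in and the element class to look for (e.g. "Decoder"); match_element() is
 * the registry feature filter that checks factories against it. */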
struct typeinfo {
    GstCaps *caps;
    const char *type;
};

static gboolean match_element(GstPluginFeature *feature, gpointer gdata) {
    struct typeinfo *data = (struct typeinfo*)gdata;
    GstElementFactory *factory;
    const GList *list;

    if (!GST_IS_ELEMENT_FACTORY(feature))
        return FALSE;
    factory = GST_ELEMENT_FACTORY(feature);
    if (!strstr(gst_element_factory_get_klass(factory), data->type))
        return FALSE;
    for (list = gst_element_factory_get_static_pad_templates(factory); list; list = list->next) {
        GstStaticPadTemplate *pad = (GstStaticPadTemplate*)list->data;
        GstCaps *caps;
        gboolean ret;
        if (pad->direction != GST_PAD_SINK)
            continue;
        caps = gst_static_caps_get(&pad->static_caps);
        ret = gst_caps_is_always_compatible(caps, data->caps);
        gst_caps_unref(caps);
        if (ret)
            return TRUE;
    }
    return FALSE;
}

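/* Pick the highest-ranked decoder factory in the default registry that can
 * handle the given caps string, and return its factory name. */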
static const char *Gstreamer_FindMatch(const char *strcaps)
{
    struct typeinfo data;
    GList *list, *copy;
    guint bestrank = 0;
    GstElementFactory *bestfactory = NULL;
    GstCaps *caps = gst_caps_from_string(strcaps);

    data.caps = caps;
    data.type = "Decoder";
    copy = gst_default_registry_feature_filter(match_element, 0, &data);
    for (list = copy; list; list = list->next) {
        GstElementFactory *factory = (GstElementFactory*)list->data;
        guint rank;
        rank = gst_plugin_feature_get_rank(GST_PLUGIN_FEATURE(factory));
        if (rank > bestrank || !bestrank) {
            bestrank = rank;
            bestfactory = factory;
        }
    }
    gst_caps_unref(caps);
    g_list_free(copy);

    if (!bestfactory) {
        FIXME("Could not find plugin for %s\n", strcaps);
        return NULL;
    }

    return gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(bestfactory));
}

typedef struct GstTfImpl
{
    TransformFilter tf;
    const char *gstreamer_name;
    GstElement *filter;
    GstPad *my_src, *my_sink, *their_src, *their_sink;
    LONG cbBuffer;
} GstTfImpl;

static HRESULT WINAPI Gstreamer_transform_ProcessBegin(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;
    int ret;

    ret = gst_element_set_state(This->filter, GST_STATE_PLAYING);
    TRACE("Returned: %i\n", ret);
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_DecideBufferSize(TransformFilter *tf, IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    ALLOCATOR_PROPERTIES actual;

    if (!ppropInputRequest->cbAlign)
        ppropInputRequest->cbAlign = 1;

    ppropInputRequest->cbBuffer = This->cbBuffer;

    if (ppropInputRequest->cBuffers < 2)
        ppropInputRequest->cBuffers = 2;

    return IMemAllocator_SetProperties(pAlloc, ppropInputRequest, &actual);
}

static void release_sample(void *data) {
    TRACE("Releasing %p\n", data);
    IMediaSample_Release((IMediaSample *)data);
}

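/* Chain function for our sink pad: the GstBuffer arriving here is an app
 * buffer whose backing store is the IMediaSample obtained in request_buffer(),
 * so only the timing and flag metadata needs to be copied across before the
 * sample is delivered to the downstream DirectShow pin. */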
static GstFlowReturn got_data(GstPad *pad, GstBuffer *buf) {
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample = GST_APP_BUFFER(buf)->priv;
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;

    if (GST_BUFFER_TIMESTAMP_IS_VALID(buf) &&
        GST_BUFFER_DURATION_IS_VALID(buf)) {
        tStart = buf->timestamp / 100;
        tStop = tStart + buf->duration / 100;
        IMediaSample_SetTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetTime(sample, NULL, NULL);
    if (GST_BUFFER_OFFSET_IS_VALID(buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID(buf)) {
        tStart = buf->offset / 100;
        tStop = buf->offset_end / 100;
        IMediaSample_SetMediaTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetMediaTime(sample, NULL, NULL);
    IMediaSample_SetDiscontinuity(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT));
    IMediaSample_SetPreroll(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_PREROLL));
    IMediaSample_SetSyncPoint(sample, !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT));
    IMediaSample_SetActualDataLength(sample, GST_BUFFER_SIZE(buf));

    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], sample);
    gst_buffer_unref(buf);
    if (FAILED(hr))
        return GST_FLOW_WRONG_STATE;
    if (hr != S_OK)
        return GST_FLOW_RESEND;
    return GST_FLOW_OK;
}

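/* Buffer-allocation callback for our sink pad: instead of letting GStreamer
 * allocate memory, grab a delivery buffer from the output pin's allocator and
 * wrap it in an app buffer, so the wrapped element writes straight into the
 * IMediaSample that got_data() will deliver. */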
static GstFlowReturn request_buffer(GstPad *pad, guint64 ofs, guint size, GstCaps *caps, GstBuffer **buf) {
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample;
    BYTE *ptr;
    HRESULT hr;

    TRACE("Requesting buffer\n");

    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &sample, NULL, NULL, 0);
    if (FAILED(hr)) {
        ERR("Could not get output buffer: %08x\n", hr);
        return GST_FLOW_WRONG_STATE;
    }
    IMediaSample_SetActualDataLength(sample, size);
    IMediaSample_GetPointer(sample, &ptr);
    *buf = gst_app_buffer_new(ptr, size, release_sample, sample);
    if (!*buf) {
        IMediaSample_Release(sample);
        ERR("Out of memory\n");
        return GST_FLOW_ERROR;
    }
    if (!caps)
        caps = gst_pad_get_caps_reffed(This->my_sink);
    gst_buffer_set_caps(*buf, caps);
    return GST_FLOW_OK;
}

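/* Receive path: wrap the incoming IMediaSample in an app buffer (no copy),
 * translate the 100 ns DirectShow timestamps into GStreamer nanoseconds and
 * push the buffer into the wrapped element. */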
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample) {
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    int ret;

    TRACE("Reading %p\n", sample);

    EnterCriticalSection(&This->tf.csReceive);
    IMediaSample_GetPointer(sample, &data);
    buf = gst_app_buffer_new(data, IMediaSample_GetActualDataLength(sample), release_sample, sample);
    if (!buf) {
        LeaveCriticalSection(&This->tf.csReceive);
        return S_FALSE;
    }
    gst_buffer_set_caps(buf, gst_pad_get_caps_reffed(This->my_src));
    IMediaSample_AddRef(sample);
    buf->duration = buf->timestamp = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr)) {
        buf->timestamp = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart)*100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK) {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_PREROLL);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.csReceive);
    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_ERROR)
        return E_FAIL;
    if (ret == GST_FLOW_WRONG_STATE)
        return VFW_E_WRONG_STATE;
    if (ret == GST_FLOW_RESEND)
        return S_FALSE;
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_ProcessEnd(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;
    int ret;

    LeaveCriticalSection(&This->tf.csReceive);
    ret = gst_element_set_state(This->filter, GST_STATE_READY);
    EnterCriticalSection(&This->tf.csReceive);
    TRACE("Returned: %i\n", ret);
    return S_OK;
}

static void Gstreamer_transform_pad_added(GstElement *filter, GstPad *pad, GstTfImpl *This)
{
    int ret;
    if (!GST_PAD_IS_SRC(pad))
        return;

    ret = gst_pad_link(pad, This->my_sink);
    if (ret < 0)
        WARN("Failed to link with %i\n", ret);
    This->their_src = pad;

    gst_pad_set_active(pad, TRUE);
    gst_pad_set_active(This->my_sink, TRUE);
}

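/* Instantiate the wrapped GStreamer element and wire it up: my_src feeds
 * DirectShow input data into the element's sink pad, my_sink receives the
 * element's output through got_data()/request_buffer(). Elements that create
 * their source pad dynamically are handled via the "pad-added" signal. */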
static HRESULT Gstreamer_transform_ConnectInput(GstTfImpl *This, const AM_MEDIA_TYPE *amt, GstCaps *capsin, GstCaps *capsout) {
    GstIterator *it;
    int done = 0, found = 0, ret;

    This->filter = gst_element_factory_make(This->gstreamer_name, NULL);
    if (!This->filter) {
        FIXME("Could not make %s filter\n", This->gstreamer_name);
        return E_FAIL;
    }
    This->my_src = gst_pad_new(NULL, GST_PAD_SRC);
    gst_pad_set_element_private (This->my_src, This);

    This->my_sink = gst_pad_new(NULL, GST_PAD_SINK);
    gst_pad_set_chain_function(This->my_sink, got_data);
    gst_pad_set_bufferalloc_function(This->my_sink, request_buffer);
    gst_pad_set_element_private (This->my_sink, This);

    ret = gst_pad_set_caps(This->my_src, capsin);
    if (!ret) {
        WARN("Failed to set caps on own source with %i\n", ret);
        return E_FAIL;
    }

    ret = gst_pad_set_caps(This->my_sink, capsout);
    if (!ret) {
        WARN("Failed to set caps on own sink with %i\n", ret);
        return E_FAIL;
    }

    it = gst_element_iterate_sink_pads(This->filter);
    while (!done) {
        gpointer item;

        switch (gst_iterator_next(it, &item)) {
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync (it);
            break;
        case GST_ITERATOR_OK:
            This->their_sink = item;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = 1;
            break;
        }
    }
    gst_iterator_free(it);
    if (!This->their_sink) {
        ERR("Could not find sink on filter %s\n", This->gstreamer_name);
        return E_FAIL;
    }

    it = gst_element_iterate_src_pads(This->filter);
    gst_iterator_resync(it);
    done = 0;
    while (!done) {
        gpointer item;

        switch (gst_iterator_next(it, &item)) {
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync (it);
            break;
        case GST_ITERATOR_OK:
            This->their_src = item;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = 1;
            break;
        }
    }
    gst_iterator_free(it);
    found = !!This->their_src;

    g_signal_connect(This->filter, "pad-added", G_CALLBACK(Gstreamer_transform_pad_added), This);
    ret = gst_pad_link(This->my_src, This->their_sink);
    if (ret < 0) {
        WARN("Failed to link with %i\n", ret);
        return E_FAIL;
    }

    if (found)
        Gstreamer_transform_pad_added(This->filter, This->their_src, This);

    if (!gst_pad_is_linked(This->my_sink))
        return E_FAIL;

    TRACE("Connected\n");
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_Cleanup(TransformFilter *tf, PIN_DIRECTION dir) {
    GstTfImpl *This = (GstTfImpl*)tf;

    if (dir == PINDIR_INPUT)
    {
        if (This->filter) {
            gst_element_set_state(This->filter, GST_STATE_NULL);
            gst_object_unref(This->filter);
        }
        This->filter = NULL;
        if (This->my_src) {
            gst_pad_unlink(This->my_src, This->their_sink);
            gst_object_unref(This->my_src);
        }
        if (This->my_sink) {
            gst_pad_unlink(This->their_src, This->my_sink);
            gst_object_unref(This->my_sink);
        }
        This->my_sink = This->my_src = This->their_sink = This->their_src = NULL;
    }
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_EndOfStream(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_eos());
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_BeginFlush(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_flush_start());
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_EndFlush(TransformFilter *iface) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_flush_stop());
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_NewSegment(TransformFilter *iface, REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate) {
    GstTfImpl *This = (GstTfImpl*)iface;

    gst_pad_push_event(This->my_src, gst_event_new_new_segment_full(1,
                       1.0, dRate, GST_FORMAT_TIME, 0, tStop <= tStart ? -1 : tStop * 100, tStart*100));
    return S_OK;
}

static HRESULT WINAPI Gstreamer_transform_QOS(TransformFilter *iface, IBaseFilter *sender, Quality qm) {
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME late = qm.Late;
    if (qm.Late < 0 && -qm.Late > qm.TimeStamp)
        late = -qm.TimeStamp;
    gst_pad_push_event(This->my_sink, gst_event_new_qos(1000. / qm.Proportion, late * 100, qm.TimeStamp * 100));
    return TransformFilterImpl_Notify(iface, sender, qm);
}

static HRESULT Gstreamer_transform_create(IUnknown *punkout, const CLSID *clsid, const char *name, const TransformFilterFuncTable *vtbl, void **obj)
{
    GstTfImpl *This;

    if (FAILED(TransformFilter_Construct(&GSTTf_Vtbl, sizeof(GstTfImpl), clsid, vtbl, (IBaseFilter**)&This)))
        return E_OUTOFMEMORY;

    This->gstreamer_name = name;
    *obj = This;

    return S_OK;
}

static HRESULT WINAPI Gstreamer_Mp3_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if ( (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) &&
          !IsEqualGUID(&amt->majortype, &MEDIATYPE_Stream)) ||
         (!IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_MPEG1AudioPayload) &&
          !IsEqualGUID(&amt->subtype, &WMMEDIASUBTYPE_MP3))
         || !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx))
        return S_FALSE;

    return S_OK;
}

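/* Accepting an MPEG-1 audio input: build "audio/mpeg" caps for the input side
 * and request 16-bit interleaved PCM ("audio/x-raw-int") on the output, then
 * hand both caps to the generic ConnectInput helper. */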
static HRESULT WINAPI Gstreamer_Mp3_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *wfx, *wfxin;
    HRESULT hr;
    int layer;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_Mp3_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return VFW_E_TYPE_NOT_ACCEPTED;

    wfxin = (WAVEFORMATEX*)amt->pbFormat;
    switch (wfxin->wFormatTag) {
        case WAVE_FORMAT_MPEGLAYER3:
            layer = 3;
            break;
        case WAVE_FORMAT_MPEG: {
            MPEG1WAVEFORMAT *mpgformat = (MPEG1WAVEFORMAT*)wfxin;
            layer = mpgformat->fwHeadLayer;
            break;
        }
        default:
            FIXME("Unhandled tag %x\n", wfxin->wFormatTag);
            return E_FAIL;
    }

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    outpmt->subtype = MEDIASUBTYPE_PCM;
    outpmt->formattype = FORMAT_WaveFormatEx;
    outpmt->cbFormat = sizeof(*wfx);
    CoTaskMemFree(outpmt->pbFormat);
    wfx = CoTaskMemAlloc(outpmt->cbFormat);
    outpmt->pbFormat = (BYTE*)wfx;
    wfx->wFormatTag = WAVE_FORMAT_PCM;
    wfx->wBitsPerSample = 16;
    wfx->nSamplesPerSec = wfxin->nSamplesPerSec;
    wfx->nChannels = wfxin->nChannels;
    wfx->nBlockAlign = wfx->wBitsPerSample * wfx->nChannels / 8;
    wfx->cbSize = 0;
    wfx->nAvgBytesPerSec = wfx->nSamplesPerSec * wfx->nBlockAlign;

    capsin = gst_caps_new_simple("audio/mpeg",
                                 "mpegversion", G_TYPE_INT, 1,
                                 "layer", G_TYPE_INT, layer,
                                 "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                 "channels", G_TYPE_INT, wfx->nChannels,
                                 NULL);
    capsout = gst_caps_new_simple("audio/x-raw-int",
                                  "endianness", G_TYPE_INT, 1234,
                                  "signed", G_TYPE_BOOLEAN, 1,
                                  "width", G_TYPE_INT, 16,
                                  "depth", G_TYPE_INT, 16,
                                  "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                  "channels", G_TYPE_INT, wfx->nChannels,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = wfx->nAvgBytesPerSec / 4;

    return hr;
}

static HRESULT WINAPI Gstreamer_Mp3_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    return S_OK;
}

static const TransformFilterFuncTable Gstreamer_Mp3_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_Mp3_QueryConnect,
    Gstreamer_Mp3_SetMediaType,
    Gstreamer_Mp3_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};

IUnknown * CALLBACK Gstreamer_Mp3_create(IUnknown *punkout, HRESULT *phr)
{
    const char *plugin;
    IUnknown *obj = NULL;

    if (!Gstreamer_init())
    {
        *phr = E_FAIL;
        return NULL;
    }

    plugin = Gstreamer_FindMatch("audio/mpeg, mpegversion=(int) 1");
    if (!plugin)
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkout, &CLSID_Gstreamer_Mp3, plugin, &Gstreamer_Mp3_vtbl, (LPVOID*)&obj);
    return obj;
}

static HRESULT WINAPI Gstreamer_YUV_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Video) ||
        (!IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo) &&
         !IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo2)))
        return S_FALSE;
    if (memcmp(&amt->subtype.Data2, &MEDIATYPE_Video.Data2, sizeof(GUID) - sizeof(amt->subtype.Data1)))
        return S_FALSE;

    switch (amt->subtype.Data1) {
        case mmioFOURCC('I','4','2','0'):
        case mmioFOURCC('Y','V','1','2'):
        case mmioFOURCC('N','V','1','2'):
        case mmioFOURCC('N','V','2','1'):
        case mmioFOURCC('Y','U','Y','2'):
        case mmioFOURCC('Y','V','Y','U'):
            return S_OK;
        default:
            WARN("Unhandled fourcc %s\n", debugstr_an((char*)&amt->subtype.Data1, 4));
            return S_FALSE;
    }
}

static HRESULT WINAPI Gstreamer_YUV_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    return S_OK;
}

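/* Accepting packed/planar YUV video: negotiate the matching "video/x-raw-yuv"
 * caps on the input and 24-bit top-down RGB ("video/x-raw-rgb") on the output,
 * rewriting the output media type to MEDIASUBTYPE_RGB24 accordingly. */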
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return VFW_E_TYPE_NOT_ACCEPTED;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    capsin = gst_caps_new_simple("video/x-raw-yuv",
                                 "format", GST_TYPE_FOURCC, amt->subtype.Data1,
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw-rgb",
                                  "endianness", G_TYPE_INT, 4321,
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  "bpp", G_TYPE_INT, 24,
                                  "depth", G_TYPE_INT, 24,
                                  "red_mask", G_TYPE_INT, 0xff,
                                  "green_mask", G_TYPE_INT, 0xff00,
                                  "blue_mask", G_TYPE_INT, 0xff0000,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;

    return hr;
}

static const TransformFilterFuncTable Gstreamer_YUV_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_YUV_QueryConnect,
    Gstreamer_YUV_SetMediaType,
    Gstreamer_YUV_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};

IUnknown * CALLBACK Gstreamer_YUV_create(IUnknown *punkout, HRESULT *phr)
{
    IUnknown *obj = NULL;

    if (!Gstreamer_init())
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkout, &CLSID_Gstreamer_YUV, "ffmpegcolorspace", &Gstreamer_YUV_vtbl, (LPVOID*)&obj);
    return obj;
}

static HRESULT WINAPI Gstreamer_AudioConvert_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) ||
        !IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_PCM) ||
        !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx))
        return S_FALSE;

    return S_OK;
}

static HRESULT WINAPI Gstreamer_AudioConvert_ConnectInput(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    return S_OK;
}

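/* Accepting arbitrary PCM (integer or IEEE float, any depth): describe the
 * input with matching raw-audio caps and request stereo 16-bit integer PCM
 * on the output, publishing the result as a WAVEFORMATEXTENSIBLE type. */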
static HRESULT WINAPI Gstreamer_AudioConvert_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *inwfe;
    WAVEFORMATEX *outwfe;
    WAVEFORMATEXTENSIBLE *outwfx;
    HRESULT hr;
    int inisfloat = 0, indepth;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_AudioConvert_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return VFW_E_TYPE_NOT_ACCEPTED;

    FreeMediaType(outpmt);
    *outpmt = *amt;

    outpmt->cbFormat = sizeof(WAVEFORMATEXTENSIBLE);
    outpmt->pbFormat = CoTaskMemAlloc(outpmt->cbFormat);

    inwfe = (WAVEFORMATEX*)amt->pbFormat;
    indepth = inwfe->wBitsPerSample;
    if (inwfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
        WAVEFORMATEXTENSIBLE *inwfx = (WAVEFORMATEXTENSIBLE*)inwfe;
        inisfloat = IsEqualGUID(&inwfx->SubFormat, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT);
        if (inwfx->Samples.wValidBitsPerSample)
            indepth = inwfx->Samples.wValidBitsPerSample;
    }

    capsin = gst_caps_new_simple(inisfloat ? "audio/x-raw-float" : "audio/x-raw-int",
                                 "endianness", G_TYPE_INT, 1234,
                                 "width", G_TYPE_INT, inwfe->wBitsPerSample,
                                 "depth", G_TYPE_INT, indepth,
                                 "channels", G_TYPE_INT, inwfe->nChannels,
                                 "rate", G_TYPE_INT, inwfe->nSamplesPerSec,
                                 NULL);

    outwfe = (WAVEFORMATEX*)outpmt->pbFormat;
    outwfx = (WAVEFORMATEXTENSIBLE*)outwfe;
    outwfe->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    outwfe->nChannels = 2;
    outwfe->nSamplesPerSec = inwfe->nSamplesPerSec;
    outwfe->wBitsPerSample = 16;
    outwfe->nBlockAlign = outwfe->nChannels * outwfe->wBitsPerSample / 8;
    outwfe->nAvgBytesPerSec = outwfe->nBlockAlign * outwfe->nSamplesPerSec;
    outwfe->cbSize = sizeof(*outwfx) - sizeof(*outwfe);
    outwfx->Samples.wValidBitsPerSample = outwfe->wBitsPerSample;
    outwfx->dwChannelMask = SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT;
    outwfx->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

    capsout = gst_caps_new_simple("audio/x-raw-int",
                                  "endianness", G_TYPE_INT, 1234,
                                  "width", G_TYPE_INT, outwfe->wBitsPerSample,
                                  "depth", G_TYPE_INT, outwfx->Samples.wValidBitsPerSample,
                                  "channels", G_TYPE_INT, outwfe->nChannels,
                                  "rate", G_TYPE_INT, outwfe->nSamplesPerSec,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = inwfe->nAvgBytesPerSec;

    return hr;
}

static const TransformFilterFuncTable Gstreamer_AudioConvert_vtbl = {
    Gstreamer_transform_DecideBufferSize,
    Gstreamer_transform_ProcessBegin,
    Gstreamer_transform_ProcessData,
    Gstreamer_transform_ProcessEnd,
    Gstreamer_AudioConvert_QueryConnect,
    Gstreamer_AudioConvert_SetMediaType,
    Gstreamer_AudioConvert_ConnectInput,
    Gstreamer_transform_Cleanup,
    Gstreamer_transform_EndOfStream,
    Gstreamer_transform_BeginFlush,
    Gstreamer_transform_EndFlush,
    Gstreamer_transform_NewSegment,
    Gstreamer_transform_QOS
};

IUnknown * CALLBACK Gstreamer_AudioConvert_create(IUnknown *punkout, HRESULT *phr)
{
    IUnknown *obj = NULL;

    if (!Gstreamer_init())
    {
        *phr = E_FAIL;
        return NULL;
    }

    *phr = Gstreamer_transform_create(punkout, &CLSID_Gstreamer_AudioConvert, "audioconvert", &Gstreamer_AudioConvert_vtbl, (LPVOID*)&obj);
    return obj;
}

static const IBaseFilterVtbl GSTTf_Vtbl =
{
    TransformFilterImpl_QueryInterface,
    BaseFilterImpl_AddRef,
    TransformFilterImpl_Release,
    BaseFilterImpl_GetClassID,
    TransformFilterImpl_Stop,
    TransformFilterImpl_Pause,
    TransformFilterImpl_Run,
    BaseFilterImpl_GetState,
    BaseFilterImpl_SetSyncSource,
    BaseFilterImpl_GetSyncSource,
    BaseFilterImpl_EnumPins,
    TransformFilterImpl_FindPin,
    BaseFilterImpl_QueryFilterInfo,
    BaseFilterImpl_JoinFilterGraph,
    BaseFilterImpl_QueryVendorInfo
};