/*
 * GStreamer wrapper filter
 *
 * Copyright 2010 Maarten Lankhorst for CodeWeavers
 * Copyright 2010 Aric Stewart for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */
25 #include <gst/app/gstappsink.h>
26 #include <gst/app/gstappsrc.h>
27 #include <gst/app/gstappbuffer.h>
29 #include "gst_private.h"
30 #include "gst_guids.h"
46 #include "wine/unicode.h"
47 #include "wine/debug.h"
50 DEFINE_GUID(WMMEDIASUBTYPE_MP3
, 0x00000055, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
52 WINE_DEFAULT_DEBUG_CHANNEL(gstreamer
);
59 static const IBaseFilterVtbl GSTTf_Vtbl
;
61 static gboolean
match_element(GstPluginFeature
*feature
, gpointer gdata
) {
62 struct typeinfo
*data
= (struct typeinfo
*)gdata
;
63 GstElementFactory
*factory
;
66 if (!GST_IS_ELEMENT_FACTORY(feature
))
68 factory
= GST_ELEMENT_FACTORY(feature
);
69 if (!strstr(gst_element_factory_get_klass(factory
), data
->type
))
71 for (list
= gst_element_factory_get_static_pad_templates(factory
); list
; list
= list
->next
) {
72 GstStaticPadTemplate
*pad
= (GstStaticPadTemplate
*)list
->data
;
75 if (pad
->direction
!= GST_PAD_SINK
)
77 caps
= gst_static_caps_get(&pad
->static_caps
);
78 ret
= gst_caps_is_always_compatible(caps
, data
->caps
);
86 static const char *Gstreamer_FindMatch(const char *strcaps
)
91 GstElementFactory
*bestfactory
= NULL
;
92 GstCaps
*caps
= gst_caps_from_string(strcaps
);
95 data
.type
= "Decoder";
96 copy
= gst_default_registry_feature_filter(match_element
, 0, &data
);
97 for (list
= copy
; list
; list
= list
->next
) {
98 GstElementFactory
*factory
= (GstElementFactory
*)list
->data
;
100 rank
= gst_plugin_feature_get_rank(GST_PLUGIN_FEATURE(factory
));
101 if (rank
> bestrank
|| !bestrank
) {
103 bestfactory
= factory
;
106 gst_caps_unref(caps
);
110 FIXME("Could not find plugin for %s\n", strcaps
);
113 return gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(bestfactory
));
116 typedef struct GstTfImpl
{
118 const char *gstreamer_name
;
120 GstPad
*my_src
, *my_sink
, *their_src
, *their_sink
;
124 static HRESULT WINAPI
Gstreamer_transform_ProcessBegin(TransformFilter
*iface
) {
125 GstTfImpl
*This
= (GstTfImpl
*)iface
;
128 ret
= gst_element_set_state(This
->filter
, GST_STATE_PLAYING
);
129 TRACE("Returned: %i\n", ret
);
133 static HRESULT WINAPI
Gstreamer_transform_DecideBufferSize(TransformFilter
*tf
, IMemAllocator
*pAlloc
, ALLOCATOR_PROPERTIES
*ppropInputRequest
)
135 GstTfImpl
*This
= (GstTfImpl
*)tf
;
136 ALLOCATOR_PROPERTIES actual
;
138 if (!ppropInputRequest
->cbAlign
)
139 ppropInputRequest
->cbAlign
= 1;
141 ppropInputRequest
->cbBuffer
= This
->cbBuffer
;
143 if (ppropInputRequest
->cBuffers
< 2)
144 ppropInputRequest
->cBuffers
= 2;
146 return IMemAllocator_SetProperties(pAlloc
, ppropInputRequest
, &actual
);
149 static void release_sample(void *data
) {
150 TRACE("Releasing %p\n", data
);
151 IMediaSample_Release((IMediaSample
*)data
);
154 static GstFlowReturn
got_data(GstPad
*pad
, GstBuffer
*buf
) {
155 GstTfImpl
*This
= gst_pad_get_element_private(pad
);
156 IMediaSample
*sample
= GST_APP_BUFFER(buf
)->priv
;
157 REFERENCE_TIME tStart
, tStop
;
160 if (GST_BUFFER_TIMESTAMP_IS_VALID(buf
) &&
161 GST_BUFFER_DURATION_IS_VALID(buf
)) {
162 tStart
= buf
->timestamp
/ 100;
163 tStop
= tStart
+ buf
->duration
/ 100;
164 IMediaSample_SetTime(sample
, &tStart
, &tStop
);
167 IMediaSample_SetTime(sample
, NULL
, NULL
);
168 if (GST_BUFFER_OFFSET_IS_VALID(buf
) &&
169 GST_BUFFER_OFFSET_END_IS_VALID(buf
)) {
170 tStart
= buf
->offset
/ 100;
171 tStop
= buf
->offset_end
/ 100;
172 IMediaSample_SetMediaTime(sample
, &tStart
, &tStop
);
175 IMediaSample_SetMediaTime(sample
, NULL
, NULL
);
177 IMediaSample_SetDiscontinuity(sample
, GST_BUFFER_FLAG_IS_SET(buf
, GST_BUFFER_FLAG_DISCONT
));
178 IMediaSample_SetPreroll(sample
, GST_BUFFER_FLAG_IS_SET(buf
, GST_BUFFER_FLAG_PREROLL
));
179 IMediaSample_SetSyncPoint(sample
, !GST_BUFFER_FLAG_IS_SET(buf
, GST_BUFFER_FLAG_DELTA_UNIT
));
180 IMediaSample_SetActualDataLength(sample
, GST_BUFFER_SIZE(buf
));
182 hr
= BaseOutputPinImpl_Deliver((BaseOutputPin
*)This
->tf
.ppPins
[1], sample
);
183 gst_buffer_unref(buf
);
185 return GST_FLOW_WRONG_STATE
;
187 return GST_FLOW_RESEND
;
191 static GstFlowReturn
request_buffer(GstPad
*pad
, guint64 ofs
, guint size
, GstCaps
*caps
, GstBuffer
**buf
) {
192 GstTfImpl
*This
= gst_pad_get_element_private(pad
);
193 IMediaSample
*sample
;
196 TRACE("Requesting buffer\n");
198 hr
= BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin
*)This
->tf
.ppPins
[1], &sample
, NULL
, NULL
, 0);
200 ERR("Could not get output buffer: %08x\n", hr
);
201 return GST_FLOW_WRONG_STATE
;
203 IMediaSample_SetActualDataLength(sample
, size
);
204 IMediaSample_GetPointer(sample
, &ptr
);
205 *buf
= gst_app_buffer_new(ptr
, size
, release_sample
, sample
);
208 IMediaSample_Release(sample
);
209 ERR("Out of memory\n");
210 return GST_FLOW_ERROR
;
213 caps
= gst_pad_get_caps_reffed(This
->my_sink
);
214 gst_buffer_set_caps(*buf
, caps
);
218 static HRESULT WINAPI
Gstreamer_transform_ProcessData(TransformFilter
*iface
, IMediaSample
*sample
) {
219 GstTfImpl
*This
= (GstTfImpl
*)iface
;
220 REFERENCE_TIME tStart
, tStop
;
225 TRACE("Reading %p\n", sample
);
227 EnterCriticalSection(&This
->tf
.csReceive
);
228 IMediaSample_GetPointer(sample
, &data
);
229 buf
= gst_app_buffer_new(data
, IMediaSample_GetActualDataLength(sample
), release_sample
, sample
);
231 LeaveCriticalSection(&This
->tf
.csReceive
);
234 gst_buffer_set_caps(buf
, gst_pad_get_caps_reffed(This
->my_src
));
235 IMediaSample_AddRef(sample
);
236 buf
->duration
= buf
->timestamp
= -1;
237 hr
= IMediaSample_GetTime(sample
, &tStart
, &tStop
);
239 buf
->timestamp
= tStart
* 100;
241 buf
->duration
= (tStop
- tStart
)*100;
243 if (IMediaSample_GetMediaTime(sample
, &tStart
, &tStop
) == S_OK
) {
244 buf
->offset
= tStart
* 100;
245 buf
->offset_end
= tStop
* 100;
247 if (IMediaSample_IsDiscontinuity(sample
) == S_OK
)
248 GST_BUFFER_FLAG_SET(buf
, GST_BUFFER_FLAG_DISCONT
);
249 if (IMediaSample_IsPreroll(sample
) == S_OK
)
250 GST_BUFFER_FLAG_SET(buf
, GST_BUFFER_FLAG_PREROLL
);
251 if (IMediaSample_IsSyncPoint(sample
) != S_OK
)
252 GST_BUFFER_FLAG_SET(buf
, GST_BUFFER_FLAG_DELTA_UNIT
);
253 LeaveCriticalSection(&This
->tf
.csReceive
);
254 ret
= gst_pad_push(This
->my_src
, buf
);
256 WARN("Sending returned: %i\n", ret
);
257 if (ret
== GST_FLOW_ERROR
)
259 if (ret
== GST_FLOW_WRONG_STATE
)
260 return VFW_E_WRONG_STATE
;
261 if (ret
== GST_FLOW_RESEND
)
266 static HRESULT WINAPI
Gstreamer_transform_ProcessEnd(TransformFilter
*iface
) {
267 GstTfImpl
*This
= (GstTfImpl
*)iface
;
270 LeaveCriticalSection(&This
->tf
.csReceive
);
271 ret
= gst_element_set_state(This
->filter
, GST_STATE_READY
);
272 EnterCriticalSection(&This
->tf
.csReceive
);
273 TRACE("Returned: %i\n", ret
);
277 static void Gstreamer_transform_pad_added(GstElement
*filter
, GstPad
*pad
, GstTfImpl
*This
)
280 if (!GST_PAD_IS_SRC(pad
))
283 ret
= gst_pad_link(pad
, This
->my_sink
);
285 WARN("Failed to link with %i\n", ret
);
286 This
->their_src
= pad
;
288 gst_pad_set_active(pad
, TRUE
);
289 gst_pad_set_active(This
->my_sink
, TRUE
);
292 static HRESULT
Gstreamer_transform_ConnectInput(GstTfImpl
*This
, const AM_MEDIA_TYPE
*amt
, GstCaps
*capsin
, GstCaps
*capsout
) {
294 BOOL done
= FALSE
, found
= FALSE
;
297 This
->filter
= gst_element_factory_make(This
->gstreamer_name
, NULL
);
299 FIXME("Could not make %s filter\n", This
->gstreamer_name
);
302 This
->my_src
= gst_pad_new(NULL
, GST_PAD_SRC
);
303 gst_pad_set_element_private (This
->my_src
, This
);
305 This
->my_sink
= gst_pad_new(NULL
, GST_PAD_SINK
);
306 gst_pad_set_chain_function(This
->my_sink
, got_data
);
307 gst_pad_set_bufferalloc_function(This
->my_sink
, request_buffer
);
308 gst_pad_set_element_private (This
->my_sink
, This
);
310 ret
= gst_pad_set_caps(This
->my_src
, capsin
);
312 WARN("Failed to set caps on own source with %i\n", ret
);
316 ret
= gst_pad_set_caps(This
->my_sink
, capsout
);
318 WARN("Failed to set caps on own sink with %i\n", ret
);
322 it
= gst_element_iterate_sink_pads(This
->filter
);
326 switch (gst_iterator_next(it
, &item
)) {
327 case GST_ITERATOR_RESYNC
:
328 gst_iterator_resync (it
);
330 case GST_ITERATOR_OK
:
331 This
->their_sink
= item
;
332 case GST_ITERATOR_ERROR
:
333 case GST_ITERATOR_DONE
:
338 gst_iterator_free(it
);
339 if (!This
->their_sink
) {
340 ERR("Could not find sink on filter %s\n", This
->gstreamer_name
);
344 it
= gst_element_iterate_src_pads(This
->filter
);
345 gst_iterator_resync(it
);
350 switch (gst_iterator_next(it
, &item
)) {
351 case GST_ITERATOR_RESYNC
:
352 gst_iterator_resync (it
);
354 case GST_ITERATOR_OK
:
355 This
->their_src
= item
;
356 case GST_ITERATOR_ERROR
:
357 case GST_ITERATOR_DONE
:
362 gst_iterator_free(it
);
363 found
= !!This
->their_src
;
365 g_signal_connect(This
->filter
, "pad-added", G_CALLBACK(Gstreamer_transform_pad_added
), This
);
366 ret
= gst_pad_link(This
->my_src
, This
->their_sink
);
368 WARN("Failed to link with %i\n", ret
);
373 Gstreamer_transform_pad_added(This
->filter
, This
->their_src
, This
);
375 if (!gst_pad_is_linked(This
->my_sink
))
378 TRACE("Connected\n");
382 static HRESULT WINAPI
Gstreamer_transform_Cleanup(TransformFilter
*tf
, PIN_DIRECTION dir
) {
383 GstTfImpl
*This
= (GstTfImpl
*)tf
;
385 if (dir
== PINDIR_INPUT
)
388 gst_element_set_state(This
->filter
, GST_STATE_NULL
);
389 gst_object_unref(This
->filter
);
393 gst_pad_unlink(This
->my_src
, This
->their_sink
);
394 gst_object_unref(This
->my_src
);
397 gst_pad_unlink(This
->their_src
, This
->my_sink
);
398 gst_object_unref(This
->my_sink
);
400 This
->my_sink
= This
->my_src
= This
->their_sink
= This
->their_src
= NULL
;
405 static HRESULT WINAPI
Gstreamer_transform_EndOfStream(TransformFilter
*iface
) {
406 GstTfImpl
*This
= (GstTfImpl
*)iface
;
409 gst_pad_push_event(This
->my_src
, gst_event_new_eos());
413 static HRESULT WINAPI
Gstreamer_transform_BeginFlush(TransformFilter
*iface
) {
414 GstTfImpl
*This
= (GstTfImpl
*)iface
;
417 gst_pad_push_event(This
->my_src
, gst_event_new_flush_start());
421 static HRESULT WINAPI
Gstreamer_transform_EndFlush(TransformFilter
*iface
) {
422 GstTfImpl
*This
= (GstTfImpl
*)iface
;
425 gst_pad_push_event(This
->my_src
, gst_event_new_flush_stop());
429 static HRESULT WINAPI
Gstreamer_transform_NewSegment(TransformFilter
*iface
, REFERENCE_TIME tStart
, REFERENCE_TIME tStop
, double dRate
) {
430 GstTfImpl
*This
= (GstTfImpl
*)iface
;
433 gst_pad_push_event(This
->my_src
, gst_event_new_new_segment_full(1,
434 1.0, dRate
, GST_FORMAT_TIME
, 0, tStop
<= tStart
? -1 : tStop
* 100, tStart
*100));
438 static HRESULT WINAPI
Gstreamer_transform_QOS(TransformFilter
*iface
, IBaseFilter
*sender
, Quality qm
) {
439 GstTfImpl
*This
= (GstTfImpl
*)iface
;
440 REFERENCE_TIME late
= qm
.Late
;
441 if (qm
.Late
< 0 && -qm
.Late
> qm
.TimeStamp
)
442 late
= -qm
.TimeStamp
;
443 gst_pad_push_event(This
->my_sink
, gst_event_new_qos(1000. / qm
.Proportion
, late
* 100, qm
.TimeStamp
* 100));
444 return TransformFilterImpl_Notify(iface
, sender
, qm
);
447 static HRESULT
Gstreamer_transform_create(IUnknown
*punkout
, const CLSID
*clsid
, const char *name
, const TransformFilterFuncTable
*vtbl
, void **obj
)
451 if (FAILED(TransformFilter_Construct(&GSTTf_Vtbl
, sizeof(GstTfImpl
), clsid
, vtbl
, (IBaseFilter
**)&This
)))
452 return E_OUTOFMEMORY
;
454 This
->gstreamer_name
= name
;
460 static HRESULT WINAPI
Gstreamer_Mp3_QueryConnect(TransformFilter
*iface
, const AM_MEDIA_TYPE
*amt
) {
461 GstTfImpl
*This
= (GstTfImpl
*)iface
;
462 TRACE("%p %p\n", This
, amt
);
463 dump_AM_MEDIA_TYPE(amt
);
465 if ( (!IsEqualGUID(&amt
->majortype
, &MEDIATYPE_Audio
) &&
466 !IsEqualGUID(&amt
->majortype
, &MEDIATYPE_Stream
)) ||
467 (!IsEqualGUID(&amt
->subtype
, &MEDIASUBTYPE_MPEG1AudioPayload
) &&
468 !IsEqualGUID(&amt
->subtype
, &WMMEDIASUBTYPE_MP3
))
469 || !IsEqualGUID(&amt
->formattype
, &FORMAT_WaveFormatEx
))
475 static HRESULT WINAPI
Gstreamer_Mp3_SetMediaType(TransformFilter
*tf
, PIN_DIRECTION dir
, const AM_MEDIA_TYPE
*amt
) {
476 GstTfImpl
*This
= (GstTfImpl
*)tf
;
477 GstCaps
*capsin
, *capsout
;
478 AM_MEDIA_TYPE
*outpmt
= &This
->tf
.pmt
;
479 WAVEFORMATEX
*wfx
, *wfxin
;
483 if (dir
!= PINDIR_INPUT
)
486 if (Gstreamer_Mp3_QueryConnect(&This
->tf
, amt
) == S_FALSE
|| !amt
->pbFormat
)
487 return VFW_E_TYPE_NOT_ACCEPTED
;
489 wfxin
= (WAVEFORMATEX
*)amt
->pbFormat
;
490 switch (wfxin
->wFormatTag
) {
491 case WAVE_FORMAT_MPEGLAYER3
:
494 case WAVE_FORMAT_MPEG
: {
495 MPEG1WAVEFORMAT
*mpgformat
= (MPEG1WAVEFORMAT
*)wfxin
;
496 layer
= mpgformat
->fwHeadLayer
;
500 FIXME("Unhandled tag %x\n", wfxin
->wFormatTag
);
504 FreeMediaType(outpmt
);
505 CopyMediaType(outpmt
, amt
);
507 outpmt
->subtype
= MEDIASUBTYPE_PCM
;
508 outpmt
->formattype
= FORMAT_WaveFormatEx
;
509 outpmt
->cbFormat
= sizeof(*wfx
);
510 CoTaskMemFree(outpmt
->pbFormat
);
511 wfx
= CoTaskMemAlloc(outpmt
->cbFormat
);
512 outpmt
->pbFormat
= (BYTE
*)wfx
;
513 wfx
->wFormatTag
= WAVE_FORMAT_PCM
;
514 wfx
->wBitsPerSample
= 16;
515 wfx
->nSamplesPerSec
= wfxin
->nSamplesPerSec
;
516 wfx
->nChannels
= wfxin
->nChannels
;
517 wfx
->nBlockAlign
= wfx
->wBitsPerSample
* wfx
->nChannels
/ 8;
519 wfx
->nAvgBytesPerSec
= wfx
->nSamplesPerSec
* wfx
->nBlockAlign
;
521 capsin
= gst_caps_new_simple("audio/mpeg",
522 "mpegversion", G_TYPE_INT
, 1,
523 "layer", G_TYPE_INT
, layer
,
524 "rate", G_TYPE_INT
, wfx
->nSamplesPerSec
,
525 "channels", G_TYPE_INT
, wfx
->nChannels
,
527 capsout
= gst_caps_new_simple("audio/x-raw-int",
528 "endianness", G_TYPE_INT
, 1234,
529 "signed", G_TYPE_BOOLEAN
, 1,
530 "width", G_TYPE_INT
, 16,
531 "depth", G_TYPE_INT
, 16,
532 "rate", G_TYPE_INT
, wfx
->nSamplesPerSec
,
533 "channels", G_TYPE_INT
, wfx
->nChannels
,
536 hr
= Gstreamer_transform_ConnectInput(This
, amt
, capsin
, capsout
);
537 gst_caps_unref(capsin
);
538 gst_caps_unref(capsout
);
540 This
->cbBuffer
= wfx
->nAvgBytesPerSec
/ 4;
545 static HRESULT WINAPI
Gstreamer_Mp3_ConnectInput(TransformFilter
*tf
, PIN_DIRECTION dir
, IPin
*pin
)
550 static const TransformFilterFuncTable Gstreamer_Mp3_vtbl
= {
551 Gstreamer_transform_DecideBufferSize
,
552 Gstreamer_transform_ProcessBegin
,
553 Gstreamer_transform_ProcessData
,
554 Gstreamer_transform_ProcessEnd
,
555 Gstreamer_Mp3_QueryConnect
,
556 Gstreamer_Mp3_SetMediaType
,
557 Gstreamer_Mp3_ConnectInput
,
558 Gstreamer_transform_Cleanup
,
559 Gstreamer_transform_EndOfStream
,
560 Gstreamer_transform_BeginFlush
,
561 Gstreamer_transform_EndFlush
,
562 Gstreamer_transform_NewSegment
,
563 Gstreamer_transform_QOS
566 IUnknown
* CALLBACK
Gstreamer_Mp3_create(IUnknown
*punkout
, HRESULT
*phr
)
569 IUnknown
*obj
= NULL
;
570 if (!Gstreamer_init())
575 plugin
= Gstreamer_FindMatch("audio/mpeg, mpegversion=(int) 1");
581 *phr
= Gstreamer_transform_create(punkout
, &CLSID_Gstreamer_Mp3
, plugin
, &Gstreamer_Mp3_vtbl
, (LPVOID
*)&obj
);
585 static HRESULT WINAPI
Gstreamer_YUV_QueryConnect(TransformFilter
*iface
, const AM_MEDIA_TYPE
*amt
) {
586 GstTfImpl
*This
= (GstTfImpl
*)iface
;
587 TRACE("%p %p\n", This
, amt
);
588 dump_AM_MEDIA_TYPE(amt
);
590 if (!IsEqualGUID(&amt
->majortype
, &MEDIATYPE_Video
) ||
591 (!IsEqualGUID(&amt
->formattype
, &FORMAT_VideoInfo
) &&
592 !IsEqualGUID(&amt
->formattype
, &FORMAT_VideoInfo2
)))
594 if (memcmp(&amt
->subtype
.Data2
, &MEDIATYPE_Video
.Data2
, sizeof(GUID
) - sizeof(amt
->subtype
.Data1
)))
596 switch (amt
->subtype
.Data1
) {
597 case mmioFOURCC('I','4','2','0'):
598 case mmioFOURCC('Y','V','1','2'):
599 case mmioFOURCC('N','V','1','2'):
600 case mmioFOURCC('N','V','2','1'):
601 case mmioFOURCC('Y','U','Y','2'):
602 case mmioFOURCC('Y','V','Y','U'):
605 WARN("Unhandled fourcc %s\n", debugstr_an((char*)&amt
->subtype
.Data1
, 4));
610 static HRESULT WINAPI
Gstreamer_YUV_ConnectInput(TransformFilter
*tf
, PIN_DIRECTION dir
, IPin
*pin
)
615 static HRESULT WINAPI
Gstreamer_YUV_SetMediaType(TransformFilter
*tf
, PIN_DIRECTION dir
, const AM_MEDIA_TYPE
*amt
) {
616 GstTfImpl
*This
= (GstTfImpl
*)tf
;
617 GstCaps
*capsin
, *capsout
;
618 AM_MEDIA_TYPE
*outpmt
= &This
->tf
.pmt
;
623 if (dir
!= PINDIR_INPUT
)
626 if (Gstreamer_YUV_QueryConnect(&This
->tf
, amt
) == S_FALSE
|| !amt
->pbFormat
)
629 FreeMediaType(outpmt
);
630 CopyMediaType(outpmt
, amt
);
632 if (IsEqualGUID(&amt
->formattype
, &FORMAT_VideoInfo
)) {
633 VIDEOINFOHEADER
*vih
= (VIDEOINFOHEADER
*)outpmt
->pbFormat
;
634 avgtime
= vih
->AvgTimePerFrame
;
635 width
= vih
->bmiHeader
.biWidth
;
636 height
= vih
->bmiHeader
.biHeight
;
637 if (vih
->bmiHeader
.biHeight
> 0)
638 vih
->bmiHeader
.biHeight
= -vih
->bmiHeader
.biHeight
;
639 vih
->bmiHeader
.biBitCount
= 24;
640 vih
->bmiHeader
.biCompression
= BI_RGB
;
641 vih
->bmiHeader
.biSizeImage
= width
* abs(height
) * 3;
643 VIDEOINFOHEADER2
*vih
= (VIDEOINFOHEADER2
*)outpmt
->pbFormat
;
644 avgtime
= vih
->AvgTimePerFrame
;
645 width
= vih
->bmiHeader
.biWidth
;
646 height
= vih
->bmiHeader
.biHeight
;
647 if (vih
->bmiHeader
.biHeight
> 0)
648 vih
->bmiHeader
.biHeight
= -vih
->bmiHeader
.biHeight
;
649 vih
->bmiHeader
.biBitCount
= 24;
650 vih
->bmiHeader
.biCompression
= BI_RGB
;
651 vih
->bmiHeader
.biSizeImage
= width
* abs(height
) * 3;
654 avgtime
= 10000000 / 30;
656 outpmt
->subtype
= MEDIASUBTYPE_RGB24
;
658 capsin
= gst_caps_new_simple("video/x-raw-yuv",
659 "format", GST_TYPE_FOURCC
, amt
->subtype
.Data1
,
660 "width", G_TYPE_INT
, width
,
661 "height", G_TYPE_INT
, height
,
662 "framerate", GST_TYPE_FRACTION
, 10000000, avgtime
,
664 capsout
= gst_caps_new_simple("video/x-raw-rgb",
665 "endianness", G_TYPE_INT
, 4321,
666 "width", G_TYPE_INT
, width
,
667 "height", G_TYPE_INT
, height
,
668 "framerate", GST_TYPE_FRACTION
, 10000000, avgtime
,
669 "bpp", G_TYPE_INT
, 24,
670 "depth", G_TYPE_INT
, 24,
671 "red_mask", G_TYPE_INT
, 0xff,
672 "green_mask", G_TYPE_INT
, 0xff00,
673 "blue_mask", G_TYPE_INT
, 0xff0000,
676 hr
= Gstreamer_transform_ConnectInput(This
, amt
, capsin
, capsout
);
677 gst_caps_unref(capsin
);
678 gst_caps_unref(capsout
);
680 This
->cbBuffer
= width
* height
* 4;
684 static const TransformFilterFuncTable Gstreamer_YUV_vtbl
= {
685 Gstreamer_transform_DecideBufferSize
,
686 Gstreamer_transform_ProcessBegin
,
687 Gstreamer_transform_ProcessData
,
688 Gstreamer_transform_ProcessEnd
,
689 Gstreamer_YUV_QueryConnect
,
690 Gstreamer_YUV_SetMediaType
,
691 Gstreamer_YUV_ConnectInput
,
692 Gstreamer_transform_Cleanup
,
693 Gstreamer_transform_EndOfStream
,
694 Gstreamer_transform_BeginFlush
,
695 Gstreamer_transform_EndFlush
,
696 Gstreamer_transform_NewSegment
,
697 Gstreamer_transform_QOS
700 IUnknown
* CALLBACK
Gstreamer_YUV_create(IUnknown
*punkout
, HRESULT
*phr
)
702 IUnknown
*obj
= NULL
;
703 if (!Gstreamer_init())
708 *phr
= Gstreamer_transform_create(punkout
, &CLSID_Gstreamer_YUV
, "ffmpegcolorspace", &Gstreamer_YUV_vtbl
, (LPVOID
*)&obj
);
712 static HRESULT WINAPI
Gstreamer_AudioConvert_QueryConnect(TransformFilter
*iface
, const AM_MEDIA_TYPE
*amt
) {
713 GstTfImpl
*This
= (GstTfImpl
*)iface
;
714 TRACE("%p %p\n", This
, amt
);
715 dump_AM_MEDIA_TYPE(amt
);
717 if (!IsEqualGUID(&amt
->majortype
, &MEDIATYPE_Audio
) ||
718 !IsEqualGUID(&amt
->subtype
, &MEDIASUBTYPE_PCM
) ||
719 !IsEqualGUID(&amt
->formattype
, &FORMAT_WaveFormatEx
))
724 static HRESULT WINAPI
Gstreamer_AudioConvert_ConnectInput(TransformFilter
*tf
, PIN_DIRECTION dir
, IPin
*pin
)
729 static HRESULT WINAPI
Gstreamer_AudioConvert_SetMediaType(TransformFilter
*tf
, PIN_DIRECTION dir
, const AM_MEDIA_TYPE
*amt
) {
730 GstTfImpl
*This
= (GstTfImpl
*)tf
;
731 GstCaps
*capsin
, *capsout
;
732 AM_MEDIA_TYPE
*outpmt
= &This
->tf
.pmt
;
734 WAVEFORMATEX
*outwfe
;
735 WAVEFORMATEXTENSIBLE
*outwfx
;
737 BOOL inisfloat
= FALSE
;
740 if (dir
!= PINDIR_INPUT
)
743 if (Gstreamer_AudioConvert_QueryConnect(&This
->tf
, amt
) == S_FALSE
|| !amt
->pbFormat
)
746 FreeMediaType(outpmt
);
749 outpmt
->cbFormat
= sizeof(WAVEFORMATEXTENSIBLE
);
750 outpmt
->pbFormat
= CoTaskMemAlloc(outpmt
->cbFormat
);
752 inwfe
= (WAVEFORMATEX
*)amt
->pbFormat
;
753 indepth
= inwfe
->wBitsPerSample
;
754 if (inwfe
->wFormatTag
== WAVE_FORMAT_EXTENSIBLE
) {
755 WAVEFORMATEXTENSIBLE
*inwfx
= (WAVEFORMATEXTENSIBLE
*)inwfe
;
756 inisfloat
= IsEqualGUID(&inwfx
->SubFormat
, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT
);
757 if (inwfx
->Samples
.wValidBitsPerSample
)
758 indepth
= inwfx
->Samples
.wValidBitsPerSample
;
761 capsin
= gst_caps_new_simple(inisfloat
? "audio/x-raw-float" : "audio/x-raw-int",
762 "endianness", G_TYPE_INT
, 1234,
763 "width", G_TYPE_INT
, inwfe
->wBitsPerSample
,
764 "depth", G_TYPE_INT
, indepth
,
765 "channels", G_TYPE_INT
, inwfe
->nChannels
,
766 "rate", G_TYPE_INT
, inwfe
->nSamplesPerSec
,
769 outwfe
= (WAVEFORMATEX
*)outpmt
->pbFormat
;
770 outwfx
= (WAVEFORMATEXTENSIBLE
*)outwfe
;
771 outwfe
->wFormatTag
= WAVE_FORMAT_EXTENSIBLE
;
772 outwfe
->nChannels
= 2;
773 outwfe
->nSamplesPerSec
= inwfe
->nSamplesPerSec
;
774 outwfe
->wBitsPerSample
= 16;
775 outwfe
->nBlockAlign
= outwfe
->nChannels
* outwfe
->wBitsPerSample
/ 8;
776 outwfe
->nAvgBytesPerSec
= outwfe
->nBlockAlign
* outwfe
->nSamplesPerSec
;
777 outwfe
->cbSize
= sizeof(*outwfx
) - sizeof(*outwfe
);
778 outwfx
->Samples
.wValidBitsPerSample
= outwfe
->wBitsPerSample
;
779 outwfx
->dwChannelMask
= SPEAKER_FRONT_LEFT
|SPEAKER_FRONT_RIGHT
;
780 outwfx
->SubFormat
= KSDATAFORMAT_SUBTYPE_PCM
;
782 capsout
= gst_caps_new_simple("audio/x-raw-int",
783 "endianness", G_TYPE_INT
, 1234,
784 "width", G_TYPE_INT
, outwfe
->wBitsPerSample
,
785 "depth", G_TYPE_INT
, outwfx
->Samples
.wValidBitsPerSample
,
786 "channels", G_TYPE_INT
, outwfe
->nChannels
,
787 "rate", G_TYPE_INT
, outwfe
->nSamplesPerSec
,
790 hr
= Gstreamer_transform_ConnectInput(This
, amt
, capsin
, capsout
);
791 gst_caps_unref(capsin
);
792 gst_caps_unref(capsout
);
794 This
->cbBuffer
= inwfe
->nAvgBytesPerSec
;
798 static const TransformFilterFuncTable Gstreamer_AudioConvert_vtbl
= {
799 Gstreamer_transform_DecideBufferSize
,
800 Gstreamer_transform_ProcessBegin
,
801 Gstreamer_transform_ProcessData
,
802 Gstreamer_transform_ProcessEnd
,
803 Gstreamer_AudioConvert_QueryConnect
,
804 Gstreamer_AudioConvert_SetMediaType
,
805 Gstreamer_AudioConvert_ConnectInput
,
806 Gstreamer_transform_Cleanup
,
807 Gstreamer_transform_EndOfStream
,
808 Gstreamer_transform_BeginFlush
,
809 Gstreamer_transform_EndFlush
,
810 Gstreamer_transform_NewSegment
,
811 Gstreamer_transform_QOS
814 IUnknown
* CALLBACK
Gstreamer_AudioConvert_create(IUnknown
*punkout
, HRESULT
*phr
)
816 IUnknown
*obj
= NULL
;
817 if (!Gstreamer_init())
822 *phr
= Gstreamer_transform_create(punkout
, &CLSID_Gstreamer_AudioConvert
, "audioconvert", &Gstreamer_AudioConvert_vtbl
, (LPVOID
*)&obj
);
826 static const IBaseFilterVtbl GSTTf_Vtbl
=
828 TransformFilterImpl_QueryInterface
,
829 BaseFilterImpl_AddRef
,
830 TransformFilterImpl_Release
,
831 BaseFilterImpl_GetClassID
,
832 TransformFilterImpl_Stop
,
833 TransformFilterImpl_Pause
,
834 TransformFilterImpl_Run
,
835 BaseFilterImpl_GetState
,
836 BaseFilterImpl_SetSyncSource
,
837 BaseFilterImpl_GetSyncSource
,
838 BaseFilterImpl_EnumPins
,
839 TransformFilterImpl_FindPin
,
840 BaseFilterImpl_QueryFilterInfo
,
841 BaseFilterImpl_JoinFilterGraph
,
842 BaseFilterImpl_QueryVendorInfo