2 * This file is part of gtkD.
4 * gtkD is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU Lesser General Public License as published by
6 * the Free Software Foundation; either version 2.1 of the License, or
7 * (at your option) any later version.
9 * gtkD is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU Lesser General Public License for more details.
14 * You should have received a copy of the GNU Lesser General Public License
15 * along with gtkD; if not, write to the Free Software
16 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 * gstreamerD GNonLin video and two track audio example.
22 * This example isn't final quality, and it represents some not-so-good API
23 * found from gstreamerD. Also it doesn't yet do all the things it is supposed to
 * because of GNonLin not being finished. And therefore it has its share of bugs.
27 module gnonlin_video_example
;
34 import tango
.io
.Stdout
;
35 import Util
= tango
.text
.Util
;
36 import Integer
= tango
.text
.convert
.Integer
;
37 import tango
.util
.collection
.LinkSeq
;
38 import Stringz
= tango
.stdc
.stringz
;
42 import gtk
.MainWindow
;
48 import gstreamer
.gstreamer
;
49 import gstreamer
.Pipeline
;
51 import gstreamer
.Element
;
52 import gstreamer
.ElementFactory
;
54 import gstreamer
.Caps
;
55 import gstreamer
.Message
;
57 import gstreamerc
.gstreamertypes
;
61 const long FRAME_25
= (GST_SECOND
/ 25L);//40000000
63 class GnonlinHello
: MainWindow
67 this( char[] file1
, char[] file2
, char[] file3
, char[] file4
)
69 debug(GnonlinHello
) Stdout("GnonlinHello.this() START.").newline
;
70 debug(GnonlinHello
) scope(exit
) Stdout("GnonlinHello.this() END.").newline
;
72 super("Gnonlin helloworld");
74 vbox
= new VBox(false,0);
75 playButton
= new Button("Play");
76 playButton
.addOnClicked( &onPlay
);
77 vbox
.packStart( playButton
, false, false, 0 );
79 stopButton
= new Button("Stop");
80 stopButton
.addOnClicked( &onStop
);
81 vbox
.packStart( stopButton
, false, false, 0 );
83 seekButton
= new Button("Back to start");
84 seekButton
.addOnClicked( &onSeek
);
85 vbox
.packStart( seekButton
, false, false, 0 );
87 forwardOneButton
= new Button("Forward one");
88 forwardOneButton
.addOnClicked( &onForwardOne
);
89 vbox
.packStart( forwardOneButton
, false, false, 0 );
91 backwardOneButton
= new Button("Backward one");
92 backwardOneButton
.addOnClicked( &onBackwardOne
);
93 vbox
.packStart( backwardOneButton
, false, false, 0 );
101 pipeline
= new Pipeline("mypipeline");
103 pipeline
.getBus().addWatch( &busCall
);
105 gnl_video_composition
= new Bin( ElementFactory
.make("gnlcomposition", "gnl_video_composition") );
106 if( pipeline
.add( gnl_video_composition
) == false ) throw new Exception("pipeline.add(gnl_video_composition)");
107 gnl_video_composition
.addOnPadAdded(&newPad
);
112 Stdout("At the moment getting videotestsrc usually segfaults if you put even one Stdout before it.").newline
;
113 Stdout("Remove all Stdouts and debugs to make it work.").newline
;
114 //That's why debug=GnonlinHello is not on now.
115 //If you need the Stdouts, then comment-out the following six lines.
118 m_defaultVideoElement
= ElementFactory
.make( "videotestsrc", "default-videoelement" );
119 m_defaultVideoElement
.setProperty("pattern", 2);
120 m_defaultVideoSource
= new Bin( ElementFactory
.make( "gnlsource", "defaultvideosource") );
121 m_defaultVideoSource
.add( m_defaultVideoElement
);
122 m_defaultVideoSource
.setProperty("priority", 4294967295 );// 2 ** 32 -1 );
123 gnl_video_composition
.add( m_defaultVideoSource
);
126 gnl_audio_composition1
= new Bin( ElementFactory
.make("gnlcomposition", "gnl_audio_composition1") );
127 if( pipeline
.add( gnl_audio_composition1
) == false ) throw new Exception("pipeline.add(gnl_audio_composition1)");
128 gnl_audio_composition1
.addOnPadAdded(&newPad
);
130 gnl_audio_composition2
= new Bin( ElementFactory
.make("gnlcomposition", "gnl_audio_composition2") );
131 if( pipeline
.add( gnl_audio_composition2
) == false ) throw new Exception("pipeline.add(gnl_audio_composition2)");
132 gnl_audio_composition2
.addOnPadAdded(&newPad
);
134 m_defaultAudioElement1
= ElementFactory
.make( "audiotestsrc", "defaultaudioelement1" );
135 m_defaultAudioElement1
.setProperty("volume", 0);
136 m_defaultAudioSource1
= new Bin( ElementFactory
.make( "gnlsource", "defaultaudiosource1") );
137 m_defaultAudioSource1
.add( m_defaultAudioElement1
);
138 m_defaultAudioSource1
.setProperty("priority", 4294967295 );//2 ** 32 -1 );
140 gnl_audio_composition1
.add( m_defaultAudioSource1
);
143 //This is how'd you do it, if it worked. For some reason adding a defaultaudiosource
144 //to a gnlcomposition that has it's first gnlfilesource in other than zero
145 //ends up with errors in the playback.
146 //This makes it impossible for now to have a second track of audio, that doesn't
147 //start from zero. But as long as you have something starting from zero, it works.
148 m_defaultAudioElement2
= ElementFactory
.make( "audiotestsrc", "defaultaudioelement2" );
149 m_defaultAudioElement2
.setProperty("volume", 0);
150 m_defaultAudioSource2
= new Bin( ElementFactory
.make( "gnlsource", "defaultaudiosource2") );
151 m_defaultAudioSource2
.add( m_defaultAudioElement2
);
152 m_defaultAudioSource2
.setProperty("priority", 4294967295 );//2 ** 32 -1 );
154 gnl_audio_composition2
.add( m_defaultAudioSource2
);
159 //Test one of these: xvimagesink, ximagesink, sdlvideosink
160 //xvimagesink is accelerated
161 videosink
= ElementFactory
.make("xvimagesink", "video-output-xvimagesink");
162 if( videosink
.getElementStruct() is null )
164 Stdout("Creation of xvimagesink failed.").newline
;
167 if( pipeline
.add( videosink
) == false ) throw new Exception("pipeline.add(videosink)");
171 //The adder element can add/mix several audio tracks together.
172 audio_adder
= new Bin( ElementFactory
.make("adder", "audio_adder") );
173 if( pipeline
.add( audio_adder
) == false ) throw new Exception("pipeline.add(audio_adder)");
175 audioconvert
= ElementFactory
.make("audioconvert", "audioconverter");
176 if( pipeline
.add( audioconvert
) == false ) throw new Exception("pipeline.add(audioconvert)");
178 if( audio_adder
.link( audioconvert
) == false ) throw new Exception("audio_adder.link(audioconvert)");
180 audiosink
= ElementFactory
.make("alsasink", "audio-output-alsa");
181 if( pipeline
.add( audiosink
) == false ) throw new Exception("pipeline.add(audiosink)");
182 if( audioconvert
.link( audiosink
) == false ) throw new Exception("audioconvert.link(audiosink)");
187 debug(GnonlinHello
) Stdout("GnonlinHello.this() Initing gnlfilesources.").newline
;
189 //This isn't entirely necessary, but demonstrated here because this way you could
190 //use video files that have audio as audiofiles.
191 Caps videocaps
= Caps
.fromString("video/x-raw-yuv;video/x-raw-rgb");
192 Caps audiocaps
= Caps
.fromString("audio/x-raw-int;audio/x-raw-float");
196 debug(GnonlinHello
) Stdout("GnonlinHello.this() video_material1.").newline
;
198 video_material1
= ElementFactory
.make("gnlfilesource", "video_material1");
199 //video_material1.addOnPadAdded(&newPad);
200 debug(GnonlinHello
) Stdout("GnonlinHello.this() Trying to do gnl_video_composition.add( video_material1 ).").newline
;
201 if( (cast(Bin
)gnl_video_composition
).add( video_material1
) == false ) throw new Exception("gnl_video_composition.add(video_material1)");
203 debug(GnonlinHello
) Stdout("GnonlinHello.this() Setting properties for video_material1.").newline
;
205 video_material1
.location( file1
);
206 //alternatively: video_material1.setProperty("location", file1 );
208 //This is just an example of getting the properties. It is difficult at the moment.
209 //This API should be simplified:
210 scope Value locval
= new Value( "empty" );
211 video_material1
.getProperty( "location", locval
);
212 char[] loc
= locval
.getString();
213 debug(GnonlinHello
) Stdout("video_material1 location:", loc
).newline
;
215 video_material1
.setProperty("start", 0 * SECOND
);
216 video_material1
.setProperty("duration", 5 * SECOND
);
217 video_material1
.setProperty("media-start", 0 * SECOND
);
218 video_material1
.setProperty("media-duration", 5 * SECOND
);
220 //This isn't entirely necessary, but demonstrated here because this way you could
221 //use video files that have audio as audiofiles.
222 video_material1
.caps( videocaps
);
226 video_material2
= ElementFactory
.make("gnlfilesource", "video_material2");
227 if( (cast(Bin
)gnl_video_composition
).add( video_material2
) == false ) throw new Exception("gnl_video_composition.add(video_material2)");
229 video_material2
.location( file2
);
230 video_material2
.setProperty("start", 5 * SECOND
);
231 video_material2
.setProperty("duration", 5 * SECOND
);
232 video_material2
.setProperty("media-start", 0 * SECOND
);
233 video_material2
.setProperty("media-duration", 5 * SECOND
);
235 video_material2
.caps( videocaps
);
237 debug(GnonlinHello
) Stdout("Setting audio properties.").newline
;
239 audio1
= ElementFactory
.make("gnlfilesource", "audio1");
240 if( (cast(Bin
)gnl_audio_composition1
).add( audio1
) == false ) throw new Exception("gnl_audio_composition1.add(audio1)");
242 audio1
.location( file3
);
243 audio1
.setProperty("start", 0 * SECOND
);
244 audio1
.setProperty("duration", 5 * SECOND
);
245 audio1
.setProperty("media-start", 0 * SECOND
);
246 audio1
.setProperty("media-duration", 5 * SECOND
);
248 //This isn't entirely necessary, but demonstrated here because this way you could
249 //use video files that have audio as audiofiles.
250 audio1
.caps( audiocaps
);
252 audio2
= ElementFactory
.make("gnlfilesource", "audio2");
253 if( (cast(Bin
)gnl_audio_composition2
).add( audio2
) == false ) throw new Exception("gnl_audio_composition2.add(audio2)");
255 audio2
.location( file4
);
256 audio2
.setProperty("start", 5 * SECOND
);
257 audio2
.setProperty("duration", 5 * SECOND
);
258 audio2
.setProperty("media-start", 0 * SECOND
);
259 audio2
.setProperty("media-duration", 5 * SECOND
);
261 audio2
.caps( audiocaps
);
267 pipeline
.setState( GstState
.NULL
);
// "Back to start" button handler: seek the pipeline to position zero.
void onSeek(Button button)
{
	long startPosition = 0 * GST_SECOND;
	pipeline.seek( startPosition );
}
// "Forward one" button handler: step playback ahead by one 25 fps frame.
void onForwardOne(Button button)
{
	long position = pipeline.queryPosition();
	debug(GnonlinHello) Stdout("position: ")( position ).newline;
	pipeline.seek( position + FRAME_25 );
}
// "Backward one" button handler: step playback back by one 25 fps frame.
void onBackwardOne(Button button)
{
	long position = pipeline.queryPosition();
	debug(GnonlinHello) Stdout("position: ")( position ).newline;
	pipeline.seek( position - FRAME_25 );
}
// "Play" button handler: switch the pipeline into the PLAYING state.
void onPlay(Button button)
{
	debug(GnonlinHello) Stdout("Setting to PLAYING.").newline;
	auto target = GstState.PLAYING;
	pipeline.setState( target );
	debug(GnonlinHello) Stdout("Running.").newline;
}
// "Stop" button handler. Note: it moves the pipeline to PAUSED, not NULL
// (the NULL variant is kept commented out, as in the original).
void onStop(Button button)
{
	debug(GnonlinHello) Stdout("Setting to STOP.").newline;
	//pipeline.setState( GstState.NULL );
	auto target = GstState.PAUSED;
	pipeline.setState( target );
	debug(GnonlinHello) Stdout("Stopped.").newline;
}
306 void newPad( Pad pad
, Element element
)
308 debug(GnonlinHello
) Stdout("newPad callback called. START.").newline
;
311 throw new Exception("newPad: pad is null.");
312 else if( pad
.getPadStruct() is null )
313 throw new Exception("newPad: pad.getPadStruct is null.");
314 if( element
is null )
315 throw new Exception("newPad: element is null.");
319 // We can now link this pad with the audio decoder
320 debug(GnonlinHello
) Stdout("Dynamic pad created, linking parser/decoder").newline
;
322 Caps caps
= pad
.getCaps();
324 char[] pad_type
= caps
.getStructure(0).getName();
326 debug(GnonlinHello
) Stdout("pad_type: ")( pad_type
).newline
;
328 //if( std.string.find( pad_type, "video" ) == -1 )
329 if( Util
.containsPattern( pad_type
, "audio" ) == true )
332 debug(GnonlinHello
) Stdout("newPad(): linking to audio-output.").newline
;
333 //convpad = audioconvert.getPad("audiosink");
334 //convpad = audioconvert.getCompatiblePad( pad, pad.getCaps() );
335 convpad
= audio_adder
.getCompatiblePad( pad
, pad
.getCaps() );
340 debug(GnonlinHello
) Stdout("newPad(): linking to video-output.").newline
;
341 convpad
= videosink
.getCompatiblePad( pad
, pad
.getCaps() );
346 if( convpad
is null )
347 throw new Exception("newPad: convpad is null.");
348 else if( convpad
.getPadStruct() is null )
349 throw new Exception("newPad: convpad.getPadStruct is null.");
353 Stdout("Pad name: ")( pad
.getName() ).newline
;
354 Stdout("convpad name: ")( convpad
.getName() ).newline
;
355 Stdout("Pad parent name: ")( pad
.getParent().getName() ).newline
;
356 Stdout("convpad parent name: ")( convpad
.getParent().getName() ).newline
;
359 debug(GnonlinHello
) Stdout("doing a gst_pad_link.").newline
;
361 pad
.link( convpad
);// != GstPadLinkReturn.OK ) throw new Exception("dynamic pad.link(convpad)");
363 debug(GnonlinHello
) Stdout("Done. That was ok.").newline
;
367 bool busCall( Message msg
)
369 debug(GnonlinHello
) Stdout("GnonlinHello.busCall(msg) START.").newline
;
370 debug(GnonlinHello
) scope(exit
) Stdout("GnonlinHello.busCall(msg) END.").newline
;
374 case GstMessageType
.UNKNOWN
:
375 debug(GnonlinHello
) Stdout("Unknown message type.").newline
;
377 case GstMessageType
.EOS
:
378 debug(GnonlinHello
) Stdout("End-of-stream.").newline
;
382 case GstMessageType
.ERROR
:
386 msg
.parseError(&err
, &dbug
);
388 Stdout("Error: ")( Stringz
.fromUtf8z(err
.message
) )(" dbug: ")( Stringz
.fromUtf8z(dbug
) ).newline
;
389 //g_error_free (err);
402 //GStreamer elements:
406 Bin gnl_video_composition
;
407 Element video_material1
, video_material2
;
410 Bin gnl_audio_composition1
;
412 Bin gnl_audio_composition2
;
416 Element m_defaultVideoElement
;
417 Bin m_defaultVideoSource
;
418 Element m_defaultAudioElement1
;
419 Bin m_defaultAudioSource1
;
420 Element m_defaultAudioElement2
;
421 Bin m_defaultAudioSource2
;
423 Element audioconvert
, audiosink
;
426 //GUI elements for our little remote controller:
432 Button forwardOneButton
;
433 Button backwardOneButton
;
437 int main(char[][] args
)
439 debug(GnonlinHello
) Stdout("GStreamerD Hello!").newline
;
441 uint major
, minor
, micro
, nano
;
443 debug(GnonlinHello
) Stdout("Trying to init...").newline
;
446 GStreamer
.init(args
);
448 // check input arguments
449 if (args
.length
!= 5)
451 Stdout("Give it two videofiles and two audiofiles. The files must be atleast 10 seconds long. Usage example: %s videofile1.dv videofile2.mov audiofile1.ogg audiofile2.ogg\n", args
[0]).newline
;
455 debug(GnonlinHello
) Stdout("Checking version of GStreamer...").newline
;
456 GStreamer
.versio(&major
, &minor
, µ
, &nano
);
457 debug(GnonlinHello
) Stdout("This program is linked against GStreamer ", major
, ".", minor
, ".", micro
).newline
;
459 GnonlinHello gstHello
= new GnonlinHello( args
[1], args
[2], args
[3], args
[4] );