Fixed demos/gtkDTests and demos/cairo for Tango.
[gtkD.git] / gtkD / demos / gstreamer / gnonlin_example / gnonlin_example.d~
blob291c4b4c250ca651492b0eeb7f5cb1c0af9e0819
1 /*
2  * This file is part of gtkD.
3  *
4  * gtkD is free software; you can redistribute it and/or modify
5  * it under the terms of the GNU Lesser General Public License as published by
6  * the Free Software Foundation; either version 2.1 of the License, or
7  * (at your option) any later version.
8  *
9  * gtkD is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12  * GNU Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public License
15  * along with gtkD; if not, write to the Free Software
16  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
17  *
18  *
19  * gstreamerD GNonLin video and two track audio example.
20  * by Jonas Kivi
21  *
22  * This example isn't final quality, and it represents some not-so-good API
23  * found from gstreamerD. Also it doesn't yet do all the things it is supposed to
 * because of GNonLin not being finished. And therefore it has its share of bugs.
25  */
27 module gnonlin_example;
29 //Phobos imports:
30 //import std.stdio;
31 //import std.string;
33 //Tango imports
34 import tango.io.Stdout;
35 import Util = tango.text.Util;
36 import Integer = tango.text.convert.Integer;
37 import tango.util.collection.LinkSeq;
38 import Stringz = tango.stdc.stringz;
40 //gtkD imports:
41 import gtk.GtkD;
42 import gtk.MainWindow;
43 import gtk.VBox;
44 import gtk.Button;
45 import gobject.Value;
47 //gstreamerD imports:
48 import gstreamer.gstreamer;
49 import gstreamer.Pipeline;
50 import gstreamer.Bin;
51 import gstreamer.Element;
52 import gstreamer.ElementFactory;
53 import gstreamer.Pad;
54 import gstreamer.Caps;
55 import gstreamer.Message;
56 import gstreamer.Bus;
57 import gstreamerc.gstreamertypes;
//debug=GnonlinHello;

// Duration of one frame at 25 fps, in GStreamer time units (nanoseconds):
// GST_SECOND / 25 = 40_000_000 ns.  Used for single-frame stepping.
const long FRAME_25 = (GST_SECOND / 25L);//40000000
63 class GnonlinHello : MainWindow
65 public:
66         
    /**
     * Builds the remote-control window and assembles the whole GStreamer
     * pipeline: one gnonlin video composition and two gnonlin audio
     * compositions, whose outputs are mixed by an "adder" element and sent
     * to an ALSA sink.  The compositions expose their pads dynamically, so
     * the actual linking to the sinks happens later in newPad().
     *
     * Params:
     *    file1 = video clip scheduled at 0..5 s on the video track
     *    file2 = video clip scheduled at 5..10 s on the video track
     *    file3 = audio clip scheduled at 0..5 s on audio track 1
     *    file4 = audio clip scheduled at 5..10 s on audio track 2
     */
    this( char[] file1, char[] file2, char[] file3, char[] file4 )
    {
        debug(GnonlinHello) Stdout("GnonlinHello.this() START.").newline;
        debug(GnonlinHello) scope(exit) Stdout("GnonlinHello.this() END.").newline;
        
        super("Gnonlin helloworld");
        
        // GUI: a vertical strip of transport-control buttons.
        vbox = new VBox(false,0);
        playButton = new Button("Play");
        playButton.addOnClicked( &onPlay );
        vbox.packStart( playButton, false, false, 0 );
        
        stopButton = new Button("Stop");
        stopButton.addOnClicked( &onStop );
        vbox.packStart( stopButton, false, false, 0 );
        
        seekButton = new Button("Back to start");
        seekButton.addOnClicked( &onSeek );
        vbox.packStart( seekButton, false, false, 0 );
        
        forwardOneButton = new Button("Forward one");
        forwardOneButton.addOnClicked( &onForwardOne );
        vbox.packStart( forwardOneButton, false, false, 0 );
        
        backwardOneButton = new Button("Backward one");
        backwardOneButton.addOnClicked( &onBackwardOne );
        vbox.packStart( backwardOneButton, false, false, 0 );
        
        add( vbox );
        
        showAll();
        
        // create elements
        
        pipeline = new Pipeline("mypipeline");
        
        // Watch the pipeline's bus so busCall() receives EOS/error messages.
        pipeline.getBus().addWatch( &busCall );
        
        // Video-track composition; its source pad appears dynamically (see newPad).
        gnl_video_composition = new Bin( ElementFactory.make("gnlcomposition", "gnl_video_composition") );
        if( pipeline.add( gnl_video_composition ) == false ) throw new Exception("pipeline.add(gnl_video_composition)");
        gnl_video_composition.addOnPadAdded(&newPad);
        
        
        debug(GnonlinHello)
        {
            Stdout("At the moment getting videotestsrc usually segfaults if you put even one Stdout before it.").newline;
            Stdout("Remove all Stdouts and debugs to make it work.").newline;
            //That's why debug=GnonlinHello is not on now.
            //If you need the Stdouts, then comment-out the following six lines.
        }
        
        // Default (fallback) video source: a test pattern shown wherever the
        // composition has a gap.  Priority uint.max means "lowest", so real
        // material always wins over it.
        m_defaultVideoElement = ElementFactory.make( "videotestsrc", "default-videoelement" );
        m_defaultVideoElement.setProperty("pattern", 2);
        m_defaultVideoSource = new Bin( ElementFactory.make( "gnlsource", "defaultvideosource") );
        m_defaultVideoSource.add( m_defaultVideoElement );
        m_defaultVideoSource.setProperty("priority", 4294967295 );// 2 ** 32 -1 );
        gnl_video_composition.add( m_defaultVideoSource );
        
        
        // Two independent audio compositions, one per audio track.
        gnl_audio_composition1 = new Bin( ElementFactory.make("gnlcomposition", "gnl_audio_composition1") );
        if( pipeline.add( gnl_audio_composition1 ) == false ) throw new Exception("pipeline.add(gnl_audio_composition1)");
        gnl_audio_composition1.addOnPadAdded(&newPad);
        gnl_audio_composition2 = new Bin( ElementFactory.make("gnlcomposition", "gnl_audio_composition2") );
        if( pipeline.add( gnl_audio_composition2 ) == false ) throw new Exception("pipeline.add(gnl_audio_composition2)");
        gnl_audio_composition2.addOnPadAdded(&newPad);
    
        // Fallback for audio track 1: silence (audiotestsrc at volume 0).
        m_defaultAudioElement1 = ElementFactory.make( "audiotestsrc", "defaultaudioelement1" );
        m_defaultAudioElement1.setProperty("volume", 0);
        m_defaultAudioSource1 = new Bin( ElementFactory.make( "gnlsource", "defaultaudiosource1") );
        m_defaultAudioSource1.add( m_defaultAudioElement1 );
        m_defaultAudioSource1.setProperty("priority", 4294967295 );//2 ** 32 -1 );
        
        gnl_audio_composition1.add( m_defaultAudioSource1 );
        
        
        //This is how you'd do it, if it worked. For some reason adding a defaultaudiosource
        //to a gnlcomposition that has its first gnlfilesource in other than zero
        //ends up with errors in the playback.
        //This makes it impossible for now to have a second track of audio, that doesn't
        //start from zero. But as long as you have something starting from zero, it works.
        m_defaultAudioElement2 = ElementFactory.make( "audiotestsrc", "defaultaudioelement2" );
        m_defaultAudioElement2.setProperty("volume", 0);
        m_defaultAudioSource2 = new Bin( ElementFactory.make( "gnlsource", "defaultaudiosource2") );
        m_defaultAudioSource2.add( m_defaultAudioElement2 );
        m_defaultAudioSource2.setProperty("priority", 4294967295 );//2 ** 32 -1 );
        
        gnl_audio_composition2.add( m_defaultAudioSource2 );
        
        
        //VIDEOSINK
        
        //Test one of these: xvimagesink, ximagesink, sdlvideosink
        //xvimagesink is accelerated
        videosink = ElementFactory.make("xvimagesink", "video-output-xvimagesink");
        if( videosink.getElementStruct() is null )
        {
            Stdout("Creation of xvimagesink failed.").newline;
        }
        
        if( pipeline.add( videosink ) == false ) throw new Exception("pipeline.add(videosink)");
        
        //AUDIOSINK:
        //The adder element can add/mix several audio tracks together.
        // NOTE(review): "adder" is a plain element, yet it is wrapped in Bin
        // here — looks suspicious; confirm this is what the binding expects.
        audio_adder = new Bin( ElementFactory.make("adder", "audio_adder") );
        if( pipeline.add( audio_adder ) == false ) throw new Exception("pipeline.add(audio_adder)");
        audioconvert = ElementFactory.make("audioconvert", "audioconverter");
        if( pipeline.add( audioconvert ) == false ) throw new Exception("pipeline.add(audioconvert)");
        
        // Static part of the audio chain: adder -> audioconvert -> alsasink.
        if( audio_adder.link( audioconvert ) == false ) throw new Exception("audio_adder.link(audioconvert)");
        audiosink = ElementFactory.make("alsasink", "audio-output-alsa");
        if( pipeline.add( audiosink ) == false ) throw new Exception("pipeline.add(audiosink)");
        if( audioconvert.link( audiosink ) == false ) throw new Exception("audioconvert.link(audiosink)");
        //GNLFILESOURCES:
        debug(GnonlinHello) Stdout("GnonlinHello.this() Initing gnlfilesources.").newline;
        //This isn't entirely necessary, but demonstrated here because this way you could
        //use video files that have audio as audiofiles.
        Caps videocaps = Caps.fromString("video/x-raw-yuv;video/x-raw-rgb");
        Caps audiocaps = Caps.fromString("audio/x-raw-int;audio/x-raw-float");
        //VIDEO1:
        
        debug(GnonlinHello) Stdout("GnonlinHello.this() video_material1.").newline;
        video_material1 = ElementFactory.make("gnlfilesource", "video_material1");
        //video_material1.addOnPadAdded(&newPad);
        debug(GnonlinHello) Stdout("GnonlinHello.this() Trying to do gnl_video_composition.add( video_material1 ).").newline;
        if( (cast(Bin)gnl_video_composition).add( video_material1 ) == false ) throw new Exception("gnl_video_composition.add(video_material1)");
        
        debug(GnonlinHello) Stdout("GnonlinHello.this() Setting properties for video_material1.").newline;
        video_material1.location( file1 );
        //alternatively: video_material1.setProperty("location", file1 );
        //This is just an example of getting the properties. It is difficult at the moment.
        //This API should be simplified:
        scope Value locval = new Value( "empty" );
        video_material1.getProperty( "location", locval );
        char[] loc = locval.getString();
        debug(GnonlinHello) Stdout("video_material1 location:", loc ).newline;
        // Schedule clip 1 on the timeline: occupies 0..5 s, playing the first
        // five seconds of the file ("media-start"/"media-duration").
        video_material1.setProperty("start", 0 * SECOND );
        video_material1.setProperty("duration", 5 * SECOND );
        video_material1.setProperty("media-start", 0 * SECOND );
        video_material1.setProperty("media-duration", 5 * SECOND );
        //This isn't entirely necessary, but demonstrated here because this way you could
        //use video files that have audio as audiofiles.
        video_material1.caps( videocaps );
        //VIDEO2: occupies 5..10 s of the timeline.
        video_material2 = ElementFactory.make("gnlfilesource", "video_material2");
        if( (cast(Bin)gnl_video_composition).add( video_material2 ) == false ) throw new Exception("gnl_video_composition.add(video_material2)");
    
        video_material2.location( file2 );
        video_material2.setProperty("start",  5 * SECOND );
        video_material2.setProperty("duration", 5 * SECOND );
        video_material2.setProperty("media-start", 0 * SECOND );
        video_material2.setProperty("media-duration", 5 * SECOND );
        video_material2.caps( videocaps );
        debug(GnonlinHello) Stdout("Setting audio properties.").newline;
        // Audio track 1: clip at 0..5 s.
        audio1 = ElementFactory.make("gnlfilesource", "audio1");
        if( (cast(Bin)gnl_audio_composition1).add( audio1 ) == false ) throw new Exception("gnl_audio_composition1.add(audio1)");
        audio1.location( file3 );
        audio1.setProperty("start",  0 * SECOND );
        audio1.setProperty("duration", 5 * SECOND );
        audio1.setProperty("media-start", 0 * SECOND );
        audio1.setProperty("media-duration", 5 * SECOND );
        //This isn't entirely necessary, but demonstrated here because this way you could
        //use video files that have audio as audiofiles.
        audio1.caps( audiocaps );
        // Audio track 2: clip at 5..10 s.
        audio2 = ElementFactory.make("gnlfilesource", "audio2");
        if( (cast(Bin)gnl_audio_composition2).add( audio2 ) == false ) throw new Exception("gnl_audio_composition2.add(audio2)");
        audio2.location( file4 );
        audio2.setProperty("start",  5 * SECOND );
        audio2.setProperty("duration", 5 * SECOND );
        audio2.setProperty("media-start", 0 * SECOND );
        audio2.setProperty("media-duration", 5 * SECOND );
        audio2.caps( audiocaps );
    }
    /// Destructor: drives the pipeline to the NULL state so GStreamer
    /// releases its resources (threads, devices, memory).
    ~this()
    {
        pipeline.setState( GstState.NULL );
    }
270         void onSeek(Button button)
271         {
272                 pipeline.seek( 0 * GST_SECOND );
273         }
275         void onForwardOne(Button button)
276         {
277                 long cur_pos = pipeline.queryPosition();
278                 debug(GnonlinHello) Stdout("position: ")( cur_pos ).newline;
279                 pipeline.seek( cur_pos + FRAME_25 );
281         }
283         void onBackwardOne(Button button)
284         {
285                 long cur_pos = pipeline.queryPosition();
286                 debug(GnonlinHello) Stdout("position: ")( cur_pos ).newline;
287                 pipeline.seek( cur_pos - FRAME_25 );
289         }
291         void onPlay(Button button)
292         {
293                 debug(GnonlinHello) Stdout("Setting to PLAYING.").newline;
294                 pipeline.setState( GstState.PLAYING );
295                 debug(GnonlinHello) Stdout("Running.").newline;
296         }
298         void onStop(Button button)
299         {
300                 debug(GnonlinHello) Stdout("Setting to STOP.").newline;
301                 //pipeline.setState( GstState.NULL );
302                 pipeline.setState( GstState.PAUSED );
303                 debug(GnonlinHello) Stdout("Stopped.").newline;
304         }
    /**
     * Callback invoked when one of the gnl compositions exposes a new dynamic
     * pad.  Inspects the pad's caps: audio pads are linked to the adder,
     * anything else is linked to the video sink.
     *
     * Params:
     *    pad     = the newly created source pad
     *    element = the element that created the pad
     * Throws: Exception when the pad is invalid or no compatible sink pad
     *         can be found.
     */
    void newPad( Pad pad, Element element )
    {
        debug(GnonlinHello) Stdout("newPad callback called. START.").newline;
        if( pad is null )
            throw new Exception("newPad: pad is null.");
        else if( pad.getPadStruct() is null )
            throw new Exception("newPad: pad.getPadStruct is null.");
        if( element is null )
            throw new Exception("newPad: element is null.");
        
        Pad convpad;
        // We can now link this pad with the audio decoder
        debug(GnonlinHello) Stdout("Dynamic pad created, linking parser/decoder").newline;
        Caps caps = pad.getCaps();
        // Media type of the first caps structure, e.g. "audio/x-raw-int"
        // or "video/x-raw-yuv".
        char[] pad_type = caps.getStructure(0).getName();
        debug(GnonlinHello) Stdout("pad_type: ")( pad_type ).newline;
        //if( std.string.find( pad_type, "video" ) == -1 )
        if( Util.containsPattern( pad_type, "audio" ) == true )
        {
            //it's audio:
            debug(GnonlinHello) Stdout("newPad(): linking to audio-output.").newline;
            //convpad = audioconvert.getPad("audiosink");
            //convpad = audioconvert.getCompatiblePad( pad, pad.getCaps() );
            convpad = audio_adder.getCompatiblePad( pad, pad.getCaps() );
        }
        else
        {
            //it's video:
            debug(GnonlinHello) Stdout("newPad(): linking to video-output.").newline;
            convpad = videosink.getCompatiblePad( pad, pad.getCaps() );
        }
        if( convpad is null )
            throw new Exception("newPad: convpad is null.");
        else if( convpad.getPadStruct() is null )
            throw new Exception("newPad: convpad.getPadStruct is null.");
        debug(GnonlinHello)
        {
            Stdout("Pad name: ")( pad.getName() ).newline;
            Stdout("convpad name: ")( convpad.getName() ).newline;
            Stdout("Pad parent name: ")( pad.getParent().getName() ).newline;
            Stdout("convpad parent name: ")( convpad.getParent().getName() ).newline;
        }
        debug(GnonlinHello) Stdout("doing a gst_pad_link.").newline;
        // NOTE(review): the link result is deliberately not checked (the
        // check below was commented out), so a failed link goes unnoticed —
        // confirm whether GstPadLinkReturn should be verified here.
        //if(
        pad.link( convpad );// != GstPadLinkReturn.OK ) throw new Exception("dynamic pad.link(convpad)");
        debug(GnonlinHello) Stdout("Done. That was ok.").newline;
    }
366         
    /**
     * Bus watch callback: handles messages posted on the pipeline's bus
     * (end-of-stream, errors, ...).
     *
     * Params:
     *    msg = the bus message to dispatch on
     * Returns: true, so the watch stays installed.
     */
    bool busCall( Message msg )
    {
        debug(GnonlinHello) Stdout("GnonlinHello.busCall(msg) START.").newline;
        debug(GnonlinHello) scope(exit) Stdout("GnonlinHello.busCall(msg) END.").newline;
        switch( msg.type )
        {
            case GstMessageType.UNKNOWN:
                debug(GnonlinHello) Stdout("Unknown message type.").newline;
            break;
            case GstMessageType.EOS:
                debug(GnonlinHello) Stdout("End-of-stream.").newline;
                //GtkD.mainQuit();
            break;
            case GstMessageType.ERROR:
            {
                gchar *dbug;
                GError *err;
                msg.parseError(&err, &dbug);
                // NOTE(review): err and dbug are never freed — the g_free /
                // g_error_free calls are commented out, so every error message
                // leaks; confirm whether the binding exposes these functions.
                //g_free (dbug);
                Stdout("Error: ")( Stringz.fromUtf8z(err.message) )(" dbug: ")( Stringz.fromUtf8z(dbug) ).newline;
                //g_error_free (err);
                //GtkD.mainQuit();
            break;
            }
            default:
            break;
        }
        return true;
    }
399         
protected:
    
    //GStreamer elements:
    
    Pipeline pipeline;  // top-level pipeline containing everything below
    
    // Indentation mirrors containment on the timeline:
    Bin gnl_video_composition;  // video-track timeline
        Element video_material1, video_material2;  // its two scheduled clips
    Element audio_adder;  // mixes the two audio tracks together
        Bin gnl_audio_composition1;  // audio track 1 timeline
            Element audio1;  // clip on track 1
        Bin gnl_audio_composition2;  // audio track 2 timeline
            Element audio2;  // clip on track 2
    //default sources: low-priority fillers for gaps in the compositions
    Element m_defaultVideoElement;  // videotestsrc pattern
    Bin m_defaultVideoSource;  // gnlsource wrapping it
    Element m_defaultAudioElement1;  // silent audiotestsrc for track 1
    Bin m_defaultAudioSource1;
    Element m_defaultAudioElement2;  // silent audiotestsrc for track 2
    Bin m_defaultAudioSource2;
    Element audioconvert, audiosink;  // static audio output chain
    Element videosink;  // xvimagesink video output
    //GUI elements for our little remote controller:
    VBox vbox;
    Button playButton;
    Button stopButton;
    Button seekButton;
    Button forwardOneButton;
    Button backwardOneButton;
/**
 * Entry point.  Initialises GtkD and gstreamerD, validates the command line
 * (two video files and two audio files, each at least 10 seconds long),
 * builds the GnonlinHello window and runs the GTK main loop.
 *
 * Returns: 0 on success, -1 when the wrong number of arguments is given.
 */
int main(char[][] args)
{
    debug(GnonlinHello) Stdout("GStreamerD Hello!").newline;

    uint major, minor, micro, nano;

    debug(GnonlinHello) Stdout("Trying to init...").newline;

    // Both inits may consume their own command-line options from args,
    // so the argument count is checked only afterwards.
    GtkD.init(args);
    GStreamer.init(args);

    // check input arguments
    if (args.length != 5)
    {
        // BUG FIX: the original used a printf-style "%s" placeholder, but
        // Tango's Stdout formats with "{}" — the "%s" was printed literally
        // and args[0] was appended as a separate item.
        Stdout.formatln("Give it two videofiles and two audiofiles. The files must be atleast 10 seconds long. Usage example: {} videofile1.dv videofile2.mov audiofile1.ogg audiofile2.ogg", args[0]);
        return -1;
    }

    debug(GnonlinHello) Stdout("Checking version of GStreamer...").newline;
    GStreamer.versio(&major, &minor, &micro, &nano);
    debug(GnonlinHello) Stdout("This program is linked against GStreamer ", major, ".", minor, ".", micro ).newline;

    // Keep a reference for the lifetime of the main loop.
    GnonlinHello gstHello = new GnonlinHello( args[1], args[2], args[3], args[4] );

    GtkD.main();

    return 0;
}