/* gstreamer_io_peer.c -- Implements native methods for class
   GStreamerNativePeer
   Copyright (C) 2007 Free Software Foundation, Inc.

This file is part of GNU Classpath.

GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING.  If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.

Linking this library statically or dynamically with other modules is
making a combined work based on this library.  Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.

As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module.  An independent module is a module which is not derived from
or based on this library.  If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so.  If you do not wish to do so, delete this
exception statement from your version. */

#include <stdio.h>
#include <string.h>

#include <jni.h>

#include <glib.h>
#include <glib/gprintf.h>

#include <gdk/gdk.h>

#include <gst/gst.h>

#include "jcl.h"

#include "gst_peer.h"

#include "gnu_javax_sound_sampled_gstreamer_io_GstAudioFileReaderNativePeer.h"

#include "gst_classpath_src.h"
#include "gst_input_stream.h"

/* for caching */
static jfieldID fileFID = NULL;
static jfieldID pointerDataID = NULL;

static jfieldID mimetypeFID = NULL;
static jfieldID endiannessFID = NULL;
static jfieldID channelsFID = NULL;
static jfieldID rateFID = NULL;
static jfieldID widthFID = NULL;
static jfieldID depthFID = NULL;
static jfieldID isSignedFID = NULL;
static jfieldID nameFID = NULL;
static jfieldID layerFID = NULL;
static jfieldID bitrateFID = NULL;
static jfieldID framedFID = NULL;
static jfieldID typeFID = NULL;
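
/*
 * fileFID through typeFID refer to the String fields of the Java inner class
 * GstAudioFileReaderNativePeer.GstHeader; pointerDataID refers to the "data"
 * field of gnu.classpath.Pointer32/Pointer64.  All of them are resolved once
 * in init_id_cache() below, so the per-call native methods do not have to
 * look them up again.
 */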

typedef struct _AudioProperties AudioProperties;
struct _AudioProperties
{
  /*
   * NOTE: descriptions of the properties are taken from:
   * http://gstreamer.freedesktop.org/data/doc/gstreamer/head/pwg/html/section-types-definitions.html#table-audio-types
   */

  /* decoder name */
  const char *name;

  /* audio endianness */
  const char *endianness;

  /* header size */
  const char *header_size;

  /* mime type */
  const char *mimetype;

  /* The sample rate of the data, in samples (per channel) per second */
  const char *samplerate;

  /* The number of channels of audio data */
  const char *channels;

  const char *layer;

  const char *bitrate;

  const char *framed;

  /*
   * Defines if the values of the integer samples are signed or not.
   * Signed samples use one bit to indicate the sign (negative or positive)
   * of the value.  Unsigned samples are always positive.
   */
  const char *signess;

  /* the sample rate, as read from the negotiated caps */
  const char *rate;

  /* Number of bits allocated per sample. */
  const char *width;

  /*
   * The number of bits used per sample.
   * If the depth is less than the width, the low bits are assumed to be the
   * ones used.  For example, a width of 32 and a depth of 24 means that
   * each sample is stored in a 32 bit word, but only the low
   * 24 bits are actually used.
   */
  const char *depth;

  /*
   * This is only set in the case of MPEG streams.
   */
  const char *type;

  gboolean done;
};

/* ***** PRIVATE FUNCTIONS DECLARATION ***** */

static gboolean
set_strings (JNIEnv *env, const AudioProperties *properties, jobject header);

static gboolean
typefind_callback (GstElement *typefind, guint probability, const GstCaps *caps,
                   gpointer data);

static void
element_added (GstBin *bin, GstElement *element, gpointer data);

static void
new_decoded_pad (GstElement *decoder, GstPad *pad,
                 gboolean last, gpointer data);

static gboolean
fill_info (GstElement *decoder, AudioProperties *properties);

static gchar *
get_string_property (const GstStructure *structure, const gchar *property);

static gchar *
get_boolean_property (const GstStructure *structure, const gchar *property);

static gboolean
set_string (JNIEnv *env, jobject header, jfieldID fieldID,
            const gchar *property);

static void
free_properties (AudioProperties *properties);

static void
reset_properties (AudioProperties *properties);

static jboolean process_audio (GstElement *source, JNIEnv *env, jobject header);

/* ***** END: PRIVATE FUNCTIONS DECLARATION ***** */


/* ***** NATIVE FUNCTIONS ***** */

JNIEXPORT void JNICALL
Java_gnu_javax_sound_sampled_gstreamer_io_GstAudioFileReaderNativePeer_init_1id_1cache
  (JNIEnv *env, jclass clazz __attribute__ ((unused)))
{
  jclass pointerClass = NULL;
  jclass GstHeader = NULL;

  GstHeader = JCL_FindClass(env, "gnu/javax/sound/sampled/gstreamer/io/GstAudioFileReaderNativePeer$GstHeader");
  fileFID = (*env)->GetFieldID(env, GstHeader, "file", "Ljava/lang/String;");

  mimetypeFID = (*env)->GetFieldID(env, GstHeader, "mimetype",
                                   "Ljava/lang/String;");
  endiannessFID = (*env)->GetFieldID(env, GstHeader, "endianness",
                                     "Ljava/lang/String;");
  channelsFID = (*env)->GetFieldID(env, GstHeader, "channels",
                                   "Ljava/lang/String;");
  rateFID = (*env)->GetFieldID(env, GstHeader, "rate", "Ljava/lang/String;");
  widthFID = (*env)->GetFieldID(env, GstHeader, "width", "Ljava/lang/String;");
  depthFID = (*env)->GetFieldID(env, GstHeader, "depth", "Ljava/lang/String;");
  isSignedFID = (*env)->GetFieldID(env, GstHeader, "isSigned",
                                   "Ljava/lang/String;");
  nameFID = (*env)->GetFieldID(env, GstHeader, "name", "Ljava/lang/String;");
  layerFID = (*env)->GetFieldID(env, GstHeader, "layer", "Ljava/lang/String;");
  bitrateFID = (*env)->GetFieldID(env, GstHeader, "bitrate",
                                  "Ljava/lang/String;");
  framedFID = (*env)->GetFieldID(env, GstHeader, "framed",
                                 "Ljava/lang/String;");
  typeFID = (*env)->GetFieldID(env, GstHeader, "type", "Ljava/lang/String;");
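
  /*
   * The GstInputStream instance is handed to the native layer wrapped in a
   * gnu.classpath.Pointer32 or Pointer64 object, depending on the word size
   * of the platform, so we cache the field ID of the matching "data" field
   * here and use it later to unwrap the raw pointer.
   */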
#if SIZEOF_VOID_P == 8
  pointerClass = JCL_FindClass (env, "gnu/classpath/Pointer64");
  if (pointerClass != NULL)
    {
      pointerDataID = (*env)->GetFieldID (env, pointerClass, "data", "J");
    }
#else
# if SIZEOF_VOID_P == 4
  pointerClass = JCL_FindClass (env, "gnu/classpath/Pointer32");
  if (pointerClass != NULL)
    {
      pointerDataID = (*env)->GetFieldID(env, pointerClass, "data", "I");
    }
# else
#   error "Pointer size is not supported."
# endif /* SIZEOF_VOID_P == 4 */
#endif /* SIZEOF_VOID_P == 8 */
}

JNIEXPORT jboolean JNICALL
Java_gnu_javax_sound_sampled_gstreamer_io_GstAudioFileReaderNativePeer_gstreamer_1get_1audio_1format_1stream
  (JNIEnv *env, jclass clazz __attribute__ ((unused)), jobject header,
   jobject pointer)
{
  GstInputStream *istream = NULL;
  GstElement *source = NULL;
  gboolean result = JNI_FALSE;

  if (header == NULL)
    return JNI_FALSE;

  if (pointer == NULL)
    return JNI_FALSE;

  istream = (GstInputStream *) get_object_from_pointer (env, pointer,
                                                        pointerDataID);
  if (istream == NULL)
    return JNI_FALSE;

  /* init gstreamer */
  gst_init (NULL, NULL);

  /* SOURCE */
  source = gst_element_factory_make ("classpathsrc", "source");
  if (source == NULL)
    {
      g_warning ("unable to create a source");
      return JNI_FALSE;
    }

  g_object_set (G_OBJECT (source), GST_CLASSPATH_SRC_ISTREAM, istream, NULL);

  result = process_audio (source, env, header);

  return result;
}

JNIEXPORT jboolean JNICALL
Java_gnu_javax_sound_sampled_gstreamer_io_GstAudioFileReaderNativePeer_gstreamer_1get_1audio_1format_1file
  (JNIEnv *env, jclass clazz __attribute__ ((unused)), jobject header)
{
  /* source file */
  const char *file = NULL;

  /* GStreamer elements */
  GstElement *source = NULL;

  jboolean result = JNI_FALSE;

  /* java fields */
  jstring _file = NULL;

  _file = (*env)->GetObjectField(env, header, fileFID);
  file = JCL_jstring_to_cstring (env, _file);
  if (file == NULL)
    {
      return JNI_FALSE;
    }

  gst_init (NULL, NULL);

  /* create the source element, it will be used to read the file */
  source = gst_element_factory_make ("filesrc", "source");
  if (source == NULL)
    {
      JCL_free_cstring (env, _file, file);
      return JNI_FALSE;
    }

  /* set the file name */
  g_object_set (G_OBJECT (source), "location", file, NULL);

  result = process_audio (source, env, header);

  /* free stuff */
  JCL_free_cstring (env, _file, file);

  return result;
}

/* ***** END: NATIVE FUNCTIONS ***** */

/* ***** PRIVATE FUNCTIONS IMPLEMENTATION ***** */
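
/*
 * process_audio builds a small "source ! decodebin" pipeline around the
 * given source element, sets it to PAUSED so that caps get negotiated, and
 * then reads the audio properties from the decoder's source pads before
 * handing them over to the Java-side GstHeader object.
 */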
static jboolean process_audio (GstElement *source, JNIEnv *env, jobject header)
{
  /* will contain the properties we need to put into the given GstHeader */
  AudioProperties *properties = NULL;

  /* GStreamer elements */
  GstElement *pipeline = NULL;
  GstElement *decoder = NULL;

  GstElement *typefind = NULL;

  GstStateChangeReturn res;

  jboolean result = JNI_FALSE;

  properties = (AudioProperties *) g_malloc0 (sizeof (AudioProperties));
  if (properties == NULL)
    {
      return result;
    }

  reset_properties (properties);

  /*
   * Create the decoder element; this will decode the stream and retrieve
   * its properties.
   * We connect signals to this element to be informed when it is done
   * decoding the stream and to get the information we need about the
   * audio file.
   */
  decoder = gst_element_factory_make ("decodebin", "decoder");
  if (decoder == NULL)
    {
      free_properties (properties);
      return result;
    }

  /* now, we create a pipeline and fill it with the other elements */
  pipeline = gst_pipeline_new ("pipeline");
  if (pipeline == NULL)
    {
      gst_object_unref (GST_OBJECT (decoder));
      free_properties (properties);
      return result;
    }

  g_signal_connect (decoder, "new-decoded-pad", G_CALLBACK (new_decoded_pad),
                    pipeline);
  g_signal_connect (G_OBJECT (decoder), "element-added",
                    G_CALLBACK (element_added), properties);

  /*
   * we get the typefind element from the decodebin to catch the additional
   * properties that the decodebin does not expose to us
   */
  typefind = gst_bin_get_by_name (GST_BIN (decoder), "typefind");
  if (typefind != NULL)
    {
      /*
       * NOTE: we can live without the typefind; our stream detection will
       * just not be as accurate as it could be.  If this lookup fails,
       * though, there is probably a deeper problem, such as a memory error.
       */
      g_signal_connect (G_OBJECT (typefind), "have-type",
                        G_CALLBACK (typefind_callback), properties);
    }

  gst_bin_add_many (GST_BIN (pipeline), source, decoder, NULL);
  gst_element_link (source, decoder);

  /*
   * now, we set the pipeline to the PAUSED state and wait for it to
   * preroll, so we can traverse it to get the info we need.
   */

  res = gst_element_set_state (pipeline, GST_STATE_PAUSED);
  if (res == GST_STATE_CHANGE_FAILURE)
    {
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (GST_OBJECT (pipeline));

      free_properties (properties);

      return result;
    }

  res = gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
  if (res != GST_STATE_CHANGE_SUCCESS)
    {
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (GST_OBJECT (pipeline));

      free_properties (properties);

      return result;
    }

  if (fill_info (decoder, properties))
    {
      result = set_strings (env, properties, header);
    }

  /* free stuff */
  gst_element_set_state (pipeline, GST_STATE_NULL);

  free_properties (properties);

  gst_object_unref (GST_OBJECT (pipeline));

  return result;
}
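
/*
 * Callback for the "have-type" signal of decodebin's internal typefind
 * element: it records the top-level mimetype and, for MPEG streams, the
 * mpegversion/layer fields that the decoded audio caps alone do not expose.
 */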
static gboolean typefind_callback (GstElement *typefind __attribute__ ((unused)),
                                   guint probability __attribute__ ((unused)),
                                   const GstCaps *caps,
                                   gpointer data)
{
  GstStructure *structure = NULL;
  AudioProperties *properties = NULL;

  const char *mpeg = NULL;

  properties = (AudioProperties *) data;

  structure = gst_caps_get_structure (caps, 0);

  /* MIMETYPE */
  properties->mimetype = gst_structure_get_name (structure);
  mpeg = get_string_property (structure, "mpegversion");

  if (mpeg != NULL)
    {
      properties->layer = get_string_property (structure, "layer");
      properties->type = (gchar *) g_malloc0 (_GST_MALLOC_SIZE_);
      g_snprintf ((gpointer) properties->type, _GST_MALLOC_SIZE_,
                  "MPEG%sV%s", mpeg,
                  properties->layer);

      g_free ((gpointer) mpeg);
    }

  return TRUE;
}
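
/*
 * Callback for decodebin's "new-decoded-pad" signal: every decoded pad is
 * linked to a throwaway fakesink so that the pipeline can actually reach the
 * PAUSED state and negotiate the caps we want to inspect.
 */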
static void
new_decoded_pad (GstElement *decoder __attribute__ ((unused)),
                 GstPad *pad,
                 gboolean last __attribute__ ((unused)),
                 gpointer data)
{
  GstElement *pipeline = NULL;
  GstElement *fakesink = NULL;
  GstPad *sinkpad = NULL;

  pipeline = (GstElement *) data;
  if (pipeline == NULL)
    return;

  fakesink = gst_element_factory_make ("fakesink", NULL);
  if (fakesink == NULL)
    return;

  gst_bin_add (GST_BIN (pipeline), fakesink);
  sinkpad = gst_element_get_pad (fakesink, "sink");
  if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad)))
    {
      gst_bin_remove (GST_BIN (pipeline), fakesink);
    }
  else
    {
      gst_element_set_state (fakesink, GST_STATE_PAUSED);
    }
}

static gboolean
set_strings (JNIEnv *env, const AudioProperties *properties, jobject header)
{
  gboolean result = FALSE;

  /*
   * Setting at least one of the primary properties below is enough to be
   * sure we can handle this kind of audio data.
   */

  /* now, map our properties to the java class */
  set_string (env, header, mimetypeFID, properties->mimetype);

  if (set_string (env, header, endiannessFID, properties->endianness))
    result = JNI_TRUE;

  if (set_string (env, header, channelsFID, properties->channels))
    result = JNI_TRUE;

  if (set_string (env, header, rateFID, properties->rate))
    result = JNI_TRUE;

  if (set_string (env, header, widthFID, properties->width))
    result = JNI_TRUE;

  if (set_string (env, header, depthFID, properties->depth))
    result = JNI_TRUE;

  if (set_string (env, header, isSignedFID, properties->signess))
    result = JNI_TRUE;

  if (set_string (env, header, nameFID, properties->name))
    result = JNI_TRUE;

  /* non primary properties */
  set_string (env, header, layerFID, properties->layer);
  set_string (env, header, bitrateFID, properties->bitrate);
  set_string (env, header, framedFID, properties->framed);
  set_string (env, header, typeFID, properties->type);

  return result;
}
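
/*
 * Walks the source pads exposed by the decoder, skips video pads, and copies
 * the relevant fields of the negotiated audio caps (signedness, endianness,
 * channels, rate, width, depth) into the AudioProperties structure.
 */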
static gboolean fill_info (GstElement *decoder, AudioProperties *properties)
{
  GstIterator *it = NULL;
  gpointer data = NULL;
  gboolean result = FALSE;

  it = gst_element_iterate_src_pads (decoder);
  while (gst_iterator_next (it, &data) == GST_ITERATOR_OK)
    {
      GstPad *pad = GST_PAD (data);
      GstCaps *caps;

      GstStructure *structure;

      const gchar *caps_string = NULL;

      caps = gst_pad_get_caps (pad);
      caps_string = gst_caps_to_string (caps);

      if (g_str_has_prefix (caps_string, "video"))
        {
          /* no video support, this is an audio library */
          g_free ((gpointer) caps_string);
          gst_caps_unref (caps);
          gst_object_unref (pad);

          continue;
        }

      g_free ((gpointer) caps_string);

      structure = gst_caps_get_structure (GST_CAPS (caps), 0);

      /* fill the properties we need */

      /* SIGNEDNESS */
      properties->signess = get_boolean_property (structure, "signed");
      if (properties->signess != NULL)
        {
          result = TRUE;
        }

      /* ENDIANNESS */
      properties->endianness = get_string_property (structure, "endianness");
      if (properties->endianness != NULL)
        {
          result = TRUE;
        }

      /* CHANNELS */
      properties->channels = get_string_property (structure, "channels");
      if (properties->channels != NULL)
        {
          result = TRUE;
        }

      /* RATE */
      properties->rate = get_string_property (structure, "rate");
      if (properties->rate != NULL)
        {
          result = TRUE;
        }

      /* WIDTH */
      properties->width = get_string_property (structure, "width");
      if (properties->width != NULL)
        {
          result = TRUE;
        }

      /* DEPTH */
      properties->depth = get_string_property (structure, "depth");
      if (properties->depth != NULL)
        {
          result = TRUE;
        }

      gst_caps_unref (caps);
      gst_object_unref (pad);
    }

  /* the iterator is no longer needed */
  gst_iterator_free (it);

  return result;
}

static void
free_properties (AudioProperties *properties __attribute__ ((unused)))
{
  /*
   * FIXME: freeing these strings causes a segfault, most likely because
   * properties->name (the factory long-name returned by
   * gst_element_factory_get_longname) is owned by GStreamer and must not be
   * passed to g_free.  The whole body is therefore disabled for now and the
   * structure is simply leaked.
   */
/*
  if (properties->name != NULL) g_free((gpointer) properties->name);
  if (properties->endianness != NULL) g_free((gpointer) properties->endianness);
  if (properties->channels != NULL) g_free((gpointer) properties->channels);
  if (properties->rate != NULL) g_free((gpointer) properties->rate);
  if (properties->width != NULL) g_free((gpointer) properties->width);
  if (properties->depth != NULL) g_free((gpointer) properties->depth);
  if (properties->layer != NULL) g_free((gpointer) properties->layer);
  if (properties->bitrate != NULL) g_free((gpointer) properties->bitrate);
  if (properties->framed != NULL) g_free((gpointer) properties->framed);

  if (properties != NULL) g_free ((gpointer) properties);
*/
}

static void reset_properties (AudioProperties *properties)
{
  properties->done = FALSE;
  properties->signess = NULL;
  properties->name = NULL;
  properties->endianness = NULL;
  properties->channels = NULL;
  properties->rate = NULL;
  properties->width = NULL;
  properties->depth = NULL;
  properties->layer = NULL;
  properties->bitrate = NULL;
  properties->framed = NULL;
}

static gchar *get_string_property (const GstStructure *structure,
                                   const gchar *property)
{
  int props = 0;
  gchar *result = NULL;

  if (property == NULL)
    {
      return NULL;
    }

  /* we don't need more than this */
  result = (gchar *) g_malloc0 (_GST_MALLOC_SIZE_);
  if (result == NULL)
    {
      /* Houston, we have a problem here... */
      return NULL;
    }

  if (gst_structure_get_int (structure, property, &props))
    {
      g_snprintf (result, _GST_MALLOC_SIZE_, "%d", props);
    }
  else
    {
      g_free ((gpointer) result);
      return NULL;
    }

  return result;
}

static gchar *get_boolean_property (const GstStructure *structure,
                                    const gchar *property)
{
  gchar *result = NULL;
  gboolean props = FALSE;

  result = (gchar *) g_malloc0 (_GST_MALLOC_SIZE_);
  if (result == NULL)
    {
      /* Houston, we have a problem here... */
      return NULL;
    }

  if (gst_structure_get_boolean (structure, property, &props))
    {
      g_snprintf (result, _GST_MALLOC_SIZE_, "%s", (props ? "true" : "false"));
    }
  else
    {
      g_free ((gpointer) result);
      return NULL;
    }

  return result;
}

static gboolean set_string (JNIEnv *env, jobject header, jfieldID fieldID,
                            const gchar *property)
{
  jstring property_string_field = NULL;

  if (property == NULL || header == NULL)
    {
      return JNI_FALSE;
    }

  property_string_field = (*env)->NewStringUTF(env, property);
  if (property_string_field == NULL)
    {
      return JNI_FALSE;
    }

  (*env)->SetObjectField(env, header, fieldID, property_string_field);

  return JNI_TRUE;
}
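
/*
 * Callback for decodebin's "element-added" signal: stores the long name of
 * the decoder element's factory.  Note that the returned string is owned by
 * GStreamer and must not be freed (see the FIXME in free_properties).
 */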
static void element_added (GstBin *bin, GstElement *element, gpointer data)
{
  GstElementFactory *factory;

  factory = gst_element_get_factory (element);
  ((AudioProperties *) data)->name = gst_element_factory_get_longname (factory);
}

/* ***** END: PRIVATE FUNCTIONS IMPLEMENTATION ***** */