gtk_object_set(GTK_OBJECT(audio_play),"mute",mute,NULL);
}
+/* GTK "delete_event" handler: stops the running pipeline before the
+ * window is closed.  Returning FALSE lets GTK proceed with the default
+ * destroy handling (which triggers destroy() below).
+ * NOTE(review): in this historical GStreamer API, passing the bitwise
+ * complement (~GST_STATE_PLAYING / ~GST_STATE_RUNNING) appears to CLEAR
+ * those state flags rather than set a state -- confirm against the
+ * GStreamer version this patch targets. */
+gint delete_event(GtkWidget *widget, GdkEvent *event, gpointer data)
+{
+ /* Drop the GDK lock while tearing down the pipeline -- presumably the
+  * streaming threads also take it and holding it here would deadlock;
+  * verify against the threading setup in the full file. */
+ gdk_threads_leave();
+ g_print("setting to ~PLAYING state\n");
+ gst_element_set_state(GST_ELEMENT(pipeline),~GST_STATE_PLAYING);
+ g_print("setting to ~RUNNING state\n");
+ gst_element_set_state(GST_ELEMENT(pipeline),~GST_STATE_RUNNING);
+ gdk_threads_enter();
+ return FALSE;
+}
+
+/* GTK "destroy" handler: quits the GTK main loop once the window is gone.
+ * The pipeline passed as user data (see the signal connection) is
+ * currently unused here -- pipeline shutdown happens in delete_event(). */
+void destroy(GtkWidget *widget, gpointer data)
+{
+
+ gtk_main_quit();
+}
+
void gstplay_parse_state_changed(GstElement *element, gint state, gpointer data)
{
printf("gstplay: element \"%s\" state changed %d\n", gst_element_get_name(element), state);
g_return_val_if_fail(video_render_thread != NULL, -1);
show = gst_elementfactory_make("videosink","show");
g_return_val_if_fail(show != NULL, -1);
- gtk_object_set(GTK_OBJECT(show),"xv_enabled",FALSE,NULL);
+ //gtk_object_set(GTK_OBJECT(show),"xv_enabled",FALSE,NULL);
window1 = create_window1 (gst_util_get_widget_arg(GTK_OBJECT(show),"widget"));
gtk_widget_show (window1);
+ gtk_signal_connect(GTK_OBJECT(window1),"delete_event",
+ GTK_SIGNAL_FUNC(delete_event),NULL);
+ gtk_signal_connect(GTK_OBJECT(window1),"destroy",
+ GTK_SIGNAL_FUNC(destroy),pipeline);
+
gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
GTK_SIGNAL_FUNC(frame_displayed),NULL);
gst_bin_add(GST_BIN(video_render_thread),GST_ELEMENT(show));
-#define BUFFER 15
+#define BUFFER 20
#define VIDEO_DECODER "mpeg2play"
#ifdef HAVE_CONFIG_H
extern gboolean _gst_plugin_spew;
-extern GstElement *show, *audio_play;
+extern GstElement *video_render_queue, *audio_render_queue;
/* "new_pad" signal handler for the MPEG2 system-stream parser.
 * Each time the demuxer exposes a new elementary-stream pad this builds
 * the matching decode chain (AC3 audio, MPEG audio, or MPEG video) inside
 * its own thread bin and sets it RUNNING.
 * NOTE(review): this is a diff hunk -- several elements referenced below
 * (parse_audio, decode, audio_thread, audio_queue, audio_render_queue,
 * video_render_queue) and some continuation lines (e.g. the truncated
 * gst_element_add_ghost_pad() call) come from context the patch omits;
 * verify these comments against the full file. */
void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
{
 // connect to audio pad
 //if (0) {
 /* DVD "private_stream_1.0" pads carry AC3 audio.  This patch drops the
  * per-branch audio_play sink and routes into a shared
  * audio_render_queue instead. */
- if (strncmp(gst_pad_get_name(pad), "private_stream_1.0", 18) == 0 && audio_play) {
+ if (strncmp(gst_pad_get_name(pad), "private_stream_1.0", 18) == 0) {
  gst_plugin_load("ac3parse");
  gst_plugin_load("ac3dec");
 // construct internal pipeline elements
  g_return_if_fail(audio_thread != NULL);
  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
- gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_play));
 // set up pad connections
  gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
  gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
   gst_element_get_pad(decode,"sink"));
  gst_pad_connect(gst_element_get_pad(decode,"src"),
-  gst_element_get_pad(audio_play,"sink"));
+  gst_element_get_pad(audio_render_queue,"sink"));
 // construct queue and connect everything in the main pipeline
  audio_queue = gst_elementfactory_make("queue","audio_queue");
  gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
  g_print("setting to RUNNING state\n");
  gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_RUNNING);
  /* The patch removes the per-branch switch to PLAYING -- presumably so
   * all branches are started together elsewhere; confirm. */
- g_print("setting to PLAYING state\n");
- gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_PLAYING);
 }
 // connect to audio pad
 //if (0) {
 /* "audio_NN" pads carry MPEG audio: same wiring as the AC3 branch but
  * using the mp3parse/mpg123 plugins. */
- if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0 && audio_play) {
+ if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0) {
  gst_plugin_load("mp3parse");
  gst_plugin_load("mpg123");
 // construct internal pipeline elements
  g_return_if_fail(audio_thread != NULL);
  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
- gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_play));
 // set up pad connections
  gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
  gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
   gst_element_get_pad(decode,"sink"));
  gst_pad_connect(gst_element_get_pad(decode,"src"),
-  gst_element_get_pad(audio_play,"sink"));
+  gst_element_get_pad(audio_render_queue,"sink"));
 // construct queue and connect everything in the main pipeline
  audio_queue = gst_elementfactory_make("queue","audio_queue");
  gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
  g_print("setting to RUNNING state\n");
  gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_RUNNING);
- g_print("setting to PLAYING state\n");
- gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_PLAYING);
 } else if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
 //} else if (0) {
  /* Video pads: hand off to the video-thread setup helper, now feeding
   * the shared video_render_queue rather than the sink element directly. */
- mpeg2_setup_video_thread(pad, show, pipeline);
+ mpeg2_setup_video_thread(pad, video_render_queue, pipeline);
 }
}
g_return_if_fail(video_thread != NULL);
gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(parse_video));
gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(decode_video));
- gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(show));
// set up pad connections
gst_element_add_ghost_pad(GST_ELEMENT(video_thread),
gst_pad_connect(gst_element_get_pad(parse_video,"src"),
gst_element_get_pad(decode_video,"sink"));
gst_pad_connect(gst_element_get_pad(decode_video,"src"),
- gst_element_get_pad(show,"sink"));
+ gst_element_get_pad(video_render_queue,"sink"));
// construct queue and connect everything in the main pipeline
video_queue = gst_elementfactory_make("queue","video_queue");
gtk_object_set(GTK_OBJECT(video_thread),"create_thread",TRUE,NULL);
g_print("setting to RUNNING state\n");
gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_RUNNING);
- g_print("setting to PLAYING state\n");
- gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_PLAYING);
g_print("\n");
}