}
ev->play_started = 0;
+ ev->pipeline_parsed = 0;
uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
DBG("setting file to '%s'", uri);
ev = (Emotion_Gstreamer_Video *)video;
if (!ev->pipeline) return ;
- gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
+ if (ev->pipeline_parsed)
+ gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
ev->play = 1;
ev->play_started = 1;
}
if (!ev->pipeline) return ;
- gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
+ if (ev->pipeline_parsed)
+ gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
ev->play = 0;
}
case GST_MESSAGE_ASYNC_DONE:
if (!ev->delete_me) _emotion_seek_done(ev->obj);
break;
+ case GST_MESSAGE_STREAM_STATUS:
+ break;
default:
ERR("bus say: %s [%i]",
GST_MESSAGE_SRC_NAME(msg),
case GST_MESSAGE_EOS:
case GST_MESSAGE_TAG:
case GST_MESSAGE_ASYNC_DONE:
+ case GST_MESSAGE_STREAM_STATUS:
send = emotion_gstreamer_message_alloc(ev, msg);
if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
_free_metadata(ev->metadata);
ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
+ ev->pipeline_parsed = EINA_TRUE;
+
em_audio_channel_volume_set(ev, ev->volume);
em_audio_channel_mute_set(ev, ev->audio_mute);
}
_emotion_open_done(ev->obj);
- ev->pipeline_parsed = EINA_TRUE;
return EINA_TRUE;
}
/* Completion callback for the async pipeline-construction Ecore_Thread.
 * Diff note: the old body unconditionally re-parsed the pipeline and then
 * cancelled; the new body does per-thread bookkeeping, honors a playback
 * request made while the pipeline was still being built, and defers full
 * shutdown until the object is safe to destroy. */
static void
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
{
- _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
- _emotion_gstreamer_cancel(data, thread);
+ Emotion_Gstreamer_Video *ev = data;
+
+ /* drop this finished thread from the tracking list */
+ ev->threads = eina_list_remove(ev->threads, thread);
+
+ /* playback was requested while construction was still in flight
+  * (em_play only sets ev->play when pipeline_parsed is false);
+  * start the pipeline now that it is ready.
+  * NOTE(review): ev->pipeline is not NULL-checked here — presumably
+  * guaranteed non-NULL by the time this callback runs; confirm. */
+ if (ev->play)
+ {
+ gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
+ ev->play_started = 1;
+ }
+
+ /* If the object was deleted while work was pending, tear everything
+  * down once the last thread has finished and no buffers remain in
+  * transit (assumes ev->in/ev->out count frames in flight — TODO
+  * confirm against the frame callbacks); otherwise finish the open by
+  * parsing the now-complete pipeline. */
+ if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
+ em_shutdown(ev);
+ else
+ _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
}
GstElement *