emotion: first try with Xv output.
author    cedric <cedric>
Mon, 3 Oct 2011 03:30:39 +0000 (03:30 +0000)
committer cedric <cedric@7cbeb6ba-43b4-40fd-8cce-4c39aea84d33>
Mon, 3 Oct 2011 03:30:39 +0000 (03:30 +0000)
git-svn-id: http://svn.enlightenment.org/svn/e/trunk/emotion@63759 7cbeb6ba-43b4-40fd-8cce-4c39aea84d33

configure.ac
src/bin/emotion_test_main.c
src/lib/emotion_private.h
src/lib/emotion_smart.c
src/modules/generic/emotion_generic.c
src/modules/gstreamer/emotion_alloc.c
src/modules/gstreamer/emotion_gstreamer.c
src/modules/gstreamer/emotion_gstreamer.h
src/modules/gstreamer/emotion_sink.c
src/modules/xine/emotion_xine.c
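
This commit adds an optional priority flag to the Emotion video module interface (priority_set/priority_get, exposed through emotion_object_priority_set()/emotion_object_priority_get() in emotion_smart.c) and teaches the GStreamer backend to build a tee-based sink bin that can feed both the evas-side emotion-sink and an Xv overlay (xvimagesink in an Ecore_X subwindow), falling back to composited rendering when the Xv branch errors out. As a rough illustration only, a minimal sketch of how an application might drive the new public calls; it assumes the two prototypes are exported from Emotion.h, which this diff does not touch.

/* Hypothetical usage sketch, not part of this commit: exercise the new
 * priority API from a plain Ecore_Evas application.  Assumes
 * emotion_object_priority_set()/emotion_object_priority_get() are declared
 * in Emotion.h. */
#include <stdio.h>
#include <Ecore.h>
#include <Ecore_Evas.h>
#include <Evas.h>
#include <Emotion.h>

int
main(int argc, char **argv)
{
   Ecore_Evas *ee;
   Evas_Object *em;

   if (argc < 2) return 1;
   if (!ecore_evas_init()) return 1;

   ee = ecore_evas_new(NULL, 0, 0, 640, 480, NULL);
   if (!ee) goto shutdown;

   em = emotion_object_add(ecore_evas_get(ee));
   emotion_object_init(em, "gstreamer");

   /* Request the high-priority (Xv overlay) path added by this commit; the
    * gstreamer module falls back to composited rendering on error. */
   emotion_object_priority_set(em, EINA_TRUE);

   emotion_object_file_set(em, argv[1]);
   evas_object_resize(em, 640, 480);
   evas_object_show(em);
   emotion_object_play_set(em, EINA_TRUE);

   ecore_evas_show(ee);
   ecore_main_loop_begin();

   /* Query whatever state the backend currently reports. */
   printf("priority state: %i\n", (int)emotion_object_priority_get(em));

   ecore_evas_free(ee);
 shutdown:
   ecore_evas_shutdown();
   return 0;
}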

diff --git a/configure.ac b/configure.ac
index 08e1fa4..bee5f81 100644
@@ -114,7 +114,7 @@ if test "x${have_emotion_test}" = "xyes" ; then
    PKG_CHECK_MODULES(EMOTION_BIN, [eina >= 1.0.0 evas >= 1.0.0 ecore >= 1.0.0 ecore-evas >= 1.0.0 edje >= 1.0.0])
 fi
 
-PKG_CHECK_MODULES(ECORE_X, [ecore-x >= 1.0.0], [have_ecore_x="yes"], [have_ecore_x="no"])
+PKG_CHECK_MODULES(ECORE_X, [ecore-x >= 1.0.0 ecore-evas >= 1.0.0], [have_ecore_x="yes"], [have_ecore_x="no"])
 
 PKG_CHECK_MODULES(ECORE_FB, [ecore-fb >= 1.0.0], [have_ecore_fb="yes"], [have_ecore_fb="no"])
 
diff --git a/src/bin/emotion_test_main.c b/src/bin/emotion_test_main.c
index e64295f..966015e 100644
@@ -723,6 +723,8 @@ main(int argc, char **argv)
    if (!ecore_evas)
      goto shutdown_edje;
 
+   ecore_evas_alpha_set(ecore_evas, EINA_TRUE);
+
    ecore_evas_callback_delete_request_set(ecore_evas, main_delete_request);
    ecore_evas_callback_resize_set(ecore_evas, main_resize);
    ecore_evas_title_set(ecore_evas, "Evas Media Test Program");
diff --git a/src/lib/emotion_private.h b/src/lib/emotion_private.h
index 0961fed..1d43398 100644
@@ -110,6 +110,8 @@ struct _Emotion_Video_Module
    double         (*speed_get) (void *ef);
    int            (*eject) (void *ef);
    const char *   (*meta_get) (void *ef, int meta);
+   void           (*priority_set) (void *ef, Eina_Bool priority);
+   Eina_Bool      (*priority_get) (void *ef);
 
    Eina_Emotion_Plugins *plugin;
 };
diff --git a/src/lib/emotion_smart.c b/src/lib/emotion_smart.c
index 3adc02b..d3f32c9 100644
@@ -1269,6 +1269,32 @@ emotion_object_vis_supported(const Evas_Object *obj, Emotion_Vis visualization)
    return sd->module->vis_supported(sd->video, visualization);
 }
 
+EAPI void
+emotion_object_priority_set(Evas_Object *obj, Eina_Bool priority)
+{
+   Smart_Data *sd;
+
+   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
+   fprintf(stderr, "priority set %p\n", sd->module);
+   if (!sd->module) return ;
+   if (!sd->video) return ;
+   if (!sd->module->priority_set) return ;
+   fprintf(stderr, "calling\n");
+   sd->module->priority_set(sd->video, priority);
+}
+
+EAPI Eina_Bool
+emotion_object_priority_get(const Evas_Object *obj)
+{
+   Smart_Data *sd;
+
+   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
+   if (!sd->module) return EINA_FALSE;
+   if (!sd->video) return EINA_FALSE;
+   if (!sd->module->priority_get) return EINA_FALSE;
+   return sd->module->priority_get(sd->video);
+}
+
 #ifdef HAVE_EIO
 static void
 _eio_load_xattr_cleanup(Smart_Data *sd, Eio_File *handler)
diff --git a/src/modules/generic/emotion_generic.c b/src/modules/generic/emotion_generic.c
index 2b1be32..6f497d4 100644
@@ -1714,6 +1714,8 @@ static Emotion_Video_Module em_module =
    em_speed_get, /* speed_get */
    em_eject, /* eject */
    em_meta_get, /* meta_get */
+   NULL, /* priority_set */
+   NULL, /* priority_get */
    NULL /* handle */
 };
 
diff --git a/src/modules/gstreamer/emotion_alloc.c b/src/modules/gstreamer/emotion_alloc.c
index 1f55904..aad5430 100644
@@ -22,6 +22,7 @@ emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
    send->sink = sink;
    send->frame = gst_buffer_ref(buffer);
    send->preroll = preroll;
+   send->force = EINA_FALSE;
    sink->ev->out++;
    send->ev = sink->ev;
 
diff --git a/src/modules/gstreamer/emotion_gstreamer.c b/src/modules/gstreamer/emotion_gstreamer.c
index aeff464..d6b0b07 100644
@@ -164,6 +164,10 @@ static int            em_eject                    (void             *video);
 static const char    *em_meta_get                 (void             *video,
                                                    int               meta);
 
+static void           em_priority_set             (void             *video,
+                                                  Eina_Bool         pri);
+static Eina_Bool      em_priority_get             (void             *video);
+
 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
                                     GstMessage *message,
                                     gpointer data);
@@ -228,6 +232,8 @@ static Emotion_Video_Module em_module =
    em_speed_get, /* speed_get */
    em_eject, /* eject */
    em_meta_get, /* meta_get */
+   em_priority_set, /* priority_set */
+   em_priority_get, /* priority_get */
    NULL /* handle */
 };
 
@@ -345,32 +351,16 @@ failure:
    return 0;
 }
 
-int
-em_shutdown(void *video)
+static void
+em_cleanup(Emotion_Gstreamer_Video *ev)
 {
-   Emotion_Gstreamer_Video *ev;
    Emotion_Audio_Stream *astream;
    Emotion_Video_Stream *vstream;
 
-   ev = (Emotion_Gstreamer_Video *)video;
-   if (!ev)
-     return 0;
-
-   if (ev->threads)
-     {
-        Ecore_Thread *t;
-
-        EINA_LIST_FREE(ev->threads, t)
-          ecore_thread_cancel(t);
-
-        ev->delete_me = EINA_TRUE;
-        return 1;
-     }
-
-   if (ev->in != ev->out)
+   if (ev->send)
      {
-        ev->delete_me = EINA_TRUE;
-        return 1;
+        emotion_gstreamer_buffer_free(ev->send);
+        ev->send = NULL;
      }
 
    if (ev->eos_bus)
@@ -379,24 +369,41 @@ em_shutdown(void *video)
         ev->eos_bus = NULL;
      }
 
+   if (ev->metadata)
+     {
+        _free_metadata(ev->metadata);
+        ev->metadata = NULL;
+     }
+
    if (ev->last_buffer)
      {
         gst_buffer_unref(ev->last_buffer);
         ev->last_buffer = NULL;
      }
 
+   if (!ev->stream)
+     {
+        evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
+        ev->stream = EINA_TRUE;
+     }
+
    if (ev->pipeline)
      {
        gstreamer_video_sink_new(ev, ev->obj, NULL);
 
-       g_object_set(G_OBJECT(ev->sink), "ev", NULL, NULL);
-       g_object_set(G_OBJECT(ev->sink), "evas-object", NULL, NULL);
+       g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
+       g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
        gst_object_unref(ev->pipeline);
 
        ev->pipeline = NULL;
        ev->sink = NULL;
 
+       if (ev->teepad) gst_object_unref(ev->teepad);
+       ev->teepad = NULL;
+       if (ev->xvpad) gst_object_unref(ev->xvpad);
+       ev->xvpad = NULL;
+
        if (ev->win) ecore_x_window_free(ev->win);
        ev->win = 0;
      }
@@ -405,6 +412,35 @@ em_shutdown(void *video)
      free(astream);
    EINA_LIST_FREE(ev->video_streams, vstream)
      free(vstream);
+}
+
+int
+em_shutdown(void *video)
+{
+   Emotion_Gstreamer_Video *ev;
+
+   ev = (Emotion_Gstreamer_Video *)video;
+   if (!ev)
+     return 0;
+
+   if (ev->threads)
+     {
+        Ecore_Thread *t;
+
+        EINA_LIST_FREE(ev->threads, t)
+          ecore_thread_cancel(t);
+
+        ev->delete_me = EINA_TRUE;
+        return EINA_FALSE;
+     }
+
+   if (ev->in != ev->out)
+     {
+        ev->delete_me = EINA_TRUE;
+        return EINA_FALSE;
+     }
+
+   em_cleanup(ev);
 
    free(ev);
 
@@ -479,19 +515,11 @@ static void
 em_file_close(void *video)
 {
    Emotion_Gstreamer_Video *ev;
-   Emotion_Audio_Stream *astream;
-   Emotion_Video_Stream *vstream;
 
    ev = (Emotion_Gstreamer_Video *)video;
    if (!ev)
      return;
 
-   if (ev->eos_bus)
-     {
-        gst_object_unref(GST_OBJECT(ev->eos_bus));
-        ev->eos_bus = NULL;
-     }
-
    if (ev->threads)
      {
         Ecore_Thread *t;
@@ -500,32 +528,10 @@ em_file_close(void *video)
           ecore_thread_cancel(t);
      }
 
-   if (ev->pipeline)
-     {
-        gstreamer_video_sink_new(ev, ev->obj, NULL);
-
-        g_object_set(G_OBJECT(ev->sink), "ev", NULL, NULL);
-        g_object_set(G_OBJECT(ev->sink), "evas-object", NULL, NULL);
-        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
-        gst_object_unref(ev->pipeline);
-        ev->pipeline = NULL;
-        ev->sink = NULL;
-     }
+   em_cleanup(ev);
 
-   /* we clear the stream lists */
-   EINA_LIST_FREE(ev->audio_streams, astream)
-     free(astream);
-   EINA_LIST_FREE(ev->video_streams, vstream)
-     free(vstream);
    ev->pipeline_parsed = EINA_FALSE;
    ev->play_started = 0;
-
-   /* shutdown eos */
-   if (ev->metadata)
-     {
-        _free_metadata(ev->metadata);
-        ev->metadata = NULL;
-     }
 }
 
 static void
@@ -1214,6 +1220,24 @@ em_meta_get(void *video, int meta)
    return str;
 }
 
+static void
+em_priority_set(void *video, Eina_Bool pri)
+{
+   Emotion_Gstreamer_Video *ev;
+
+   ev = video;
+   ev->priority = pri;
+}
+
+static Eina_Bool
+em_priority_get(void *video)
+{
+   Emotion_Gstreamer_Video *ev;
+
+   ev = video;
+   return ev->stream;
+}
+
 static Eina_Bool
 module_open(Evas_Object           *obj,
             const Emotion_Video_Module **module,
@@ -1429,6 +1453,30 @@ _free_metadata(Emotion_Gstreamer_Metadata *m)
   free(m);
 }
 
+static Eina_Bool
+_em_restart_stream(void *data)
+{
+   Emotion_Gstreamer_Video *ev;
+
+   ev = data;
+
+   ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);
+
+   if (ev->pipeline)
+     {
+        ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
+        if (!ev->eos_bus)
+          {
+             ERR("could not get the bus");
+             return EINA_FALSE;
+          }
+
+        gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
+     }
+
+   return ECORE_CALLBACK_CANCEL;
+}
+
 static void
 _eos_main_fct(void *data)
 {
@@ -1475,6 +1523,14 @@ _eos_main_fct(void *data)
          break;
       case GST_MESSAGE_STREAM_STATUS:
          break;
+      case GST_MESSAGE_ERROR:
+         ERR("Switching back to composited rendering.");
+         em_cleanup(ev);
+
+         ev->priority = EINA_FALSE;
+
+         ecore_idler_add(_em_restart_stream, ev);
+         break;
       default:
          ERR("bus say: %s [%i - %s]",
              GST_MESSAGE_SRC_NAME(msg),
@@ -1498,6 +1554,10 @@ _eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
       case GST_MESSAGE_TAG:
       case GST_MESSAGE_ASYNC_DONE:
       case GST_MESSAGE_STREAM_STATUS:
+         INF("bus say: %s [%i - %s]",
+             GST_MESSAGE_SRC_NAME(msg),
+             GST_MESSAGE_TYPE(msg),
+            GST_MESSAGE_TYPE_NAME(msg));
          send = emotion_gstreamer_message_alloc(ev, msg);
 
          if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
@@ -1508,7 +1568,7 @@ _eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
         {
            GstState old_state, new_state;
 
-           gst_message_parse_state_changed (msg, &old_state, &new_state, NULL);
+           gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
            INF("Element %s changed state from %s to %s.",
                GST_OBJECT_NAME(msg->src),
                gst_element_state_get_name(old_state),
@@ -1521,10 +1581,17 @@ _eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
            gchar *debug;
 
           gst_message_parse_error(msg, &error, &debug);
-          ERR("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
+          ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
           ERR("Debugging info: %s", (debug) ? debug : "none");
           g_error_free(error);
           g_free(debug);
+
+           if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
+             {
+                send = emotion_gstreamer_message_alloc(ev, msg);
+
+                if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
+             }
           break;
        }
       case GST_MESSAGE_WARNING:
diff --git a/src/modules/gstreamer/emotion_gstreamer.h b/src/modules/gstreamer/emotion_gstreamer.h
index fbcde1e..dd676c0 100644
 
 #ifdef HAVE_ECORE_X
 # include <Ecore_X.h>
+# include <Ecore_Evas.h>
 # ifdef HAVE_XOVERLAY_H
 #  include <gst/interfaces/xoverlay.h>
 # endif
 #endif
 
-
 #define HTTP_STREAM 0
 #define RTSP_STREAM 1
 #include <glib.h>
@@ -77,6 +77,10 @@ struct _Emotion_Gstreamer_Video
    /* Gstreamer elements */
    GstElement       *pipeline;
    GstElement       *sink;
+   GstElement       *esink;
+   GstElement       *tee;
+   GstPad           *teepad;
+   GstPad           *xvpad;
    Eina_List        *threads;
 
    /* eos */
@@ -109,6 +113,12 @@ struct _Emotion_Gstreamer_Video
    Ecore_X_Window    win;
 #endif
 
+   const char       *uri;
+
+   Emotion_Gstreamer_Buffer *send;
+
+   EvasVideoSinkPrivate *sink_data;
+
    Emotion_Vis       vis;
 
    int               in;
@@ -128,6 +138,9 @@ struct _Emotion_Gstreamer_Video
    Eina_Bool         delete_me    : 1;
    Eina_Bool         samsung      : 1;
    Eina_Bool         kill_buffer  : 1;
+   Eina_Bool         linked       : 1;
+   Eina_Bool         stream       : 1;
+   Eina_Bool         priority     : 1;
 };
 
 struct _EvasVideoSink {
@@ -178,6 +191,7 @@ struct _Emotion_Gstreamer_Buffer
    GstBuffer *frame;
 
    Eina_Bool preroll : 1;
+   Eina_Bool force : 1;
 };
 
 struct _Emotion_Gstreamer_Message
diff --git a/src/modules/gstreamer/emotion_sink.c b/src/modules/gstreamer/emotion_sink.c
index 3634c00..2f4ef5f 100644
@@ -674,6 +674,14 @@ evas_video_sink_samsung_main_render(void *data)
    if (!priv || !priv->o || priv->unlocked)
      goto exit_point;
 
+   if (!send->ev->stream && !send->force)
+     {
+        if (send->ev->send)
+          emotion_gstreamer_buffer_free(send->ev->send);
+        send->ev->send = send;
+        goto exit_stream;
+     }
+
    _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
 
    /* Getting stride to compute the right size and then fill the object properly */
@@ -745,6 +753,7 @@ evas_video_sink_samsung_main_render(void *data)
  exit_point:
    emotion_gstreamer_buffer_free(send);
 
+ exit_stream:
    if (preroll || !priv->o) return ;
 
    if (!priv->unlocked)
@@ -776,6 +785,14 @@ evas_video_sink_main_render(void *data)
    if (!priv || !priv->o || priv->unlocked)
      goto exit_point;
 
+   if (!ev->stream && !send->force)
+     {
+        if (ev->send)
+          emotion_gstreamer_buffer_free(ev->send);
+        ev->send = send;
+        goto exit_stream;
+     }
+
    _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
 
    INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
@@ -820,6 +837,7 @@ evas_video_sink_main_render(void *data)
  exit_point:
    emotion_gstreamer_buffer_free(send);
 
+ exit_stream:
    if (preroll || !priv->o) return ;
 
    if (!priv->unlocked)
@@ -941,7 +959,7 @@ _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
 
    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
 
-   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
+   if (ev->in == ev->out && ev->delete_me)
      em_shutdown(ev);
 }
 
@@ -960,32 +978,74 @@ _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
 
    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
 
-   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
+   if (ev->in == ev->out && ev->delete_me)
      em_shutdown(ev);
    else
      _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
 }
 
 static void
-_on_post_clear(void *data, Evas *e __UNUSED__, void *event_info __UNUSED__)
+_on_resize_fill(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
 {
    Emotion_Gstreamer_Video *ev = data;
 
-   if (!ev->kill_buffer) return ;
+   if (ev->samsung)
+     evas_object_image_fill_set(obj, 0, 0, ev->fill.width, ev->fill.height);
+}
 
-#if 0
-   if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
-   ev->last_buffer = NULL;
-#endif
+static void
+_video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
+              Evas_Coord w, Evas_Coord h)
+{
+   Emotion_Gstreamer_Video *ev = data;
+
+   ecore_x_window_resize(ev->win, w, h);
+   fprintf(stderr, "resize: %i, %i\n", w, h);
 }
 
 static void
-_on_resize_fill(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
+_video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
+            Evas_Coord x, Evas_Coord y)
 {
    Emotion_Gstreamer_Video *ev = data;
 
-   if (ev->samsung)
-     evas_object_image_fill_set(obj, 0, 0, ev->fill.width, ev->fill.height);
+   ecore_x_window_move(ev->win, x, y);
+}
+
+static void
+_video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
+{
+   Emotion_Gstreamer_Video *ev = data;
+
+   fprintf(stderr, "show xwin %i\n", ev->win);
+
+   ecore_x_window_show(ev->win);
+   gst_pad_link(ev->teepad, ev->xvpad);
+   ev->linked = EINA_TRUE;
+}
+
+static void
+_video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
+{
+   Emotion_Gstreamer_Video *ev = data;
+
+   fprintf(stderr, "hide xwin: %i\n", ev->win);
+
+   ecore_x_window_hide(ev->win);
+   gst_pad_unlink(ev->teepad, ev->xvpad);
+   ev->linked = EINA_FALSE;
+}
+
+static void
+_video_update_pixels(void *data, Evas_Object *obj, const Evas_Video_Surface *surface __UNUSED__)
+{
+   Emotion_Gstreamer_Video *ev = data;
+
+   if (!ev->send) return ;
+
+   ev->send->force = EINA_TRUE;
+   evas_video_sink_main_render(ev->send);
+   ev->send = NULL;
 }
 
 GstElement *
@@ -994,8 +1054,14 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                         const char *uri)
 {
    GstElement *playbin;
-   GstElement *sink = NULL;
+   GstElement *bin = NULL;
+   GstElement *esink = NULL;
+   GstElement *xvsink = NULL;
+   GstElement *tee = NULL;
+   GstElement *queue = NULL;
    Evas_Object *obj;
+   GstPad *pad;
+   GstPad *teepad;
    int flags;
 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
    const char *engine;
@@ -1010,7 +1076,6 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
      }
 
    evas_object_event_callback_del_full(obj, EVAS_CALLBACK_RESIZE, _on_resize_fill, ev);
-   evas_event_callback_del_full(evas_object_evas_get(obj), EVAS_CALLBACK_RENDER_FLUSH_POST, _on_post_clear, ev);
 
    if (!uri)
      return NULL;
@@ -1022,26 +1087,47 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
         return NULL;
      }
 
+   bin = gst_bin_new(NULL);
+   if (!bin)
+     {
+       ERR("Unable to create GstBin !");
+       goto unref_pipeline;
+     }
+
+   tee = gst_element_factory_make("tee", NULL);
+   if (!tee)
+     {
+       ERR("Unable to create 'tee' GstElement.");
+       goto unref_pipeline;
+     }
+
+   fprintf(stderr, "priority: %i\n", ev->priority);
+
 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
    engines = evas_render_method_list();
 
    engine = eina_list_nth(engines, evas_output_method_get(evas_object_evas_get(obj)) - 1);
 
-   if (engine && strstr(engine, "_x11") != NULL)
+   if (ev->priority && engine && strstr(engine, "_x11") != NULL)
      {
-#if 0
+        Ecore_Evas *ee;
         Evas_Coord x, y, w, h;
        Ecore_X_Window win;
 
        evas_object_geometry_get(obj, &x, &y, &w, &h);
 
-       win = ecore_x_window_new(0, x, y, w, h);
+        ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
+
+        if (w < 1) w = 1;
+        if (h < 1) h = 1;
+
+       win = ecore_x_window_new((Ecore_X_Window) ecore_evas_window_get(ee), x, y, w, h);
        if (win)
          {
-            sink = gst_element_factory_make("xvimagesink", NULL);
-            if (sink)
+             xvsink = gst_element_factory_make("xvimagesink", NULL);
+            if (xvsink)
               {
-                 gst_x_overlay_set_window_handle(GST_X_OVERLAY(sink), win);
+                 gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
                  ev->win = win;
               }
             else
@@ -1049,27 +1135,68 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                  ecore_x_window_free(win);
               }
          }
-#endif
      }
    evas_render_method_list_free(engines);
 #else
 # warning "no ecore_x or xoverlay"
 #endif
-   if (!sink)
+
+   esink = gst_element_factory_make("emotion-sink", "sink");
+   if (!esink)
      {
-        sink = gst_element_factory_make("emotion-sink", "sink");
-       if (!sink)
-         {
-            ERR("Unable to create 'emotion-sink' GstElement.");
-            goto unref_pipeline;
-         }
+        ERR("Unable to create 'emotion-sink' GstElement.");
+        goto unref_pipeline;
+     }
+
+   g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
+   g_object_set(G_OBJECT(esink), "ev", ev, NULL);
 
-       g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
-       g_object_set(G_OBJECT(sink), "ev", ev, NULL);
+   evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
+
+   /* We need queue to force each video sink to be in its own thread */
+   queue = gst_element_factory_make("queue", NULL);
+   if (!queue)
+     {
+        ERR("Unable to create 'queue' GstElement.");
+        goto unref_pipeline;
+     }
+
+   gst_bin_add_many(GST_BIN(bin), tee, queue, esink, xvsink, NULL);
+   gst_element_link_many(queue, esink, NULL);
+
+   /* link both sink to GstTee */
+   pad = gst_element_get_pad(queue, "sink");
+   teepad = gst_element_get_request_pad(tee, "src%d");
+   gst_pad_link(teepad, pad);
+   gst_object_unref(pad);
+   gst_object_unref(teepad);
+
+   if (xvsink)
+     {
+        queue = gst_element_factory_make("queue", NULL);
+        if (queue)
+          {
+           gst_bin_add_many(GST_BIN(bin), queue, NULL);
+           gst_element_link_many(queue, xvsink, NULL);
 
-       evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
+           pad = gst_element_get_pad(queue, "sink");
+           teepad = gst_element_get_request_pad(tee, "src%d");
+           gst_pad_link(teepad, pad);
+
+           ev->teepad = teepad;
+           ev->xvpad = pad;
+         }
+       else
+         {
+           gst_object_unref(xvsink);
+           xvsink = NULL;
+         }
      }
 
+   teepad = gst_element_get_pad(tee, "sink");
+   gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
+   gst_object_unref(teepad);
+
 #define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
 #define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
 #define GST_PLAY_FLAG_AUDIO         (1 << 1)
@@ -1077,15 +1204,37 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
 
    g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
    g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
-   g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
+   g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
    g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
 
    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
    evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _on_resize_fill, ev);
-   evas_event_callback_add(evas_object_evas_get(obj), EVAS_CALLBACK_RENDER_FLUSH_POST, _on_post_clear, ev);
 
+   ev->stream = EINA_TRUE;
+
+   if (xvsink)
+     {
+        Evas_Video_Surface video;
+
+        video.version = EVAS_VIDEO_SURFACE_VERSION;
+        video.data = ev;
+        video.parent = NULL;
+        video.move = _video_move;
+        video.resize = _video_resize;
+        video.show = _video_show;
+        video.hide = _video_hide;
+        video.update_pixels = _video_update_pixels;
+
+        evas_object_image_video_surface_set(obj, &video);
+        ev->stream = EINA_FALSE;
+     }
+
+   eina_stringshare_replace(&ev->uri, uri);
+   ev->linked = EINA_TRUE;
    ev->pipeline = playbin;
-   ev->sink = sink;
+   ev->sink = bin;
+   ev->esink = esink;
+   ev->tee = tee;
    ev->threads = eina_list_append(ev->threads,
                                   ecore_thread_run(_emotion_gstreamer_pause,
                                                    _emotion_gstreamer_end,
@@ -1099,6 +1248,10 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
    return playbin;
 
  unref_pipeline:
+   gst_object_unref(xvsink);
+   gst_object_unref(esink);
+   gst_object_unref(tee);
+   gst_object_unref(bin);
    gst_object_unref(playbin);
    return NULL;
 }
diff --git a/src/modules/xine/emotion_xine.c b/src/modules/xine/emotion_xine.c
index 55ac337..cf2da59 100644
@@ -1556,6 +1556,8 @@ static Emotion_Video_Module em_module =
      em_speed_get, /* speed_get */
      em_eject, /* eject */
      em_meta_get, /* meta_get */
+     NULL, /* priority_set */
+     NULL, /* priority_get */
      NULL /* handle */
 };
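
For reference, a standalone sketch (again not part of the commit) of the tee topology that gstreamer_video_sink_new() now assembles: the decoded video enters a tee, one branch runs through a queue into the evas-side sink, the optional second branch runs through another queue into xvimagesink, and a ghost "sink" pad exposes the whole bin to playbin2 as a single video sink. The sketch uses the GStreamer 0.10 API that this code targets, with videotestsrc and fakesink standing in for playbin2 and emotion-sink, which are not available outside the module.

/* Standalone GStreamer 0.10 sketch of the tee-based sink topology built in
 * gstreamer_video_sink_new().  videotestsrc and fakesink are stand-ins for
 * playbin2 and the internal "emotion-sink" element. */
#include <gst/gst.h>

int
main(int argc, char **argv)
{
   GstElement *pipeline, *src, *tee, *q_evas, *evas_sink, *q_xv, *xvsink;
   GstPad *teepad, *qpad;
   GMainLoop *loop;

   gst_init(&argc, &argv);
   loop = g_main_loop_new(NULL, FALSE);

   pipeline  = gst_pipeline_new("xv-tee-sketch");
   src       = gst_element_factory_make("videotestsrc", NULL);
   tee       = gst_element_factory_make("tee", NULL);
   q_evas    = gst_element_factory_make("queue", NULL);    /* own thread for the evas branch */
   evas_sink = gst_element_factory_make("fakesink", NULL); /* stand-in for emotion-sink */
   q_xv      = gst_element_factory_make("queue", NULL);    /* own thread for the Xv branch */
   /* Without gst_x_overlay_set_window_handle(), xvimagesink opens its own
    * X window; the commit instead hands it an Ecore_X subwindow. */
   xvsink    = gst_element_factory_make("xvimagesink", NULL);

   if (!pipeline || !src || !tee || !q_evas || !evas_sink || !q_xv || !xvsink)
     return 1;

   gst_bin_add_many(GST_BIN(pipeline), src, tee, q_evas, evas_sink, q_xv, xvsink, NULL);
   gst_element_link(src, tee);
   gst_element_link(q_evas, evas_sink);
   gst_element_link(q_xv, xvsink);

   /* Link each branch to a requested tee src pad by hand. */
   teepad = gst_element_get_request_pad(tee, "src%d");
   qpad   = gst_element_get_static_pad(q_evas, "sink");
   gst_pad_link(teepad, qpad);
   gst_object_unref(qpad);
   gst_object_unref(teepad);

   /* In the commit, these two pads are stored as ev->teepad/ev->xvpad so that
    * _video_show()/_video_hide() can link and unlink the Xv branch on demand;
    * this sketch has no such need and simply drops its references. */
   teepad = gst_element_get_request_pad(tee, "src%d");
   qpad   = gst_element_get_static_pad(q_xv, "sink");
   gst_pad_link(teepad, qpad);
   gst_object_unref(qpad);
   gst_object_unref(teepad);

   gst_element_set_state(pipeline, GST_STATE_PLAYING);
   g_main_loop_run(loop); /* Ctrl+C to quit */

   gst_element_set_state(pipeline, GST_STATE_NULL);
   gst_object_unref(pipeline);
   return 0;
}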