From: cedric
Date: Thu, 5 Jul 2012 12:40:56 +0000 (+0000)
Subject: emotion: let's work around embedded device limitations.
X-Git-Tag: submit/2.0alpha-wayland/20121127.222018~33
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=f33980a4c3faa42fd7ad44b1e0a46725afca98e2;p=profile%2Fivi%2Femotion.git

emotion: let's work around embedded device limitations.

NOTE: I am going to accept more of these workarounds. So if your preferred
device doesn't come with a proper, nice implementation of GStreamer, let's
start working around it in emotion :'(


git-svn-id: http://svn.enlightenment.org/svn/e/trunk/emotion@73340 7cbeb6ba-43b4-40fd-8cce-4c39aea84d33
---
diff --git a/AUTHORS b/AUTHORS
index c683782..c2579a8 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -7,3 +7,4 @@ Rafael Antognolli
 Jérôme Pinot
 Pierre Le Magourou
 Hugo Camboulive
+Sohyun Kim
diff --git a/ChangeLog b/ChangeLog
index 4169f7d..9329acf 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -29,3 +29,6 @@
 2012-07-03  Cedric Bail

 	* Track pending object for proper shutdown.
+2012-06-05  Sohyun Kim
+
+	* Add fimcconvert element to resize and convert color using fimc on device
diff --git a/NEWS b/NEWS
index 5fffba7..fa32d20 100644
--- a/NEWS
+++ b/NEWS
@@ -8,6 +8,7 @@ Additions:
    - Implement SPU switch for generic/vlc.
    - Sync rendering with Ecore_Animator.
    - Track pending object for proper shutdown.
+   - Start handling embedded hardware customization.

 Fixes:
    - build out of tree.
diff --git a/src/modules/gstreamer/emotion_gstreamer.c b/src/modules/gstreamer/emotion_gstreamer.c
index 38abf57..3383ce9 100644
--- a/src/modules/gstreamer/emotion_gstreamer.c
+++ b/src/modules/gstreamer/emotion_gstreamer.c
@@ -422,11 +422,16 @@ em_cleanup(Emotion_Gstreamer_Video *ev)
    ev->pipeline = NULL;
    ev->sink = NULL;

-   if (ev->teepad) gst_object_unref(ev->teepad);
-   ev->teepad = NULL;
+   if (ev->eteepad) gst_object_unref(ev->eteepad);
+   ev->eteepad = NULL;
+   if (ev->xvteepad) gst_object_unref(ev->xvteepad);
+   ev->xvteepad = NULL;
    if (ev->xvpad) gst_object_unref(ev->xvpad);
    ev->xvpad = NULL;

+   ev->src_width = 0;
+   ev->src_height = 0;
+
 #ifdef HAVE_ECORE_X
    fprintf(stderr, "destroying window: %i\n", ev->win);
    if (ev->win) ecore_x_window_free(ev->win);
@@ -1604,6 +1609,58 @@ _em_restart_stream(void *data)
    return ECORE_CALLBACK_CANCEL;
 }

+static Eina_Bool
+_video_size_get(GstElement *elem, int *width, int *height)
+{
+   GstIterator *itr = NULL;
+   GstCaps *caps;
+   GstStructure *str;
+   gpointer pad;
+   Eina_Bool ret = EINA_FALSE;
+
+   itr = gst_element_iterate_src_pads(elem);
+   while (gst_iterator_next(itr, &pad) && !ret)
+     {
+        caps = gst_pad_get_caps(GST_PAD(pad));
+        str = gst_caps_get_structure(caps, 0);
+        if (g_strrstr(gst_structure_get_name(str), "video"))
+          {
+             if (gst_structure_get_int(str, "width", width) && gst_structure_get_int(str, "height", height))
+               ret = EINA_TRUE;
+          }
+        gst_caps_unref(caps);
+        gst_object_unref(pad);
+     }
+   gst_iterator_free(itr);
+
+   return ret;
+}
+
+static void
+_no_more_pads(GstElement *decodebin, gpointer data)
+{
+   GstIterator *itr = NULL;
+   gpointer elem;
+   Emotion_Gstreamer_Video *ev = data;
+
+   itr = gst_bin_iterate_elements(GST_BIN(decodebin));
+   while (gst_iterator_next(itr, &elem))
+     {
+        if (_video_size_get(GST_ELEMENT(elem), &ev->src_width, &ev->src_height))
+          {
+             double ratio;
+
+             ratio = (double)ev->src_width / (double)ev->src_height;
+             _emotion_frame_resize(ev->obj, ev->src_width, ev->src_height, ratio);
+
+             gst_object_unref(elem);
+             break;
+          }
+        gst_object_unref(elem);
+     }
+   gst_iterator_free(itr);
+}
+
 static void
 _eos_main_fct(void *data)
 {
@@ -1650,6 +1707,13 @@ _eos_main_fct(void *data)
         break;
       case GST_MESSAGE_STREAM_STATUS:
         break;
+      case GST_MESSAGE_STATE_CHANGED:
+        if (!ev->delete_me)
+          {
+             if (!g_signal_handlers_disconnect_by_func(msg->src, _no_more_pads, ev))
+               g_signal_connect(msg->src, "no-more-pads", G_CALLBACK(_no_more_pads), ev);
+          }
+        break;
       case GST_MESSAGE_ERROR:
         em_cleanup(ev);
@@ -1704,6 +1768,13 @@ _eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
                 GST_OBJECT_NAME(msg->src),
                 gst_element_state_get_name(old_state),
                 gst_element_state_get_name(new_state));
+
+             if (!strncmp(GST_OBJECT_NAME(msg->src), "decodebin", 9) && !strcmp(gst_element_state_get_name(new_state), "READY"))
+               {
+                  send = emotion_gstreamer_message_alloc(ev, msg);
+
+                  if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
+               }
           break;
        }
      case GST_MESSAGE_ERROR:
diff --git a/src/modules/gstreamer/emotion_gstreamer.h b/src/modules/gstreamer/emotion_gstreamer.h
index 25a7180..9c7455d 100644
--- a/src/modules/gstreamer/emotion_gstreamer.h
+++ b/src/modules/gstreamer/emotion_gstreamer.h
@@ -55,7 +55,10 @@ struct _Emotion_Gstreamer_Video
    GstElement *esink;
    GstElement *xvsink;
    GstElement *tee;
-   GstPad *teepad;
+   GstElement *convert;
+
+   GstPad *eteepad;
+   GstPad *xvteepad;
    GstPad *xvpad;

    Eina_List *threads;
@@ -121,6 +124,9 @@ struct _Emotion_Gstreamer_Video
    Eina_Bool kill_buffer : 1;
    Eina_Bool stream : 1;
    Eina_Bool priority : 1;
+
+   int src_width;
+   int src_height;
 };

 struct _EvasVideoSink {
diff --git a/src/modules/gstreamer/emotion_sink.c b/src/modules/gstreamer/emotion_sink.c
index 85739bd..6fe26f8 100644
--- a/src/modules/gstreamer/emotion_sink.c
+++ b/src/modules/gstreamer/emotion_sink.c
@@ -895,7 +895,7 @@ _block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
         Emotion_Gstreamer_Video *ev = user_data;
         GstEvent *gev;

-        gst_pad_unlink(ev->teepad, ev->xvpad);
+        gst_pad_unlink(ev->xvteepad, ev->xvpad);
         gev = gst_event_new_eos();
         gst_pad_send_event(ev->xvpad, gev);
         gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
@@ -909,7 +909,7 @@ _block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
      {
         Emotion_Gstreamer_Video *ev = user_data;

-        gst_pad_link(ev->teepad, ev->xvpad);
+        gst_pad_link(ev->xvteepad, ev->xvpad);
         if (ev->play)
           gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
         else
@@ -928,7 +928,7 @@ _video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *s
    fprintf(stderr, "show xv\n");
    ecore_x_window_show(ev->win);
 #endif
-   /* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_link_cb, ev); */
+   /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */
 }

 static void
@@ -940,7 +940,7 @@ _video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *s
    fprintf(stderr, "hide xv\n");
    ecore_x_window_hide(ev->win);
 #endif
-   /* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_unlink_cb, ev); */
+   /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */
 }

 static void
@@ -957,6 +957,67 @@ _video_update_pixels(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_S
    evas_video_sink_main_render(send);
 }

+static void
+_image_resize(void *data, Evas *e, Evas_Object *obj, void *event_info)
+{
+   Emotion_Gstreamer_Video *ev = data;
+   Evas_Coord width, height;
+   int image_area, src_area;
+   double ratio;
+
+   evas_object_geometry_get(obj, NULL, NULL, &width, &height);
+   image_area = width * height;
+   src_area = ev->src_width * ev->src_height;
+   ratio = (double)image_area / (double)src_area;
+
+   // when an image is much smaller than original video size,
+   // add fimcconvert element to the pipeline
+   if (ratio < 0.8 && !ev->priority && !ev->convert)
+     {
+        GstElementFactory *cfactory = NULL;
+
+        cfactory = gst_element_factory_find("fimcconvert");
+        if (cfactory)
+          {
+             GstElement *convert = NULL;
+
+             convert = gst_element_factory_create(cfactory, NULL);
+             if (convert)
+               {
+                  GstElement *queue = NULL;
+                  GstPad *pad, *teepad;
+
+                  queue = gst_bin_get_by_name(GST_BIN(ev->sink), "equeue");
+                  gst_element_unlink(ev->tee, queue);
+                  gst_element_release_request_pad(ev->tee, ev->eteepad);
+                  gst_object_unref(ev->eteepad);
+
+                  gst_bin_add(GST_BIN(ev->sink), convert);
+                  gst_element_link_many(ev->tee, convert, queue, NULL);
+                  pad = gst_element_get_pad(convert, "sink");
+                  teepad = gst_element_get_request_pad(ev->tee, "src%d");
+                  gst_pad_link(teepad, pad);
+                  gst_object_unref(pad);
+
+                  g_object_set(G_OBJECT(convert), "src-width", width, NULL);
+                  g_object_set(G_OBJECT(convert), "src-height", height, NULL);
+                  g_object_set(G_OBJECT(convert), "qos", TRUE, NULL);
+                  gst_element_sync_state_with_parent(convert);
+
+                  ev->eteepad = teepad;
+                  ev->convert = convert;
+               }
+          }
+     }
+   // TODO: when an image is resized (e.g. rotation), set size again to fimcconvert
+   // TODO: fimcconvert has an issue about resetting
+   //else if (ev->convert)
+   //  {
+   //     g_object_set(G_OBJECT(ev->convert), "src-width", w, NULL);
+   //     g_object_set(G_OBJECT(ev->convert), "src-height", h, NULL);
+   //  }
+}
+
 GstElement *
 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                          Evas_Object *o,
@@ -1115,9 +1176,10 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
    g_object_set(G_OBJECT(esink), "ev", ev, NULL);

    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
+   evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _image_resize, ev);

    /* We need queue to force each video sink to be in its own thread */
-   queue = gst_element_factory_make("queue", NULL);
+   queue = gst_element_factory_make("queue", "equeue");
    if (!queue)
      {
         ERR("Unable to create 'queue' GstElement.");
@@ -1132,13 +1194,14 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
    teepad = gst_element_get_request_pad(tee, "src%d");
    gst_pad_link(teepad, pad);
    gst_object_unref(pad);
-   gst_object_unref(teepad);
+
+   ev->eteepad = teepad;

    if (xvsink)
      {
        GstElement *fakeeos;

-       queue = gst_element_factory_make("queue", NULL);
+       queue = gst_element_factory_make("queue", "xvqueue");
        fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
        if (queue && fakeeos)
          {
@@ -1157,7 +1220,7 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,

        xvsink = fakeeos;

-       ev->teepad = teepad;
+       ev->xvteepad = teepad;
        ev->xvpad = pad;
      }
    else
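
For readers unfamiliar with the pipeline surgery done in _image_resize() above, here is a minimal standalone sketch (not part of the patch) of the same GStreamer 0.10 pattern: drop the existing tee -> queue link, release the old request pad, and splice a converter element such as "fimcconvert" in front of the queue of a running pipeline. The helper name splice_converter and the teepad_slot parameter are hypothetical, and error handling is reduced to the bare minimum.

#include <gst/gst.h>

/* Hypothetical helper: insert the element named "convert_name" between
 * "tee" and "queue", which are assumed to already be linked inside "bin"
 * (as the emotion sink bin is).  *teepad_slot holds the tee request pad
 * currently feeding the queue, like ev->eteepad in the patch. */
static gboolean
splice_converter(GstBin *bin, GstElement *tee, GstElement *queue,
                 GstPad **teepad_slot, const char *convert_name)
{
   GstElement *convert;
   GstPad *sinkpad, *teepad;

   convert = gst_element_factory_make(convert_name, NULL);
   if (!convert) return FALSE; /* plugin not available on this device */

   /* Break the old tee -> queue link and give the request pad back. */
   gst_element_unlink(tee, queue);
   gst_element_release_request_pad(tee, *teepad_slot);
   gst_object_unref(*teepad_slot);

   /* Add the converter and rebuild the branch: tee -> convert -> queue. */
   gst_bin_add(bin, convert);
   sinkpad = gst_element_get_pad(convert, "sink");
   teepad = gst_element_get_request_pad(tee, "src%d");
   gst_pad_link(teepad, sinkpad);
   gst_object_unref(sinkpad);
   gst_element_link(convert, queue);

   /* Bring the new element up to the state of the running pipeline. */
   gst_element_sync_state_with_parent(convert);

   *teepad_slot = teepad;
   return TRUE;
}

Unlike the patch, this sketch requests the new tee source pad explicitly instead of also going through gst_element_link_many(), so only one request pad is taken for the branch; the patch additionally configures the converter's "src-width", "src-height" and "qos" properties before syncing its state.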