mm_display_interface_evas_render(display->mm_display, packet);
}
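+/* Runs once in the default GMainContext (scheduled via g_idle_add()) to set the overlay display and obtain its wayland surface id. */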
+static gboolean __set_overlay_display_idle_cb(gpointer user_data)
+{
+ int ret = MM_ERROR_NONE;
+ webrtc_display_s *display = (webrtc_display_s*)user_data;
+
+ RET_VAL_IF(display == NULL, G_SOURCE_REMOVE, "display is NULL");
+
+ g_mutex_lock(&display->mutex);
+
+ ret = mm_display_interface_set_display(display->mm_display, MM_DISPLAY_TYPE_OVERLAY, display->object, &display->overlay_surface_id);
+ if (ret != MM_ERROR_NONE) {
+ LOG_ERROR("failed to mm_display_interface_set_display(), ret[0x%x]", ret);
+ g_mutex_unlock(&display->mutex);
+ return G_SOURCE_REMOVE;
+ }
+
+ LOG_INFO("overlay_surface_id[%d]", display->overlay_surface_id);
+
+ g_mutex_unlock(&display->mutex);
+
+ return G_SOURCE_REMOVE;
+}
+
static gboolean __set_evas_display_idle_cb(gpointer user_data)
{
int ret = MM_ERROR_NONE;
return G_SOURCE_REMOVE;
}
+ /* FIXME: remove this meaningless value */
LOG_DEBUG("surface id[%d]", surface_id);
 /* FIXME: get mode and rotation values from the handle */
switch (display->type) {
case WEBRTC_DISPLAY_TYPE_OVERLAY:
- /* TODO: Implemenation */
LOG_DEBUG("it's OVERLAY type");
+
+ g_idle_add(__set_overlay_display_idle_cb, display);
break;
case WEBRTC_DISPLAY_TYPE_EVAS:
#include "webrtc.h"
#include "webrtc_private.h"
+#include <gst/video/videooverlay.h>
#define DEFAULT_ELEMENT_VIDEOCONVERT "videoconvert"
#define DEFAULT_ELEMENT_AUDIOCONVERT "audioconvert"
return g_hash_table_lookup(webrtc->gst.sink_slots, key);
}
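+/* Selects the videosink factory name by display type: the default videosink for OVERLAY, a fakesink (driven by its handoff signal) for EVAS; falls back to default_name when no display object is set. */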
+static const char *__get_videosink_factory_name(webrtc_display_s *display, const char *default_name, bool *display_is_set)
+{
+ const char *factory_name = DEFAULT_ELEMENT_VIDEOSINK;
+
+ RET_VAL_IF(display == NULL, default_name, "display is NULL");
+ RET_VAL_IF(default_name == NULL, DEFAULT_ELEMENT_VIDEOSINK, "default_name is NULL");
+ RET_VAL_IF(display_is_set == NULL, default_name, "display_is_set is NULL");
+
+ if (display->object == NULL) {
+ *display_is_set = false;
+ LOG_INFO("display object is not set, use [%s]", default_name);
+ return default_name;
+ }
+
+ *display_is_set = true;
+
+ if (display->type == WEBRTC_DISPLAY_TYPE_OVERLAY)
+ factory_name = DEFAULT_ELEMENT_VIDEOSINK;
+ else if (display->type == WEBRTC_DISPLAY_TYPE_EVAS)
+ factory_name = DEFAULT_ELEMENT_FAKESINK;
+ else
+ LOG_ERROR("invalid type(%d)", display->type);
+
+ LOG_INFO("use [%s] for object[%p], type[%d]", factory_name, display->object, display->type);
+
+ return factory_name;
+}
+
static int __build_videosink(webrtc_s *webrtc, GstElement *decodebin, GstPad *src_pad)
{
webrtc_gst_slot_s *sink;
GstElement *videoconvert;
GstElement *videosink;
- const char *videosink_factory_name = DEFAULT_ELEMENT_VIDEOSINK;
+ const char *videosink_factory_name;
+ bool display_is_set = false;
RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
RET_VAL_IF(decodebin == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "decodebin is NULL");
return WEBRTC_ERROR_INVALID_OPERATION;
}
- /* TODO: Implemenation per sink->display->type */
- if (sink->display && sink->display->object != NULL && sink->display->type == WEBRTC_DISPLAY_TYPE_EVAS)
- videosink_factory_name = DEFAULT_ELEMENT_FAKESINK;
+ videosink_factory_name = __get_videosink_factory_name(sink->display, DEFAULT_ELEMENT_VIDEOSINK, &display_is_set);
- /* FIXME: get factory name from ini */
if (!(videosink = _create_element(videosink_factory_name, NULL))) {
LOG_ERROR("failed to create videosink[%s]", videosink_factory_name);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- if (sink->display && sink->display->object != NULL && sink->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
- g_object_set(videosink, "signal-handoffs", TRUE, NULL);
- _connect_and_append_signal(&sink->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), sink->display);
+ if (display_is_set) {
+ if (sink->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY) {
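+ /* Pass the wayland surface id obtained in __set_overlay_display_idle_cb() to the overlay videosink. */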
+ gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(videosink), sink->display->overlay_surface_id);
+ } else if (sink->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
+ g_object_set(videosink, "signal-handoffs", TRUE, NULL);
+ _connect_and_append_signal(&sink->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), sink->display);
+ }
}
gst_bin_add_many(GST_BIN(sink->bin), videoconvert, videosink, NULL);
/* for video display */
static Evas_Object *g_win_id;
-static Evas_Object *g_selected_win_id;
typedef struct {
Evas_Object *win;
return -1;
ad->win = win;
g_win_id = win;
- g_selected_win_id = g_win_id;
create_render_rect_and_bg(ad->win);
/* Create evas image object for EVAS surface */
}
ad->win = NULL;
- g_selected_win_id = NULL;
return 0;
}
if (type == WEBRTC_MEDIA_TYPE_VIDEO) {
g_print("Video track is added, ");
if (conn->display_type == WEBRTC_DISPLAY_TYPE_OVERLAY) {
- g_print("set display - overlay\n");
- /* FIXME: set window id here */
+ g_print("set display - overlay, object[%p]\n", g_win_id);
+ webrtc_set_display(conn->webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_win_id);
} else if (conn->display_type == WEBRTC_DISPLAY_TYPE_EVAS) {
g_print("set display - evas object[%p]\n", conn->eo);