#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <Elementary.h>
-#include <Ecore_Wayland.h>
+#include <Ecore_Wl2.h>
+#include <sys/syscall.h>
#include "mm_wfd_sink_util.h"
#include "mm_wfd_sink_priv.h"
#include "mm_wfd_sink_dlog.h"
#include "mm_wfd_sink_enum.h"
#include "mm_wfd_sink_wayland.h"
+#include "mm_wfd_sink_config.h"
#define PRINT_WFD_REF_COUNT(wfd_sink)\
do {\
/* gstreamer */
static int __mm_wfd_sink_init_gstreamer(mm_wfd_sink_t *wfd_sink);
-static int __mm_wfd_sink_create_pipeline(mm_wfd_sink_t *wfd_sink);
-static int __mm_wfd_sink_destroy_audio_decodebin(mm_wfd_sink_t *wfd_sink);
-static int __mm_wfd_sink_destroy_video_decodebin(mm_wfd_sink_t *wfd_sink);
-static int __mm_wfd_sink_destroy_audio_sinkbin(mm_wfd_sink_t *wfd_sink);
-static int __mm_wfd_sink_destroy_video_sinkbin(mm_wfd_sink_t *wfd_sink);
-static int __mm_wfd_sink_destroy_pipeline(mm_wfd_sink_t *wfd_sink);
-static int __mm_wfd_sink_set_pipeline_state(mm_wfd_sink_t *wfd_sink, GstState state, gboolean async);
static gboolean _mm_wfd_sink_msg_callback(GstBus *bus, GstMessage *msg, gpointer data);
-/* state */
-static int __mm_wfd_sink_set_state(mm_wfd_sink_t *wfd_sink, MMWFDSinkStateType state);
-
/* util */
static void __mm_wfd_sink_dump_pipeline_state(mm_wfd_sink_t *wfd_sink);
+/* Resource-manager preemption callback: invoked when another client takes
+ * over the HW resources this sink holds. Tears the session down
+ * (disconnect + unprepare) under the command lock, then clears the cached
+ * resource handles so the normal unprepare path does not release them a
+ * second time. Returns TRUE to let the resource manager release all the
+ * resources; FALSE only on a NULL user_data.
+ * NOTE(review): assumes MM_WFD_SINK_RESOURCE_TYPE_VIDEO_DECODER is the
+ * first enumerator of MMWFDSinkResourceType — confirm in the header. */
+static int
+__resource_release_cb(mm_resource_manager_h rm, mm_resource_manager_res_h res,
+		void *user_data)
+{
+	mm_wfd_sink_t *wfdsink = NULL;
+	MMWFDSinkResourceType res_idx = MM_WFD_SINK_RESOURCE_TYPE_MAX;
+
+	wfd_sink_debug_fenter();
+
+	if (!user_data) {
+		wfd_sink_error("user_data is null");
+		return FALSE;
+	}
+
+	wfdsink = (mm_wfd_sink_t *)user_data;
+
+	/* remember why we were torn down; checked later to skip a redundant
+	 * release of the (already preempted) resources */
+	wfdsink->interrupted_by_resource = TRUE;
+
+	wfd_sink_debug("resource is interrupted. resource would be released after destroying");
+
+	MMWFDSINK_CMD_LOCK(wfdsink);
+	if (_mm_wfd_sink_disconnect(wfdsink) != MM_ERROR_NONE)
+		wfd_sink_error("failed to disconnect");
+
+	if (_mm_wfd_sink_unprepare(wfdsink) != MM_ERROR_NONE)
+		wfd_sink_error("failed to unprepare");
+	/* handles are owned by the resource manager now; drop our references */
+	for (res_idx = MM_WFD_SINK_RESOURCE_TYPE_VIDEO_DECODER; res_idx < MM_WFD_SINK_RESOURCE_TYPE_MAX; res_idx++) {
+		wfdsink->hw_resource[res_idx] = NULL;
+	}
+	MMWFDSINK_CMD_UNLOCK(wfdsink);
+
+	wfd_sink_debug_fleave();
+	return TRUE; /* release all the resources */
+}
+
+/* Acquire the HW resource of the given wfd sink type (video decoder or
+ * video overlay) from the resource manager. No-op if the resource is
+ * already held. mark_for_acquire() only stages the request; the commit()
+ * below performs the actual acquisition and fills hw_resource[type].
+ * Returns MM_ERROR_NONE on success, MM_ERROR_WFD_INTERNAL on any
+ * resource-manager failure or an unknown type. */
+static int __mm_wfd_sink_acquire_hw_resource(mm_wfd_sink_t *wfd_sink, MMWFDSinkResourceType type)
+{
+	int rm_ret = MM_RESOURCE_MANAGER_ERROR_NONE;
+	mm_resource_manager_res_type_e rm_res_type = MM_RESOURCE_MANAGER_RES_TYPE_MAX;
+
+	wfd_sink_debug_fenter();
+
+	wfd_sink_return_val_if_fail(wfd_sink, MM_ERROR_WFD_NOT_INITIALIZED);
+
+	/* map the wfd sink resource type onto the resource-manager type */
+	switch (type) {
+	case MM_WFD_SINK_RESOURCE_TYPE_VIDEO_DECODER:
+		rm_res_type = MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_DECODER;
+		break;
+	case MM_WFD_SINK_RESOURCE_TYPE_VIDEO_OVERLAY:
+		rm_res_type = MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY;
+		break;
+	default:
+		wfd_sink_error("invalid wfd sink resource type %d", type);
+		return MM_ERROR_WFD_INTERNAL;
+	}
+
+	if (wfd_sink->hw_resource[type] != NULL) {
+		wfd_sink_debug("[%d type] resource was already acquired", type);
+		return MM_ERROR_NONE;
+	}
+
+	wfd_sink_debug("mark for acquire [%d type] resource", type);
+	rm_ret = mm_resource_manager_mark_for_acquire(wfd_sink->resource_manager,
+		rm_res_type, MM_RESOURCE_MANAGER_RES_VOLUME_FULL, &wfd_sink->hw_resource[type]);
+	if (rm_ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+		wfd_sink_error("failed to mark resource for acquire, ret(0x%x)", rm_ret);
+		return MM_ERROR_WFD_INTERNAL;
+	}
+
+	/* commit actually performs the acquisition staged above */
+	rm_ret = mm_resource_manager_commit(wfd_sink->resource_manager);
+	if (rm_ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+		wfd_sink_error("failed to commit of resource, ret(0x%x)", rm_ret);
+		return MM_ERROR_WFD_INTERNAL;
+	}
+
+	wfd_sink_debug_fleave();
+	return MM_ERROR_NONE;
+}
+
+/* Release the previously acquired HW resource of the given type back to
+ * the resource manager. No-op if no resource of that type is held.
+ * The cached handle is cleared before commit() so a failed commit does
+ * not leave a dangling handle that would be released twice.
+ * Returns MM_ERROR_NONE on success, MM_ERROR_WFD_INTERNAL on any
+ * resource-manager failure. */
+static int __mm_wfd_sink_release_hw_resource(mm_wfd_sink_t *wfd_sink, MMWFDSinkResourceType type)
+{
+	int rm_ret = MM_RESOURCE_MANAGER_ERROR_NONE;
+
+	wfd_sink_debug_fenter();
+
+	wfd_sink_return_val_if_fail(wfd_sink, MM_ERROR_WFD_NOT_INITIALIZED);
+
+	if (wfd_sink->hw_resource[type] == NULL) {
+		wfd_sink_debug("there is no acquired [%d type] resource", type);
+		return MM_ERROR_NONE;
+	}
+
+	wfd_sink_debug("mark for release [%d type] resource", type);
+	rm_ret = mm_resource_manager_mark_for_release(wfd_sink->resource_manager, wfd_sink->hw_resource[type]);
+	if (rm_ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+		wfd_sink_error("failed to mark resource for release, ret(0x%x)", rm_ret);
+		return MM_ERROR_WFD_INTERNAL;
+	}
+
+	wfd_sink->hw_resource[type] = NULL;
+
+	/* commit actually performs the release staged above */
+	rm_ret = mm_resource_manager_commit(wfd_sink->resource_manager);
+	if (rm_ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+		wfd_sink_error("failed to commit of resource, ret(0x%x)", rm_ret);
+		return MM_ERROR_WFD_INTERNAL;
+	}
+
+	wfd_sink_debug_fleave();
+	return MM_ERROR_NONE;
+}
+
int _mm_wfd_sink_create(mm_wfd_sink_t **wfd_sink, const char *ini_path)
{
int result = MM_ERROR_NONE;
return MM_ERROR_WFD_NO_FREE_SPACE;
}
+ /* initialize resource manager */
+ if (mm_resource_manager_create(MM_RESOURCE_MANAGER_APP_CLASS_MEDIA,
+ __resource_release_cb, new_wfd_sink, &new_wfd_sink->resource_manager)
+ != MM_RESOURCE_MANAGER_ERROR_NONE) {
+ wfd_sink_error("failed to initialize resource manager");
+ MMWFDSINK_FREEIF(new_wfd_sink);
+ return MM_ERROR_WFD_INTERNAL;
+ }
+
/* Initialize gstreamer related */
new_wfd_sink->attrs = 0;
/* Initialize video resolution */
new_wfd_sink->supportive_resolution = MM_WFD_SINK_RESOLUTION_UNKNOWN;
+	/* Initialize coupled sink information */
+ new_wfd_sink->coupled_sink_address = NULL;
+ new_wfd_sink->coupled_sink_status = MM_WFD_COUPLED_SINK_STATUS_NOT_COUPLED;
+
+ /* In case of R2 sink, it would be TRUE */
+ new_wfd_sink->is_coupled_sink_supported = FALSE;
+
/* construct attributes */
new_wfd_sink->attrs = _mmwfd_construct_attribute((MMHandleType)new_wfd_sink);
if (!new_wfd_sink->attrs) {
- MMWFDSINK_FREEIF(new_wfd_sink);
+ result = MM_ERROR_WFD_INTERNAL;
wfd_sink_error("failed to set attribute");
- return MM_ERROR_WFD_INTERNAL;
+ goto fail_to_attrs;
}
/* load ini for initialize */
mm_wfd_sink_ini_unload(&new_wfd_sink->ini);
fail_to_load_ini:
_mmwfd_deconstruct_attribute(new_wfd_sink->attrs);
+fail_to_attrs:
+ mm_resource_manager_destroy(new_wfd_sink->resource_manager);
MMWFDSINK_FREEIF(new_wfd_sink);
*wfd_sink = NULL;
/* set state */
__mm_wfd_sink_set_state(wfd_sink, MM_WFD_SINK_STATE_NULL);
+ if (!wfd_sink->interrupted_by_resource) {
+ int rm_ret = MM_ERROR_NONE;
+ MMWFDSinkResourceType res_idx = MM_WFD_SINK_RESOURCE_TYPE_MAX;
+
+ for (res_idx = MM_WFD_SINK_RESOURCE_TYPE_VIDEO_DECODER; res_idx < MM_WFD_SINK_RESOURCE_TYPE_MAX; res_idx++) {
+ rm_ret = __mm_wfd_sink_release_hw_resource(wfd_sink, res_idx);
+ if (rm_ret != MM_ERROR_NONE)
+ wfd_sink_error("failed to release [%d] resources", res_idx);
+ }
+ }
wfd_sink_debug_fleave();
return result;
/* release attributes */
_mmwfd_deconstruct_attribute(wfd_sink->attrs);
+ /* release the others */
+ g_free(wfd_sink->coupled_sink_address);
+
+ /* release resource manager */
+ if (MM_ERROR_NONE != mm_resource_manager_destroy(wfd_sink->resource_manager)) {
+ result = MM_ERROR_WFD_INTERNAL;
+ wfd_sink_error("failed to destroy resource manager");
+ }
+
/* release manager thread */
if (MM_ERROR_NONE != _mm_wfd_sink_release_manager(wfd_sink)) {
+ result = MM_ERROR_WFD_INTERNAL;
wfd_sink_error("failed to release manager");
- return MM_ERROR_WFD_INTERNAL;
}
/* set state */
}
if (gst_pad_is_linked(srcpad)) {
- wfd_sink_debug("%s:%s is already linked to %s:%s",
+ wfd_sink_debug("%s:%s is already linked",
GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad));
goto done;
}
}
if (gst_pad_is_linked(srcpad)) {
- wfd_sink_debug("%s is already linked",
- GST_ELEMENT_NAME(GST_PAD_PARENT(sinkpad)), GST_PAD_NAME(sinkpad));
+ wfd_sink_debug("%s:%s is already linked",
+ GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad));
goto done;
}
mm_wfd_sink_t *wfd_sink = (mm_wfd_sink_t *) data;
const GstStructure *message_structure = gst_message_get_structure(msg);
gboolean ret = TRUE;
+ gchar *getname = NULL;
wfd_sink_return_val_if_fail(wfd_sink, FALSE);
wfd_sink_return_val_if_fail(msg && GST_IS_MESSAGE(msg), FALSE);
__mm_wfd_sink_set_state(wfd_sink, MM_WFD_SINK_STATE_PAUSED);
} else if (category && !strcmp(category, "close")) {
__mm_wfd_sink_set_state(wfd_sink, MM_WFD_SINK_STATE_DISCONNECTED);
+ } else if (category && !strcmp(category, "coupling")) {
+ wfd_sink->coupled_sink_status = MM_WFD_COUPLED_SINK_STATUS_COUPLED;
+ wfd_sink->coupled_sink_address = g_strdup(text);
+ wfd_sink_debug("coupling info [%d : %s]", wfd_sink->coupled_sink_status, wfd_sink->coupled_sink_address);
+ __mm_wfd_sink_set_state(wfd_sink, MM_WFD_SINK_STATE_DISCONNECTED);
}
break;
case GST_PROGRESS_TYPE_CANCELED:
MMWFDSINK_POST_MESSAGE(wfd_sink,
MM_ERROR_WFD_INTERNAL,
MMWFDSINK_CURRENT_STATE(wfd_sink));
+ } else if (category && !strcmp(category, "coupling")) {
+ wfd_sink_error("got error : %s", GST_STR_NULL(text));
+ /*_mm_wfd_sink_disconnect (wfd_sink); */
+ MMWFDSINK_POST_MESSAGE(wfd_sink,
+ MM_ERROR_WFD_INTERNAL,
+ MMWFDSINK_CURRENT_STATE(wfd_sink));
} else {
wfd_sink_error("got error : %s", GST_STR_NULL(text));
}
break;
case GST_MESSAGE_ASYNC_START:
- wfd_sink_debug("GST_MESSAGE_ASYNC_START : %s", gst_element_get_name(GST_MESSAGE_SRC(msg)));
+ getname = gst_element_get_name(GST_MESSAGE_SRC(msg));
+ wfd_sink_debug("GST_MESSAGE_ASYNC_START : %s", getname);
+ MMWFDSINK_FREEIF(getname);
break;
case GST_MESSAGE_ASYNC_DONE:
- wfd_sink_debug("GST_MESSAGE_ASYNC_DONE : %s", gst_element_get_name(GST_MESSAGE_SRC(msg)));
+ getname = gst_element_get_name(GST_MESSAGE_SRC(msg));
+ wfd_sink_debug("GST_MESSAGE_ASYNC_DONE : %s", getname);
+ MMWFDSINK_FREEIF(getname);
break;
case GST_MESSAGE_UNKNOWN:
case GST_MESSAGE_INFO:
/* ERRORS */
invalid_state:
- wfd_sink_error("current state is invalid.", MMWFDSINK_STATE_GET_NAME(cur_state));
+ wfd_sink_error("current state[%s] is invalid.", MMWFDSINK_STATE_GET_NAME(cur_state));
return MM_ERROR_WFD_INVALID_STATE;
}
-static int __mm_wfd_sink_set_state(mm_wfd_sink_t *wfd_sink, MMWFDSinkStateType state)
+int __mm_wfd_sink_set_state(mm_wfd_sink_t *wfd_sink, MMWFDSinkStateType state)
{
wfd_sink_debug_fenter();
return MM_ERROR_NONE;
}
-static int
+int
__mm_wfd_sink_set_pipeline_state(mm_wfd_sink_t *wfd_sink, GstState state, gboolean async)
{
GstStateChangeReturn result = GST_STATE_CHANGE_FAILURE;
wfd_sink->pipeline,
MM_ERROR_WFD_NOT_INITIALIZED);
+ if (wfd_sink->stream_info.video_stream_info.codec == MM_WFD_SINK_VIDEO_CODEC_NONE) {
+ wfd_sink_debug("Skip unprepare video pipeline for none audio codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
PRINT_WFD_REF_COUNT(wfd_sink);
wfd_sink_error("No-error:unprepare video sink bin");
if (wfd_sink->pipeline->v_sinkbin && wfd_sink->pipeline->v_sinkbin[WFD_SINK_V_S_BIN].gst) {
goto ERROR;
}
- wfd_sink_debug("try to unlink %s:%s and %s:%s",
+ wfd_sink_error("try to unlink %s:%s and %s:%s",
GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad),
GST_ELEMENT_NAME(GST_PAD_PARENT(sinkpad)), GST_PAD_NAME(sinkpad));
if (!gst_pad_unlink(srcpad, sinkpad)) {
gst_object_unref(sinkpad);
sinkpad = NULL;
+ wfd_sink_error("try to remove %s from %s",
+ GST_ELEMENT_NAME(v_sinkbin), GST_ELEMENT_NAME(pipeline));
+
gst_object_ref(v_sinkbin);
if (!gst_bin_remove(GST_BIN(pipeline), GST_ELEMENT(v_sinkbin))) {
wfd_sink_error("failed to remove %s from %s",
goto ERROR;
}
- wfd_sink_debug("try to unlink %s:%s and %s:%s",
+ wfd_sink_error("try to unlink %s:%s and %s:%s",
GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad),
GST_ELEMENT_NAME(GST_PAD_PARENT(sinkpad)), GST_PAD_NAME(sinkpad));
if (!gst_pad_unlink(srcpad, sinkpad)) {
goto ERROR;
}
- wfd_sink_debug("try to unlink %s:%s and %s:%s",
+ wfd_sink_error("try to unlink %s:%s and %s:%s",
GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad),
GST_ELEMENT_NAME(GST_PAD_PARENT(sinkpad)), GST_PAD_NAME(sinkpad));
if (!gst_pad_unlink(srcpad, sinkpad)) {
wfd_sink->pipeline->mainbin[WFD_SINK_M_PIPE].gst,
MM_ERROR_WFD_NOT_INITIALIZED);
+ if (wfd_sink->stream_info.video_stream_info.codec == MM_WFD_SINK_VIDEO_CODEC_NONE) {
+ wfd_sink_debug("Skip prepare video pipeline for none audio codec");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* check video decodebin is linked */
if (!wfd_sink->video_decodebin_is_linked) {
/* check video decodebin is created */
wfd_sink->pipeline,
MM_ERROR_WFD_NOT_INITIALIZED);
+ if (wfd_sink->stream_info.audio_stream_info.codec == MM_WFD_SINK_AUDIO_CODEC_NONE) {
+ wfd_sink_debug("Skip unprepare audio pipeline for none audio codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
wfd_sink_error("No-error:unprepare audio sink bin");
PRINT_WFD_REF_COUNT(wfd_sink);
goto ERROR;
}
- wfd_sink_debug("try to unlink %s:%s and %s:%s",
+ wfd_sink_error("try to unlink %s:%s and %s:%s",
GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad),
GST_ELEMENT_NAME(GST_PAD_PARENT(sinkpad)), GST_PAD_NAME(sinkpad));
if (!gst_pad_unlink(srcpad, sinkpad)) {
gst_object_unref(sinkpad);
sinkpad = NULL;
+ wfd_sink_error("try to remove %s from %s", GST_ELEMENT_NAME(a_sinkbin), GST_ELEMENT_NAME(pipeline));
+
gst_object_ref(a_sinkbin);
if (!gst_bin_remove(GST_BIN(pipeline), GST_ELEMENT(a_sinkbin))) {
wfd_sink_error("failed to remove %s from %s",
goto ERROR;
}
- wfd_sink_debug("try to unlink %s:%s and %s:%s",
+ wfd_sink_error("try to unlink %s:%s and %s:%s",
GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad),
GST_ELEMENT_NAME(GST_PAD_PARENT(sinkpad)), GST_PAD_NAME(sinkpad));
if (!gst_pad_unlink(srcpad, sinkpad)) {
return MM_ERROR_WFD_INTERNAL;
}
- wfd_sink_debug("try to unlink %s:%s and %s:%s",
+ wfd_sink_error("try to unlink %s:%s and %s:%s",
GST_ELEMENT_NAME(GST_PAD_PARENT(srcpad)), GST_PAD_NAME(srcpad),
GST_ELEMENT_NAME(GST_PAD_PARENT(sinkpad)), GST_PAD_NAME(sinkpad));
if (!gst_pad_unlink(srcpad, sinkpad)) {
wfd_sink->pipeline->mainbin[WFD_SINK_M_PIPE].gst,
MM_ERROR_WFD_NOT_INITIALIZED);
+ if (wfd_sink->stream_info.audio_stream_info.codec == MM_WFD_SINK_AUDIO_CODEC_NONE) {
+ wfd_sink_debug("Skip prepare audio pipeline for none audio codec");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* check audio decodebin is linked */
if (!wfd_sink->audio_decodebin_is_linked) {
/* check audio decodebin is created */
video_minus_compensation = FALSE;
}
} else {
- wfd_sink_debug("first update video average gap(%lld) ", video_avgrage_gap);
+ wfd_sink_debug("first update video average gap(%"G_GINT64_FORMAT"d) ", video_avgrage_gap);
wfd_sink->video_average_gap = video_avgrage_gap;
}
} else {
audio_minus_compensation = FALSE;
}
} else {
- wfd_sink_debug("first update audio average gap(%lld) ", audio_avgrage_gap);
+ wfd_sink_debug("first update audio average gap(%"G_GINT64_FORMAT"d) ", audio_avgrage_gap);
wfd_sink->audio_average_gap = audio_avgrage_gap;
}
} else {
avgrage_gap_diff = audio_avgrage_gap_diff;
}
- wfd_sink_debug("average diff gap difference beween audio:%s%lld and video:%s%lld ",
+ wfd_sink_debug("average diff gap difference beween audio:%s%"G_GINT64_FORMAT"d and video:%s%"G_GINT64_FORMAT"d ",
audio_minus_compensation ? "-" : "", audio_avgrage_gap_diff,
video_minus_compensation ? "-" : "", video_avgrage_gap_diff);
else
ts_offset += avgrage_gap_diff;
- wfd_sink_debug("do timestamp compensation : %s%lld (ts-offset : %"
+ wfd_sink_debug("do timestamp compensation : %s%"G_GINT64_FORMAT"d (ts-offset : %"
GST_TIME_FORMAT") at(%" GST_TIME_FORMAT")",
minus_compensation ? "-" : "", avgrage_gap_diff,
GST_TIME_ARGS(ts_offset), GST_TIME_ARGS(running_time));
if (wfd_sink->pipeline && wfd_sink->pipeline->v_sinkbin && wfd_sink->pipeline->v_sinkbin[WFD_SINK_V_S_SINK].gst)
g_object_set(G_OBJECT(wfd_sink->pipeline->v_sinkbin[WFD_SINK_V_S_SINK].gst), "ts-offset", (gint64)ts_offset, NULL);
} else {
- wfd_sink_debug("don't need to do timestamp compensation : %s%lld (ts-offset : %"GST_TIME_FORMAT ")",
+ wfd_sink_debug("don't need to do timestamp compensation : %s%"G_GINT64_FORMAT"d (ts-offset : %"GST_TIME_FORMAT ")",
minus_compensation ? "-" : "", avgrage_gap_diff, GST_TIME_ARGS(ts_offset));
}
__mm_wfd_sink_demux_pad_added(GstElement *demux, GstPad *pad, gpointer data)
{
mm_wfd_sink_t *wfd_sink = (mm_wfd_sink_t *)data;
- gchar *name = gst_pad_get_name(pad);
+ gchar *name = NULL;
GstElement *pipeline = NULL;
GstElement *valve = NULL;
GstPad *sinkpad = NULL;
wfd_sink->pipeline->mainbin &&
wfd_sink->pipeline->mainbin[WFD_SINK_M_PIPE].gst);
+ name = gst_pad_get_name(pad);
+ if (name == NULL) {
+ wfd_sink_error("fail to get pad");
+ goto ERROR;
+ }
+
+ wfd_sink_debug("Mux pad added, video_codec=%d, audio_codec=%d, name[0] = %c",
+ wfd_sink->stream_info.video_stream_info.codec,
+ wfd_sink->stream_info.audio_stream_info.codec,
+ name[0]);
+
+	//In case of no video codec, we don't add video pad
+ if (wfd_sink->stream_info.video_stream_info.codec == MM_WFD_SINK_VIDEO_CODEC_NONE && name[0] == 'v') {
+ wfd_sink_error("Skip video pad add for none video codec");
+ // Do nothing
+ goto done;
+ }
+
+ //In case of none audio codec, we don't add audio pad
+ if (wfd_sink->stream_info.audio_stream_info.codec == MM_WFD_SINK_AUDIO_CODEC_NONE && name[0] == 'a') {
+ wfd_sink_error("Skip audio pad add for none audio codec");
+ // Do nothing
+ goto done;
+ }
+
pipeline = wfd_sink->pipeline->mainbin[WFD_SINK_M_PIPE].gst;
/* take srcpad from demuxer added pad */
goto ERROR;
}
- if (name[0] == 'v') {
+ if (name[0] == 'v')
MMWFDSINK_GENERATE_DOT_IF_ENABLED(wfd_sink, "video-pad-added-pipeline");
- } else if (name[0] == 'a') {
+ else if (name[0] == 'a')
MMWFDSINK_GENERATE_DOT_IF_ENABLED(wfd_sink, "audio-pad-added-pipeline");
- }
done:
MMWFDSINK_FREEIF(name);
gchar *video_format;
wfd_sink_debug_fenter();
-
wfd_sink_return_if_fail(str && GST_IS_STRUCTURE(str));
wfd_sink_return_if_fail(wfd_sink);
wfd_sink_error("invalid audio format(%s)...", audio_format);
is_valid_audio_format = FALSE;
}
-
if (is_valid_audio_format == TRUE) {
if (gst_structure_has_field(str, "audio_rate"))
gst_structure_get_int(str, "audio_rate", &stream_info->audio_stream_info.sample_rate);
stream_info->audio_stream_info.channels,
stream_info->audio_stream_info.bitwidth);
}
- }
+ g_free(audio_format);
+ }
if (gst_structure_has_field(str, "video_format")) {
is_valid_video_format = TRUE;
video_format = g_strdup(gst_structure_get_string(str, "video_format"));
stream_info->video_stream_info.height,
stream_info->video_stream_info.frame_rate);
}
+
+ g_free(video_format);
}
WFD_SINK_MANAGER_SIGNAL_CMD(wfd_sink);
GstStructure *wfd_content_protection = NULL;
GstStructure *wfd2_video_formats = NULL;
GstStructure *wfd2_audio_codecs = NULL;
+ GstStructure *wfd_coupled_sink= NULL;
gint hdcp_version = 0;
gint hdcp_port = 0;
guint64 CEA_resolution = 0;
gst_structure_free(wfd_video_formats);
wfd_video_formats = NULL;
}
+ /* set coupled sink information for Wi-Fi Display session negotiation */
+ wfd_coupled_sink = gst_structure_new("wfd_coupled_sink",
+ "coupled_sink_status", G_TYPE_INT, wfd_sink->coupled_sink_status,
+ "coupled_sink_address", G_TYPE_STRING, wfd_sink->coupled_sink_address,
+ "is_coupled_sink_supported", G_TYPE_BOOLEAN, wfd_sink->is_coupled_sink_supported,
+ NULL);
+ if (wfd_coupled_sink) {
+ g_object_set(G_OBJECT(wfdsrc), "wfd-coupled-sink", wfd_coupled_sink, NULL);
+ gst_structure_free(wfd_coupled_sink);
+ wfd_coupled_sink = NULL;
+ }
/* set hdcp parameter for Wi-Fi Display session negotiation */
if (wfd_sink->ini.wfd_content_protection.enable_hdcp) {
}
-static int __mm_wfd_sink_create_pipeline(mm_wfd_sink_t *wfd_sink)
+int __mm_wfd_sink_create_pipeline(mm_wfd_sink_t *wfd_sink)
{
MMWFDSinkGstElement *mainbin = NULL;
GList *element_bucket = NULL;
MMWFDSINK_TS_DATA_DUMP(wfd_sink, mainbin[WFD_SINK_M_DEPAY].gst, "src");
+ /* create queue for ts */
+ MMWFDSINK_CREATE_ELEMENT(mainbin, WFD_SINK_M_QUEUE, "queue", "ts_queue", TRUE);
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, mainbin[WFD_SINK_M_QUEUE].gst, "src");
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, mainbin[WFD_SINK_M_QUEUE].gst, "sink");
+ g_object_set(G_OBJECT(mainbin[WFD_SINK_M_QUEUE].gst), "max-size-buffers", 200000, NULL);
+
+ /* create valve for demux */
+ MMWFDSINK_CREATE_ELEMENT(mainbin, WFD_SINK_M_D_VALVE, "valve", "demux_valve", TRUE);
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, mainbin[WFD_SINK_M_D_VALVE].gst, "src");
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, mainbin[WFD_SINK_M_D_VALVE].gst, "sink");
+
/* create tsdemuxer*/
MMWFDSINK_CREATE_ELEMENT(mainbin, WFD_SINK_M_DEMUX, wfd_sink->ini.name_of_tsdemux, "wfdsink_demux", TRUE);
MMWFDSINK_PAD_PROBE(wfd_sink, NULL, mainbin[WFD_SINK_M_DEMUX].gst, "sink");
CREATE_ERROR:
wfd_sink_error("ERROR : releasing pipeline");
- if (element_bucket)
+ if (element_bucket) {
g_list_free(element_bucket);
- element_bucket = NULL;
-
- /* finished */
- if (bus) {
- gst_object_unref(GST_OBJECT(bus));
- bus = NULL;
+ element_bucket = NULL;
}
+
/* release element which are not added to bin */
for (i = 1; i < WFD_SINK_M_NUM; i++) { /* NOTE : skip pipeline */
if (mainbin != NULL && mainbin[i].gst) {
return MM_ERROR_NONE;
}
+ if (wfd_sink->stream_info.audio_stream_info.codec == MM_WFD_SINK_AUDIO_CODEC_NONE) {
+ wfd_sink_debug("Skip link audio decodebin for none audio codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* take audio decodebin */
a_decodebin = wfd_sink->pipeline->a_decodebin;
default:
wfd_sink_error("audio codec is not decied yet. cannot link audio decodebin...");
- return MM_ERROR_WFD_INTERNAL;
+ goto fail_to_link;
break;
}
return MM_ERROR_NONE;
}
-static int __mm_wfd_sink_destroy_audio_decodebin(mm_wfd_sink_t *wfd_sink)
+int __mm_wfd_sink_destroy_audio_decodebin(mm_wfd_sink_t *wfd_sink)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
MMWFDSinkGstElement *a_decodebin = NULL;
GstObject *parent = NULL;
int i;
- wfd_sink_debug_fenter();
+ wfd_sink_error_fenter();
wfd_sink_return_val_if_fail(wfd_sink, MM_ERROR_WFD_NOT_INITIALIZED);
wfd_sink_debug("audio decodebin has no parent.. need to relase by itself");
if (GST_STATE(a_decodebin[WFD_SINK_A_D_BIN].gst) >= GST_STATE_READY) {
- wfd_sink_debug("try to change state of audio decodebin to NULL");
+ wfd_sink_error("try to change state of audio decodebin to NULL");
ret = gst_element_set_state(a_decodebin[WFD_SINK_A_D_BIN].gst, GST_STATE_NULL);
if (ret != GST_STATE_CHANGE_SUCCESS) {
wfd_sink_error("failed to change state of audio decodebin to NULL");
MMWFDSINK_FREEIF(wfd_sink->pipeline->a_decodebin);
- wfd_sink_debug_fleave();
+ wfd_sink_error_fleave();
return MM_ERROR_NONE;
}
return MM_ERROR_NONE;
}
+ if (wfd_sink->stream_info.audio_stream_info.codec == MM_WFD_SINK_AUDIO_CODEC_NONE) {
+ wfd_sink_debug("Skip create audio decodebin for none audio codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* check audio decodebin could be linked now */
switch (wfd_sink->stream_info.audio_stream_info.codec) {
case MM_WFD_SINK_AUDIO_CODEC_AAC:
return MM_ERROR_WFD_INTERNAL;
}
-static int __mm_wfd_sink_destroy_audio_sinkbin(mm_wfd_sink_t *wfd_sink)
+int __mm_wfd_sink_destroy_audio_sinkbin(mm_wfd_sink_t *wfd_sink)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
MMWFDSinkGstElement *a_sinkbin = NULL;
GstObject *parent = NULL;
int i;
- wfd_sink_debug_fenter();
+ wfd_sink_error_fenter();
wfd_sink_return_val_if_fail(wfd_sink, MM_ERROR_WFD_NOT_INITIALIZED);
parent = gst_element_get_parent(a_sinkbin[WFD_SINK_A_S_BIN].gst);
if (!parent) {
- wfd_sink_debug("audio sinkbin has no parent.. need to relase by itself");
+ wfd_sink_error("audio sinkbin has no parent.. need to relase by itself");
if (GST_STATE(a_sinkbin[WFD_SINK_A_S_BIN].gst) >= GST_STATE_READY) {
- wfd_sink_debug("try to change state of audio sinkbin to NULL");
+ wfd_sink_error("try to change state of audio sinkbin to NULL");
ret = gst_element_set_state(a_sinkbin[WFD_SINK_A_S_BIN].gst, GST_STATE_NULL);
if (ret != GST_STATE_CHANGE_SUCCESS) {
wfd_sink_error("failed to change state of audio sinkbin to NULL");
MMWFDSINK_FREEIF(wfd_sink->pipeline->a_sinkbin);
- wfd_sink_debug_fleave();
+ wfd_sink_error_fleave();
return MM_ERROR_NONE;
}
return MM_ERROR_NONE;
}
+ if (wfd_sink->stream_info.audio_stream_info.codec == MM_WFD_SINK_AUDIO_CODEC_NONE) {
+ wfd_sink_error("Skip create audio sink bin for non audio codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* alloc handles */
a_sinkbin = (MMWFDSinkGstElement *)g_malloc0(sizeof(MMWFDSinkGstElement) * WFD_SINK_A_S_NUM);
if (!a_sinkbin) {
return MM_ERROR_NONE;
}
+ if (wfd_sink->stream_info.video_stream_info.codec == MM_WFD_SINK_VIDEO_CODEC_NONE) {
+ wfd_sink_debug("Skip link video decodebin for none video codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* take video decodebin */
v_decodebin = wfd_sink->pipeline->v_decodebin;
if (v_decodebin[WFD_SINK_V_D_H265_DEC].gst)
element_bucket = g_list_append(element_bucket, &v_decodebin[WFD_SINK_V_D_H265_DEC]);
break;
+ case MM_WFD_SINK_VIDEO_CODEC_VP9:
+ if (v_decodebin[WFD_SINK_V_D_VP9_PARSE].gst)
+ element_bucket = g_list_append(element_bucket, &v_decodebin[WFD_SINK_V_D_VP9_PARSE]);
+ if (v_decodebin[WFD_SINK_V_D_VP9_DEC].gst)
+ element_bucket = g_list_append(element_bucket, &v_decodebin[WFD_SINK_V_D_VP9_DEC]);
+ break;
default:
wfd_sink_error("video codec is not decied yet. cannot link video decpdebin...");
- return MM_ERROR_WFD_INTERNAL;
+ goto fail_to_link;
break;
}
/* update display surface */
mm_attrs_get_int_by_name(wfd_sink->attrs, "display_surface_type", &surface_type);
- wfd_sink_debug("check display surface type attribute: %d", surface_type);
+ wfd_sink_info("check display surface type attribute: %d", surface_type);
mm_attrs_get_int_by_name(wfd_sink->attrs, "display_visible", &visible);
- wfd_sink_debug("check display visible attribute: %d", visible);
+ wfd_sink_info("check display visible attribute: %d", visible);
+
+ if (FALSE == visible) {
+ wfd_sink_info("skipped to prepare video sink. display_visible is FALSE.");
+ g_object_set(G_OBJECT(video_sink), "visible", visible, NULL);
+ return MM_ERROR_NONE;
+ }
/* configuring display */
switch (surface_type) {
- case MM_DISPLAY_SURFACE_EVAS: {
- void *object = NULL;
- gint scaling = 0;
-
- /* common case if using evas surface */
- mm_attrs_get_data_by_name(wfd_sink->attrs, "display_overlay", &object);
- mm_attrs_get_int_by_name(wfd_sink->attrs, "display_evas_do_scaling", &scaling);
- if (object) {
- wfd_sink_debug("set video param : evas-object %x", object);
- g_object_set(G_OBJECT(video_sink), "evas-object", object, NULL);
- } else {
- wfd_sink_error("no evas object");
- return MM_ERROR_WFD_INTERNAL;
- }
- }
- break;
-
- case MM_DISPLAY_SURFACE_OVERLAY: {
- static unsigned int wl_surface_id = 0;
- static void *display_overlay = NULL;
- int wl_window_x = 0;
- int wl_window_y = 0;
- int wl_window_width = 0;
- int wl_window_height = 0;
- struct wl_surface *wl_surface = NULL;
- struct wl_display *wl_display = NULL;
- Ecore_Wl_Window *wl_window = NULL;
- wl_client *wlclient = NULL;
- Evas_Object *obj = NULL;
- void *object = NULL;
- const char *object_type = NULL;
- int ret = 0;
-
- mm_attrs_get_data_by_name(wfd_sink->attrs, "display_overlay", &object);
-
- if (object != NULL) {
- obj = (Evas_Object *)object;
- object_type = evas_object_type_get(obj);
- wfd_sink_debug("window object type : %s", object_type);
-
- /* wayland overlay surface */
- LOGI("Wayland overlay surface type");
- evas_object_geometry_get(obj, &wl_window_x, &wl_window_y, &wl_window_width, &wl_window_height);
-
- wfd_sink_debug("x[%d] y[%d] width[%d] height[%d]", wl_window_x, wl_window_y,
- wl_window_width, wl_window_height);
-
- wl_window = elm_win_wl_window_get(obj);
- wl_surface = (struct wl_surface *) ecore_wl_window_surface_get(wl_window);
-
- /* get wl_display */
- wl_display = (struct wl_display *) ecore_wl_display_get();
-
- wfd_sink_debug("previous display object : %p current object : %p", display_overlay, object);
- if (wl_surface && wl_display && (wl_surface_id == 0 || display_overlay != object)) {
- wfd_sink_debug("surface = %p, wl_display = %p", wl_surface, wl_display);
- display_overlay = object;
-
- ret = mm_wfd_sink_wlclient_create(&wlclient);
- if (ret != MM_ERROR_NONE) {
- wfd_sink_error("Wayland client create failure");
- return ret;
- }
- wfd_sink_debug("Try to get surface id");
-
- wl_surface_id = mm_wfd_sink_wlclient_get_wl_window_wl_surface_id(wlclient, wl_surface, wl_display);
-
- wfd_sink_debug("wl_surface_id = %d", wl_surface_id);
-
- if (wlclient) {
- g_free(wlclient);
- wlclient = NULL;
- }
- }
- wfd_sink_debug("set video param : surface_id %d", wl_surface_id);
- gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(video_sink),
- wl_surface_id);
- /* After setting window handle, set render rectangle */
- gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(video_sink),
- wl_window_x, wl_window_y, wl_window_width, wl_window_height);
- } else {
- wfd_sink_debug("display object is NULL!");
- return MM_ERROR_WFD_INTERNAL;
- }
- }
- break;
-
- case MM_DISPLAY_SURFACE_NULL: {
- /* do nothing */
- wfd_sink_error("Not Supported Surface.");
- return MM_ERROR_WFD_INTERNAL;
- }
- break;
- default: {
- wfd_sink_error("Not Supported Surface.(default case)");
+ case MM_DISPLAY_SURFACE_EVAS: {
+ void *object = NULL;
+ gint scaling = 0;
+
+ /* common case if using evas surface */
+ mm_attrs_get_data_by_name(wfd_sink->attrs, "display_overlay", &object);
+ mm_attrs_get_int_by_name(wfd_sink->attrs, "display_evas_do_scaling", &scaling);
+ if (object) {
+ wfd_sink_debug("set video param : evas-object %p", object);
+ g_object_set(G_OBJECT(video_sink), "evas-object", object, NULL);
+ } else {
+ wfd_sink_error("no evas object");
return MM_ERROR_WFD_INTERNAL;
}
- break;
+ }
+ break;
+ case MM_DISPLAY_SURFACE_OVERLAY:
+ if (USE_EXTERNAL_WL_DISPLAY_HANDLE) {
+ gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(video_sink),
+ wfd_sink->display_surface_id);
+ } else {
+ gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(video_sink),
+ wfd_sink->display_surface_id);
+ }
+ break;
+ case MM_DISPLAY_SURFACE_NULL:
+ /* do nothing */
+ wfd_sink_error("Not Supported Surface.");
+ return MM_ERROR_WFD_INTERNAL;
+ default:
+ wfd_sink_error("Not Supported Surface.(default case)");
+ return MM_ERROR_WFD_INTERNAL;
}
g_object_set(G_OBJECT(video_sink), "qos", FALSE, NULL);
return MM_ERROR_NONE;
}
-static int __mm_wfd_sink_destroy_video_decodebin(mm_wfd_sink_t *wfd_sink)
+int __mm_wfd_sink_destroy_video_decodebin(mm_wfd_sink_t *wfd_sink)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
MMWFDSinkGstElement *v_decodebin = NULL;
GstObject *parent = NULL;
int i;
- wfd_sink_debug_fenter();
+ wfd_sink_error_fenter();
wfd_sink_return_val_if_fail(wfd_sink, MM_ERROR_WFD_NOT_INITIALIZED);
wfd_sink_debug("video decodebin has no parent.. need to relase by itself");
if (GST_STATE(v_decodebin[WFD_SINK_V_D_BIN].gst) >= GST_STATE_READY) {
- wfd_sink_debug("try to change state of video decodebin to NULL");
+ wfd_sink_error("try to change state of video decodebin to NULL");
ret = gst_element_set_state(v_decodebin[WFD_SINK_V_D_BIN].gst, GST_STATE_NULL);
if (ret != GST_STATE_CHANGE_SUCCESS) {
wfd_sink_error("failed to change state of video decodebin to NULL");
MMWFDSINK_FREEIF(wfd_sink->pipeline->v_decodebin);
- wfd_sink_debug_fleave();
+ wfd_sink_error_fleave();
return MM_ERROR_NONE;
}
return MM_ERROR_NONE;
}
+ if (wfd_sink->stream_info.video_stream_info.codec == MM_WFD_SINK_VIDEO_CODEC_NONE) {
+ wfd_sink_debug("Skip create video decodebin for none video codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* check video decodebin could be linked now */
switch (wfd_sink->stream_info.video_stream_info.codec) {
case MM_WFD_SINK_VIDEO_CODEC_H264:
video_codec = WFD_VIDEO_H265;
link = TRUE;
break;
+ case MM_WFD_SINK_VIDEO_CODEC_VP9:
+ video_codec = WFD_VIDEO_VP9;
+ link = TRUE;
+ break;
case MM_WFD_SINK_VIDEO_CODEC_NONE:
default:
wfd_sink_debug("video decodebin could NOT be linked now, just create");
MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_H264_PARSE].gst, "sink");
MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_H264_PARSE].gst, "src");
+ if (!g_strcmp0(wfd_sink->ini.name_of_video_h264_decoder, "sprddec_h264") ||
+ !g_strcmp0(wfd_sink->ini.name_of_video_h264_decoder, "omxdec_h264")) {
+ /* acquire HW resource */
+ wfd_sink_error("wfd_sink->ini.name_of_video_h264_decoder1 :: %s", wfd_sink->ini.name_of_video_h264_decoder);
+ if (__mm_wfd_sink_acquire_hw_resource(wfd_sink, MM_WFD_SINK_RESOURCE_TYPE_VIDEO_DECODER) != MM_ERROR_NONE) {
+ wfd_sink_error("failed to acquire video decoder resource");
+ goto CREATE_ERROR;
+ }
+ }
+
/* create dec */
MMWFDSINK_CREATE_ELEMENT(v_decodebin, WFD_SINK_V_D_H264_DEC, wfd_sink->ini.name_of_video_h264_decoder, "video_h264_dec", FALSE);
MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_H264_DEC].gst, "sink");
if (video_codec & WFD_VIDEO_H265) {
/* create parser */
- MMWFDSINK_CREATE_ELEMENT(v_decodebin, WFD_SINK_V_D_H265_PARSE, wfd_sink->ini.name_of_video_h264_parser, "video_h265_parser", FALSE);
+ MMWFDSINK_CREATE_ELEMENT(v_decodebin, WFD_SINK_V_D_H265_PARSE, wfd_sink->ini.name_of_video_h265_parser, "video_h265_parser", FALSE);
MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_H265_PARSE].gst, "sink");
MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_H265_PARSE].gst, "src");
}
}
}
+ if (video_codec & WFD_VIDEO_VP9) {
+ /* create parser */
+ MMWFDSINK_CREATE_ELEMENT(v_decodebin, WFD_SINK_V_D_VP9_PARSE, wfd_sink->ini.name_of_video_vp9_parser, "video_vp9_parser", FALSE);
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_VP9_PARSE].gst, "sink");
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_VP9_PARSE].gst, "src");
+
+ /* create dec */
+ MMWFDSINK_CREATE_ELEMENT(v_decodebin, WFD_SINK_V_D_VP9_DEC, wfd_sink->ini.name_of_video_vp9_decoder, "video_vp9_dec", FALSE);
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_VP9_DEC].gst, "sink");
+ MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_decodebin[WFD_SINK_V_D_VP9_DEC].gst, "src");
+ if (v_decodebin[WFD_SINK_V_D_VP9_DEC].gst) {
+ if (MM_ERROR_NONE != __mm_wfd_sink_prepare_videodec(wfd_sink, v_decodebin[WFD_SINK_V_D_VP9_DEC].gst)) {
+ wfd_sink_error("failed to set video decoder property...");
+ goto CREATE_ERROR;
+ }
+ }
+ }
g_list_free(element_bucket);
return MM_ERROR_WFD_INTERNAL;
}
-static int __mm_wfd_sink_destroy_video_sinkbin(mm_wfd_sink_t *wfd_sink)
+int __mm_wfd_sink_destroy_video_sinkbin(mm_wfd_sink_t *wfd_sink)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
MMWFDSinkGstElement *v_sinkbin = NULL;
GstObject *parent = NULL;
int i;
- wfd_sink_debug_fenter();
+ wfd_sink_error_fenter();
wfd_sink_return_val_if_fail(wfd_sink, MM_ERROR_WFD_NOT_INITIALIZED);
wfd_sink_debug("video sinkbin has no parent.. need to relase by itself");
if (GST_STATE(v_sinkbin[WFD_SINK_V_S_BIN].gst) >= GST_STATE_READY) {
- wfd_sink_debug("try to change state of video sinkbin to NULL");
+ wfd_sink_error("try to change state of video sinkbin to NULL");
ret = gst_element_set_state(v_sinkbin[WFD_SINK_V_S_BIN].gst, GST_STATE_NULL);
if (ret != GST_STATE_CHANGE_SUCCESS) {
wfd_sink_error("failed to change state of video sinkbin to NULL");
MMWFDSINK_FREEIF(wfd_sink->pipeline->v_sinkbin);
- wfd_sink_debug_fleave();
+ wfd_sink_error_fleave();
return MM_ERROR_NONE;
}
return MM_ERROR_NONE;
}
+ if (wfd_sink->stream_info.video_stream_info.codec == MM_WFD_SINK_VIDEO_CODEC_NONE) {
+ wfd_sink_error("Skip create video sink bin for non video codec.");
+ wfd_sink_debug_fleave();
+ return MM_ERROR_NONE;
+ }
+
/* alloc handles */
v_sinkbin = (MMWFDSinkGstElement *)g_malloc0(sizeof(MMWFDSinkGstElement) * WFD_SINK_V_S_NUM);
if (!v_sinkbin) {
/* create sink */
mm_attrs_get_int_by_name(wfd_sink->attrs, "display_surface_type", &surface_type);
-
- if (surface_type == MM_DISPLAY_SURFACE_OVERLAY) {
+ if (surface_type == MM_DISPLAY_SURFACE_OVERLAY &&
+ (__mm_wfd_sink_acquire_hw_resource(wfd_sink, MM_WFD_SINK_RESOURCE_TYPE_VIDEO_OVERLAY) == MM_ERROR_NONE)) {
MMWFDSINK_CREATE_ELEMENT(v_sinkbin, WFD_SINK_V_S_SINK, wfd_sink->ini.name_of_video_sink, "video_sink", TRUE);
} else if (surface_type == MM_DISPLAY_SURFACE_EVAS) {
MMWFDSINK_CREATE_ELEMENT(v_sinkbin, WFD_SINK_V_S_SINK, wfd_sink->ini.name_of_video_evas_sink, "video_sink", TRUE);
goto CREATE_ERROR;
}
+ wfd_sink->interrupted_by_resource = FALSE;
+
MMWFDSINK_PAD_PROBE(wfd_sink, NULL, v_sinkbin[WFD_SINK_V_S_SINK].gst, "sink");
if (v_sinkbin[WFD_SINK_V_S_SINK].gst) {
if (MM_ERROR_NONE != __mm_wfd_sink_prepare_videosink(wfd_sink, v_sinkbin[WFD_SINK_V_S_SINK].gst)) {
return MM_ERROR_WFD_INTERNAL;
}
-static int __mm_wfd_sink_destroy_pipeline(mm_wfd_sink_t *wfd_sink)
+int __mm_wfd_sink_destroy_pipeline(mm_wfd_sink_t *wfd_sink)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstBus *bus = NULL;
ret = gst_element_set_state(mainbin[WFD_SINK_M_PIPE].gst, GST_STATE_NULL);
if (ret != GST_STATE_CHANGE_SUCCESS) {
- wfd_sink_error("failed to change state of pipeline to NULL");
+ wfd_sink_error("failed to change state of pipeline to NULL. ret[%d]", ret);
return MM_ERROR_WFD_INTERNAL;
} else {
wfd_sink_debug("Successed to change state of pipeline to NULL");
wfd_sink->audio_decodebin_is_linked = FALSE;
wfd_sink->video_decodebin_is_linked = FALSE;
wfd_sink->need_to_reset_basetime = FALSE;
+ wfd_sink->supportive_resolution = MM_WFD_SINK_RESOLUTION_UNKNOWN;
wfd_sink_debug_fleave();
return MM_ERROR_NONE;
}
+/* Resolve the wl_surface id of @display_object (must be an "elm_win"
+ * Evas_Object) via a private wayland client and cache it in
+ * wfd_sink->display_surface_id for later use by
+ * gst_video_overlay_set_wl_window_wl_surface_id().
+ * Must be called from the main thread (Ecore/Evas calls below are not
+ * thread-safe).
+ * Returns MM_ERROR_NONE on success, MM_ERROR_WFD_INTERNAL on failure,
+ * MM_ERROR_WFD_NOT_INITIALIZED if @wfd_sink is NULL. */
+int _mm_wfd_sink_set_display_overlay(mm_wfd_sink_t *wfd_sink, void *display_object)
+{
+	int wl_surface_id = 0;
+	/* remembers the last object we resolved, so a repeated call with the
+	 * same window does not create another wayland client round-trip */
+	static void *display_overlay = NULL;
+	int wl_window_x = 0;
+	int wl_window_y = 0;
+	int wl_window_width = 0;
+	int wl_window_height = 0;
+	struct wl_surface *wl_surface = NULL;
+	struct wl_display *wl_display = NULL;
+	Ecore_Wl2_Window *wl2_window = NULL;
+	Ecore_Wl2_Display *wl2_display = NULL;
+	wl_client *wlclient = NULL;
+	Evas_Object *obj = NULL;
+	void *object = display_object;
+	const char *object_type = NULL;
+	int ret = 0;
+	pid_t pid = getpid();
+	pid_t tid = syscall(SYS_gettid);
+
+	wfd_sink_return_val_if_fail(wfd_sink, MM_ERROR_WFD_NOT_INITIALIZED);
+
+	/* in the main thread the thread id equals the process id */
+	if (pid != tid) {
+		wfd_sink_error("API isn't called in main thread");
+		return MM_ERROR_WFD_INTERNAL;
+	}
+
+	if (object == NULL) {
+		wfd_sink_debug("display object is NULL!");
+		return MM_ERROR_WFD_INTERNAL;
+	}
+	obj = (Evas_Object *)object;
+	object_type = evas_object_type_get(obj);
+	wfd_sink_debug("window object type : %s", object_type);
+
+	if (strcmp(object_type, "elm_win")) {
+		wfd_sink_error("Window type is not elm_win");
+		return MM_ERROR_WFD_INTERNAL;
+	}
+
+	/* wayland overlay surface */
+	wfd_sink_info("Wayland overlay surface type");
+	evas_object_geometry_get(obj, &wl_window_x, &wl_window_y, &wl_window_width, &wl_window_height);
+
+	wfd_sink_debug("x[%d] y[%d] width[%d] height[%d]", wl_window_x, wl_window_y,
+			wl_window_width, wl_window_height);
+
+	wl2_window = ecore_evas_wayland2_window_get(ecore_evas_ecore_evas_get(evas_object_evas_get(obj)));
+	if (wl2_window == NULL) {
+		wfd_sink_error("failed to get Ecore_Wl2_Window from evas object");
+		return MM_ERROR_WFD_INTERNAL;
+	}
+	ecore_wl2_window_video_has(wl2_window, EINA_TRUE);
+	wl_surface = ecore_wl2_window_surface_get(wl2_window);
+
+	/* get wl_display */
+	wl2_display = ecore_wl2_connected_display_get(NULL);
+	wl_display = ecore_wl2_display_get(wl2_display);
+
+	wfd_sink_debug("previous display object : %p current object : %p", display_overlay, object);
+	if (wl_surface && wl_display && (wl_surface_id == 0 || display_overlay != object)) {
+		wfd_sink_debug("surface = %p, wl_display = %p", wl_surface, wl_display);
+		display_overlay = object;
+
+		ret = mm_wfd_sink_wlclient_create(&wlclient);
+		if (ret != MM_ERROR_NONE) {
+			wfd_sink_error("Wayland client create failure");
+			return MM_ERROR_WFD_INTERNAL;
+		}
+		wfd_sink_debug("Try to get surface id");
+
+		wl_surface_id = mm_wfd_sink_wlclient_get_wl_window_wl_surface_id(wlclient, wl_surface, wl_display);
+
+		wfd_sink_debug("wl_surface_id = %d", wl_surface_id);
+		if (wl_surface_id == 0) {
+			wfd_sink_error("wl_surface_id cannot be zero.");
+			/* fix: release the wayland client on this error path too
+			 * (was leaked before) */
+			g_free(wlclient);
+			return MM_ERROR_WFD_INTERNAL;
+		}
+		wfd_sink->display_surface_id = wl_surface_id;
+
+		/* g_free(NULL) is a no-op, so no guard is needed */
+		g_free(wlclient);
+		wlclient = NULL;
+	}
+	return MM_ERROR_NONE;
+}
+
void __mm_wfd_sink_print_ref_count(mm_wfd_sink_t *wfd_sink)
{
int i = 0;