bool encoded_audio_frame_cb_is_set;
#endif
#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
- GstElement *render_pipeline;
- GstElement *appsrc;
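+ /* debug-only: local pipelines used to render the encoded frames
+  * delivered through the encoded-frame callbacks */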
+ GstElement *audio_render_pipeline;
+ GstElement *video_render_pipeline;
+ GstElement *appsrc_for_audio;
+ GstElement *appsrc_for_video;
#endif
media_packet_source_s packet_sources[MAX_MEDIA_PACKET_SOURCE_LEN];
} connection_s;
static webrtc_signaling_server_h g_inner_signaling_server;
#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
-static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc);
+static GstBuffer *__alloc_buffer_from_packet(media_packet_h packet);
+static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc);
+#endif
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+static GstElement* __build_appsrc_opus_render_pipeline(GstElement **appsrc);
#endif
static void win_del(void *data, Evas_Object *obj, void *event)
}
#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
- if (g_conns[index].render_pipeline) {
- GstStateChangeReturn state_change_ret = gst_element_set_state(g_conns[index].render_pipeline, GST_STATE_NULL);
- if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+ if (g_conns[index].video_render_pipeline) {
+ if (gst_element_set_state(g_conns[index].video_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
+ g_printerr("failed to set state to NULL\n");
+ gst_object_unref(g_conns[index].video_render_pipeline);
+ g_conns[index].video_render_pipeline = NULL;
+ g_conns[index].appsrc_for_video = NULL;
+ g_print("video render pipeline is released\n");
+ }
+ if (g_conns[index].audio_render_pipeline) {
+ if (gst_element_set_state(g_conns[index].audio_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
g_printerr("failed to set state to NULL\n");
- gst_object_unref(g_conns[index].render_pipeline);
- g_conns[index].render_pipeline = NULL;
- g_print("appsrc render pipeline is released\n");
+ gst_object_unref(g_conns[index].audio_render_pipeline);
+ g_conns[index].audio_render_pipeline = NULL;
+ g_conns[index].appsrc_for_audio = NULL;
+ g_print("audio render pipeline is released\n");
}
#endif
#ifndef TIZEN_TV
}
#else
g_print("Video track is added\n");
- if ((conn->render_pipeline = __build_appsrc_render_pipeline(&conn->appsrc))) {
- GstStateChangeReturn state_change_ret = gst_element_set_state(conn->render_pipeline, GST_STATE_PLAYING);
+ if ((conn->video_render_pipeline = __build_appsrc_h264_render_pipeline(&conn->appsrc_for_video))) {
+ GstStateChangeReturn state_change_ret = gst_element_set_state(conn->video_render_pipeline, GST_STATE_PLAYING);
if (state_change_ret == GST_STATE_CHANGE_FAILURE)
- g_printerr("failed to set state to PLAYING\n");
+ g_printerr("failed to set state to PLAYING to video render pipeline\n");
}
#endif
} else if (type == WEBRTC_MEDIA_TYPE_AUDIO) {
- int ret;
g_print("Audio track is added\n");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+ int ret;
if (!conn->render.stream_info) {
ret = sound_manager_create_stream_information(SOUND_STREAM_TYPE_MEDIA, NULL, NULL, &conn->render.stream_info);
ret = webrtc_set_sound_stream_info(webrtc, id, conn->render.stream_info);
if (ret != WEBRTC_ERROR_NONE)
g_printerr("failed to webrtc_set_sound_stream_info(), ret[0x%x]\n", ret);
+#else
+ if ((conn->audio_render_pipeline = __build_appsrc_opus_render_pipeline(&conn->appsrc_for_audio))) {
+ GstStateChangeReturn state_change_ret = gst_element_set_state(conn->audio_render_pipeline, GST_STATE_PLAYING);
+ if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+ g_printerr("failed to set state to PLAYING to audio render pipeline\n");
+ }
+#endif
}
}
#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
{
GstFlowReturn gst_ret = GST_FLOW_OK;
- GstBuffer *buffer = NULL;
-
- media_packet_get_extra(packet, (void**)&buffer);
- if (buffer) {
- GstMapInfo buff_info = GST_MAP_INFO_INIT;
- if (!gst_buffer_map(buffer, &buff_info, GST_MAP_READ)) {
- g_print("failed to gst_buffer_map()\n");
- media_packet_destroy(packet);
- return;
- }
- g_print("buffer[%p] buffer_info.data[%p]\n", buffer, buff_info.data);
- gst_buffer_unmap(buffer, &buff_info);
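+ /* route the frame to the matching debug render pipeline; note that
+  * first_audio_packet is a static local shared by all connections,
+  * which is acceptable for this debug-only path */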
+ GstElement *appsrc = (type == WEBRTC_MEDIA_TYPE_AUDIO) ? conn->appsrc_for_audio : conn->appsrc_for_video;
+ static bool first_audio_packet = true;
+ GstBuffer *buffer = __alloc_buffer_from_packet(packet);
+ if (!appsrc || !buffer) {
+ if (buffer)
+ gst_buffer_unref(buffer);
+ media_packet_destroy(packet);
+ return;
+ }
+
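+ /* the audio appsrc starts with bare audio/x-opus caps; fill in the
+  * channels/rate from the first audio packet's media format so that
+  * downstream gets a complete stream description */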
+ if (type == WEBRTC_MEDIA_TYPE_AUDIO && first_audio_packet) {
+ media_format_h format = NULL;
+ int channels = 0;
+ int rate = 0;
+
+ media_packet_get_format(packet, &format);
+ media_format_get_audio_info(format, NULL, &channels, &rate, NULL, NULL);
+ if (channels > 0 && rate > 0) {
+ GstCaps *caps = gst_caps_new_simple("audio/x-opus",
+ "channel-mapping-family", G_TYPE_INT, 0, /* FIXME: need to get this value from media packet */
+ "channels", G_TYPE_INT, channels,
+ "rate", G_TYPE_INT, rate,
+ NULL);
+
+ g_print("channels[%d] and rate[%d] are added to appsrc caps\n", channels, rate);
- g_signal_emit_by_name(G_OBJECT(conn->appsrc), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK)
- g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
+ g_object_set(G_OBJECT(appsrc), "caps", caps, NULL);
+ gst_caps_unref(caps);
+ }
+ if (format)
+ media_format_unref(format);
+ first_audio_packet = false;
}
+
+ g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK)
+ g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
+
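+ /* the "push-buffer" action signal does not take ownership of the
+  * buffer, so drop our reference here */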
+ gst_buffer_unref(buffer);
}
-#endif
+#endif /* __DEBUG_VALIDATE_ENCODED_FRAME_CB__ */
/* media packet should be freed after use */
media_packet_destroy(packet);
}
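+
+/* For reference, a minimal sketch of what __alloc_buffer_from_packet() is
+ * assumed to do: copy the packet payload into a newly allocated GstBuffer
+ * and carry the timestamp over. The real definition lives elsewhere in
+ * this file; the size/pts handling below is illustrative only (pts is
+ * assumed to be in nanoseconds, matching GstBuffer), hence the #if 0. */
+#if 0
+static GstBuffer *__alloc_buffer_from_packet(media_packet_h packet)
+{
+ GstBuffer *buffer;
+ uint64_t size = 0;
+ uint64_t pts = 0;
+ void *data = NULL;
+
+ if (media_packet_get_buffer_size(packet, &size) != MEDIA_PACKET_ERROR_NONE || size == 0)
+ return NULL;
+ if (media_packet_get_buffer_data_ptr(packet, &data) != MEDIA_PACKET_ERROR_NONE || !data)
+ return NULL;
+
+ buffer = gst_buffer_new_allocate(NULL, size, NULL);
+ if (!buffer)
+ return NULL;
+
+ gst_buffer_fill(buffer, 0, data, size);
+ if (media_packet_get_pts(packet, &pts) == MEDIA_PACKET_ERROR_NONE)
+ GST_BUFFER_PTS(buffer) = pts;
+
+ return buffer;
+}
+#endif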
RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
g_print("webrtc_set_encoded_audio_frame_cb() success\n");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
#ifndef TIZEN_TV
if (!g_conns[index].encoded_audio_frame_cb_is_set) {
g_conns[index].encoded_audio_frame_cb_is_set = true;
g_print("espp set audio stream info\n");
}
#endif
+#endif
}
static void _webrtc_unset_encoded_audio_frame_cb(int index)
RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
g_print("webrtc_unset_encoded_audio_frame_cb() success\n");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
#ifndef TIZEN_TV
if (g_conns[index].encoded_audio_frame_cb_is_set) {
g_conns[index].encoded_audio_frame_cb_is_set = false;
g_print("espp close & destroy\n");
}
#endif
+#endif
}
static void _webrtc_set_encoded_video_frame_cb(int index)
RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
g_print("webrtc_set_encoded_video_frame_cb() success\n");
+
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
#ifndef TIZEN_TV
if (!g_conns[index].encoded_video_frame_cb_is_set) {
g_conns[index].encoded_video_frame_cb_is_set = true;
g_print("espp set video stream info\n");
}
#endif
+#endif
}
static void _webrtc_unset_encoded_video_frame_cb(int index)
RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
g_print("webrtc_unset_encoded_video_frame_cb() success\n");
+
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
#ifndef TIZEN_TV
if (g_conns[index].encoded_video_frame_cb_is_set) {
g_conns[index].encoded_video_frame_cb_is_set = false;
g_print("espp close & destroy\n");
}
#endif
+#endif
}
static void __media_packet_source_buffer_state_changed_cb(unsigned int source_id, webrtc_media_packet_source_buffer_state_e state, void *user_data)
return NULL;
}
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
+#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet)
{
bool has_tbm_surface = false;
if (gst_buffer_map(buffer, &buff_info, GST_MAP_READWRITE)) {
if (has_tbm_surface) {
- int i;
+ unsigned int i;
guint8 *ptr = buff_info.data;
for (i = 0; i < ts_info.num_planes; i++) {
g_print("plane[%d][ptr:%p size:%u]\n", i, ts_info.planes[i].ptr, ts_info.planes[i].size);
}
#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
-static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc)
+static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc)
{
GstElement *pipeline;
GstElement *src;
return NULL;
}
- pipeline = gst_pipeline_new("appsrc-render-pipeline");
+ pipeline = gst_pipeline_new("appsrc-h264-render-pipeline");
src = gst_element_factory_make("appsrc", NULL);
if (!src) {
"alignment", G_TYPE_STRING, "au",
NULL);
- g_object_set(G_OBJECT(src), "caps", caps, NULL);
+ g_object_set(G_OBJECT(src),
+ "caps", caps,
+ "format", GST_FORMAT_TIME,
+ NULL);
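+ /* the "caps" property setter takes its own reference, so the local
+  * reference can be dropped */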
+ gst_caps_unref(caps);
dec = gst_element_factory_make("avdec_h264", NULL);
if (!dec) {
gst_bin_add_many(GST_BIN(pipeline), src, dec, convert, sink, NULL);
if (!gst_element_link_many(src, dec, convert, sink, NULL)) {
- g_printerr("failed to gst_element_link_many(), appsrc_render\n");
+ g_printerr("failed to gst_element_link_many(), appsrc_h264_render\n");
+ goto error;
+ }
+
+ *appsrc = src;
+
+ g_print("appsrc H264 render pipeline is created\n");
+
+ return pipeline;
+
+error:
+ gst_object_unref(pipeline);
+ return NULL;
+}
+#endif
+
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+static GstElement* __build_appsrc_opus_render_pipeline(GstElement **appsrc)
+{
+ GstElement *pipeline;
+ GstElement *src = NULL;
+ GstElement *dec = NULL;
+ GstElement *sink;
+ GstCaps *caps;
+
+ if (!appsrc) {
+ g_printerr("appsrc is NULL\n");
+ return NULL;
+ }
+
+ pipeline = gst_pipeline_new("appsrc-opus-render-pipeline");
+
+ src = gst_element_factory_make("appsrc", NULL);
+ if (!src) {
+ g_printerr("failed to gst_element_factory_make(), appsrc\n");
+ goto error;
+ }
+
+ caps = gst_caps_new_empty_simple("audio/x-opus");
+
+ g_object_set(G_OBJECT(src),
+ "format", GST_FORMAT_TIME,
+ "caps", caps,
+ NULL);
+ gst_caps_unref(caps);
+
+ dec = gst_element_factory_make("opusdec", NULL);
+ if (!dec) {
+ g_printerr("failed to gst_element_factory_make(), opusdec\n");
+ goto error_with_unref_elem;
+ }
+
+ sink = gst_element_factory_make("pulsesink", NULL);
+ if (!sink) {
+ g_printerr("failed to gst_element_factory_make(), pulsesink\n");
+ goto error_with_unref_elem;
+ }
+ g_object_set(G_OBJECT(sink),
+ "sync", FALSE,
+ NULL);
+
+ gst_bin_add_many(GST_BIN(pipeline), src, dec, sink, NULL);
+ if (!gst_element_link_many(src, dec, sink, NULL)) {
+ g_printerr("failed to gst_element_link_many(), appsrc_opus_render\n");
goto error;
}
*appsrc = src;
- g_print("appsrc render pipeline is created\n");
+ g_print("appsrc OPUS render pipeline is created\n");
return pipeline;
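+ /* these error paths are reached only before gst_bin_add_many(), so the
+  * elements still hold floating references that must be dropped manually */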
+error_with_unref_elem:
+ if (src)
+ gst_object_unref(src);
+ if (dec)
+ gst_object_unref(dec);
error:
gst_object_unref(pipeline);
return NULL;
case MEDIA_FORMAT_H264_SP:
packet_source->src_pipeline = __build_h264_format_pipeline(packet_source);
#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
- packet_source->render_pipeline = __build_appsrc_render_pipeline(&packet_source->appsrc);
+ packet_source->render_pipeline = __build_appsrc_h264_render_pipeline(&packet_source->appsrc);
#endif
break;
case MEDIA_FORMAT_I420: