From da6c13225d24e32d29396c792dab498838c6da94 Mon Sep 17 00:00:00 2001
From: Sangchul Lee
Date: Mon, 8 Nov 2021 14:31:47 +0900
Subject: [PATCH] webrtc_test: Add Opus decoding pipeline when
 __DEBUG_VALIDATE_ENCODED_FRAME_CB__ is enabled

[Version] 0.3.37
[Issue Type] Debug

Change-Id: I47828c656e7ac86ee9111434b53478f9f50d4d4d
Signed-off-by: Sangchul Lee
---
 packaging/capi-media-webrtc.spec |   2 +-
 src/webrtc_sink.c                |   3 +-
 test/webrtc_test.c               | 197 +++++++++++++++++++++++++------
 3 files changed, 163 insertions(+), 39 deletions(-)

diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec
index 8fd9af83..5eeae3dc 100644
--- a/packaging/capi-media-webrtc.spec
+++ b/packaging/capi-media-webrtc.spec
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.3.36
+Version:    0.3.37
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
diff --git a/src/webrtc_sink.c b/src/webrtc_sink.c
index 6bd6be49..39467fc2 100644
--- a/src/webrtc_sink.c
+++ b/src/webrtc_sink.c
@@ -790,7 +790,8 @@ static media_packet_h __make_media_packet(webrtc_gst_slot_s *sink, GstBuffer *bu
 	if (ret != WEBRTC_ERROR_NONE)
 		goto error;
 
-	LOG_DEBUG("sink[%p], packet[%p], buffer[%p]", sink, packet, buffer);
+	LOG_DEBUG("sink[%p], packet[%p], buffer[%p, pts:%"G_GUINT64_FORMAT", dts:%"G_GUINT64_FORMAT", duration:%"G_GUINT64_FORMAT"]",
+		sink, packet, buffer, GST_BUFFER_PTS(buffer), GST_BUFFER_DTS(buffer), GST_BUFFER_DURATION(buffer));
 
 	return packet;
 
diff --git a/test/webrtc_test.c b/test/webrtc_test.c
index 93fdf21c..f636e3fb 100644
--- a/test/webrtc_test.c
+++ b/test/webrtc_test.c
@@ -224,8 +224,10 @@ typedef struct _connection_s {
 	bool encoded_audio_frame_cb_is_set;
 #endif
 #ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
-	GstElement *render_pipeline;
-	GstElement *appsrc;
+	GstElement *audio_render_pipeline;
+	GstElement *video_render_pipeline;
+	GstElement *appsrc_for_audio;
+	GstElement *appsrc_for_video;
 #endif
 	media_packet_source_s packet_sources[MAX_MEDIA_PACKET_SOURCE_LEN];
 } connection_s;
@@ -254,7 +256,11 @@ static int g_cnt;
 static webrtc_signaling_server_h g_inner_signaling_server;
 
 #if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
-static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc);
+GstBuffer *__alloc_buffer_from_packet(media_packet_h packet);
+static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc);
+#endif
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+static GstElement* __build_appsrc_opus_render_pipeline(GstElement **appsrc);
 #endif
 
 static void win_del(void *data, Evas_Object *obj, void *event)
@@ -641,13 +647,21 @@ static void _webrtc_stop(int index)
 	}
 
 #ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
-	if (g_conns[index].render_pipeline) {
-		GstStateChangeReturn state_change_ret = gst_element_set_state(g_conns[index].render_pipeline, GST_STATE_NULL);
-		if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+	if (g_conns[index].video_render_pipeline) {
+		if (gst_element_set_state(g_conns[index].video_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
+			g_printerr("failed to set state to NULL\n");
+		gst_object_unref(g_conns[index].video_render_pipeline);
+		g_conns[index].video_render_pipeline = NULL;
+		g_conns[index].appsrc_for_video = NULL;
+		g_print("video render pipeline is released\n");
+	}
+	if (g_conns[index].audio_render_pipeline) {
+		if (gst_element_set_state(g_conns[index].audio_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
 			g_printerr("failed to set state to NULL\n");
-		gst_object_unref(g_conns[index].render_pipeline);
-		g_conns[index].render_pipeline = NULL;
-		g_print("appsrc render pipeline is released\n");
+		gst_object_unref(g_conns[index].audio_render_pipeline);
+		g_conns[index].audio_render_pipeline = NULL;
+		g_conns[index].appsrc_for_audio = NULL;
+		g_print("audio render pipeline is released\n");
 	}
 #endif
 #ifndef TIZEN_TV
@@ -2073,15 +2087,16 @@ static void __track_added_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned
 	}
 #else
 		g_print("Video track is added\n");
-		if ((conn->render_pipeline = __build_appsrc_render_pipeline(&conn->appsrc))) {
-			GstStateChangeReturn state_change_ret = gst_element_set_state(conn->render_pipeline, GST_STATE_PLAYING);
+		if ((conn->video_render_pipeline = __build_appsrc_h264_render_pipeline(&conn->appsrc_for_video))) {
+			GstStateChangeReturn state_change_ret = gst_element_set_state(conn->video_render_pipeline, GST_STATE_PLAYING);
 			if (state_change_ret == GST_STATE_CHANGE_FAILURE)
-				g_printerr("failed to set state to PLAYING\n");
+				g_printerr("failed to set state to PLAYING for video render pipeline\n");
 		}
 #endif
 	} else if (type == WEBRTC_MEDIA_TYPE_AUDIO) {
-		int ret;
 		g_print("Audio track is added\n");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+		int ret;
 		if (!conn->render.stream_info) {
 			ret = sound_manager_create_stream_information(SOUND_STREAM_TYPE_MEDIA, NULL, NULL,
 				&conn->render.stream_info);
@@ -2091,6 +2106,13 @@
 		ret = webrtc_set_sound_stream_info(webrtc, id, conn->render.stream_info);
 		if (ret != WEBRTC_ERROR_NONE)
 			g_printerr("failed to webrtc_set_sound_stream_info(), ret[0x%x]\n", ret);
+#else
+		if ((conn->audio_render_pipeline = __build_appsrc_opus_render_pipeline(&conn->appsrc_for_audio))) {
+			GstStateChangeReturn state_change_ret = gst_element_set_state(conn->audio_render_pipeline, GST_STATE_PLAYING);
+			if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+				g_printerr("failed to set state to PLAYING for audio render pipeline\n");
+		}
+#endif
 	}
 }
 
@@ -2148,25 +2170,44 @@ static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsign
 #ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 	{
 		GstFlowReturn gst_ret = GST_FLOW_OK;
-		GstBuffer *buffer = NULL;
-
-		media_packet_get_extra(packet, (void**)&buffer);
-		if (buffer) {
-			GstMapInfo buff_info = GST_MAP_INFO_INIT;
-			if (!gst_buffer_map(buffer, &buff_info, GST_MAP_READ)) {
-				g_print("failed to gst_buffer_map()\n");
-				media_packet_destroy(packet);
-				return;
-			}
-			g_print("buffer[%p] buffer_info.data[%p]\n", buffer, buff_info.data);
-			gst_buffer_unmap(buffer, &buff_info);
+		GstElement *appsrc = (type == WEBRTC_MEDIA_TYPE_AUDIO) ? conn->appsrc_for_audio : conn->appsrc_for_video;
+		static bool first_audio_packet = true;
+		GstBuffer *buffer = __alloc_buffer_from_packet(packet);
+		if (!buffer) {
+			media_packet_destroy(packet);
+			return;
+		}
+
+		if (type == WEBRTC_MEDIA_TYPE_AUDIO && first_audio_packet) {
+			media_format_h format;
+			int channels = 0;
+			int rate = 0;
+
+			media_packet_get_format(packet, &format);
+			media_format_get_audio_info(format, NULL, &channels, &rate, NULL, NULL);
+			if (channels > 0 && rate > 0) {
+				GstCaps *caps = gst_caps_new_simple("audio/x-opus",
+					"channel-mapping-family", G_TYPE_INT, 0, /* FIXME: need to get this value from media packet */
+					"channels", G_TYPE_INT, channels,
+					"rate", G_TYPE_INT, rate,
+					NULL);
+
+				g_print("channels[%d] and rate[%d] are added to appsrc caps\n", channels, rate);
 
-		g_signal_emit_by_name(G_OBJECT(conn->appsrc), "push-buffer", buffer, &gst_ret, NULL);
-		if (gst_ret != GST_FLOW_OK)
-			g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
+				g_object_set(G_OBJECT(appsrc), "caps", caps, NULL);
+				gst_caps_unref(caps);
+			}
+			media_format_unref(format);
+			first_audio_packet = false;
 		}
+
+		g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+		if (gst_ret != GST_FLOW_OK)
+			g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
+
+		gst_buffer_unref(buffer);
 	}
-#endif
+#endif /* __DEBUG_VALIDATE_ENCODED_FRAME_CB__ */
 
 	/* media packet should be freed after use */
 	media_packet_destroy(packet);
 }
@@ -2179,6 +2220,7 @@
 	RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
 
 	g_print("webrtc_set_encoded_audio_frame_cb() success\n");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 #ifndef TIZEN_TV
 	if (!g_conns[index].encoded_audio_frame_cb_is_set) {
 		g_conns[index].encoded_audio_frame_cb_is_set = true;
@@ -2188,6 +2230,7 @@
 		g_print("espp set audio stream info\n");
 	}
 #endif
+#endif
 }
 
 static void _webrtc_unset_encoded_audio_frame_cb(int index)
@@ -2198,12 +2241,14 @@
 	RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
 
 	g_print("webrtc_unset_encoded_audio_frame_cb() success\n");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 #ifndef TIZEN_TV
 	if (g_conns[index].encoded_audio_frame_cb_is_set) {
 		g_conns[index].encoded_audio_frame_cb_is_set = false;
 		g_print("espp close & destroy\n");
 	}
 #endif
+#endif
 }
 
 static void _webrtc_set_encoded_video_frame_cb(int index)
@@ -2214,6 +2259,8 @@
 	RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
 
 	g_print("webrtc_set_encoded_video_frame_cb() success\n");
+
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 #ifndef TIZEN_TV
 	if (!g_conns[index].encoded_video_frame_cb_is_set) {
 		g_conns[index].encoded_video_frame_cb_is_set = true;
@@ -2223,6 +2270,7 @@
 		g_print("espp set video stream info\n");
 	}
 #endif
+#endif
 }
 
 static void _webrtc_unset_encoded_video_frame_cb(int index)
@@ -2233,12 +2281,15 @@
 	RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
 
 	g_print("webrtc_unset_encoded_video_frame_cb() success\n");
+
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 #ifndef TIZEN_TV
 	if (g_conns[index].encoded_video_frame_cb_is_set) {
 		g_conns[index].encoded_video_frame_cb_is_set = false;
 		g_print("espp close & destroy\n");
 	}
 #endif
+#endif
 }
 
 static void __media_packet_source_buffer_state_changed_cb(unsigned int source_id, webrtc_media_packet_source_buffer_state_e state, void *user_data)
@@ -2644,7 +2695,7 @@ error:
 	return NULL;
 }
 
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
+#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
 static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet)
 {
 	bool has_tbm_surface = false;
@@ -2681,7 +2732,7 @@
 
 	if (gst_buffer_map(buffer, &buff_info, GST_MAP_READWRITE)) {
 		if (has_tbm_surface) {
-			int i;
+			unsigned int i;
 			guint8 *ptr = buff_info.data;
 			for (i = 0; i < ts_info.num_planes; i++) {
 				g_print("plane[%d][ptr:%p size:%u]\n", i, ts_info.planes[i].ptr, ts_info.planes[i].size);
@@ -3164,7 +3215,7 @@ error:
 }
 
 #if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
-static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc)
+static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc)
 {
 	GstElement *pipeline;
 	GstElement *src;
@@ -3178,7 +3229,7 @@
 		return NULL;
 	}
 
-	pipeline = gst_pipeline_new("appsrc-render-pipeline");
+	pipeline = gst_pipeline_new("appsrc-h264-render-pipeline");
 
 	src = gst_element_factory_make("appsrc", NULL);
 	if (!src) {
@@ -3191,7 +3242,11 @@
 		"alignment", G_TYPE_STRING, "au",
 		NULL);
 
-	g_object_set(G_OBJECT(src), "caps", caps, NULL);
+	g_object_set(G_OBJECT(src),
+		"caps", caps,
+		"format", GST_FORMAT_TIME,
+		NULL);
+	gst_caps_unref(caps);
 
 	dec = gst_element_factory_make("avdec_h264", NULL);
 	if (!dec) {
@@ -3213,16 +3268,84 @@
 	gst_bin_add_many(GST_BIN(pipeline), src, dec, convert, sink, NULL);
 	if (!gst_element_link_many(src, dec, convert, sink, NULL)) {
-		g_printerr("failed to gst_element_link_many(), appsrc_render\n");
+		g_printerr("failed to gst_element_link_many(), appsrc_h264_render\n");
+		goto error;
+	}
+
+	*appsrc = src;
+
+	g_print("appsrc H264 render pipeline is created\n");
+
+	return pipeline;
+
+error:
+	gst_object_unref(pipeline);
+	return NULL;
+}
+#endif
+
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+static GstElement* __build_appsrc_opus_render_pipeline(GstElement **appsrc)
+{
+	GstElement *pipeline;
+	GstElement *src = NULL;
+	GstElement *dec = NULL;
+	GstElement *sink;
+	GstCaps *caps;
+
+	if (!appsrc) {
+		g_printerr("appsrc is NULL\n");
+		return NULL;
+	}
+
+	pipeline = gst_pipeline_new("appsrc-opus-render-pipeline");
+
+	src = gst_element_factory_make("appsrc", NULL);
+	if (!src) {
+		g_printerr("failed to gst_element_factory_make(), appsrc\n");
+		goto error;
+	}
+
+	caps = gst_caps_new_simple("audio/x-opus", NULL, NULL);
+
+	g_object_set(G_OBJECT(src),
+		"format", GST_FORMAT_TIME,
+		"caps", caps,
+		NULL);
+	gst_caps_unref(caps);
+
+	dec = gst_element_factory_make("opusdec", NULL);
+	if (!dec) {
+		g_printerr("failed to gst_element_factory_make(), opusdec\n");
+		goto error_with_unref_elem;
+	}
+
+	sink = gst_element_factory_make("pulsesink", NULL);
+	if (!sink) {
+		g_printerr("failed to gst_element_factory_make(), pulsesink\n");
+		goto error_with_unref_elem;
+	}
+	g_object_set(G_OBJECT(sink),
+		"sync", FALSE,
+		NULL);
+
+	gst_bin_add_many(GST_BIN(pipeline), src, dec, sink, NULL);
+	if (!gst_element_link_many(src, dec, sink, NULL)) {
g_printerr("failed to gst_element_link_many(), appsrc_opus_render\n"); goto error; } *appsrc = src; - g_print("appsrc render pipeline is created\n"); + g_print("appsrc OPUS render pipeline is created\n"); return pipeline; +error_with_unref_elem: + if (src) + gst_object_unref(src); + if (dec) + gst_object_unref(dec); error: gst_object_unref(pipeline); return NULL; @@ -3261,7 +3384,7 @@ static GstElement* __make_src_pipeline(media_packet_source_s *packet_source) case MEDIA_FORMAT_H264_SP: packet_source->src_pipeline = __build_h264_format_pipeline(packet_source); #ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__ - packet_source->render_pipeline = __build_appsrc_render_pipeline(&packet_source->appsrc); + packet_source->render_pipeline = __build_appsrc_h264_render_pipeline(&packet_source->appsrc); #endif break; case MEDIA_FORMAT_I420: -- 2.34.1