From: Sangchul Lee Date: Mon, 27 Jun 2022 00:03:55 +0000 (+0900) Subject: webrtc_test: Move functions to webrtc_test_validate.c X-Git-Tag: submit/tizen/20220706.020113~6 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=9712d12a927b71bcb2129f8dcec42d06298db5d5;p=platform%2Fcore%2Fapi%2Fwebrtc.git webrtc_test: Move functions to webrtc_test_validate.c [Version] 0.3.139 [Issue Type] Refactoring Change-Id: I00b78ac410b3ae3c18567d5e9dd5c38bef62f1c8 Signed-off-by: Sangchul Lee --- diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec index 82cb9ada..eb5e238c 100644 --- a/packaging/capi-media-webrtc.spec +++ b/packaging/capi-media-webrtc.spec @@ -1,6 +1,6 @@ Name: capi-media-webrtc Summary: A WebRTC library in Tizen Native API -Version: 0.3.138 +Version: 0.3.139 Release: 0 Group: Multimedia/API License: Apache-2.0 diff --git a/test/webrtc_test.c b/test/webrtc_test.c index 42248cb5..ee48ffb3 100644 --- a/test/webrtc_test.c +++ b/test/webrtc_test.c @@ -82,11 +82,6 @@ static const char *g_webrtc_stats_type_str[] = { static appdata_s g_ad; -GstBuffer *__alloc_buffer_from_packet(media_packet_h packet); -static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc); -static GstElement* __build_appsrc_vp8_render_pipeline(GstElement **appsrc); -static GstElement* __build_appsrc_opus_render_pipeline(GstElement **appsrc); - appdata_s *get_appdata(void) { return &g_ad; @@ -2309,7 +2304,7 @@ static void __track_added_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned } } else { g_print("Video track is added\n"); - if ((conn->video_render_pipeline = __build_appsrc_vp8_render_pipeline(&conn->appsrc_for_video))) { + if ((conn->video_render_pipeline = _build_appsrc_vp8_render_pipeline(&conn->appsrc_for_video))) { GstStateChangeReturn state_change_ret = gst_element_set_state(conn->video_render_pipeline, GST_STATE_PLAYING); if (state_change_ret == GST_STATE_CHANGE_FAILURE) g_printerr("failed to set state to PLAYING to video render pipeline\n"); @@ -2335,7 +2330,7 @@ static void __track_added_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned if (ret != WEBRTC_ERROR_NONE) g_printerr("failed to webrtc_set_sound_stream_info(), ret[0x%x]\n", ret); } else { - if ((conn->audio_render_pipeline = __build_appsrc_opus_render_pipeline(&conn->appsrc_for_audio))) { + if ((conn->audio_render_pipeline = _build_appsrc_opus_render_pipeline(&conn->appsrc_for_audio))) { GstStateChangeReturn state_change_ret = gst_element_set_state(conn->audio_render_pipeline, GST_STATE_PLAYING); if (state_change_ret == GST_STATE_CHANGE_FAILURE) g_printerr("failed to set state to PLAYING to audio render pipeline\n"); @@ -2550,67 +2545,8 @@ static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsign __espp_submit_packet(conn, packet, type); } #endif - if (g_ad.validate_encoded_frame_cb) { - GstFlowReturn gst_ret = GST_FLOW_OK; - GstElement *appsrc = (type == WEBRTC_MEDIA_TYPE_AUDIO) ? 
conn->appsrc_for_audio : conn->appsrc_for_video; - static bool first_audio_packet = true; - static bool first_video_packet = true; - GstBuffer *buffer = __alloc_buffer_from_packet(packet); - if (!buffer) { - media_packet_destroy(packet); - return; - } - - if (type == WEBRTC_MEDIA_TYPE_AUDIO && first_audio_packet) { - media_format_h format; - int channels = 0; - int rate = 0; - - media_packet_get_format(packet, &format); - media_format_get_audio_info(format, NULL, &channels, &rate, NULL, NULL); - if (channels > 0 && rate > 0) { - GstCaps *caps = gst_caps_new_simple("audio/x-opus", - "channel-mapping-family", G_TYPE_INT, 0, /* FIXME: need to get this value from media packet */ - "channels", G_TYPE_INT, channels, - "rate", G_TYPE_INT, rate, - NULL); - - g_print("channels[%d] and rate[%d] are added to appsrc caps\n", channels, rate); - - g_object_set(G_OBJECT(appsrc), "caps", caps, NULL); - gst_caps_unref(caps); - } - media_format_unref(format); - first_audio_packet = false; - - } else if (type == WEBRTC_MEDIA_TYPE_VIDEO && first_video_packet) { - media_format_h format; - int width = 0; - int height = 0; - - media_packet_get_format(packet, &format); - media_format_get_video_info(format, NULL, &width, &height, NULL, NULL); - if (width > 0 && height > 0) { - GstCaps *caps = gst_caps_new_simple("video/x-vp8", - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - NULL); - - g_print("width[%d] height[%d]\n", width, height); - - g_object_set(G_OBJECT(appsrc), "caps", caps, NULL); - gst_caps_unref(caps); - } - media_format_unref(format); - first_video_packet = false; - } - - g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL); - if (gst_ret != GST_FLOW_OK) - g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret); - - gst_buffer_unref(buffer); - } + if (g_ad.validate_encoded_frame_cb) + _push_buffer_to_validate_encoded_frame_callback_data(conn, packet, type); #ifndef TIZEN_TV out: #endif @@ -3112,115 +3048,6 @@ error: return NULL; } -static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet) -{ - bool has_tbm_surface = false; - tbm_surface_info_s ts_info; - guint64 size = 0; - GstMapInfo buff_info = GST_MAP_INFO_INIT; - - if (!buffer) { - g_printerr("buffer is NULL\n"); - return -1; - } - if (!packet) { - g_printerr("packet is NULL\n"); - return -1; - } - - media_packet_get_buffer_size(packet, &size); - media_packet_has_tbm_surface_buffer(packet, &has_tbm_surface); - - if (has_tbm_surface) { - int ret = TBM_SURFACE_ERROR_NONE; - tbm_surface_h ts; - - media_packet_get_tbm_surface(packet, &ts); - ret = tbm_surface_get_info(ts, &ts_info); - if (ret != TBM_SURFACE_ERROR_NONE) { - g_printerr("failed to tbm_surface_get_info()\n"); - return -1; - } - - g_printerr("tbm surface[%p, %ux%u, size:%u, format:%u, num_planes:%u] found\n", - ts, ts_info.width, ts_info.height, ts_info.size, ts_info.format, ts_info.num_planes); - } - - if (gst_buffer_map(buffer, &buff_info, GST_MAP_READWRITE)) { - if (has_tbm_surface) { - unsigned int i; - guint8 *ptr = buff_info.data; - for (i = 0; i < ts_info.num_planes; i++) { - g_print("plane[%d][ptr:%p size:%u]\n", i, ts_info.planes[i].ptr, ts_info.planes[i].size); - memcpy(ptr, ts_info.planes[i].ptr, ts_info.planes[i].size); - ptr += ts_info.planes[i].size; - } - - } else { - guchar *data_ptr; - media_packet_get_buffer_data_ptr(packet, (void **)&data_ptr); - if (data_ptr == NULL) { - g_printerr("invalid packet, data_ptr is NULL\n"); - gst_buffer_unmap(buffer, &buff_info); - return -1; - } - 
memcpy(buff_info.data, data_ptr, size); - } - - buff_info.size = size; - - g_print("buffer[%p], buff_info[data:%p, size:%u]\n", buffer, buff_info.data, buff_info.size); - - gst_buffer_unmap(buffer, &buff_info); - } - - return 0; -} - -GstBuffer *__alloc_buffer_from_packet(media_packet_h packet) -{ - int ret; - guchar *data_ptr; - GstBuffer *new_buffer; - guint64 pts = 0; - guint64 dts = 0; - guint64 duration = 0; - guint64 size = 0; - - media_packet_get_buffer_data_ptr(packet, (void **)&data_ptr); - if (data_ptr == NULL) { - g_printerr("invalid packet, data_ptr is NULL\n"); - return NULL; - } - - media_packet_get_buffer_size(packet, &size); - - new_buffer = gst_buffer_new_and_alloc(size); - if (!new_buffer) { - g_printerr("failed to gst_buffer_new_and_alloc()\n"); - return NULL; - } - - ret = __fill_gst_buffer_mapped_data_from_packet(new_buffer, packet); - if (ret == -1) { - g_printerr("failed to __fill_gst_buffer_mapped_data_from_packet()\n"); - gst_buffer_unref(new_buffer); - return NULL; - } - - media_packet_get_pts(packet, &pts); - GST_BUFFER_PTS(new_buffer) = pts; - media_packet_get_dts(packet, &dts); - GST_BUFFER_DTS(new_buffer) = dts; - - media_packet_get_duration(packet, &duration); - GST_BUFFER_DURATION(new_buffer) = duration; - - g_print("new gst buffer[%p, pts:%llu, dts:%llu, duration:%llu]\n", new_buffer, pts, dts, duration); - - return new_buffer; -} - static void __stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data) { media_packet_source_s *packet_source = (media_packet_source_s *)data; @@ -3254,26 +3081,7 @@ static void __stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *p } if (g_ad.validate_feeding_data) { - GstBuffer *buffer_from_packet = NULL; - bool is_new_buffer = false; - media_packet_get_extra(packet, (void **)&buffer_from_packet); - if (buffer_from_packet) { - g_print("external gstbuffer[%p]\n", buffer_from_packet); - } else { - buffer_from_packet = __alloc_buffer_from_packet(packet); - if (!buffer_from_packet) { - media_packet_destroy(packet); - return; - } - is_new_buffer = true; - } - GstFlowReturn gst_ret = GST_FLOW_OK; - g_signal_emit_by_name(G_OBJECT(packet_source->appsrc), "push-buffer", buffer_from_packet, &gst_ret, NULL); - if (gst_ret != GST_FLOW_OK) - g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret); - if (is_new_buffer) - gst_buffer_unref(buffer_from_packet); - media_packet_destroy(packet); + _push_buffer_to_validate_media_packet_source(packet_source, packet); } else { if (webrtc_media_packet_source_push_packet(packet_source->webrtc, packet_source->source_id, packet) != WEBRTC_ERROR_NONE) { @@ -3630,222 +3438,6 @@ error: return NULL; } -static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc) -{ - GstElement *pipeline; - GstElement *src = NULL; - GstElement *dec = NULL; - GstElement *convert = NULL; - GstElement *sink; - GstCaps *caps; - - if (!appsrc) { - g_printerr("appsrc is NULL\n"); - return NULL; - } - - pipeline = gst_pipeline_new("appsrc-h264-render-pipeline"); - - src = gst_element_factory_make("appsrc", NULL); - if (!src) { - g_printerr("failed to gst_element_factory_make(), appsrc\n"); - goto error; - } - - caps = gst_caps_new_simple("video/x-h264", - "stream-format", G_TYPE_STRING, "byte-stream", - "alignment", G_TYPE_STRING, "au", - NULL); - - g_object_set(G_OBJECT(src), - "caps", caps, - "format", GST_FORMAT_TIME, - NULL); - gst_caps_unref(caps); - - dec = gst_element_factory_make("avdec_h264", NULL); - if (!dec) { - g_printerr("failed to 
gst_element_factory_make(), avdec_h264\n"); - goto error_with_unref_elem; - } - - convert = gst_element_factory_make("videoconvert", NULL); - if (!convert) { - g_printerr("failed to gst_element_factory_make(), videoconvert\n"); - goto error_with_unref_elem; - } - - sink = gst_element_factory_make("tizenwlsink", NULL); - if (!sink) { - g_printerr("failed to gst_element_factory_make(), tizenwlsink\n"); - goto error_with_unref_elem; - } - - gst_bin_add_many(GST_BIN(pipeline), src, dec, convert, sink, NULL); - if (!gst_element_link_many(src, dec, convert, sink, NULL)) { - g_printerr("failed to gst_element_link_many(), appsrc_h264_render\n"); - goto error; - } - - *appsrc = src; - - g_print("appsrc H264 render pipeline is created\n"); - - return pipeline; - -error_with_unref_elem: - if (src) - gst_object_unref(src); - if (dec) - gst_object_unref(dec); - if (convert) - gst_object_unref(convert); -error: - gst_object_unref(pipeline); - return NULL; -} - -static GstElement* __build_appsrc_vp8_render_pipeline(GstElement **appsrc) -{ - GstElement *pipeline; - GstElement *src = NULL; - GstElement *dec = NULL; - GstElement *convert = NULL; - GstElement *sink; - GstCaps *caps; - - if (!appsrc) { - g_printerr("appsrc is NULL\n"); - return NULL; - } - - pipeline = gst_pipeline_new("appsrc-vp8-render-pipeline"); - - src = gst_element_factory_make("appsrc", NULL); - if (!src) { - g_printerr("failed to gst_element_factory_make(), appsrc\n"); - goto error; - } - - caps = gst_caps_new_simple("video/x-vp8", NULL, NULL); - - g_object_set(G_OBJECT(src), - "format", GST_FORMAT_TIME, - "caps", caps, - NULL); - gst_caps_unref(caps); - - dec = gst_element_factory_make("vp8dec", NULL); - if (!dec) { - g_printerr("failed to gst_element_factory_make(), vp8dec\n"); - goto error_with_unref_elem; - } - - convert = gst_element_factory_make("videoconvert", NULL); - if (!convert) { - g_printerr("failed to gst_element_factory_make(), videoconvert\n"); - goto error_with_unref_elem; - } - - sink = gst_element_factory_make("tizenwlsink", NULL); - if (!sink) { - g_printerr("failed to gst_element_factory_make(), tizenwlsink\n"); - goto error_with_unref_elem; - } - g_object_set(G_OBJECT(sink), - "sync", FALSE, - NULL); - - gst_bin_add_many(GST_BIN(pipeline), src, dec, convert, sink, NULL); - if (!gst_element_link_many(src, dec, convert, sink, NULL)) { - g_printerr("failed to gst_element_link_many(), appsrc_vp8_render\n"); - goto error; - } - - *appsrc = src; - - g_print("appsrc VP8 render pipeline is created\n"); - - return pipeline; - -error_with_unref_elem: - if (src) - gst_object_unref(src); - if (dec) - gst_object_unref(dec); - if (convert) - gst_object_unref(convert); -error: - gst_object_unref(pipeline); - return NULL; -} - -static GstElement* __build_appsrc_opus_render_pipeline(GstElement **appsrc) -{ - GstElement *pipeline; - GstElement *src = NULL; - GstElement *dec = NULL; - GstElement *sink; - GstCaps *caps; - - if (!appsrc) { - g_printerr("appsrc is NULL\n"); - return NULL; - } - - pipeline = gst_pipeline_new("appsrc-opus-render-pipeline"); - - src = gst_element_factory_make("appsrc", NULL); - if (!src) { - g_printerr("failed to gst_element_factory_make(), appsrc\n"); - goto error; - } - - caps = gst_caps_new_simple("audio/x-opus", NULL, NULL); - - g_object_set(G_OBJECT(src), - "format", GST_FORMAT_TIME, - "caps", caps, - NULL); - gst_caps_unref(caps); - - dec = gst_element_factory_make("opusdec", NULL); - if (!dec) { - g_printerr("failed to gst_element_factory_make(), opusdec\n"); - goto error_with_unref_elem; - } - - 
sink = gst_element_factory_make("pulsesink", NULL); - if (!sink) { - g_printerr("failed to gst_element_factory_make(), pulsesink\n"); - goto error_with_unref_elem; - } - g_object_set(G_OBJECT(sink), - "sync", FALSE, - NULL); - - gst_bin_add_many(GST_BIN(pipeline), src, dec, sink, NULL); - if (!gst_element_link_many(src, dec, sink, NULL)) { - g_printerr("failed to gst_element_link_many(), appsrc_opus_render\n"); - goto error; - } - - *appsrc = src; - - g_print("appsrc OPUS render pipeline is created\n"); - - return pipeline; - -error_with_unref_elem: - if (src) - gst_object_unref(src); - if (dec) - gst_object_unref(dec); -error: - gst_object_unref(pipeline); - return NULL; -} - static GstElement* __make_src_pipeline(media_packet_source_s *packet_source) { media_format_type_e format_type; @@ -3878,7 +3470,7 @@ static GstElement* __make_src_pipeline(media_packet_source_s *packet_source) case MEDIA_FORMAT_H264_SP: packet_source->src_pipeline = __build_h264_format_pipeline(packet_source); if (g_ad.validate_feeding_data) - packet_source->render_pipeline = __build_appsrc_h264_render_pipeline(&packet_source->appsrc); + packet_source->render_pipeline = _build_appsrc_h264_render_pipeline(&packet_source->appsrc); break; case MEDIA_FORMAT_I420: case MEDIA_FORMAT_NV12: diff --git a/test/webrtc_test_priv.h b/test/webrtc_test_priv.h index d60982d1..cc987f19 100644 --- a/test/webrtc_test_priv.h +++ b/test/webrtc_test_priv.h @@ -305,6 +305,12 @@ void reset_menu_state(void); void quit_program(void); bool foreach_turn_server(const char *turn_server, gpointer user_data); +GstElement *_build_appsrc_h264_render_pipeline(GstElement **appsrc); +GstElement *_build_appsrc_vp8_render_pipeline(GstElement **appsrc); +GstElement *_build_appsrc_opus_render_pipeline(GstElement **appsrc); +void _push_buffer_to_validate_media_packet_source(media_packet_source_s *packet_source, media_packet_h packet); +void _push_buffer_to_validate_encoded_frame_callback_data(connection_s *conn, media_packet_h packet, webrtc_media_type_e type); + #ifdef __cplusplus } #endif /* __cplusplus */ diff --git a/test/webrtc_test_validate.c b/test/webrtc_test_validate.c new file mode 100644 index 00000000..06c4fb51 --- /dev/null +++ b/test/webrtc_test_validate.c @@ -0,0 +1,443 @@ +/* + * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "webrtc_test_priv.h" + +static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet) +{ + bool has_tbm_surface = false; + tbm_surface_info_s ts_info; + guint64 size = 0; + GstMapInfo buff_info = GST_MAP_INFO_INIT; + + if (!buffer) { + g_printerr("buffer is NULL\n"); + return -1; + } + if (!packet) { + g_printerr("packet is NULL\n"); + return -1; + } + + media_packet_get_buffer_size(packet, &size); + media_packet_has_tbm_surface_buffer(packet, &has_tbm_surface); + + if (has_tbm_surface) { + int ret = TBM_SURFACE_ERROR_NONE; + tbm_surface_h ts; + + media_packet_get_tbm_surface(packet, &ts); + ret = tbm_surface_get_info(ts, &ts_info); + if (ret != TBM_SURFACE_ERROR_NONE) { + g_printerr("failed to tbm_surface_get_info()\n"); + return -1; + } + + g_printerr("tbm surface[%p, %ux%u, size:%u, format:%u, num_planes:%u] found\n", + ts, ts_info.width, ts_info.height, ts_info.size, ts_info.format, ts_info.num_planes); + } + + if (gst_buffer_map(buffer, &buff_info, GST_MAP_READWRITE)) { + if (has_tbm_surface) { + unsigned int i; + guint8 *ptr = buff_info.data; + for (i = 0; i < ts_info.num_planes; i++) { + g_print("plane[%d][ptr:%p size:%u]\n", i, ts_info.planes[i].ptr, ts_info.planes[i].size); + memcpy(ptr, ts_info.planes[i].ptr, ts_info.planes[i].size); + ptr += ts_info.planes[i].size; + } + + } else { + guchar *data_ptr; + media_packet_get_buffer_data_ptr(packet, (void **)&data_ptr); + if (data_ptr == NULL) { + g_printerr("invalid packet, data_ptr is NULL\n"); + gst_buffer_unmap(buffer, &buff_info); + return -1; + } + memcpy(buff_info.data, data_ptr, size); + } + + buff_info.size = size; + + g_print("buffer[%p], buff_info[data:%p, size:%u]\n", buffer, buff_info.data, buff_info.size); + + gst_buffer_unmap(buffer, &buff_info); + } + + return 0; +} + +GstBuffer *__alloc_buffer_from_packet(media_packet_h packet) +{ + int ret; + guchar *data_ptr; + GstBuffer *new_buffer; + guint64 pts = 0; + guint64 dts = 0; + guint64 duration = 0; + guint64 size = 0; + + media_packet_get_buffer_data_ptr(packet, (void **)&data_ptr); + if (data_ptr == NULL) { + g_printerr("invalid packet, data_ptr is NULL\n"); + return NULL; + } + + media_packet_get_buffer_size(packet, &size); + + new_buffer = gst_buffer_new_and_alloc(size); + if (!new_buffer) { + g_printerr("failed to gst_buffer_new_and_alloc()\n"); + return NULL; + } + + ret = __fill_gst_buffer_mapped_data_from_packet(new_buffer, packet); + if (ret == -1) { + g_printerr("failed to __fill_gst_buffer_mapped_data_from_packet()\n"); + gst_buffer_unref(new_buffer); + return NULL; + } + + media_packet_get_pts(packet, &pts); + GST_BUFFER_PTS(new_buffer) = pts; + media_packet_get_dts(packet, &dts); + GST_BUFFER_DTS(new_buffer) = dts; + + media_packet_get_duration(packet, &duration); + GST_BUFFER_DURATION(new_buffer) = duration; + + g_print("new gst buffer[%p, pts:%llu, dts:%llu, duration:%llu]\n", new_buffer, pts, dts, duration); + + return new_buffer; +} + +GstElement *_build_appsrc_h264_render_pipeline(GstElement **appsrc) +{ + GstElement *pipeline; + GstElement *src = NULL; + GstElement *dec = NULL; + GstElement *convert = NULL; + GstElement *sink; + GstCaps *caps; + + if (!appsrc) { + g_printerr("appsrc is NULL\n"); + return NULL; + } + + pipeline = gst_pipeline_new("appsrc-h264-render-pipeline"); + + src = gst_element_factory_make("appsrc", NULL); + if (!src) { + g_printerr("failed to gst_element_factory_make(), appsrc\n"); + goto error; + } + + caps = gst_caps_new_simple("video/x-h264", + "stream-format", G_TYPE_STRING, 
"byte-stream", + "alignment", G_TYPE_STRING, "au", + NULL); + + g_object_set(G_OBJECT(src), + "caps", caps, + "format", GST_FORMAT_TIME, + NULL); + gst_caps_unref(caps); + + dec = gst_element_factory_make("avdec_h264", NULL); + if (!dec) { + g_printerr("failed to gst_element_factory_make(), avdec_h264\n"); + goto error_with_unref_elem; + } + + convert = gst_element_factory_make("videoconvert", NULL); + if (!convert) { + g_printerr("failed to gst_element_factory_make(), videoconvert\n"); + goto error_with_unref_elem; + } + + sink = gst_element_factory_make("tizenwlsink", NULL); + if (!sink) { + g_printerr("failed to gst_element_factory_make(), tizenwlsink\n"); + goto error_with_unref_elem; + } + + gst_bin_add_many(GST_BIN(pipeline), src, dec, convert, sink, NULL); + if (!gst_element_link_many(src, dec, convert, sink, NULL)) { + g_printerr("failed to gst_element_link_many(), appsrc_h264_render\n"); + goto error; + } + + *appsrc = src; + + g_print("appsrc H264 render pipeline is created\n"); + + return pipeline; + +error_with_unref_elem: + if (src) + gst_object_unref(src); + if (dec) + gst_object_unref(dec); + if (convert) + gst_object_unref(convert); +error: + gst_object_unref(pipeline); + return NULL; +} + +GstElement *_build_appsrc_vp8_render_pipeline(GstElement **appsrc) +{ + GstElement *pipeline; + GstElement *src = NULL; + GstElement *dec = NULL; + GstElement *convert = NULL; + GstElement *sink; + GstCaps *caps; + + if (!appsrc) { + g_printerr("appsrc is NULL\n"); + return NULL; + } + + pipeline = gst_pipeline_new("appsrc-vp8-render-pipeline"); + + src = gst_element_factory_make("appsrc", NULL); + if (!src) { + g_printerr("failed to gst_element_factory_make(), appsrc\n"); + goto error; + } + + caps = gst_caps_new_simple("video/x-vp8", NULL, NULL); + + g_object_set(G_OBJECT(src), + "format", GST_FORMAT_TIME, + "caps", caps, + NULL); + gst_caps_unref(caps); + + dec = gst_element_factory_make("vp8dec", NULL); + if (!dec) { + g_printerr("failed to gst_element_factory_make(), vp8dec\n"); + goto error_with_unref_elem; + } + + convert = gst_element_factory_make("videoconvert", NULL); + if (!convert) { + g_printerr("failed to gst_element_factory_make(), videoconvert\n"); + goto error_with_unref_elem; + } + + sink = gst_element_factory_make("tizenwlsink", NULL); + if (!sink) { + g_printerr("failed to gst_element_factory_make(), tizenwlsink\n"); + goto error_with_unref_elem; + } + g_object_set(G_OBJECT(sink), + "sync", FALSE, + NULL); + + gst_bin_add_many(GST_BIN(pipeline), src, dec, convert, sink, NULL); + if (!gst_element_link_many(src, dec, convert, sink, NULL)) { + g_printerr("failed to gst_element_link_many(), appsrc_vp8_render\n"); + goto error; + } + + *appsrc = src; + + g_print("appsrc VP8 render pipeline is created\n"); + + return pipeline; + +error_with_unref_elem: + if (src) + gst_object_unref(src); + if (dec) + gst_object_unref(dec); + if (convert) + gst_object_unref(convert); +error: + gst_object_unref(pipeline); + return NULL; +} + +GstElement *_build_appsrc_opus_render_pipeline(GstElement **appsrc) +{ + GstElement *pipeline; + GstElement *src = NULL; + GstElement *dec = NULL; + GstElement *sink; + GstCaps *caps; + + if (!appsrc) { + g_printerr("appsrc is NULL\n"); + return NULL; + } + + pipeline = gst_pipeline_new("appsrc-opus-render-pipeline"); + + src = gst_element_factory_make("appsrc", NULL); + if (!src) { + g_printerr("failed to gst_element_factory_make(), appsrc\n"); + goto error; + } + + caps = gst_caps_new_simple("audio/x-opus", NULL, NULL); + + g_object_set(G_OBJECT(src), 
+ "format", GST_FORMAT_TIME, + "caps", caps, + NULL); + gst_caps_unref(caps); + + dec = gst_element_factory_make("opusdec", NULL); + if (!dec) { + g_printerr("failed to gst_element_factory_make(), opusdec\n"); + goto error_with_unref_elem; + } + + sink = gst_element_factory_make("pulsesink", NULL); + if (!sink) { + g_printerr("failed to gst_element_factory_make(), pulsesink\n"); + goto error_with_unref_elem; + } + g_object_set(G_OBJECT(sink), + "sync", FALSE, + NULL); + + gst_bin_add_many(GST_BIN(pipeline), src, dec, sink, NULL); + if (!gst_element_link_many(src, dec, sink, NULL)) { + g_printerr("failed to gst_element_link_many(), appsrc_opus_render\n"); + goto error; + } + + *appsrc = src; + + g_print("appsrc OPUS render pipeline is created\n"); + + return pipeline; + +error_with_unref_elem: + if (src) + gst_object_unref(src); + if (dec) + gst_object_unref(dec); +error: + gst_object_unref(pipeline); + return NULL; +} + +void _push_buffer_to_validate_media_packet_source(media_packet_source_s *packet_source, media_packet_h packet) +{ + GstBuffer *buffer = NULL; + bool is_new_buffer = false; + GstFlowReturn gst_ret = GST_FLOW_OK; + + RET_IF(!packet_source, "packet_source is NULL"); + RET_IF(!packet, "packet is NULL"); + + media_packet_get_extra(packet, (void **)&buffer); + if (buffer) { + g_print("external gstbuffer[%p]\n", buffer); + } else { + buffer = __alloc_buffer_from_packet(packet); + if (!buffer) { + media_packet_destroy(packet); + return; + } + is_new_buffer = true; + } + + g_signal_emit_by_name(G_OBJECT(packet_source->appsrc), "push-buffer", buffer, &gst_ret, NULL); + if (gst_ret != GST_FLOW_OK) + g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret); + if (is_new_buffer) + gst_buffer_unref(buffer); + + media_packet_destroy(packet); +} + +void _push_buffer_to_validate_encoded_frame_callback_data(connection_s *conn, media_packet_h packet, webrtc_media_type_e type) +{ + GstBuffer *buffer = NULL; + GstFlowReturn gst_ret = GST_FLOW_OK; + GstElement *appsrc = NULL; + static bool first_audio_packet = true; + static bool first_video_packet = true; + + RET_IF(!conn, "conn is NULL"); + RET_IF(!packet, "packet is NULL"); + + appsrc = (type == WEBRTC_MEDIA_TYPE_AUDIO) ? 
conn->appsrc_for_audio : conn->appsrc_for_video; + RET_IF(!appsrc, "appsrc is NULL"); + + buffer = __alloc_buffer_from_packet(packet); + if (!buffer) { + media_packet_destroy(packet); + return; + } + + if (type == WEBRTC_MEDIA_TYPE_AUDIO && first_audio_packet) { + media_format_h format; + int channels = 0; + int rate = 0; + + media_packet_get_format(packet, &format); + media_format_get_audio_info(format, NULL, &channels, &rate, NULL, NULL); + if (channels > 0 && rate > 0) { + GstCaps *caps = gst_caps_new_simple("audio/x-opus", + "channel-mapping-family", G_TYPE_INT, 0, /* FIXME: need to get this value from media packet */ + "channels", G_TYPE_INT, channels, + "rate", G_TYPE_INT, rate, + NULL); + + g_print("channels[%d] and rate[%d] are added to appsrc caps\n", channels, rate); + + g_object_set(G_OBJECT(appsrc), "caps", caps, NULL); + gst_caps_unref(caps); + } + media_format_unref(format); + first_audio_packet = false; + + } else if (type == WEBRTC_MEDIA_TYPE_VIDEO && first_video_packet) { + media_format_h format; + int width = 0; + int height = 0; + + media_packet_get_format(packet, &format); + media_format_get_video_info(format, NULL, &width, &height, NULL, NULL); + if (width > 0 && height > 0) { + GstCaps *caps = gst_caps_new_simple("video/x-vp8", + "width", G_TYPE_INT, width, + "height", G_TYPE_INT, height, + NULL); + + g_print("width[%d] height[%d]\n", width, height); + + g_object_set(G_OBJECT(appsrc), "caps", caps, NULL); + gst_caps_unref(caps); + } + media_format_unref(format); + first_video_packet = false; + } + + g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL); + if (gst_ret != GST_FLOW_OK) + g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret); + + gst_buffer_unref(buffer); +}
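
Note for readers following the new file in isolation: webrtc_test_validate.c relies on a RET_IF() guard macro that is not part of this diff, so it is presumably already provided by test/webrtc_test_priv.h. As a rough sketch only (the exact definition in the header may differ), such a guard in this test code would look something like:

    /* Assumed sketch of the RET_IF() guard used by the relocated helpers;
     * when the condition is true it logs the message and returns from the
     * (void) function. The real definition lives in webrtc_test_priv.h. */
    #define RET_IF(expr, fmt, ...) \
        do { \
            if (expr) { \
                g_printerr(fmt "\n", ##__VA_ARGS__); \
                return; \
            } \
        } while (0)

With a definition along these lines, the guards such as RET_IF(!conn, "conn is NULL") in _push_buffer_to_validate_encoded_frame_callback_data() simply log and bail out early instead of dereferencing a NULL handle.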