webrtc_test: Add execution option to replace build definition 02/276802/3
authorSangchul Lee <sc11.lee@samsung.com>
Sun, 26 Jun 2022 23:31:08 +0000 (08:31 +0900)
committerSangchul Lee <sc11.lee@samsung.com>
Tue, 28 Jun 2022 23:55:30 +0000 (08:55 +0900)
-f, --validate-feeding-data
: validate media packet source feeding data by rendering it on a GStreamer pipeline

-e, --validate-encoded-frame-cb
: validate media packets from the encoded frame callback by rendering them on a GStreamer pipeline

This patch increases the PredefinedPreprocessor (PP) score of the SAM metrics.

[Version] 0.3.138
[Issue Type] Refactoring

Change-Id: I0811831c533d604827363dd16c522ee528d6a9aa
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
packaging/capi-media-webrtc.spec
test/webrtc_test.c
test/webrtc_test_menu.c
test/webrtc_test_priv.h

index 2fe08f27ab4c5938b3a7af1e1770a225036e8bdd..82cb9ada6063d62099a87d185a845a7b80fefc92 100644 (file)
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.3.137
+Version:    0.3.138
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
index 739f63f3756c57be50152a6862077a8b41d64f97..42248cb51f35328241573ac5768280911a22e810 100644 (file)
@@ -82,16 +82,10 @@ static const char *g_webrtc_stats_type_str[] = {
 
 static appdata_s g_ad;
 
-#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
 GstBuffer *__alloc_buffer_from_packet(media_packet_h packet);
-#endif
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
 static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc);
-#endif
-#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 static GstElement* __build_appsrc_vp8_render_pipeline(GstElement **appsrc);
 static GstElement* __build_appsrc_opus_render_pipeline(GstElement **appsrc);
-#endif
 
 appdata_s *get_appdata(void)
 {
@@ -614,24 +608,24 @@ static void _webrtc_stop(int index)
                        g_ad.conns[index].recv_channels[i] = NULL;
        }
 
-#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
-       if (g_ad.conns[index].video_render_pipeline) {
-               if (gst_element_set_state(g_ad.conns[index].video_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
-                       g_printerr("failed to set state to NULL\n");
-               gst_object_unref(g_ad.conns[index].video_render_pipeline);
-               g_ad.conns[index].video_render_pipeline = NULL;
-               g_ad.conns[index].appsrc_for_video = NULL;
-               g_print("video render pipeline is released\n");
-       }
-       if (g_ad.conns[index].audio_render_pipeline) {
-               if (gst_element_set_state(g_ad.conns[index].audio_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
-                       g_printerr("failed to set state to NULL\n");
-               gst_object_unref(g_ad.conns[index].audio_render_pipeline);
-               g_ad.conns[index].audio_render_pipeline = NULL;
-               g_ad.conns[index].appsrc_for_audio = NULL;
-               g_print("audio render pipeline is released\n");
+       if (g_ad.validate_encoded_frame_cb) {
+               if (g_ad.conns[index].video_render_pipeline) {
+                       if (gst_element_set_state(g_ad.conns[index].video_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
+                               g_printerr("failed to set state to NULL\n");
+                       gst_object_unref(g_ad.conns[index].video_render_pipeline);
+                       g_ad.conns[index].video_render_pipeline = NULL;
+                       g_ad.conns[index].appsrc_for_video = NULL;
+                       g_print("video render pipeline is released\n");
+               }
+               if (g_ad.conns[index].audio_render_pipeline) {
+                       if (gst_element_set_state(g_ad.conns[index].audio_render_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
+                               g_printerr("failed to set state to NULL\n");
+                       gst_object_unref(g_ad.conns[index].audio_render_pipeline);
+                       g_ad.conns[index].audio_render_pipeline = NULL;
+                       g_ad.conns[index].appsrc_for_audio = NULL;
+                       g_print("audio render pipeline is released\n");
+               }
        }
-#endif
 #ifndef TIZEN_TV
        if (g_ad.conns[index].encoded_audio_frame_cb_is_set ||
                g_ad.conns[index].encoded_video_frame_cb_is_set)
@@ -2286,66 +2280,67 @@ static void __track_added_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned
        g_print("__track_added_cb() is invoked, webrtc[%p], type[%d], id[%u], conn[%p]\n", webrtc, type, id, conn);
 
        if (type == WEBRTC_MEDIA_TYPE_VIDEO) {
-#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+               if (!g_ad.validate_encoded_frame_cb) {
 #ifndef TIZEN_TV
-               conn->render.espp.video_track_preparing = true;
+                       conn->render.espp.video_track_preparing = true;
 #endif
-               if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_OVERLAY) {
-                       g_print("Video track is added, set display - overlay, object[%p]\n", g_ad.win_id);
+                       if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_OVERLAY) {
+                               g_print("Video track is added, set display - overlay, object[%p]\n", g_ad.win_id);
 #ifndef TIZEN_TV
-                       if (!conn->render.espp.handle)
-                               webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
-                       else
-                               esplusplayer_set_display(conn->render.espp.handle, ESPLUSPLAYER_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
+                               if (!conn->render.espp.handle)
+                                       webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
+                               else
+                                       esplusplayer_set_display(conn->render.espp.handle, ESPLUSPLAYER_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
 #else
-                       webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
+                               webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
 #endif
-               } else if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_EVAS) {
-                       g_print("Video track is added, set display - evas object[%p]\n", conn->render.eo);
+                       } else if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_EVAS) {
+                               g_print("Video track is added, set display - evas object[%p]\n", conn->render.eo);
 #ifndef TIZEN_TV
-                       if (!conn->render.espp.handle)
-                               webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->render.eo);
-                       else
-                               esplusplayer_set_display(conn->render.espp.handle, ESPLUSPLAYER_DISPLAY_TYPE_EVAS, conn->render.eo);
+                               if (!conn->render.espp.handle)
+                                       webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->render.eo);
+                               else
+                                       esplusplayer_set_display(conn->render.espp.handle, ESPLUSPLAYER_DISPLAY_TYPE_EVAS, conn->render.eo);
 #else
-                       webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->render.eo);
+                               webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->render.eo);
 #endif
+                       } else {
+                               g_print("Video track is added, invalid display type[%d]\n", conn->render.display_type);
+                       }
                } else {
-                       g_print("Video track is added, invalid display type[%d]\n", conn->render.display_type);
-               }
-#else
-               g_print("Video track is added\n");
-               if ((conn->video_render_pipeline = __build_appsrc_vp8_render_pipeline(&conn->appsrc_for_video))) {
-                       GstStateChangeReturn state_change_ret = gst_element_set_state(conn->video_render_pipeline, GST_STATE_PLAYING);
-                       if (state_change_ret == GST_STATE_CHANGE_FAILURE)
-                               g_printerr("failed to set state to PLAYING to video render pipeline\n");
+                       g_print("Video track is added\n");
+                       if ((conn->video_render_pipeline = __build_appsrc_vp8_render_pipeline(&conn->appsrc_for_video))) {
+                               GstStateChangeReturn state_change_ret = gst_element_set_state(conn->video_render_pipeline, GST_STATE_PLAYING);
+                               if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+                                       g_printerr("failed to set state to PLAYING to video render pipeline\n");
+                       }
                }
-#endif
+
        } else if (type == WEBRTC_MEDIA_TYPE_AUDIO) {
                g_print("Audio track is added\n");
-#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
-               int ret;
+               if (!g_ad.validate_encoded_frame_cb) {
+                       int ret;
 #ifndef TIZEN_TV
-               conn->render.espp.audio_track_preparing = true;
+                       conn->render.espp.audio_track_preparing = true;
 
-               if (conn->render.espp.handle) /* TODO: apply stream info if the function is provided in espp */
-                       return;
+                       if (conn->render.espp.handle) /* TODO: apply stream info if the function is provided in espp */
+                               return;
 #endif
-               if (!conn->render.stream_info) {
-                       ret = sound_manager_create_stream_information(SOUND_STREAM_TYPE_MEDIA, NULL, NULL, &conn->render.stream_info);
-                       RET_IF(ret != SOUND_MANAGER_ERROR_NONE, "failed to sound_manager_create_stream_information(), ret[0x%x]", ret);
-               }
+                       if (!conn->render.stream_info) {
+                               ret = sound_manager_create_stream_information(SOUND_STREAM_TYPE_MEDIA, NULL, NULL, &conn->render.stream_info);
+                               RET_IF(ret != SOUND_MANAGER_ERROR_NONE, "failed to sound_manager_create_stream_information(), ret[0x%x]", ret);
+                       }
 
-               ret = webrtc_set_sound_stream_info(webrtc, id, conn->render.stream_info);
-               if (ret != WEBRTC_ERROR_NONE)
-                       g_printerr("failed to webrtc_set_sound_stream_info(), ret[0x%x]\n", ret);
-#else
-               if ((conn->audio_render_pipeline = __build_appsrc_opus_render_pipeline(&conn->appsrc_for_audio))) {
-                       GstStateChangeReturn state_change_ret = gst_element_set_state(conn->audio_render_pipeline, GST_STATE_PLAYING);
-                       if (state_change_ret == GST_STATE_CHANGE_FAILURE)
-                               g_printerr("failed to set state to PLAYING to audio render pipeline\n");
+                       ret = webrtc_set_sound_stream_info(webrtc, id, conn->render.stream_info);
+                       if (ret != WEBRTC_ERROR_NONE)
+                               g_printerr("failed to webrtc_set_sound_stream_info(), ret[0x%x]\n", ret);
+               } else {
+                       if ((conn->audio_render_pipeline = __build_appsrc_opus_render_pipeline(&conn->appsrc_for_audio))) {
+                               GstStateChangeReturn state_change_ret = gst_element_set_state(conn->audio_render_pipeline, GST_STATE_PLAYING);
+                               if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+                                       g_printerr("failed to set state to PLAYING to audio render pipeline\n");
+                       }
                }
-#endif
        }
 }
 
@@ -2555,8 +2550,7 @@ static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsign
                __espp_submit_packet(conn, packet, type);
        }
 #endif
-#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
-       {
+       if (g_ad.validate_encoded_frame_cb) {
                GstFlowReturn gst_ret = GST_FLOW_OK;
                GstElement *appsrc = (type == WEBRTC_MEDIA_TYPE_AUDIO) ? conn->appsrc_for_audio : conn->appsrc_for_video;
                static bool first_audio_packet = true;
@@ -2617,7 +2611,6 @@ static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsign
 
                gst_buffer_unref(buffer);
        }
-#endif /* __DEBUG_VALIDATE_ENCODED_FRAME_CB__ */
 #ifndef TIZEN_TV
 out:
 #endif
@@ -2633,14 +2626,14 @@ static void _webrtc_set_encoded_audio_frame_cb(int index)
        RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
 
        g_print("webrtc_set_encoded_audio_frame_cb() success\n");
-#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+       if (!g_ad.validate_encoded_frame_cb) {
 #ifndef TIZEN_TV
-       if (!g_ad.conns[index].encoded_audio_frame_cb_is_set) {
-               g_ad.conns[index].encoded_audio_frame_cb_is_set = true;
-               __espp_init(index);
-       }
-#endif
+               if (!g_ad.conns[index].encoded_audio_frame_cb_is_set) {
+                       g_ad.conns[index].encoded_audio_frame_cb_is_set = true;
+                       __espp_init(index);
+               }
 #endif
+       }
 }
 
 static void _webrtc_unset_encoded_audio_frame_cb(int index)
@@ -2651,14 +2644,14 @@ static void _webrtc_unset_encoded_audio_frame_cb(int index)
        RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
 
        g_print("webrtc_unset_encoded_audio_frame_cb() success\n");
-#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+       if (!g_ad.validate_encoded_frame_cb) {
 #ifndef TIZEN_TV
-       if (g_ad.conns[index].encoded_audio_frame_cb_is_set) {
-               g_ad.conns[index].encoded_audio_frame_cb_is_set = false;
-               __espp_deinit(index);
-       }
-#endif
+               if (g_ad.conns[index].encoded_audio_frame_cb_is_set) {
+                       g_ad.conns[index].encoded_audio_frame_cb_is_set = false;
+                       __espp_deinit(index);
+               }
 #endif
+       }
 }
 
 static void _webrtc_set_encoded_video_frame_cb(int index)
@@ -2670,14 +2663,14 @@ static void _webrtc_set_encoded_video_frame_cb(int index)
 
        g_print("webrtc_set_encoded_video_frame_cb() success\n");
 
-#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+       if (!g_ad.validate_encoded_frame_cb) {
 #ifndef TIZEN_TV
-       if (!g_ad.conns[index].encoded_video_frame_cb_is_set) {
-               g_ad.conns[index].encoded_video_frame_cb_is_set = true;
-               __espp_init(index);
-       }
-#endif
+               if (!g_ad.conns[index].encoded_video_frame_cb_is_set) {
+                       g_ad.conns[index].encoded_video_frame_cb_is_set = true;
+                       __espp_init(index);
+               }
 #endif
+       }
 }
 
 static void _webrtc_unset_encoded_video_frame_cb(int index)
@@ -2689,14 +2682,14 @@ static void _webrtc_unset_encoded_video_frame_cb(int index)
 
        g_print("webrtc_unset_encoded_video_frame_cb() success\n");
 
-#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+       if (!g_ad.validate_encoded_frame_cb) {
 #ifndef TIZEN_TV
-       if (g_ad.conns[index].encoded_video_frame_cb_is_set) {
-               g_ad.conns[index].encoded_video_frame_cb_is_set = false;
-               __espp_deinit(index);
-       }
-#endif
+               if (g_ad.conns[index].encoded_video_frame_cb_is_set) {
+                       g_ad.conns[index].encoded_video_frame_cb_is_set = false;
+                       __espp_deinit(index);
+               }
 #endif
+       }
 }
 
 static void __media_packet_source_buffer_state_changed_cb(unsigned int source_id, webrtc_media_packet_source_buffer_state_e state, void *user_data)
@@ -3119,7 +3112,6 @@ error:
        return NULL;
 }
 
-#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
 static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet)
 {
        bool has_tbm_surface = false;
@@ -3228,7 +3220,6 @@ GstBuffer *__alloc_buffer_from_packet(media_packet_h packet)
 
        return new_buffer;
 }
-#endif /* __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__ */
 
 static void __stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
 {
@@ -3262,33 +3253,34 @@ static void __stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *p
                return;
        }
 
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
-       GstBuffer *buffer_from_packet = NULL;
-       bool is_new_buffer = false;
-       media_packet_get_extra(packet, (void **)&buffer_from_packet);
-       if (buffer_from_packet) {
-               g_print("external gstbuffer[%p]\n", buffer_from_packet);
+       if (g_ad.validate_feeding_data) {
+               GstBuffer *buffer_from_packet = NULL;
+               bool is_new_buffer = false;
+               media_packet_get_extra(packet, (void **)&buffer_from_packet);
+               if (buffer_from_packet) {
+                       g_print("external gstbuffer[%p]\n", buffer_from_packet);
+               } else {
+                       buffer_from_packet = __alloc_buffer_from_packet(packet);
+                       if (!buffer_from_packet) {
+                               media_packet_destroy(packet);
+                               return;
+                       }
+                       is_new_buffer = true;
+               }
+               GstFlowReturn gst_ret = GST_FLOW_OK;
+               g_signal_emit_by_name(G_OBJECT(packet_source->appsrc), "push-buffer", buffer_from_packet, &gst_ret, NULL);
+               if (gst_ret != GST_FLOW_OK)
+                       g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
+               if (is_new_buffer)
+                       gst_buffer_unref(buffer_from_packet);
+               media_packet_destroy(packet);
+
        } else {
-               buffer_from_packet = __alloc_buffer_from_packet(packet);
-               if (!buffer_from_packet) {
+               if (webrtc_media_packet_source_push_packet(packet_source->webrtc, packet_source->source_id, packet) != WEBRTC_ERROR_NONE) {
+                       g_printerr("failed to webrtc_media_packet_source_push_packet()\n");
                        media_packet_destroy(packet);
-                       return;
                }
-               is_new_buffer = true;
-       }
-       GstFlowReturn gst_ret = GST_FLOW_OK;
-       g_signal_emit_by_name(G_OBJECT(packet_source->appsrc), "push-buffer", buffer_from_packet, &gst_ret, NULL);
-       if (gst_ret != GST_FLOW_OK)
-               g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
-       if (is_new_buffer)
-               gst_buffer_unref(buffer_from_packet);
-       media_packet_destroy(packet);
-#else
-       if (webrtc_media_packet_source_push_packet(packet_source->webrtc, packet_source->source_id, packet) != WEBRTC_ERROR_NONE) {
-               g_printerr("failed to webrtc_media_packet_source_push_packet()\n");
-               media_packet_destroy(packet);
        }
-#endif
 }
 
 static GstCaps *__make_raw_caps(media_format_h format)
@@ -3638,7 +3630,6 @@ error:
        return NULL;
 }
 
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
 static GstElement* __build_appsrc_h264_render_pipeline(GstElement **appsrc)
 {
        GstElement *pipeline;
@@ -3714,8 +3705,6 @@ error:
        return NULL;
 }
 
-#endif /* __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__ */
-#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 static GstElement* __build_appsrc_vp8_render_pipeline(GstElement **appsrc)
 {
        GstElement *pipeline;
@@ -3857,7 +3846,6 @@ error:
        return NULL;
 }
 
-#endif /* __DEBUG_VALIDATE_ENCODED_FRAME_CB__ */
 static GstElement* __make_src_pipeline(media_packet_source_s *packet_source)
 {
        media_format_type_e format_type;
@@ -3889,9 +3877,8 @@ static GstElement* __make_src_pipeline(media_packet_source_s *packet_source)
                switch (mimetype) {
                case MEDIA_FORMAT_H264_SP:
                        packet_source->src_pipeline = __build_h264_format_pipeline(packet_source);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
-                       packet_source->render_pipeline = __build_appsrc_h264_render_pipeline(&packet_source->appsrc);
-#endif
+                       if (g_ad.validate_feeding_data)
+                               packet_source->render_pipeline = __build_appsrc_h264_render_pipeline(&packet_source->appsrc);
                        break;
                case MEDIA_FORMAT_I420:
                case MEDIA_FORMAT_NV12:
@@ -3926,10 +3913,9 @@ static void _start_pushing_packet(int index, int source_id)
        }
 
        state_change_ret = gst_element_set_state(g_ad.conns[index].packet_sources[i].src_pipeline, GST_STATE_PLAYING);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
-       if (g_ad.conns[index].packet_sources[i].render_pipeline)
+       if (g_ad.validate_feeding_data && g_ad.conns[index].packet_sources[i].render_pipeline)
                state_change_ret = gst_element_set_state(g_ad.conns[index].packet_sources[i].render_pipeline, GST_STATE_PLAYING);
-#endif
+
        RET_IF(state_change_ret == GST_STATE_CHANGE_FAILURE, "failed to set state to PLAYING");
 
        g_ad.conns[index].packet_sources[i].is_stop_requested = false;
@@ -3950,10 +3936,8 @@ static void _stop_pushing_packet(int index, int source_id)
        g_ad.conns[index].packet_sources[i].is_stop_requested = true;
 
        gst_element_set_state(g_ad.conns[index].packet_sources[i].src_pipeline, GST_STATE_PAUSED);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
-       if (g_ad.conns[index].packet_sources[i].render_pipeline)
+       if (g_ad.validate_feeding_data && g_ad.conns[index].packet_sources[i].render_pipeline)
                gst_element_set_state(g_ad.conns[index].packet_sources[i].render_pipeline, GST_STATE_PAUSED);
-#endif
 
        g_print("_stop_pushing_packet()\n");
 }
@@ -5321,8 +5305,10 @@ static void print_usage()
 {
        printf("Usage : ");
        printf("webrtc_test [option]\n\n"
-                  "  -p, --proxy           proxy URL to use (e.g. http://123.123.123.123:8080)\n"
-                  "  -h, --help            help\n");
+                  "  -p, --proxy                     proxy URL to use (e.g. http://123.123.123.123:8080)\n"
+                  "  -f, --validate-feeding-data     validate media packet source feeding data by rendering these on gst pipeline\n"
+                  "  -e, --validate-encoded-frame-cb validate media packets from encoded frame callback by rendering these on gst pipeline\n"
+                  "  -h, --help                      help\n");
 }
 
 int main(int argc, char *argv[])
@@ -5340,17 +5326,25 @@ int main(int argc, char *argv[])
 
                static struct option long_options[] = {
                        { "proxy", required_argument, 0, 'p' },
+                       { "validate-feeding-data", no_argument, 0, 'f' },
+                       { "validate-encoded-frame-cb", no_argument, 0, 'e' },
                        { "help", no_argument, 0, 'h' },
                        { 0, 0, 0, 0 }
                };
 
-               if ((opt = getopt_long(argc, argv, "p:h", long_options, &opt_idx)) == -1)
+               if ((opt = getopt_long(argc, argv, "p:feh", long_options, &opt_idx)) == -1)
                        break;
 
                switch (opt) {
                case 'p':
                        strncpy(g_ad.proxy, optarg, sizeof(g_ad.proxy) - 1);
                        break;
+               case 'f':
+                       g_ad.validate_feeding_data = true;
+                       break;
+               case 'e':
+                       g_ad.validate_encoded_frame_cb = true;
+                       break;
                case 'h':
                default:
                        print_usage();
index 25e95001b913bd2f8ce0f520d027316450b76c26..d077c4bd99e9508a8e7396c032b5ffe5f7f30206 100644 (file)
@@ -150,6 +150,11 @@ void display_setting_status(void)
 
        if (len_proxy > 0)
                g_print("  proxy[%s]\n", get_appdata()->proxy);
+       if (get_appdata()->validate_feeding_data)
+               g_print("  validate-feeding-data[on]\n");
+       if (get_appdata()->validate_encoded_frame_cb)
+               g_print("  validate-encoded-frame-cb[on]\n");
+
        if (len_server > 0)
                g_print("  server[%s][%s]\n", get_appdata()->signaling_server.url, g_server_status_str[get_appdata()->signaling_server.server_status]);
        if (get_appdata()->signaling_server.private_ip && get_appdata()->signaling_server.port > 0)
index f1ba03a017bfbdeefd6c9a0ccfe1bf2e5ccf2316..d60982d136f5af4bccbfaa1a98d95b41d7bdbd19 100644 (file)
@@ -169,10 +169,6 @@ typedef struct {
 
        webrtc_h webrtc;
        GstElement *src_pipeline;
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
-       GstElement *render_pipeline;
-       GstElement *appsrc;
-#endif
        GstElement *src;
        GstElement *sink;
        GstElement *demux;
@@ -185,6 +181,10 @@ typedef struct {
        GCond cond;
        GMutex mutex;
        bool got_eos;
+
+       /* for validating media packet source */
+       GstElement *render_pipeline;
+       GstElement *appsrc;
 } media_packet_source_s;
 
 typedef struct _connection_s {
@@ -237,13 +237,13 @@ typedef struct _connection_s {
        bool encoded_video_frame_cb_is_set;
        bool encoded_audio_frame_cb_is_set;
 #endif
-#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+       media_packet_source_s packet_sources[MAX_MEDIA_PACKET_SOURCE_LEN];
+
+       /* for validating encoded frame cb */
        GstElement *audio_render_pipeline;
        GstElement *video_render_pipeline;
        GstElement *appsrc_for_audio;
        GstElement *appsrc_for_video;
-#endif
-       media_packet_source_s packet_sources[MAX_MEDIA_PACKET_SOURCE_LEN];
 } connection_s;
 
 typedef struct _signaling_server_s {
@@ -280,7 +280,10 @@ typedef struct {
        connection_s conns[MAX_CONNECTION_LEN];
        signaling_server_s signaling_server;
        webrtc_signaling_server_h inner_signaling_server;
+
        gchar proxy[MAX_STRING_LEN];
+       bool validate_feeding_data;
+       bool validate_encoded_frame_cb;
 } appdata_s;
 
 extern menu_info_s g_menu_infos[];