webrtc_test: Add local rendering feature with encoded frame callback 98/253198/4
authorSangchul Lee <sc11.lee@samsung.com>
Fri, 5 Feb 2021 11:17:36 +0000 (20:17 +0900)
committerSangchul Lee <sc11.lee@samsung.com>
Mon, 22 Feb 2021 03:29:44 +0000 (12:29 +0900)
The __DEBUG_VALIDATE_ENCODED_FRAME_CB__ definition is added to test
the media packet carrying H264 encoded data received from the encoded
frame callback, rendering it through a local appsrc pipeline.

[Version] 0.1.115
[Issue Type] Test application

Change-Id: I27c697737226d4715576a7057b4c2d66c6bbef79
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
packaging/capi-media-webrtc.spec
test/webrtc_test.c

index 85a2b53e585f64cf57e389d6e4a856eed3a3a45a..98be288b1f86d31fe0bb6102e7053a5a3a12ba9a 100644 (file)
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.1.114
+Version:    0.1.115
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
index aa87dd17fb11439296e604263fd0883723f2c2f9..f751841609c7013a79001661c58202afac385de7 100644 (file)
@@ -34,7 +34,8 @@
 #endif
 #define PACKAGE "webrtc_test"
 
-//#define __DEBUG_VALIDATE_MEDIA_PACKET__
+//#define __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
+//#define __DEBUG_VALIDATE_ENCODED_FRAME_CB__
 
 #ifdef LOG_TAG
 #undef LOG_TAG
@@ -120,7 +121,7 @@ typedef struct {
 
        webrtc_h webrtc;
        GstElement *src_pipeline;
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
        GstElement *render_pipeline;
        GstElement *appsrc;
 #endif
@@ -165,6 +166,10 @@ typedef struct _connection_s {
        webrtc_display_type_e display_type;
        Evas_Object *eo;
 
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+       GstElement *render_pipeline;
+       GstElement *appsrc;
+#endif
        media_packet_source_s packet_sources[MAX_MEDIA_PACKET_SOURCE_LEN];
 } connection_s;
 
@@ -177,6 +182,10 @@ static int g_conn_index;
 
 static webrtc_signaling_server_h g_inner_signaling_server;
 
+#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
+static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc);
+#endif
+
 static void win_del(void *data, Evas_Object *obj, void *event)
 {
        elm_exit();
@@ -430,6 +439,16 @@ static void _webrtc_stop(int index)
                        if (g_conns[index].recv_channels[i] != NULL)
                                g_conns[index].recv_channels[i] = NULL;
                }
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+               if (g_conns[index].render_pipeline) {
+                       GstStateChangeReturn state_change_ret = gst_element_set_state(g_conns[index].render_pipeline, GST_STATE_NULL);
+                       if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+                               g_printerr("failed to set state to NULL\n");
+                       gst_object_unref(g_conns[index].render_pipeline);
+                       g_conns[index].render_pipeline = NULL;
+                       g_print("appsrc render pipeline is released\n");
+               }
+#endif
        }
 }
 
@@ -1176,18 +1195,26 @@ static void __track_added_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned
        g_print("__track_added_cb() is invoked, webrtc[%p], type[%d], id[%u], conn[%p]\n", webrtc, type, id, conn);
 
        if (type == WEBRTC_MEDIA_TYPE_VIDEO) {
-               g_print("Video track is added, ");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
                if (conn->display_type == WEBRTC_DISPLAY_TYPE_OVERLAY) {
-                       g_print("set display - overlay, object[%p]\n", g_win_id);
+                       g_print("Video track is added, set display - overlay, object[%p]\n", g_win_id);
                        webrtc_set_display(conn->webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_win_id);
 
                } else if (conn->display_type == WEBRTC_DISPLAY_TYPE_EVAS) {
-                       g_print("set display - evas object[%p]\n", conn->eo);
+                       g_print("Video track is added, set display - evas object[%p]\n", conn->eo);
                        webrtc_set_display(conn->webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->eo);
 
                } else {
-                       g_print("invalid display type[%d]\n", conn->display_type);
+                       g_print("Video track is added, invalid display type[%d]\n", conn->display_type);
                }
+#else
+               g_print("Video track is added\n");
+               if ((conn->render_pipeline = __build_appsrc_render_pipeline(&conn->appsrc))) {
+                       GstStateChangeReturn state_change_ret = gst_element_set_state(conn->render_pipeline, GST_STATE_PLAYING);
+                       if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+                               g_printerr("failed to set state to PLAYING\n");
+               }
+#endif
        } else if (type == WEBRTC_MEDIA_TYPE_AUDIO) {
                g_print("Audio track is added\n");
        }
@@ -1218,6 +1245,12 @@ static void _webrtc_unset_track_added_cb(int index)
 static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned int track_id, media_packet_h packet, void *user_data)
 {
        void *data_ptr = NULL;
+       connection_s *conn = (connection_s *)user_data;
+
+       if (conn == NULL) {
+               g_printerr("conn is NULL\n");
+               return;
+       }
 
        /* get data pointer from media packet */
        if (media_packet_get_buffer_data_ptr(packet, &data_ptr) != MEDIA_PACKET_ERROR_NONE)
@@ -1226,6 +1259,28 @@ static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsign
        g_print("webrtc[%p] type[%u] track_id[%u] packet[%p, data_ptr:%p] user_data[%p]\n",
                webrtc, type, track_id, packet, data_ptr, user_data);
 
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+       {
+               GstFlowReturn gst_ret = GST_FLOW_OK;
+               GstBuffer *buffer = NULL;
+
+               media_packet_get_extra(packet, (void**)&buffer);
+               if (buffer) {
+                       GstMapInfo buff_info = GST_MAP_INFO_INIT;
+                       if (!gst_buffer_map(buffer, &buff_info, GST_MAP_READ)) {
+                               g_print("failed to gst_buffer_map()\n");
+                               media_packet_destroy(packet);
+                               return;
+                       }
+                       g_print("buffer[%p] buffer_info.data[%p]\n", buffer, buff_info.data);
+                       gst_buffer_unmap(buffer, &buff_info);
+
+                       g_signal_emit_by_name(G_OBJECT(conn->appsrc), "push-buffer", buffer, &gst_ret, NULL);
+                       if (gst_ret != GST_FLOW_OK)
+                               g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
+               }
+       }
+#endif
        /* media packet should be freed after use */
        media_packet_destroy(packet);
 }
@@ -1685,7 +1740,7 @@ error:
        return NULL;
 }
 
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
 static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet)
 {
        bool has_tbm_surface = false;
@@ -1801,7 +1856,7 @@ GstBuffer *__get_buffer_from_packet(media_packet_h packet)
 
        return new_buffer;
 }
-#endif /* __DEBUG_VALIDATE_MEDIA_PACKET__ */
+#endif /* __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__ */
 
 static void __stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
 {
@@ -1838,7 +1893,7 @@ static void __stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *p
                return;
        }
 
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
        GstBuffer *buffer_from_packet = __get_buffer_from_packet(packet);
        if (!buffer_from_packet) {
                media_packet_destroy(packet);
@@ -2135,8 +2190,8 @@ error:
        return NULL;
 }
 
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
-static GstElement* __build_appsrc_render_pipeline(media_packet_source_s *packet_source)
+#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
+static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc)
 {
        GstElement *pipeline;
        GstElement *src;
@@ -2145,8 +2200,8 @@ static GstElement* __build_appsrc_render_pipeline(media_packet_source_s *packet_
        GstElement *sink;
        GstCaps *caps;
 
-       if (!packet_source) {
-               g_printerr("packet_source is NULL\n");
+       if (!appsrc) {
+               g_printerr("appsrc is NULL\n");
                return NULL;
        }
 
@@ -2189,7 +2244,9 @@ static GstElement* __build_appsrc_render_pipeline(media_packet_source_s *packet_
                goto error;
        }
 
-       packet_source->appsrc = src;
+       *appsrc = src;
+
+       g_print("appsrc render pipeline is created\n");
 
        return pipeline;
 
@@ -2230,8 +2287,8 @@ static GstElement* __make_src_pipeline(media_packet_source_s *packet_source)
                switch (mimetype) {
                case MEDIA_FORMAT_H264_SP:
                        packet_source->src_pipeline = __build_h264_format_pipeline(packet_source);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
-                       packet_source->render_pipeline = __build_appsrc_render_pipeline(packet_source);
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
+                       packet_source->render_pipeline = __build_appsrc_render_pipeline(&packet_source->appsrc);
 #endif
                        break;
                case MEDIA_FORMAT_I420:
@@ -2274,7 +2331,7 @@ static void _start_pushing_packet(int index, int source_id)
        }
 
        state_change_ret = gst_element_set_state(g_conns[index].packet_sources[i].src_pipeline, GST_STATE_PLAYING);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
        if (g_conns[index].packet_sources[i].render_pipeline)
                state_change_ret = gst_element_set_state(g_conns[index].packet_sources[i].render_pipeline, GST_STATE_PLAYING);
 #endif
@@ -2307,7 +2364,7 @@ static void _stop_pushing_packet(int index, int source_id)
        g_conns[index].packet_sources[i].is_stop_requested = true;
 
        gst_element_set_state(g_conns[index].packet_sources[i].src_pipeline, GST_STATE_PAUSED);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
        if (g_conns[index].packet_sources[i].render_pipeline)
                gst_element_set_state(g_conns[index].packet_sources[i].render_pipeline, GST_STATE_PAUSED);
 #endif