#endif
#define PACKAGE "webrtc_test"
-//#define __DEBUG_VALIDATE_MEDIA_PACKET__
+//#define __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
+//#define __DEBUG_VALIDATE_ENCODED_FRAME_CB__
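+/* __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__ mirrors packets pushed to a media packet source into a local appsrc render pipeline;
+ * __DEBUG_VALIDATE_ENCODED_FRAME_CB__ does the same for packets delivered to __encoded_frame_cb(). */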
#ifdef LOG_TAG
#undef LOG_TAG
webrtc_h webrtc;
GstElement *src_pipeline;
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
GstElement *render_pipeline;
GstElement *appsrc;
#endif
webrtc_display_type_e display_type;
Evas_Object *eo;
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
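+ /* local appsrc render pipeline used to validate frames delivered to __encoded_frame_cb() */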
+ GstElement *render_pipeline;
+ GstElement *appsrc;
+#endif
media_packet_source_s packet_sources[MAX_MEDIA_PACKET_SOURCE_LEN];
} connection_s;
static webrtc_signaling_server_h g_inner_signaling_server;
+#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
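+/* builds the appsrc-based render pipeline shared by both debug validation paths and hands the appsrc element back via the out parameter */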
+static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc);
+#endif
+
static void win_del(void *data, Evas_Object *obj, void *event)
{
elm_exit();
if (g_conns[index].recv_channels[i] != NULL)
g_conns[index].recv_channels[i] = NULL;
}
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
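+ /* tear down the per-connection render pipeline created in __track_added_cb() */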
+ if (g_conns[index].render_pipeline) {
+ GstStateChangeReturn state_change_ret = gst_element_set_state(g_conns[index].render_pipeline, GST_STATE_NULL);
+ if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+ g_printerr("failed to set state to NULL\n");
+ gst_object_unref(g_conns[index].render_pipeline);
+ g_conns[index].render_pipeline = NULL;
+ g_print("appsrc render pipeline is released\n");
+ }
+#endif
}
}
g_print("__track_added_cb() is invoked, webrtc[%p], type[%d], id[%u], conn[%p]\n", webrtc, type, id, conn);
if (type == WEBRTC_MEDIA_TYPE_VIDEO) {
- g_print("Video track is added, ");
+#ifndef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
if (conn->display_type == WEBRTC_DISPLAY_TYPE_OVERLAY) {
- g_print("set display - overlay, object[%p]\n", g_win_id);
+ g_print("Video track is added, set display - overlay, object[%p]\n", g_win_id);
webrtc_set_display(conn->webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_win_id);
} else if (conn->display_type == WEBRTC_DISPLAY_TYPE_EVAS) {
- g_print("set display - evas object[%p]\n", conn->eo);
+ g_print("Video track is added, set display - evas object[%p]\n", conn->eo);
webrtc_set_display(conn->webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->eo);
} else {
- g_print("invalid display type[%d]\n", conn->display_type);
+ g_print("Video track is added, invalid display type[%d]\n", conn->display_type);
}
+#else
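+ /* skip webrtc_set_display(); encoded frames are rendered locally through the appsrc pipeline instead */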
+ g_print("Video track is added\n");
+ if ((conn->render_pipeline = __build_appsrc_render_pipeline(&conn->appsrc))) {
+ GstStateChangeReturn state_change_ret = gst_element_set_state(conn->render_pipeline, GST_STATE_PLAYING);
+ if (state_change_ret == GST_STATE_CHANGE_FAILURE)
+ g_printerr("failed to set state to PLAYING\n");
+ }
+#endif
} else if (type == WEBRTC_MEDIA_TYPE_AUDIO) {
g_print("Audio track is added\n");
}
static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned int track_id, media_packet_h packet, void *user_data)
{
void *data_ptr = NULL;
+ connection_s *conn = (connection_s *)user_data;
+
+ if (conn == NULL) {
+ g_printerr("conn is NULL\n");
+ return;
+ }
/* get data pointer from media packet */
if (media_packet_get_buffer_data_ptr(packet, &data_ptr) != MEDIA_PACKET_ERROR_NONE)
	g_printerr("failed to media_packet_get_buffer_data_ptr()\n");

g_print("webrtc[%p] type[%u] track_id[%u] packet[%p, data_ptr:%p] user_data[%p]\n",
	webrtc, type, track_id, packet, data_ptr, user_data);
+#ifdef __DEBUG_VALIDATE_ENCODED_FRAME_CB__
+ {
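+ /* retrieve the GstBuffer carried in the packet's extra data and feed it to the local appsrc for rendering */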
+ GstFlowReturn gst_ret = GST_FLOW_OK;
+ GstBuffer *buffer = NULL;
+
+ media_packet_get_extra(packet, (void**)&buffer);
+ if (buffer) {
+ GstMapInfo buff_info = GST_MAP_INFO_INIT;
+ if (!gst_buffer_map(buffer, &buff_info, GST_MAP_READ)) {
+ g_print("failed to gst_buffer_map()\n");
+ media_packet_destroy(packet);
+ return;
+ }
+ g_print("buffer[%p] buffer_info.data[%p]\n", buffer, buff_info.data);
+ gst_buffer_unmap(buffer, &buff_info);
+
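+ /* "push-buffer" takes its own reference to the buffer rather than ownership, so destroying the packet below is safe */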
+ if (conn->appsrc)
+ 	g_signal_emit_by_name(G_OBJECT(conn->appsrc), "push-buffer", buffer, &gst_ret);
+ if (gst_ret != GST_FLOW_OK)
+ g_printerr("failed to 'push-buffer', gst_ret[0x%x]\n", gst_ret);
+ }
+ }
+#endif
/* media packet should be freed after use */
media_packet_destroy(packet);
}
return NULL;
}
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet)
{
bool has_tbm_surface = false;
return new_buffer;
}
-#endif /* __DEBUG_VALIDATE_MEDIA_PACKET__ */
+#endif /* __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__ */
static void __stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
{
return;
}
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
GstBuffer *buffer_from_packet = __get_buffer_from_packet(packet);
if (!buffer_from_packet) {
media_packet_destroy(packet);
return NULL;
}
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
-static GstElement* __build_appsrc_render_pipeline(media_packet_source_s *packet_source)
+#if defined(__DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__) || defined(__DEBUG_VALIDATE_ENCODED_FRAME_CB__)
+static GstElement* __build_appsrc_render_pipeline(GstElement **appsrc)
{
GstElement *pipeline;
GstElement *src;
GstElement *sink;
GstCaps *caps;
- if (!packet_source) {
- g_printerr("packet_source is NULL\n");
+ if (!appsrc) {
+ g_printerr("appsrc is NULL\n");
return NULL;
}
goto error;
}
- packet_source->appsrc = src;
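+ /* hand the appsrc element back to the caller, which pushes buffers into it */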
+ *appsrc = src;
+
+ g_print("appsrc render pipeline is created\n");
return pipeline;
switch (mimetype) {
case MEDIA_FORMAT_H264_SP:
packet_source->src_pipeline = __build_h264_format_pipeline(packet_source);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
- packet_source->render_pipeline = __build_appsrc_render_pipeline(packet_source);
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
+ packet_source->render_pipeline = __build_appsrc_render_pipeline(&packet_source->appsrc);
#endif
break;
case MEDIA_FORMAT_I420:
}
state_change_ret = gst_element_set_state(g_conns[index].packet_sources[i].src_pipeline, GST_STATE_PLAYING);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
if (g_conns[index].packet_sources[i].render_pipeline)
state_change_ret = gst_element_set_state(g_conns[index].packet_sources[i].render_pipeline, GST_STATE_PLAYING);
#endif
g_conns[index].packet_sources[i].is_stop_requested = true;
gst_element_set_state(g_conns[index].packet_sources[i].src_pipeline, GST_STATE_PAUSED);
-#ifdef __DEBUG_VALIDATE_MEDIA_PACKET__
+#ifdef __DEBUG_VALIDATE_MEDIA_PACKET_SOURCE__
if (g_conns[index].packet_sources[i].render_pipeline)
gst_element_set_state(g_conns[index].packet_sources[i].render_pipeline, GST_STATE_PAUSED);
#endif