g_cond_clear(&packet_source->cond);
}
-#ifndef TIZEN_TV
-static void __espp_prepare_async_done_cb(bool result, void *user_data)
-{
- connection_s *conn = (connection_s *)user_data;
-
- g_print("__espp_prepare_async_done_cb() is called, result[%u] conn[%p]\n", result, conn);
-
- g_cond_signal(&conn->render.espp.cond);
-}
-
-static void __espp_ready_to_prepare_cb(esplusplayer_stream_type type, void *user_data)
-{
- connection_s *conn = (connection_s *)user_data;
-
- g_print("__espp_ready_to_prepare_cb() is called, type[%u], conn[%p]\n", type, conn);
-
- g_cond_signal(&conn->render.espp.cond);
-}
-
-static void __espp_init(int index)
-{
- if (g_ad.conns[index].render.espp.handle)
- return;
-
- g_mutex_init(&g_ad.conns[index].render.espp.mutex);
- g_cond_init(&g_ad.conns[index].render.espp.cond);
- g_ad.conns[index].render.espp.audio_track_preparing = false;
- g_ad.conns[index].render.espp.video_track_preparing = false;
-
- g_print("espp create & open\n");
- g_ad.conns[index].render.espp.handle = esplusplayer_create();
- esplusplayer_set_prepare_async_done_cb(g_ad.conns[index].render.espp.handle, __espp_prepare_async_done_cb, &g_ad.conns[index]);
- esplusplayer_set_ready_to_prepare_cb(g_ad.conns[index].render.espp.handle, __espp_ready_to_prepare_cb, &g_ad.conns[index]);
- esplusplayer_open(g_ad.conns[index].render.espp.handle);
-}
-
-static void __espp_deinit(int index)
-{
- if (!g_ad.conns[index].render.espp.handle)
- return;
-
- g_print("espp stop & close & destroy\n");
- esplusplayer_stop(g_ad.conns[index].render.espp.handle);
- esplusplayer_close(g_ad.conns[index].render.espp.handle);
- esplusplayer_destroy(g_ad.conns[index].render.espp.handle);
-
- g_ad.conns[index].render.espp.handle = NULL;
- g_ad.conns[index].render.espp.audio_track_preparing = false;
- g_ad.conns[index].render.espp.video_track_preparing = false;
- g_mutex_clear(&g_ad.conns[index].render.espp.mutex);
- g_cond_clear(&g_ad.conns[index].render.espp.cond);
-}
-
-static int __convert_media_format_mime_to_espp_mime(media_format_mimetype_e type)
-{
- g_print("media_format_mimetype(0x%x)\n", type);
-
- switch (type) {
- case MEDIA_FORMAT_OPUS:
- return (int)ESPLUSPLAYER_AUDIO_MIME_TYPE_OPUS;
-
- case MEDIA_FORMAT_VP8:
- return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_VP8;
-
- case MEDIA_FORMAT_VP9:
- return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_VP9;
-
- case MEDIA_FORMAT_H264_SP:
- case MEDIA_FORMAT_H264_MP:
- case MEDIA_FORMAT_H264_HP:
- return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_H264;
-
- case MEDIA_FORMAT_MJPEG:
- return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_MJPEG;
-
- default:
- return -1;
- }
-}
-
-static void __espp_update_audio_info(connection_s *conn, media_packet_h packet)
-{
- media_format_h format;
- media_format_mimetype_e mimetype;
- int type;
- int channels = 0;
- int rate = 0;
-
- RET_IF(!conn, "conn is NULL");
- RET_IF(!packet, "packet is NULL");
- RET_IF(media_packet_get_format(packet, &format) != MEDIA_PACKET_ERROR_NONE, "format is NULL");
-
- media_format_get_audio_info(format, &mimetype, &channels, &rate, NULL, NULL);
- media_format_unref(format);
- RET_IF((type = __convert_media_format_mime_to_espp_mime(mimetype)) == -1, "not supported type");
-
- conn->render.espp.audio_info.codec_data = NULL;
- conn->render.espp.audio_info.codec_data_length = 0;
- conn->render.espp.audio_info.bitrate = 0;
- conn->render.espp.audio_info.channels = channels;
- conn->render.espp.audio_info.sample_rate = rate;
- conn->render.espp.audio_info.mime_type = type;
-}
-
-static void __espp_update_video_info(connection_s *conn, media_packet_h packet)
-{
- media_format_h format;
- media_format_mimetype_e mimetype;
- int type;
- int width = 0;
- int height = 0;
-
- RET_IF(!conn, "conn is NULL");
- RET_IF(!packet, "packet is NULL");
- RET_IF(media_packet_get_format(packet, &format) != MEDIA_PACKET_ERROR_NONE, "format is NULL");
-
- media_format_get_video_info(format, &mimetype, &width, &height, NULL, NULL);
- media_format_unref(format);
- RET_IF((type = __convert_media_format_mime_to_espp_mime(mimetype)) == -1, "not supported type");
-
- conn->render.espp.video_info.codec_data = NULL;
- conn->render.espp.video_info.codec_data_length = 0;
- conn->render.espp.video_info.width = width;
- conn->render.espp.video_info.height = height;
- conn->render.espp.video_info.max_width = width;
- conn->render.espp.video_info.max_height = height;
- conn->render.espp.video_info.mime_type = type;
- conn->render.espp.video_info.framerate_num = 30; /* FIXME: avoid hard-coding */
- conn->render.espp.video_info.framerate_den = 1;
-}
-#endif
-
static void _webrtc_create(int index)
{
int ret = WEBRTC_ERROR_NONE;
g_ad.conns[index].render.loopback_track_id = 0;
-#ifndef TIZEN_TV
g_ad.conns[index].encoded_audio_frame_cb_is_set = false;
g_ad.conns[index].encoded_video_frame_cb_is_set = false;
- __espp_deinit(index);
+#ifdef TIZEN_FEATURE_ESPP
+ _espp_deinit(index);
#endif
}
RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
g_print("webrtc_start() success\n");
-#ifndef TIZEN_TV
- if (g_ad.conns[index].encoded_audio_frame_cb_is_set ||
- g_ad.conns[index].encoded_video_frame_cb_is_set)
- __espp_init(index);
+#ifdef TIZEN_FEATURE_ESPP
+ _espp_init(index);
#endif
}
g_print("audio render pipeline is released\n");
}
}
-#ifndef TIZEN_TV
- if (g_ad.conns[index].encoded_audio_frame_cb_is_set ||
- g_ad.conns[index].encoded_video_frame_cb_is_set)
- __espp_deinit(index);
+#ifdef TIZEN_FEATURE_ESPP
+ _espp_deinit(index);
#endif
}
if (type == WEBRTC_MEDIA_TYPE_VIDEO) {
if (!g_ad.validate_encoded_frame_cb) {
-#ifndef TIZEN_TV
- conn->render.espp.video_track_preparing = true;
-#endif
- if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_OVERLAY) {
- g_print("Video track is added, set display - overlay, object[%p]\n", g_ad.win_id);
-#ifndef TIZEN_TV
- if (!conn->render.espp.handle)
- webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
- else
+ g_print("Video track is added, set display - display_type[%d], display[%p]\n",
+ conn->render.display_type, conn->render.display_type == WEBRTC_DISPLAY_TYPE_OVERLAY ? g_ad.win_id : conn->render.eo);
+#ifdef TIZEN_FEATURE_ESPP
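+ /* When espp handles the encoded frames, set the display on the espp handle instead of on webrtc */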
+ if (conn->render.espp.handle) {
+ conn->render.espp.video_track_preparing = true;
+ if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_OVERLAY)
esplusplayer_set_display(conn->render.espp.handle, ESPLUSPLAYER_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
-#else
- webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
+ else if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_EVAS)
+ esplusplayer_set_display(conn->render.espp.handle, ESPLUSPLAYER_DISPLAY_TYPE_EVAS, conn->render.eo);
+ else
+ g_print("invalid display type[%d]\n", conn->render.display_type);
+ } else
#endif
- } else if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_EVAS) {
- g_print("Video track is added, set display - evas object[%p]\n", conn->render.eo);
-#ifndef TIZEN_TV
- if (!conn->render.espp.handle)
+ {
+ if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_OVERLAY)
+ webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_OVERLAY, g_ad.win_id);
+ else if (conn->render.display_type == WEBRTC_DISPLAY_TYPE_EVAS)
webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->render.eo);
else
- esplusplayer_set_display(conn->render.espp.handle, ESPLUSPLAYER_DISPLAY_TYPE_EVAS, conn->render.eo);
-#else
- webrtc_set_display(webrtc, id, WEBRTC_DISPLAY_TYPE_EVAS, conn->render.eo);
-#endif
- } else {
- g_print("Video track is added, invalid display type[%d]\n", conn->render.display_type);
+ g_print("invalid display type[%d]\n", conn->render.display_type);
}
} else {
- g_print("Video track is added\n");
+ g_print("Video track is added, render it on gst pipeline for validation\n");
if ((conn->video_render_pipeline = _build_appsrc_vp8_render_pipeline(&conn->appsrc_for_video))) {
GstStateChangeReturn state_change_ret = gst_element_set_state(conn->video_render_pipeline, GST_STATE_PLAYING);
if (state_change_ret == GST_STATE_CHANGE_FAILURE)
}
} else if (type == WEBRTC_MEDIA_TYPE_AUDIO) {
- g_print("Audio track is added\n");
if (!g_ad.validate_encoded_frame_cb) {
int ret;
-#ifndef TIZEN_TV
- conn->render.espp.audio_track_preparing = true;
-
- if (conn->render.espp.handle) /* TODO: apply stream info if the function is provided in espp */
+ g_print("Audio track is added, set stream_info[%p]\n", &conn->render.stream_info);
+#ifdef TIZEN_FEATURE_ESPP
+ if (conn->render.espp.handle) { /* TODO: apply stream info if the function is provided in espp */
+ conn->render.espp.audio_track_preparing = true;
return;
+ }
#endif
if (!conn->render.stream_info) {
ret = sound_manager_create_stream_information(SOUND_STREAM_TYPE_MEDIA, NULL, NULL, &conn->render.stream_info);
if (ret != WEBRTC_ERROR_NONE)
g_printerr("failed to webrtc_set_sound_stream_info(), ret[0x%x]\n", ret);
} else {
+ g_print("Video track is added, render it on gst pipeline for validation\n");
if ((conn->audio_render_pipeline = _build_appsrc_opus_render_pipeline(&conn->appsrc_for_audio))) {
GstStateChangeReturn state_change_ret = gst_element_set_state(conn->audio_render_pipeline, GST_STATE_PLAYING);
if (state_change_ret == GST_STATE_CHANGE_FAILURE)
g_print("webrtc_unset_data_channel_cb() success\n");
}
-#ifndef TIZEN_TV
-static void __espp_submit_packet(connection_s *conn, media_packet_h packet, webrtc_media_type_e type)
-{
- uint64_t gst_pts;
- uint64_t gst_duration;
- esplusplayer_es_packet es_packet;
- esplusplayer_submit_status submit_status;
-
- RET_IF(!conn, "conn is NULL");
- RET_IF(!packet, "media_packet is NULL");
-
- es_packet.type = type == WEBRTC_MEDIA_TYPE_AUDIO ? ESPLUSPLAYER_STREAM_TYPE_AUDIO : ESPLUSPLAYER_STREAM_TYPE_VIDEO;
- media_packet_get_buffer_data_ptr(packet, (void**)&es_packet.buffer);
- media_packet_get_buffer_size(packet, (uint64_t *)&es_packet.buffer_size);
- media_packet_get_pts(packet, &gst_pts);
- media_packet_get_duration(packet, &gst_duration);
- if (GST_CLOCK_TIME_IS_VALID(gst_pts))
- es_packet.pts = GST_TIME_AS_MSECONDS(gst_pts);
- if (GST_CLOCK_TIME_IS_VALID(gst_duration))
- es_packet.duration = GST_TIME_AS_MSECONDS(gst_duration);
- es_packet.matroska_color_info = NULL;
- es_packet.hdr10p_metadata = NULL;
-
- submit_status = esplusplayer_submit_packet(conn->render.espp.handle, &es_packet);
- g_print("packet[media:%p, es:%p] type[%d] espp submit packet status[%d]\n", packet, &es_packet, type, submit_status);
-}
-#endif
-
static void __encoded_frame_cb(webrtc_h webrtc, webrtc_media_type_e type, unsigned int track_id, media_packet_h packet, void *user_data)
{
void *data_ptr = NULL;
g_print("webrtc[%p] type[%u] track_id[%u] packet[%p, data_ptr:%p] user_data[%p]\n",
webrtc, type, track_id, packet, data_ptr, user_data);
-#ifndef TIZEN_TV
+#ifdef TIZEN_FEATURE_ESPP
if (type == WEBRTC_MEDIA_TYPE_AUDIO && conn->encoded_audio_frame_cb_is_set) {
- if (!conn->render.espp.handle) {
- g_printerr("render.espp is NULL\n");
- goto out;
- }
- /* Get format and set espp audiostream & prepare async */
- if (conn->render.espp.audio_track_preparing) {
- __espp_update_audio_info(conn, packet);
-
- esplusplayer_set_audio_stream_info(conn->render.espp.handle, &conn->render.espp.audio_info);
- esplusplayer_activate(conn->render.espp.handle, ESPLUSPLAYER_STREAM_TYPE_AUDIO);
-
- if (!conn->render.espp.video_track_preparing) {
- g_print("espp prepare async\n");
- esplusplayer_prepare_async(conn->render.espp.handle);
-
- g_mutex_lock(&conn->render.espp.mutex);
- g_cond_wait(&conn->render.espp.cond, &conn->render.espp.mutex); /* wait for ready to prepare cb */
- g_mutex_unlock(&conn->render.espp.mutex);
-
- __espp_submit_packet(conn, packet, type);
-
- g_mutex_lock(&conn->render.espp.mutex);
- g_cond_wait(&conn->render.espp.cond, &conn->render.espp.mutex); /* wait for prepare async done cb */
- g_mutex_unlock(&conn->render.espp.mutex);
-
- g_print("espp start\n");
- esplusplayer_start(conn->render.espp.handle);
- }
-
- conn->render.espp.audio_track_preparing = false;
+ if (_espp_prepare_and_start(conn, packet, type))
goto out;
- }
if (conn->render.espp.video_track_preparing) /* not ready for video track, skip submit packet */
goto out;
- __espp_submit_packet(conn, packet, type);
+ _espp_submit_packet(conn, packet, type);
} else if (type == WEBRTC_MEDIA_TYPE_VIDEO && conn->encoded_video_frame_cb_is_set) {
-
- if (!conn->render.espp.handle) {
- g_printerr("render.espp is NULL\n");
- goto out;
- }
- /* Get format and set espp videostream & prepare async */
- if (conn->render.espp.video_track_preparing) {
- __espp_update_video_info(conn, packet);
-
- esplusplayer_set_video_stream_info(conn->render.espp.handle, &conn->render.espp.video_info);
- esplusplayer_activate(conn->render.espp.handle, ESPLUSPLAYER_STREAM_TYPE_VIDEO);
-
- if (!conn->render.espp.audio_track_preparing) {
- g_print("espp prepare async\n");
- esplusplayer_prepare_async(conn->render.espp.handle);
-
- g_mutex_lock(&conn->render.espp.mutex);
- g_cond_wait(&conn->render.espp.cond, &conn->render.espp.mutex); /* wait for ready to prepare cb */
- g_mutex_unlock(&conn->render.espp.mutex);
-
- __espp_submit_packet(conn, packet, type);
-
- g_mutex_lock(&conn->render.espp.mutex);
- g_cond_wait(&conn->render.espp.cond, &conn->render.espp.mutex); /* wait for prepare async done cb */
- g_mutex_unlock(&conn->render.espp.mutex);
-
- g_print("espp start\n");
- esplusplayer_start(conn->render.espp.handle);
- }
-
- conn->render.espp.video_track_preparing = false;
+ if (_espp_prepare_and_start(conn, packet, type))
goto out;
- }
if (conn->render.espp.audio_track_preparing) /* not ready for audio track, skip submit packet */
goto out;
- __espp_submit_packet(conn, packet, type);
+ _espp_submit_packet(conn, packet, type);
}
#endif
if (g_ad.validate_encoded_frame_cb)
_push_buffer_to_validate_encoded_frame_callback_data(conn, packet, type);
-#ifndef TIZEN_TV
+#ifdef TIZEN_FEATURE_ESPP
out:
#endif
/* media packet should be freed after use */
g_print("webrtc_set_encoded_audio_frame_cb() success\n");
if (!g_ad.validate_encoded_frame_cb) {
-#ifndef TIZEN_TV
- if (!g_ad.conns[index].encoded_audio_frame_cb_is_set) {
- g_ad.conns[index].encoded_audio_frame_cb_is_set = true;
- __espp_init(index);
- }
+ g_ad.conns[index].encoded_audio_frame_cb_is_set = true;
+#ifdef TIZEN_FEATURE_ESPP
+ _espp_init(index);
#endif
}
}
g_print("webrtc_unset_encoded_audio_frame_cb() success\n");
if (!g_ad.validate_encoded_frame_cb) {
-#ifndef TIZEN_TV
- if (g_ad.conns[index].encoded_audio_frame_cb_is_set) {
- g_ad.conns[index].encoded_audio_frame_cb_is_set = false;
- __espp_deinit(index);
- }
+ g_ad.conns[index].encoded_audio_frame_cb_is_set = false;
+#ifdef TIZEN_FEATURE_ESPP
+ _espp_deinit(index);
#endif
}
}
g_print("webrtc_set_encoded_video_frame_cb() success\n");
if (!g_ad.validate_encoded_frame_cb) {
-#ifndef TIZEN_TV
- if (!g_ad.conns[index].encoded_video_frame_cb_is_set) {
- g_ad.conns[index].encoded_video_frame_cb_is_set = true;
- __espp_init(index);
- }
+ g_ad.conns[index].encoded_video_frame_cb_is_set = true;
+#ifdef TIZEN_FEATURE_ESPP
+ _espp_init(index);
#endif
}
}
g_print("webrtc_unset_encoded_video_frame_cb() success\n");
if (!g_ad.validate_encoded_frame_cb) {
-#ifndef TIZEN_TV
- if (g_ad.conns[index].encoded_video_frame_cb_is_set) {
- g_ad.conns[index].encoded_video_frame_cb_is_set = false;
- __espp_deinit(index);
- }
+ g_ad.conns[index].encoded_video_frame_cb_is_set = false;
+#ifdef TIZEN_FEATURE_ESPP
+ _espp_deinit(index);
#endif
}
}
--- /dev/null
+/*
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "webrtc_test_priv.h"
+
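+/* Wakes up the waiter in _espp_prepare_and_start() once esplusplayer_prepare_async() has finished */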
+static void __espp_prepare_async_done_cb(bool result, void *user_data)
+{
+ connection_s *conn = (connection_s *)user_data;
+
+ g_print("__espp_prepare_async_done_cb() is called, result[%u] conn[%p]\n", result, conn);
+
+ g_cond_signal(&conn->render.espp.cond);
+}
+
+static void __espp_ready_to_prepare_cb(esplusplayer_stream_type type, void *user_data)
+{
+ connection_s *conn = (connection_s *)user_data;
+
+ g_print("__espp_ready_to_prepare_cb() is called, type[%u], conn[%p]\n", type, conn);
+
+ g_cond_signal(&conn->render.espp.cond);
+}
+
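+/* Creates and opens an espp handle for this connection; does nothing unless an encoded audio or video frame callback is set */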
+void _espp_init(int index)
+{
+ connection_s *conn = &get_appdata()->conns[index];
+
+ if (conn->render.espp.handle)
+ return;
+
+ if (!conn->encoded_audio_frame_cb_is_set && !conn->encoded_video_frame_cb_is_set)
+ return;
+
+ g_mutex_init(&conn->render.espp.mutex);
+ g_cond_init(&conn->render.espp.cond);
+ conn->render.espp.audio_track_preparing = false;
+ conn->render.espp.video_track_preparing = false;
+
+ g_print("espp create & open\n");
+ conn->render.espp.handle = esplusplayer_create();
+ esplusplayer_set_prepare_async_done_cb(conn->render.espp.handle, __espp_prepare_async_done_cb, conn);
+ esplusplayer_set_ready_to_prepare_cb(conn->render.espp.handle, __espp_ready_to_prepare_cb, conn);
+ esplusplayer_open(conn->render.espp.handle);
+}
+
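+/* Stops, closes and destroys the espp handle and clears the mutex/cond used while preparing */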
+void _espp_deinit(int index)
+{
+ connection_s *conn = &get_appdata()->conns[index];
+
+ if (!conn->render.espp.handle)
+ return;
+
+ g_print("espp stop & close & destroy\n");
+ esplusplayer_stop(conn->render.espp.handle);
+ esplusplayer_close(conn->render.espp.handle);
+ esplusplayer_destroy(conn->render.espp.handle);
+
+ conn->render.espp.handle = NULL;
+ conn->render.espp.audio_track_preparing = false;
+ conn->render.espp.video_track_preparing = false;
+ g_mutex_clear(&conn->render.espp.mutex);
+ g_cond_clear(&conn->render.espp.cond);
+}
+
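+/* Maps a media_format mimetype to the matching espp mime type; returns -1 for unsupported formats */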
+static int __convert_media_format_mime_to_espp_mime(media_format_mimetype_e type)
+{
+ g_print("media_format_mimetype(0x%x)\n", type);
+
+ switch (type) {
+ case MEDIA_FORMAT_OPUS:
+ return (int)ESPLUSPLAYER_AUDIO_MIME_TYPE_OPUS;
+
+ case MEDIA_FORMAT_VP8:
+ return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_VP8;
+
+ case MEDIA_FORMAT_VP9:
+ return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_VP9;
+
+ case MEDIA_FORMAT_H264_SP:
+ case MEDIA_FORMAT_H264_MP:
+ case MEDIA_FORMAT_H264_HP:
+ return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_H264;
+
+ case MEDIA_FORMAT_MJPEG:
+ return (int)ESPLUSPLAYER_VIDEO_MIME_TYPE_MJPEG;
+
+ default:
+ return -1;
+ }
+}
+
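+/* Extracts channels, sample rate and mime type from the packet's format and stores them in render.espp.audio_info */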
+static void __espp_update_audio_info(connection_s *conn, media_packet_h packet)
+{
+ media_format_h format;
+ media_format_mimetype_e mimetype;
+ int type;
+ int channels = 0;
+ int rate = 0;
+
+ RET_IF(!conn, "conn is NULL");
+ RET_IF(!packet, "packet is NULL");
+ RET_IF(media_packet_get_format(packet, &format) != MEDIA_PACKET_ERROR_NONE, "format is NULL");
+
+ media_format_get_audio_info(format, &mimetype, &channels, &rate, NULL, NULL);
+ media_format_unref(format);
+ RET_IF((type = __convert_media_format_mime_to_espp_mime(mimetype)) == -1, "not supported type");
+
+ conn->render.espp.audio_info.codec_data = NULL;
+ conn->render.espp.audio_info.codec_data_length = 0;
+ conn->render.espp.audio_info.bitrate = 0;
+ conn->render.espp.audio_info.channels = channels;
+ conn->render.espp.audio_info.sample_rate = rate;
+ conn->render.espp.audio_info.mime_type = type;
+}
+
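+/* Extracts resolution and mime type from the packet's format and stores them in render.espp.video_info */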
+static void __espp_update_video_info(connection_s *conn, media_packet_h packet)
+{
+ media_format_h format;
+ media_format_mimetype_e mimetype;
+ int type;
+ int width = 0;
+ int height = 0;
+
+ RET_IF(!conn, "conn is NULL");
+ RET_IF(!packet, "packet is NULL");
+ RET_IF(media_packet_get_format(packet, &format) != MEDIA_PACKET_ERROR_NONE, "format is NULL");
+
+ media_format_get_video_info(format, &mimetype, &width, &height, NULL, NULL);
+ media_format_unref(format);
+ RET_IF((type = __convert_media_format_mime_to_espp_mime(mimetype)) == -1, "not supported type");
+
+ conn->render.espp.video_info.codec_data = NULL;
+ conn->render.espp.video_info.codec_data_length = 0;
+ conn->render.espp.video_info.width = width;
+ conn->render.espp.video_info.height = height;
+ conn->render.espp.video_info.max_width = width;
+ conn->render.espp.video_info.max_height = height;
+ conn->render.espp.video_info.mime_type = type;
+ conn->render.espp.video_info.framerate_num = 30; /* FIXME: avoid hard-coding */
+ conn->render.espp.video_info.framerate_den = 1;
+}
+
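+/* Converts the media packet into an esplusplayer_es_packet (PTS/duration in milliseconds) and submits it to espp */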
+void _espp_submit_packet(connection_s *conn, media_packet_h packet, webrtc_media_type_e type)
+{
+ uint64_t gst_pts;
+ uint64_t gst_duration;
+ esplusplayer_es_packet es_packet;
+ esplusplayer_submit_status submit_status;
+
+ RET_IF(!conn, "conn is NULL");
+ RET_IF(!packet, "media_packet is NULL");
+
+ es_packet.type = type == WEBRTC_MEDIA_TYPE_AUDIO ? ESPLUSPLAYER_STREAM_TYPE_AUDIO : ESPLUSPLAYER_STREAM_TYPE_VIDEO;
+ media_packet_get_buffer_data_ptr(packet, (void**)&es_packet.buffer);
+ media_packet_get_buffer_size(packet, (uint64_t *)&es_packet.buffer_size);
+ media_packet_get_pts(packet, &gst_pts);
+ media_packet_get_duration(packet, &gst_duration);
+ if (GST_CLOCK_TIME_IS_VALID(gst_pts))
+ es_packet.pts = GST_TIME_AS_MSECONDS(gst_pts);
+ if (GST_CLOCK_TIME_IS_VALID(gst_duration))
+ es_packet.duration = GST_TIME_AS_MSECONDS(gst_duration);
+ es_packet.matroska_color_info = NULL;
+ es_packet.hdr10p_metadata = NULL;
+
+ submit_status = esplusplayer_submit_packet(conn->render.espp.handle, &es_packet);
+ g_print("packet[media:%p, es:%p] type[%d] espp submit packet status[%d]\n", packet, &es_packet, type, submit_status);
+}
+
+typedef void (*update_stream_info_func)(connection_s *conn, media_packet_h packet);
+typedef int (*espp_set_stream_info_func)(esplusplayer_handle handle, void *info);
+
+typedef struct {
+ update_stream_info_func update_stream_info_func;
+ esplusplayer_stream_type stream_type;
+ espp_set_stream_info_func set_stream_info_func;
+} espp_info_s;
+
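+/* Dispatch table indexed by webrtc_media_type_e (relies on audio preceding video in the enum):
+ * stream-info updater, espp stream type and espp setter for each track */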
+static const espp_info_s espp_infos[] = {
+ { __espp_update_audio_info, ESPLUSPLAYER_STREAM_TYPE_AUDIO, (espp_set_stream_info_func)esplusplayer_set_audio_stream_info },
+ { __espp_update_video_info, ESPLUSPLAYER_STREAM_TYPE_VIDEO, (espp_set_stream_info_func)esplusplayer_set_video_stream_info }
+};
+
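+/* Handles a packet that arrives while its track is still preparing: sets the stream info and activates the track; once
+ * the other track is no longer preparing, it prepares espp, submits this packet and starts playback.
+ * Returns true if the packet was consumed here. */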
+bool _espp_prepare_and_start(connection_s *conn, media_packet_h packet, webrtc_media_type_e type)
+{
+ bool is_audio;
+ bool *espp_track_preparing;
+ bool *espp_track_preparing_opposite;
+
+ if (!conn)
+ return false;
+ if (!packet)
+ return false;
+ if (!conn->render.espp.handle) {
+ g_printerr("render.espp.handle is NULL\n");
+ return false;
+ }
+
+ is_audio = (type == WEBRTC_MEDIA_TYPE_AUDIO);
+ espp_track_preparing = is_audio ? &conn->render.espp.audio_track_preparing : &conn->render.espp.video_track_preparing;
+ espp_track_preparing_opposite = is_audio ? &conn->render.espp.video_track_preparing : &conn->render.espp.audio_track_preparing;
+
+ if (!(*espp_track_preparing))
+ return false;
+
+ /* Update and set the stream info for this track on espp, then activate the track */
+ espp_infos[type].update_stream_info_func(conn, packet);
+ espp_infos[type].set_stream_info_func(conn->render.espp.handle, is_audio ?
+ (void *)&conn->render.espp.audio_info :
+ (void *)&conn->render.espp.video_info);
+ esplusplayer_activate(conn->render.espp.handle, espp_infos[type].stream_type);
+
+ if (!(*espp_track_preparing_opposite)) {
+ g_print("espp prepare async\n");
+ esplusplayer_prepare_async(conn->render.espp.handle);
+
+ g_mutex_lock(&conn->render.espp.mutex);
+ g_cond_wait(&conn->render.espp.cond, &conn->render.espp.mutex); /* wait for ready to prepare cb */
+ g_mutex_unlock(&conn->render.espp.mutex);
+
+ _espp_submit_packet(conn, packet, type);
+
+ g_mutex_lock(&conn->render.espp.mutex);
+ g_cond_wait(&conn->render.espp.cond, &conn->render.espp.mutex); /* wait for prepare async done cb */
+ g_mutex_unlock(&conn->render.espp.mutex);
+
+ g_print("espp start\n");
+ esplusplayer_start(conn->render.espp.handle);
+ }
+
+ *espp_track_preparing = false;
+
+ return true;
+}