*/
#include <gst/audio/audio.h>
+#include <gst/allocators/gsttizenmemory.h>
+
#include "webrtc_internal.h"
#include "webrtc_private.h"
+#include "webrtc_source_common.h"
+#include "webrtc_source_mediapacket.h"
+
#include <tbm_surface_internal.h>
#include <media_packet_internal.h>
-#include <gst/allocators/gsttizenmemory.h>
#include <Elementary.h>
-#define GST_KLASS_NAME_ENCODER_AUDIO "Codec/Encoder/Audio"
-#define GST_KLASS_NAME_ENCODER_VIDEO "Codec/Encoder/Video"
-#define GST_KLASS_NAME_DECODER_AUDIO "Codec/Decoder/Audio"
-#define GST_KLASS_NAME_DECODER_VIDEO "Codec/Decoder/Video"
-#define GST_KLASS_NAME_CONVERTER_AUDIO "Filter/Converter/Audio"
-#define GST_KLASS_NAME_CONVERTER_VIDEO "Filter/Converter/Video"
-
-#define DEFAULT_ELEMENT_CAMERASRC "v4l2src"
-#define DEFAULT_ELEMENT_AUDIOSRC "pulsesrc"
-#define DEFAULT_ELEMENT_VIDEOTESTSRC "videotestsrc"
-#define DEFAULT_ELEMENT_AUDIOTESTSRC "audiotestsrc"
-#define DEFAULT_ELEMENT_APPSRC "appsrc"
-#define DEFAULT_ELEMENT_SCREENSRC "waylandsrc"
-#define DEFAULT_ELEMENT_QUEUE "queue"
-#define DEFAULT_ELEMENT_VOLUME "volume"
-#define DEFAULT_ELEMENT_INPUT_SELECTOR "input-selector"
-#define DEFAULT_ELEMENT_VIDEOCROP "videocrop"
-#define DEFAULT_ELEMENT_FILESRC "filesrc"
-
-#define ELEMENT_NAME_FIRST_CAPSFILTER "firstCapsfilter"
-#define ELEMENT_NAME_RTP_CAPSFILTER "rtpCapsfilter"
-#define ELEMENT_NAME_VIDEO_SRC "videoSrc"
-#define ELEMENT_NAME_VIDEO_SWITCH "videoSwitch"
-#define ELEMENT_NAME_VIDEO_MUTE_SRC "videoMuteSrc"
-#define ELEMENT_NAME_VOLUME "volume"
-#define ELEMENT_NAME_AUDIO_SRC "audioSrc"
-#define ELEMENT_NAME_MIC_SRC "micSrc"
-#define ELEMENT_NAME_FILE_SRC "fileSrc"
-#define ELEMENT_NAME_AUDIO_QUEUE "audioQueue"
-#define ELEMENT_NAME_VIDEO_QUEUE "videoQueue"
-#define ELEMENT_NAME_AUDIO_CAPSFILTER "audioCapsfilter"
-#define ELEMENT_NAME_VIDEO_CAPSFILTER "videoCapsfilter"
-#define ELEMENT_NAME_AUDIO_PAYLOADER "audioPayloader"
-#define ELEMENT_NAME_VIDEO_PAYLOADER "videoPayloader"
-#define ELEMENT_NAME_VIDEOCROP "videoCrop"
-#define ELEMENT_NAME_SCREENSRC "waylandSrc"
-#define ELEMENT_NAME_AUDIO_FAKESINK "audioFakeSink"
-#define ELEMENT_NAME_VIDEO_FAKESINK "videoFakeSink"
-#define ELEMENT_NAME_AUDIO_APPSRC "audioAppsrc"
-#define ELEMENT_NAME_VIDEO_APPSRC "videoAppsrc"
-
#define MIN_DYNAMIC_PAYLOAD_TYPE 96
#define MAX_DYNAMIC_PAYLOAD_TYPE 127
-#define APPEND_ELEMENT(x_list, x_element) \
-do { \
- if (!(x_element)) \
- break; \
- x_list = g_list_append(x_list, x_element); \
- LOG_DEBUG("%s is appended", GST_ELEMENT_NAME(x_element)); \
-} while (0)
-
-#define PREPEND_ELEMENT(x_list, x_element) \
-do { \
- if (!(x_element)) \
- break; \
- x_list = g_list_prepend(x_list, x_element); \
- LOG_DEBUG("%s is prepended", GST_ELEMENT_NAME(x_element)); \
-} while (0)
-
-#define GET_AV_IDX_BY_TYPE(x_media_type) GET_AV_IDX(x_media_type == MEDIA_TYPE_AUDIO)
-
-typedef struct {
- media_packet_h packet;
- GstBuffer *buffer;
-} packet_buffer_s;
-
typedef struct {
int av_idx;
webrtc_gst_slot_s *source;
static int __link_source_with_webrtcbin(webrtc_gst_slot_s *source, GstElement *webrtcbin);
static GstPadProbeReturn __camerasrc_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data);
-static const char * __get_audio_media_type(const char *codec_name)
-{
- RET_VAL_IF(codec_name == NULL, NULL, "codec_name is NULL");
-
- if (!strcmp(codec_name, "pcmu") || !strcmp(codec_name, "PCMU"))
- return MEDIA_TYPE_AUDIO_MULAW;
- if (!strcmp(codec_name, "pcma") || !strcmp(codec_name, "PCMA"))
- return MEDIA_TYPE_AUDIO_ALAW;
- if (!strcmp(codec_name, "opus") || !strcmp(codec_name, "OPUS"))
- return MEDIA_TYPE_AUDIO_OPUS;
- if (!strcmp(codec_name, "vorbis") || !strcmp(codec_name, "VORBIS"))
- return MEDIA_TYPE_AUDIO_VORBIS;
-
- LOG_ERROR("not supported audio codec_name[%s]", codec_name);
-
- return NULL;
-}
-
-static const char * __get_video_media_type(const char *codec_name)
-{
- RET_VAL_IF(codec_name == NULL, NULL, "codec_name is NULL");
-
- if (!strcmp(codec_name, "vp8") || !strcmp(codec_name, "VP8"))
- return MEDIA_TYPE_VIDEO_VP8;
- if (!strcmp(codec_name, "vp9") || !strcmp(codec_name, "VP9"))
- return MEDIA_TYPE_VIDEO_VP9;
- if (!strcmp(codec_name, "theora") || !strcmp(codec_name, "THEORA"))
- return MEDIA_TYPE_VIDEO_THEORA;
- if (!strcmp(codec_name, "h264") || !strcmp(codec_name, "H264"))
- return MEDIA_TYPE_VIDEO_H264;
- if (!strcmp(codec_name, "h265") || !strcmp(codec_name, "H265"))
- return MEDIA_TYPE_VIDEO_H265;
- if (!strcmp(codec_name, "jpeg") || !strcmp(codec_name, "JPEG") || !strcmp(codec_name, "mjpeg") || !strcmp(codec_name, "MJPEG"))
- return MEDIA_TYPE_VIDEO_JPEG;
-
- LOG_ERROR("not supported video codec_name[%s]", codec_name);
-
- return NULL;
-}
-
-static const char *__get_audio_format_name(media_format_mimetype_e mime_type)
-{
- switch (mime_type) {
- /* RAW formats */
- case MEDIA_FORMAT_PCM_S16LE:
- return "S16LE";
- /* ENCODED formats */
- case MEDIA_FORMAT_PCMU:
- return "PCMU";
- case MEDIA_FORMAT_PCMA:
- return "PCMA";
- case MEDIA_FORMAT_OPUS:
- return "OPUS";
- case MEDIA_FORMAT_VORBIS:
- return "VORBIS";
- default:
- LOG_ERROR("not supported audio mime_type(0x%x)", mime_type);
- return NULL;
- }
-}
-
-static const char *__get_video_format_name(media_format_mimetype_e mime_type, bool zerocopy_enabled)
-{
- switch (mime_type) {
- /* RAW formats */
- case MEDIA_FORMAT_I420:
- return zerocopy_enabled ? "S420" : "I420";
- case MEDIA_FORMAT_NV12:
- return zerocopy_enabled ? "SN12" : "NV12";
- /* ENCODED formats */
- case MEDIA_FORMAT_VP8:
- return "VP8";
- case MEDIA_FORMAT_VP9:
- return "VP9";
- case MEDIA_FORMAT_H264_SP: /* baseline profile */
- case MEDIA_FORMAT_H264_MP: /* main profile */
- case MEDIA_FORMAT_H264_HP: /* high profile */
- return "H264";
- case MEDIA_FORMAT_MJPEG:
- return "JPEG";
- default:
- LOG_ERROR("not supported video mime_type(0x%x)", mime_type);
- return NULL;
- }
-}
-
-static GstAudioFormat __get_gst_audio_format(media_format_mimetype_e mime_type)
-{
- switch (mime_type) {
- /* RAW formats */
- case MEDIA_FORMAT_PCM_S16LE:
- return GST_AUDIO_FORMAT_S16LE;
- /* ENCODED formats */
- case MEDIA_FORMAT_OPUS:
- case MEDIA_FORMAT_VORBIS:
- return GST_AUDIO_FORMAT_ENCODED;
- default:
- LOG_ERROR("not supported mime_type(0x%x)", mime_type);
- return GST_AUDIO_FORMAT_UNKNOWN;
- }
-}
-
-static GstAudioFormat __get_gst_audio_raw_format_from_string(const char *format)
-{
- RET_VAL_IF(format == NULL, GST_AUDIO_FORMAT_UNKNOWN, "format is NULL");
-
- if (!strcmp(format, "S16LE"))
- return GST_AUDIO_FORMAT_S16LE;
-
- LOG_ERROR("not supported raw format(%s)", format);
- return GST_AUDIO_FORMAT_UNKNOWN;
-}
-
-static bool __is_supported_mime_type(media_format_mimetype_e mime_type)
-{
- switch (mime_type) {
- /* AUDIO/RAW formats */
- case MEDIA_FORMAT_PCM_S16LE:
- LOG_INFO("[AUDIO][RAW/%s] mime_type[0x%x]", __get_audio_format_name(mime_type), mime_type);
- return true;
- /* VIDEO/RAW formats */
- case MEDIA_FORMAT_I420:
- case MEDIA_FORMAT_NV12:
- LOG_INFO("[VIDEO][RAW/%s] mime_type[0x%x]", __get_video_format_name(mime_type, false), mime_type);
- return true;
- /* AUDIO/ENCODED formats */
- case MEDIA_FORMAT_PCMU:
- case MEDIA_FORMAT_PCMA:
- case MEDIA_FORMAT_OPUS:
- case MEDIA_FORMAT_VORBIS:
- LOG_INFO("[AUDIO][ENCODED/%s] mime_type[0x%x]", __get_audio_format_name(mime_type), mime_type);
- return true;
- /* VIDEO/ENCODED formats */
- case MEDIA_FORMAT_VP8:
- case MEDIA_FORMAT_VP9:
- case MEDIA_FORMAT_H264_SP:
- case MEDIA_FORMAT_H264_MP:
- case MEDIA_FORMAT_H264_HP:
- case MEDIA_FORMAT_MJPEG:
- LOG_INFO("[VIDEO][ENCODED/%s] mime_type[0x%x]", __get_video_format_name(mime_type, false), mime_type);
- return true;
- default:
- LOG_ERROR("not supported mime_type(0x%x)", mime_type);
- return false;
- }
-}
-
static GstCaps *__make_video_raw_caps_with_resolution(webrtc_gst_slot_s *source, webrtc_ini_s *ini, int width, int height)
{
GstCaps *caps = NULL;
return caps;
}
-static GstCaps *__make_mediapacketsrc_raw_caps_from_media_format(webrtc_gst_slot_s *source)
-{
- GstCaps *caps = NULL;
- media_format_mimetype_e mime_type;
-
- RET_VAL_IF(source == NULL, NULL, "source is NULL");
- RET_VAL_IF(source->media_format == NULL, NULL, "media_format is NULL");
- RET_VAL_IF(source->type != WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET, NULL,
- "type(%d) is not for media packet source", source->type);
-
- if (source->media_types == MEDIA_TYPE_AUDIO) {
- int channels;
- int samplerate;
- GstAudioInfo info;
- GstAudioFormat format;
-
- RET_VAL_IF(media_format_get_audio_info(source->media_format, &mime_type, &channels, &samplerate, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
- NULL, "failed to media_format_get_audio_info()");
-
- format = __get_gst_audio_format(mime_type);
- RET_VAL_IF(format == GST_AUDIO_FORMAT_ENCODED || format == GST_AUDIO_FORMAT_UNKNOWN, NULL, "could not get valid GstAudioFormat for PCM");
-
- gst_audio_info_set_format(&info, format, samplerate, channels, NULL);
- caps = gst_audio_info_to_caps(&info);
-
- } else if (source->media_types == MEDIA_TYPE_VIDEO) {
- int width;
- int height;
- int framerate;
-
- RET_VAL_IF(media_format_get_video_info(source->media_format, &mime_type, &width, &height, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
- NULL, "failed to media_format_get_video_info()");
- RET_VAL_IF(media_format_get_video_frame_rate(source->media_format, &framerate) != MEDIA_FORMAT_ERROR_NONE,
- NULL, "failed to media_format_get_video_frame_rate()");
- caps = gst_caps_new_simple(MEDIA_TYPE_VIDEO_RAW,
- "format", G_TYPE_STRING, __get_video_format_name(mime_type, source->zerocopy_enabled),
- "framerate", GST_TYPE_FRACTION, framerate, 1,
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
-
- } else {
- LOG_ERROR_IF_REACHED("source->media_types(0x%x)", source->media_types);
- }
-
- return caps;
-}
-
static GstCaps *__make_default_raw_caps(webrtc_gst_slot_s *source, webrtc_ini_s *ini)
{
GstCaps *caps = NULL;
case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST:
case WEBRTC_MEDIA_SOURCE_TYPE_MIC:
case WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_AUDIO:
- format = __get_gst_audio_raw_format_from_string(ini_source->a_raw_format);
+ format = _get_gst_audio_raw_format_from_string(ini_source->a_raw_format);
RET_VAL_IF(format == GST_AUDIO_FORMAT_UNKNOWN, NULL, "not supported raw format");
gst_audio_info_set_format(&info, format, ini_source->a_samplerate, ini_source->a_channels, NULL);
case WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET: {
RET_VAL_IF(source->media_format == NULL, NULL, "media_format is NULL");
- caps = __make_mediapacketsrc_raw_caps_from_media_format(source);
+ caps = _make_mediapacketsrc_raw_caps_from_media_format(source);
break;
}
default:
return caps;
}
-static GstCaps *__get_caps_from_encoded_audio_media_type(const char *media_type, int channels, int samplerate)
-{
- RET_VAL_IF(media_type == NULL, NULL, "media_type is NULL");
-
- if (!strcmp(media_type, MEDIA_TYPE_AUDIO_MULAW) ||
- !strcmp(media_type, MEDIA_TYPE_AUDIO_ALAW))
- return gst_caps_new_simple(media_type,
- "rate", G_TYPE_INT, samplerate,
- "channels", G_TYPE_INT, channels,
- NULL);
-
- if (!strcmp(media_type, MEDIA_TYPE_AUDIO_OPUS) ||
- !strcmp(media_type, MEDIA_TYPE_AUDIO_VORBIS))
- return gst_caps_new_simple(media_type, NULL, NULL);
-
- LOG_ERROR_IF_REACHED("invalid media_type(%s)", media_type);
-
- return NULL;
-}
-
-static GstCaps *__get_caps_from_encoded_video_media_type(const char *media_type, int width, int height)
-{
- RET_VAL_IF(media_type == NULL, NULL, "media_type is NULL");
-
- if (!strcmp(media_type, MEDIA_TYPE_VIDEO_H264) ||
- !strcmp(media_type, MEDIA_TYPE_VIDEO_H265))
- return gst_caps_new_simple(media_type,
- "stream-format", G_TYPE_STRING, "byte-stream",
- "alignment", G_TYPE_STRING, "au",
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
-
- if (!strcmp(media_type, MEDIA_TYPE_VIDEO_VP8) ||
- !strcmp(media_type, MEDIA_TYPE_VIDEO_VP9) ||
- !strcmp(media_type, MEDIA_TYPE_VIDEO_THEORA))
- return gst_caps_new_simple(media_type, NULL, NULL); /* NOTE: need to verify these codecs */
-
- if (!strcmp(media_type, MEDIA_TYPE_VIDEO_JPEG))
- return gst_caps_new_simple(media_type,
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
-
- LOG_ERROR_IF_REACHED("invalid media_type(%s)", media_type);
-
- return NULL;
-}
-
/* Use g_free() to free the media_type parameter. */
static GstCaps *__make_default_encoded_caps(webrtc_gst_slot_s *source, webrtc_ini_s *ini, gchar **media_type)
{
case WEBRTC_MEDIA_SOURCE_TYPE_CAMERA:
case WEBRTC_MEDIA_SOURCE_TYPE_SCREEN:
case WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_VIDEO:
- _media_type = __get_video_media_type(source->av[AV_IDX_VIDEO].codec);
+ _media_type = _get_video_media_type(source->av[AV_IDX_VIDEO].codec);
RET_VAL_IF(_media_type == NULL, NULL, "_media_type is NULL");
- caps = __get_caps_from_encoded_video_media_type(_media_type, source->video_info.width, source->video_info.height);
+ caps = _get_caps_from_encoded_video_media_type(_media_type, source->video_info.width, source->video_info.height);
break;
case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST:
case WEBRTC_MEDIA_SOURCE_TYPE_MIC:
case WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_AUDIO:
- _media_type = __get_audio_media_type(source->av[AV_IDX_AUDIO].codec);
+ _media_type = _get_audio_media_type(source->av[AV_IDX_AUDIO].codec);
RET_VAL_IF(_media_type == NULL, NULL, "_media_type is NULL");
- caps = __get_caps_from_encoded_audio_media_type(_media_type, ini_source->a_channels, ini_source->a_samplerate);
+ caps = _get_caps_from_encoded_audio_media_type(_media_type, ini_source->a_channels, ini_source->a_samplerate);
break;
case WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET:
if (source->media_types == MEDIA_TYPE_AUDIO) {
- _media_type = __get_audio_media_type(ini_source->a_codecs[0]);
+ _media_type = _get_audio_media_type(ini_source->a_codecs[0]);
RET_VAL_IF(_media_type == NULL, NULL, "_media_type is NULL");
- caps = __get_caps_from_encoded_audio_media_type(_media_type, ini_source->a_channels, ini_source->a_samplerate);
+ caps = _get_caps_from_encoded_audio_media_type(_media_type, ini_source->a_channels, ini_source->a_samplerate);
} else if (source->media_types == MEDIA_TYPE_VIDEO) {
- _media_type = __get_video_media_type(ini_source->v_codecs[0]);
+ _media_type = _get_video_media_type(ini_source->v_codecs[0]);
RET_VAL_IF(_media_type == NULL, NULL, "_media_type is NULL");
- caps = __get_caps_from_encoded_video_media_type(_media_type, ini_source->v_width, ini_source->v_height);
+ caps = _get_caps_from_encoded_video_media_type(_media_type, ini_source->v_width, ini_source->v_height);
} else {
LOG_ERROR_IF_REACHED("source->media_types(0x%x)", source->media_types);
return NULL;
NULL);
}
-//LCOV_EXCL_START
-/* Use g_free() to free the media_type parameter. */
-static GstCaps *__make_encoded_caps_from_media_format(webrtc_gst_slot_s *source, gchar **media_type)
-{
- GstCaps *caps;
- const char *_media_type;
-
- RET_VAL_IF(source == NULL, NULL, "source is NULL");
- RET_VAL_IF(source->media_format == NULL, NULL, "media_format is NULL");
-
- switch (source->type) {
- case WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET: {
- media_format_mimetype_e mime_type;
-
- if (source->media_types == MEDIA_TYPE_AUDIO) {
- int channels;
- int samplerate;
-
- RET_VAL_IF(media_format_get_audio_info(source->media_format, &mime_type, &channels, &samplerate, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
- NULL, "failed to media_format_get_audio_info()");
-
- _media_type = __get_audio_media_type(__get_audio_format_name(mime_type));
- RET_VAL_IF(_media_type == NULL, NULL, "media_type is NULL");
-
- caps = __get_caps_from_encoded_audio_media_type(_media_type, channels, samplerate);
-
- } else if (source->media_types == MEDIA_TYPE_VIDEO) {
- int width;
- int height;
- int framerate;
-
- RET_VAL_IF(media_format_get_video_info(source->media_format, &mime_type, &width, &height, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
- NULL, "failed to media_format_get_video_info()");
- RET_VAL_IF(media_format_get_video_frame_rate(source->media_format, &framerate) != MEDIA_FORMAT_ERROR_NONE,
- NULL, "failed to media_format_get_video_frame_rate()");
-
- _media_type = __get_video_media_type(__get_video_format_name(mime_type, source->zerocopy_enabled));
- RET_VAL_IF(_media_type == NULL, NULL, "media_type is NULL");
-
- caps = __get_caps_from_encoded_video_media_type(_media_type, width, height);
-
- } else {
- LOG_ERROR_IF_REACHED("source->media_types(0x%x)", source->media_types);
- return NULL;
- }
- break;
- }
-
- default:
- LOG_ERROR_IF_REACHED("type(%d)", source->type);
- return NULL;
- }
-
- if (media_type)
- *media_type = g_strdup(_media_type);
-
- return caps;
-}
-//LCOV_EXCL_STOP
-
-static GstCaps *__make_rtp_caps(const gchar *media_type, unsigned int payload_type, webrtc_gst_slot_s *source)
-{
- GstCaps *caps;
- bool is_audio;
-
- RET_VAL_IF(media_type == NULL, NULL, "media_type is NULL");
-
- is_audio = !(g_strrstr(media_type, "video") || g_strrstr(media_type, "image"));
-
- caps = gst_caps_new_simple("application/x-rtp",
- "media", G_TYPE_STRING, GET_MEDIA_TYPE_NAME(is_audio),
- "payload", G_TYPE_INT, payload_type,
- NULL);
-
- if (is_audio && source->av[AV_IDX_AUDIO].inbandfec)
- /* NOTE: set it with string type due to the parsing logic in gstwebrtcbin.c */
- gst_structure_set(gst_caps_get_structure(caps, 0), "useinbandfec", G_TYPE_STRING, "1", NULL);
-
- PRINT_CAPS(caps, "RTP");
-
- return caps;
-}
-
-static bool __is_hw_encoder_used(webrtc_s *webrtc, webrtc_media_source_type_e source_type, media_type_e media_type)
-{
- const ini_item_media_source_s *ini_source;
-
- RET_VAL_IF(webrtc == NULL, NULL, "webrtc is NULL");
-
- ini_source = _ini_get_source_by_type(&webrtc->ini, source_type);
-
- switch (media_type) {
- case MEDIA_TYPE_AUDIO:
- if (ini_source && ini_source->a_hw_encoder_element)
- return true;
- else if (webrtc->ini.media_source.a_hw_encoder_element)
- return true;
- break;
- case MEDIA_TYPE_VIDEO:
- if (ini_source && ini_source->v_hw_encoder_element)
- return true;
- else if (webrtc->ini.media_source.v_hw_encoder_element)
- return true;
- break;
- default:
- LOG_ERROR_IF_REACHED("type(%d)", media_type);
- break;
- }
-
- LOG_DEBUG("no hw encoder is used, source_type(%d), media_type(%d)", source_type, media_type);
- return false;
-}
-
//LCOV_EXCL_START
static GstElement *__get_hw_encoder_element(webrtc_s *webrtc, webrtc_gst_slot_s *source)
{
}
//LCOV_EXCL_STOP
-static bool __is_encoded_format_supported(webrtc_media_source_type_e type, webrtc_ini_s *ini)
-{
- const ini_item_media_source_s *ini_source;
-
- RET_VAL_IF(ini == NULL, NULL, "ini is NULL");
-
- ini_source = _ini_get_source_by_type(ini, type);
- if (ini_source == NULL)
- ini_source = &ini->media_source;
-
- LOG_DEBUG("type[%d], v_encoded_fmt_support[%d]", type, ini_source->v_encoded_fmt_support);
- return ini_source->v_encoded_fmt_support;
-}
-
static int __get_fixed_payload_type(const gchar *media_type)
{
RET_VAL_IF(media_type == NULL, -1, "media_type is NULL");
return -1;
}
-static unsigned int __get_available_payload_type(webrtc_s *webrtc)
-{
- int bitmask = 0x1;
- int count = 0;
-
- RET_VAL_IF(webrtc == NULL, 0, "webrtc is NULL");
-
- while (count++ < PAYLOAD_TYPE_BITS) {
- if (webrtc->payload_types & bitmask) {
- bitmask <<= 1;
- continue;
- }
- webrtc->payload_types |= bitmask;
- LOG_DEBUG("found available payload type[%d]", count + 95);
- return count + (MIN_DYNAMIC_PAYLOAD_TYPE - 1); /* 96 ~ 127 */
- }
-
- LOG_ERROR("could not assign payload type");
- return 0;
-}
-
static void __return_payload_type(webrtc_s *webrtc, unsigned int payload_type)
{
int i;
if ((payload_type = __get_fixed_payload_type(media_type)) != -1)
goto out;
- if ((payload_type = __get_available_payload_type(webrtc)) == 0)
+ if ((payload_type = _get_available_payload_type(webrtc)) == 0)
return WEBRTC_ERROR_INVALID_OPERATION;
out:
return WEBRTC_ERROR_NONE;
}
-static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool need_capsfilter, GList **element_list, bool is_audio)
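+/* Pad probe for payloaded data: drops buffers while the slot's pause flag is set; drop logging is rate-limited to every 10th buffer. */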
+static GstPadProbeReturn __payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
- GstElement *encoder = NULL;
- GstElement *payloader;
- GstElement *queue;
- GstElement *capsfilter2;
- GstElement *videocrop;
- GstCaps *sink_caps;
- element_info_s elem_info;
- gchar *media_type = NULL;
- int idx;
+ probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+ GstBuffer *buffer;
+ static unsigned int counts[AV_IDX_MAX] = { 0 };
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(element_list == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "element_list is NULL");
+ RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
+ RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
+ RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
- idx = GET_AV_IDX(is_audio);
+ buffer = gst_pad_probe_info_get_buffer(info);
- if (need_capsfilter) {
- GstElement *capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_FIRST_CAPSFILTER);
- if (!capsfilter)
- return WEBRTC_ERROR_INVALID_OPERATION;
- APPEND_ELEMENT(*element_list, capsfilter);
+ if (probe_data->source->av[probe_data->av_idx].pause) {
+ if (counts[probe_data->av_idx]++ % 10 == 0)
+ LOG_DEBUG("paused, drop [%s] buffer[%p] of pad[%p], source[%p], count[%u]",
+ GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO),
+ buffer, pad, probe_data->source, counts[probe_data->av_idx]);
+ return GST_PAD_PROBE_DROP;
+ }
- if (__is_encoded_format_supported(source->type, &webrtc->ini)) {
- if ((sink_caps = __make_default_encoded_caps(source, &webrtc->ini, NULL))) {
- PRINT_CAPS(sink_caps, "capsfilter");
- g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
- source->av[idx].render.appsrc_caps = sink_caps;
- if (source->av[idx].render.appsrc)
- g_object_set(G_OBJECT(source->av[idx].render.appsrc), "caps", sink_caps, NULL);
- }
+ if (counts[probe_data->av_idx] > 0) {
+ counts[probe_data->av_idx] = 0;
+ LOG_DEBUG("play again, [%s] buffer[%p] of pad[%p], source[%p]",
+ GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO), buffer, pad, probe_data->source);
+ }
- source->av[idx].render.need_decoding = true;
- __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+ return GST_PAD_PROBE_OK;
+}
- goto skip_encoder;
- }
-
- if ((sink_caps = __make_default_raw_caps(source, &webrtc->ini))) {
- PRINT_CAPS(sink_caps, "capsfilter");
- g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
- source->av[idx].render.appsrc_caps = sink_caps;
- if (source->av[idx].render.appsrc)
- g_object_set(G_OBJECT(source->av[idx].render.appsrc), "caps", sink_caps, NULL);
- }
-
- __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
- }
-
- if (source->type == WEBRTC_MEDIA_SOURCE_TYPE_SCREEN && !source->zerocopy_enabled) {
- if (!(videocrop = _create_element(DEFAULT_ELEMENT_VIDEOCROP, ELEMENT_NAME_VIDEOCROP)))
- goto error;
- APPEND_ELEMENT(*element_list, videocrop);
- }
-
- encoder = __prepare_encoder(webrtc, source, is_audio);
- if (encoder == NULL) {
- __remove_probe_from_pad_for_render(source, idx);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- APPEND_ELEMENT(*element_list, encoder);
-
- source->av[idx].render.need_decoding = false;
-
-skip_encoder:
- CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
- __make_default_encoded_caps(source, &webrtc->ini, &media_type),
- NULL,
- NULL,
- payloader);
- if (payloader == NULL)
- goto error;
- APPEND_ELEMENT(*element_list, payloader);
-
- if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL)))
- goto error;
- APPEND_ELEMENT(*element_list, queue);
-
- if (!(capsfilter2 = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER)))
- goto error;
- APPEND_ELEMENT(*element_list, capsfilter2);
-
- if(__set_payload_type(webrtc, source, idx, media_type) != WEBRTC_ERROR_NONE)
- goto error;
-
- if ((sink_caps = __make_rtp_caps(media_type, source->av[idx].pt, source))) {
- g_object_set(G_OBJECT(capsfilter2), "caps", sink_caps, NULL);
- gst_caps_unref(sink_caps);
- }
-
- g_free(media_type);
-
- return WEBRTC_ERROR_NONE;
-
-error:
- __remove_probe_from_pad_for_render(source, idx);
- g_free(media_type);
-
- return WEBRTC_ERROR_INVALID_OPERATION;
-}
-
-//LCOV_EXCL_START
-static int __create_rest_of_elements_for_encoded_format(webrtc_s *webrtc, webrtc_gst_slot_s *source,
- GList **element_list)
-{
- GstCaps *sink_caps;
- element_info_s elem_info;
- gchar *media_type = NULL;
- GstElement *payloader;
- GstElement *queue;
- GstElement *capsfilter;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(element_list == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "element_list is NULL");
-
- CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
- __make_encoded_caps_from_media_format(source, &media_type),
- NULL,
- NULL,
- payloader);
- if (!payloader)
- goto error;
- APPEND_ELEMENT(*element_list, payloader);
-
- if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL)))
- goto error;
- APPEND_ELEMENT(*element_list, queue);
-
- if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER)))
- goto error;
- APPEND_ELEMENT(*element_list, capsfilter);
-
- if(__set_payload_type(webrtc, source, GET_AV_IDX_BY_TYPE(source->media_types), NULL) != WEBRTC_ERROR_NONE)
- goto error;
-
- if ((sink_caps = __make_rtp_caps(media_type, source->av[GET_AV_IDX_BY_TYPE(source->media_types)].pt, source))) {
- g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
- gst_caps_unref(sink_caps);
- }
-
- g_free(media_type);
- return WEBRTC_ERROR_NONE;
-
-error:
- g_free(media_type);
- return WEBRTC_ERROR_INVALID_OPERATION;
-}
-//LCOV_EXCL_STOP
-
-static const char *__get_default_element(int type)
-{
- const char *element = NULL;
-
- if (type == WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST)
- element = DEFAULT_ELEMENT_AUDIOTESTSRC;
- else if (type == WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST)
- element = DEFAULT_ELEMENT_VIDEOTESTSRC;
- else if (type == WEBRTC_MEDIA_SOURCE_TYPE_MIC)
- element = DEFAULT_ELEMENT_AUDIOSRC;
- else if (type == WEBRTC_MEDIA_SOURCE_TYPE_CAMERA)
- element = DEFAULT_ELEMENT_CAMERASRC;
- else if (type == WEBRTC_MEDIA_SOURCE_TYPE_SCREEN)
- element = DEFAULT_ELEMENT_SCREENSRC;
- else if (type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET)
- element = DEFAULT_ELEMENT_APPSRC;
- else if (type == WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_AUDIO)
- element = DEFAULT_ELEMENT_AUDIOTESTSRC;
- else if (type == WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_VIDEO)
- element = DEFAULT_ELEMENT_VIDEOTESTSRC;
- else
- LOG_ERROR_IF_REACHED("type(%d)", type);
-
- LOG_DEBUG("type(%d) -> element[%s]", type, element);
-
- return element;
-}
-
-static const char *__get_source_element(webrtc_s *webrtc, int type)
-{
- const ini_item_media_source_s *source;
-
- RET_VAL_IF(webrtc == NULL, NULL, "webrtc is NULL");
-
- source = _ini_get_source_by_type(&webrtc->ini, type);
- if (source == NULL || source->source_element == NULL)
- return __get_default_element(type);
-
- return source->source_element;
-}
-
-static GstPadProbeReturn __payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
-{
- probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
- GstBuffer *buffer;
- static unsigned int counts[AV_IDX_MAX] = { 0 };
-
- RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
- RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
- RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
-
- buffer = gst_pad_probe_info_get_buffer(info);
-
- if (probe_data->source->av[probe_data->av_idx].pause) {
- if (counts[probe_data->av_idx]++ % 10 == 0)
- LOG_DEBUG("paused, drop [%s] buffer[%p] of pad[%p], source[%p], count[%u]",
- GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO),
- buffer, pad, probe_data->source, counts[probe_data->av_idx]);
- return GST_PAD_PROBE_DROP;
- }
-
- if (counts[probe_data->av_idx] > 0) {
- counts[probe_data->av_idx] = 0;
- LOG_DEBUG("play again, [%s] buffer[%p] of pad[%p], source[%p]",
- GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO), buffer, pad, probe_data->source);
- }
-
- return GST_PAD_PROBE_OK;
-}
-
-static void __add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
-{
- probe_userdata_s *probe_userdata;
+static void __add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+{
+ probe_userdata_s *probe_userdata;
RET_IF(source == NULL, "source is NULL");
RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
source->av[idx].src_pad = NULL;
}
-//LCOV_EXCL_START
-static bool __remove_elements_from_bin(GstBin *bin, GList *element_list)
-{
- GstElement *element;
- GList *list;
- int count = 0;
-
- RET_VAL_IF(bin == NULL, false, "bin is NULL");
- RET_VAL_IF(element_list == NULL, false, "element_list is NULL");
-
- for (list = element_list; list; list = g_list_next(list)) {
- element = (GstElement *)list->data;
- RET_VAL_IF(!gst_bin_remove(bin, element), false,
- "failed to gst_bin_remove(), bin[%s], element[%s]", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(element));
- count++;
- }
-
- LOG_DEBUG("%d elements are removed from bin[%s]", count, GST_ELEMENT_NAME(bin));
-
- return true;
-}
-//LCOV_EXCL_STOP
-
-static void __foreach_unref_object_cb(gpointer data, gpointer user_data)
-{
- GstElement *element = (GstElement *)data;
-
- LOG_DEBUG("%s is unreferenced", GST_ELEMENT_NAME(element));
- SAFE_GST_OBJECT_UNREF(element);
-}
-
-static bool __add_elements_to_bin(GstBin *bin, GList *element_list)
-{
- GstElement *element;
- GList *list;
- GList *added_list = NULL;
-
- RET_VAL_IF(bin == NULL, false, "bin is NULL");
- RET_VAL_IF(element_list == NULL, false, "element_list is NULL");
-
- for (list = element_list; list; list = list->next) {
- element = (GstElement *)list->data;
- if (!gst_bin_add(bin, element)) {
- LOG_ERROR("failed to gst_bin_add(), bin[%s], element[%s]", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(element));
- __remove_elements_from_bin(bin, added_list);
- SAFE_G_LIST_FREE(added_list);
- g_list_foreach(list, __foreach_unref_object_cb, NULL); /* rest of elements on the list should be unreferenced */
- return false;
- }
- APPEND_ELEMENT(added_list, element);
- }
-
- LOG_DEBUG("%d elements are added to bin[%s]", g_list_length(added_list), GST_ELEMENT_NAME(bin));
-
- SAFE_G_LIST_FREE(added_list);
-
- return true;
-}
-
-static bool __link_elements(GList *element_list)
-{
- GstElement *curr;
- GstElement *prev = NULL;
- GList *list;
- int count = 0;
-
- RET_VAL_IF(element_list == NULL, false, "element_list is NULL");
-
- for (list = element_list; list; list = g_list_next(list)) {
- if (!g_list_previous(list))
- continue;
- prev = (GstElement *)(g_list_previous(list)->data);
- curr = (GstElement *)list->data;
- RET_VAL_IF(!gst_element_link(prev, curr), false,
- "failed to gst_element_link(), [%s] - [%s]", GST_ELEMENT_NAME(prev), GST_ELEMENT_NAME(curr));
- LOG_WARNING("[%s] - [%s]", GST_ELEMENT_NAME(prev), GST_ELEMENT_NAME(curr));
- count++;
- }
-
- LOG_DEBUG("%d elements are linked", count);
-
- return true;
-}
-
static bool __sync_elements_state_with_parent(GList *element_list)
{
GList *list;
return true;
}
-static GstElement *__find_element_in_bin(GstBin *bin, const gchar *name)
-{
- GValue value = G_VALUE_INIT;
- GstElement *element;
- GstIterator *bin_iterator;
-
- RET_VAL_IF(bin == NULL, NULL, "bin is NULL");
- RET_VAL_IF(name == NULL, NULL, "name is NULL");
- RET_VAL_IF(!(bin_iterator = gst_bin_iterate_sorted(bin)), NULL, "bin_iterator is NULL");
-
- while (GST_ITERATOR_OK == gst_iterator_next(bin_iterator, &value)) {
- element = GST_ELEMENT(g_value_get_object(&value));
-
- if (g_strrstr(GST_ELEMENT_NAME(element), name)) {
- LOG_DEBUG("found element by name [%s]", GST_ELEMENT_NAME(element));
- g_value_unset(&value);
- gst_iterator_free(bin_iterator);
- return element;
- }
-
- g_value_reset(&value);
- }
- g_value_unset(&value);
- gst_iterator_free(bin_iterator);
-
- return NULL;
-}
-
static bool __set_default_video_info(webrtc_gst_slot_s *source, const ini_item_media_source_s *ini_source)
{
RET_VAL_IF(source == NULL, false, "source is NULL");
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
source->media_types = MEDIA_TYPE_VIDEO;
- source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types);
source->av[AV_IDX_VIDEO].raw_format = g_strdup(ini_source->v_raw_format);
source->av[AV_IDX_VIDEO].codec = ini_source->v_codecs[0];
if (!__set_default_video_info(source, ini_source))
return WEBRTC_ERROR_INVALID_OPERATION;
- if (!(screensrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_SCREEN), ELEMENT_NAME_SCREENSRC)))
+ if (!(screensrc = _create_element(_get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_SCREEN), ELEMENT_NAME_SCREENSRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
APPEND_ELEMENT(switch_src_list, screensrc);
"pattern", 2, /* black */
NULL);
- if (!__add_elements_to_bin(source->bin, switch_src_list)) {
+ if (!_add_elements_to_bin(source->bin, switch_src_list)) {
SAFE_G_LIST_FREE(switch_src_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
return WEBRTC_ERROR_NONE;
exit_with_remove_from_bin:
- __remove_elements_from_bin(source->bin, switch_src_list);
+ _remove_elements_from_bin(source->bin, switch_src_list);
SAFE_G_LIST_FREE(switch_src_list);
return WEBRTC_ERROR_INVALID_OPERATION;
exit:
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
RET_VAL_IF(videotestsrc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "videotestsrc is NULL");
- if (!g_object_class_find_property(G_OBJECT_GET_CLASS(videotestsrc), "pattern")) {
- LOG_ERROR("there is no pattern property");
+ if (!g_object_class_find_property(G_OBJECT_GET_CLASS(videotestsrc), "pattern")) {
+ LOG_ERROR("there is no pattern property");
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ /* FIXME: get original value from ini file */
+ g_object_set(G_OBJECT(videotestsrc), "pattern", mute ? 2 : 18, NULL); /* 2: black 18: ball */
+
+ return WEBRTC_ERROR_NONE;
+}
+
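+/* Mutes or unmutes the camera source by adding/removing a buffer probe (__camerasrc_probe_cb) on its src pad. */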
+static int __mute_by_manipulating_buffer(webrtc_gst_slot_s *source, GstElement *camerasrc, bool mute)
+{
+ g_autoptr(GstPad) src_pad = NULL;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(camerasrc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "camerasrc is NULL");
+
+ src_pad = gst_element_get_static_pad(camerasrc, "src");
+ RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_OPERATION, "src_pad is NULL");
+
+ if (mute && source->camerasrc_probe_id == 0) {
+ source->camerasrc_probe_id = gst_pad_add_probe(src_pad, GST_PAD_PROBE_TYPE_BUFFER, __camerasrc_probe_cb, source, NULL);
+ if (source->camerasrc_probe_id == 0) {
+ LOG_ERROR("failed to gst_pad_add_probe()");
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ } else if (!mute && source->camerasrc_probe_id != 0) {
+ gst_pad_remove_probe(src_pad, source->camerasrc_probe_id);
+ source->camerasrc_probe_id = 0;
+
+ } else {
+ LOG_ERROR("failed to change mute to (%d)", mute);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ return WEBRTC_ERROR_NONE;
+}
+
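+/* Mutes or unmutes video by switching the videoswitch's active pad (sink_1 while muted, sink_0 otherwise). */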
+static int __mute_by_switching_video(webrtc_gst_slot_s *source, GstElement *videoswitch, bool mute)
+{
+ GstPad *sink_pad = NULL;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(videoswitch == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "videoswitch is NULL");
+
+ sink_pad = gst_element_get_static_pad(videoswitch, mute ? "sink_1" : "sink_0");
+ RET_VAL_IF(sink_pad == NULL, WEBRTC_ERROR_INVALID_OPERATION, "sink_pad is NULL");
+
+ g_object_set(G_OBJECT(videoswitch), "active-pad", sink_pad, NULL);
+ gst_object_unref(sink_pad);
+
+ return WEBRTC_ERROR_NONE;
+}
+//LCOV_EXCL_STOP
+
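+/* Builds the downstream chain of a source slot: optional capsfilter, videocrop (non-zerocopy screen source), encoder (skipped when the encoded format is supported), RTP payloader, queue and RTP capsfilter. */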
+int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool need_capsfilter, GList **element_list, bool is_audio)
+{
+ GstElement *encoder = NULL;
+ GstElement *payloader;
+ GstElement *queue;
+ GstElement *capsfilter2;
+ GstElement *videocrop;
+ GstCaps *sink_caps;
+ element_info_s elem_info;
+ gchar *media_type = NULL;
+ int payload_type;
+ int idx;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(element_list == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "element_list is NULL");
+
+ idx = GET_AV_IDX(is_audio);
+
+ if (need_capsfilter) {
+ GstElement *capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_FIRST_CAPSFILTER);
+ if (!capsfilter)
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ APPEND_ELEMENT(*element_list, capsfilter);
+
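+ /* the source already provides encoded frames: set encoded caps, mark the render path for decoding and skip the encoder */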
+ if (_is_encoded_format_supported(source->type, &webrtc->ini)) {
+ if ((sink_caps = __make_default_encoded_caps(source, &webrtc->ini, NULL))) {
+ PRINT_CAPS(sink_caps, "capsfilter");
+ g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
+ source->av[idx].render.appsrc_caps = sink_caps;
+ if (source->av[idx].render.appsrc)
+ g_object_set(G_OBJECT(source->av[idx].render.appsrc), "caps", sink_caps, NULL);
+ }
+
+ source->av[idx].render.need_decoding = true;
+ __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+
+ goto skip_encoder;
+ }
+
+ if ((sink_caps = __make_default_raw_caps(source, &webrtc->ini))) {
+ PRINT_CAPS(sink_caps, "capsfilter");
+ g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
+ source->av[idx].render.appsrc_caps = sink_caps;
+ if (source->av[idx].render.appsrc)
+ g_object_set(G_OBJECT(source->av[idx].render.appsrc), "caps", sink_caps, NULL);
+ }
+
+ __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+ }
+
+ if (source->type == WEBRTC_MEDIA_SOURCE_TYPE_SCREEN && !source->zerocopy_enabled) {
+ if (!(videocrop = _create_element(DEFAULT_ELEMENT_VIDEOCROP, ELEMENT_NAME_VIDEOCROP)))
+ goto error;
+ APPEND_ELEMENT(*element_list, videocrop);
+ }
+
+ encoder = __prepare_encoder(webrtc, source, is_audio);
+ if (encoder == NULL) {
+ __remove_probe_from_pad_for_render(source, idx);
return WEBRTC_ERROR_INVALID_OPERATION;
}
+ APPEND_ELEMENT(*element_list, encoder);
- /* FIXME: get original value from ini file */
- g_object_set(G_OBJECT(videotestsrc), "pattern", mute ? 2 : 18, NULL); /* 2: black 18: ball */
-
- return WEBRTC_ERROR_NONE;
-}
+ source->av[idx].render.need_decoding = false;
-static int __mute_by_manipulating_buffer(webrtc_gst_slot_s *source, GstElement *camerasrc, bool mute)
-{
- g_autoptr(GstPad) src_pad = NULL;
+skip_encoder:
+ CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
+ __make_default_encoded_caps(source, &webrtc->ini, &media_type),
+ NULL,
+ NULL,
+ payloader);
+ if (payloader == NULL)
+ goto error;
+ APPEND_ELEMENT(*element_list, payloader);
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(camerasrc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "camerasrc is NULL");
+ if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL)))
+ goto error;
+ APPEND_ELEMENT(*element_list, queue);
- src_pad = gst_element_get_static_pad(camerasrc, "src");
- RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_OPERATION, "src_pad is NULL");
+ if (!(capsfilter2 = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER)))
+ goto error;
+ APPEND_ELEMENT(*element_list, capsfilter2);
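+ /* prefer a fixed payload type for this media type; otherwise allocate a dynamic one (96 ~ 127) */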
- if (mute && source->camerasrc_probe_id == 0) {
- source->camerasrc_probe_id = gst_pad_add_probe(src_pad, GST_PAD_PROBE_TYPE_BUFFER, __camerasrc_probe_cb, source, NULL);
- if (source->camerasrc_probe_id == 0) {
- LOG_ERROR("failed to gst_pad_add_probe()");
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
+ if ((payload_type = __get_fixed_payload_type(media_type)) == -1) {
+ if ((payload_type = _get_available_payload_type(webrtc)) == 0)
+ goto error;
+ }
- } else if (!mute && source->camerasrc_probe_id != 0) {
- gst_pad_remove_probe(src_pad, source->camerasrc_probe_id);
- source->camerasrc_probe_id = 0;
+ source->av[idx].pt = payload_type;
- } else {
- LOG_ERROR("failed to change mute to (%d)", mute);
- return WEBRTC_ERROR_INVALID_OPERATION;
+ if ((sink_caps = _make_rtp_caps(media_type, payload_type, source))) {
+ g_object_set(G_OBJECT(capsfilter2), "caps", sink_caps, NULL);
+ gst_caps_unref(sink_caps);
}
- return WEBRTC_ERROR_NONE;
-}
-
-static int __mute_by_switching_video(webrtc_gst_slot_s *source, GstElement *videoswitch, bool mute)
-{
- GstPad *sink_pad = NULL;
-
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(videoswitch == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "videoswitch is NULL");
+ g_free(media_type);
- sink_pad = gst_element_get_static_pad(videoswitch, mute ? "sink_1" : "sink_0");
- RET_VAL_IF(sink_pad == NULL, WEBRTC_ERROR_INVALID_OPERATION, "sink_pad is NULL");
+ return WEBRTC_ERROR_NONE;
- g_object_set(G_OBJECT(videoswitch), "active-pad", sink_pad, NULL);
- gst_object_unref(sink_pad);
+error:
+ __remove_probe_from_pad_for_render(source, idx);
+ g_free(media_type);
- return WEBRTC_ERROR_NONE;
+ return WEBRTC_ERROR_INVALID_OPERATION;
}
-//LCOV_EXCL_STOP
static int __complete_rest_of_videosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
{
if (__create_rest_of_elements(webrtc, source, true, &element_list, false) != WEBRTC_ERROR_NONE)
goto exit;
- if (!__add_elements_to_bin(source->bin, element_list)) {
+ if (!_add_elements_to_bin(source->bin, element_list)) {
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
}
PREPEND_ELEMENT(element_list, videoswitch ? videoswitch : videosrc);
- if (!__link_elements(element_list))
+ if (!_link_elements(element_list))
goto exit_with_remove_from_bin;
if (!(capsfilter = gst_bin_get_by_name(source->bin, ELEMENT_NAME_RTP_CAPSFILTER)))
return WEBRTC_ERROR_NONE;
exit_with_remove_from_bin:
- __remove_elements_from_bin(source->bin, element_list);
+ _remove_elements_from_bin(source->bin, element_list);
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
exit:
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
source->media_types = MEDIA_TYPE_VIDEO;
- source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types);
source->av[AV_IDX_VIDEO].raw_format = g_strdup(ini_source->v_raw_format);
source->av[AV_IDX_VIDEO].codec = ini_source->v_codecs[0];
if (webrtc->ini.resource_acquisition.camera)
webrtc->resource.need_to_acquire[MM_RESOURCE_MANAGER_RES_TYPE_CAMERA] = true;
#endif
- if (!(camerasrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CAMERA), ELEMENT_NAME_VIDEO_SRC)))
+ if (!(camerasrc = _create_element(_get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CAMERA), ELEMENT_NAME_VIDEO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
_gst_set_element_properties(camerasrc, ini_source->source_element_properties);
if (__create_rest_of_elements(webrtc, source, true, &element_list, true) != WEBRTC_ERROR_NONE)
goto exit;
- if (!__add_elements_to_bin(source->bin, element_list)) {
+ if (!_add_elements_to_bin(source->bin, element_list)) {
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
}
PREPEND_ELEMENT(element_list, audiosrc);
- if (!__link_elements(element_list))
+ if (!_link_elements(element_list))
goto exit_with_remove_from_bin;
if (!(capsfilter = gst_bin_get_by_name(source->bin, ELEMENT_NAME_RTP_CAPSFILTER)))
return WEBRTC_ERROR_NONE;
exit_with_remove_from_bin:
- __remove_elements_from_bin(source->bin, element_list);
+ _remove_elements_from_bin(source->bin, element_list);
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
exit:
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
source->media_types = MEDIA_TYPE_AUDIO;
- source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types);
source->av[AV_IDX_AUDIO].raw_format = g_strdup(ini_source->a_raw_format);
source->av[AV_IDX_AUDIO].codec = ini_source->a_codecs[0];
- source_factory_name = __get_source_element(webrtc, use_mic ? WEBRTC_MEDIA_SOURCE_TYPE_MIC : WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST);
+ source_factory_name = _get_source_element(webrtc, use_mic ? WEBRTC_MEDIA_SOURCE_TYPE_MIC : WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST);
if (!(audiosrc = _create_element(source_factory_name, use_mic ? ELEMENT_NAME_MIC_SRC : ELEMENT_NAME_AUDIO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
source->media_types = MEDIA_TYPE_VIDEO;
- source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types);
source->av[AV_IDX_VIDEO].raw_format = g_strdup(ini_source->v_raw_format);
source->av[AV_IDX_VIDEO].codec = ini_source->v_codecs[0];
if (!__set_default_video_info(source, ini_source))
return WEBRTC_ERROR_INVALID_OPERATION;
- if (!(videotestsrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), ELEMENT_NAME_VIDEO_SRC)))
+ if (!(videotestsrc = _create_element(_get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), ELEMENT_NAME_VIDEO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
_gst_set_element_properties(videotestsrc, ini_source->source_element_properties);
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
source->media_types = MEDIA_TYPE_VIDEO;
- source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types);
source->av[AV_IDX_VIDEO].codec = ini_source->v_codecs[0];
if (!__set_default_video_info(source, ini_source))
return WEBRTC_ERROR_INVALID_OPERATION;
- if (!(custom_videosrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_VIDEO), ELEMENT_NAME_VIDEO_SRC)))
+ if (!(custom_videosrc = _create_element(_get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_VIDEO), ELEMENT_NAME_VIDEO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
_gst_set_element_properties(custom_videosrc, ini_source->source_element_properties);
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
source->media_types = MEDIA_TYPE_AUDIO;
- source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types);
source->av[AV_IDX_AUDIO].codec = ini_source->a_codecs[0];
- source_factory_name = __get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_AUDIO);
+ source_factory_name = _get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_AUDIO);
if (!(custom_audiosrc = _create_element(source_factory_name, NULL)))
return WEBRTC_ERROR_INVALID_OPERATION;
goto exit;
APPEND_ELEMENT(element_list, capsfilter);
- if (!__add_elements_to_bin(source->bin, element_list)) {
+ if (!_add_elements_to_bin(source->bin, element_list)) {
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- if (!__link_elements(element_list))
+ if (!_link_elements(element_list))
goto exit_with_remove_from_bin;
if (!__sync_elements_state_with_parent(element_list))
return WEBRTC_ERROR_NONE;
exit_with_remove_from_bin:
- __remove_elements_from_bin(source->bin, element_list);
+ _remove_elements_from_bin(source->bin, element_list);
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
exit:
else
LOG_ERROR("fakesink is NULL");
- __remove_elements_from_bin(bin, element_list);
+ _remove_elements_from_bin(bin, element_list);
SAFE_G_LIST_FREE(element_list);
}
return NULL;
}
- if ((sink_caps = __make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), payload_type, source))) {
+ if ((sink_caps = _make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), payload_type, source))) {
g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
gst_caps_unref(sink_caps);
}
goto exit;
APPEND_ELEMENT(element_list, fakesink);
- if (!__add_elements_to_bin(bin, element_list)) {
+ if (!_add_elements_to_bin(bin, element_list)) {
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- if (!__link_elements(element_list))
+ if (!_link_elements(element_list))
goto exit_with_remove_from_bin;
if (!__sync_elements_state_with_parent(element_list))
return WEBRTC_ERROR_NONE;
exit_with_remove_from_bin:
- __remove_elements_from_bin(bin, element_list);
+ _remove_elements_from_bin(bin, element_list);
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
exit:
}
-//LCOV_EXCL_START
-static void _appsrc_need_data_cb(GstElement *appsrc, guint size, gpointer data)
-{
- webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)data;
-
- RET_IF(source == NULL, "source is NULL");
-
- LOG_INFO("appsrc[%s] size[%u] source[%p, idx:%u]", GST_ELEMENT_NAME(appsrc), size, source, source->id);
-
- if (source->buffer_state_changed_cb.callback == NULL) {
- LOG_DEBUG("buffer state changed callback is NULL");
- return;
- }
-
- LOG_DEBUG(">>> invoke buffer_state_changed_cb[%p] for UNDERFLOW, user_data[%p]",
- source->buffer_state_changed_cb.callback, source->buffer_state_changed_cb.user_data);
- ((webrtc_media_packet_source_buffer_state_changed_cb)(source->buffer_state_changed_cb.callback))
- (source->id, WEBRTC_MEDIA_PACKET_SOURCE_BUFFER_STATE_UNDERFLOW, source->buffer_state_changed_cb.user_data);
- LOG_DEBUG("<<< end of the callback");
-}
-
-static void _appsrc_enough_data_cb(GstElement *appsrc, gpointer data)
-{
- webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)data;
-
- RET_IF(source == NULL, "source is NULL");
-
- LOG_INFO("appsrc[%s] source[%p, idx:%u]", GST_ELEMENT_NAME(appsrc), source, source->id);
-
- if (source->buffer_state_changed_cb.callback == NULL) {
- LOG_DEBUG("buffer state changed callback is NULL");
- return;
- }
-
- LOG_DEBUG(">>> invoke buffer_state_changed_cb[%p] for OVERFLOW, user_data[%p]",
- source->buffer_state_changed_cb.callback, source->buffer_state_changed_cb.user_data);
- ((webrtc_media_packet_source_buffer_state_changed_cb)(source->buffer_state_changed_cb.callback))
- (source->id, WEBRTC_MEDIA_PACKET_SOURCE_BUFFER_STATE_OVERFLOW, source->buffer_state_changed_cb.user_data);
- LOG_DEBUG("<<< end of the callback");
-}
-//LCOV_EXCL_STOP
-
-static int __build_mediapacketsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
-{
- GstElement *appsrc;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
-
- if (!(appsrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), NULL)))
- return WEBRTC_ERROR_INVALID_OPERATION;
-
- g_object_set(G_OBJECT(appsrc),
- "is-live", TRUE,
- "format", GST_FORMAT_TIME,
- NULL);
-
- _connect_and_append_signal(&source->signals, G_OBJECT(appsrc), "need-data", G_CALLBACK(_appsrc_need_data_cb), source);
- _connect_and_append_signal(&source->signals, G_OBJECT(appsrc), "enough-data", G_CALLBACK(_appsrc_enough_data_cb), source);
-
- gst_bin_add(source->bin, appsrc);
-
- return WEBRTC_ERROR_NONE;
-}
-
-static int __complete_rest_of_mediapacketsrc(webrtc_gst_slot_s *source, GstPad **src_pad, GstElement *appsrc, GList *element_list)
-{
- int ret;
- GstElement *capsfilter;
-
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "src_pad is NULL");
- RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "appsrc is NULL");
- RET_VAL_IF(element_list == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "element_list is NULL");
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
-
- if (!__add_elements_to_bin(source->bin, element_list))
- return WEBRTC_ERROR_INVALID_OPERATION;
-
- PREPEND_ELEMENT(element_list, appsrc);
- if (!__link_elements(element_list)) {
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- goto error;
- }
-
- if (!(capsfilter = gst_bin_get_by_name(source->bin, ELEMENT_NAME_RTP_CAPSFILTER))) {
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- goto error;
- }
-
- if ((ret = _set_ghost_pad_target(*src_pad, capsfilter, true)) != WEBRTC_ERROR_NONE)
- goto error;
-
- return ret;
-
-error:
- __remove_elements_from_bin(source->bin, element_list);
- return ret;
-}
-
-static int __complete_mediapacketsrc_from_raw_format(webrtc_s *webrtc, webrtc_gst_slot_s *source)
-{
- int ret;
- GstPad **src_pad;
- GstElement *appsrc;
- GList *element_list = NULL;
- GstCaps *sink_caps;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
-
- src_pad = &source->av[GET_AV_IDX_BY_TYPE(source->media_types)].src_pad;
- ret = _add_no_target_ghostpad_to_slot(source, true, src_pad);
- RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
-
- appsrc = __find_element_in_bin(source->bin, "appsrc");
- RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "appsrc is NULL");
-
- source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
-
- if ((ret = __create_rest_of_elements(webrtc, source, false, &element_list, (source->media_types == MEDIA_TYPE_AUDIO))) != WEBRTC_ERROR_NONE)
- goto exit;
-
- if (!(sink_caps = __make_mediapacketsrc_raw_caps_from_media_format(source))) {
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- goto exit;
- }
- PRINT_CAPS(sink_caps, "appsrc");
- g_object_set(G_OBJECT(appsrc), "caps", sink_caps, NULL);
- gst_caps_unref(sink_caps);
-
- if ((ret = __complete_rest_of_mediapacketsrc(source, src_pad, appsrc, element_list)) != WEBRTC_ERROR_NONE)
- goto exit;
-
- SAFE_G_LIST_FREE(element_list);
- return ret;
-
-exit:
- SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
- return ret;
-}
-
-//LCOV_EXCL_START
-static int __complete_mediapacketsrc_from_encoded_format(webrtc_s *webrtc, webrtc_gst_slot_s *source)
-{
- int ret;
- GstPad **src_pad;
- GstElement *appsrc;
- GList *element_list = NULL;
- GstCaps *sink_caps;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
- RET_VAL_IF(source->media_format == NULL, WEBRTC_ERROR_INVALID_OPERATION, "media_format is NULL");
-
- src_pad = &source->av[GET_AV_IDX_BY_TYPE(source->media_types)].src_pad;
- ret = _add_no_target_ghostpad_to_slot(source, true, src_pad);
- RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
-
- appsrc = __find_element_in_bin(source->bin, "appsrc");
- RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "appsrc is NULL");
-
- if ((ret = __create_rest_of_elements_for_encoded_format(webrtc, source, &element_list)) != WEBRTC_ERROR_NONE)
- goto exit;
-
- if (!(sink_caps = __make_encoded_caps_from_media_format(source, NULL))) {
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- goto exit;
- }
- PRINT_CAPS(sink_caps, "appsrc");
- g_object_set(G_OBJECT(appsrc), "caps", sink_caps, NULL);
- gst_caps_unref(sink_caps);
-
- if ((ret = __complete_rest_of_mediapacketsrc(source, src_pad, appsrc, element_list)) != WEBRTC_ERROR_NONE)
- goto exit;
-
- SAFE_G_LIST_FREE(element_list);
- return ret;
-
-exit:
- SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
- return ret;
-}
-//LCOV_EXCL_STOP
-
static int __build_source_bin(webrtc_s *webrtc, webrtc_gst_slot_s *source)
{
RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
return __build_filesrc_pipeline(webrtc, source);
case WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET:
- return __build_mediapacketsrc(webrtc, source);
+ return _build_mediapacketsrc(webrtc, source);
//LCOV_EXCL_START
/* for internal use */
case WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_AUDIO:
RET_VAL_IF(!ini_source->v_drc_support, WEBRTC_ERROR_INVALID_OPERATION, "not supported dynamic resolution change");
}
- if ((capsfilter = __find_element_in_bin(source->bin, ELEMENT_NAME_FIRST_CAPSFILTER))) {
+ if ((capsfilter = _find_element_in_bin(source->bin, ELEMENT_NAME_FIRST_CAPSFILTER))) {
/* FIXME: check if the [width x height] is supported or not */
if (!(new_caps = __make_video_raw_caps_with_resolution(source, &webrtc->ini, width, height)))
return WEBRTC_ERROR_INVALID_OPERATION;
RET_VAL_IF(webrtc->state != WEBRTC_STATE_IDLE, WEBRTC_ERROR_INVALID_OPERATION, "for now, it is only supported in IDLE state");
/* FIXME: check if the framerate is supported or not */
- if ((capsfilter = __find_element_in_bin(source->bin, ELEMENT_NAME_FIRST_CAPSFILTER))) {
+ if ((capsfilter = _find_element_in_bin(source->bin, ELEMENT_NAME_FIRST_CAPSFILTER))) {
if (!(new_caps = __make_video_raw_caps_with_framerate(source, &webrtc->ini, framerate)))
return WEBRTC_ERROR_INVALID_OPERATION;
PRINT_CAPS(new_caps, "capsfilter");
RET_VAL_IF(!g_object_class_find_property(G_OBJECT_GET_CLASS(G_OBJECT(element)), "stream-properties"),
WEBRTC_ERROR_INVALID_OPERATION, "mic source element does not have 'stream-properties");
- sound_manager_get_type_from_stream_information(stream_info, &stream_type);
- sound_manager_get_index_from_stream_information(stream_info, &stream_index);
- sound_manager_get_echo_cancel_reference_device(stream_info, &aec_ref_device_id);
+ sound_manager_get_type_from_stream_information(stream_info, &stream_type);
+ sound_manager_get_index_from_stream_information(stream_info, &stream_index);
+ sound_manager_get_echo_cancel_reference_device(stream_info, &aec_ref_device_id);
+
+ ret = sound_manager_is_available_stream_information(stream_info, NATIVE_API_WEBRTC, &available);
+ if (ret != SOUND_MANAGER_ERROR_NONE) {
+ LOG_ERROR("failed to sound_manager_is_available_stream_information()");
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (!available) {
+ LOG_ERROR("this stream info[%p, type:%s, index:%d] is not allowed to this framework", stream_info, stream_type, stream_index);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ LOG_INFO("webrtc[%p], source_id[%u], stream_info[%p, type:%s, index:%d, aec_ref_device_id:%d]",
+ webrtc, source_id, stream_info, stream_type, stream_index, aec_ref_device_id);
+
+ return _apply_stream_info(element, stream_type, stream_index, aec_ref_device_id);
+}
+
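+/* Completes the source bin for a raw-format media packet source: sets raw caps on appsrc and links the remaining elements up to the RTP capsfilter. */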
+static int __complete_mediapacketsrc_from_raw_format(webrtc_s *webrtc, webrtc_gst_slot_s *source)
+{
+ int ret;
+ GstPad **src_pad;
+ GstElement *appsrc;
+ GList *element_list = NULL;
+ GstCaps *sink_caps;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+
+ src_pad = &source->av[GET_AV_IDX_BY_TYPE(source->media_types)].src_pad;
+ ret = _add_no_target_ghostpad_to_slot(source, true, src_pad);
+ RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
+
+ appsrc = _find_element_in_bin(source->bin, "appsrc");
+ RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "appsrc is NULL");
+
+ source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types);
- ret = sound_manager_is_available_stream_information(stream_info, NATIVE_API_WEBRTC, &available);
- if (ret != SOUND_MANAGER_ERROR_NONE) {
- LOG_ERROR("failed to sound_manager_is_available_stream_information()");
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
+ if ((ret = __create_rest_of_elements(webrtc, source, false, &element_list, (source->media_types == MEDIA_TYPE_AUDIO))) != WEBRTC_ERROR_NONE)
+ goto exit;
- if (!available) {
- LOG_ERROR("this stream info[%p, type:%s, index:%d] is not allowed to this framework", stream_info, stream_type, stream_index);
- return WEBRTC_ERROR_INVALID_PARAMETER;
+ if (!(sink_caps = _make_mediapacketsrc_raw_caps_from_media_format(source))) {
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ goto exit;
}
+ PRINT_CAPS(sink_caps, "appsrc");
+ g_object_set(G_OBJECT(appsrc), "caps", sink_caps, NULL);
+ gst_caps_unref(sink_caps);
- LOG_INFO("webrtc[%p], source_id[%u], stream_info[%p, type:%s, index:%d, aec_ref_device_id:%d]",
- webrtc, source_id, stream_info, stream_type, stream_index, aec_ref_device_id);
+ if ((ret = _complete_rest_of_mediapacketsrc(source, src_pad, appsrc, element_list)) != WEBRTC_ERROR_NONE)
+ goto exit;
- return _apply_stream_info(element, stream_type, stream_index, aec_ref_device_id);
+ SAFE_G_LIST_FREE(element_list);
+ return ret;
+
+exit:
+ SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
+ return ret;
}
int _set_media_format(webrtc_s *webrtc, unsigned int source_id, media_format_h format)
return WEBRTC_ERROR_INVALID_OPERATION;
}
- RET_VAL_IF(!__is_supported_mime_type(mime_type), WEBRTC_ERROR_INVALID_PARAMETER, "mime_type[0x%x] is not supported", mime_type);
+ RET_VAL_IF(!_is_supported_mime_type(mime_type), WEBRTC_ERROR_INVALID_PARAMETER, "mime_type[0x%x] is not supported", mime_type);
media_format_ref(format);
source->media_format = format;
!(mime_type == MEDIA_FORMAT_PCMU || mime_type == MEDIA_FORMAT_PCMA)) /* FIXME: media_format.h defined PCMU/PCMA as a raw format, it's a bug. */
ret = __complete_mediapacketsrc_from_raw_format(webrtc, source);
else
- ret = __complete_mediapacketsrc_from_encoded_format(webrtc, source);
+ ret = _complete_mediapacketsrc_from_encoded_format(webrtc, source);
if (ret != WEBRTC_ERROR_NONE) {
LOG_ERROR("failed to complete mediapacketsrc");
goto error;
else
LOG_ERROR("capsfilter is NULL");
- __remove_elements_from_bin(source->bin, element_list);
+ _remove_elements_from_bin(source->bin, element_list);
SAFE_G_LIST_FREE(element_list);
}
return true;
}
-static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet)
-{
- bool has_tbm_surface = false;
- tbm_surface_info_s ts_info;
- guint64 size = 0;
- GstMapInfo buff_info = GST_MAP_INFO_INIT;
-
- RET_VAL_IF(buffer == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "buffer is NULL");
- RET_VAL_IF(packet == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "packet is NULL");
-
- media_packet_get_buffer_size(packet, &size);
- media_packet_has_tbm_surface_buffer(packet, &has_tbm_surface);
-
- if (has_tbm_surface) {
- int ret = TBM_SURFACE_ERROR_NONE;
- tbm_surface_h ts;
-
- media_packet_get_tbm_surface(packet, &ts);
- ret = tbm_surface_get_info(ts, &ts_info);
- if (ret != TBM_SURFACE_ERROR_NONE) {
- LOG_ERROR("failed to tbm_surface_get_info()");
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- LOG_DEBUG("tbm surface[%p, %ux%u, size:%u, format:%u, num_planes:%u] found",
- ts, ts_info.width, ts_info.height, ts_info.size, ts_info.format, ts_info.num_planes);
- }
-
- if (gst_buffer_map(buffer, &buff_info, GST_MAP_READWRITE)) {
- if (has_tbm_surface) {
- unsigned int i;
- guint8 *ptr = buff_info.data;
- for (i = 0; i < ts_info.num_planes; i++) {
- LOG_DEBUG("plane[%d][ptr:%p size:%u]", i, ts_info.planes[i].ptr, ts_info.planes[i].size);
- memcpy(ptr, ts_info.planes[i].ptr, ts_info.planes[i].size);
- ptr += ts_info.planes[i].size;
- }
-
- } else {
- guchar *data_ptr;
- media_packet_get_buffer_data_ptr(packet, (void **)&data_ptr);
- if (data_ptr == NULL) {
- LOG_ERROR("invalid packet, data_ptr is NULL");
- gst_buffer_unmap(buffer, &buff_info);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- memcpy(buff_info.data, data_ptr, size);
- }
-
- buff_info.size = size;
-
- LOG_DEBUG("buffer[%p], buff_info[data:%p, size:%"G_GSIZE_FORMAT"]", buffer, buff_info.data, buff_info.size);
-
- gst_buffer_unmap(buffer, &buff_info);
- }
-
- return WEBRTC_ERROR_NONE;
-}
-
-static bool __is_valid_format(media_type_e type, media_format_h conf_format, media_format_h push_format)
-{
- int ret = MEDIA_PACKET_ERROR_NONE;
- media_format_mimetype_e mime_type1;
- media_format_mimetype_e mime_type2;
-
- RET_VAL_IF(conf_format == NULL, false, "conf_format is NULL");
- RET_VAL_IF(push_format == NULL, false, "push_format is NULL");
-
- if (type == MEDIA_TYPE_AUDIO) {
- ret |= media_format_get_audio_info(conf_format, &mime_type1, NULL, NULL, NULL, NULL);
- ret |= media_format_get_audio_info(push_format, &mime_type2, NULL, NULL, NULL, NULL);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, false, "failed to media_format_get_audio_info()");
-
- } else if (type == MEDIA_TYPE_VIDEO) {
- ret |= media_format_get_video_info(conf_format, &mime_type1, NULL, NULL, NULL, NULL);
- ret |= media_format_get_video_info(push_format, &mime_type2, NULL, NULL, NULL, NULL);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, false, "failed to media_format_get_video_info()");
-
- } else {
- LOG_ERROR_IF_REACHED("type(%u)", type);
- return false;
- }
-
- RET_VAL_IF(mime_type1 != mime_type2, false, "invalid format(0x%x, 0x%x)", mime_type1, mime_type2);
-
- return true;
-}
-
-static void __memory_finalize_cb(packet_buffer_s *packet_buffer)
-{
- LOG_DEBUG("packet[%p] buffer[%p] is about to release", packet_buffer->packet, packet_buffer->buffer);
-
- media_packet_destroy(packet_buffer->packet);
- g_free(packet_buffer);
-}
-
-//LCOV_EXCL_START
-static GstBuffer* __make_buffer_from_zerocopy_video_packet(webrtc_gst_slot_s *source, media_packet_h packet)
-{
- int ret = MEDIA_PACKET_ERROR_NONE;
- gint stride_width;
- gint stride_height;
- uint32_t plane_num;
- guint8 *planes[2];
- guint64 pts = 0;
- guint64 dts = 0;
- guint64 duration = 0;
- tbm_surface_h surface = NULL;
- GstVideoInfo vinfo;
- GstMemory *mem;
- GstBuffer *buffer;
- packet_buffer_s *packet_buffer;
-
- RET_VAL_IF(source == NULL, NULL, "source is NULL");
- RET_VAL_IF(packet == NULL, NULL, "packet is NULL");
-
- ret = media_packet_get_number_of_video_planes(packet, &plane_num);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_number_of_video_planes()");
-
- ret = media_packet_get_video_plane_data_ptr(packet, 0, (void **)&planes[0]);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_video_plane_data_ptr()");
-
- ret = media_packet_get_video_stride_width(packet, 0, &stride_width);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_video_stride_width()");
-
- ret = media_packet_get_video_stride_height(packet, 0, &stride_height);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_video_stride_height()");
-
- ret = media_packet_get_tbm_surface(packet, &surface);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_tbm_surface()");
-
- ret = media_packet_get_pts(packet, &pts);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_pts()");
-
- ret = media_packet_get_dts(packet, &dts);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_dts()");
-
- ret = media_packet_get_duration(packet, &duration);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_duration()");
-
- buffer = gst_buffer_new();
-
- packet_buffer = g_new0(packet_buffer_s, 1);
- packet_buffer->packet = packet;
- packet_buffer->buffer = buffer;
-
- memset(&vinfo, 0, sizeof(GstVideoInfo));
- mem = gst_tizen_allocator_alloc_surface(source->allocator, &vinfo, surface, packet_buffer, (GDestroyNotify)__memory_finalize_cb);
- if (!mem) {
- LOG_ERROR("failed to gst_tizen_allocator_alloc_surface()");
- gst_buffer_unref(buffer);
- g_free(packet_buffer);
- return NULL;
- }
-
- GST_BUFFER_PTS(buffer) = pts;
- GST_BUFFER_DTS(buffer) = dts;
- GST_BUFFER_DURATION(buffer) = duration;
- gst_buffer_append_memory(buffer, mem);
-
- LOG_DEBUG("buffer[%p, pts:%"G_GUINT64_FORMAT", dts:%"G_GUINT64_FORMAT", duration:%"G_GUINT64_FORMAT", tizen memory:%p]",
- buffer, pts, dts, duration, mem);
-
- return buffer;
-}
-//LCOV_EXCL_STOP
-
-int _push_media_packet(webrtc_s *webrtc, unsigned int source_id, media_packet_h packet)
-{
- int ret = WEBRTC_ERROR_NONE;
- webrtc_gst_slot_s *source;
- GstElement *appsrc;
- media_format_h format;
- GstBuffer *buffer = NULL;
- guint64 pts = 0;
- guint64 dts = 0;
- guint64 duration = 0;
- guint64 size = 0;
- GstFlowReturn gst_ret = GST_FLOW_OK;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
- RET_VAL_IF(source->media_format == NULL, WEBRTC_ERROR_INVALID_OPERATION, "media_format is NULL");
-
- appsrc = __find_element_in_bin(source->bin, "appsrc");
- RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source_id[%u] is not for media packet source", source_id);
-
- if (packet == NULL) {
- LOG_INFO("packet is NULL, emit EOS signal");
- g_signal_emit_by_name(G_OBJECT(appsrc), "end-of-stream", &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK) {
- LOG_ERROR("failed to 'end-of-stream', gst_ret[%d]", gst_ret);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- return WEBRTC_ERROR_NONE;
- }
-
- ret = media_packet_get_format(packet, &format);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to media_packet_get_format()");
- if (!__is_valid_format(source->media_types, source->media_format, format)) {
- media_format_unref(format);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- media_format_unref(format);
-
- /* the incoming media packet should have zerocopy format (e.g., SN12) */
- if (source->zerocopy_enabled) {
- if (!source->allocator)
- source->allocator = gst_tizen_allocator_new();
- buffer = __make_buffer_from_zerocopy_video_packet(source, packet);
- RET_VAL_IF(buffer == NULL, WEBRTC_ERROR_INVALID_OPERATION, "buffer is NULL");
-
- g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK) {
- LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
- gst_buffer_unref(buffer);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- gst_buffer_unref(buffer);
- return WEBRTC_ERROR_NONE;
- }
-
- ret = media_packet_get_extra(packet, (void **)&buffer);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to media_packet_get_extra()");
- if (GST_IS_BUFFER(buffer)) {
- LOG_DEBUG("external gst buffer[%p]", buffer);
- g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK) {
- LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- media_packet_destroy(packet);
- return WEBRTC_ERROR_NONE;
- }
-
- /* FIXME: make subfunction for codes below */
- ret = media_packet_get_buffer_size(packet, &size);
- RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to media_packet_get_buffer_size()");
-
- buffer = gst_buffer_new_and_alloc(size);
- RET_VAL_IF(buffer == NULL, WEBRTC_ERROR_INVALID_OPERATION, "failed to gst_buffer_new_and_alloc()");
-
- ret = __fill_gst_buffer_mapped_data_from_packet(buffer, packet);
- if (ret != WEBRTC_ERROR_NONE) {
- LOG_ERROR("failed to __fill_gst_buffer_mapped_data_from_packet()");
- goto exit;
- }
-
- ret = media_packet_get_pts(packet, &pts);
- if (ret != MEDIA_PACKET_ERROR_NONE) {
- LOG_ERROR("failed to media_packet_get_pts()");
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- goto exit;
- }
- GST_BUFFER_PTS(buffer) = pts;
-
- ret = media_packet_get_dts(packet, &dts);
- if (ret != MEDIA_PACKET_ERROR_NONE) {
- LOG_ERROR("failed to media_packet_get_dts()");
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- goto exit;
- }
- GST_BUFFER_DTS(buffer) = dts;
-
- ret = media_packet_get_duration(packet, &duration);
- if (ret != MEDIA_PACKET_ERROR_NONE) {
- LOG_ERROR("failed to media_packet_get_duration()");
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- goto exit;
- }
- GST_BUFFER_DURATION(buffer) = duration;
-
- LOG_DEBUG("new gst buffer[%p, pts:%"G_GUINT64_FORMAT", dts:%"G_GUINT64_FORMAT", duration:%"G_GUINT64_FORMAT"]",
- buffer, pts, dts, duration);
-
- g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK) {
- LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
- ret = WEBRTC_ERROR_INVALID_OPERATION;
- }
-
-exit:
- gst_buffer_unref(buffer);
- if (ret == WEBRTC_ERROR_NONE)
- media_packet_destroy(packet);
-
- return ret;
-}
-
//LCOV_EXCL_START
static GstPadProbeReturn __camerasrc_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
}
g_object_set(G_OBJECT(audiosink), "sync", FALSE, NULL);
- if (!__add_elements_to_bin(GST_BIN(source->av[AV_IDX_AUDIO].render.pipeline), element_list)) {
+ if (!_add_elements_to_bin(GST_BIN(source->av[AV_IDX_AUDIO].render.pipeline), element_list)) {
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- if (!__link_elements(element_list))
+ if (!_link_elements(element_list))
goto exit_with_remove_from_bin;
if (!gst_element_link(link_with, audioconvert)) {
return WEBRTC_ERROR_NONE;
exit_with_remove_from_bin:
- __remove_elements_from_bin(source->bin, element_list);
+ _remove_elements_from_bin(source->bin, element_list);
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
exit:
_connect_and_append_signal(&source->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), source->display);
}
//LCOV_EXCL_STOP
- if (!__add_elements_to_bin(GST_BIN(source->av[AV_IDX_VIDEO].render.pipeline), element_list)) {
+ if (!_add_elements_to_bin(GST_BIN(source->av[AV_IDX_VIDEO].render.pipeline), element_list)) {
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- if (!__link_elements(element_list))
+ if (!_link_elements(element_list))
goto exit_with_remove_from_bin;
if (!gst_element_link(link_with, videoconvert)) {
return WEBRTC_ERROR_NONE;
exit_with_remove_from_bin:
- __remove_elements_from_bin(source->bin, element_list);
+ _remove_elements_from_bin(source->bin, element_list);
SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
exit:
return WEBRTC_ERROR_NONE;
}
-
-//LCOV_EXCL_START
-int _set_screen_source_crop(webrtc_s *webrtc, unsigned int source_id, int x, int y, int w, int h, bool portrait_mode, int *width, int *height)
-{
- webrtc_gst_slot_s *source = NULL;
- GstElement *screen_source = NULL;
- GstElement *videocrop = NULL;
- int src_width, src_height, output_width, output_height;
- int mirroring_x, mirroring_y, mirroring_width, mirroring_height;
- float rw, rh;
- int left, right, top, bottom;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->type != WEBRTC_MEDIA_SOURCE_TYPE_SCREEN, WEBRTC_ERROR_INVALID_PARAMETER, "source type is not screen");
- RET_VAL_IF(w == 0, WEBRTC_ERROR_INVALID_PARAMETER, "w is 0");
- RET_VAL_IF(h == 0, WEBRTC_ERROR_INVALID_PARAMETER, "h is 0");
- RET_VAL_IF(width == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "width is NULL");
- RET_VAL_IF(height == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "height is NULL");
-
- screen_source = gst_bin_get_by_name(source->bin, ELEMENT_NAME_SCREENSRC);
- RET_VAL_IF(screen_source == NULL, WEBRTC_ERROR_INVALID_OPERATION, "sreen_source is NULL");
-
- videocrop = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEOCROP);
- RET_VAL_IF(videocrop == NULL, WEBRTC_ERROR_INVALID_OPERATION, "videocrop is NULL");
-
- LOG_INFO("set source crop x:%d, y:%d, width:%d, height:%d, mode:%s", x, y, w, h, (portrait_mode) ? "portrait" : "landscape");
-
- g_object_get(G_OBJECT(screen_source),
- portrait_mode ? "mirroring-v-src-width" : "mirroring-h-src-width", &src_width,
- portrait_mode ? "mirroring-v-src-height" : "mirroring-h-src-height", &src_height,
- portrait_mode ? "mirroring-v-x" : "mirroring-h-x", &mirroring_x,
- portrait_mode ? "mirroring-v-y" : "mirroring-h-y", &mirroring_y,
- portrait_mode ? "mirroring-v-width" : "mirroring-h-width", &mirroring_width,
- portrait_mode ? "mirroring-v-height" : "mirroring-h-height", &mirroring_height,
- "output-width", &output_width,
- "output-height", &output_height,
- NULL);
-
- rw = (float)src_width / mirroring_width;
- rh = (float)src_height / mirroring_height;
- left = mirroring_x + ((float)x / rw);
- right = output_width - (left + (float)w / rw);
- top = mirroring_y + ((float)y /rh);
- bottom = output_height - (top + (float)h / rh);
-
- LOG_INFO("Screen source info: source[width:%d, height:%d], output[width:%d, height:%d]"
- "mirroring[x:%d y:%d width:%d, height:%d", src_width, src_height, output_width,
- output_height, mirroring_x, mirroring_y, mirroring_width, mirroring_height);
-
- g_object_set(G_OBJECT(videocrop),
- "left", left,
- "right", right,
- "top", top,
- "bottom", bottom,
- NULL);
-
- LOG_INFO("cropped: left:%d, right:%d, top:%d, bottom:%d", left, right, top, bottom);
-
- *width = output_width - (left + right);
- *height = output_height - (top + bottom);
- LOG_INFO("source_id[%u], video resolution is changed [%dx%d] ==> [%dx%d]", source_id,
- output_width, output_height, *width, *height);
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _unset_screen_source_crop(webrtc_s *webrtc, unsigned int source_id)
-{
- webrtc_gst_slot_s *source = NULL;
- GstElement *videocrop = NULL;
- GstElement *screen_source = NULL;
- int left, right, top, bottom;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->type != WEBRTC_MEDIA_SOURCE_TYPE_SCREEN, WEBRTC_ERROR_INVALID_PARAMETER, "source type is not screen");
-
- screen_source = gst_bin_get_by_name(source->bin, ELEMENT_NAME_SCREENSRC);
- RET_VAL_IF(screen_source == NULL, WEBRTC_ERROR_INVALID_OPERATION, "sreen_source is NULL");
-
- videocrop = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEOCROP);
- RET_VAL_IF(videocrop == NULL, WEBRTC_ERROR_INVALID_OPERATION, "videocrop is NULL");
-
- g_object_get(G_OBJECT(videocrop),
- "left", &left,
- "right", &right,
- "top", &top,
- "bottom", &bottom,
- NULL);
- RET_VAL_IF(left == 0 && right == 0 && top == 0 && bottom == 0, WEBRTC_ERROR_INVALID_OPERATION, "webrtc_screen_source_set_crop was not set");
-
- g_object_set(G_OBJECT(videocrop),
- "left", 0,
- "right", 0,
- "top", 0,
- "bottom", 0,
- NULL);
-
- return WEBRTC_ERROR_NONE;
-}
-
-static void __set_filesrc_pipline_state_foreach_cb(gpointer key, gpointer value, gpointer user_data)
-{
- webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)value;
- GstStateChangeReturn ret;
- GstState state = GPOINTER_TO_UINT(user_data);
-
- if (source->type != WEBRTC_MEDIA_SOURCE_TYPE_FILE)
- return;
-
- LOG_INFO("found file source[%p, id:%u]", source, source->id);
-
- ret = gst_element_set_state(source->filesrc_pipeline, state);
- if (ret == GST_STATE_CHANGE_FAILURE) {
- LOG_ERROR("failed to gst_element_set_state(), state[%s]", gst_element_state_get_name(state));
- return;
- }
-
- LOG_INFO("change filesrc pipeline state to [%s]", gst_element_state_get_name(state));
-}
-
-int _gst_filesrc_pipeline_set_state(webrtc_s *webrtc, GstState state)
-{
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
-
- g_hash_table_foreach(webrtc->gst.source_slots, __set_filesrc_pipline_state_foreach_cb, GINT_TO_POINTER(state));
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _set_filesrc_looping(webrtc_s *webrtc, unsigned int source_id, bool looping)
-{
- webrtc_gst_slot_s *source;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->type != WEBRTC_MEDIA_SOURCE_TYPE_FILE, WEBRTC_ERROR_INVALID_PARAMETER, "invalid source type [%d]", source->type);
-
- source->filesrc_loop = looping;
-
- LOG_DEBUG("source_id[%u] looping[%d]", source_id, looping);
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _get_filesrc_looping(webrtc_s *webrtc, unsigned int source_id, bool *looping)
-{
- const webrtc_gst_slot_s *source;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
- RET_VAL_IF(looping == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "looping is NULL");
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->type != WEBRTC_MEDIA_SOURCE_TYPE_FILE, WEBRTC_ERROR_INVALID_PARAMETER, "invalid source type [%d]", source->type);
-
- *looping = source->filesrc_loop;
-
- LOG_DEBUG("source_id[%u] looping[%d]", source_id, *looping);
-
- return WEBRTC_ERROR_NONE;
-}
-
-static void __remove_filesrc_pad_block_foreach_cb(gpointer key, gpointer value, gpointer user_data)
-{
- webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)value;
- int av_idx;
-
- if (source->type != WEBRTC_MEDIA_SOURCE_TYPE_FILE)
- return;
-
- for (av_idx = 0; av_idx < AV_IDX_MAX; av_idx++) {
- if (source->filesrc_av[av_idx].sink_pad_probe_id == 0)
- continue;
-
- LOG_DEBUG("source[%p, id:%u] fakesink pad[%p] for [%s] is unblocked",
- source, source->id, source->filesrc_av[av_idx].sink_pad, GET_MEDIA_TYPE_NAME(av_idx == AV_IDX_AUDIO));
- gst_pad_remove_probe(source->filesrc_av[av_idx].sink_pad, source->filesrc_av[av_idx].sink_pad_probe_id);
- source->filesrc_av[av_idx].sink_pad = NULL;
- source->filesrc_av[av_idx].sink_pad_probe_id = 0;
- }
-}
-
-int _remove_filesrc_pad_block_probe(webrtc_s *webrtc)
-{
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
-
- g_hash_table_foreach(webrtc->gst.source_slots, __remove_filesrc_pad_block_foreach_cb, NULL);
-
- return WEBRTC_ERROR_NONE;
-}
-//LCOV_EXCL_STOP
--- /dev/null
+/*
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "webrtc_internal.h"
+#include "webrtc_private.h"
+#include "webrtc_source_common.h"
+#include <gst/audio/audio.h>
+#include <gst/allocators/gsttizenmemory.h>
+
+typedef struct {
+ media_packet_h packet;
+ GstBuffer *buffer;
+} packet_buffer_s;
+
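+/* GDestroyNotify for the Tizen memory wrapping a zerocopy packet: destroys the packet and frees the wrapper once the memory is released. */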
+static void __memory_finalize_cb(packet_buffer_s *packet_buffer)
+{
+ LOG_DEBUG("packet[%p] buffer[%p] is about to release", packet_buffer->packet, packet_buffer->buffer);
+
+ media_packet_destroy(packet_buffer->packet);
+ g_free(packet_buffer);
+}
+
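+/* Maps a media_format mime type to a GstAudioFormat; compressed audio types map to GST_AUDIO_FORMAT_ENCODED. */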
+static GstAudioFormat __get_gst_audio_format(media_format_mimetype_e mime_type)
+{
+ switch (mime_type) {
+ /* RAW formats */
+ case MEDIA_FORMAT_PCM_S16LE:
+ return GST_AUDIO_FORMAT_S16LE;
+ /* ENCODED formats */
+ case MEDIA_FORMAT_OPUS:
+ case MEDIA_FORMAT_VORBIS:
+ return GST_AUDIO_FORMAT_ENCODED;
+ default:
+ LOG_ERROR("not supported mime_type(0x%x)", mime_type);
+ return GST_AUDIO_FORMAT_UNKNOWN;
+ }
+}
+
+//LCOV_EXCL_START
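+/* Wraps the packet's tbm_surface in Tizen GstMemory so the video frame is handed to appsrc without copying; the packet stays alive until the memory is finalized. */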
+static GstBuffer* __make_buffer_from_zerocopy_video_packet(webrtc_gst_slot_s *source, media_packet_h packet)
+{
+ int ret = MEDIA_PACKET_ERROR_NONE;
+ gint stride_width;
+ gint stride_height;
+ uint32_t plane_num;
+ guint8 *planes[2];
+ guint64 pts = 0;
+ guint64 dts = 0;
+ guint64 duration = 0;
+ tbm_surface_h surface = NULL;
+ GstVideoInfo vinfo;
+ GstMemory *mem;
+ GstBuffer *buffer;
+ packet_buffer_s *packet_buffer;
+
+ RET_VAL_IF(source == NULL, NULL, "source is NULL");
+ RET_VAL_IF(packet == NULL, NULL, "packet is NULL");
+
+ ret = media_packet_get_number_of_video_planes(packet, &plane_num);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_number_of_video_planes()");
+
+ ret = media_packet_get_video_plane_data_ptr(packet, 0, (void **)&planes[0]);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_video_plane_data_ptr()");
+
+ ret = media_packet_get_video_stride_width(packet, 0, &stride_width);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_video_stride_width()");
+
+ ret = media_packet_get_video_stride_height(packet, 0, &stride_height);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_video_stride_height()");
+
+ ret = media_packet_get_tbm_surface(packet, &surface);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_tbm_surface()");
+
+ ret = media_packet_get_pts(packet, &pts);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_pts()");
+
+ ret = media_packet_get_dts(packet, &dts);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_dts()");
+
+ ret = media_packet_get_duration(packet, &duration);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, NULL, "failed to media_packet_get_duration()");
+
+ buffer = gst_buffer_new();
+
+ packet_buffer = g_new0(packet_buffer_s, 1);
+ packet_buffer->packet = packet;
+ packet_buffer->buffer = buffer;
+
+ memset(&vinfo, 0, sizeof(GstVideoInfo));
+ mem = gst_tizen_allocator_alloc_surface(source->allocator, &vinfo, surface, packet_buffer, (GDestroyNotify)__memory_finalize_cb);
+ if (!mem) {
+ LOG_ERROR("failed to gst_tizen_allocator_alloc_surface()");
+ gst_buffer_unref(buffer);
+ g_free(packet_buffer);
+ return NULL;
+ }
+
+ GST_BUFFER_PTS(buffer) = pts;
+ GST_BUFFER_DTS(buffer) = dts;
+ GST_BUFFER_DURATION(buffer) = duration;
+ gst_buffer_append_memory(buffer, mem);
+
+ LOG_DEBUG("buffer[%p, pts:%"G_GUINT64_FORMAT", dts:%"G_GUINT64_FORMAT", duration:%"G_GUINT64_FORMAT", tizen memory:%p]",
+ buffer, pts, dts, duration, mem);
+
+ return buffer;
+}
+//LCOV_EXCL_STOP
+
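+/* Checks that the mime type of a pushed packet matches the media format previously set on the source. */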
+static bool __is_valid_format(media_type_e type, media_format_h conf_format, media_format_h push_format)
+{
+ int ret = MEDIA_PACKET_ERROR_NONE;
+ media_format_mimetype_e mime_type1;
+ media_format_mimetype_e mime_type2;
+
+ RET_VAL_IF(conf_format == NULL, false, "conf_format is NULL");
+ RET_VAL_IF(push_format == NULL, false, "push_format is NULL");
+
+ if (type == MEDIA_TYPE_AUDIO) {
+ ret |= media_format_get_audio_info(conf_format, &mime_type1, NULL, NULL, NULL, NULL);
+ ret |= media_format_get_audio_info(push_format, &mime_type2, NULL, NULL, NULL, NULL);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, false, "failed to media_format_get_audio_info()");
+
+ } else if (type == MEDIA_TYPE_VIDEO) {
+ ret |= media_format_get_video_info(conf_format, &mime_type1, NULL, NULL, NULL, NULL);
+ ret |= media_format_get_video_info(push_format, &mime_type2, NULL, NULL, NULL, NULL);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, false, "failed to media_format_get_video_info()");
+
+ } else {
+ LOG_ERROR_IF_REACHED("type(%u)", type);
+ return false;
+ }
+
+ RET_VAL_IF(mime_type1 != mime_type2, false, "invalid format(0x%x, 0x%x)", mime_type1, mime_type2);
+
+ return true;
+}
+
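+/* Copies the packet payload into the mapped buffer, plane by plane for tbm surfaces or via the raw data pointer otherwise. */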
+static int __fill_gst_buffer_mapped_data_from_packet(GstBuffer *buffer, media_packet_h packet)
+{
+ bool has_tbm_surface = false;
+ tbm_surface_info_s ts_info;
+ guint64 size = 0;
+ GstMapInfo buff_info = GST_MAP_INFO_INIT;
+
+ RET_VAL_IF(buffer == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "buffer is NULL");
+ RET_VAL_IF(packet == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "packet is NULL");
+
+ media_packet_get_buffer_size(packet, &size);
+ media_packet_has_tbm_surface_buffer(packet, &has_tbm_surface);
+
+ if (has_tbm_surface) {
+ int ret = TBM_SURFACE_ERROR_NONE;
+ tbm_surface_h ts;
+
+ media_packet_get_tbm_surface(packet, &ts);
+ ret = tbm_surface_get_info(ts, &ts_info);
+ if (ret != TBM_SURFACE_ERROR_NONE) {
+ LOG_ERROR("failed to tbm_surface_get_info()");
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ LOG_DEBUG("tbm surface[%p, %ux%u, size:%u, format:%u, num_planes:%u] found",
+ ts, ts_info.width, ts_info.height, ts_info.size, ts_info.format, ts_info.num_planes);
+ }
+
+ if (gst_buffer_map(buffer, &buff_info, GST_MAP_READWRITE)) {
+ if (has_tbm_surface) {
+ unsigned int i;
+ guint8 *ptr = buff_info.data;
+ for (i = 0; i < ts_info.num_planes; i++) {
+ LOG_DEBUG("plane[%d][ptr:%p size:%u]", i, ts_info.planes[i].ptr, ts_info.planes[i].size);
+ memcpy(ptr, ts_info.planes[i].ptr, ts_info.planes[i].size);
+ ptr += ts_info.planes[i].size;
+ }
+
+ } else {
+ guchar *data_ptr;
+ media_packet_get_buffer_data_ptr(packet, (void **)&data_ptr);
+ if (data_ptr == NULL) {
+ LOG_ERROR("invalid packet, data_ptr is NULL");
+ gst_buffer_unmap(buffer, &buff_info);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+ memcpy(buff_info.data, data_ptr, size);
+ }
+
+ buff_info.size = size;
+
+ LOG_DEBUG("buffer[%p], buff_info[data:%p, size:%"G_GSIZE_FORMAT"]", buffer, buff_info.data, buff_info.size);
+
+ gst_buffer_unmap(buffer, &buff_info);
+ }
+
+ return WEBRTC_ERROR_NONE;
+}
+
+//LCOV_EXCL_START
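+/* appsrc 'need-data'/'enough-data' signals are forwarded to the application as UNDERFLOW/OVERFLOW buffer state changes. */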
+static void _appsrc_need_data_cb(GstElement *appsrc, guint size, gpointer data)
+{
+ webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)data;
+
+ RET_IF(source == NULL, "source is NULL");
+
+ LOG_INFO("appsrc[%s] size[%u] source[%p, idx:%u]", GST_ELEMENT_NAME(appsrc), size, source, source->id);
+
+ if (source->buffer_state_changed_cb.callback == NULL) {
+ LOG_DEBUG("buffer state changed callback is NULL");
+ return;
+ }
+
+ LOG_DEBUG(">>> invoke buffer_state_changed_cb[%p] for UNDERFLOW, user_data[%p]",
+ source->buffer_state_changed_cb.callback, source->buffer_state_changed_cb.user_data);
+ ((webrtc_media_packet_source_buffer_state_changed_cb)(source->buffer_state_changed_cb.callback))
+ (source->id, WEBRTC_MEDIA_PACKET_SOURCE_BUFFER_STATE_UNDERFLOW, source->buffer_state_changed_cb.user_data);
+ LOG_DEBUG("<<< end of the callback");
+}
+
+static void _appsrc_enough_data_cb(GstElement *appsrc, gpointer data)
+{
+ webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)data;
+
+ RET_IF(source == NULL, "source is NULL");
+
+ LOG_INFO("appsrc[%s] source[%p, idx:%u]", GST_ELEMENT_NAME(appsrc), source, source->id);
+
+ if (source->buffer_state_changed_cb.callback == NULL) {
+ LOG_DEBUG("buffer state changed callback is NULL");
+ return;
+ }
+
+ LOG_DEBUG(">>> invoke buffer_state_changed_cb[%p] for OVERFLOW, user_data[%p]",
+ source->buffer_state_changed_cb.callback, source->buffer_state_changed_cb.user_data);
+ ((webrtc_media_packet_source_buffer_state_changed_cb)(source->buffer_state_changed_cb.callback))
+ (source->id, WEBRTC_MEDIA_PACKET_SOURCE_BUFFER_STATE_OVERFLOW, source->buffer_state_changed_cb.user_data);
+ LOG_DEBUG("<<< end of the callback");
+}
+//LCOV_EXCL_STOP
+
+//LCOV_EXCL_START
+/* Use g_free() to free the media_type parameter. */
+static GstCaps *__make_encoded_caps_from_media_format(webrtc_gst_slot_s *source, gchar **media_type)
+{
+ GstCaps *caps;
+ const char *_media_type;
+
+ RET_VAL_IF(source == NULL, NULL, "source is NULL");
+ RET_VAL_IF(source->media_format == NULL, NULL, "media_format is NULL");
+
+ switch (source->type) {
+ case WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET: {
+ media_format_mimetype_e mime_type;
+
+ if (source->media_types == MEDIA_TYPE_AUDIO) {
+ int channels;
+ int samplerate;
+
+ RET_VAL_IF(media_format_get_audio_info(source->media_format, &mime_type, &channels, &samplerate, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
+ NULL, "failed to media_format_get_audio_info()");
+
+ _media_type = _get_audio_media_type(_get_audio_format_name(mime_type));
+ RET_VAL_IF(_media_type == NULL, NULL, "media_type is NULL");
+
+ caps = _get_caps_from_encoded_audio_media_type(_media_type, channels, samplerate);
+
+ } else if (source->media_types == MEDIA_TYPE_VIDEO) {
+ int width;
+ int height;
+ int framerate;
+
+ RET_VAL_IF(media_format_get_video_info(source->media_format, &mime_type, &width, &height, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
+ NULL, "failed to media_format_get_video_info()");
+ RET_VAL_IF(media_format_get_video_frame_rate(source->media_format, &framerate) != MEDIA_FORMAT_ERROR_NONE,
+ NULL, "failed to media_format_get_video_frame_rate()");
+
+ _media_type = _get_video_media_type(_get_video_format_name(mime_type, source->zerocopy_enabled));
+ RET_VAL_IF(_media_type == NULL, NULL, "media_type is NULL");
+
+ caps = _get_caps_from_encoded_video_media_type(_media_type, width, height);
+
+ } else {
+ LOG_ERROR_IF_REACHED("source->media_types(0x%x)", source->media_types);
+ return NULL;
+ }
+ break;
+ }
+
+ default:
+ LOG_ERROR_IF_REACHED("type(%d)", source->type);
+ return NULL;
+ }
+
+ if (media_type)
+ *media_type = g_strdup(_media_type);
+
+ return caps;
+}
+//LCOV_EXCL_STOP
+
+//LCOV_EXCL_START
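+/* Creates the RTP payloader, queue, and RTP capsfilter for an encoded-format packet source and assigns an available dynamic payload type. */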
+static int __create_rest_of_elements_for_encoded_format(webrtc_s *webrtc, webrtc_gst_slot_s *source,
+ GList **element_list)
+{
+ GstCaps *sink_caps;
+ element_info_s elem_info;
+ gchar *media_type = NULL;
+ GstElement *payloader;
+ GstElement *queue;
+ GstElement *capsfilter;
+ unsigned int payload_type;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(element_list == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "element_list is NULL");
+
+ CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
+ __make_encoded_caps_from_media_format(source, &media_type),
+ NULL,
+ NULL,
+ payloader);
+ if (!payloader)
+ goto error;
+ APPEND_ELEMENT(*element_list, payloader);
+
+ if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL)))
+ goto error;
+ APPEND_ELEMENT(*element_list, queue);
+
+ if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER)))
+ goto error;
+ APPEND_ELEMENT(*element_list, capsfilter);
+
+ payload_type = _get_available_payload_type(webrtc);
+ if (payload_type == 0)
+ goto error;
+
+ source->av[GET_AV_IDX_BY_TYPE(source->media_types)].pt = payload_type;
+
+ if ((sink_caps = _make_rtp_caps(media_type, payload_type, source))) {
+ g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
+ gst_caps_unref(sink_caps);
+ }
+
+ g_free(media_type);
+ return WEBRTC_ERROR_NONE;
+
+error:
+ g_free(media_type);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+//LCOV_EXCL_STOP
+
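+/* Builds raw audio or video caps for appsrc from the media format set on the media packet source. */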
+GstCaps *_make_mediapacketsrc_raw_caps_from_media_format(webrtc_gst_slot_s *source)
+{
+ GstCaps *caps = NULL;
+ media_format_mimetype_e mime_type;
+
+ RET_VAL_IF(source == NULL, NULL, "source is NULL");
+ RET_VAL_IF(source->media_format == NULL, NULL, "media_format is NULL");
+ RET_VAL_IF(source->type != WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET, NULL,
+ "type(%d) is not for media packet source", source->type);
+
+ if (source->media_types == MEDIA_TYPE_AUDIO) {
+ int channels;
+ int samplerate;
+ GstAudioInfo info;
+ GstAudioFormat format;
+
+ RET_VAL_IF(media_format_get_audio_info(source->media_format, &mime_type, &channels, &samplerate, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
+ NULL, "failed to media_format_get_audio_info()");
+
+ format = __get_gst_audio_format(mime_type);
+ RET_VAL_IF(format == GST_AUDIO_FORMAT_ENCODED || format == GST_AUDIO_FORMAT_UNKNOWN, NULL, "could not get valid GstAudioFormat for PCM");
+
+ gst_audio_info_set_format(&info, format, samplerate, channels, NULL);
+ caps = gst_audio_info_to_caps(&info);
+
+ } else if (source->media_types == MEDIA_TYPE_VIDEO) {
+ int width;
+ int height;
+ int framerate;
+
+ RET_VAL_IF(media_format_get_video_info(source->media_format, &mime_type, &width, &height, NULL, NULL) != MEDIA_FORMAT_ERROR_NONE,
+ NULL, "failed to media_format_get_video_info()");
+ RET_VAL_IF(media_format_get_video_frame_rate(source->media_format, &framerate) != MEDIA_FORMAT_ERROR_NONE,
+ NULL, "failed to media_format_get_video_frame_rate()");
+ caps = gst_caps_new_simple(MEDIA_TYPE_VIDEO_RAW,
+ "format", G_TYPE_STRING, _get_video_format_name(mime_type, source->zerocopy_enabled),
+ "framerate", GST_TYPE_FRACTION, framerate, 1,
+ "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height,
+ NULL);
+
+ } else {
+ LOG_ERROR_IF_REACHED("source->media_types(0x%x)", source->media_types);
+ }
+
+ return caps;
+}
+
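+/* Pushes a packet to appsrc: a NULL packet emits EOS, a zerocopy packet is wrapped as Tizen memory, a packet carrying an external GstBuffer is pushed as-is, and any other packet is copied into a newly allocated buffer. */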
+int _push_media_packet(webrtc_s *webrtc, unsigned int source_id, media_packet_h packet)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ webrtc_gst_slot_s *source;
+ GstElement *appsrc;
+ media_format_h format;
+ GstBuffer *buffer = NULL;
+ guint64 pts = 0;
+ guint64 dts = 0;
+ guint64 duration = 0;
+ guint64 size = 0;
+ GstFlowReturn gst_ret = GST_FLOW_OK;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+ RET_VAL_IF(source->media_format == NULL, WEBRTC_ERROR_INVALID_OPERATION, "media_format is NULL");
+
+ appsrc = _find_element_in_bin(source->bin, "appsrc");
+ RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source_id[%u] is not for media packet source", source_id);
+
+ if (packet == NULL) {
+ LOG_INFO("packet is NULL, emit EOS signal");
+ g_signal_emit_by_name(G_OBJECT(appsrc), "end-of-stream", &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK) {
+ LOG_ERROR("failed to 'end-of-stream', gst_ret[%d]", gst_ret);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+ return WEBRTC_ERROR_NONE;
+ }
+
+ ret = media_packet_get_format(packet, &format);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to media_packet_get_format()");
+ if (!__is_valid_format(source->media_types, source->media_format, format)) {
+ media_format_unref(format);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+ media_format_unref(format);
+
+ /* the incoming media packet should have zerocopy format (e.g., SN12) */
+ if (source->zerocopy_enabled) {
+ if (!source->allocator)
+ source->allocator = gst_tizen_allocator_new();
+ buffer = __make_buffer_from_zerocopy_video_packet(source, packet);
+ RET_VAL_IF(buffer == NULL, WEBRTC_ERROR_INVALID_OPERATION, "buffer is NULL");
+
+ g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK) {
+ LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
+ gst_buffer_unref(buffer);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+ gst_buffer_unref(buffer);
+ return WEBRTC_ERROR_NONE;
+ }
+
+ ret = media_packet_get_extra(packet, (void **)&buffer);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to media_packet_get_extra()");
+ if (GST_IS_BUFFER(buffer)) {
+ LOG_DEBUG("external gst buffer[%p]", buffer);
+ g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK) {
+ LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+ media_packet_destroy(packet);
+ return WEBRTC_ERROR_NONE;
+ }
+
+ /* FIXME: make subfunction for codes below */
+ ret = media_packet_get_buffer_size(packet, &size);
+ RET_VAL_IF(ret != MEDIA_PACKET_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to media_packet_get_buffer_size()");
+
+ buffer = gst_buffer_new_and_alloc(size);
+ RET_VAL_IF(buffer == NULL, WEBRTC_ERROR_INVALID_OPERATION, "failed to gst_buffer_new_and_alloc()");
+
+ ret = __fill_gst_buffer_mapped_data_from_packet(buffer, packet);
+ if (ret != WEBRTC_ERROR_NONE) {
+ LOG_ERROR("failed to __fill_gst_buffer_mapped_data_from_packet()");
+ goto exit;
+ }
+
+ ret = media_packet_get_pts(packet, &pts);
+ if (ret != MEDIA_PACKET_ERROR_NONE) {
+ LOG_ERROR("failed to media_packet_get_pts()");
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ goto exit;
+ }
+ GST_BUFFER_PTS(buffer) = pts;
+
+ ret = media_packet_get_dts(packet, &dts);
+ if (ret != MEDIA_PACKET_ERROR_NONE) {
+ LOG_ERROR("failed to media_packet_get_dts()");
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ goto exit;
+ }
+ GST_BUFFER_DTS(buffer) = dts;
+
+ ret = media_packet_get_duration(packet, &duration);
+ if (ret != MEDIA_PACKET_ERROR_NONE) {
+ LOG_ERROR("failed to media_packet_get_duration()");
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ goto exit;
+ }
+ GST_BUFFER_DURATION(buffer) = duration;
+
+ LOG_DEBUG("new gst buffer[%p, pts:%"G_GUINT64_FORMAT", dts:%"G_GUINT64_FORMAT", duration:%"G_GUINT64_FORMAT"]",
+ buffer, pts, dts, duration);
+
+ g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK) {
+ LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+exit:
+ gst_buffer_unref(buffer);
+ if (ret == WEBRTC_ERROR_NONE)
+ media_packet_destroy(packet);
+
+ return ret;
+}
+
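+/* Creates a live appsrc working in time format, connects the need-data/enough-data signals, and adds it to the source bin. */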
+int _build_mediapacketsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
+{
+ GstElement *appsrc;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+
+ if (!(appsrc = _create_element(_get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), NULL)))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+
+ g_object_set(G_OBJECT(appsrc),
+ "is-live", TRUE,
+ "format", GST_FORMAT_TIME,
+ NULL);
+
+ _connect_and_append_signal(&source->signals, G_OBJECT(appsrc), "need-data", G_CALLBACK(_appsrc_need_data_cb), source);
+ _connect_and_append_signal(&source->signals, G_OBJECT(appsrc), "enough-data", G_CALLBACK(_appsrc_enough_data_cb), source);
+
+ gst_bin_add(source->bin, appsrc);
+
+ return WEBRTC_ERROR_NONE;
+}
+
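+/* Adds the prepared element list to the bin, links it after appsrc, and targets the source's ghost pad at the RTP capsfilter. */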
+int _complete_rest_of_mediapacketsrc(webrtc_gst_slot_s *source, GstPad **src_pad, GstElement *appsrc, GList *element_list)
+{
+ int ret;
+ GstElement *capsfilter;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "src_pad is NULL");
+ RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "appsrc is NULL");
+ RET_VAL_IF(element_list == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "element_list is NULL");
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+
+ if (!_add_elements_to_bin(source->bin, element_list))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+
+ PREPEND_ELEMENT(element_list, appsrc);
+ if (!_link_elements(element_list)) {
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ goto error;
+ }
+
+ if (!(capsfilter = gst_bin_get_by_name(source->bin, ELEMENT_NAME_RTP_CAPSFILTER))) {
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ goto error;
+ }
+
+ if ((ret = _set_ghost_pad_target(*src_pad, capsfilter, true)) != WEBRTC_ERROR_NONE)
+ goto error;
+
+ return ret;
+
+error:
+ _remove_elements_from_bin(source->bin, element_list);
+ return ret;
+}
+
+//LCOV_EXCL_START
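+/* Completes the source bin for an encoded-format packet source: sets encoded caps on appsrc and links the payloading chain. */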
+int _complete_mediapacketsrc_from_encoded_format(webrtc_s *webrtc, webrtc_gst_slot_s *source)
+{
+ int ret;
+ GstPad **src_pad;
+ GstElement *appsrc;
+ GList *element_list = NULL;
+ GstCaps *sink_caps;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+ RET_VAL_IF(source->media_format == NULL, WEBRTC_ERROR_INVALID_OPERATION, "media_format is NULL");
+
+ src_pad = &source->av[GET_AV_IDX_BY_TYPE(source->media_types)].src_pad;
+ ret = _add_no_target_ghostpad_to_slot(source, true, src_pad);
+ RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
+
+ appsrc = _find_element_in_bin(source->bin, "appsrc");
+ RET_VAL_IF(appsrc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "appsrc is NULL");
+
+ if ((ret = __create_rest_of_elements_for_encoded_format(webrtc, source, &element_list)) != WEBRTC_ERROR_NONE)
+ goto exit;
+
+ if (!(sink_caps = __make_encoded_caps_from_media_format(source, NULL))) {
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ goto exit;
+ }
+ PRINT_CAPS(sink_caps, "appsrc");
+ g_object_set(G_OBJECT(appsrc), "caps", sink_caps, NULL);
+ gst_caps_unref(sink_caps);
+
+ if ((ret = _complete_rest_of_mediapacketsrc(source, src_pad, appsrc, element_list)) != WEBRTC_ERROR_NONE)
+ goto exit;
+
+ SAFE_G_LIST_FREE(element_list);
+ return ret;
+
+exit:
+ SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
+ return ret;
+}
+//LCOV_EXCL_STOP