LOG_INFO("display[%p, type:%d, surface:%p]", display, type, surface);
}
-
+//LCOV_EXCL_START
void _set_display_surface_id(webrtc_display_s *display, int surface_id, int x, int y, int w, int h)
{
	g_autoptr(GMutexLocker) locker = NULL;
	LOG_INFO("display[%p, surface_id:%d, x:%d, y:%d, w:%d, h:%d]", display, surface_id, x, y, w, h);
}
-
+//LCOV_EXCL_STOP
void _release_display(webrtc_display_s *display)
{
	RET_IF(display == NULL, "display is NULL");
	LOG_INFO("visible[%d]", *visible);
	return WEBRTC_ERROR_NONE;
-}
\ No newline at end of file
+}
		(source->id, WEBRTC_MEDIA_PACKET_SOURCE_BUFFER_STATE_OVERFLOW, source->buffer_state_changed_cb.user_data);
	LOG_DEBUG("<<< end of the callback");
}
-//LCOV_EXCL_STOP
-//LCOV_EXCL_START
/* The string returned via the media_type parameter must be released by the caller with g_free(). */
static GstCaps *__make_encoded_caps_from_media_format(webrtc_gst_slot_s *source, gchar **media_type)
{
	g_free(media_type);
	return WEBRTC_ERROR_INVALID_OPERATION;
}
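/*
 * Illustrative caller sketch, not part of this patch: it shows the ownership
 * contract described by the comment above, assuming the function returns NULL
 * on failure (per its GstCaps * signature) and hands back a newly allocated
 * string via media_type. The function name __example_build_caps and the
 * control flow are hypothetical.
 */
static int __example_build_caps(webrtc_gst_slot_s *source)
{
	gchar *media_type = NULL;
	GstCaps *caps = __make_encoded_caps_from_media_format(source, &media_type);

	if (caps == NULL) {
		g_free(media_type); /* g_free() is a no-op on NULL */
		return WEBRTC_ERROR_INVALID_OPERATION;
	}

	LOG_INFO("media_type[%s]", media_type);

	gst_caps_unref(caps);
	g_free(media_type); /* caller releases the media_type string */

	return WEBRTC_ERROR_NONE;
}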
-//LCOV_EXCL_STOP
static const char *__get_audio_format_name(media_format_mimetype_e mime_type)
{
	return NULL;
}
}
+//LCOV_EXCL_STOP
GstCaps *_make_mediapacketsrc_raw_caps_from_media_format(webrtc_gst_slot_s *source)
{
	return WEBRTC_ERROR_NONE;
}
-//LCOV_EXCL_STOP
\ No newline at end of file
+//LCOV_EXCL_STOP
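/*
 * For reference, a minimal sketch of the coverage-exclusion pattern this patch
 * rearranges (the function below is a placeholder, not part of the source):
 * lcov omits every line between //LCOV_EXCL_START and //LCOV_EXCL_STOP from
 * the coverage report, so each START marker needs a matching STOP.
 */
//LCOV_EXCL_START
static void __example_excluded_from_coverage(void)
{
	/* hard to exercise from unit tests, hence excluded from coverage */
}
//LCOV_EXCL_STOP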