--- /dev/null
+[general]
+upstream_branch = upstream/1.16
+upstream_tag = ${upstreamversion}
-DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
+DISTCHECK_CONFIGURE_FLAGS=--disable-gtk-doc
ALWAYS_SUBDIRS = \
gst sys ext \
tests \
- docs \
po \
common \
m4 \
AM_CONDITIONAL(HAVE_GTK, test "x$HAVE_GTK" = "xyes")
AM_CONDITIONAL(HAVE_GTK_X11, test "x$HAVE_GTK_X11" = "xyes")
+AC_ARG_ENABLE(pcmdump, AC_HELP_STRING([--enable-pcmdump], [pcm dump]),
+ [
+ case "${enableval}" in
+ yes) PCM_DUMP_ENABLE=yes ;;
+ no) PCM_DUMP_ENABLE=no ;;
+ *) AC_MSG_ERROR(bad value ${enableval} for --enable-pcmdump) ;;
+ esac
+ ],
+ [PCM_DUMP_ENABLE=no])
+AM_CONDITIONAL([PCM_DUMP_ENABLE], [test "x$PCM_DUMP_ENABLE" = "xyes"])
+
+if test "x$PCM_DUMP_ENABLE" = "xyes"; then
+PKG_CHECK_MODULES(VCONF, vconf)
+AC_SUBST(VCONF_CFLAGS)
+AC_SUBST(VCONF_LIBS)
+fi
+
+PKG_CHECK_MODULES(GIO, gio-2.0)
+AC_SUBST(GIO_CFLAGS)
+AC_SUBST(GIO_LIBS)
+
dnl Check for -Bsymbolic-functions linker flag used to avoid
dnl intra-library PLT jumps, if available.
AC_ARG_ENABLE(Bsymbolic,
translit(dnm, m, l) AM_CONDITIONAL(USE_X, true)
AG_GST_CHECK_FEATURE(X, [X libraries and plugins],
[ximagesrc], [
- PKG_CHECK_MODULES([X], [x11], [
- HAVE_X="yes"
+ AG_GST_CHECK_X
+
+ if test "x$HAVE_X" = "xyes"
+ then
dnl check for Xfixes
- PKG_CHECK_MODULES([XFIXES], [ xfixes ], [
- AC_DEFINE(HAVE_XFIXES, 1, [Defined if Xfixes is available])
- ], [ HAVE_XFIXES="no" ])
+ PKG_CHECK_MODULES(XFIXES, xfixes, HAVE_XFIXES="yes", HAVE_XFIXES="no")
+ if test "x$HAVE_XFIXES" = "xyes"
+ then
+ XFIXES_CFLAGS="-DHAVE_XFIXES $XFIXES_CFLAGS"
+ fi
+ AC_SUBST(XFIXES_LIBS)
+ AC_SUBST(XFIXES_CFLAGS)
dnl check for Xdamage
- PKG_CHECK_MODULES([XDAMAGE], [ xdamage ], [
- AC_DEFINE(HAVE_XDAMAGE, 1, [Defined if Xdamage is available])
- ], [ HAVE_XDAMAGE="no" ])
-
- dnl check for X Shm
- PKG_CHECK_MODULES([XEXT], [ xext ], [
- AC_CHECK_LIB([Xext], [ XShmAttach ], [
- AC_DEFINE(HAVE_XSHM, 1, [Defined if XShm is available])
- XSHM_LIBS="$XEXT_LIBS"
- XSHM_CFLAGS="$XEXT_CFLAGS"
- ], [ HAVE_XEXT="no" ] , [ $X_LIBS ])
- ])
- AC_SUBST(XSHM_LIBS)
- AC_SUBST(XSHM_CFLAGS)
- ], [ HAVE_X="no" ])
+ PKG_CHECK_MODULES(XDAMAGE, xdamage, HAVE_XDAMAGE="yes", HAVE_XDAMAGE="no")
+ if test "x$HAVE_XDAMAGE" = "xyes"
+ then
+ XDAMAGE_CFLAGS="-DHAVE_XDAMAGE $XDAMAGE_CFLAGS"
+ fi
+ AC_SUBST(XDAMAGE_LIBS)
+ AC_SUBST(XDAMAGE_CFLAGS)
+ fi
+])
+
+dnl FIXME: this should be rolled into the test above, it's just an additional
+dnl feature of the ximagesrc plug-in
+dnl This is the same as in gst-plugins-base
+dnl check for X Shm
+translit(dnm, m, l) AM_CONDITIONAL(USE_XSHM, true)
+AG_GST_CHECK_FEATURE(XSHM, [X Shared Memory extension], , [
+ if test x$HAVE_X = xyes; then
+ AC_CHECK_LIB(Xext, XShmAttach,
+ HAVE_XSHM="yes", HAVE_XSHM="no",
+ $X_LIBS)
+ if test "x$HAVE_XSHM" = "xyes"; then
+ XSHM_LIBS="-lXext"
+ else
+ dnl On AIX, it is in XextSam instead, but we still need -lXext
+ AC_CHECK_LIB(XextSam, XShmAttach,
+ HAVE_XSHM="yes", HAVE_XSHM="no",
+ $X_LIBS)
+ if test "x$HAVE_XSHM" = "xyes"; then
+ XSHM_LIBS="-lXext -lXextSam"
+ fi
+ fi
+ fi
+], ,[
+ AC_SUBST(HAVE_XSHM)
+ AC_SUBST(XSHM_LIBS)
])
dnl *** ext plug-ins ***
AM_CONDITIONAL(USE_WAVEFORM, false)
AM_CONDITIONAL(USE_WAVPACK, false)
AM_CONDITIONAL(USE_X, false)
+AM_CONDITIONAL(USE_XSHM, false)
AM_CONDITIONAL(USE_ZLIB, false)
fi dnl of EXT plugins
pulsedeviceprovider.c \
pulseutil.c
-libgstpulseaudio_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(PULSE_CFLAGS)
+libgstpulseaudio_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(PULSE_CFLAGS) $(GIO_CFLAGS)
libgstpulseaudio_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_API_VERSION) \
-lgstpbutils-$(GST_API_VERSION) \
- $(GST_BASE_LIBS) $(GST_LIBS) $(PULSE_LIBS)
+ $(GST_BASE_LIBS) $(GST_LIBS) $(PULSE_LIBS) $(GIO_LIBS)
libgstpulseaudio_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+if PCM_DUMP_ENABLE
+libgstpulseaudio_la_CFLAGS += $(VCONF_CFLAGS) -DPCM_DUMP_ENABLE
+libgstpulseaudio_la_LIBADD += $(VCONF_LIBS)
+endif
+
noinst_HEADERS = \
pulsesink.h \
pulsesrc.h \
#include <gst/pbutils/pbutils.h> /* only used for GST_PLUGINS_BASE_VERSION_* */
#include <gst/glib-compat-private.h>
-
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <vconf.h>
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
#include "pulsesink.h"
#include "pulseutil.h"
#define DEFAULT_VOLUME 1.0
#define DEFAULT_MUTE FALSE
#define MAX_VOLUME 10.0
+#ifdef __TIZEN__
+#define DEFAULT_AUDIO_LATENCY "mid"
+#endif /* __TIZEN__ */
enum
{
PROP_MUTE,
PROP_CLIENT_NAME,
PROP_STREAM_PROPERTIES,
+#ifdef __TIZEN__
+ PROP_AUDIO_LATENCY,
+#endif /* __TIZEN__ */
PROP_LAST
};
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#define GST_PULSESINK_DUMP_VCONF_KEY "memory/private/sound/pcm_dump"
+#define GST_PULSESINK_DUMP_INPUT_PATH_PREFIX "/tmp/dump_pulsesink_in_"
+#define GST_PULSESINK_DUMP_OUTPUT_PATH_PREFIX "/tmp/dump_pulsesink_out_"
+#define GST_PULSESINK_DUMP_INPUT_FLAG 0x00000400
+#define GST_PULSESINK_DUMP_OUTPUT_FLAG 0x00000800
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
#define GST_TYPE_PULSERING_BUFFER \
(gst_pulseringbuffer_get_type())
#define GST_PULSERING_BUFFER(obj) \
static guint gst_pulseringbuffer_commit (GstAudioRingBuffer * buf,
guint64 * sample, guchar * data, gint in_samples, gint out_samples,
gint * accum);
+#ifdef __TIZEN__
+static gboolean gst_pulsering_set_corked (GstPulseRingBuffer * pbuf, gboolean corked,
+ gboolean wait);
+#endif
G_DEFINE_TYPE (GstPulseRingBuffer, gst_pulseringbuffer,
GST_TYPE_AUDIO_RING_BUFFER);
gst_pulsering_destroy_context (pbuf);
pa_threaded_mainloop_unlock (mainloop);
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (psink->dump_fd_input) {
+ fclose(psink->dump_fd_input);
+ psink->dump_fd_input = NULL;
+ }
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
GST_LOG_OBJECT (psink, "closed device");
return TRUE;
GST_ELEMENT_ERROR (psink, STREAM, FORMAT, ("Sink format changed"),
("Sink format changed"));
}
+#ifdef __TIZEN__
+ } else if (!strcmp (name, PA_STREAM_EVENT_POP_TIMEOUT)) {
+ GST_WARNING_OBJECT (psink, "got event [%s], cork stream now!!!!", name);
+ gst_pulsering_set_corked (pbuf, TRUE, FALSE);
+#endif
} else {
GST_DEBUG_OBJECT (psink, "got unknown event %s", name);
}
const pa_buffer_attr *actual;
pa_channel_map channel_map;
pa_operation *o = NULL;
+#ifndef __TIZEN__
pa_cvolume v;
+#endif
pa_cvolume *pv = NULL;
pa_stream_flags_t flags;
const gchar *name;
else
name = "Playback Stream";
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (psink->need_dump_input == TRUE && psink->dump_fd_input == NULL) {
+ char *suffix , *dump_path;
+ GDateTime *time = g_date_time_new_now_local();
+
+ suffix = g_date_time_format(time, "%m%d_%H%M%S");
+ dump_path = g_strdup_printf("%s%dch_%dhz_%s.pcm", GST_PULSESINK_DUMP_INPUT_PATH_PREFIX, pbuf->channels, spec->info.rate, suffix);
+ GST_WARNING_OBJECT(psink, "pulse-sink dumping enabled: dump path [%s]", dump_path);
+ psink->dump_fd_input = fopen(dump_path, "w+");
+
+ g_free(suffix);
+ g_free(dump_path);
+ g_date_time_unref(time);
+ }
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
/* create a stream */
formats[0] = pbuf->format;
if (!(pbuf->stream = pa_stream_new_extended (pbuf->context, name, formats, 1,
GST_INFO_OBJECT (psink, "prebuf: %d", wanted.prebuf);
GST_INFO_OBJECT (psink, "minreq: %d", wanted.minreq);
+#ifndef __TIZEN__
/* configure volume when we changed it, else we leave the default */
if (psink->volume_set) {
GST_LOG_OBJECT (psink, "have volume of %f", psink->volume);
} else {
pv = NULL;
}
+#endif
/* construct the flags */
flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
PA_STREAM_ADJUST_LATENCY | PA_STREAM_START_CORKED;
+#ifndef __TIZEN__
if (psink->mute_set) {
if (psink->mute)
flags |= PA_STREAM_START_MUTED;
else
flags |= PA_STREAM_START_UNMUTED;
}
+#endif
/* we always start corked (see flags above) */
pbuf->corked = TRUE;
GST_INFO_OBJECT (psink, "negotiated to: %s", print_buf);
#endif
+#ifdef __TIZEN__
+ {
+ uint32_t idx;
+ if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
+ goto no_index;
+ if (psink->volume_set)
+ gst_pulse_set_volume_ratio (idx, "out", psink->volume);
+ if (psink->mute_set)
+ if (psink->mute)
+ gst_pulse_set_volume_ratio (idx, "out", 0);
+ }
+#endif
/* After we passed the volume off of to PA we never want to set it
again, since it is PA's job to save/restore volumes. */
psink->volume_set = psink->mute_set = FALSE;
pa_strerror (pa_context_errno (pbuf->context))), (NULL));
goto unlock_and_fail;
}
+#ifdef __TIZEN__
+no_index:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("Failed to get stream index: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock_and_fail;
+ }
+#endif
}
/* free the stream that we acquired before */
if (pbuf->paused)
goto was_paused;
+#ifdef __TIZEN__
+ /* ensure running clock for whatever out there */
+ if (pbuf->corked) {
+ if (!gst_pulsering_set_corked (pbuf, FALSE, FALSE))
+ goto uncork_failed;
+ }
+#endif
/* offset is in bytes */
offset = *sample * bpf;
}
}
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+static GstPadProbeReturn
+gst_pulsesink_pad_dump_probe (GstPad * pad, GstPadProbeInfo * info, gpointer data)
+{
+  GstPulseSink *psink = GST_PULSESINK_CAST (data);
+  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
+  GstMapInfo in_map;
+  size_t written = 0;
+
+  /* Pad probe installed in _init() when PCM dumping is enabled via vconf:
+   * appends every incoming buffer to the dump file opened on device open. */
+  if (psink->dump_fd_input && buffer) {
+    /* gst_buffer_map() can fail (e.g. non-readable memory); bail out for
+     * this buffer instead of reading an uninitialized GstMapInfo. */
+    if (gst_buffer_map (buffer, &in_map, GST_MAP_READ)) {
+      written = fwrite (in_map.data, 1, in_map.size, psink->dump_fd_input);
+      if (written != in_map.size)
+        GST_WARNING ("failed to write!!! ferror=%d", ferror (psink->dump_fd_input));
+      gst_buffer_unmap (buffer, &in_map);
+    } else {
+      GST_WARNING ("failed to map buffer for dumping");
+    }
+  }
+  return GST_PAD_PROBE_OK;
+}
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
static void
gst_pulsesink_class_init (GstPulseSinkClass * klass)
{
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#ifdef __TIZEN__
+ g_object_class_install_property (gobject_class,
+ PROP_AUDIO_LATENCY,
+ g_param_spec_string ("latency", "Audio Backend Latency",
+ "Audio Backend Latency (\"low\": Low Latency, \"mid\": Mid Latency, \"high\": High Latency)",
+ DEFAULT_AUDIO_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* __TIZEN__ */
+
gst_element_class_set_static_metadata (gstelement_class,
"PulseAudio Audio Sink",
"Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
static void
gst_pulsesink_init (GstPulseSink * pulsesink)
{
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ GstPad *sinkpad = NULL;
+ int vconf_dump = 0;
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
pulsesink->server = NULL;
pulsesink->device = NULL;
pulsesink->device_info.description = NULL;
pulsesink->properties = NULL;
pulsesink->proplist = NULL;
+#ifdef __TIZEN__
+ pulsesink->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ pulsesink->proplist = pa_proplist_new();
+ pa_proplist_sets(pulsesink->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesink->latency);
+#ifdef PCM_DUMP_ENABLE
+ if (vconf_get_int(GST_PULSESINK_DUMP_VCONF_KEY, &vconf_dump)) {
+ GST_WARNING("vconf_get_int %s failed", GST_PULSESINK_DUMP_VCONF_KEY);
+ }
+ pulsesink->need_dump_input = vconf_dump & GST_PULSESINK_DUMP_INPUT_FLAG ? TRUE : FALSE;
+ pulsesink->dump_fd_input = NULL;
+ if (pulsesink->need_dump_input) {
+ sinkpad = gst_element_get_static_pad((GstElement *)pulsesink, "sink");
+ if (sinkpad) {
+ gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, gst_pulsesink_pad_dump_probe, pulsesink, NULL);
+ gst_object_unref (GST_OBJECT(sinkpad));
+ }
+ }
+#endif
+#endif /* __TIZEN__ */
/* override with a custom clock */
if (GST_AUDIO_BASE_SINK (pulsesink)->provided_clock)
if (pulsesink->proplist)
pa_proplist_free (pulsesink->proplist);
+#ifdef __TIZEN__
+ g_free (pulsesink->latency);
+#endif /* __TIZEN__ */
+
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_pulsesink_set_volume (GstPulseSink * psink, gdouble volume)
{
+#ifndef __TIZEN__
pa_cvolume v;
pa_operation *o = NULL;
+#endif
GstPulseRingBuffer *pbuf;
uint32_t idx;
+#ifndef __TIZEN__
if (!mainloop)
goto no_mainloop;
pa_threaded_mainloop_lock (mainloop);
+#endif
GST_DEBUG_OBJECT (psink, "setting volume to %f", volume);
if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
goto no_index;
+#ifndef __TIZEN__
if (pbuf->is_pcm)
gst_pulse_cvolume_from_linear (&v, pbuf->channels, volume);
else
&v, NULL, NULL)))
goto volume_failed;
+#else
+ if (!psink->mute)
+ gst_pulse_set_volume_ratio (idx, "out", volume);
+ psink->volume = volume;
+#endif
+
/* We don't really care about the result of this call */
unlock:
+#ifndef __TIZEN__
if (o)
pa_operation_unref (o);
pa_threaded_mainloop_unlock (mainloop);
+#endif
return;
/* ERRORS */
+#ifndef __TIZEN__
no_mainloop:
{
psink->volume = volume;
GST_DEBUG_OBJECT (psink, "we have no mainloop");
return;
}
+#endif
no_buffer:
{
psink->volume = volume;
GST_DEBUG_OBJECT (psink, "we don't have a stream index");
goto unlock;
}
+#ifndef __TIZEN__
volume_failed:
{
GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
pa_strerror (pa_context_errno (pbuf->context))), (NULL));
goto unlock;
}
+#endif
}
static void
gst_pulsesink_set_mute (GstPulseSink * psink, gboolean mute)
{
+#ifndef __TIZEN__
pa_operation *o = NULL;
+#endif
GstPulseRingBuffer *pbuf;
uint32_t idx;
+#ifndef __TIZEN__
if (!mainloop)
goto no_mainloop;
pa_threaded_mainloop_lock (mainloop);
+#endif
GST_DEBUG_OBJECT (psink, "setting mute state to %d", mute);
if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
goto no_index;
+#ifndef __TIZEN__
if (!(o = pa_context_set_sink_input_mute (pbuf->context, idx,
mute, NULL, NULL)))
goto mute_failed;
+#else
+ gst_pulse_set_volume_ratio (idx, "out", mute ? 0 : psink->volume);
+ psink->mute = mute;
+#endif
/* We don't really care about the result of this call */
unlock:
+#ifndef __TIZEN__
if (o)
pa_operation_unref (o);
pa_threaded_mainloop_unlock (mainloop);
+#endif
return;
/* ERRORS */
+#ifndef __TIZEN__
no_mainloop:
{
psink->mute = mute;
GST_DEBUG_OBJECT (psink, "we have no mainloop");
return;
}
+#endif
no_buffer:
{
psink->mute = mute;
GST_DEBUG_OBJECT (psink, "we don't have a stream index");
goto unlock;
}
+#ifndef __TIZEN__
mute_failed:
{
GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
pa_strerror (pa_context_errno (pbuf->context))), (NULL));
goto unlock;
}
+#endif
}
static void
pa_proplist_free (pulsesink->proplist);
pulsesink->proplist = gst_pulse_make_proplist (pulsesink->properties);
break;
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_free (pulsesink->latency);
+ pulsesink->latency = g_value_dup_string (value);
+ /* setting NULL restores the default latency */
+ if (pulsesink->latency == NULL) {
+ pulsesink->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ }
+ if (!pulsesink->proplist) {
+ pulsesink->proplist = pa_proplist_new();
+ }
+ pa_proplist_sets(pulsesink->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesink->latency);
+ GST_DEBUG_OBJECT(pulsesink, "latency(%s)", pulsesink->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
break;
case PROP_VOLUME:
{
+#ifndef __TIZEN__
gdouble volume;
gst_pulsesink_get_sink_input_info (pulsesink, &volume, NULL);
g_value_set_double (value, volume);
+#else
+ g_value_set_double (value, pulsesink->volume);
+#endif
break;
}
case PROP_MUTE:
{
+#ifndef __TIZEN__
gboolean mute;
gst_pulsesink_get_sink_input_info (pulsesink, NULL, &mute);
g_value_set_boolean (value, mute);
+#else
+ g_value_set_boolean (value, pulsesink->mute);
+#endif
break;
}
case PROP_CLIENT_NAME:
case PROP_STREAM_PROPERTIES:
gst_value_set_structure (value, pulsesink->properties);
break;
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_value_set_string (value, pulsesink->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
#include "config.h"
#endif
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <stdio.h>
+#endif
+
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiosink.h>
gint notify; /* atomic */
+#ifdef __TIZEN__
+ gchar *latency;
+#ifdef PCM_DUMP_ENABLE
+ gint need_dump_input;
+ FILE *dump_fd_input;
+#endif
+#endif /* __TIZEN__ */
+
const gchar *pa_version;
GstStructure *properties;
#include "pulsesrc.h"
#include "pulseutil.h"
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <vconf.h>
+#endif
+
GST_DEBUG_CATEGORY_EXTERN (pulse_debug);
#define GST_CAT_DEFAULT pulse_debug
#define DEFAULT_VOLUME 1.0
#define DEFAULT_MUTE FALSE
#define MAX_VOLUME 10.0
+#ifdef __TIZEN__
+#define DEFAULT_AUDIO_LATENCY "mid"
+#endif /* __TIZEN__ */
+/* See the pulsesink code for notes on how we interact with the PA mainloop
+ * thread. */
/* See the pulsesink code for notes on how we interact with the PA mainloop
* thread. */
PROP_SOURCE_OUTPUT_INDEX,
PROP_VOLUME,
PROP_MUTE,
+#ifdef __TIZEN__
+ PROP_AUDIO_LATENCY,
+#endif /* __TIZEN__ */
PROP_LAST
};
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#define GST_PULSESRC_DUMP_VCONF_KEY "memory/private/sound/pcm_dump"
+#define GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX "/tmp/dump_pulsesrc_out"
+#define GST_PULSESRC_DUMP_OUTPUT_FLAG 0x00200000U
+#endif
+
static void gst_pulsesrc_destroy_stream (GstPulseSrc * pulsesrc);
static void gst_pulsesrc_destroy_context (GstPulseSrc * pulsesrc);
PROP_MUTE, g_param_spec_boolean ("mute", "Mute",
"Mute state of this stream",
DEFAULT_MUTE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+#ifdef __TIZEN__
+ g_object_class_install_property (gobject_class,
+ PROP_AUDIO_LATENCY,
+ g_param_spec_string ("latency", "Audio Backend Latency",
+ "Audio Backend Latency (\"low\": Low Latency, \"mid\": Mid Latency, \"high\": High Latency)",
+ DEFAULT_AUDIO_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* __TIZEN__ */
}
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+static GstPadProbeReturn
+gst_pulsesrc_pad_dump_probe (GstPad *pad, GstPadProbeInfo * info, gpointer data)
+{
+  GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (data);
+  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
+  GstMapInfo in_map;
+  size_t written = 0;
+
+  /* Pad probe installed in _init() when PCM dumping is enabled via vconf:
+   * appends every outgoing buffer to the dump file opened in _prepare(). */
+  if (pulsesrc->dump_fd_output && buffer) {
+    /* gst_buffer_map() can fail (e.g. non-readable memory); bail out for
+     * this buffer instead of reading an uninitialized GstMapInfo. */
+    if (gst_buffer_map (buffer, &in_map, GST_MAP_READ)) {
+      written = fwrite (in_map.data, 1, in_map.size, pulsesrc->dump_fd_output);
+      if (written != in_map.size)
+        GST_WARNING ("failed to write!!! ferror=%d", ferror (pulsesrc->dump_fd_output));
+      gst_buffer_unmap (buffer, &in_map);
+    } else {
+      GST_WARNING ("failed to map buffer for dumping");
+    }
+  }
+  return GST_PAD_PROBE_OK;
+}
+#endif
+
static void
gst_pulsesrc_init (GstPulseSrc * pulsesrc)
{
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ GstPad *srcpad = NULL;
+ int vconf_dump = 0;
+#endif
pulsesrc->server = NULL;
pulsesrc->device = NULL;
pulsesrc->client_name = gst_pulse_client_name ();
pulsesrc->properties = NULL;
pulsesrc->proplist = NULL;
-
+#ifdef __TIZEN__
+ pulsesrc->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ pulsesrc->proplist = pa_proplist_new();
+ pa_proplist_sets(pulsesrc->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesrc->latency);
+
+#ifdef PCM_DUMP_ENABLE
+ if (vconf_get_int(GST_PULSESRC_DUMP_VCONF_KEY, &vconf_dump)) {
+ GST_WARNING("vconf_get_int %s failed", GST_PULSESRC_DUMP_VCONF_KEY);
+ }
+ pulsesrc->need_dump_output = vconf_dump & GST_PULSESRC_DUMP_OUTPUT_FLAG ? TRUE : FALSE;
+ pulsesrc->dump_fd_output = NULL;
+ if (pulsesrc->need_dump_output) {
+ srcpad = gst_element_get_static_pad((GstElement *)pulsesrc, "src");
+ gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER, gst_pulsesrc_pad_dump_probe, pulsesrc, NULL);
+ }
+#endif /* PCM_DUMP_ENABLE */
+#endif /* __TIZEN__ */
/* this should be the default but it isn't yet */
gst_audio_base_src_set_slave_method (GST_AUDIO_BASE_SRC (pulsesrc),
GST_AUDIO_BASE_SRC_SLAVE_SKEW);
if (pulsesrc->proplist)
pa_proplist_free (pulsesrc->proplist);
+#ifdef __TIZEN__
+ g_free (pulsesrc->latency);
+#endif /* __TIZEN__ */
+
G_OBJECT_CLASS (parent_class)->finalize (object);
}
case PROP_MUTE:
gst_pulsesrc_set_stream_mute (pulsesrc, g_value_get_boolean (value));
break;
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_free (pulsesrc->latency);
+ pulsesrc->latency = g_value_dup_string (value);
+ /* setting NULL restores the default latency */
+ if (pulsesrc->latency == NULL) {
+ pulsesrc->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ }
+ if (!pulsesrc->proplist) {
+ pulsesrc->proplist = pa_proplist_new();
+ }
+ pa_proplist_sets(pulsesrc->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesrc->latency);
+ GST_DEBUG_OBJECT(pulsesrc, "latency(%s)", pulsesrc->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
g_value_set_boolean (value, mute);
break;
}
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_value_set_string (value, pulsesrc->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
pa_threaded_mainloop_lock (pulsesrc->mainloop);
gst_pulsesrc_destroy_context (pulsesrc);
pa_threaded_mainloop_unlock (pulsesrc->mainloop);
-
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (pulsesrc->dump_fd_output) {
+ fclose(pulsesrc->dump_fd_output);
+ pulsesrc->dump_fd_output = NULL;
+ }
+#endif
return TRUE;
}
gst_pulsesrc_set_stream_volume (pulsesrc, pulsesrc->volume);
pulsesrc->volume_set = FALSE;
}
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (pulsesrc->need_dump_output) {
+ char *suffix , *dump_path;
+ GDateTime *time = NULL;
+ if (pulsesrc->dump_fd_output) {
+ fclose(pulsesrc->dump_fd_output);
+ pulsesrc->dump_fd_output = NULL;
+ }
+ time = g_date_time_new_now_local();
+ suffix = g_date_time_format(time, "%m%d_%H%M%S");
+ dump_path = g_strdup_printf("%s_%dch_%dhz_%s.pcm", GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX, pulsesrc->sample_spec.channels, pulsesrc->sample_spec.rate, suffix);
+ GST_WARNING_OBJECT(asrc,"pulse-source dumping enabled: dump path [%s]", dump_path);
+ pulsesrc->dump_fd_output = fopen(dump_path, "w+");
+
+ g_free(suffix);
+ g_free(dump_path);
+ g_date_time_unref(time);
+ }
+#endif
/* get the actual buffering properties now */
actual = pa_stream_get_buffer_attr (pulsesrc->stream);
#include <pulse/pulseaudio.h>
#include <pulse/thread-mainloop.h>
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <stdio.h>
+#endif
+
G_BEGIN_DECLS
#define GST_TYPE_PULSESRC \
gboolean paused:1;
gboolean in_read:1;
+#ifdef __TIZEN__
+ gchar *latency;
+#endif /* __TIZEN__ */
+
GstStructure *properties;
pa_proplist *proplist;
+
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ gint need_dump_output;
+ FILE *dump_fd_output;
+#endif
};
struct _GstPulseSrcClass
case G_TYPE_STRING:
pa_proplist_sets (p, prop_id, g_value_get_string (value));
break;
+#ifdef __TIZEN__
+ case G_TYPE_INT:
+ pa_proplist_setf (p, prop_id, "%d", g_value_get_int (value));
+ break;
+#endif
default:
GST_WARNING ("unmapped property type %s", G_VALUE_TYPE_NAME (value));
break;
return ret;
}
+#ifdef __TIZEN__
+#include <gio/gio.h>
+#define PA_BUS_NAME "org.pulseaudio.Server"
+#define PA_STREAM_MANAGER_OBJECT_PATH "/org/pulseaudio/StreamManager"
+#define PA_STREAM_MANAGER_INTERFACE "org.pulseaudio.StreamManager"
+#define PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO "SetVolumeRatio"
+/* Ask the PulseAudio stream manager (over the system D-Bus) to apply a
+ * volume ratio to the given stream. @direction is "in" or "out" as used
+ * by the stream manager; errors are logged and otherwise ignored since
+ * volume setting is best-effort. Blocks for at most 1000 ms. */
+void
+gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio)
+{
+  GDBusConnection *conn = NULL;
+  GError *err = NULL;
+  GVariant *result = NULL;
+  const gchar *dbus_ret = NULL;
+
+  conn = g_bus_get_sync (G_BUS_TYPE_SYSTEM, NULL, &err);
+  if (!conn || err) {
+    GST_ERROR ("g_bus_get_sync() error (%s)", err ? err->message : NULL);
+    if (err)
+      g_error_free (err);
+    /* do not leak a connection that was returned together with an error */
+    if (conn)
+      g_object_unref (conn);
+    return;
+  }
+
+  result = g_dbus_connection_call_sync (conn,
+      PA_BUS_NAME,
+      PA_STREAM_MANAGER_OBJECT_PATH,
+      PA_STREAM_MANAGER_INTERFACE,
+      PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO,
+      g_variant_new("(sud)", direction, stream_index, ratio),
+      G_VARIANT_TYPE("(s)"),
+      G_DBUS_CALL_FLAGS_NONE,
+      1000,
+      NULL,
+      &err);
+  if (!result || err) {
+    GST_ERROR ("g_dbus_connection_call_sync() for SET_VOLUME_RATIO error (%s)", err ? err->message : NULL);
+    if (err)
+      g_error_free (err);
+    goto finish;
+  }
+  g_variant_get (result, "(&s)", &dbus_ret);
+  GST_DEBUG ("SET_VOLUME_RATIO returns value(%s) for stream index(%u), ratio(%f)", dbus_ret, stream_index, ratio);
+
+finish:
+  /* result is NULL when the call failed; g_variant_unref(NULL) would emit
+   * a GLib critical, so guard it. */
+  if (result)
+    g_variant_unref (result);
+  g_object_unref (conn);
+
+  return;
+}
+#endif
+
GstCaps *
gst_pulse_fix_pcm_caps (GstCaps * incaps)
{
GstStructure *gst_pulse_make_structure (pa_proplist *properties);
GstCaps * gst_pulse_format_info_to_caps (pa_format_info * format);
+
+#ifdef __TIZEN__
+void gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio);
+#endif
GstCaps * gst_pulse_fix_pcm_caps (GstCaps * incaps);
#endif
#define REDUCE_BLOCKSIZE_FACTOR 0.5
#define GROW_TIME_LIMIT (1 * GST_SECOND)
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+#define DLNA_OP_TIMED_SEEK 0x02
+#define DLNA_OP_BYTE_SEEK 0x01
+#endif
+
static void gst_soup_http_src_uri_handler_init (gpointer g_iface,
gpointer iface_data);
static void gst_soup_http_src_finalize (GObject * gobject);
src->increase_blocksize_count = 0;
src->last_socket_read_time = 0;
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (src->dash_oldest_segment) {
+ g_free (src->dash_oldest_segment);
+ src->dash_oldest_segment = NULL;
+ }
+ if (src->dash_newest_segment) {
+ g_free (src->dash_newest_segment);
+ src->dash_newest_segment = NULL;
+ }
+ src->dlna_opt = 0;
+#endif
+
g_cancellable_reset (src->cancellable);
g_mutex_lock (&src->mutex);
if (src->input_stream) {
src->max_retries = DEFAULT_RETRIES;
src->method = DEFAULT_SOUP_METHOD;
src->minimum_blocksize = gst_base_src_get_blocksize (GST_BASE_SRC_CAST (src));
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ src->dash_oldest_segment = NULL;
+ src->dash_newest_segment = NULL;
+ src->received_total = 0;
+ src->dlna_opt = 0;
+#endif
proxy = g_getenv ("http_proxy");
if (!gst_soup_http_src_set_proxy (src, proxy)) {
GST_WARNING_OBJECT (src,
"The proxy in the http_proxy env var (\"%s\") cannot be parsed.",
proxy);
}
-
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ src->cookie_jar = NULL;
+#endif
gst_base_src_set_automatic_eos (GST_BASE_SRC (src), FALSE);
gst_soup_http_src_reset (src);
break;
}
case PROP_COOKIES:
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ {
+ char **array;
+ SoupURI *base_uri;
+ g_strfreev (src->cookies);
+ src->cookies = g_strdupv (g_value_get_boxed (value));
+
+ if (src->cookie_jar && ((array = src->cookies) != NULL)) {
+ base_uri = soup_uri_new (src->location);
+ GST_INFO_OBJECT (src, "request to set cookies...");
+ while (*array != NULL) {
+ soup_cookie_jar_add_cookie (src->cookie_jar,
+ soup_cookie_parse (*array++, base_uri));
+ }
+ soup_uri_free (base_uri);
+ } else {
+ GST_INFO_OBJECT (src, "set cookies after session creation");
+ }
+ }
+#else
g_strfreev (src->cookies);
src->cookies = g_strdupv (g_value_get_boxed (value));
+#endif
break;
case PROP_IS_LIVE:
gst_base_src_set_live (GST_BASE_SRC (src), g_value_get_boolean (value));
}
break;
case PROP_COOKIES:
- g_value_set_boxed (value, g_strdupv (src->cookies));
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ {
+ GSList *cookie_list, *c;
+ gchar **cookies, **array;
+
+ cookies = NULL;
+ if ((src->cookie_jar) &&
+ ((cookie_list = soup_cookie_jar_all_cookies (src->cookie_jar)) != NULL)) {
+ cookies = g_new0 (gchar *, g_slist_length(cookie_list) + 1);
+ array = cookies;
+ for (c = cookie_list; c; c = c->next) {
+ *array++ = soup_cookie_to_set_cookie_header ((SoupCookie *)(c->data));
+ }
+ soup_cookies_free (cookie_list);
+ }
+ g_value_set_boxed (value, cookies);
+ }
+#else
+ g_value_set_boxed (value, g_strdupv (src->cookies));
+#endif
break;
case PROP_IS_LIVE:
g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (src)));
gint rc;
soup_message_headers_remove (src->msg->request_headers, "Range");
- if (offset || stop_offset != -1) {
+
+/* These changes are needed to enable seekable contents from the server.
+   We have observed that, for a few specific networks (VODAFONE), without the above headers,
+   YouTube sends non-seekable contents to the client. */
+#ifndef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (offset || stop_offset != -1)
+#endif
+ {
if (stop_offset != -1) {
g_assert (offset != stop_offset);
soup_message_headers_append (src->msg->request_headers, field_name,
field_content);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (!g_ascii_strcasecmp(field_name, "Cookie")) {
+ SoupURI *uri = NULL;
+ SoupCookie *cookie_parsed = NULL;
+ gchar *saveptr = NULL;
+
+ if (strlen(field_content) > 0) {
+ gchar *tmp_field = NULL;
+
+ uri = soup_uri_new (src->location);
+
+ tmp_field = strtok_r (field_content, ";", &saveptr);
+
+ while (tmp_field != NULL) {
+ GST_DEBUG_OBJECT (src, "field_content = %s", tmp_field);
+
+ cookie_parsed = soup_cookie_parse(tmp_field, uri);
+ GST_DEBUG_OBJECT (src, "cookie parsed = %p", cookie_parsed);
+
+ if (src->cookie_jar)
+ soup_cookie_jar_add_cookie (src->cookie_jar, cookie_parsed);
+
+ tmp_field = strtok_r (NULL, ";", &saveptr);
+ }
+ soup_uri_free (uri);
+ }
+ }
+#endif
+
g_free (field_content);
return TRUE;
return FALSE;
}
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ {
+ char **array = NULL;
+ SoupURI *base_uri;
+ SoupCookie *soup_cookie = NULL;
+
+ soup_session_add_feature_by_type (src->session, SOUP_TYPE_COOKIE_JAR);
+ src->cookie_jar = SOUP_COOKIE_JAR (soup_session_get_feature (src->session, SOUP_TYPE_COOKIE_JAR));
+ if ((array = src->cookies) != NULL) {
+ base_uri = soup_uri_new (src->location);
+ while (*array != NULL) {
+ soup_cookie = soup_cookie_parse (*array++, base_uri);
+ if (soup_cookie != NULL) {
+ GST_INFO_OBJECT (src, "adding cookies..");
+ soup_cookie_jar_add_cookie (src->cookie_jar, soup_cookie);
+ }
+ }
+ soup_uri_free (base_uri);
+ }
+ }
+#endif
+
g_signal_connect (src->session, "authenticate",
G_CALLBACK (gst_soup_http_src_authenticate_cb), src);
if (src->session) {
if (!src->session_is_shared)
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+/* When playback is ongoing and the browser is moved to the background (pressing the Menu or Home key), the session gets destroyed,
+   but the cookie_jar pointer is not cleared. This leaves a dangling pointer and causes a crash.
+   Remove the cookie jar feature when the browser closes the session to handle the issue. */
+ {
+ GST_DEBUG_OBJECT (src, "Removing Cookie Jar instance");
+ soup_session_remove_feature_by_type(src->session, SOUP_TYPE_COOKIE_JAR);
+ src->cookie_jar = NULL;
+ soup_session_abort (src->session);
+ }
+#else
soup_session_abort (src->session);
+#endif
g_signal_handlers_disconnect_by_func (src->session,
G_CALLBACK (gst_soup_http_src_authenticate_cb), src);
g_object_unref (src->session);
}
}
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+/* Called for every response header (via soup_message_headers_foreach).
+ * Mirrors Set-Cookie headers into the cookie jar and posts them to the
+ * application as an element message; also captures the Tizen DASH
+ * oldest/newest segment hints. */
+static void
+gst_soup_http_src_headers_foreach (const gchar * name, const gchar * val,
+    gpointer src)
+{
+  GST_INFO_OBJECT (src, " %s: %s", name, val);
+
+  if (g_ascii_strcasecmp (name, "Set-Cookie") == 0) {
+    if (val) {
+      gboolean bret = FALSE;
+      GstStructure *s = NULL;
+      GstSoupHTTPSrc * tmp = src;
+      SoupURI *uri;
+
+      uri = soup_uri_new (tmp->location);
+
+      /* post the updated cookie & url to the application */
+      s = gst_structure_new ("cookies",
+          "updated-cookie", G_TYPE_STRING, val,
+          "updated-url", G_TYPE_STRING, tmp->location, NULL);
+      bret = gst_element_post_message (GST_ELEMENT_CAST (src), gst_message_new_element (GST_OBJECT_CAST (src), s));
+      /* the jar is NULL until the session is created and after it is torn
+       * down; soup_cookie_jar_set_cookie() must not be called with NULL */
+      if (tmp->cookie_jar)
+        soup_cookie_jar_set_cookie (tmp->cookie_jar, uri, val);
+      soup_uri_free (uri);
+
+      GST_INFO_OBJECT (src, "request url [%s], posted cookies [%s] msg and returned = %d", tmp->location, val, bret);
+    }
+  } else if (g_ascii_strcasecmp (name, "Dash-Oldest-Segment") == 0) {
+    if (val) {
+      GstSoupHTTPSrc * tmp = src;
+      /* free any previous value so repeated responses do not leak it */
+      g_free (tmp->dash_oldest_segment);
+      tmp->dash_oldest_segment = g_strdup (val);
+      GST_INFO_OBJECT (src, "Dash-Oldest-Segment set as %s ", tmp->dash_oldest_segment);
+    }
+  } else if (g_ascii_strcasecmp (name, "Dash-Newest-Segment") == 0) {
+    if (val) {
+      GstSoupHTTPSrc * tmp = src;
+      g_free (tmp->dash_newest_segment);
+      tmp->dash_newest_segment = g_strdup (val);
+      GST_INFO_OBJECT (src, "Dash-Newest-Segment set as %s ", tmp->dash_newest_segment);
+    }
+  }
+}
+#endif
+
static GstFlowReturn
gst_soup_http_src_got_headers (GstSoupHTTPSrc * src, SoupMessage * msg)
{
GstEvent *http_headers_event;
GstStructure *http_headers, *headers;
const gchar *accept_ranges;
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ gint64 start = 0, stop = 0, total = 0;
+#endif
- GST_INFO_OBJECT (src, "got headers");
-
+ GST_INFO_OBJECT (src, "got headers : %d", msg->status_code);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ soup_message_headers_foreach (msg->response_headers,
+ gst_soup_http_src_headers_foreach, src);
+#endif
if (msg->status_code == SOUP_STATUS_PROXY_AUTHENTICATION_REQUIRED &&
src->proxy_id && src->proxy_pw) {
/* wait for authenticate callback */
gst_event_replace (&src->http_headers_event, http_headers_event);
gst_event_unref (http_headers_event);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ /* Parse DLNA OP CODE */
+ if ((value = soup_message_headers_get_one
+ (msg->response_headers, "contentFeatures.dlna.org")) != NULL) {
+ gchar **token = NULL;
+ gchar **ptr = NULL;
+
+ GST_DEBUG_OBJECT (src, "DLNA server response");
+
+ token = g_strsplit (value, ";", 0);
+ for (ptr = token ; *ptr ; ptr++) {
+ gchar *tmp = NULL;
+ gchar *op_code = NULL;
+
+ if (!strlen (*ptr))
+ continue;
+
+ tmp = g_ascii_strup (*ptr, strlen (*ptr));
+ if (!strstr (tmp, "DLNA.ORG_OP")) {
+ g_free (tmp);
+ continue;
+ }
+
+ g_free (tmp);
+
+ op_code = strchr (*ptr, '=');
+ if (op_code) {
+ op_code++;
+
+ src->dlna_opt = (atoi (op_code) / 10 << 1) | (atoi (op_code) % 10);
+ GST_DEBUG_OBJECT (src, "dlna op code: %s (0x%X)", op_code, src->dlna_opt);
+ break;
+ }
+ }
+ g_strfreev (token);
+ }
+#endif
+
/* Parse Content-Length. */
if (soup_message_headers_get_encoding (msg->response_headers) ==
SOUP_ENCODING_CONTENT_LENGTH) {
- newsize = src->request_position +
- soup_message_headers_get_content_length (msg->response_headers);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (msg->status_code == SOUP_STATUS_PARTIAL_CONTENT) {
+ newsize = src->request_position +
+ soup_message_headers_get_content_length (msg->response_headers);
+ } else {
+ if (soup_message_headers_get_content_range(msg->response_headers, &start, &stop, &total) && (total > 0)) {
+ GST_DEBUG_OBJECT (src, "get range header : %" G_GINT64_FORMAT
+ "~%" G_GINT64_FORMAT"/%"G_GINT64_FORMAT, start, stop, total);
+ newsize = (guint64)total;
+ } else {
+ if ((src->have_size) && (src->content_size <= src->request_position)) {
+ newsize = src->content_size;
+ } else {
+ newsize = soup_message_headers_get_content_length (msg->response_headers);
+ }
+ }
+ }
+#else
+ newsize = src->request_position +
+ soup_message_headers_get_content_length (msg->response_headers);
+#endif
if (!src->have_size || (src->content_size != newsize)) {
src->content_size = newsize;
src->have_size = TRUE;
if (g_ascii_strcasecmp (accept_ranges, "none") == 0)
src->seekable = FALSE;
}
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ else if (src->dlna_opt & DLNA_OP_BYTE_SEEK) {
+ if (src->have_size) {
+ GST_DEBUG_OBJECT (src, "DLNA server is seekable");
+ src->seekable = TRUE;
+ }
+ }
+ /* The Range request header is always included.
+ * @ref gst_soup_http_src_add_range_header() */
+ else if ((msg->status_code == SOUP_STATUS_OK) &&
+ (soup_message_headers_get_content_range (msg->response_headers, &start, &stop, &total) == FALSE)) {
+ GST_DEBUG_OBJECT (src, "there is no accept range header");
+ src->seekable = FALSE;
+ }
+#endif
/* Icecast stuff */
tag_list = gst_tag_list_new_empty ();
/* when content_size is unknown and we have just finished receiving
* a body message, requests that go beyond the content limits will result
* in an error. Here we convert those to EOS */
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (msg->status_code == SOUP_STATUS_REQUESTED_RANGE_NOT_SATISFIABLE &&
+ ((src->have_body && !src->have_size) ||
+ (src->have_size && src->request_position >= src->content_size))) {
+ GST_DEBUG_OBJECT (src, "Requested range out of limits and received full "
+ "body, returning EOS");
+ return GST_FLOW_EOS;
+ }
+#else
if (msg->status_code == SOUP_STATUS_REQUESTED_RANGE_NOT_SATISFIABLE &&
src->have_body && !src->have_size) {
GST_DEBUG_OBJECT (src, "Requested range out of limits and received full "
"body, returning EOS");
return GST_FLOW_EOS;
}
+#endif
/* FIXME: reason_phrase is not translated and not suitable for user
* error dialog according to libsoup documentation.
soup_message_headers_append (src->msg->request_headers, "icy-metadata",
"1");
}
+
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+/* These changes are needed to receive seekable contents from the server.
+   We have observed that, for a few specific networks (VODAFONE), without the above headers,
+   YouTube sends non-seekable contents to the client. */
+ soup_message_headers_append (src->msg->request_headers, "Accept-Ranges","bytes");
+
+ if (src->cookie_jar) {
+ GSList *cookie_list, *c;
+ gchar *header;
+
+ SoupURI *uri = NULL;
+ SoupCookie *cookie;
+ uri = soup_uri_new (src->location);
+
+ if ((cookie_list = soup_cookie_jar_all_cookies (src->cookie_jar)) != NULL) {
+ for (c = cookie_list; c; c = c->next) {
+ cookie = (SoupCookie *)c->data;
+ if (soup_cookie_applies_to_uri(cookie, uri)) {
+ header = soup_cookie_to_cookie_header (cookie);
+ soup_message_headers_append (src->msg->request_headers, "Cookie", header);
+ g_free (header);
+ }
+ }
+ }
+ soup_cookies_free (cookie_list);
+ soup_uri_free (uri);
+ }
+#else
if (src->cookies) {
gchar **cookie;
*cookie);
}
}
+#endif
if (!src->compress)
soup_message_disable_feature (src->msg, SOUP_TYPE_CONTENT_DECODER);
gst_soup_http_src_add_extra_headers (src);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ soup_message_headers_foreach (src->msg->request_headers,
+ gst_soup_http_src_headers_foreach, src);
+#endif
+
return TRUE;
}
ret = gst_soup_http_src_send_message (src);
/* Check if Range header was respected. */
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (ret == GST_FLOW_OK && src->request_position > 0 &&
+ (src->msg->status_code != SOUP_STATUS_PARTIAL_CONTENT) &&
+ (src->request_position < src->content_size)) {
+#else
if (ret == GST_FLOW_OK && src->request_position > 0 &&
src->msg->status_code != SOUP_STATUS_PARTIAL_CONTENT) {
+#endif
src->seekable = FALSE;
GST_ELEMENT_ERROR_WITH_DETAILS (src, RESOURCE, SEEK,
(_("Server does not support seeking.")),
GST_BUFFER_OFFSET (*outbuf) = bsrc->segment.position;
ret = GST_FLOW_OK;
gst_soup_http_src_update_position (src, read_bytes);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ src->received_total += read_bytes;
+#endif
/* Got some data, reset retry counter */
src->retry_count = 0;
GST_DEBUG_OBJECT (src, "start(\"%s\")", src->location);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (src->dash_oldest_segment) {
+ g_free (src->dash_oldest_segment);
+ src->dash_oldest_segment = NULL;
+ }
+ if (src->dash_newest_segment) {
+ g_free (src->dash_newest_segment);
+ src->dash_newest_segment = NULL;
+ }
+#endif
return gst_soup_http_src_session_open (src);
}
src = GST_SOUP_HTTP_SRC (element);
switch (transition) {
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ GST_WARNING_OBJECT (src, "Last read pos"
+ ": %" G_GINT64_FORMAT ", received total : %" G_GINT64_FORMAT,
+ src->read_position, src->received_total);
+ break;
+#endif
case GST_STATE_CHANGE_READY_TO_NULL:
gst_soup_http_src_session_close (src);
break;
case GST_QUERY_SCHEDULING:
gst_query_parse_scheduling (query, &flags, &minsize, &maxsize, &align);
flags |= GST_SCHEDULING_FLAG_BANDWIDTH_LIMITED;
+
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (gst_soup_http_src_is_seekable(bsrc)) {
+ GST_DEBUG_OBJECT (src, "set seekable flag");
+ flags |= GST_SCHEDULING_FLAG_SEEKABLE;
+ }
+#endif
gst_query_set_scheduling (query, flags, minsize, maxsize, align);
break;
default:
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more
+ * Library General Public License for more
*/
#ifndef __GST_SOUP_HTTP_SRC_H__
gchar *iradio_url;
GstStructure *extra_headers;
-
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ SoupCookieJar *cookie_jar;
+#endif
SoupLoggerLogLevel log_level;/* Soup HTTP session logger level */
gboolean compress;
GstEvent *http_headers_event;
gint64 last_socket_read_time;
+
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ gchar *dash_oldest_segment;
+ gchar *dash_newest_segment;
+ guint64 received_total; /* temp: for debugging */
+ guint dlna_opt; /* DLNA server option */
+#endif
};
struct _GstSoupHTTPSrcClass {
--- /dev/null
+<manifest>
+ <!-- NOTE(review): presumably a Tizen security manifest requesting the
+      default "_" domain for this package — confirm against platform docs -->
+ <request>
+ <domain name="_"/>
+ </request>
+</manifest>
headers prepended during raw to ADTS
conversion */
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION /* to get more accurate duration */
+#define AAC_MAX_ESTIMATE_DURATION_BUF (1024 * 1024) /* use first 1 Mbyte */
+#define AAC_SAMPLE_PER_FRAME 1024
+
+#define AAC_MAX_PULL_RANGE_BUF (1 * 1024 * 1024) /* 1 MByte */
+#define AAC_LARGE_FILE_SIZE (2 * 1024 * 1024) /* 2 MByte */
+#define gst_aac_parse_parent_class parent_class
+#endif
+
#define AAC_FRAME_DURATION(parse) (GST_SECOND/parse->frames_per_sec)
static const gint loas_sample_rate_table[16] = {
aacparse, GstBitReader * br, gint * object_type, gint * sample_rate,
gint * channels, gint * frame_samples);
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+static guint gst_aac_parse_adts_get_fast_frame_len (const guint8 * data);
+/* make full aac(adts) index table when seek */
+static gboolean gst_aac_parse_adts_src_eventfunc (GstBaseParse * parse,
+ GstEvent * event);
+int get_aac_parse_get_adts_frame_length (const unsigned char *data,
+ gint64 offset);
+static gboolean gst_aac_parse_estimate_duration (GstBaseParse * parse);
+#endif
#define gst_aac_parse_parent_class parent_class
G_DEFINE_TYPE (GstAacParse, gst_aac_parse, GST_TYPE_BASE_PARSE);
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+/* Map an ADTS sampling-frequency index to its rate in Hz.
+ * Logs a warning and returns 0 for indices outside the table. */
+static inline gint
+gst_aac_parse_get_sample_rate_from_index (guint sr_idx)
+{
+  static const guint rate_table[] = {
+    96000, 88200, 64000, 48000, 44100, 32000,
+    24000, 22050, 16000, 12000, 11025, 8000
+  };
+
+  if (sr_idx >= G_N_ELEMENTS (rate_table)) {
+    GST_WARNING ("Invalid sample rate index %u", sr_idx);
+    return 0;
+  }
+  return rate_table[sr_idx];
+}
+#endif
/**
* gst_aac_parse_class_init:
* @klass: #GstAacParseClass.
aacparse->last_parsed_sample_rate = 0;
aacparse->last_parsed_channels = 0;
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ /* to get more correct duration */
+ aacparse->first_frame = TRUE;
+#endif
}
gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse),
aacparse->sample_rate, aacparse->frame_samples, 2, 2);
}
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ if (aacparse->first_frame == TRUE) {
+ gboolean ret = FALSE;
+ aacparse->first_frame = FALSE;
+
+ ret = gst_aac_parse_estimate_duration (parse);
+ if (!ret) {
+ GST_WARNING_OBJECT (aacparse, "can not estimate total duration");
+ ret = GST_FLOW_NOT_SUPPORTED;
+ }
+ }
+#endif
} else if (aacparse->header_type == DSPAAC_HEADER_LOAS) {
gboolean setcaps = FALSE;
aacparse->sample_rate, aacparse->frame_samples, 2, 2);
}
}
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ else if (aacparse->header_type == DSPAAC_HEADER_ADIF) {
+ /* to get more correct duration */
+ float estimated_duration = 0;
+ gint64 total_file_size;
+ gst_base_parse_get_upstream_size (parse, &total_file_size);
+ estimated_duration =
+ ((total_file_size * 8) / (float) (aacparse->bitrate * 1000)) *
+ GST_SECOND;
+ gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
+ estimated_duration * 1000, 0);
+ }
+#endif
if (aacparse->header_type == DSPAAC_HEADER_NONE
&& aacparse->output_header_type == DSPAAC_HEADER_ADTS) {
aacparse->last_parsed_channels = 0;
aacparse->last_parsed_sample_rate = 0;
}
-
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ GST_DEBUG ("Entering gst_aac_parse_src_event header type = %d",
+ aacparse->header_type);
+ if (aacparse->header_type == DSPAAC_HEADER_ADTS)
+ return gst_aac_parse_adts_src_eventfunc (parse, event);
+#endif
return GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+
+}
+
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+/**
+ * get_aac_parse_get_adts_frame_length:
+ * @data: buffer data
+ * @offset: offset into @data where an ADTS header is expected
+ *
+ * Parse the frame length out of the ADTS header at @offset.
+ *
+ * Returns: the frame size in bytes, 0 when the header size exceeds the
+ * frame size, or -1 when the syncword does not match.
+ */
+int
+get_aac_parse_get_adts_frame_length (const unsigned char *data, gint64 offset)
+{
+  const gint adts_header_length_no_crc = 7;
+  const gint adts_header_length_with_crc = 9;
+  gint frame_size = 0;
+  gint protection_absent;
+  gint head_size;
+
+  /* check the syncword */
+  if ((data[offset + 0] != 0xff) || ((data[offset + 1] & 0xf6) != 0xf0)) {
+    GST_ERROR ("syncword check failed");
+    return -1;
+  }
+
+  /* protection_absent == 1 means no CRC words follow the header */
+  protection_absent = (data[offset + 1] & 0x01);
+
+  /* 13-bit frame length spread over bytes 3..5 */
+  frame_size =
+      (data[offset + 3] & 0x3) << 11 | data[offset + 4] << 3 | data[offset +
+      5] >> 5;
+
+  /* header size depends on CRC presence */
+  head_size =
+      protection_absent ? adts_header_length_no_crc :
+      adts_header_length_with_crc;
+  if (head_size > frame_size) {
+    GST_ERROR ("return frame length as 0 (frameSize %d < headSize %d)",
+        frame_size, head_size);
+    return 0;
+  }
+
+  return frame_size;
+}
+
+/**
+ * gst_aac_parse_estimate_duration:
+ * @parse: #GstBaseParse.
+ *
+ * Estimate the total duration from the first part of the file (pull
+ * mode only).  When the whole file fits in the probe buffer the
+ * duration is computed exactly from the frame count; otherwise it is
+ * extrapolated from the average bitrate of the probed frames.
+ *
+ * Returns: TRUE if a duration (exact or estimated) could be derived.
+ */
+static gboolean
+gst_aac_parse_estimate_duration (GstBaseParse * parse)
+{
+  gboolean ret = FALSE;
+  GstFlowReturn res = GST_FLOW_OK;
+  gint64 pull_size = 0, file_size = 0, offset = 0, num_frames = 0, duration = 0;
+  guint sample_rate_index = 0, sample_rate = 0, channel = 0;
+  gint frame_size = 0;          /* signed: the helper returns -1 on lost sync */
+  guint frame_duration_us = 0, estimated_bitrate = 0;
+  guint lost_sync_count = 0;
+  GstClockTime estimated_duration = GST_CLOCK_TIME_NONE;
+  GstBuffer *buffer = NULL;
+  guint8 *buf = NULL;
+  gint64 i = 0;
+  GstPadMode pad_mode = GST_PAD_MODE_NONE;
+  GstAacParse *aacparse;
+  gint64 buffer_size = 0;
+  GstMapInfo map;
+
+  aacparse = GST_AAC_PARSE (parse);
+  GST_LOG_OBJECT (aacparse, "gst_aac_parse_estimate_duration enter");
+
+  /* probing requires random access, i.e. pull mode */
+  gst_base_parse_get_pad_mode (parse, &pad_mode);
+  if (pad_mode != GST_PAD_MODE_PULL) {
+    GST_INFO_OBJECT (aacparse,
+        "aac parser is not pull mode. can not estimate duration");
+    return FALSE;
+  }
+
+  gst_base_parse_get_upstream_size (parse, &file_size);
+
+  if (file_size < ADIF_MAX_SIZE) {
+    GST_ERROR_OBJECT (aacparse, "file size is too short");
+    return FALSE;
+  }
+
+  pull_size = MIN (file_size, AAC_MAX_ESTIMATE_DURATION_BUF);
+
+  res = gst_pad_pull_range (parse->sinkpad, 0, pull_size, &buffer);
+  if (res != GST_FLOW_OK) {
+    GST_ERROR_OBJECT (aacparse, "gst_pad_pull_range failed!");
+    return FALSE;
+  }
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  buf = map.data;
+  buffer_size = map.size;
+  if (buffer_size != pull_size) {
+    GST_ERROR_OBJECT (aacparse,
+        "We got different buffer_size(%" G_GINT64_FORMAT ") with pull_size(%"
+        G_GINT64_FORMAT ").", buffer_size, pull_size);
+  }
+
+  /* scan for the first ADTS syncword; stop 3 bytes short of the end so
+   * the header bytes read below never run past the mapped buffer */
+  for (i = 0; i + 3 < buffer_size; i++) {
+    if ((buf[i] == 0xff) && ((buf[i + 1] & 0xf6) == 0xf0)) {    /* aac sync word */
+      sample_rate_index = (buf[i + 2] >> 2) & 0xf;
+      sample_rate =
+          gst_aac_parse_get_sample_rate_from_index (sample_rate_index);
+      if (sample_rate == 0) {
+        GST_WARNING_OBJECT (aacparse, "Invalid sample rate index (0)");
+        goto EXIT;
+      }
+      channel = (buf[i + 2] & 0x1) << 2 | (buf[i + 3] >> 6);
+
+      GST_INFO_OBJECT (aacparse, "found sync. aac fs=%d, ch=%d", sample_rate,
+          channel);
+
+      /* count frames; keep a full 7-byte ADTS header inside the buffer
+       * (real buffer_size may differ from pull_size — use buffer_size) */
+      while (i + offset + 7 <= buffer_size) {
+        frame_size = get_aac_parse_get_adts_frame_length (buf, i + offset);
+        if (frame_size == 0) {
+          GST_ERROR_OBJECT (aacparse,
+              "framesize error at offset %" G_GINT64_FORMAT, offset);
+          break;
+        } else if (frame_size == -1) {
+          offset++;
+          lost_sync_count++;    /* lost sync count limitation: 2K bytes */
+          if (lost_sync_count > (1024 * 2)) {
+            GST_WARNING_OBJECT (aacparse,
+                "lost_sync_count is larger than 2048");
+            goto EXIT;
+          }
+        } else {
+          offset += frame_size;
+          num_frames++;
+          lost_sync_count = 0;
+        }
+      }                         /* while */
+
+      /* if we got the whole file we can compute the exact duration */
+      if (buffer_size == file_size) {
+        gfloat duration_for_one_frame = 0;
+        GstClockTime calculated_duration = GST_CLOCK_TIME_NONE;
+
+        GST_INFO_OBJECT (aacparse,
+            "we got total file (%" G_GINT64_FORMAT
+            " bytes). do not estimate but make Accurate total duration.",
+            pull_size);
+
+        duration_for_one_frame =
+            (gfloat) AAC_SAMPLE_PER_FRAME / (gfloat) sample_rate;
+        calculated_duration =
+            num_frames * duration_for_one_frame * 1000 * 1000 * 1000;
+
+        GST_INFO_OBJECT (aacparse, "duration_for_one_frame %f ms",
+            duration_for_one_frame);
+        GST_INFO_OBJECT (aacparse, "calculated duration = %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (calculated_duration));
+        /* 0 means disable estimate */
+        gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
+            calculated_duration, 0);
+
+      } else {
+        GST_INFO_OBJECT (aacparse,
+            "we got %" G_GINT64_FORMAT " bytes in total file (%"
+            G_GINT64_FORMAT "). can not make accurate duration but Estimate.",
+            pull_size, file_size);
+        frame_duration_us =
+            (1024 * 1000000ll + (sample_rate - 1)) / sample_rate;
+        duration = num_frames * frame_duration_us;
+
+        if (duration == 0) {
+          GST_WARNING_OBJECT (aacparse, "Invalid duration");
+          goto EXIT;
+        }
+        estimated_bitrate =
+            (gint) ((gfloat) (offset * 8) / (gfloat) (duration / 1000));
+
+        if (estimated_bitrate == 0) {
+          GST_WARNING_OBJECT (aacparse, "Invalid estimated_bitrate");
+          goto EXIT;
+        }
+        estimated_duration =
+            (GstClockTime) ((file_size * 8) / (estimated_bitrate * 1000)) *
+            GST_SECOND;
+
+        GST_INFO_OBJECT (aacparse, "number of frame = %" G_GINT64_FORMAT,
+            num_frames);
+        GST_INFO_OBJECT (aacparse, "duration = %" G_GINT64_FORMAT,
+            duration / 1000000);
+        GST_INFO_OBJECT (aacparse, "byte = %" G_GINT64_FORMAT, offset);
+        GST_INFO_OBJECT (aacparse, "estimated bitrate = %d bps",
+            estimated_bitrate);
+        GST_INFO_OBJECT (aacparse, "estimated duration = %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (estimated_duration));
+
+        gst_base_parse_set_average_bitrate (parse, estimated_bitrate * 1000);
+        /* set update_interval as duration(sec)/2 */
+        gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
+            estimated_duration, (gint) (duration / 2));
+      }
+
+      break;
+    }
+  }
+  ret = TRUE;
+
+EXIT:
+  gst_buffer_unmap (buffer, &map);
+  gst_buffer_unref (buffer);
+  return ret;
+}
+
+
+/* Perform seek in push-based mode:
+ * estimate the BYTE position corresponding to the requested time and
+ * delegate the BYTE seek to upstream. */
+static gboolean
+gst_aac_audio_parse_do_push_seek (GstBaseParse * parse,
+    GstPad * pad, GstEvent * event)
+{
+  GstAacParse *aacparse = GST_AAC_PARSE (parse);
+  gdouble rate;
+  GstFormat format;
+  GstSeekFlags flags;
+  GstSeekType cur_type, stop_type;
+  gint64 cur, stop;
+  gboolean res;
+  gint64 byte_cur;
+  gint64 estimate_byte;
+  gint32 frame_dur;
+  gint64 upstream_total_bytes = 0;
+  GstFormat fmt = GST_FORMAT_BYTES;
+
+  GST_INFO_OBJECT (parse, "doing aac push-based seek");
+
+  gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+      &stop_type, &stop);
+
+  /* FIXME, always play to the end */
+  stop = -1;
+
+  /* only forward streaming and seeking is possible */
+  if (rate <= 0)
+    goto unsupported_seek;
+
+  if (cur == 0) {
+    /* handle rewind only */
+    cur_type = GST_SEEK_TYPE_SET;
+    byte_cur = 0;
+    stop_type = GST_SEEK_TYPE_NONE;
+    stop = -1;
+    flags |= GST_SEEK_FLAG_FLUSH;
+  } else {
+    /* handle normal seek: estimate bytes from per-second frame bytes */
+    cur_type = GST_SEEK_TYPE_SET;
+    stop_type = GST_SEEK_TYPE_NONE;
+    stop = -1;
+    flags |= GST_SEEK_FLAG_FLUSH;
+
+    estimate_byte = (cur / (1000 * 1000)) * aacparse->frame_byte;
+    if (aacparse->sample_rate > 0)
+      frame_dur = (aacparse->spf * 1000) / aacparse->sample_rate;
+    else
+      goto unsupported_seek;
+    if (frame_dur > 0)
+      byte_cur = estimate_byte / (frame_dur);
+    else
+      goto unsupported_seek;
+
+    GST_INFO_OBJECT (parse, "frame_byte(%d) spf(%d) rate (%d) ",
+        aacparse->frame_byte, aacparse->spf, aacparse->sample_rate);
+    GST_INFO_OBJECT (parse,
+        "seek cur (%" G_GINT64_FORMAT ") = (%" GST_TIME_FORMAT ") ", cur,
+        GST_TIME_ARGS (cur));
+    GST_INFO_OBJECT (parse,
+        "estimate_byte(%" G_GINT64_FORMAT ") frame_dur (%d)", estimate_byte,
+        frame_dur);
+  }
+
+  /* obtain real upstream total bytes */
+  if (!gst_pad_peer_query_duration (parse->sinkpad, fmt, &upstream_total_bytes))
+    upstream_total_bytes = 0;
+  GST_INFO_OBJECT (aacparse,
+      "gst_pad_query_peer_duration -upstream_total_bytes (%" G_GINT64_FORMAT
+      ")", upstream_total_bytes);
+  aacparse->file_size = upstream_total_bytes;
+
+  if ((byte_cur == -1) || (byte_cur > aacparse->file_size)) {
+    GST_INFO_OBJECT (parse,
+        "[WEB-ERROR] seek cur (%" G_GINT64_FORMAT ") > file_size (%"
+        G_GINT64_FORMAT ") ", cur, aacparse->file_size);
+    goto abort_seek;
+  }
+
+  GST_INFO_OBJECT (parse,
+      "Pushing BYTE seek rate %g, " "start %" G_GINT64_FORMAT ", stop %"
+      G_GINT64_FORMAT, rate, byte_cur, stop);
+
+  if (!(flags & GST_SEEK_FLAG_KEY_UNIT)) {
+    GST_INFO_OBJECT (parse,
+        "Requested seek time: %" GST_TIME_FORMAT ", calculated seek offset: %"
+        G_GINT64_FORMAT, GST_TIME_ARGS (cur), byte_cur);
+  }
+
+  /* BYTE seek event */
+  event =
+      gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
+      stop_type, stop);
+  res = gst_pad_push_event (parse->sinkpad, event);
+
+  return res;
+
+  /* ERRORS */
+
+abort_seek:
+  {
+    GST_DEBUG_OBJECT (parse,
+        "could not determine byte position to seek to, " "seek aborted.");
+    return FALSE;
+  }
+
+unsupported_seek:
+  {
+    GST_DEBUG_OBJECT (parse, "unsupported seek, seek aborted.");
+    return FALSE;
+  }
+}
+
+
+/* Quick ADTS frame-length extraction: returns the 13-bit frame length
+ * when @data points at a valid ADTS syncword, 0 otherwise. */
+static guint
+gst_aac_parse_adts_get_fast_frame_len (const guint8 * data)
+{
+  if ((data[0] != 0xff) || ((data[1] & 0xf6) != 0xf0))
+    return 0;
+
+  return ((data[3] & 0x03) << 11) | (data[4] << 3) | ((data[5] & 0xe0) >> 5);
+}
+
+/**
+ * gst_aac_parse_adts_src_eventfunc:
+ * @parse: #GstBaseParse
+ * @event: the #GstEvent
+ *
+ * Before baseparse handles a seek event, build a full AAC (ADTS) index
+ * table in pull mode, or delegate to a push-mode byte seek.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_aac_parse_adts_src_eventfunc (GstBaseParse * parse, GstEvent * event)
+{
+  gboolean handled = FALSE;
+  GstAacParse *aacparse = GST_AAC_PARSE (parse);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+    {
+      GstFlowReturn res = GST_FLOW_OK;
+      gint64 base_offset = 0, cur = 0;
+      gint32 frame_count = 1;   /* first frame is already in the index table */
+      gint64 second_count = 0;  /* initial 1 second */
+      gint64 total_file_size = 0, start_offset = 0;
+      GstClockTime current_ts = GST_CLOCK_TIME_NONE;
+      GstPadMode pad_mode = GST_PAD_MODE_NONE;
+
+      gst_base_parse_get_pad_mode (parse, &pad_mode);
+      if (pad_mode != GST_PAD_MODE_PULL) {
+        GST_INFO_OBJECT (aacparse, "aac parser is PUSH MODE.");
+        /* parse->srcpad is borrowed — do NOT unref it here */
+        return gst_aac_audio_parse_do_push_seek (parse, parse->srcpad, event);
+      }
+      gst_base_parse_get_upstream_size (parse, &total_file_size);
+      gst_base_parse_get_index_last_offset (parse, &start_offset);
+      gst_base_parse_get_index_last_ts (parse, &current_ts);
+
+      if (total_file_size > AAC_LARGE_FILE_SIZE) {
+        gst_base_parse_set_seek_mode (parse, 0);
+        GST_INFO_OBJECT (aacparse, "larger than big size (2MB).");
+        goto aac_seek_null_exit;
+      }
+
+      GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK enter");
+
+      if (total_file_size == 0 || start_offset >= total_file_size) {
+        GST_ERROR ("last index offset %" G_GINT64_FORMAT
+            " is larger than file size %" G_GINT64_FORMAT, start_offset,
+            total_file_size);
+        break;
+      }
+
+      gst_event_parse_seek (event, NULL, NULL, NULL, NULL, &cur, NULL, NULL);
+      if (cur <= current_ts) {
+        GST_INFO ("seek to %" GST_TIME_FORMAT " within index table %"
+            GST_TIME_FORMAT ". do not make index table", GST_TIME_ARGS (cur),
+            GST_TIME_ARGS (current_ts));
+        break;
+      } else {
+        GST_INFO ("seek to %" GST_TIME_FORMAT " without index table %"
+            GST_TIME_FORMAT ". make index table", GST_TIME_ARGS (cur),
+            GST_TIME_ARGS (current_ts));
+      }
+
+      GST_INFO ("make AAC(ADTS) Index Table. file_size = %" G_GINT64_FORMAT
+          " last idx offset=%" G_GINT64_FORMAT ", last idx ts=%"
+          GST_TIME_FORMAT, total_file_size, start_offset,
+          GST_TIME_ARGS (current_ts));
+
+      base_offset = start_offset;       /* set base by start offset */
+      second_count = current_ts + GST_SECOND;   /* 1sec */
+
+      /************************************/
+      /* STEP 0: Setting parse information */
+      /************************************/
+      aacparse->spf = aacparse->frame_samples;
+      aacparse->frame_duration = (aacparse->spf * 1000 * 100) / aacparse->sample_rate;  /* duration per frame (msec x100) */
+      aacparse->frame_per_sec = (aacparse->sample_rate) / aacparse->spf;        /* frames per second (ea) */
+
+      /************************************/
+      /* STEP 1: MAX_PULL_RANGE_BUF cycle */
+      /************************************/
+      while (total_file_size - base_offset >= AAC_MAX_PULL_RANGE_BUF) {
+        gint64 offset = 0;
+        GstBuffer *buffer = NULL;
+        guint8 *buf = NULL;
+        GstMapInfo map;
+
+        GST_INFO ("gst_pad_pull_range %d bytes (from %" G_GINT64_FORMAT
+            ") use max size", AAC_MAX_PULL_RANGE_BUF, base_offset);
+        /* gst_pad_pull_range() takes a SIZE, not an end offset */
+        res = gst_pad_pull_range (parse->sinkpad, base_offset,
+            AAC_MAX_PULL_RANGE_BUF, &buffer);
+        if (res != GST_FLOW_OK) {
+          GST_ERROR ("gst_pad_pull_range failed!");
+          break;
+        }
+
+        gst_buffer_map (buffer, &map, GST_MAP_READ);
+        buf = map.data;
+        if (buf == NULL) {
+          gst_buffer_unmap (buffer, &map);
+          GST_WARNING ("buffer is NULL in make aac seek table's STEP1");
+          gst_buffer_unref (buffer);
+          goto aac_seek_null_exit;
+        }
+
+        /* keep a whole ADTS header (7 bytes) inside the mapped buffer */
+        while (offset + 7 <= AAC_MAX_PULL_RANGE_BUF) {
+          gint frame_size = 0;
+
+          /* make sure the values in the frame header look sane */
+          frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
+
+          if ((frame_size > 0)
+              && (frame_size < (AAC_MAX_PULL_RANGE_BUF - offset))) {
+            if (current_ts > second_count) {    /* 1 sec == xx frames. we make idx per sec */
+              gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE);     /* force */
+              GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
+                  G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
+                  base_offset + offset);
+              second_count += GST_SECOND;       /* 1sec */
+            }
+
+            current_ts += (aacparse->frame_duration * GST_MSECOND) / 100;       /* each frame is (frame_duration) ms */
+            offset += frame_size;
+            buf += frame_size;
+            frame_count++;
+          } else if (frame_size >= (AAC_MAX_PULL_RANGE_BUF - offset)) {
+            GST_DEBUG ("we need refill buffer");
+            break;
+          } else {
+            GST_WARNING ("we lost sync");
+            buf++;
+            offset++;
+          }
+        }                       /* while */
+
+        base_offset = base_offset + offset;
+
+        gst_buffer_unmap (buffer, &map);
+        gst_buffer_unref (buffer);
+      }                         /* end MAX buffer cycle */
+
+      /*******************************/
+      /* STEP 2: Remain Buffer cycle */
+      /*******************************/
+      if (total_file_size - base_offset > 0) {
+        gint64 offset = 0;
+        GstBuffer *buffer = NULL;
+        guint8 *buf = NULL;
+        GstMapInfo map;
+
+        GST_INFO ("gst_pad_pull_range %" G_GINT64_FORMAT " bytes (from %"
+            G_GINT64_FORMAT ") use remain_buf size",
+            total_file_size - base_offset, base_offset);
+        /* again: the third argument is the number of bytes to pull */
+        res = gst_pad_pull_range (parse->sinkpad, base_offset,
+            total_file_size - base_offset, &buffer);
+        if (res != GST_FLOW_OK) {
+          GST_ERROR ("gst_pad_pull_range failed!");
+          break;
+        }
+
+        gst_buffer_map (buffer, &map, GST_MAP_READ);
+        buf = map.data;
+        if (buf == NULL) {
+          gst_buffer_unmap (buffer, &map);
+          GST_WARNING ("buffer is NULL in make aac seek table's STEP2");
+          gst_buffer_unref (buffer);
+          goto aac_seek_null_exit;
+        }
+
+        /* stop once a full ADTS header no longer fits in the file tail */
+        while (base_offset + offset + 7 <= total_file_size) {
+          gint frame_size = 0;
+
+          /* make sure the values in the frame header look sane */
+          frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
+
+          if ((frame_size > 0)
+              && (frame_size <= (total_file_size - (base_offset + offset)))) {
+            if (current_ts > second_count) {    /* 1 sec == xx frames. we make idx per sec */
+              gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE);     /* force */
+              GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
+                  G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
+                  base_offset + offset);
+              second_count += GST_SECOND;       /* 1sec */
+            }
+
+            current_ts += (aacparse->frame_duration * GST_MSECOND) / 100;       /* each frame is (frame_duration) ms */
+            offset += frame_size;
+            buf += frame_size;
+            frame_count++;
+          } else if (frame_size == 0) {
+            GST_DEBUG ("Frame size is 0 so, Decoding end..");
+            break;
+          } else {
+            GST_WARNING ("we lost sync");
+            buf++;
+            offset++;
+          }
+        }                       /* while */
+
+        gst_buffer_unmap (buffer, &map);
+        gst_buffer_unref (buffer);
+      }
+      /* end remain_buf buffer cycle */
+      GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK leave");
+    }
+      break;
+
+    default:
+      break;
+  }
+
+aac_seek_null_exit:
+
+  /* call baseparse src_event function to handle event */
+  handled = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+  return handled;
}
+#endif /* TIZEN_FEATURE_AACPARSE_MODIFICATION */
gint mpegversion;
gint frame_samples;
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ gboolean first_frame; /* estimate duration once at the first time */
+ guint hdr_bitrate; /* estimated bitrate (bps) */
+ guint spf; /* samples per frame = frame_samples */
+ guint frame_duration; /* duration per frame (msec) */
+ guint frame_per_sec; /* frames per second (ea) */
+ guint bitstream_type; /* bitstream type - constant or variable */
+ guint adif_header_length;
+ guint num_program_config_elements;
+ guint read_bytes;
+ gint64 file_size;
+ guint frame_byte;
+#endif
+
GstAacHeaderType header_type;
GstAacHeaderType output_header_type;
#define MIN_FRAME_SIZE 6
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+#define DEFAULT_CHECK_HTTP_SEEK FALSE
+
+/* Property */
+enum
+{
+ PROP_0,
+ PROP_CHECK_HTTP_SEEK
+};
+#endif
+
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
static gboolean gst_mpeg_audio_parse_start (GstBaseParse * parse);
static gboolean gst_mpeg_audio_parse_stop (GstBaseParse * parse);
+
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+static void gst_mpeg_audio_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_mpeg_audio_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean gst_mpeg_audio_parse_src_eventfunc (GstBaseParse * parse,
+ GstEvent * event);
+#endif
+
static GstFlowReturn gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, gint * skipsize);
static GstFlowReturn gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
parse_class->get_sink_caps =
GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_sink_caps);
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+ object_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_set_property);
+ object_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_property);
+
+ g_object_class_install_property (object_class, PROP_CHECK_HTTP_SEEK,
+ g_param_spec_boolean ("http-pull-mp3dec", "enable/disable",
+ "enable/disable mp3dec http seek pull mode",
+ DEFAULT_CHECK_HTTP_SEEK, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /* T.B.D : make full mp3 index table when seek */
+ parse_class->src_event = gst_mpeg_audio_parse_src_eventfunc;
+#endif
+
/* register tags */
#define GST_TAG_CRC "has-crc"
#define GST_TAG_MODE "channel-mode"
gst_mpeg_audio_parse_reset (mp3parse);
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+ if (mp3parse->http_seek_flag) {
+ /* Don't need Accurate Seek table (in http pull mode) */
+ GST_INFO_OBJECT (parse, "Enable (1) : mp3parse->http_seek_flag");
+ } else {
+ GST_INFO_OBJECT (parse, "Disable (0) : mp3parse->http_seek_flag");
+ }
+#endif
+
return TRUE;
}
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+/* GObject property setter.  Only PROP_CHECK_HTTP_SEEK ("http-pull-mp3dec")
+ * is handled: it toggles mp3parse->http_seek_flag, which later disables the
+ * accurate-seek path when the source runs in HTTP pull mode (see
+ * gst_mpeg_audio_parse_src_eventfunc). */
+static void
+gst_mpeg_audio_parse_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (object);
+  GST_INFO_OBJECT (mp3parse, "set_property() START- prop_id(%d)", prop_id);
+  switch (prop_id) {
+    case PROP_CHECK_HTTP_SEEK:
+      mp3parse->http_seek_flag = g_value_get_boolean (value);
+      GST_INFO_OBJECT (mp3parse, "http_seek_flag(%d)",
+          mp3parse->http_seek_flag);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter; mirror of the setter above.  Returns the
+ * current value of mp3parse->http_seek_flag for PROP_CHECK_HTTP_SEEK. */
+static void
+gst_mpeg_audio_parse_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (object);
+  GST_INFO_OBJECT (mp3parse, "get_property() START- prop_id(%d)", prop_id);
+  switch (prop_id) {
+    case PROP_CHECK_HTTP_SEEK:
+      g_value_set_boolean (value, mp3parse->http_seek_flag);
+      GST_INFO_OBJECT (mp3parse, "http_seek_flag(%d)",
+          mp3parse->http_seek_flag);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+#endif
+
static gboolean
gst_mpeg_audio_parse_stop (GstBaseParse * parse)
{
return res;
}
+
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+/**
+ * gst_mpeg_audio_parse_src_eventfunc:
+ * @parse: #GstBaseParse
+ * @event: the #GstEvent being handled
+ *
+ * Intercepts seek events before baseparse handles them: when the
+ * http-pull-mp3dec flag is set, accurate (index based) seeking is
+ * disabled via gst_base_parse_set_seek_mode().  The event is always
+ * forwarded to the baseparse src_event implementation afterwards.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_mpeg_audio_parse_src_eventfunc (GstBaseParse * parse, GstEvent * event)
+{
+  gboolean handled = FALSE;
+  GstMpegAudioParse *mp3parse;
+  mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+
+  GST_DEBUG_OBJECT (parse, "handling event %d, %s", GST_EVENT_TYPE (event),
+      GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+    {
+      GST_INFO_OBJECT (mp3parse, "GST_EVENT_SEEK enter");
+      if (mp3parse->http_seek_flag) {
+        GST_INFO_OBJECT (mp3parse,
+            "souphttpsrc is PULL MODE (so accurate seek mode is OFF)");
+        /* Check the declaration of this function in the baseparse.
+         * Mode 0 = plain (non-accurate) seek; an accurate-seek index
+         * table is not needed in HTTP pull mode. */
+        gst_base_parse_set_seek_mode (parse, 0);
+        /* The goto only skips the "leave" log below; the event is still
+         * forwarded to baseparse at the label. */
+        goto mp3_seek_null_exit;
+      }
+      GST_INFO_OBJECT (mp3parse, "GST_EVENT_SEEK leave");
+      break;
+    }
+    default:
+      break;
+  }
+
+mp3_seek_null_exit:
+  /* call baseparse src_event function to handle event */
+  handled = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+
+  return handled;
+}
+#endif
/* LAME info */
guint32 encoder_delay;
guint32 encoder_padding;
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+ /* Additional info */
+ gboolean http_seek_flag;
+#endif
};
/**
gst_flv_demux_push_src_event (demux,
gst_event_new_tag (gst_tag_list_copy (demux->taglist)));
+#ifdef TIZEN_FEATURE_FLVDEMUX_MODIFICATION
+ GST_DEBUG_OBJECT (demux, "post tag msg %" GST_PTR_FORMAT, demux->taglist);
+
+  /* post the FLV tag list as a bus message so the application can receive it early */
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_tag (GST_OBJECT_CAST (demux),
+ gst_tag_list_copy (demux->taglist)));
+#endif
+
if (demux->audio_pad) {
GST_DEBUG_OBJECT (demux->audio_pad, "pushing audio %" GST_PTR_FORMAT,
demux->audio_tags);
#define FOURCC_tenc GST_MAKE_FOURCC('t','e','n','c')
#define FOURCC_cenc GST_MAKE_FOURCC('c','e','n','c')
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+/* Spatial Audio */
+#define FOURCC_SA3D GST_MAKE_FOURCC('S','A','3','D')
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
G_END_DECLS
#endif /* __FOURCC_H__ */
PROP_INTERLEAVE_TIME,
PROP_MAX_RAW_AUDIO_DRIFT,
PROP_START_GAP_THRESHOLD,
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ PROP_EXPECTED_TRAILER_SIZE,
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
};
/* some spare for header size as well */
static GstElementClass *parent_class = NULL;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+/*
+ [[ Metadata Size ]]
+ 1. Common
+ free = 8
+ moov = 8
+ mvhd = 108
+ -------------
+ total : 124
+
+ 2. Video
+ i. Video common
+ trak = 8
+ tkhd = 92
+ mdia = 8
+ mdhd = 32
+ hdlr = 45
+ minf = 8
+ vmhd = 20
+ dinf = 36 (8, dref : 16 , url : 12)
+ stbl = 8
+ ---------------
+ total : 257
+
+ ii. Variation in file format
+ - MP4
+ ftyp = 32
+ udta = 61
+ - 3GP
+ ftyp = 28
+ udta = 8
+
+ iii. Variation in codec
+ - MPEG4
+ stsd = 137(16, mp4v : 86, esds : 35)
+
+ - H.264 = 487(or 489) + (8*stts_count) + (8*frame) + (4*I-frame)
+ stsd = 134 (SPS 9, PPS 4) or 136 (SPS 111, PPS 4)
+
+ - H.263 = 470 + + (8*stts_count) + (8*frame) + (4*I-frame)
+ stsd = 102 -> different from H.264
+
+ iv. Variation in frame
+ stts = 16 + (8*stts_count)
+ stss = 16 + (4*I-frame)
+ stsc = 28
+ stsz = 20 + (4*frame)
+ stco = 16 + (4*frame)
+
+ 3. Audio
+ i. Audio common
+ trak = 8
+ tkhd = 92
+ mdia = 8
+ mdhd = 32
+ hdlr = 45
+ minf = 8
+ smhd = 16
+ dinf = 36 (8, dref : 16, url : 12)
+ stbl = 8
+ ---------------
+ total : 253
+
+ stts = 16
+ stsz = 20
+ stco = 16
+ ------------
+ total : 52
+
+ ii. Variation in file format
+ - MP4
+ udta = 61
+ - 3GP
+ udta = 8
+
+ iii. Variation in codec
+ - Common
+ stts = 16 + (8*stts_count)
+ stsc = 28
+ stsz = 20 + (4*frame)
+ stco = 16 + (4*frame)
+
+ - AAC
+ stsd = 94 (16, mp4a : 78(36 ,esds : 42))
+
+ - AMR
+ stsd = 69 (16, samr : 53(36, damr : 17))
+*/
+
+/* trailer entry size */
+#define ENTRY_SIZE_VIDEO_STTS 8
+#define ENTRY_SIZE_VIDEO_STSS 4
+#define ENTRY_SIZE_VIDEO_STSZ 4
+#define ENTRY_SIZE_VIDEO_STCO 4
+#define ENTRY_SIZE_AUDIO_STTS 8
+#define ENTRY_SIZE_AUDIO_STSZ 4
+#define ENTRY_SIZE_AUDIO_STCO 4
+
+#define ENTRY_SIZE_VIDEO_MPEG4_STSD 137
+#define ENTRY_SIZE_VIDEO_H263P_STSD 102
+#define ENTRY_SIZE_AUDIO_AAC_STSD 94
+#define ENTRY_SIZE_AUDIO_AMR_STSD 69
+
+#define ENTRY_SIZE_STSC 28
+#define ENTRY_SIZE_VIDEO_ST 68 /*atom size (stss + stts + stsc + stsz + stco ) * (size + atom + version + flags + sample count)+stsz(sample size) */
+#define ENTRY_SIZE_AUDIO_ST 52 /*atom size (stss + stsc + stsz + stco ) * (size + atom + version + flags + sample count)+stsz(sample size) */
+
+/* common */
+#define MUX_COMMON_SIZE_HEADER 124 /* free + moov + moov.mvhd*/
+
+#define MUX_COMMON_SIZE_VIDEO_HEADER 257
+#define MUX_COMMON_SIZE_AUDIO_HEADER 253
+
+#define MUX_COMMON_SIZE_MP4_FTYP 32
+#define MUX_COMMON_SIZE_3GP_FTYP 28
+
+#define MUX_COMMON_SIZE_MP4_UDTA 61
+#define MUX_COMMON_SIZE_3GP_UDTA 8
+
+/* Recompute the expected byte size of the trailer (free + moov and all of
+ * its children) that will be written when muxing stops, based on the
+ * sample tables accumulated so far for @pad.  The result is stored in
+ * qtmux->expected_trailer_size, exposed through the read-only
+ * "expected-trailer-size" property.  The per-atom byte counts used below
+ * come from the [[ Metadata Size ]] breakdown documented above. */
+static void
+gst_qt_mux_update_expected_trailer_size (GstQTMux *qtmux, GstQTPad *pad)
+{
+  guint nb_video_frames = 0;
+  guint nb_video_i_frames = 0;
+  guint nb_video_stts_entry = 0;
+  guint nb_audio_frames = 0;
+  guint nb_audio_stts_entry = 0;
+  gboolean video_stream = FALSE;
+  gboolean audio_stream = FALSE;
+  guint exp_size = 0;
+  GstQTMuxClass *qtmux_klass = NULL;
+
+  if (qtmux == NULL || pad == NULL) {
+    /* NOTE(review): GST_ERROR_OBJECT is invoked with a possibly-NULL
+     * qtmux here; GStreamer tolerates a NULL object in debug macros. */
+    GST_ERROR_OBJECT (qtmux, "Invalid parameter");
+    return;
+  }
+
+  qtmux_klass = (GstQTMuxClass *)(G_OBJECT_GET_CLASS(qtmux));
+
+  /* Stream type is derived from the request-pad name prefix
+   * ("video_%u" / "audio_%u"). */
+  if (!strncmp(GST_PAD_NAME(pad->collect.pad), "video", 5)) {
+    nb_video_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
+    nb_video_i_frames += pad->trak->mdia.minf.stbl.stss.entries.len;
+    nb_video_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
+
+    video_stream = TRUE;
+  } else if (!strncmp(GST_PAD_NAME(pad->collect.pad), "audio", 5)) {
+    nb_audio_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
+    nb_audio_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
+
+    audio_stream = TRUE;
+  }
+
+  /* free + moov + mvhd */
+  qtmux->expected_trailer_size = MUX_COMMON_SIZE_HEADER;
+
+  /* ftyp + udta * 3 (there are 3 udta fields, each the same size) */
+  switch (qtmux_klass->format) {
+    case GST_QT_MUX_FORMAT_MP4:
+      qtmux->expected_trailer_size += MUX_COMMON_SIZE_MP4_FTYP + MUX_COMMON_SIZE_MP4_UDTA * 3;
+      break;
+    case GST_QT_MUX_FORMAT_3GP:
+      qtmux->expected_trailer_size += MUX_COMMON_SIZE_3GP_FTYP + MUX_COMMON_SIZE_3GP_UDTA * 3;
+      break;
+    default:
+      break;
+  }
+
+  /* Calculate trailer size for video stream */
+  if (video_stream) {
+    switch (pad->fourcc) {
+      case FOURCC_h263:
+      case FOURCC_s263:
+        exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_H263P_STSD;
+        break;
+      case FOURCC_mp4v:
+      case FOURCC_MP4V:
+      case FOURCC_fmp4:
+      case FOURCC_FMP4:
+      case FOURCC_3gp4:
+      case FOURCC_3gp6:
+      case FOURCC_3gg6:
+        exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_MPEG4_STSD;
+        break;
+      default:
+        /* unknown codec: only frame-related sizes are counted below */
+        break;
+    }
+
+    /* frame related: stts/stss/stsc/stsz/stco entry counts scale with the
+     * number of samples and sync samples collected so far */
+    exp_size += ENTRY_SIZE_VIDEO_ST + (ENTRY_SIZE_VIDEO_STTS * nb_video_stts_entry) +
+        (ENTRY_SIZE_VIDEO_STSS * nb_video_i_frames) + (ENTRY_SIZE_STSC) +
+        ((ENTRY_SIZE_VIDEO_STSZ + ENTRY_SIZE_VIDEO_STCO) * nb_video_frames);
+
+    qtmux->video_expected_trailer_size = exp_size;
+  }
+
+  /* Calculate trailer size for audio stream */
+  if (audio_stream) {
+    exp_size += MUX_COMMON_SIZE_AUDIO_HEADER + ENTRY_SIZE_AUDIO_ST + (ENTRY_SIZE_AUDIO_STTS * nb_audio_stts_entry) +
+        (ENTRY_SIZE_STSC) + ((ENTRY_SIZE_AUDIO_STSZ + ENTRY_SIZE_AUDIO_STCO) * nb_audio_frames);
+
+    /* stsd size differs between AMR and AAC (the assumed default) */
+    if (pad->fourcc == FOURCC_samr)
+      exp_size += ENTRY_SIZE_AUDIO_AMR_STSD;
+    else
+      exp_size += ENTRY_SIZE_AUDIO_AAC_STSD;
+
+    qtmux->audio_expected_trailer_size = exp_size;
+  }
+
+  /* total = common header/ftyp/udta + last computed per-stream sizes */
+  qtmux->expected_trailer_size += qtmux->video_expected_trailer_size + qtmux->audio_expected_trailer_size;
+
+  /*
+  GST_INFO_OBJECT (qtmux, "pad type %s", GST_PAD_NAME(pad->collect.pad));
+  GST_INFO_OBJECT (qtmux, "VIDEO : stts-entry=[%d], i-frame=[%d], video-sample=[%d]", nb_video_stts_entry, nb_video_i_frames, nb_video_frames);
+  GST_INFO_OBJECT (qtmux, "AUDIO : stts-entry=[%d], audio-sample=[%d]", nb_audio_stts_entry, nb_audio_frames);
+  GST_INFO_OBJECT (qtmux, "expected trailer size %d", qtmux->expected_trailer_size);
+  */
+
+  return;
+}
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
static void
gst_qt_mux_base_init (gpointer g_class)
{
GParamFlags streamable_flags;
const gchar *streamable_desc;
gboolean streamable;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ GParamSpec *tspec = NULL;
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
#define STREAMABLE_DESC "If set to true, the output should be as if it is to "\
"be streamed and hence no indexes written or duration written."
0, G_MAXUINT64, DEFAULT_START_GAP_THRESHOLD,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ tspec = g_param_spec_uint("expected-trailer-size", "Expected Trailer Size",
+ "Expected trailer size (bytes)",
+ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS);
+ if (tspec)
+ g_object_class_install_property(gobject_class, PROP_EXPECTED_TRAILER_SIZE, tspec);
+ else
+ GST_ERROR("g_param_spec failed for \"expected-trailer-size\"");
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
gstelement_class->request_new_pad =
GST_DEBUG_FUNCPTR (gst_qt_mux_request_new_pad);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_qt_mux_change_state);
if (alloc) {
qtmux->moov = atom_moov_new (qtmux->context);
+#ifndef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
/* ensure all is as nice and fresh as request_new_pad would provide it */
for (walk = qtmux->sinkpads; walk; walk = g_slist_next (walk)) {
GstQTPad *qtpad = (GstQTPad *) walk->data;
qtpad->trak = atom_trak_new (qtmux->context);
atom_moov_add_trak (qtmux->moov, qtpad->trak);
}
+#endif
}
qtmux->current_pad = NULL;
qtmux->last_moov_update = GST_CLOCK_TIME_NONE;
qtmux->muxed_since_last_update = 0;
qtmux->reserved_duration_remaining = GST_CLOCK_TIME_NONE;
+
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ qtmux->expected_trailer_size = 0;
+ qtmux->video_expected_trailer_size = 0;
+ qtmux->audio_expected_trailer_size = 0;
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
}
static void
}
}
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ gst_qt_mux_update_expected_trailer_size(qtmux, pad);
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
exit:
return ret;
case PROP_START_GAP_THRESHOLD:
g_value_set_uint64 (value, qtmux->start_gap_threshold);
break;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ case PROP_EXPECTED_TRAILER_SIZE:
+ g_value_set_uint(value, qtmux->expected_trailer_size);
+ break;
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
/* for request pad naming */
guint video_pads, audio_pads, subtitle_pads, caption_pads;
+
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ guint expected_trailer_size;
+ guint audio_expected_trailer_size;
+ guint video_expected_trailer_size;
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
};
struct _GstQTMuxClass
#include <gst/math-compat.h>
#ifdef HAVE_ZLIB
-# include <zlib.h>
+#include <zlib.h>
#endif
/* max. size considered 'sane' for non-mdat atoms */
gboolean keyframe; /* TRUE when this packet is a keyframe */
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata;
+
+struct _QtDemuxSphericalMetadata
+{
+ gboolean is_spherical;
+ gboolean is_stitched;
+ char *stitching_software;
+ char *projection_type;
+ char *stereo_mode;
+ int source_count;
+ int init_view_heading;
+ int init_view_pitch;
+ int init_view_roll;
+ int timestamp;
+ int full_pano_width_pixels;
+ int full_pano_height_pixels;
+ int cropped_area_image_width;
+ int cropped_area_image_height;
+ int cropped_area_left;
+ int cropped_area_top;
+ QTDEMUX_AMBISONIC_TYPE ambisonic_type;
+ QTDEMUX_AMBISONIC_FORMAT ambisonic_format;
+ QTDEMUX_AMBISONIC_ORDER ambisonic_order;
+};
+
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
/* Macros for converting to/from timescale */
#define QTSTREAMTIME_TO_GSTTIME(stream, value) (gst_util_uint64_scale((value), GST_SECOND, (stream)->timescale))
#define GSTTIME_TO_QTSTREAMTIME(stream, value) (gst_util_uint64_scale((value), (stream)->timescale, GST_SECOND))
static void qtdemux_gst_structure_free (GstStructure * gststructure);
static void gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+static void gst_tag_register_spherical_tags (void);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
gst_qtdemux_class_init (GstQTDemuxClass * klass)
{
gst_tag_register_musicbrainz_tags ();
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ gst_tag_register_spherical_tags ();
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
gst_element_class_add_static_pad_template (gstelement_class,
&gst_qtdemux_sink_template);
gst_element_class_add_static_pad_template (gstelement_class,
qtdemux->old_streams = g_ptr_array_new_with_free_func
((GDestroyNotify) gst_qtdemux_stream_unref);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ qtdemux->spherical_metadata = (QtDemuxSphericalMetadata *)
+ malloc (sizeof (QtDemuxSphericalMetadata));
+
+ if (qtdemux->spherical_metadata) {
+ qtdemux->spherical_metadata->is_spherical = FALSE;
+ qtdemux->spherical_metadata->is_stitched = FALSE;
+ qtdemux->spherical_metadata->stitching_software = NULL;
+ qtdemux->spherical_metadata->projection_type = NULL;
+ qtdemux->spherical_metadata->stereo_mode = NULL;
+ qtdemux->spherical_metadata->source_count = 0;
+ qtdemux->spherical_metadata->init_view_heading = 0;
+ qtdemux->spherical_metadata->init_view_pitch = 0;
+ qtdemux->spherical_metadata->init_view_roll = 0;
+ qtdemux->spherical_metadata->timestamp = 0;
+ qtdemux->spherical_metadata->full_pano_width_pixels = 0;
+ qtdemux->spherical_metadata->full_pano_height_pixels = 0;
+ qtdemux->spherical_metadata->cropped_area_image_width = 0;
+ qtdemux->spherical_metadata->cropped_area_image_height = 0;
+ qtdemux->spherical_metadata->cropped_area_left = 0;
+ qtdemux->spherical_metadata->cropped_area_top = 0;
+ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_UNKNOWN;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);
gst_qtdemux_reset (qtdemux, TRUE);
{
GstQTDemux *qtdemux = GST_QTDEMUX (object);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (qtdemux->spherical_metadata) {
+ if (qtdemux->spherical_metadata->stitching_software)
+ free(qtdemux->spherical_metadata->stitching_software);
+ if (qtdemux->spherical_metadata->projection_type)
+ free(qtdemux->spherical_metadata->projection_type);
+ if (qtdemux->spherical_metadata->stereo_mode)
+ free(qtdemux->spherical_metadata->stereo_mode);
+
+ free(qtdemux->spherical_metadata);
+ qtdemux->spherical_metadata = NULL;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
if (qtdemux->adapter) {
g_object_unref (G_OBJECT (qtdemux->adapter));
qtdemux->adapter = NULL;
stream->stream_tags);
gst_pad_push_event (stream->pad,
gst_event_new_tag (gst_tag_list_ref (stream->stream_tags)));
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+  /* post the stream tags as a bus message so the application can receive them early */
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+ gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
+ gst_tag_list_copy (stream->stream_tags)));
+#endif
}
if (G_UNLIKELY (stream->send_global_tags)) {
}
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+/* Extract the integer content of XML element @param_name (e.g.
+ * "<GSpherical:SourceCount>") from @xml_str into *@value.
+ * Accepts an optional leading sign and surrounding spaces/tabs.
+ * On any parse problem a message is logged and *@value is left
+ * untouched. */
+static void
+_get_int_value_from_xml_string (GstQTDemux * qtdemux,
+    const char *xml_str, const char *param_name, int *value)
+{
+  char *value_start, *value_end, *endptr;
+  const short value_length_max = 12;
+  char init_view_ret[12];
+  int value_length = 0;
+  int i = 0;
+
+  value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+  if (!value_start) {
+    GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+        param_name);
+    return;
+  }
+
+  /* skip past the opening tag and any leading whitespace */
+  value_start += strlen (param_name);
+  while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+    value_start++;
+
+  /* the value ends at the '<' of the closing tag */
+  value_end = strchr (value_start, '<');
+  if (!value_end) {
+    GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+    return;
+  }
+
+  /* trim trailing whitespace */
+  value_length = value_end - value_start;
+  while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+          || (value_start[value_length - 1] == '\t')))
+    value_length--;
+
+  /* validate: optional sign followed by digits only */
+  if (value_start[i] == '+' || value_start[i] == '-')
+    i++;
+  while (i < value_length) {
+    if (value_start[i] < '0' || value_start[i] > '9') {
+      GST_ERROR_OBJECT (qtdemux,
+          "error: incorrect value, integer was expected\n");
+      return;
+    }
+    i++;
+  }
+
+  if (value_length >= value_length_max || value_length < 1) {
+    GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+    return;
+  }
+
+  /* value_length < 12 here, so the explicit terminator below is in
+   * bounds even though strncpy() itself may not NUL-terminate */
+  strncpy (init_view_ret, value_start, value_length_max);
+  init_view_ret[value_length] = '\0';
+
+  *value = strtol (init_view_ret, &endptr, 10);
+  if (endptr == init_view_ret) {
+    GST_ERROR_OBJECT (qtdemux, "error: no digits were found\n");
+    return;
+  }
+
+  return;
+}
+
+/* Extract the text content of XML element @param_name from @xml_str into
+ * a newly-allocated string stored in *@value (caller frees with free()).
+ * Values longer than 255 characters are rejected.  On any parse problem
+ * a message is logged and *@value is left untouched. */
+static void
+_get_string_value_from_xml_string (GstQTDemux * qtdemux,
+    const char *xml_str, const char *param_name, char **value)
+{
+  char *value_start, *value_end;
+  const short value_length_max = 256;
+  int value_length = 0;
+
+  value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+  if (!value_start) {
+    GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+        param_name);
+    return;
+  }
+
+  /* skip past the opening tag and any leading whitespace */
+  value_start += strlen (param_name);
+  while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+    value_start++;
+
+  /* the value ends at the '<' of the closing tag */
+  value_end = strchr (value_start, '<');
+  if (!value_end) {
+    GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+    return;
+  }
+
+  /* trim trailing whitespace */
+  value_length = value_end - value_start;
+  while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+          || (value_start[value_length - 1] == '\t')))
+    value_length--;
+
+  if (value_length >= value_length_max || value_length < 1) {
+    GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+    return;
+  }
+
+  *value = strndup(value_start, value_length);
+
+  return;
+}
+
+/* Extract a boolean from XML element @param_name in @xml_str: *@value is
+ * set to TRUE iff the element text contains the substring "true".
+ * On any parse problem a message is logged and *@value is left
+ * untouched. */
+static void
+_get_bool_value_from_xml_string (GstQTDemux * qtdemux,
+    const char *xml_str, const char *param_name, gboolean * value)
+{
+  char *value_start, *value_end;
+  int value_length = 0;
+
+  value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+  if (!value_start) {
+    GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+        param_name);
+    return;
+  }
+
+  /* skip past the opening tag and any leading whitespace */
+  value_start += strlen (param_name);
+  while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+    value_start++;
+
+  /* the value ends at the '<' of the closing tag */
+  value_end = strchr (value_start, '<');
+  if (!value_end) {
+    GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+    return;
+  }
+
+  /* trim trailing whitespace */
+  value_length = value_end - value_start;
+  while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+          || (value_start[value_length - 1] == '\t')))
+    value_length--;
+
+  if (value_length < 1) {
+    GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+    return;
+  }
+
+  *value = g_strstr_len(value_start, value_length, "true") ? TRUE : FALSE;
+
+  return;
+}
+
+/* Parse the Google Spherical Video V1 RDF/XML metadata found in the
+ * spherical uuid box and fill qtdemux->spherical_metadata.  The string
+ * fields allocated here (stitching_software, projection_type,
+ * stereo_mode) are freed in dispose.  The detailed fields are only read
+ * when the video is both spherical and stitched. */
+static void
+_parse_spatial_video_metadata_from_xml_string (GstQTDemux * qtdemux, const char *xmlStr)
+{
+  const char is_spherical_str[] = "<GSpherical:Spherical>";
+  const char is_stitched_str[] = "<GSpherical:Stitched>";
+  const char stitching_software_str[] = "<GSpherical:StitchingSoftware>";
+  const char projection_type_str[] = "<GSpherical:ProjectionType>";
+  const char stereo_mode_str[] = "<GSpherical:StereoMode>";
+  const char source_count_str[] = "<GSpherical:SourceCount>";
+  const char init_view_heading_str[] = "<GSpherical:InitialViewHeadingDegrees>";
+  const char init_view_pitch_str[] = "<GSpherical:InitialViewPitchDegrees>";
+  const char init_view_roll_str[] = "<GSpherical:InitialViewRollDegrees>";
+  const char timestamp_str[] = "<GSpherical:Timestamp>";
+  const char full_pano_width_str[] = "<GSpherical:FullPanoWidthPixels>";
+  const char full_pano_height_str[] = "<GSpherical:FullPanoHeightPixels>";
+  const char cropped_area_image_width_str[] =
+      "<GSpherical:CroppedAreaImageWidthPixels>";
+  const char cropped_area_image_height_str[] =
+      "<GSpherical:CroppedAreaImageHeightPixels>";
+  const char cropped_area_left_str[] = "<GSpherical:CroppedAreaLeftPixels>";
+  const char cropped_area_top_str[] = "<GSpherical:CroppedAreaTopPixels>";
+
+  QtDemuxSphericalMetadata * spherical_metadata = qtdemux->spherical_metadata;
+
+  _get_bool_value_from_xml_string (qtdemux, xmlStr, is_spherical_str,
+      (gboolean *) & spherical_metadata->is_spherical);
+  _get_bool_value_from_xml_string (qtdemux, xmlStr, is_stitched_str,
+      (gboolean *) & spherical_metadata->is_stitched);
+
+  if (spherical_metadata->is_spherical && spherical_metadata->is_stitched) {
+    _get_string_value_from_xml_string (qtdemux, xmlStr,
+        stitching_software_str, &spherical_metadata->stitching_software);
+    _get_string_value_from_xml_string (qtdemux, xmlStr,
+        projection_type_str, &spherical_metadata->projection_type);
+    _get_string_value_from_xml_string (qtdemux, xmlStr, stereo_mode_str,
+        &spherical_metadata->stereo_mode);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, source_count_str,
+        &spherical_metadata->source_count);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        init_view_heading_str, &spherical_metadata->init_view_heading);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_pitch_str,
+        &spherical_metadata->init_view_pitch);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_roll_str,
+        &spherical_metadata->init_view_roll);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, timestamp_str,
+        &spherical_metadata->timestamp);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, full_pano_width_str,
+        &spherical_metadata->full_pano_width_pixels);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        full_pano_height_str, &spherical_metadata->full_pano_height_pixels);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        cropped_area_image_width_str,
+        &spherical_metadata->cropped_area_image_width);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        cropped_area_image_height_str,
+        &spherical_metadata->cropped_area_image_height);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_left_str,
+        &spherical_metadata->cropped_area_left);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_top_str,
+        &spherical_metadata->cropped_area_top);
+  }
+
+  return;
+}
+
+/* Register the Tizen-specific spherical/ambisonic metadata tags with the
+ * GStreamer tag system so _send_spherical_metadata_msg_to_bus() can post
+ * them.  Called once from class_init, guarded by
+ * TIZEN_FEATURE_QTDEMUX_MODIFICATION. */
+static void
+gst_tag_register_spherical_tags (void) {
+  gst_tag_register ("is_spherical", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-spherical"),
+      _("Flag indicating if the video is a spherical video"),
+      NULL);
+  gst_tag_register ("is_stitched", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-stitched"),
+      _("Flag indicating if the video is stitched"),
+      NULL);
+  gst_tag_register ("stitching_software", GST_TAG_FLAG_META,
+      G_TYPE_STRING,
+      _("tag-stitching-software"),
+      _("Software used to stitch the spherical video"),
+      NULL);
+  gst_tag_register ("projection_type", GST_TAG_FLAG_META,
+      G_TYPE_STRING,
+      _("tag-projection-type"),
+      _("Projection type used in the video frames"),
+      NULL);
+  gst_tag_register ("stereo_mode", GST_TAG_FLAG_META,
+      G_TYPE_STRING,
+      _("tag-stereo-mode"),
+      _("Description of stereoscopic 3D layout"),
+      NULL);
+  gst_tag_register ("source_count", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-source-count"),
+      _("Number of cameras used to create the spherical video"),
+      NULL);
+  gst_tag_register ("init_view_heading", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-init-view-heading"),
+      _("The heading angle of the initial view in degrees"),
+      NULL);
+  gst_tag_register ("init_view_pitch", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-init-view-pitch"),
+      _("The pitch angle of the initial view in degrees"),
+      NULL);
+  gst_tag_register ("init_view_roll", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-init-view-roll"),
+      _("The roll angle of the initial view in degrees"),
+      NULL);
+  gst_tag_register ("timestamp", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-timestamp"),
+      _("Epoch timestamp of when the first frame in the video was recorded"),
+      NULL);
+  gst_tag_register ("full_pano_width_pixels", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-full-pano-width"),
+      _("Width of the encoded video frame in pixels"),
+      NULL);
+  gst_tag_register ("full_pano_height_pixels", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-full-pano-height"),
+      _("Height of the encoded video frame in pixels"),
+      NULL);
+  gst_tag_register ("cropped_area_image_width", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-cropped-area-image-width"),
+      _("Width of the video frame to display (e.g. cropping)"),
+      NULL);
+  gst_tag_register ("cropped_area_image_height", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-cropped-area-image-height"),
+      _("Height of the video frame to display (e.g. cropping)"),
+      NULL);
+  gst_tag_register ("cropped_area_left", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-cropped-area-left"),
+      _("Column where the left edge of the image was cropped from the"
+          " full sized panorama"),
+      NULL);
+  gst_tag_register ("cropped_area_top", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-cropped-area-top"),
+      _("Row where the top edge of the image was cropped from the"
+          " full sized panorama"),
+      NULL);
+  gst_tag_register ("ambisonic_type", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-ambisonic-type"),
+      _("Specifies the type of ambisonic audio represented"),
+      NULL);
+  gst_tag_register ("ambisonic_format", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-ambisonic-format"),
+      _("Specifies the ambisonic audio format"),
+      NULL);
+  gst_tag_register ("ambisonic_order", GST_TAG_FLAG_META,
+      G_TYPE_INT,
+      _("tag-ambisonic-order"),
+      _("Specifies the ambisonic audio channel order"),
+      NULL);
+
+  return;
+}
+
+/* Post the collected spherical/ambisonic metadata on the bus as a tag
+ * message so the application can pick it up.  Integer/boolean tags are
+ * always posted; string tags are added only when non-NULL (gst_tag_list_add
+ * must not receive NULL string values). */
+static void
+_send_spherical_metadata_msg_to_bus (GstQTDemux * qtdemux)
+{
+  GstTagList *taglist;
+  QtDemuxSphericalMetadata *spherical_metadata = qtdemux->spherical_metadata;
+
+  GST_DEBUG_OBJECT (qtdemux, "is_spherical = %d",
+      spherical_metadata->is_spherical);
+  GST_DEBUG_OBJECT (qtdemux, "is_stitched = %d",
+      spherical_metadata->is_stitched);
+  GST_DEBUG_OBJECT (qtdemux, "stitching_software = %s",
+      spherical_metadata->stitching_software);
+  GST_DEBUG_OBJECT (qtdemux, "projection_type = %s",
+      spherical_metadata->projection_type);
+  GST_DEBUG_OBJECT (qtdemux, "stereo_mode = %s",
+      spherical_metadata->stereo_mode);
+  GST_DEBUG_OBJECT (qtdemux, "source_count %d",
+      spherical_metadata->source_count);
+  GST_DEBUG_OBJECT (qtdemux, "init_view_heading = %d",
+      spherical_metadata->init_view_heading);
+  GST_DEBUG_OBJECT (qtdemux, "init_view_pitch = %d",
+      spherical_metadata->init_view_pitch);
+  GST_DEBUG_OBJECT (qtdemux, "init_view_roll = %d",
+      spherical_metadata->init_view_roll);
+  GST_DEBUG_OBJECT (qtdemux, "timestamp = %d", spherical_metadata->timestamp);
+  GST_DEBUG_OBJECT (qtdemux, "full_pano_width_pixels = %d",
+      spherical_metadata->full_pano_width_pixels);
+  GST_DEBUG_OBJECT (qtdemux, "full_pano_height_pixels = %d",
+      spherical_metadata->full_pano_height_pixels);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_width = %d",
+      spherical_metadata->cropped_area_image_width);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_height = %d",
+      spherical_metadata->cropped_area_image_height);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_left = %d",
+      spherical_metadata->cropped_area_left);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_top = %d",
+      spherical_metadata->cropped_area_top);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_type = %d",
+      spherical_metadata->ambisonic_type);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_order = %d",
+      spherical_metadata->ambisonic_order);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_format = %d",
+      spherical_metadata->ambisonic_format);
+
+  taglist = gst_tag_list_new_empty ();
+  gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+      "is_spherical", spherical_metadata->is_spherical,
+      "is_stitched", spherical_metadata->is_stitched,
+      "source_count", spherical_metadata->source_count,
+      "init_view_heading", spherical_metadata->init_view_heading,
+      "init_view_pitch", spherical_metadata->init_view_pitch,
+      "init_view_roll", spherical_metadata->init_view_roll,
+      "timestamp", spherical_metadata->timestamp,
+      "full_pano_width_pixels", spherical_metadata->full_pano_width_pixels,
+      "full_pano_height_pixels", spherical_metadata->full_pano_height_pixels,
+      "cropped_area_image_width", spherical_metadata->cropped_area_image_width,
+      "cropped_area_image_height", spherical_metadata->cropped_area_image_height,
+      "cropped_area_left", spherical_metadata->cropped_area_left,
+      "cropped_area_top", spherical_metadata->cropped_area_top,
+      "ambisonic_type", spherical_metadata->ambisonic_type,
+      "ambisonic_format", spherical_metadata->ambisonic_format,
+      "ambisonic_order", spherical_metadata->ambisonic_order,
+      NULL);
+
+  if (spherical_metadata->stitching_software)
+    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+        "stitching_software", spherical_metadata->stitching_software,
+        NULL);
+  if (spherical_metadata->projection_type)
+    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+        "projection_type", spherical_metadata->projection_type,
+        NULL);
+  if (spherical_metadata->stereo_mode)
+    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+        "stereo_mode", spherical_metadata->stereo_mode,
+        NULL);
+
+  /* the message takes a copy; our local taglist is released below */
+  gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+      gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
+          gst_tag_list_copy (taglist)));
+
+  gst_tag_list_unref(taglist);
+
+  return;
+}
+
+/* Parse a Google spatial-audio SA3D box (RFC draft "Spatial Audio RFC")
+ * and record the detected ambisonic type/order/format in
+ * qtdemux->spherical_metadata.  @buffer points at the start of the atom,
+ * @length is the atom size in bytes as found in the file (untrusted). */
+static void
+qtdemux_parse_SA3D (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
+{
+  guint offset = 0;
+
+  guint8 version = 0;
+  guint8 ambisonic_type = 0;
+  guint32 ambisonic_order = 0;
+  guint8 ambisonic_channel_ordering = 0;
+  guint8 ambisonic_normalization = 0;
+  guint32 num_channels = 0;
+  guint32 channel_map[49] = { 0 };      /* Up to 6th order */
+
+  guint i;
+
+  GST_DEBUG_OBJECT (qtdemux, "qtdemux_parse_SA3D");
+
+  qtdemux->header_size += length;
+  /* 64-bit atoms (size field == 0 pattern) carry a 16-byte header */
+  offset = (QT_UINT32 (buffer) == 0) ? 16 : 8;
+
+  if (length <= offset + 16) {
+    GST_DEBUG_OBJECT (qtdemux, "SA3D atom is too short, skipping");
+    return;
+  }
+
+  version = QT_UINT8 (buffer + offset);
+  ambisonic_type = QT_UINT8 (buffer + offset + 1);
+  ambisonic_order = QT_UINT32 (buffer + offset + 2);
+  ambisonic_channel_ordering = QT_UINT8 (buffer + offset + 6);
+  ambisonic_normalization = QT_UINT8 (buffer + offset + 7);
+  num_channels = QT_UINT32 (buffer + offset + 8);
+
+  /* num_channels comes straight from the file: clamp it to the capacity
+   * of channel_map and make sure the map actually fits inside the atom,
+   * so a malformed or malicious file can neither overflow the stack
+   * buffer nor make us read past the end of the atom. */
+  if (num_channels > G_N_ELEMENTS (channel_map)) {
+    GST_WARNING_OBJECT (qtdemux,
+        "SA3D num_channels %u exceeds maximum %u, clamping",
+        num_channels, (guint) G_N_ELEMENTS (channel_map));
+    num_channels = G_N_ELEMENTS (channel_map);
+  }
+  if ((guint) length < offset + 12 + num_channels * 4) {
+    GST_DEBUG_OBJECT (qtdemux, "SA3D channel map truncated, skipping");
+    return;
+  }
+
+  for (i = 0; i < num_channels; ++i)
+    channel_map[i] = QT_UINT32 (buffer + offset + 12 + i * 4);
+
+  GST_DEBUG_OBJECT (qtdemux, "version: %d", version);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_type: %d", ambisonic_type);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_order: %d", ambisonic_order);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_channel_ordering: %d",
+      ambisonic_channel_ordering);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_normalization: %d",
+      ambisonic_normalization);
+  GST_DEBUG_OBJECT (qtdemux, "num_channels: %d", num_channels);
+  for (i = 0; i < num_channels; ++i)
+    GST_DEBUG_OBJECT (qtdemux, "channel_map: %d", channel_map[i]);
+
+  if (version == RFC_AMBISONIC_SA3DBOX_VERSION_SUPPORTED) {
+    if (ambisonic_type == RFC_AMBISONIC_TYPE_PERIPHONIC)
+      qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_PERIPHONIC;
+
+    /* First-order ambisonics: 4 channels; distinguish AmbiX (ACN/SN3D,
+     * identity channel map) from AMB/FuMa by ordering+normalization+map */
+    if (ambisonic_order == RFC_AMBISONIC_ORDER_FOA) {
+      if (num_channels == 4) {
+        qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_FOA;
+
+        if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_ACN)
+            && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_SN3D)
+            && (channel_map[0] == 0) && (channel_map[1] == 1)
+            && (channel_map[2] == 2) && (channel_map[3] == 3))
+          qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMBIX;
+
+        if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_FUMA)
+            && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_FUMA)
+            && (channel_map[0] == 0) && (channel_map[1] == 3)
+            && (channel_map[2] == 1) && (channel_map[3] == 2))
+          qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMB;
+      }
+    }
+  }
+
+  return;
+}
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
qtdemux_update_default_sample_encryption_settings (GstQTDemux * qtdemux,
QtDemuxCencSampleSetInfo * info, guint32 is_encrypted, guint8 iv_size,
0xa2, 0x44, 0x6c, 0x42, 0x7c, 0x64, 0x8d, 0xf4
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ static const guint8 spherical_uuid[] = {
+ 0xff, 0xcc, 0x82, 0x63, 0xf8, 0x55, 0x4a, 0x93,
+ 0x88, 0x14, 0x58, 0x7a, 0x02, 0x52, 0x1f, 0xdd
+ };
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
guint offset;
/* counts as header data */
return;
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (memcmp (buffer + offset, spherical_uuid, 16) == 0) {
+ const char *contents;
+
+ GST_DEBUG_OBJECT (qtdemux, "spherical uuid was found");
+ contents = (char *) (buffer + offset + 16);
+ GST_DEBUG_OBJECT (qtdemux, "contents: %s\n", contents);
+
+ if (qtdemux->spherical_metadata)
+ _parse_spatial_video_metadata_from_xml_string (qtdemux, contents);
+
+ return;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
if (memcmp (buffer + offset, xmp_uuid, 16) == 0) {
GstBuffer *buf;
GstTagList *taglist;
beach:
if (ret == GST_FLOW_EOS && (qtdemux->got_moov || qtdemux->media_caps)) {
/* digested all data, show what we have */
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (qtdemux->spherical_metadata)
+ _send_spherical_metadata_msg_to_bus (qtdemux);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
qtdemux_prepare_streams (qtdemux);
QTDEMUX_EXPOSE_LOCK (qtdemux);
ret = qtdemux_expose_streams (qtdemux);
qtdemux_parse_container (qtdemux, node, buffer + 36, end);
break;
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ case FOURCC_SA3D:
+ {
+ qtdemux_parse_SA3D (qtdemux, buffer, end - buffer);
+ break;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
default:
if (!strcmp (type->name, "unknown"))
GST_MEMDUMP ("Unknown tag", buffer + 4, end - buffer - 4);
QTDEMUX_STATE_BUFFER_MDAT /* Buffering the mdat atom */
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata;
+#endif
+
struct _GstQTDemux {
GstElement element;
* fields. */
gboolean received_seek;
gboolean first_moof_already_parsed;
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ QtDemuxSphericalMetadata *spherical_metadata;
+#endif
};
struct _GstQTDemuxClass {
#define QT_FOURCC(a) (GST_READ_UINT32_LE(a))
#define QT_UINT64(a) ((((guint64)QT_UINT32(a))<<32)|QT_UINT32(((guint8 *)a)+4))
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+#define RFC_AMBISONIC_SA3DBOX_VERSION_SUPPORTED 0
+#define RFC_AMBISONIC_TYPE_PERIPHONIC 0
+#define RFC_AMBISONIC_ORDER_FOA 1
+#define RFC_AMBISONIC_CHANNEL_ORDERING_ACN 0
+#define RFC_AMBISONIC_CHANNEL_ORDERING_FUMA 1 /* FIXME: Currently value is not defined in Spatial Audio RFC */
+#define RFC_AMBISONIC_NORMALIZATION_SN3D 0
+#define RFC_AMBISONIC_NORMALIZATION_FUMA 1 /* FIXME: Currently value is not defined in Spatial Audio RFC */
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
typedef enum {
QT_FLAG_NONE = (0),
QT_FLAG_CONTAINER = (1 << 0)
TR_COMPOSITION_TIME_OFFSETS = 0x000800 /* sample-composition-time-offsets-presents */
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+typedef enum
+{
+ QTDEMUX_AMBISONIC_TYPE_UNKNOWN = 0,
+ QTDEMUX_AMBISONIC_TYPE_PERIPHONIC = 1, /* To comply with Google's Spatial Audio RFC */
+ QTDEMUX_AMBISONIC_TYPE_NON_PERIPHONIC = 2,
+} QTDEMUX_AMBISONIC_TYPE;
+
+typedef enum
+{
+ QTDEMUX_AMBISONIC_FORMAT_UNKNOWN = 0,
+ QTDEMUX_AMBISONIC_FORMAT_AMBIX = 1, /* AMBIX (Channel sequence: ACN, Normalization: SN3D) */
+ QTDEMUX_AMBISONIC_FORMAT_AMB = 2, /* .AMB, Tetraproc (Channel sequence: FuMa, Normalization: FuMa) */
+ QTDEMUX_AMBISONIC_FORMAT_UA = 3, /* Universal Ambisonics (Channel sequence: SID, Normalization: N3D) */
+} QTDEMUX_AMBISONIC_FORMAT;
+
+typedef enum
+{
+ QTDEMUX_AMBISONIC_ORDER_UNKNOWN = 0,
+ QTDEMUX_AMBISONIC_ORDER_FOA = 1, /* First order Ambisonics */
+ QTDEMUX_AMBISONIC_ORDER_TOA = 3, /* Third order Ambisonics */
+ QTDEMUX_AMBISONIC_ORDER_HOA, /* Higher order Ambisonics */
+} QTDEMUX_AMBISONIC_ORDER;
+
+typedef enum
+{
+ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_UNKNOWN = 0,
+ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_ACN = 1, /* Ambisonic Channel Number (ACN) system */
+ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_FUMA = 2, /* Furse-Malham ordering */
+ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_SID = 3, /* Single Index Designation ordering */
+} QTDEMUX_AMBISONIC_CHANNEL_ORDERING;
+
+typedef enum
+{
+ QTDEMUX_AMBISONIC_NORMALIZATION_UNKNOWN = 0,
+ QTDEMUX_AMBISONIC_NORMALIZATION_SN3D = 1, /* Schmidt semi-normalization */
+ QTDEMUX_AMBISONIC_NORMALIZATION_FUMA = 2, /* Furse-Malham MaxN normalization */
+ QTDEMUX_AMBISONIC_NORMALIZATION_N3D = 3, /* Full 3D normalization */
+} QTDEMUX_AMBISONIC_NORMALIZATION;
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
const QtNodeType *qtdemux_type_get (guint32 fourcc);
G_END_DECLS
static GstStaticCaps intra_caps = GST_STATIC_CAPS ("image/jpeg; "
"video/x-raw; image/png; video/x-dv; video/x-huffyuv; video/x-ffv; "
"video/x-compressed-yuv");
+ GstCaps *tmp = gst_static_caps_get (&intra_caps);
+
context->intra_only =
- gst_caps_can_intersect (gst_static_caps_get (&intra_caps), caps);
+ gst_caps_can_intersect (tmp, caps);
+ gst_caps_unref(tmp);
}
if (buf)
/* GStreamer ReplayGain volume adjustment
*
* Copyright (C) 2007 Rene Stadler <mail@renestadler.de>
- *
+ *
* gstrgvolume.c: Element to apply ReplayGain volume adjustment
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
- *
+ *
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
- *
+ *
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* The information carried by these tags must have been calculated beforehand by
* performing the ReplayGain analysis. This is implemented by the <link
* linkend="GstRgAnalysis">rganalysis</link> element.
- *
+ *
* The signal compression/limiting recommendations outlined in the proposed
* standard are not implemented by this element. This has to be handled by
* separate elements because applications might want to have additional filters
* between the volume adjustment and the limiting stage. A basic limiter is
* included with this plugin: The <link linkend="GstRgLimiter">rglimiter</link>
* element applies -6 dB hard limiting as mentioned in the ReplayGain standard.
- *
+ *
* <refsect2>
* <title>Example launch line</title>
* |[
enum
{
PROP_0,
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ PROP_ENABLE_RGVOLUME,
+#endif
PROP_ALBUM_MODE,
PROP_HEADROOM,
PROP_PRE_AMP,
PROP_RESULT_GAIN
};
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+#define DEFAULT_ENABLE_RGVOLUME TRUE
+#endif
#define DEFAULT_ALBUM_MODE TRUE
#define DEFAULT_HEADROOM 0.0
#define DEFAULT_PRE_AMP 0.0
gobject_class->get_property = gst_rg_volume_get_property;
gobject_class->dispose = gst_rg_volume_dispose;
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ /**
+ * GstRgVolume:enable-rgvolume:
+ *
+ * Whether to enable replaygain volume.
+ *
+ * If rgvolume is disabled, it is not affected by tags or properties.
+ */
+ g_object_class_install_property (gobject_class, PROP_ENABLE_RGVOLUME,
+ g_param_spec_boolean ("enable-rgvolume", "Enable rg volume",
+ "Whether to enable replaygain volume", DEFAULT_ENABLE_RGVOLUME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif
+
/**
* GstRgVolume:album-mode:
*
GObjectClass *volume_class;
GstPad *volume_pad, *ghost_pad;
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ self->enable_rgvolume = DEFAULT_ENABLE_RGVOLUME;
+#endif
self->album_mode = DEFAULT_ALBUM_MODE;
self->headroom = DEFAULT_HEADROOM;
self->pre_amp = DEFAULT_PRE_AMP;
GstRgVolume *self = GST_RG_VOLUME (object);
switch (prop_id) {
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ case PROP_ENABLE_RGVOLUME:
+ self->enable_rgvolume = g_value_get_boolean (value);
+ break;
+#endif
case PROP_ALBUM_MODE:
self->album_mode = g_value_get_boolean (value);
break;
GstRgVolume *self = GST_RG_VOLUME (object);
switch (prop_id) {
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ case PROP_ENABLE_RGVOLUME:
+ g_value_set_boolean (value, self->enable_rgvolume);
+ break;
+#endif
case PROP_ALBUM_MODE:
g_value_set_boolean (value, self->album_mode);
break;
break;
case PROP_TARGET_GAIN:
g_value_set_double (value, self->target_gain);
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ if (!self->enable_rgvolume)
+ g_value_set_double (value, 0.0);
+#endif
break;
case PROP_RESULT_GAIN:
g_value_set_double (value, self->result_gain);
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ if (!self->enable_rgvolume)
+ g_value_set_double (value, 0.0);
+#endif
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
gdouble target_gain, result_gain, result_volume;
gboolean target_gain_changed, result_gain_changed;
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ if (!self->enable_rgvolume) {
+ g_object_set (self->volume_element, "volume", 1.0, NULL);
+ return;
+ }
+#endif
gst_rg_volume_determine_gain (self, &target_gain, &result_gain);
result_volume = DB_TO_LINEAR (result_gain);
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
- *
+ *
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
- *
+ *
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
GstElement *volume_element;
gdouble max_volume;
+#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
+ gboolean enable_rgvolume;
+#endif
gboolean album_mode;
gdouble headroom;
gdouble pre_amp;
-/* GStreamer
- * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA 02110-1301, USA.
- */
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
/**
* SECTION:element-rtpbin
gulong buffer_ptreq_sig;
gulong buffer_ntpstop_sig;
gint percent;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gint prev_percent;
+#endif
/* the PT demuxer of the SSRC */
GstElement *demux;
gulong demux_newpad_sig;
create_stream (GstRtpBinSession * session, guint32 ssrc)
{
GstElement *buffer, *demux = NULL;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstElement *queue2 = NULL;
+#endif
GstRtpBinStream *stream;
GstRtpBin *rtpbin;
GstState target;
if (!(demux = gst_element_factory_make ("rtpptdemux", NULL)))
goto no_demux;
}
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (session->bin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ if (!(queue2 = gst_element_factory_make ("queue2", NULL)))
+ goto no_queue2;
+#endif
stream = g_new0 (GstRtpBinStream, 1);
stream->ssrc = ssrc;
stream->bin = rtpbin;
stream->rt_delta = 0;
stream->rtp_delta = 0;
stream->percent = 100;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ stream->prev_percent = 0;
+#endif
stream->clock_base = -100 * GST_SECOND;
session->streams = g_slist_prepend (session->streams, stream);
g_object_set (buffer, "max-ts-offset-adjustment",
rtpbin->max_ts_offset_adjustment, NULL);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* configure queue2 to use live buffering */
+ if (queue2) {
+ g_object_set_data (G_OBJECT (queue2), "GstRTPBin.stream", stream);
+ g_object_set (queue2, "use-buffering", TRUE, NULL);
+ g_object_set (queue2, "buffer-mode", GST_BUFFERING_LIVE, NULL);
+ }
+#endif
/* need to sink the jitterbufer or otherwise signal handlers from bindings will
* take ownership of it and we don't own it anymore */
gst_object_ref_sink (buffer);
if (!rtpbin->ignore_pt)
gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2)
+ gst_bin_add (GST_BIN_CAST (rtpbin), queue2);
+#endif
+
gst_bin_add (GST_BIN_CAST (rtpbin), buffer);
/* unref the jitterbuffer again, the bin has a reference now and
gst_object_unref (buffer);
/* link stuff */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2) {
+ gst_element_link_pads_full (buffer, "src", queue2, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ if (demux) {
+ gst_element_link_pads_full (queue2, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ }
+ } else if (demux) {
+ gst_element_link_pads_full (buffer, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ }
+#else
if (demux)
gst_element_link_pads_full (buffer, "src", demux, "sink",
GST_PAD_LINK_CHECK_NOTHING);
+#endif
if (rtpbin->buffering) {
guint64 last_out;
gst_element_set_state (buffer, target);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2)
+ gst_element_set_state (queue2, target);
+#endif
+
return stream;
/* ERRORS */
g_warning ("rtpbin: could not create rtpptdemux element");
return NULL;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+no_queue2:
+ {
+ gst_object_unref (buffer);
+ gst_object_unref (demux);
+ g_warning ("rtpbin: could not create queue2 element");
+ return NULL;
+ }
+#endif
}
/* called with RTP_BIN_LOCK */
gint min_percent = 100;
GSList *sessions, *streams;
GstRtpBinStream *stream;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gboolean buffering_flag = FALSE, update_buffering_status = TRUE;
+#endif
gboolean change = FALSE, active = FALSE;
GstClockTime min_out_time;
GstBufferingMode mode;
for (streams = session->streams; streams;
streams = g_slist_next (streams)) {
GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstPad *temp_pad_src = NULL;
+ GstCaps *temp_caps_src = NULL;
+ GstStructure *caps_structure;
+ const gchar *caps_str_media = NULL;
+ temp_pad_src = gst_element_get_static_pad (stream->buffer, "src");
+ temp_caps_src = gst_pad_get_current_caps (temp_pad_src);
+ GST_DEBUG_OBJECT (bin,
+ "stream %p percent %d : temp_caps_src=%" GST_PTR_FORMAT,
+ stream, stream->percent, temp_caps_src);
+ if (temp_caps_src) {
+ caps_structure = gst_caps_get_structure (temp_caps_src, 0);
+ caps_str_media =
+ gst_structure_get_string (caps_structure, "media");
+ if (caps_str_media != NULL) {
+ if ((strcmp (caps_str_media, "video") != 0)
+ && (strcmp (caps_str_media, "audio") != 0)) {
+ GST_DEBUG_OBJECT (bin,
+ "Non Audio/Video Stream.. ignoring the same !!");
+ gst_caps_unref (temp_caps_src);
+ gst_object_unref (temp_pad_src);
+ continue;
+ } else if (stream->percent >= 100) {
+ /* The buffering icon is shown too often during RTSP playback.
+ Optimize the buffering update logic: once any stream's percentage
+ has reached 100, do not post further buffering messages. */
+ if (stream->prev_percent < 100)
+ buffering_flag = TRUE;
+ else
+ update_buffering_status = FALSE;
+ }
+ }
+ gst_caps_unref (temp_caps_src);
+ }
+ gst_object_unref (temp_pad_src);
+#else
GST_DEBUG_OBJECT (bin, "stream %p percent %d", stream,
stream->percent);
-
+#endif
/* find min percent */
if (min_percent > stream->percent)
min_percent = stream->percent;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* Updating prev stream percentage */
+ stream->prev_percent = stream->percent;
+#endif
}
} else {
GST_INFO_OBJECT (bin,
GST_RTP_SESSION_UNLOCK (session);
}
GST_DEBUG_OBJECT (bin, "min percent %d", min_percent);
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode != RTP_JITTER_BUFFER_MODE_SLAVE) {
+ if (rtpbin->buffering) {
+ if (min_percent == 100) {
+ rtpbin->buffering = FALSE;
+ active = TRUE;
+ change = TRUE;
+ }
+ } else {
+ if (min_percent < 100) {
+ /* pause the streams */
+ rtpbin->buffering = TRUE;
+ active = FALSE;
+ change = TRUE;
+ }
+ }
+ }
+#else
if (rtpbin->buffering) {
if (min_percent == 100) {
rtpbin->buffering = FALSE;
change = TRUE;
}
}
+#endif
GST_RTP_BIN_UNLOCK (rtpbin);
gst_message_unref (message);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE) {
+ if (update_buffering_status == FALSE)
+ break;
+ if (buffering_flag) {
+ min_percent = 100;
+ GST_DEBUG_OBJECT (bin, "forcefully change min_percent to 100!!!");
+ }
+ }
+#endif
/* make a new buffering message with the min value */
message =
gst_message_new_buffering (GST_OBJECT_CAST (bin), min_percent);
gst_message_set_buffering_stats (message, mode, avg_in, avg_out,
buffering_left);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ goto slave_buffering;
+#endif
if (G_UNLIKELY (change)) {
GstClock *clock;
guint64 running_time = 0;
GST_RTP_BIN_UNLOCK (rtpbin);
}
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+slave_buffering:
+#endif
GST_BIN_CLASS (parent_class)->handle_message (bin, message);
break;
}
#define DEFAULT_BACKCHANNEL GST_RTSP_BACKCHANNEL_NONE
#define DEFAULT_TEARDOWN_TIMEOUT (100 * GST_MSECOND)
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+#define DEFAULT_START_POSITION 0
+#endif
+
enum
{
PROP_0,
PROP_DEBUG,
PROP_RETRY,
PROP_TIMEOUT,
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ PROP_START_POSITION,
+ PROP_RESUME_POSITION,
+#endif
PROP_TCP_TIMEOUT,
PROP_LATENCY,
PROP_DROP_ON_LATENCY,
}
#endif
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+static void
+gst_rtspsrc_post_error_message (GstRTSPSrc * src, GstRTSPSrcError error_id,
+ const gchar * error_string)
+{
+ GstMessage *message;
+ GstStructure *structure;
+ gboolean ret = TRUE;
+
+ GST_ERROR_OBJECT (src, "[%d] %s", error_id, error_string);
+
+ structure = gst_structure_new ("streaming_error",
+ "error_id", G_TYPE_UINT, error_id,
+ "error_string", G_TYPE_STRING, error_string, NULL);
+
+ message =
+ gst_message_new_custom (GST_MESSAGE_ERROR, GST_OBJECT (src), structure);
+
+ ret = gst_element_post_message (GST_ELEMENT (src), message);
+ if (!ret)
+ GST_ERROR_OBJECT (src, "fail to post error message.");
+
+ return;
+}
+#endif
+
static gboolean
default_select_stream (GstRTSPSrc * src, guint id, GstCaps * caps)
{
"Retry TCP transport after UDP timeout microseconds (0 = disabled)",
0, G_MAXUINT64, DEFAULT_TIMEOUT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_object_class_install_property (gobject_class, PROP_START_POSITION,
+ g_param_spec_uint64 ("pending-start-position", "set start position",
+ "Set start position before PLAYING request.",
+ 0, G_MAXUINT64, DEFAULT_START_POSITION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RESUME_POSITION,
+ g_param_spec_uint64 ("resume-position", "set resume position",
+ "Set resume position before PLAYING request after pause.",
+ 0, G_MAXUINT64, DEFAULT_START_POSITION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif
g_object_class_install_property (gobject_class, PROP_TCP_TIMEOUT,
g_param_spec_uint64 ("tcp-timeout", "TCP Timeout",
"Fail after timeout microseconds on TCP connections (0 = disabled)",
src->debug = DEFAULT_DEBUG;
src->retry = DEFAULT_RETRY;
src->udp_timeout = DEFAULT_TIMEOUT;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ src->start_position = DEFAULT_START_POSITION;
+ src->is_audio_codec_supported = FALSE;
+ src->is_video_codec_supported = FALSE;
+ src->audio_codec = NULL;
+ src->video_codec = NULL;
+ src->video_frame_size = NULL;
+#endif
gst_rtspsrc_set_tcp_timeout (src, DEFAULT_TCP_TIMEOUT);
src->latency = DEFAULT_LATENCY_MS;
src->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
src->version = GST_RTSP_VERSION_INVALID;
src->teardown_timeout = DEFAULT_TEARDOWN_TIMEOUT;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_mutex_init (&(src)->pause_lock);
+ g_cond_init (&(src)->open_end);
+#endif
/* get a list of all extensions */
src->extensions = gst_rtsp_ext_list_get ();
rtspsrc = GST_RTSPSRC (object);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ rtspsrc->is_audio_codec_supported = FALSE;
+ rtspsrc->is_video_codec_supported = FALSE;
+ if (rtspsrc->audio_codec) {
+ g_free (rtspsrc->audio_codec);
+ rtspsrc->audio_codec = NULL;
+ }
+ if (rtspsrc->video_codec) {
+ g_free (rtspsrc->video_codec);
+ rtspsrc->video_codec = NULL;
+ }
+ if (rtspsrc->video_frame_size) {
+ g_free (rtspsrc->video_frame_size);
+ rtspsrc->video_frame_size = NULL;
+ }
+#endif
gst_rtsp_ext_list_free (rtspsrc->extensions);
g_free (rtspsrc->conninfo.location);
gst_rtsp_url_free (rtspsrc->conninfo.url);
g_free (rtspsrc->multi_iface);
g_free (rtspsrc->user_agent);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_mutex_clear (&(rtspsrc)->pause_lock);
+ g_cond_clear (&(rtspsrc)->open_end);
+#endif
+
if (rtspsrc->sdp) {
gst_sdp_message_free (rtspsrc->sdp);
rtspsrc->sdp = NULL;
case PROP_TIMEOUT:
rtspsrc->udp_timeout = g_value_get_uint64 (value);
break;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ case PROP_START_POSITION:
+ rtspsrc->start_position = g_value_get_uint64 (value);
+ break;
+ case PROP_RESUME_POSITION:
+ rtspsrc->last_pos = g_value_get_uint64 (value);
+ GST_DEBUG_OBJECT (rtspsrc, "src->last_pos value set to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (rtspsrc->last_pos));
+ break;
+#endif
case PROP_TCP_TIMEOUT:
gst_rtspsrc_set_tcp_timeout (rtspsrc, g_value_get_uint64 (value));
break;
case PROP_TIMEOUT:
g_value_set_uint64 (value, rtspsrc->udp_timeout);
break;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ case PROP_START_POSITION:
+ g_value_set_uint64 (value, rtspsrc->start_position);
+ break;
+ case PROP_RESUME_POSITION:
+ g_value_set_uint64 (value, rtspsrc->last_pos);
+ break;
+#endif
case PROP_TCP_TIMEOUT:
{
guint64 timeout;
GstStructure *s;
const gchar *enc;
PtMapItem item;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ const gchar *encoder, *mediatype;
+#endif
pt = atoi (gst_sdp_media_get_format (media, i));
GST_DEBUG_OBJECT (src, " looking at %d pt: %d", i, pt);
if (strcmp (enc, "X-ASF-PF") == 0)
stream->container = TRUE;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if ((mediatype = gst_structure_get_string (s, "media"))) {
+ GST_DEBUG_OBJECT (src, " mediatype : %s", mediatype);
+ if (!strcmp (mediatype, "video")) {
+ if ((encoder = gst_structure_get_string (s, "encoding-name"))) {
+ GST_DEBUG_OBJECT (src, " encoder : %s", encoder);
+ if ((!strcmp (encoder, "H261")) ||
+ (!strcmp (encoder, "H263")) ||
+ (!strcmp (encoder, "H263-1998"))
+ || (!strcmp (encoder, "H263-2000")) || (!strcmp (encoder, "H264"))
+ || (!strcmp (encoder, "MP4V-ES"))) {
+ src->is_video_codec_supported = TRUE;
+ GST_DEBUG_OBJECT (src, "Supported Video Codec %s", encoder);
+ } else {
+ GST_DEBUG_OBJECT (src, "Unsupported Video Codec %s", encoder);
+ }
+ }
+
+ src->video_codec = g_strdup (encoder);
+ src->video_frame_size =
+ g_strdup (gst_structure_get_string (s, "a-framesize"));
+ GST_DEBUG_OBJECT (src, "video_codec %s , video_frame_size %s ",
+ src->video_codec, src->video_frame_size);
+ } else if (!strcmp (mediatype, "audio")) {
+ if ((encoder = gst_structure_get_string (s, "encoding-name"))) {
+ GST_DEBUG_OBJECT (src, " encoder : %s", encoder);
+ if ((!strcmp (encoder, "MP4A-LATM")) ||
+ (!strcmp (encoder, "AMR")) || (!strcmp (encoder, "AMR-WB"))
+ || (!strcmp (encoder, "AMR-NB"))
+ || (!strcmp (encoder, "mpeg4-generic"))
+ || (!strcmp (encoder, "MPEG4-GENERIC"))
+ || (!strcmp (encoder, "QCELP")) || ((strstr (encoder, "G726"))
+ || (strstr (encoder, "PCMU")))) {
+ src->is_audio_codec_supported = TRUE;
+ GST_DEBUG_OBJECT (src, "Supported Audio Codec %s", encoder);
+ } else {
+ GST_DEBUG_OBJECT (src, "Unsupported Audio Codec %s", encoder);
+ }
+ }
+
+ src->audio_codec = g_strdup (encoder);
+ GST_DEBUG_OBJECT (src, "audio_codec %s ", src->audio_codec);
+ }
+ }
+#endif
/* Merge in global caps */
/* Intersect will merge in missing fields to the current caps */
{
GList *walk;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_WARNING_OBJECT (src, "Setting [%s] element state to: %s \n",
+ GST_ELEMENT_NAME (GST_ELEMENT_CAST (src)),
+ gst_element_state_get_name (state));
+#endif
if (src->manager)
gst_element_set_state (GST_ELEMENT_CAST (src->manager), state);
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_SERVER,
+ "Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
g_free (str);
gst_rtsp_message_unset (&message);
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
+ "Could not handle server message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not handle server message. (%s)", str));
+#endif
g_free (str);
gst_rtsp_message_unset (&message);
return GST_FLOW_ERROR;
src->conninfo.connected = FALSE;
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not connect to server.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
("Could not connect to server. (%s)", str));
+#endif
g_free (str);
ret = GST_FLOW_ERROR;
} else {
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
+ "Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
g_free (str);
return GST_FLOW_ERROR;
}
gst_rtsp_message_unset (&message);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
+ "Could not handle server message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not handle server message. (%s)", str));
+#endif
g_free (str);
ret = GST_FLOW_ERROR;
} else {
{
src->cur_protocols = 0;
/* no transport possible, post an error and stop */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_TRANSPORT,
+ "Could not receive any UDP packets for seconds, maybe your firewall is blocking it. No other protocols to try.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive any UDP packets for %.4f seconds, maybe your "
"firewall is blocking it. No other protocols to try.",
gst_guint64_to_gdouble (src->udp_timeout) / 1000000.0));
+#endif
return GST_RTSP_ERROR;
}
open_failed:
static void
gst_rtspsrc_loop_complete_cmd (GstRTSPSrc * src, gint cmd)
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstMessage *s;
+ GST_WARNING_OBJECT (src, "Got cmd %s", cmd_to_string (cmd));
+#endif
+
switch (cmd) {
case CMD_OPEN:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_DEBUG_OBJECT (src,
+ "rtsp_duration %" GST_TIME_FORMAT
+ ", rtsp_audio_codec %s , rtsp_video_codec %s , rtsp_video_frame_size %s",
+ GST_TIME_ARGS (src->segment.duration), src->audio_codec,
+ src->video_codec, src->video_frame_size);
+
+ /* post message */
+ s = gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_new ("rtspsrc_properties",
+ "rtsp_duration", G_TYPE_UINT64, src->segment.duration,
+ "rtsp_audio_codec", G_TYPE_STRING, src->audio_codec,
+ "rtsp_video_codec", G_TYPE_STRING, src->video_codec,
+ "rtsp_video_frame_size", G_TYPE_STRING, src->video_frame_size,
+ NULL));
+
+ gst_element_post_message (GST_ELEMENT_CAST (src), s);
+#endif
GST_ELEMENT_PROGRESS (src, COMPLETE, "open", ("Opened Stream"));
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* rtspsrc should parse the SDP here, before the PAUSE state change completes. */
+ g_mutex_lock (&(src)->pause_lock);
+ g_cond_signal (&(src)->open_end);
+ g_mutex_unlock (&(src)->pause_lock);
+#endif
break;
case CMD_PLAY:
GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PLAY request"));
switch (cmd) {
case CMD_OPEN:
GST_ELEMENT_PROGRESS (src, ERROR, "open", ("Open failed"));
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* End the conditional wait for pause when open fails. */
+ g_mutex_lock (&(src)->pause_lock);
+ g_cond_signal (&(src)->open_end);
+ g_mutex_unlock (&(src)->pause_lock);
+ GST_WARNING_OBJECT (src,
+ "ending conditional wait for pause as open is failed.");
+#endif
break;
case CMD_PLAY:
GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PLAY failed"));
{
/* Output an error indicating that we couldn't connect because there were
* no supported authentication protocols */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
+ "No supported authentication protocol was found");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("No supported authentication protocol was found"));
+#endif
return FALSE;
}
no_user_pass:
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
+ "Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "receive interrupted");
}
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "send interrupted");
}
switch (response->type_data.response.code) {
case GST_RTSP_STS_NOT_FOUND:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "STS NOT FOUND");
+#else
RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_FOUND,
"Not found");
+#endif
break;
case GST_RTSP_STS_UNAUTHORIZED:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
+ "STS NOT AUTHORIZED");
+#else
RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_AUTHORIZED,
"Unauthorized");
+#endif
break;
case GST_RTSP_STS_MOVED_PERMANENTLY:
case GST_RTSP_STS_MOVE_TEMPORARILY:
res = GST_RTSP_OK;
break;
default:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
+ "Got error response from Server");
+#else
RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, READ,
"Unhandled error");
+#endif
break;
}
/* if we return ERROR we should unset the response ourselves */
/* ERRORS */
no_describe:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ "Server does not support DESCRIBE.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("Server does not support DESCRIBE."));
+#endif
return FALSE;
}
no_setup:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ "Server does not support SETUP.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("Server does not support SETUP."));
+#endif
return FALSE;
}
}
/* ERRORS */
no_protocols:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_INVALID_PROTOCOL,
+ "Could not connect to server, no protocols left");
+#else
/* no transport possible, post an error and stop */
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not connect to server, no protocols left"));
+#endif
return GST_RTSP_ERROR;
}
no_streams:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONTENT_NOT_FOUND,
+ "SDP contains no streams");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("SDP contains no streams"));
+#endif
return GST_RTSP_ERROR;
}
create_request_failed:
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto cleanup_error;
}
setup_transport_failed:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not setup transport.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Could not setup transport."));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
response_error:
{
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
const gchar *str = gst_rtsp_status_as_text (code);
+#endif
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
+ "Error from Server .");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Error (%d): %s", code, GST_STR_NULL (str)));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "send interrupted");
}
{
/* none of the available error codes is really right .. */
if (unsupported_real) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ "No supported stream was found. You might need to install a GStreamer RTSP extension plugin for Real media streams.");
+#else
GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
(_("No supported stream was found. You might need to install a "
"GStreamer RTSP extension plugin for Real media streams.")),
(NULL));
+#endif
} else {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ "No supported stream was found. You might need to allow more transport protocols or may otherwise be missing the right GStreamer RTSP extension plugin.");
+#else
GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
(_("No supported stream was found. You might need to allow "
"more transport protocols or may otherwise be missing "
"the right GStreamer RTSP extension plugin.")), (NULL));
+#endif
}
return GST_RTSP_ERROR;
}
/* we need to start playback without clipping from the position reported by
* the server */
segment->start = seconds;
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
+/*
+ * The range-min points to the start of the segment, not the current position.
+ * After getting the current position from MSL during a normal pause/resume or
+ * during a seek, we must not update segment->position again with the RTP
+ * header NPT timestamp.
+ */
segment->position = seconds;
+#endif
if (therange->max.type == GST_RTSP_TIME_NOW)
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ seconds = 0;
+#else
seconds = -1;
+#endif
else if (therange->max.type == GST_RTSP_TIME_END)
seconds = -1;
else
src->control = g_strdup (control);
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ src->is_audio_codec_supported = FALSE;
+ src->is_video_codec_supported = FALSE;
+#endif
+
/* create streams */
n_streams = gst_sdp_message_medias_len (sdp);
for (i = 0; i < n_streams; i++) {
}
src->state = GST_RTSP_STATE_INIT;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+  /* Check whether the media codecs are supported */
+ if ((!src->is_audio_codec_supported) && (!src->is_video_codec_supported)) {
+ GST_ERROR_OBJECT (src, "UnSupported Media Type !!!! \n");
+ goto unsupported_file_type;
+ } else {
+ GST_DEBUG_OBJECT (src, "Supported Media Type. \n");
+ }
+#endif
/* setup streams */
if ((res = gst_rtspsrc_setup_streams_start (src, async)) < 0)
goto setup_failed;
gst_rtspsrc_cleanup (src);
return res;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+unsupported_file_type:
+ {
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ "No supported stream was found");
+ res = GST_RTSP_ERROR;
+ gst_rtspsrc_cleanup (src);
+ return res;
+ }
+#endif
}
static GstRTSPResult
/* ERRORS */
no_url:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_INVALID_URL,
+ "No valid RTSP URL was provided");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL),
("No valid RTSP URL was provided"));
+#endif
goto cleanup_error;
}
connect_failed:
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Failed to connect.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
("Failed to connect. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "connect interrupted");
}
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto cleanup_error;
}
}
wrong_content_type:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_OPTION_NOT_SUPPORTED,
+ "Server does not support SDP. ");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Server does not support SDP, got %s.", respcont));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
no_describe:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ "Server can not provide an SDP.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Server can not provide an SDP."));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto close;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "TEARDOWN interrupted");
}
gen_range_header (GstRTSPSrc * src, GstSegment * segment)
{
gchar val_str[G_ASCII_DTOSTR_BUF_SIZE] = { 0, };
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (src->start_position != 0 && segment->position == 0) {
+ segment->position = src->start_position;
+ src->start_position = 0;
+ }
+#endif
if (src->range && src->range->min.type == GST_RTSP_TIME_NOW) {
g_strlcpy (val_str, "now", sizeof (val_str));
} else {
((gdouble) segment->position) / GST_SECOND);
}
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_DEBUG_OBJECT (src, "Range Header Added : npt=%s-", val_str);
+#endif
return g_strdup_printf ("npt=%s-", val_str);
}
goto create_request_failed;
if (src->need_range && src->seekable >= 0.0) {
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
hval = gen_range_header (src, segment);
gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+#endif
/* store the newsegment event so it can be sent from the streaming thread. */
src->need_segment = TRUE;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ else {
+/*
+ * Update the position with the current MSL position, because
+ * gst_rtspsrc_get_position() does not return the correct position.
+ */
+ GST_DEBUG_OBJECT (src,
+ " During normal pause-resume , segment->position=%" GST_TIME_FORMAT
+ ",src->start_position=%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->position),
+ GST_TIME_ARGS (src->start_position));
+ segment->position = src->last_pos;
+ }
+
+/*
+ * Send the NPT range header with every PLAY request so that the segment
+ * position is updated properly.
+ */
+ hval = gen_range_header (src, segment);
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+#endif
if (segment->rate != 1.0) {
gchar hval[G_ASCII_DTOSTR_BUF_SIZE];
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request. ");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto done;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "PLAY interrupted");
}
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto done;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "PAUSE interrupted");
}
{
GstRTSPSrc *rtspsrc;
GstStateChangeReturn ret;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ guint64 end_time;
+#endif
rtspsrc = GST_RTSPSRC (element);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_WARNING_OBJECT (rtspsrc, "State change transition: %d \n", transition);
+#endif
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
ret = GST_STATE_CHANGE_SUCCESS;
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+      /* Don't change to the PAUSED state before the stream open has
+         completed. See gst_rtspsrc_loop_complete_cmd(). */
+ g_mutex_lock (&(rtspsrc)->pause_lock);
+ end_time = g_get_monotonic_time () + 10 * G_TIME_SPAN_SECOND;
+ if (!g_cond_wait_until (&(rtspsrc)->open_end, &(rtspsrc)->pause_lock,
+ end_time)) {
+ GST_WARNING_OBJECT (rtspsrc,
+ "time out: stream opend is not completed yet..");
+ }
+ g_mutex_unlock (&(rtspsrc)->pause_lock);
+#endif
ret = GST_STATE_CHANGE_NO_PREROLL;
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
#define GST_RTSP_STREAM_LOCK(rtsp) (g_rec_mutex_lock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
#define GST_RTSP_STREAM_UNLOCK(rtsp) (g_rec_mutex_unlock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+typedef enum {
+ GST_RTSPSRC_ERROR_NONE = 0,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_AUDIO,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_VIDEO,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ GST_RTSPSRC_ERROR_DNS_FAIL,
+ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
+ GST_RTSPSRC_ERROR_BAD_SERVER,
+ GST_RTSPSRC_ERROR_INVALID_PROTOCOL,
+ GST_RTSPSRC_ERROR_INVALID_URL,
+ GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
+ GST_RTSPSRC_ERROR_OUT_OF_MEMORIES,
+ GST_RTSPSRC_ERROR_RTSP_TIMEOUT,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,
+ GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
+ GST_RTSPSRC_ERROR_PAYMENT_REQUIRED,
+ GST_RTSPSRC_ERROR_FORBIDDEN,
+ GST_RTSPSRC_ERROR_CONTENT_NOT_FOUND,
+ GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ GST_RTSPSRC_ERROR_NOT_ACCEPTABLE,
+ GST_RTSPSRC_ERROR_PROXY_AUTHENTICATION_REQUIRED,
+ GST_RTSPSRC_ERROR_SERVER_TIMEOUT,
+ GST_RTSPSRC_ERROR_GONE,
+ GST_RTSPSRC_ERROR_LENGTH_REQUIRED,
+ GST_RTSPSRC_ERROR_PRECONDITION_FAILED,
+ GST_RTSPSRC_ERROR_REQUEST_ENTITY_TOO_LARGE,
+ GST_RTSPSRC_ERROR_REQUEST_URI_TOO_LARGE,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ GST_RTSPSRC_ERROR_PARAMETER_NOT_UNDERSTOOD,
+ GST_RTSPSRC_ERROR_CONFERENCE_NOT_FOUND,
+ GST_RTSPSRC_ERROR_NOT_ENOUGH_BANDWIDTH,
+ GST_RTSPSRC_ERROR_NO_SESSION_ID,
+ GST_RTSPSRC_ERROR_METHOD_NOT_VALID_IN_THIS_STATE,
+ GST_RTSPSRC_ERROR_HEADER_FIELD_NOT_VALID_FOR_SOURCE,
+ GST_RTSPSRC_ERROR_INVALID_RANGE,
+ GST_RTSPSRC_ERROR_PARAMETER_IS_READONLY,
+ GST_RTSPSRC_ERROR_AGGREGATE_OP_NOT_ALLOWED,
+ GST_RTSPSRC_ERROR_ONLY_AGGREGATE_OP_ALLOWED,
+ GST_RTSPSRC_ERROR_BAD_TRANSPORT,
+ GST_RTSPSRC_ERROR_DESTINATION_UNREACHABLE,
+ GST_RTSPSRC_ERROR_INTERNAL_SERVER_ERROR,
+ GST_RTSPSRC_ERROR_NOT_IMPLEMENTED,
+ GST_RTSPSRC_ERROR_BAD_GATEWAY,
+ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
+ GST_RTSPSRC_ERROR_GATEWAY_TIME_OUT ,
+ GST_RTSPSRC_ERROR_RTSP_VERSION_NOT_SUPPORTED,
+ GST_RTSPSRC_ERROR_OPTION_NOT_SUPPORTED,
+}_GstRTSPSrcError;
+typedef _GstRTSPSrcError GstRTSPSrcError;
+#endif
+
typedef struct _GstRTSPConnInfo GstRTSPConnInfo;
struct _GstRTSPConnInfo {
gulong manager_ptmap_id;
gboolean use_buffering;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* media type */
+ gboolean is_audio_codec_supported;
+ gboolean is_video_codec_supported;
+ gchar *audio_codec;
+ gchar *video_codec;
+ gchar *video_frame_size;
+#endif
+
GstRTSPConnInfo conninfo;
/* SET/GET PARAMETER requests queue */
GstRTSPVersion default_version;
GstRTSPVersion version;
+
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GCond open_end;
+ GMutex pause_lock;
+ guint64 start_position;
+#endif
};
struct _GstRTSPSrcClass {
if (!gst_video_balance_is_passthrough (balance)) {
static GstStaticCaps raw_caps =
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS));
+ GstCaps *tmp = gst_static_caps_get (&raw_caps);
- caps = gst_caps_intersect (caps, gst_static_caps_get (&raw_caps));
+ caps = gst_caps_intersect (caps, tmp);
+ gst_caps_unref (tmp);
if (filter) {
ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
const gst_riff_acid *acid = NULL;
const guint data_size = sizeof (gst_riff_acid);
gfloat tempo;
+#ifdef TIZEN_FEATURE_WAVPARSE_MODIFICATION
+ const guint8 *data = NULL;
+#endif
GST_INFO_OBJECT (wav, "Have acid chunk");
if (size < data_size) {
goto exit;
}
gst_adapter_flush (wav->adapter, 8);
+#ifdef TIZEN_FEATURE_WAVPARSE_MODIFICATION
+ if (gst_adapter_available (wav->adapter) < 24) {
+ goto exit;
+ }
+ data = gst_adapter_map (wav->adapter, 24);
+ tempo = GST_READ_FLOAT_LE (data + 20);
+#else
acid = (const gst_riff_acid *) gst_adapter_map (wav->adapter,
data_size);
tempo = acid->tempo;
+#endif
gst_adapter_unmap (wav->adapter);
} else {
GstMapInfo map;
--- /dev/null
+%bcond_with x
+%define gst_branch 1.0
+
+Name: gst-plugins-good
+Version: 1.16.2
+Release: 1
+License: LGPL-2.1+
+Summary: GStreamer Streaming-Media Framework Plug-Ins
+Url: http://gstreamer.freedesktop.org/
+Group: Multimedia/Framework
+Source: http://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-%{version}.tar.xz
+Source100: common.tar.gz
+BuildRequires: gcc-c++
+BuildRequires: gettext-tools
+BuildRequires: pkgconfig(glib-2.0) >= 2.32
+BuildRequires: pkgconfig(gstreamer-1.0)
+BuildRequires: pkgconfig(gstreamer-plugins-base-1.0)
+BuildRequires: libjpeg-devel
+BuildRequires: orc >= 0.4.16
+BuildRequires: python
+BuildRequires: xsltproc
+BuildRequires: pkgconfig(bzip2)
+BuildRequires: pkgconfig(libpng) >= 1.2
+BuildRequires: pkgconfig(libpulse) >= 1.0
+BuildRequires: pkgconfig(libsoup-2.4)
+BuildRequires: pkgconfig(libxml-2.0) >= 2.4.9
+# TODO find where process.h comes from, not kernel-devel and not wxWidgets so far.
+%if %{with x}
+BuildRequires: pkgconfig(ice)
+BuildRequires: pkgconfig(sm)
+BuildRequires: pkgconfig(xdamage)
+BuildRequires: pkgconfig(xfixes)
+# used by libgstvideo4linux2.so
+BuildRequires: pkgconfig(xv)
+%endif
+
+BuildRequires: pkgconfig(zlib)
+%if "%{tizen_profile_name}" != "tv"
+BuildRequires: pkgconfig(libv4l2)
+%endif
+BuildRequires: pkgconfig(vconf)
+BuildRequires: pkgconfig(gio-2.0)
+Requires: gst-plugins-base >= 1.0.0
+Requires: gstreamer >= 1.0.5
+
+%description
+GStreamer is a streaming media framework based on graphs of filters
+that operate on media data. Applications using this library can do
+anything media-related, from real-time sound processing to playing
+videos. Its plug-in-based architecture means that new data types or
+processing capabilities can be added simply by installing new plug-ins.
+
+%package extra
+Summary: Complementary plugins for %{name}
+Group: Productivity/Multimedia/Other
+Requires: %{name} = %{version}
+Enhances: gst-plugins-good
+
+%description extra
+This package provides complementary plugins for %{name} and
+plugins not included in official Tizen images, which may be used for development / experimental purposes.
+
+%prep
+%setup -q -n gst-plugins-good-%{version}
+%setup -q -T -D -a 100
+
+%build
+# FIXME:
+# warning: failed to load external entity "xml/element-v4l2src-details.xml"
+# warning: failed to load external entity "xml/plugin-video4linux2.xml"
+export V=1
+NOCONFIGURE=1 ./autogen.sh
+export CFLAGS+=" -DTIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE\
+ -DTIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID\
+ -DTIZEN_FEATURE_WAVPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_MP3PARSE_MODIFICATION\
+ -DTIZEN_FEATURE_AACPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_QTDEMUX_MODIFICATION\
+ -DTIZEN_FEATURE_FLVDEMUX_MODIFICATION\
+ -DTIZEN_FEATURE_GST_UPSTREAM\
+ -DTIZEN_FEATURE_RTSP_MODIFICATION\
+ -DTIZEN_FEATURE_GST_MUX_ENHANCEMENT\
+ -DTIZEN_FEATURE_SOUP_MODIFICATION\
+ -DTIZEN_FEATURE_RGVOLUME_MODIFICATION\
+ -DTIZEN_FEATURE_BASEPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY\
+ -fstack-protector-strong\
+ -Wl,-z,relro\
+ -D_FORTIFY_SOURCE=2"
+%configure\
+%if ! 0%{?ENABLE_AALIB}
+ --disable-aalib\
+%endif
+%if "%{tizen_profile_name}" != "tv"
+ --with-libv4l2 \
+%endif
+ --disable-gtk-doc\
+ --with-gtk=3.0\
+ --disable-monoscope\
+ --disable-y4m\
+ --disable-taglib\
+ --disable-wavpack\
+ --enable-experimental\
+ --disable-equalizer\
+%if "%{tizen_profile_name}" == "tv"
+ --disable-flv\
+ --disable-videobox\
+ --disable-videomixer\
+%endif
+ --disable-effectv\
+ --disable-alpha\
+ --disable-auparse\
+ --disable-effectv\
+ --disable-flx\
+ --disable-goom\
+ --disable-goom2k1\
+ --disable-level\
+ --disable-multipart\
+ --disable-smpte\
+ --disable-spectrum\
+ --disable-cutter\
+ --disable-dtmf\
+ --disable-oss4\
+ --disable-oss\
+ --disable-shapewipe
+
+make %{?_smp_mflags} CFLAGS+="-Wno-error" CXXFLAGS+="-Wno-error"
+
+%install
+%make_install
+%find_lang %{name}-%{gst_branch}
+
+%lang_package -f %{name}-%{gst_branch}
+
+%files
+%manifest %{name}.manifest
+%defattr(-, root, root)
+%license COPYING
+%{_libdir}/gstreamer-%{gst_branch}/libgstalaw.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstalpha.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstalphacolor.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstapetag.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstaudiofx.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstaudioparsers.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstauparse.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstautodetect.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstavi.so
+# Not yet ported
+#%{_libdir}/gstreamer-%{gst_branch}/libgstcutter.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstdebug.so
+# Not yet ported
+%{_libdir}/gstreamer-%{gst_branch}/libgstdeinterlace.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgsteffectv.so
+
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstVP8Enc.prs
+
+#%{_libdir}/gstreamer-%{gst_branch}/libgstflxdec.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstgoom.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstgoom2k1.so
+%{_libdir}/gstreamer-%{gst_branch}/libgsticydemux.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstid3demux.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstinterleave.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstisomp4.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstjpeg.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstlevel.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmatroska.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstmonoscope.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmulaw.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmultifile.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstmultipart.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstnavigationtest.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstoss4audio.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstossaudio.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstpulseaudio.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstreplaygain.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtp.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtpmanager.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtsp.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstshapewipe.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstsmpte.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstspectrum.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstspeex.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstudp.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideo4linux2.so
+
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideocrop.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideofilter.so
+%if "%{tizen_profile_name}" != "tv"
+%{_libdir}/gstreamer-%{gst_branch}/libgstflv.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstequalizer.so
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstIirEqualizer10Bands.prs
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstIirEqualizer3Bands.prs
+%{_datadir}/gstreamer-%{gst_branch}/presets/GstQTMux.prs
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideobox.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideomixer.so
+%endif
+%{_libdir}/gstreamer-%{gst_branch}/libgstwavenc.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstwavparse.so
+%if %{with x}
+%{_libdir}/gstreamer-%{gst_branch}/libgstximagesrc.so
+%endif
+#%{_libdir}/gstreamer-%{gst_branch}/libgsty4menc.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstcairo.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstsoup.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstflac.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstvpx.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstdtmf.so
+
+
+%files extra
+%manifest %{name}.manifest
+%defattr(-, root, root)
+%if 0%{?ENABLE_AALIB}
+%{_libdir}/gstreamer-%{gst_branch}/libgstaasink.so
+%endif
+%{_libdir}/gstreamer-%{gst_branch}/libgstpng.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstimagefreeze.so
msgstr "Videouređaj koristi nepodržanu metodu preplitanja (interlacing)."
msgid "Video device uses an unsupported pixel format."
-msgstr "Videouređaj koristi nepodržani format piksela."
+msgstr "Videouređaj koristi format piksela koji nije podržan."
msgid "Failed to configure internal buffer pool."
msgstr "Nije uspjelo konfigurirati interne međuspremnike (buffer pool)."
#define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header metadata */
#define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D4XX Payload Header metadata */
+#define V4L2_PIX_FMT_INVZ v4l2_fourcc('I', 'N', 'V', 'Z') /* Intel Planar Depth 16-bit */
+
/* priv field value to indicates that subsequent fields are valid. */
#define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe
static gboolean
plugin_init (GstPlugin * plugin)
{
+#ifndef TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY
const gchar *paths[] = { "/dev", "/dev/v4l2", NULL };
const gchar *names[] = { "video", NULL };
+#endif /* TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY */
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
-
+#ifndef TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY
/* Add some depedency, so the dynamic features get updated upon changes in
* /dev/video* */
gst_plugin_add_dependency (plugin,
NULL, paths, names, GST_PLUGIN_DEPENDENCY_FLAG_FILE_NAME_IS_PREFIX);
+#endif /* TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY */
if (!gst_element_register (plugin, "v4l2src", GST_RANK_PRIMARY,
GST_TYPE_V4L2SRC) ||
{V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_INVZ, TRUE, GST_V4L2_RAW},
/* Palette formats */
{V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
break;
case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ case V4L2_PIX_FMT_INVZ:
rank = GREY_BASE_RANK;
break;
case V4L2_PIX_FMT_NV24:
format = GST_VIDEO_FORMAT_NV24;
break;
+ case V4L2_PIX_FMT_INVZ:
+ format = GST_VIDEO_FORMAT_INVZ;
+ break;
default:
format = GST_VIDEO_FORMAT_UNKNOWN;
break;
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_YUV422P:
case V4L2_PIX_FMT_YVYU:
- case V4L2_PIX_FMT_YUV411P:{
+ case V4L2_PIX_FMT_YUV411P:
+ case V4L2_PIX_FMT_INVZ:{
GstVideoFormat format;
format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
if (format != GST_VIDEO_FORMAT_UNKNOWN)
case GST_VIDEO_FORMAT_GRAY16_BE:
fourcc = V4L2_PIX_FMT_Y16_BE;
break;
+ case GST_VIDEO_FORMAT_INVZ:
+ fourcc = V4L2_PIX_FMT_INVZ;
+ break;
default:
break;
}
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ PROP_CAMERA_ID,
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
PROP_LAST
};
gst_v4l2_object_install_properties_helper (gobject_class,
DEFAULT_PROP_DEVICE);
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ /**
+ * GstV4l2Src:camera-id:
+ *
+   * The value set by the application is used as the number of the device node.
+   * e.g. 1 -> /dev/video1
+ */
+ g_object_class_install_property (gobject_class, PROP_CAMERA_ID,
+ g_param_spec_uint ("camera-id", "Camera ID",
+ "Camera ID for device node", 0, G_MAXUINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
+
/**
* GstV4l2Src::prepare-format:
* @v4l2src: the v4l2src instance
if (!gst_v4l2_object_set_property_helper (v4l2src->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ case PROP_CAMERA_ID:
+ g_free (v4l2src->v4l2object->videodev);
+
+ v4l2src->camera_id = g_value_get_uint (value);
+ v4l2src->v4l2object->videodev = g_strdup_printf ("/dev/video%u", v4l2src->camera_id);
+
+ GST_INFO_OBJECT(v4l2src, "videodev [%s]", v4l2src->v4l2object->videodev);
+ break;
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
if (!gst_v4l2_object_get_property_helper (v4l2src->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ case PROP_CAMERA_ID:
+ g_value_set_uint (value, v4l2src->camera_id);
+ break;
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
/* Timestamp sanity check */
GstClockTime last_timestamp;
gboolean has_bad_timestamp;
+
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ /* Properties */
+ guint camera_id;
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
};
struct _GstV4l2SrcClass
#include <string.h>
#include <errno.h>
#include <unistd.h>
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+#include <glob.h>
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
#ifdef __sun
/* Needed on older Solaris Nevada builds (72 at least) */
#include <stropts.h>
#include "gst/gst-i18n-plugin.h"
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+enum {
+ V4L2_OPEN_ERROR = 0,
+ V4L2_OPEN_ERROR_STAT_FAILED,
+ V4L2_OPEN_ERROR_NO_DEVICE,
+ V4L2_OPEN_ERROR_NOT_OPEN,
+ V4L2_OPEN_ERROR_NOT_CAPTURE,
+ V4L2_OPEN_ERROR_NOT_OUTPUT
+};
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
{
struct stat st;
int libv4l2_fd = -1;
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ int error_type = V4L2_OPEN_ERROR_STAT_FAILED;
+ int device_index = 0;
+ glob_t glob_buf;
+
+ memset(&glob_buf, 0x0, sizeof(glob_t));
+ if (!v4l2object) {
+ GST_ERROR("v4l2object is NULL");
+ return FALSE;
+ }
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to open device %s",
v4l2object->videodev);
if (!v4l2object->videodev)
v4l2object->videodev = g_strdup ("/dev/video");
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!v4l2object->videodev) {
+ GST_ERROR_OBJECT(v4l2object->element, "videodev is NULL");
+ return FALSE;
+ }
+
+CHECK_AGAIN:
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
/* check if it is a device */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (stat (v4l2object->videodev, &st) == -1) {
+ error_type = V4L2_OPEN_ERROR_STAT_FAILED;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (stat (v4l2object->videodev, &st) == -1)
goto stat_failed;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!S_ISCHR (st.st_mode)) {
+ error_type = V4L2_OPEN_ERROR_NO_DEVICE;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!S_ISCHR (st.st_mode))
goto no_device;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
/* open the device */
v4l2object->video_fd =
open (v4l2object->videodev, O_RDWR /* | O_NONBLOCK */ );
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!GST_V4L2_IS_OPEN (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR_NOT_OPEN;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!GST_V4L2_IS_OPEN (v4l2object))
goto not_open;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
#ifdef HAVE_LIBV4L2
if (v4l2object->fd_open)
v4l2object->video_fd = libv4l2_fd;
/* get capabilities, error will be posted */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!gst_v4l2_get_capabilities (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!gst_v4l2_get_capabilities (v4l2object))
goto error;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
/* do we need to be a capture device? */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ GST_INFO_OBJECT(v4l2object->element, "device_caps 0x%x", v4l2object->device_caps);
+ if (GST_IS_V4L2SRC (v4l2object->element) &&
+ (!(v4l2object->device_caps & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) ||
+ (v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) {
+ error_type = V4L2_OPEN_ERROR_NOT_CAPTURE;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (GST_IS_V4L2SRC (v4l2object->element) &&
!(v4l2object->device_caps & (V4L2_CAP_VIDEO_CAPTURE |
V4L2_CAP_VIDEO_CAPTURE_MPLANE)))
goto not_capture;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (GST_IS_V4L2SINK (v4l2object->element) &&
+ !(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
+ V4L2_CAP_VIDEO_OUTPUT_MPLANE))) {
+ error_type = V4L2_OPEN_ERROR_NOT_OUTPUT;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (GST_IS_V4L2SINK (v4l2object->element) &&
!(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
V4L2_CAP_VIDEO_OUTPUT_MPLANE)))
goto not_output;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (GST_IS_V4L2_VIDEO_DEC (v4l2object->element) &&
!GST_V4L2_IS_M2M (v4l2object->device_caps))
gst_v4l2_adjust_buf_type (v4l2object);
/* create enumerations, posts errors. */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!gst_v4l2_fill_lists (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!gst_v4l2_fill_lists (v4l2object))
goto error;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
GST_INFO_OBJECT (v4l2object->dbg_obj,
"Opened device '%s' (%s) successfully",
if (v4l2object->extra_controls)
gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ globfree(&glob_buf);
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
/* UVC devices are never interlaced, and doing VIDIOC_TRY_FMT on them
* causes expensive and slow USB IO, so don't probe them for interlaced
*/
return TRUE;
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+pre_error_check:
+ {
+ if (GST_IS_V4L2SRC(v4l2object->element) && glob_buf.gl_pathc == 0) {
+ if (glob("/dev/video*", 0, 0, &glob_buf) != 0) {
+ GST_WARNING_OBJECT(v4l2object->element, "glob failed");
+ }
+ }
+
+ if (glob_buf.gl_pathc > 0 && device_index < glob_buf.gl_pathc) {
+ if (v4l2object->videodev) {
+ g_free(v4l2object->videodev);
+ v4l2object->videodev = NULL;
+ }
+ v4l2object->videodev = g_strdup(glob_buf.gl_pathv[device_index]);
+ if (v4l2object->videodev) {
+ device_index++;
+ GST_INFO_OBJECT(v4l2object->element, "check device [%s]",
+ v4l2object->videodev);
+
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ /* close device */
+ v4l2_close (v4l2object->video_fd);
+ v4l2object->video_fd = -1;
+ }
+ /* empty lists */
+ gst_v4l2_empty_lists (v4l2object);
+
+ goto CHECK_AGAIN;
+ } else {
+ GST_WARNING_OBJECT(v4l2object->element, "g_strdup failed [%s]",
+ glob_buf.gl_pathv[device_index]);
+ }
+ }
+
+ GST_WARNING_OBJECT(v4l2object->element, "error type : %d", error_type);
+
+ switch (error_type) {
+ case V4L2_OPEN_ERROR_STAT_FAILED:
+ goto stat_failed;
+ case V4L2_OPEN_ERROR_NO_DEVICE:
+ goto no_device;
+ case V4L2_OPEN_ERROR_NOT_OPEN:
+ goto not_open;
+ case V4L2_OPEN_ERROR_NOT_CAPTURE:
+ goto not_capture;
+ case V4L2_OPEN_ERROR_NOT_OUTPUT:
+ goto not_output;
+ default:
+ goto error;
+ }
+ }
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
/* ERRORS */
stat_failed:
{
/* empty lists */
gst_v4l2_empty_lists (v4l2object);
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ globfree(&glob_buf);
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
return FALSE;
}
}
caps = gst_caps_new_empty ();
- /* create a caps for all wave formats supported by the device
+ /* create a caps for all wave formats supported by the device
starting by the best quality format */
if (wocaps.dwFormats & WAVE_FORMAT_96S16) {
caps_temp = gst_waveform_sink_create_caps (96000, 2, GST_AUDIO_NE (S16));