--- /dev/null
- upstream_branch = upstream/1.6
+[general]
++upstream_branch = upstream/1.16
+upstream_tag = ${upstreamversion}
pulsedeviceprovider.c \
pulseutil.c
-libgstpulseaudio_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(PULSE_CFLAGS)
+libgstpulseaudio_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(PULSE_CFLAGS) $(GIO_CFLAGS)
libgstpulseaudio_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_API_VERSION) \
-lgstpbutils-$(GST_API_VERSION) \
- $(GST_BASE_LIBS) $(GST_LIBS) $(PULSE_LIBS)
+ $(GST_BASE_LIBS) $(GST_LIBS) $(PULSE_LIBS) $(GIO_LIBS)
libgstpulseaudio_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
- libgstpulseaudio_la_LIBTOOLFLAGS = $(GST_PLUGIN_LIBTOOLFLAGS)
+if PCM_DUMP_ENABLE
+libgstpulseaudio_la_CFLAGS += $(VCONF_CFLAGS) -DPCM_DUMP_ENABLE
+libgstpulseaudio_la_LIBADD += $(VCONF_LIBS)
+endif
+
noinst_HEADERS = \
pulsesink.h \
pulsesrc.h \
#include "config.h"
#endif
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <stdio.h>
+#endif
- #endif
+
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiosink.h>
#include "pulsesrc.h"
#include "pulseutil.h"
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <vconf.h>
+#endif
- #endif
+
GST_DEBUG_CATEGORY_EXTERN (pulse_debug);
#define GST_CAT_DEFAULT pulse_debug
PROP_LAST
};
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#define GST_PULSESRC_DUMP_VCONF_KEY "memory/private/sound/pcm_dump"
+#define GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX "/tmp/dump_pulsesrc_out"
+#define GST_PULSESRC_DUMP_OUTPUT_FLAG 0x00200000U
+#endif
- #endif
+
static void gst_pulsesrc_destroy_stream (GstPulseSrc * pulsesrc);
static void gst_pulsesrc_destroy_context (GstPulseSrc * pulsesrc);
PROP_MUTE, g_param_spec_boolean ("mute", "Mute",
"Mute state of this stream",
DEFAULT_MUTE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+#ifdef __TIZEN__
+ g_object_class_install_property (gobject_class,
+ PROP_AUDIO_LATENCY,
+ g_param_spec_string ("latency", "Audio Backend Latency",
+ "Audio Backend Latency (\"low\": Low Latency, \"mid\": Mid Latency, \"high\": High Latency)",
+ DEFAULT_AUDIO_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* __TIZEN__ */
}
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+static GstPadProbeReturn
+gst_pulsesrc_pad_dump_probe (GstPad *pad, GstPadProbeInfo * info, gpointer data)
+{
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (data);
+ size_t written = 0;
+ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
+ GstMapInfo in_map;
+ if (pulsesrc->dump_fd_output) {
+ gst_buffer_map(buffer, &in_map, GST_MAP_READ);
+ written = fwrite(in_map.data, 1, in_map.size, pulsesrc->dump_fd_output);
+ if (written != in_map.size)
+ GST_WARNING("failed to write!!! ferror=%d", ferror(pulsesrc->dump_fd_output));
+ gst_buffer_unmap(buffer, &in_map);
+ }
+ return GST_PAD_PROBE_OK;
+}
+#endif
- #endif
+
static void
gst_pulsesrc_init (GstPulseSrc * pulsesrc)
{
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ GstPad *srcpad = NULL;
+ int vconf_dump = 0;
+#endif
- #endif
pulsesrc->server = NULL;
pulsesrc->device = NULL;
pulsesrc->client_name = gst_pulse_client_name ();
pa_threaded_mainloop_lock (pulsesrc->mainloop);
gst_pulsesrc_destroy_context (pulsesrc);
pa_threaded_mainloop_unlock (pulsesrc->mainloop);
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
-
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (pulsesrc->dump_fd_output) {
+ fclose(pulsesrc->dump_fd_output);
+ pulsesrc->dump_fd_output = NULL;
+ }
+#endif
- #endif
return TRUE;
}
gst_pulsesrc_set_stream_volume (pulsesrc, pulsesrc->volume);
pulsesrc->volume_set = FALSE;
}
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (pulsesrc->need_dump_output) {
+ char *suffix , *dump_path;
+ GDateTime *time = NULL;
+ if (pulsesrc->dump_fd_output) {
+ fclose(pulsesrc->dump_fd_output);
+ pulsesrc->dump_fd_output = NULL;
+ }
+ time = g_date_time_new_now_local();
+ suffix = g_date_time_format(time, "%m%d_%H%M%S");
+ dump_path = g_strdup_printf("%s_%dch_%dhz_%s.pcm", GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX, pulsesrc->sample_spec.channels, pulsesrc->sample_spec.rate, suffix);
+ GST_WARNING_OBJECT(asrc,"pulse-source dumping enabled: dump path [%s]", dump_path);
+ pulsesrc->dump_fd_output = fopen(dump_path, "w+");
+
+ g_free(suffix);
+ g_free(dump_path);
+ g_date_time_unref(time);
+ }
- #endif /* PCM_DUMP_ENABLE */
+#endif
/* get the actual buffering properties now */
actual = pa_stream_get_buffer_attr (pulsesrc->stream);
#include <pulse/pulseaudio.h>
#include <pulse/thread-mainloop.h>
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <stdio.h>
+#endif
- #endif
+
G_BEGIN_DECLS
#define GST_TYPE_PULSESRC \
gboolean paused:1;
gboolean in_read:1;
+#ifdef __TIZEN__
+ gchar *latency;
+#endif /* __TIZEN__ */
+
GstStructure *properties;
pa_proplist *proplist;
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
+
- #endif
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ gint need_dump_output;
+ FILE *dump_fd_output;
+#endif
};
struct _GstPulseSrcClass
return ret;
}
- #endif
+#ifdef __TIZEN__
+#include <gio/gio.h>
+#define PA_BUS_NAME "org.pulseaudio.Server"
+#define PA_STREAM_MANAGER_OBJECT_PATH "/org/pulseaudio/StreamManager"
+#define PA_STREAM_MANAGER_INTERFACE "org.pulseaudio.StreamManager"
+#define PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO "SetVolumeRatio"
+void
+gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio)
+{
+ GDBusConnection *conn = NULL;
+ GError *err = NULL;
+ GVariant *result = NULL;
+ const gchar *dbus_ret = NULL;
+
+ conn = g_bus_get_sync (G_BUS_TYPE_SYSTEM, NULL, &err);
+ if (!conn || err) {
+ GST_ERROR ("g_bus_get_sync() error (%s)", err ? err->message : NULL);
+ if (err)
+ g_error_free (err);
+ return;
+ }
+
+ result = g_dbus_connection_call_sync (conn,
+ PA_BUS_NAME,
+ PA_STREAM_MANAGER_OBJECT_PATH,
+ PA_STREAM_MANAGER_INTERFACE,
+ PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO,
+ g_variant_new("(sud)", direction, stream_index, ratio),
+ G_VARIANT_TYPE("(s)"),
+ G_DBUS_CALL_FLAGS_NONE,
+ 1000,
+ NULL,
+ &err);
+ if (!result || err) {
+ GST_ERROR ("g_dbus_connection_call_sync() for SET_VOLUME_RATIO error (%s)", err ? err->message : NULL);
+ if (err)
+ g_error_free (err);
+ goto finish;
+ }
+ g_variant_get (result, "(&s)", &dbus_ret);
+ GST_DEBUG ("SET_VOLUME_RATIO returns value(%s) for stream index(%u), ratio(%f)", dbus_ret, stream_index, ratio);
+
+finish:
+ g_variant_unref(result);
+ g_object_unref(conn);
+
+ return;
+}
++#endif
++
+ GstCaps *
+ gst_pulse_fix_pcm_caps (GstCaps * incaps)
+ {
+ GstCaps *outcaps;
+ int i;
+
+ outcaps = gst_caps_make_writable (incaps);
+
+ for (i = 0; i < gst_caps_get_size (outcaps); i++) {
+ GstStructure *st = gst_caps_get_structure (outcaps, i);
+ const gchar *format = gst_structure_get_name (st);
+ const GValue *value;
+ GValue new_value = G_VALUE_INIT;
+ gint min, max, step;
+
+ if (!(g_str_equal (format, "audio/x-raw") ||
+ g_str_equal (format, "audio/x-alaw") ||
+ g_str_equal (format, "audio/x-mulaw")))
+ continue;
+
+ value = gst_structure_get_value (st, "rate");
+
+ if (!GST_VALUE_HOLDS_INT_RANGE (value))
+ continue;
+
+ min = gst_value_get_int_range_min (value);
+ max = gst_value_get_int_range_max (value);
+ step = gst_value_get_int_range_step (value);
+
+ if (min > PA_RATE_MAX)
+ min = PA_RATE_MAX;
+ if (max > PA_RATE_MAX)
+ max = PA_RATE_MAX;
+
+ g_value_init (&new_value, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range_step (&new_value, min, max, step);
+
+ gst_structure_take_value (st, "rate", &new_value);
+ }
+
+ return outcaps;
+ }
GstStructure *gst_pulse_make_structure (pa_proplist *properties);
GstCaps * gst_pulse_format_info_to_caps (pa_format_info * format);
+
+#ifdef __TIZEN__
+void gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio);
+#endif
+ GstCaps * gst_pulse_fix_pcm_caps (GstCaps * incaps);
+
#endif
#define REDUCE_BLOCKSIZE_LIMIT 0.20
#define REDUCE_BLOCKSIZE_COUNT 2
#define REDUCE_BLOCKSIZE_FACTOR 0.5
+ #define GROW_TIME_LIMIT (1 * GST_SECOND)
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+#define DLNA_OP_TIMED_SEEK 0x02
+#define DLNA_OP_BYTE_SEEK 0x01
+#endif
+
static void gst_soup_http_src_uri_handler_init (gpointer g_iface,
gpointer iface_data);
static void gst_soup_http_src_finalize (GObject * gobject);
src->reduce_blocksize_count = 0;
src->increase_blocksize_count = 0;
+ src->last_socket_read_time = 0;
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (src->dash_oldest_segment) {
+ g_free (src->dash_oldest_segment);
+ src->dash_oldest_segment = NULL;
+ }
+ if (src->dash_newest_segment) {
+ g_free (src->dash_newest_segment);
+ src->dash_newest_segment = NULL;
+ }
+ src->dlna_opt = 0;
+#endif
+
g_cancellable_reset (src->cancellable);
+ g_mutex_lock (&src->mutex);
if (src->input_stream) {
g_object_unref (src->input_stream);
src->input_stream = NULL;
}
if (src->session) {
- GST_DEBUG_OBJECT (src, "Removing Cookie Jar instance");
- soup_session_remove_feature_by_type(src->session, SOUP_TYPE_COOKIE_JAR);
- src->cookie_jar = NULL;
+ if (!src->session_is_shared)
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+/* When Playback is ongoing and Browser is moved to background ( Pressing Menu or Home Key ), The Session gets destroyed.
+ But the cookie_jar property remains unfreed. This results in garbage pointer and causes crash.
+ Removing the cookie_jar feature during close session of browser to handle the issue. */
- soup_session_abort (src->session);
++ {
++ GST_DEBUG_OBJECT (src, "Removing Cookie Jar instance");
++ soup_session_remove_feature_by_type(src->session, SOUP_TYPE_COOKIE_JAR);
++ src->cookie_jar = NULL;
++ soup_session_abort (src->session);
++ }
++#else
+ soup_session_abort (src->session);
+#endif
+ g_signal_handlers_disconnect_by_func (src->session,
+ G_CALLBACK (gst_soup_http_src_authenticate_cb), src);
g_object_unref (src->session);
src->session = NULL;
}
}
}
- if (g_ascii_strcasecmp (name, "Set-Cookie") == 0)
- {
- if (val)
- {
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+static void
+gst_soup_http_src_headers_foreach (const gchar * name, const gchar * val,
+ gpointer src)
+{
+ GST_INFO_OBJECT (src, " %s: %s", name, val);
+
- }
- else if (g_ascii_strcasecmp (name, "Dash-Oldest-Segment") == 0)
- {
- if (val)
- {
++ if (g_ascii_strcasecmp (name, "Set-Cookie") == 0) {
++ if (val) {
+ gboolean bret = FALSE;
+ GstStructure *s = NULL;
+ GstSoupHTTPSrc * tmp = src;
+ SoupURI *uri;
+
+ uri = soup_uri_new (tmp->location);
+
+      /* post the updated cookie & request uri to the application */
+ s = gst_structure_new ("cookies",
+ "updated-cookie", G_TYPE_STRING, val,
+ "updated-url", G_TYPE_STRING, tmp->location, NULL);
+ bret = gst_element_post_message (GST_ELEMENT_CAST (src), gst_message_new_element (GST_OBJECT_CAST (src), s));
+ soup_cookie_jar_set_cookie (tmp->cookie_jar, uri, val);
+ soup_uri_free (uri);
+
+ GST_INFO_OBJECT (src, "request url [%s], posted cookies [%s] msg and returned = %d", tmp->location, val, bret);
+ }
- }
- else if (g_ascii_strcasecmp (name, "Dash-Newest-Segment") == 0)
- {
- if (val)
- {
++ } else if (g_ascii_strcasecmp (name, "Dash-Oldest-Segment") == 0) {
++ if (val) {
+ GstSoupHTTPSrc * tmp = src;
+ tmp->dash_oldest_segment = g_strdup (val);
+ GST_INFO_OBJECT (src, "Dash-Oldest-Segment set as %s ", tmp->dash_oldest_segment);
+ }
++ } else if (g_ascii_strcasecmp (name, "Dash-Newest-Segment") == 0) {
++ if (val) {
+ GstSoupHTTPSrc * tmp = src;
+ tmp->dash_newest_segment = g_strdup (val);
+ GST_INFO_OBJECT (src, "Dash-Newest-Segment set as %s ", tmp->dash_newest_segment);
+ }
+ }
+}
+#endif
+
static GstFlowReturn
gst_soup_http_src_got_headers (GstSoupHTTPSrc * src, SoupMessage * msg)
{
*cookie);
}
}
+#endif
+ if (!src->compress)
+ soup_message_disable_feature (src->msg, SOUP_TYPE_CONTENT_DECODER);
+
soup_message_set_flags (src->msg, SOUP_MESSAGE_OVERWRITE_CHUNKS |
(src->automatic_redirect ? 0 : SOUP_MESSAGE_NO_REDIRECT));
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_SCHEDULING:
-
gst_query_parse_scheduling (query, &flags, &minsize, &maxsize, &align);
flags |= GST_SCHEDULING_FLAG_BANDWIDTH_LIMITED;
+
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (gst_soup_http_src_is_seekable(bsrc)) {
+ GST_DEBUG_OBJECT (src, "set seekable flag");
+ flags |= GST_SCHEDULING_FLAG_SEEKABLE;
+ }
+#endif
gst_query_set_scheduling (query, flags, minsize, maxsize, align);
-
break;
default:
break;
GCond have_headers_cond;
GstEvent *http_headers_event;
- #endif
+
+ gint64 last_socket_read_time;
++
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ gchar *dash_oldest_segment;
+ gchar *dash_newest_segment;
+ guint64 received_total; /* temp: for debugging */
+ guint dlna_opt; /* DLNA server option */
++#endif
};
struct _GstSoupHTTPSrcClass {
aacparse->last_parsed_channels = 0;
aacparse->last_parsed_sample_rate = 0;
}
-
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ GST_DEBUG ("Entering gst_aac_parse_src_event header type = %d",
+ aacparse->header_type);
+ if (aacparse->header_type == DSPAAC_HEADER_ADTS)
+ return gst_aac_parse_adts_src_eventfunc (parse, event);
+#endif
return GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+
+}
+
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+/**
+ * get_aac_parse_get_adts_frame_length:
+ * @data: #GstBufferData.
+ * @offset: #GstBufferData offset
+ *
+ * Implementation to get the ADTS frame length by parsing the frame header.
+ *
+ * Returns: frame size
+ */
+int
+get_aac_parse_get_adts_frame_length (const unsigned char *data, gint64 offset)
+{
+ const gint adts_header_length_no_crc = 7;
+ const gint adts_header_length_with_crc = 9;
+ gint frame_size = 0;
+ gint protection_absent;
+ gint head_size;
+
+ /* check of syncword */
+ if ((data[offset + 0] != 0xff) || ((data[offset + 1] & 0xf6) != 0xf0)) {
+ GST_ERROR ("check sync word is fail\n");
+ return -1;
+ }
+
+ /* check of protection absent */
+ protection_absent = (data[offset + 1] & 0x01);
+
+ /*check of frame length */
+ frame_size =
+ (data[offset + 3] & 0x3) << 11 | data[offset + 4] << 3 | data[offset +
+ 5] >> 5;
+
+ /* check of header size */
+ /* protectionAbsent is 0 if there is CRC */
+ head_size =
+ protection_absent ? adts_header_length_no_crc :
+ adts_header_length_with_crc;
+ if (head_size > frame_size) {
+ GST_ERROR ("return frame length as 0 (frameSize %u < headSize %u)",
+ frame_size, head_size);
+ return 0;
+ }
+
+ return frame_size;
+}
+
+/**
+ * gst_aac_parse_estimate_duration:
+ * @parse: #GstBaseParse.
+ *
+ * Implementation to estimate the total duration by parsing the first frames.
+ *
+ * Returns: TRUE if we can get the estimated total duration
+ */
+static gboolean
+gst_aac_parse_estimate_duration (GstBaseParse * parse)
+{
+ gboolean ret = FALSE;
+ GstFlowReturn res = GST_FLOW_OK;
+ gint64 pull_size = 0, file_size = 0, offset = 0, num_frames = 0, duration = 0;
+ guint sample_rate_index = 0, sample_rate = 0, channel = 0;
+ guint frame_size = 0, frame_duration_us = 0, estimated_bitrate = 0;
+ guint lost_sync_count = 0;
+ GstClockTime estimated_duration = GST_CLOCK_TIME_NONE;
+ GstBuffer *buffer = NULL;
+ guint8 *buf = NULL;
+ gint i = 0;
+ GstPadMode pad_mode = GST_PAD_MODE_NONE;
+ GstAacParse *aacparse;
+ gint64 buffer_size = 0;
+ GstMapInfo map;
+
+ aacparse = GST_AAC_PARSE (parse);
+ GST_LOG_OBJECT (aacparse, "gst_aac_parse_estimate_duration enter");
+
+  /* check that baseparse provides these functions */
+ gst_base_parse_get_pad_mode (parse, &pad_mode);
+ if (pad_mode != GST_PAD_MODE_PULL) {
+ GST_INFO_OBJECT (aacparse,
+ "aac parser is not pull mode. can not estimate duration");
+ return FALSE;
+ }
+
+ gst_base_parse_get_upstream_size (parse, &file_size);
+
+ if (file_size < ADIF_MAX_SIZE) {
+ GST_ERROR_OBJECT (aacparse, "file size is too short");
+ return FALSE;
+ }
+
+ pull_size = MIN (file_size, AAC_MAX_ESTIMATE_DURATION_BUF);
+
+ res = gst_pad_pull_range (parse->sinkpad, 0, pull_size, &buffer);
+ if (res != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (aacparse, "gst_pad_pull_range failed!");
+ return FALSE;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ buf = map.data;
+ buffer_size = map.size;
+ if (buffer_size != pull_size) {
+ GST_ERROR_OBJECT (aacparse,
+ "We got different buffer_size(%" G_GINT64_FORMAT ") with pull_size(%"
+ G_GINT64_FORMAT ").", buffer_size, pull_size);
+ }
+
+  /* MODIFICATION: defensive code for when the real buffer_size differs from pull_size */
+ for (i = 0; i < buffer_size; i++) {
+ if ((buf[i] == 0xff) && ((buf[i + 1] & 0xf6) == 0xf0)) { /* aac sync word */
+ //guint profile = (buf[i+2] >> 6) & 0x3;
+ sample_rate_index = (buf[i + 2] >> 2) & 0xf;
+ sample_rate =
+ gst_aac_parse_get_sample_rate_from_index (sample_rate_index);
+ if (sample_rate == 0) {
+ GST_WARNING_OBJECT (aacparse, "Invalid sample rate index (0)");
+ goto EXIT;
+ }
+ channel = (buf[i + 2] & 0x1) << 2 | (buf[i + 3] >> 6);
+
+ GST_INFO_OBJECT (aacparse, "found sync. aac fs=%d, ch=%d", sample_rate,
+ channel);
+
+ /* count number of frames */
+ /* MODIFICATION : add defence codes for real buffer_size is different with pull_size */
+ //while (offset < pull_size) {
+ while (offset < buffer_size) {
+ frame_size = get_aac_parse_get_adts_frame_length (buf, i + offset);
+ if (frame_size == 0) {
+ GST_ERROR_OBJECT (aacparse,
+ "framesize error at offset %" G_GINT64_FORMAT, offset);
+ break;
+ } else if (frame_size == -1) {
+ offset++;
+            lost_sync_count++;  // lost sync count limitation: 2K bytes
+ if (lost_sync_count > (1024 * 2)) {
+ GST_WARNING_OBJECT (aacparse,
+ "lost_sync_count is larger than 2048");
+ goto EXIT;
+ }
+ } else {
+ offset += frame_size;
+ num_frames++;
+ lost_sync_count = 0;
+ }
+ } /* while */
+
+ /* if we can got full file, we can calculate the accurate duration */
+ /* MODIFICATION : add defence codes for real buffer_size is different with pull_size */
+ //if (pull_size == file_size) {
+ if (buffer_size == file_size) {
+ gfloat duration_for_one_frame = 0;
+ GstClockTime calculated_duration = GST_CLOCK_TIME_NONE;
+
+ GST_INFO_OBJECT (aacparse,
+ "we got total file (%" G_GINT64_FORMAT
+ " bytes). do not estimate but make Accurate total duration.",
+ pull_size);
+
+ duration_for_one_frame =
+ (gfloat) AAC_SAMPLE_PER_FRAME / (gfloat) sample_rate;
+ calculated_duration =
+ num_frames * duration_for_one_frame * 1000 * 1000 * 1000;
+
+ GST_INFO_OBJECT (aacparse, "duration_for_one_frame %f ms",
+ duration_for_one_frame);
+ GST_INFO_OBJECT (aacparse, "calculated duration = %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (calculated_duration));
+ /* 0 means disable estimate */
+ gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
+ calculated_duration, 0);
+
+ } else {
+ GST_INFO_OBJECT (aacparse,
+ "we got %" G_GUINT64_FORMAT " bytes in total file (%"
+ G_GINT64_FORMAT "). can not make accurate duration but Estimate.",
+ pull_size, file_size);
+ frame_duration_us =
+ (1024 * 1000000ll + (sample_rate - 1)) / sample_rate;
+ duration = num_frames * frame_duration_us;
+
+ if (duration == 0) {
+ GST_WARNING_OBJECT (aacparse, "Invalid duration");
+ goto EXIT;
+ }
+ estimated_bitrate =
+ (gint) ((gfloat) (offset * 8) / (gfloat) (duration / 1000));
+
+ if (estimated_bitrate == 0) {
+ GST_WARNING_OBJECT (aacparse, "Invalid estimated_bitrate");
+ goto EXIT;
+ }
+ estimated_duration =
+ (GstClockTime) ((file_size * 8) / (estimated_bitrate * 1000)) *
+ GST_SECOND;
+
+ GST_INFO_OBJECT (aacparse, "number of frame = %" G_GINT64_FORMAT,
+ num_frames);
+ GST_INFO_OBJECT (aacparse, "duration = %" G_GINT64_FORMAT,
+ duration / 1000000);
+ GST_INFO_OBJECT (aacparse, "byte = %" G_GINT64_FORMAT, offset);
+ GST_INFO_OBJECT (aacparse, "estimated bitrate = %d bps",
+ estimated_bitrate);
+ GST_INFO_OBJECT (aacparse, "estimated duration = %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (estimated_duration));
+
+ gst_base_parse_set_average_bitrate (parse, estimated_bitrate * 1000);
+ /* set update_interval as duration(sec)/2 */
+ gst_base_parse_set_duration (parse, GST_FORMAT_TIME, estimated_duration,
+ (gint) (duration / 2));
+ }
+
+ break;
+ }
+ }
+ ret = TRUE;
+
+EXIT:
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return ret;
+}
+
+
+/* perform seek in push based mode:
+ find BYTE position to move to based on time and delegate to upstream
+*/
+static gboolean
+gst_aac_audio_parse_do_push_seek (GstBaseParse * parse,
+ GstPad * pad, GstEvent * event)
+{
+ GstAacParse *aacparse = GST_AAC_PARSE (parse);
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ gint64 cur, stop;
+ gboolean res;
+ gint64 byte_cur;
+ gint64 esimate_byte;
+ gint32 frame_dur;
+ gint64 upstream_total_bytes = 0;
+ GstFormat fmt = GST_FORMAT_BYTES;
+
+ GST_INFO_OBJECT (parse, "doing aac push-based seek");
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+ &stop_type, &stop);
+
+ /* FIXME, always play to the end */
+ stop = -1;
+
+ /* only forward streaming and seeking is possible */
+ if (rate <= 0)
+ goto unsupported_seek;
+
+ if (cur == 0) {
+ /* handle rewind only */
+ cur_type = GST_SEEK_TYPE_SET;
+ byte_cur = 0;
+ stop_type = GST_SEEK_TYPE_NONE;
+ stop = -1;
+ flags |= GST_SEEK_FLAG_FLUSH;
+ } else {
+ /* handle normal seek */
+ cur_type = GST_SEEK_TYPE_SET;
+ stop_type = GST_SEEK_TYPE_NONE;
+ stop = -1;
+ flags |= GST_SEEK_FLAG_FLUSH;
+
+ esimate_byte = (cur / (1000 * 1000)) * aacparse->frame_byte;
+ if (aacparse->sample_rate > 0)
+ frame_dur = (aacparse->spf * 1000) / aacparse->sample_rate;
+ else
+ goto unsupported_seek;
+ if (frame_dur > 0)
+ byte_cur = esimate_byte / (frame_dur);
+ else
+ goto unsupported_seek;
+
+ GST_INFO_OBJECT (parse, "frame_byte(%d) spf(%d) rate (%d) ",
+ aacparse->frame_byte, aacparse->spf, aacparse->sample_rate);
+ GST_INFO_OBJECT (parse,
+ "seek cur (%" G_GINT64_FORMAT ") = (%" GST_TIME_FORMAT ") ", cur,
+ GST_TIME_ARGS (cur));
+ GST_INFO_OBJECT (parse,
+ "esimate_byte(%" G_GINT64_FORMAT ") esimate_byte (%d)", esimate_byte,
+ frame_dur);
+ }
+
+ /* obtain real upstream total bytes */
+ if (!gst_pad_peer_query_duration (parse->sinkpad, fmt, &upstream_total_bytes))
+ upstream_total_bytes = 0;
+ GST_INFO_OBJECT (aacparse,
+ "gst_pad_query_peer_duration -upstream_total_bytes (%" G_GUINT64_FORMAT
+ ")", upstream_total_bytes);
+ aacparse->file_size = upstream_total_bytes;
+
+ if ((byte_cur == -1) || (byte_cur > aacparse->file_size)) {
+ GST_INFO_OBJECT (parse,
+ "[WEB-ERROR] seek cur (%" G_GINT64_FORMAT ") > file_size (%"
+ G_GINT64_FORMAT ") ", cur, aacparse->file_size);
+ goto abort_seek;
+ }
+
+ GST_INFO_OBJECT (parse,
+ "Pushing BYTE seek rate %g, " "start %" G_GINT64_FORMAT ", stop %"
+ G_GINT64_FORMAT, rate, byte_cur, stop);
+
+ if (!(flags & GST_SEEK_FLAG_KEY_UNIT)) {
+ GST_INFO_OBJECT (parse,
+ "Requested seek time: %" GST_TIME_FORMAT ", calculated seek offset: %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (cur), byte_cur);
+ }
+
+ /* BYTE seek event */
+ event =
+ gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
+ stop_type, stop);
+ res = gst_pad_push_event (parse->sinkpad, event);
+
+ return res;
+
+ /* ERRORS */
+
+abort_seek:
+ {
+ GST_DEBUG_OBJECT (parse,
+ "could not determine byte position to seek to, " "seek aborted.");
+ return FALSE;
+ }
+
+unsupported_seek:
+ {
+ GST_DEBUG_OBJECT (parse, "unsupported seek, seek aborted.");
+ return FALSE;
+ }
+}
+
+
+static guint
+gst_aac_parse_adts_get_fast_frame_len (const guint8 * data)
+{
+ int length;
+ if ((data[0] == 0xff) && ((data[1] & 0xf6) == 0xf0)) {
+ length =
+ ((data[3] & 0x03) << 11) | (data[4] << 3) | ((data[5] & 0xe0) >> 5);
+ } else {
+ length = 0;
+ }
+ return length;
+}
+
+/**
+ * gst_aac_parse_adts_src_eventfunc:
+ * @parse: #GstBaseParse, @event: #GstEvent.
+ *
+ * Before baseparse handles the seek event, build a full AAC index table.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_aac_parse_adts_src_eventfunc (GstBaseParse * parse, GstEvent * event)
+{
+ gboolean handled = FALSE;
+ GstAacParse *aacparse = GST_AAC_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+ gint64 base_offset = 0, cur = 0;
+ gint32 frame_count = 1; /* do not add first frame because it is already in index table */
+ gint64 second_count = 0; /* initial 1 second */
+ gint64 total_file_size = 0, start_offset = 0;
+ GstClockTime current_ts = GST_CLOCK_TIME_NONE;
+ GstPadMode pad_mode = GST_PAD_MODE_NONE;
+
+      /* check that baseparse provides these functions */
+ gst_base_parse_get_pad_mode (parse, &pad_mode);
+ if (pad_mode != GST_PAD_MODE_PULL) {
+ gboolean ret = FALSE;
+ GstPad *srcpad = parse->srcpad;
+ GST_INFO_OBJECT (aacparse, "aac parser is PUSH MODE.");
+ /* check NULL */
+ ret = gst_aac_audio_parse_do_push_seek (parse, srcpad, event);
+ gst_object_unref (srcpad);
+ return ret;
+ }
+ gst_base_parse_get_upstream_size (parse, &total_file_size);
+ gst_base_parse_get_index_last_offset (parse, &start_offset);
+ gst_base_parse_get_index_last_ts (parse, ¤t_ts);
+
+ if (total_file_size > AAC_LARGE_FILE_SIZE) {
+ gst_base_parse_set_seek_mode (parse, 0);
+ GST_INFO_OBJECT (aacparse, "larger than big size (2MB).");
+ goto aac_seek_null_exit;
+ }
+
+ GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK enter");
+
+ if (total_file_size == 0 || start_offset >= total_file_size) {
+ GST_ERROR ("last index offset %" G_GINT64_FORMAT
+ " is larger than file size %" G_GINT64_FORMAT, start_offset,
+ total_file_size);
+ break;
+ }
+
+ gst_event_parse_seek (event, NULL, NULL, NULL, NULL, &cur, NULL, NULL);
+ if (cur <= current_ts) {
+ GST_INFO ("seek to %" GST_TIME_FORMAT " within index table %"
+ GST_TIME_FORMAT ". do not make index table", GST_TIME_ARGS (cur),
+ GST_TIME_ARGS (current_ts));
+ break;
+ } else {
+ GST_INFO ("seek to %" GST_TIME_FORMAT " without index table %"
+ GST_TIME_FORMAT ". make index table", GST_TIME_ARGS (cur),
+ GST_TIME_ARGS (current_ts));
+ }
+
+ GST_INFO ("make AAC(ADTS) Index Table. file_size = %" G_GINT64_FORMAT
+ " last idx offset=%" G_GINT64_FORMAT ", last idx ts=%"
+ GST_TIME_FORMAT, total_file_size, start_offset,
+ GST_TIME_ARGS (current_ts));
+
+ base_offset = start_offset; /* set base by start offset */
+ second_count = current_ts + GST_SECOND; /* 1sec */
+
+ /************************************/
+ /* STEP 0: Setting parse information */
+ /************************************/
+ aacparse->spf = aacparse->frame_samples;
+ aacparse->frame_duration = (aacparse->spf * 1000 * 100) / aacparse->sample_rate; /* duration per frame (msec) */
+ aacparse->frame_per_sec = (aacparse->sample_rate) / aacparse->spf; /* frames per second (ea) */
+
+ /************************************/
+ /* STEP 1: MAX_PULL_RANGE_BUF cycle */
+ /************************************/
+ while (total_file_size - base_offset >= AAC_MAX_PULL_RANGE_BUF) {
+ gint64 offset = 0;
+ GstBuffer *buffer = NULL;
+ guint8 *buf = NULL;
+ GstMapInfo map;
+ GST_INFO ("gst_pad_pull_range %d bytes (from %" G_GINT64_FORMAT
+ ") use max size", AAC_MAX_PULL_RANGE_BUF, base_offset);
+ res =
+ gst_pad_pull_range (parse->sinkpad, base_offset,
+ base_offset + AAC_MAX_PULL_RANGE_BUF, &buffer);
+ if (res != GST_FLOW_OK) {
+ GST_ERROR ("gst_pad_pull_range failed!");
+ break;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ buf = map.data;
+ if (buf == NULL) {
+ gst_buffer_unmap (buffer, &map);
+ GST_WARNING ("buffer is NULL in make aac seek table's STEP1");
+ gst_buffer_unref (buffer);
+ goto aac_seek_null_exit;
+ }
+
+ while (offset <= AAC_MAX_PULL_RANGE_BUF) {
+ gint frame_size = 0;
+
+ /* make sure the values in the frame header look sane */
+ frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
+
+ if ((frame_size > 0)
+ && (frame_size < (AAC_MAX_PULL_RANGE_BUF - offset))) {
+ if (current_ts > second_count) { /* 1 sec == xx frames. we make idx per sec */
+ gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE); /* force */
+ GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
+ G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
+ base_offset + offset);
+ second_count += GST_SECOND; /* 1sec */
+ }
+
+ current_ts += (aacparse->frame_duration * GST_MSECOND) / 100; /* each frame is (frame_duration) ms */
+ offset += frame_size;
+ buf += frame_size;
+ frame_count++;
+ } else if (frame_size >= (AAC_MAX_PULL_RANGE_BUF - offset)) {
+ GST_DEBUG ("we need refill buffer");
+ break;
+ } else {
+ GST_WARNING ("we lost sync");
+ buf++;
+ offset++;
+ }
+ } /* while */
+
+ base_offset = base_offset + offset;
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ } /* end MAX buffer cycle */
+
+ /*******************************/
+ /* STEP 2: Remain Buffer cycle */
+ /*******************************/
+ if (total_file_size - base_offset > 0) {
+ gint64 offset = 0;
+ GstBuffer *buffer = NULL;
+ guint8 *buf = NULL;
+ GstMapInfo map;
+
+ GST_INFO ("gst_pad_pull_range %" G_GINT64_FORMAT " bytes (from %"
+ G_GINT64_FORMAT ") use remain_buf size",
+ total_file_size - base_offset, base_offset);
+ res =
+ gst_pad_pull_range (parse->sinkpad, base_offset, total_file_size,
+ &buffer);
+ if (res != GST_FLOW_OK) {
+ GST_ERROR ("gst_pad_pull_range failed!");
+ break;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ buf = map.data;
+ if (buf == NULL) {
+ gst_buffer_unmap (buffer, &map);
+ GST_WARNING ("buffer is NULL in make aac seek table's STEP2");
+ gst_buffer_unref (buffer);
+ goto aac_seek_null_exit;
+ }
+
+ while (base_offset + offset < total_file_size) {
+ gint frame_size = 0;
+
+ /* make sure the values in the frame header look sane */
+ frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
+
+ if ((frame_size > 0)
+ && (frame_size <= (total_file_size - (base_offset + offset)))) {
+ if (current_ts > second_count) { /* 1 sec == xx frames. we make idx per sec */
+ gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE); /* force */
+ GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
+ G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
+ base_offset + offset);
+ second_count += GST_SECOND; /* 1sec */
+ }
+
+ current_ts += (aacparse->frame_duration * GST_MSECOND) / 100; /* each frame is (frame_duration) ms */
+ offset += frame_size;
+ buf += frame_size;
+ frame_count++;
+ } else if (frame_size == 0) {
+ GST_DEBUG ("Frame size is 0 so, Decoding end..");
+ break;
+ } else {
+ GST_WARNING ("we lost sync");
+ buf++;
+ offset++;
+ }
+ } /* while */
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ }
+ /* end remain_buf buffer cycle */
+ GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK leave");
+ }
+ break;
+
+ default:
+ break;
+ }
+
+aac_seek_null_exit:
+
+ /* call baseparse src_event function to handle event */
+ handled = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+ return handled;
}
- #endif //end of #ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++#endif /* TIZEN_FEATURE_AACPARSE_MODIFICATION */
parse_class->get_sink_caps =
GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_sink_caps);
-
-
-
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+ object_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_set_property);
+ object_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_property);
+
+ g_object_class_install_property (object_class, PROP_CHECK_HTTP_SEEK,
+ g_param_spec_boolean ("http-pull-mp3dec", "enable/disable",
+ "enable/disable mp3dec http seek pull mode",
+ DEFAULT_CHECK_HTTP_SEEK, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /* T.B.D : make full mp3 index table when seek */
+ parse_class->src_event = gst_mpeg_audio_parse_src_eventfunc;
+#endif
+
/* register tags */
#define GST_TAG_CRC "has-crc"
#define GST_TAG_MODE "channel-mode"
PROP_DO_CTTS,
PROP_INTERLEAVE_BYTES,
PROP_INTERLEAVE_TIME,
- #endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ PROP_MAX_RAW_AUDIO_DRIFT,
+ PROP_START_GAP_THRESHOLD,
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ PROP_EXPECTED_TRAILER_SIZE,
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
};
/* some spare for header size as well */
static GstFlowReturn
gst_qt_mux_robust_recording_rewrite_moov (GstQTMux * qtmux);
+ static void gst_qt_mux_update_global_statistics (GstQTMux * qtmux);
+ static void gst_qt_mux_update_edit_lists (GstQTMux * qtmux);
+
static GstElementClass *parent_class = NULL;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+/*
+ [[ Metadata Size ]]
+ 1. Common
+ free = 8
+ moov = 8
+ mvhd = 108
+ -------------
+ total : 124
+
+ 2. Video
+ i. Video common
+ trak = 8
+ tkhd = 92
+ mdia = 8
+ mdhd = 32
+ hdlr = 45
+ minf = 8
+ vmhd = 20
+ dinf = 36 (8, dref : 16 , url : 12)
+ stbl = 8
+ ---------------
+ total : 257
+
+ ii. Variation in file format
+ - MP4
+ ftyp = 32
+ udta = 61
+ - 3GP
+ ftyp = 28
+ udta = 8
+
+ iii. Variation in codec
+ - MPEG4
+ stsd = 137(16, mp4v : 86, esds : 35)
+
+ - H.264 = 487(or 489) + (8*stts_count) + (8*frame) + (4*I-frame)
+ stsd = 134 (SPS 9, PPS 4) or 136 (SPS 111, PPS 4)
+
+   - H.263 = 470 + (8*stts_count) + (8*frame) + (4*I-frame)
+ stsd = 102 -> different from H.264
+
+ iv. Variation in frame
+ stts = 16 + (8*stts_count)
+ stss = 16 + (4*I-frame)
+ stsc = 28
+ stsz = 20 + (4*frame)
+ stco = 16 + (4*frame)
+
+ 3. Audio
+ i. Audio common
+ trak = 8
+ tkhd = 92
+ mdia = 8
+ mdhd = 32
+ hdlr = 45
+ minf = 8
+ smhd = 16
+ dinf = 36 (8, dref : 16, url : 12)
+ stbl = 8
+ ---------------
+ total : 253
+
+ stts = 16
+ stsz = 20
+ stco = 16
+ ------------
+ total : 52
+
+ ii. Variation in file format
+ - MP4
+ udta = 61
+ - 3GP
+ udta = 8
+
+ iii. Variation in codec
+ - Common
+ stts = 16 + (8*stts_count)
+ stsc = 28
+ stsz = 20 + (4*frame)
+ stco = 16 + (4*frame)
+
+ - AAC
+ stsd = 94 (16, mp4a : 78(36 ,esds : 42))
+
+ - AMR
+ stsd = 69 (16, samr : 53(36, damr : 17))
+*/
+
+/* trailer entry size */
+#define ENTRY_SIZE_VIDEO_STTS 8
+#define ENTRY_SIZE_VIDEO_STSS 4
+#define ENTRY_SIZE_VIDEO_STSZ 4
+#define ENTRY_SIZE_VIDEO_STCO 4
+#define ENTRY_SIZE_AUDIO_STTS 8
+#define ENTRY_SIZE_AUDIO_STSZ 4
+#define ENTRY_SIZE_AUDIO_STCO 4
+
+#define ENTRY_SIZE_VIDEO_MPEG4_STSD 137
+#define ENTRY_SIZE_VIDEO_H263P_STSD 102
+#define ENTRY_SIZE_AUDIO_AAC_STSD 94
+#define ENTRY_SIZE_AUDIO_AMR_STSD 69
+
+#define ENTRY_SIZE_STSC 28
+#define ENTRY_SIZE_VIDEO_ST 68 /*atom size (stss + stts + stsc + stsz + stco ) * (size + atom + version + flags + sample count)+stsz(sample size) */
+#define ENTRY_SIZE_AUDIO_ST 52 /*atom size (stts + stsc + stsz + stco ) * (size + atom + version + flags + sample count)+stsz(sample size) */
+
+/* common */
+#define MUX_COMMON_SIZE_HEADER 124 /* free + moov + moov.mvhd*/
+
+#define MUX_COMMON_SIZE_VIDEO_HEADER 257
+#define MUX_COMMON_SIZE_AUDIO_HEADER 253
+
+#define MUX_COMMON_SIZE_MP4_FTYP 32
+#define MUX_COMMON_SIZE_3GP_FTYP 28
+
+#define MUX_COMMON_SIZE_MP4_UDTA 61
+#define MUX_COMMON_SIZE_3GP_UDTA 8
+
+static void
+gst_qt_mux_update_expected_trailer_size (GstQTMux *qtmux, GstQTPad *pad)
+{
+ guint nb_video_frames = 0;
+ guint nb_video_i_frames = 0;
+ guint nb_video_stts_entry = 0;
+ guint nb_audio_frames = 0;
+ guint nb_audio_stts_entry = 0;
+ gboolean video_stream = FALSE;
+ gboolean audio_stream = FALSE;
+ guint exp_size = 0;
+ GstQTMuxClass *qtmux_klass = NULL;
+
+ if (qtmux == NULL || pad == NULL) {
+ GST_ERROR_OBJECT (qtmux, "Invalid parameter");
+ return;
+ }
+
+ qtmux_klass = (GstQTMuxClass *)(G_OBJECT_GET_CLASS(qtmux));
+
+ if (!strncmp(GST_PAD_NAME(pad->collect.pad), "video", 5)) {
+ nb_video_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
+ nb_video_i_frames += pad->trak->mdia.minf.stbl.stss.entries.len;
+ nb_video_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
+
+ video_stream = TRUE;
+ } else if (!strncmp(GST_PAD_NAME(pad->collect.pad), "audio", 5)) {
+ nb_audio_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
+ nb_audio_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
+
+ audio_stream = TRUE;
+ }
+
+ /* free + moov + mvhd */
+ qtmux->expected_trailer_size = MUX_COMMON_SIZE_HEADER;
+
+ /* ftyp + udta * 3 (There is 3 udta fields and it's same size) */
+ switch (qtmux_klass->format) {
+ case GST_QT_MUX_FORMAT_MP4:
+ qtmux->expected_trailer_size += MUX_COMMON_SIZE_MP4_FTYP + MUX_COMMON_SIZE_MP4_UDTA * 3;
+ break;
+ case GST_QT_MUX_FORMAT_3GP:
+ qtmux->expected_trailer_size += MUX_COMMON_SIZE_3GP_FTYP + MUX_COMMON_SIZE_3GP_UDTA * 3;
+ break;
+ default:
+ break;
+ }
+
+ /* Calculate trailer size for video stream */
+ if (video_stream) {
+ switch (pad->fourcc) {
+ case FOURCC_h263:
+ case FOURCC_s263:
+ exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_H263P_STSD;
+ break;
+ case FOURCC_mp4v:
+ case FOURCC_MP4V:
+ case FOURCC_fmp4:
+ case FOURCC_FMP4:
+ case FOURCC_3gp4:
+ case FOURCC_3gp6:
+ case FOURCC_3gg6:
+ exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_MPEG4_STSD;
+ break;
+ default:
+ break;
+ }
+
+ /* frame related */
+ exp_size += ENTRY_SIZE_VIDEO_ST + (ENTRY_SIZE_VIDEO_STTS * nb_video_stts_entry) +
+ (ENTRY_SIZE_VIDEO_STSS * nb_video_i_frames) + (ENTRY_SIZE_STSC) +
+ ((ENTRY_SIZE_VIDEO_STSZ + ENTRY_SIZE_VIDEO_STCO) * nb_video_frames);
+
+ qtmux->video_expected_trailer_size = exp_size;
+ }
+
+ /* Calculate trailer size for audio stream */
+ if (audio_stream) {
+ exp_size += MUX_COMMON_SIZE_AUDIO_HEADER + ENTRY_SIZE_AUDIO_ST + (ENTRY_SIZE_AUDIO_STTS * nb_audio_stts_entry) +
+ (ENTRY_SIZE_STSC) + ((ENTRY_SIZE_AUDIO_STSZ + ENTRY_SIZE_AUDIO_STCO) * nb_audio_frames);
+
+ if (pad->fourcc == FOURCC_samr)
+ exp_size += ENTRY_SIZE_AUDIO_AMR_STSD;
+ else
+ exp_size += ENTRY_SIZE_AUDIO_AAC_STSD;
+
+ qtmux->audio_expected_trailer_size = exp_size;
+ }
+
+ qtmux->expected_trailer_size += qtmux->video_expected_trailer_size + qtmux->audio_expected_trailer_size;
+
+ /*
+ GST_INFO_OBJECT (qtmux, "pad type %s", GST_PAD_NAME(pad->collect.pad));
+ GST_INFO_OBJECT (qtmux, "VIDEO : stts-entry=[%d], i-frame=[%d], video-sample=[%d]", nb_video_stts_entry, nb_video_i_frames, nb_video_frames);
+ GST_INFO_OBJECT (qtmux, "AUDIO : stts-entry=[%d], audio-sample=[%d]", nb_audio_stts_entry, nb_audio_frames);
+ GST_INFO_OBJECT (qtmux, "expected trailer size %d", qtmux->expected_trailer_size);
+ */
+
+ return;
+}
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
static void
gst_qt_mux_base_init (gpointer g_class)
{
"Interleave between streams in nanoseconds",
0, G_MAXUINT64, DEFAULT_INTERLEAVE_TIME,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MAX_RAW_AUDIO_DRIFT,
+ g_param_spec_uint64 ("max-raw-audio-drift", "Max Raw Audio Drift",
+ "Maximum allowed drift of raw audio samples vs. timestamps in nanoseconds",
+ 0, G_MAXUINT64, DEFAULT_MAX_RAW_AUDIO_DRIFT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_START_GAP_THRESHOLD,
+ g_param_spec_uint64 ("start-gap-threshold", "Start Gap Threshold",
+ "Threshold for creating an edit list for gaps at the start in nanoseconds",
+ 0, G_MAXUINT64, DEFAULT_START_GAP_THRESHOLD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ tspec = g_param_spec_uint("expected-trailer-size", "Expected Trailer Size",
+ "Expected trailer size (bytes)",
+ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS);
+ if (tspec)
+ g_object_class_install_property(gobject_class, PROP_EXPECTED_TRAILER_SIZE, tspec);
+ else
+ GST_ERROR("g_param_spec failed for \"expected-trailer-size\"");
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
gstelement_class->request_new_pad =
GST_DEBUG_FUNCPTR (gst_qt_mux_request_new_pad);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_qt_mux_change_state);
case PROP_INTERLEAVE_TIME:
g_value_set_uint64 (value, qtmux->interleave_time);
break;
- #endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ case PROP_MAX_RAW_AUDIO_DRIFT:
+ g_value_set_uint64 (value, qtmux->max_raw_audio_drift);
+ break;
+ case PROP_START_GAP_THRESHOLD:
+ g_value_set_uint64 (value, qtmux->start_gap_threshold);
+ break;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ case PROP_EXPECTED_TRAILER_SIZE:
+ g_value_set_uint(value, qtmux->expected_trailer_size);
+ break;
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
GstClockTime reserved_moov_update_period;
GstClockTime muxed_since_last_update;
+ gboolean reserved_prefill;
+
+ GstClockTime start_gap_threshold;
+
/* for request pad naming */
- guint video_pads, audio_pads, subtitle_pads;
+ guint video_pads, audio_pads, subtitle_pads, caption_pads;
+
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ guint expected_trailer_size;
+ guint audio_expected_trailer_size;
+ guint video_expected_trailer_size;
- #endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
};
struct _GstQTMuxClass
static void gst_qtdemux_append_protection_system_id (GstQTDemux * qtdemux,
const gchar * id);
static void qtdemux_gst_structure_free (GstStructure * gststructure);
+ static void gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+static void gst_tag_register_spherical_tags (void);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
gst_qtdemux_class_init (GstQTDemuxClass * klass)
{
qtdemux_sink_activate_mode);
gst_pad_set_chain_function (qtdemux->sinkpad, gst_qtdemux_chain);
gst_pad_set_event_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_event);
+ gst_pad_set_query_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_query);
gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), qtdemux->sinkpad);
- qtdemux->state = QTDEMUX_STATE_INITIAL;
- qtdemux->pullbased = FALSE;
- qtdemux->posted_redirect = FALSE;
- qtdemux->neededbytes = 16;
- qtdemux->todrop = 0;
qtdemux->adapter = gst_adapter_new ();
- qtdemux->offset = 0;
- qtdemux->first_mdat = -1;
- qtdemux->got_moov = FALSE;
- qtdemux->mdatoffset = -1;
- qtdemux->mdatbuffer = NULL;
- qtdemux->restoredata_buffer = NULL;
- qtdemux->restoredata_offset = -1;
- qtdemux->fragment_start = -1;
- qtdemux->fragment_start_offset = -1;
- qtdemux->media_caps = NULL;
- qtdemux->exposed = FALSE;
- qtdemux->mss_mode = FALSE;
- qtdemux->pending_newsegment = NULL;
- qtdemux->upstream_format_is_time = FALSE;
- qtdemux->have_group_id = FALSE;
- qtdemux->group_id = G_MAXUINT;
- qtdemux->cenc_aux_info_offset = 0;
- qtdemux->cenc_aux_info_sizes = NULL;
- qtdemux->cenc_aux_sample_count = 0;
- qtdemux->protection_system_ids = NULL;
g_queue_init (&qtdemux->protection_event_queue);
- gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
- qtdemux->tag_list = gst_tag_list_new_empty ();
- gst_tag_list_set_scope (qtdemux->tag_list, GST_TAG_SCOPE_GLOBAL);
qtdemux->flowcombiner = gst_flow_combiner_new ();
+ g_mutex_init (&qtdemux->expose_lock);
+
+ qtdemux->active_streams = g_ptr_array_new_with_free_func
+ ((GDestroyNotify) gst_qtdemux_stream_unref);
+ qtdemux->old_streams = g_ptr_array_new_with_free_func
+ ((GDestroyNotify) gst_qtdemux_stream_unref);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ qtdemux->spherical_metadata = (QtDemuxSphericalMetadata *)
+ malloc (sizeof (QtDemuxSphericalMetadata));
+
+ if (qtdemux->spherical_metadata) {
+ qtdemux->spherical_metadata->is_spherical = FALSE;
+ qtdemux->spherical_metadata->is_stitched = FALSE;
+ qtdemux->spherical_metadata->stitching_software = NULL;
+ qtdemux->spherical_metadata->projection_type = NULL;
+ qtdemux->spherical_metadata->stereo_mode = NULL;
+ qtdemux->spherical_metadata->source_count = 0;
+ qtdemux->spherical_metadata->init_view_heading = 0;
+ qtdemux->spherical_metadata->init_view_pitch = 0;
+ qtdemux->spherical_metadata->init_view_roll = 0;
+ qtdemux->spherical_metadata->timestamp = 0;
+ qtdemux->spherical_metadata->full_pano_width_pixels = 0;
+ qtdemux->spherical_metadata->full_pano_height_pixels = 0;
+ qtdemux->spherical_metadata->cropped_area_image_width = 0;
+ qtdemux->spherical_metadata->cropped_area_image_height = 0;
+ qtdemux->spherical_metadata->cropped_area_left = 0;
+ qtdemux->spherical_metadata->cropped_area_top = 0;
+ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_UNKNOWN;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);
+
+ gst_qtdemux_reset (qtdemux, TRUE);
}
static void
}
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+static void
+_get_int_value_from_xml_string (GstQTDemux * qtdemux,
+ const char *xml_str, const char *param_name, int *value)
+{
+ char *value_start, *value_end, *endptr;
+ const short value_length_max = 12;
+ char init_view_ret[12];
+ int value_length = 0;
+ int i = 0;
+
+ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+ if (!value_start) {
+ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+ param_name);
+ return;
+ }
+
+ value_start += strlen (param_name);
+ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+ value_start++;
+
+ value_end = strchr (value_start, '<');
+ if (!value_end) {
+ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+ return;
+ }
+
+ value_length = value_end - value_start;
+ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+ || (value_start[value_length - 1] == '\t')))
+ value_length--;
+
+ if (value_start[i] == '+' || value_start[i] == '-')
+ i++;
+ while (i < value_length) {
+ if (value_start[i] < '0' || value_start[i] > '9') {
+ GST_ERROR_OBJECT (qtdemux,
+ "error: incorrect value, integer was expected\n");
+ return;
+ }
+ i++;
+ }
+
+ if (value_length >= value_length_max || value_length < 1) {
+ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+ return;
+ }
+
+ strncpy (init_view_ret, value_start, value_length_max);
+ init_view_ret[value_length] = '\0';
+
+ *value = strtol (init_view_ret, &endptr, 10);
+ if (endptr == init_view_ret) {
+ GST_ERROR_OBJECT (qtdemux, "error: no digits were found\n");
+ return;
+ }
+
+ return;
+}
+
+static void
+_get_string_value_from_xml_string (GstQTDemux * qtdemux,
+ const char *xml_str, const char *param_name, char **value)
+{
+ char *value_start, *value_end;
+ const short value_length_max = 256;
+ int value_length = 0;
+
+ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+ if (!value_start) {
+ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+ param_name);
+ return;
+ }
+
+ value_start += strlen (param_name);
+ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+ value_start++;
+
+ value_end = strchr (value_start, '<');
+ if (!value_end) {
+ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+ return;
+ }
+
+ value_length = value_end - value_start;
+ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+ || (value_start[value_length - 1] == '\t')))
+ value_length--;
+
+ if (value_length >= value_length_max || value_length < 1) {
+ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+ return;
+ }
+
+ *value = strndup(value_start, value_length);
+
+ return;
+}
+
+static void
+_get_bool_value_from_xml_string (GstQTDemux * qtdemux,
+ const char *xml_str, const char *param_name, gboolean * value)
+{
+ char *value_start, *value_end;
+ int value_length = 0;
+
+ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+ if (!value_start) {
+ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+ param_name);
+ return;
+ }
+
+ value_start += strlen (param_name);
+ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+ value_start++;
+
+ value_end = strchr (value_start, '<');
+ if (!value_end) {
+ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+ return;
+ }
+
+ value_length = value_end - value_start;
+ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+ || (value_start[value_length - 1] == '\t')))
+ value_length--;
+
+ if (value_length < 1) {
+ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+ return;
+ }
+
+ *value = g_strstr_len(value_start, value_length, "true") ? TRUE : FALSE;
+
+ return;
+}
+
+static void
+_parse_spatial_video_metadata_from_xml_string (GstQTDemux * qtdemux, const char *xmlStr)
+{
+ const char is_spherical_str[] = "<GSpherical:Spherical>";
+ const char is_stitched_str[] = "<GSpherical:Stitched>";
+ const char stitching_software_str[] = "<GSpherical:StitchingSoftware>";
+ const char projection_type_str[] = "<GSpherical:ProjectionType>";
+ const char stereo_mode_str[] = "<GSpherical:StereoMode>";
+ const char source_count_str[] = "<GSpherical:SourceCount>";
+ const char init_view_heading_str[] = "<GSpherical:InitialViewHeadingDegrees>";
+ const char init_view_pitch_str[] = "<GSpherical:InitialViewPitchDegrees>";
+ const char init_view_roll_str[] = "<GSpherical:InitialViewRollDegrees>";
+ const char timestamp_str[] = "<GSpherical:Timestamp>";
+ const char full_pano_width_str[] = "<GSpherical:FullPanoWidthPixels>";
+ const char full_pano_height_str[] = "<GSpherical:FullPanoHeightPixels>";
+ const char cropped_area_image_width_str[] =
+ "<GSpherical:CroppedAreaImageWidthPixels>";
+ const char cropped_area_image_height_str[] =
+ "<GSpherical:CroppedAreaImageHeightPixels>";
+ const char cropped_area_left_str[] = "<GSpherical:CroppedAreaLeftPixels>";
+ const char cropped_area_top_str[] = "<GSpherical:CroppedAreaTopPixels>";
+
+ QtDemuxSphericalMetadata * spherical_metadata = qtdemux->spherical_metadata;
+
+ _get_bool_value_from_xml_string (qtdemux, xmlStr, is_spherical_str,
+ (gboolean *) & spherical_metadata->is_spherical);
+ _get_bool_value_from_xml_string (qtdemux, xmlStr, is_stitched_str,
+ (gboolean *) & spherical_metadata->is_stitched);
+
+ if (spherical_metadata->is_spherical && spherical_metadata->is_stitched) {
+ _get_string_value_from_xml_string (qtdemux, xmlStr,
+ stitching_software_str, &spherical_metadata->stitching_software);
+ _get_string_value_from_xml_string (qtdemux, xmlStr,
+ projection_type_str, &spherical_metadata->projection_type);
+ _get_string_value_from_xml_string (qtdemux, xmlStr, stereo_mode_str,
+ &spherical_metadata->stereo_mode);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, source_count_str,
+ &spherical_metadata->source_count);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ init_view_heading_str, &spherical_metadata->init_view_heading);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_pitch_str,
+ &spherical_metadata->init_view_pitch);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_roll_str,
+ &spherical_metadata->init_view_roll);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, timestamp_str,
+ &spherical_metadata->timestamp);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, full_pano_width_str,
+ &spherical_metadata->full_pano_width_pixels);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ full_pano_height_str, &spherical_metadata->full_pano_height_pixels);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ cropped_area_image_width_str,
+ &spherical_metadata->cropped_area_image_width);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ cropped_area_image_height_str,
+ &spherical_metadata->cropped_area_image_height);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_left_str,
+ &spherical_metadata->cropped_area_left);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_top_str,
+ &spherical_metadata->cropped_area_top);
+ }
+
+ return;
+}
+
+static void
+gst_tag_register_spherical_tags (void) {
+ gst_tag_register ("is_spherical", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-spherical"),
+ _("Flag indicating if the video is a spherical video"),
+ NULL);
+ gst_tag_register ("is_stitched", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-stitched"),
+ _("Flag indicating if the video is stitched"),
+ NULL);
+ gst_tag_register ("stitching_software", GST_TAG_FLAG_META,
+ G_TYPE_STRING,
+ _("tag-stitching-software"),
+ _("Software used to stitch the spherical video"),
+ NULL);
+ gst_tag_register ("projection_type", GST_TAG_FLAG_META,
+ G_TYPE_STRING,
+ _("tag-projection-type"),
+ _("Projection type used in the video frames"),
+ NULL);
+ gst_tag_register ("stereo_mode", GST_TAG_FLAG_META,
+ G_TYPE_STRING,
+ _("tag-stereo-mode"),
+ _("Description of stereoscopic 3D layout"),
+ NULL);
+ gst_tag_register ("source_count", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-source-count"),
+ _("Number of cameras used to create the spherical video"),
+ NULL);
+ gst_tag_register ("init_view_heading", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-init-view-heading"),
+ _("The heading angle of the initial view in degrees"),
+ NULL);
+ gst_tag_register ("init_view_pitch", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-init-view-pitch"),
+ _("The pitch angle of the initial view in degrees"),
+ NULL);
+ gst_tag_register ("init_view_roll", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-init-view-roll"),
+ _("The roll angle of the initial view in degrees"),
+ NULL);
+ gst_tag_register ("timestamp", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-timestamp"),
+ _("Epoch timestamp of when the first frame in the video was recorded"),
+ NULL);
+ gst_tag_register ("full_pano_width_pixels", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-full-pano-width"),
+ _("Width of the encoded video frame in pixels"),
+ NULL);
+ gst_tag_register ("full_pano_height_pixels", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-full-pano-height"),
+ _("Height of the encoded video frame in pixels"),
+ NULL);
+ gst_tag_register ("cropped_area_image_width", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-image-width"),
+ _("Width of the video frame to display (e.g. cropping)"),
+ NULL);
+ gst_tag_register ("cropped_area_image_height", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-image-height"),
+ _("Height of the video frame to display (e.g. cropping)"),
+ NULL);
+ gst_tag_register ("cropped_area_left", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-left"),
+ _("Column where the left edge of the image was cropped from the"
+ " full sized panorama"),
+ NULL);
+ gst_tag_register ("cropped_area_top", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-top"),
+ _("Row where the top edge of the image was cropped from the"
+ " full sized panorama"),
+ NULL);
+ gst_tag_register ("ambisonic_type", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-ambisonic-type"),
+ _("Specifies the type of ambisonic audio represented"),
+ NULL);
+ gst_tag_register ("ambisonic_format", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-ambisonic-format"),
+ _("Specifies the ambisonic audio format"),
+ NULL);
+ gst_tag_register ("ambisonic_order", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-ambisonic-order"),
+ _("Specifies the ambisonic audio channel order"),
+ NULL);
+
+ return;
+}
+
+static void
+_send_spherical_metadata_msg_to_bus (GstQTDemux * qtdemux)
+{
+ GstTagList *taglist;
+ QtDemuxSphericalMetadata *spherical_metadata = qtdemux->spherical_metadata;
+
+ GST_DEBUG_OBJECT (qtdemux, "is_spherical = %d",
+ spherical_metadata->is_spherical);
+ GST_DEBUG_OBJECT (qtdemux, "is_stitched = %d",
+ spherical_metadata->is_stitched);
+ GST_DEBUG_OBJECT (qtdemux, "stitching_software = %s",
+ spherical_metadata->stitching_software);
+ GST_DEBUG_OBJECT (qtdemux, "projection_type = %s",
+ spherical_metadata->projection_type);
+ GST_DEBUG_OBJECT (qtdemux, "stereo_mode = %s",
+ spherical_metadata->stereo_mode);
+ GST_DEBUG_OBJECT (qtdemux, "source_count %d",
+ spherical_metadata->source_count);
+ GST_DEBUG_OBJECT (qtdemux, "init_view_heading = %d",
+ spherical_metadata->init_view_heading);
+ GST_DEBUG_OBJECT (qtdemux, "init_view_pitch = %d",
+ spherical_metadata->init_view_pitch);
+ GST_DEBUG_OBJECT (qtdemux, "init_view_roll = %d",
+ spherical_metadata->init_view_roll);
+ GST_DEBUG_OBJECT (qtdemux, "timestamp = %d", spherical_metadata->timestamp);
+ GST_DEBUG_OBJECT (qtdemux, "full_pano_width_pixels = %d",
+ spherical_metadata->full_pano_width_pixels);
+ GST_DEBUG_OBJECT (qtdemux, "full_pano_height_pixels = %d",
+ spherical_metadata->full_pano_height_pixels);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_width = %d",
+ spherical_metadata->cropped_area_image_width);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_height = %d",
+ spherical_metadata->cropped_area_image_height);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_left = %d",
+ spherical_metadata->cropped_area_left);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_top = %d",
+ spherical_metadata->cropped_area_top);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_type = %d",
+ spherical_metadata->ambisonic_type);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_order = %d",
+ spherical_metadata->ambisonic_order);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_format = %d",
+ spherical_metadata->ambisonic_format);
+
+ taglist = gst_tag_list_new_empty ();
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "is_spherical", spherical_metadata->is_spherical,
+ "is_stitched", spherical_metadata->is_stitched,
+ "source_count", spherical_metadata->source_count,
+ "init_view_heading", spherical_metadata->init_view_heading,
+ "init_view_pitch", spherical_metadata->init_view_pitch,
+ "init_view_roll", spherical_metadata->init_view_roll,
+ "timestamp", spherical_metadata->timestamp,
+ "full_pano_width_pixels", spherical_metadata->full_pano_width_pixels,
+ "full_pano_height_pixels", spherical_metadata->full_pano_height_pixels,
+ "cropped_area_image_width", spherical_metadata->cropped_area_image_width,
+ "cropped_area_image_height", spherical_metadata->cropped_area_image_height,
+ "cropped_area_left", spherical_metadata->cropped_area_left,
+ "cropped_area_top", spherical_metadata->cropped_area_top,
+ "ambisonic_type", spherical_metadata->ambisonic_type,
+ "ambisonic_format", spherical_metadata->ambisonic_format,
+ "ambisonic_order", spherical_metadata->ambisonic_order,
+ NULL);
+
+ if (spherical_metadata->stitching_software)
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "stitching_software", spherical_metadata->stitching_software,
+ NULL);
+ if (spherical_metadata->projection_type)
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "projection_type", spherical_metadata->projection_type,
+ NULL);
+ if (spherical_metadata->stereo_mode)
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "stereo_mode", spherical_metadata->stereo_mode,
+ NULL);
+
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+ gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
+ gst_tag_list_copy (taglist)));
+
+ gst_tag_list_unref(taglist);
+
+ return;
+}
+
+static void
+qtdemux_parse_SA3D (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
+{
+ guint offset = 0;
+
+ guint8 version = 0;
+ guint8 ambisonic_type = 0;
+ guint32 ambisonic_order = 0;
+ guint8 ambisonic_channel_ordering = 0;
+ guint8 ambisonic_normalization = 0;
+ guint32 num_channels = 0;
+ guint32 channel_map[49] = { 0 }; /* Up to 6th order */
+
+ int i;
+
+ GST_DEBUG_OBJECT (qtdemux, "qtdemux_parse_SA3D");
+
+ qtdemux->header_size += length;
+ offset = (QT_UINT32 (buffer) == 0) ? 16 : 8;
+
+ if (length <= offset + 16) {
+ GST_DEBUG_OBJECT (qtdemux, "SA3D atom is too short, skipping");
+ return;
+ }
+
+ version = QT_UINT8 (buffer + offset);
+ ambisonic_type = QT_UINT8 (buffer + offset + 1);
+ ambisonic_order = QT_UINT32 (buffer + offset + 2);
+ ambisonic_channel_ordering = QT_UINT8 (buffer + offset + 6);
+ ambisonic_normalization = QT_UINT8 (buffer + offset + 7);
+  num_channels = MIN (MIN (QT_UINT32 (buffer + offset + 8), ((guint) length - offset - 12) / 4), G_N_ELEMENTS (channel_map));
+ for (i = 0; i < num_channels; ++i)
+ channel_map[i] = QT_UINT32 (buffer + offset + 12 + i * 4);
+
+ GST_DEBUG_OBJECT (qtdemux, "version: %d", version);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_type: %d", ambisonic_type);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_order: %d", ambisonic_order);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_channel_ordering: %d",
+ ambisonic_channel_ordering);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_normalization: %d",
+ ambisonic_normalization);
+ GST_DEBUG_OBJECT (qtdemux, "num_channels: %d", num_channels);
+ for (i = 0; i < num_channels; ++i)
+ GST_DEBUG_OBJECT (qtdemux, "channel_map: %d", channel_map[i]);
+
+ if (version == RFC_AMBISONIC_SA3DBOX_VERSION_SUPPORTED) {
+ if (ambisonic_type == RFC_AMBISONIC_TYPE_PERIPHONIC)
+ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_PERIPHONIC;
+
+ if (ambisonic_order == RFC_AMBISONIC_ORDER_FOA) {
+ if (num_channels == 4) {
+ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_FOA;
+
+ if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_ACN)
+ && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_SN3D)
+ && (channel_map[0] == 0) && (channel_map[1] == 1)
+ && (channel_map[2] == 2) && (channel_map[3] == 3))
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMBIX;
+
+ if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_FUMA)
+ && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_FUMA)
+ && (channel_map[0] == 0) && (channel_map[1] == 3)
+ && (channel_map[2] == 1) && (channel_map[3] == 2))
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMB;
+ }
+ }
+ }
+
+ return;
+}
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
+ qtdemux_update_default_sample_encryption_settings (GstQTDemux * qtdemux,
+ QtDemuxCencSampleSetInfo * info, guint32 is_encrypted, guint8 iv_size,
+ const guint8 * kid)
+ {
+ GstBuffer *kid_buf = gst_buffer_new_allocate (NULL, 16, NULL);
+ gst_buffer_fill (kid_buf, 0, kid, 16);
+ if (info->default_properties)
+ gst_structure_free (info->default_properties);
+ info->default_properties =
+ gst_structure_new ("application/x-cenc",
+ "iv_size", G_TYPE_UINT, iv_size,
+ "encrypted", G_TYPE_BOOLEAN, (is_encrypted == 1),
+ "kid", GST_TYPE_BUFFER, kid_buf, NULL);
+ GST_DEBUG_OBJECT (qtdemux, "default sample properties: "
+ "is_encrypted=%u, iv_size=%u", is_encrypted, iv_size);
+ gst_buffer_unref (kid_buf);
+ }
+
+ static gboolean
+ qtdemux_update_default_piff_encryption_settings (GstQTDemux * qtdemux,
+ QtDemuxCencSampleSetInfo * info, GstByteReader * br)
+ {
+ guint32 algorithm_id = 0;
+ const guint8 *kid;
+ gboolean is_encrypted = TRUE;
+ guint8 iv_size = 8;
+
+ if (!gst_byte_reader_get_uint24_le (br, &algorithm_id)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's algorithm ID field");
+ return FALSE;
+ }
+
+ algorithm_id >>= 8;
+ if (algorithm_id == 0) {
+ is_encrypted = FALSE;
+ } else if (algorithm_id == 1) {
+ GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CTR encrypted stream");
+ } else if (algorithm_id == 2) {
+ GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CBC encrypted stream");
+ }
+
+ if (!gst_byte_reader_get_uint8 (br, &iv_size))
+ return FALSE;
+
+ if (!gst_byte_reader_get_data (br, 16, &kid))
+ return FALSE;
+
+ qtdemux_update_default_sample_encryption_settings (qtdemux, info,
+ is_encrypted, iv_size, kid);
+ gst_structure_set (info->default_properties, "piff_algorithm_id",
+ G_TYPE_UINT, algorithm_id, NULL);
+ return TRUE;
+ }
+
+
+ static void
qtdemux_parse_piff (GstQTDemux * qtdemux, const guint8 * buffer, gint length,
guint offset)
{
beach:
if (ret == GST_FLOW_EOS && (qtdemux->got_moov || qtdemux->media_caps)) {
/* digested all data, show what we have */
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (qtdemux->spherical_metadata)
+ _send_spherical_metadata_msg_to_bus (qtdemux);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
qtdemux_prepare_streams (qtdemux);
+ QTDEMUX_EXPOSE_LOCK (qtdemux);
ret = qtdemux_expose_streams (qtdemux);
+ QTDEMUX_EXPOSE_UNLOCK (qtdemux);
qtdemux->state = QTDEMUX_STATE_MOVIE;
GST_DEBUG_OBJECT (qtdemux, "switching state to STATE_MOVIE (%d)",
* header start.
* Note : This is not computed from the GST_BUFFER_OFFSET field */
guint64 fragment_start_offset;
- #endif
+
+ /* These two fields are used to perform an implicit seek when a fragmented
+ * file whose first tfdt is not zero. This way if the first fragment starts
+ * at 1 hour, the user does not have to wait 1 hour or perform a manual seek
+ * for the image to move and the sound to play.
+ *
+ * This implicit seek is only done if the first parsed fragment has a non-zero
+ * decode base time and a seek has not been received previously, hence these
+ * fields. */
+ gboolean received_seek;
+ gboolean first_moof_already_parsed;
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ QtDemuxSphericalMetadata *spherical_metadata;
++#endif
};
struct _GstQTDemuxClass {
--/* GStreamer
-- * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
-- *
-- * This library is free software; you can redistribute it and/or
-- * modify it under the terms of the GNU Library General Public
-- * License as published by the Free Software Foundation; either
-- * version 2 of the License, or (at your option) any later version.
-- *
-- * This library is distributed in the hope that it will be useful,
-- * but WITHOUT ANY WARRANTY; without even the implied warranty of
-- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- * Library General Public License for more details.
-- *
-- * You should have received a copy of the GNU Library General Public
-- * License along with this library; if not, write to the
-- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
-- * Boston, MA 02110-1301, USA.
-- */
++ /* GStreamer
++ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
++ * Boston, MA 02110-1301, USA.
++ */
/**
* SECTION:element-rtpbin
if (!(buffer = gst_element_factory_make ("rtpjitterbuffer", NULL)))
goto no_jitterbuffer;
- if (!rtpbin->ignore_pt)
+ if (!rtpbin->ignore_pt) {
if (!(demux = gst_element_factory_make ("rtpptdemux", NULL)))
goto no_demux;
-
+ }
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (session->bin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ if (!(queue2 = gst_element_factory_make ("queue2", NULL)))
+ goto no_queue2;
+#endif
stream = g_new0 (GstRtpBinStream, 1);
stream->ssrc = ssrc;
stream->bin = rtpbin;
g_object_set (buffer, "max-dropout-time", rtpbin->max_dropout_time,
"max-misorder-time", rtpbin->max_misorder_time, NULL);
g_object_set (buffer, "rfc7273-sync", rtpbin->rfc7273_sync, NULL);
+ g_object_set (buffer, "max-ts-offset-adjustment",
+ rtpbin->max_ts_offset_adjustment, NULL);
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* configure queue2 to use live buffering */
+ if (queue2) {
+ g_object_set_data (G_OBJECT (queue2), "GstRTPBin.stream", stream);
+ g_object_set (queue2, "use-buffering", TRUE, NULL);
+ g_object_set (queue2, "buffer-mode", GST_BUFFERING_LIVE, NULL);
+ }
+#endif
+ /* need to sink the jitterbufer or otherwise signal handlers from bindings will
+ * take ownership of it and we don't own it anymore */
+ gst_object_ref_sink (buffer);
g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER], 0,
buffer, session->id, ssrc);
if (!rtpbin->ignore_pt)
gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2)
+ gst_bin_add (GST_BIN_CAST (rtpbin), queue2);
+#endif
+
gst_bin_add (GST_BIN_CAST (rtpbin), buffer);
+ /* unref the jitterbuffer again, the bin has a reference now and
+ * we don't need it anymore */
+ gst_object_unref (buffer);
+
/* link stuff */
- } else if (demux)
- gst_element_link_pads_full (buffer, "src", demux, "sink",
- GST_PAD_LINK_CHECK_NOTHING);
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2) {
+ gst_element_link_pads_full (buffer, "src", queue2, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ if (demux) {
+ gst_element_link_pads_full (queue2, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ }
++ } else if (demux) {
++ gst_element_link_pads_full (buffer, "src", demux, "sink",
++ GST_PAD_LINK_CHECK_NOTHING);
++ }
+#else
if (demux)
gst_element_link_pads_full (buffer, "src", demux, "sink",
GST_PAD_LINK_CHECK_NOTHING);
for (streams = session->streams; streams;
streams = g_slist_next (streams)) {
GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstPad *temp_pad_src = NULL;
+ GstCaps *temp_caps_src = NULL;
+ GstStructure *caps_structure;
+ const gchar *caps_str_media = NULL;
+ temp_pad_src = gst_element_get_static_pad (stream->buffer, "src");
- temp_caps_src = gst_pad_get_current_caps(temp_pad_src);
- GST_DEBUG_OBJECT (bin, "stream %p percent %d : temp_caps_src=%"GST_PTR_FORMAT, stream,stream->percent,temp_caps_src);
- if (temp_caps_src)
- {
++ temp_caps_src = gst_pad_get_current_caps (temp_pad_src);
++ GST_DEBUG_OBJECT (bin,
++ "stream %p percent %d : temp_caps_src=%" GST_PTR_FORMAT,
++ stream, stream->percent, temp_caps_src);
++ if (temp_caps_src) {
+ caps_structure = gst_caps_get_structure (temp_caps_src, 0);
- caps_str_media = gst_structure_get_string (caps_structure, "media");
- if (caps_str_media != NULL)
- {
- if ((strcmp(caps_str_media,"video") != 0)&&(strcmp(caps_str_media,"audio") != 0))
- {
- GST_DEBUG_OBJECT (bin, "Non Audio/Video Stream.. ignoring the same !!");
- gst_caps_unref( temp_caps_src );
- gst_object_unref( temp_pad_src );
++ caps_str_media =
++ gst_structure_get_string (caps_structure, "media");
++ if (caps_str_media != NULL) {
++ if ((strcmp (caps_str_media, "video") != 0)
++ && (strcmp (caps_str_media, "audio") != 0)) {
++ GST_DEBUG_OBJECT (bin,
++ "Non Audio/Video Stream.. ignoring the same !!");
++ gst_caps_unref (temp_caps_src);
++ gst_object_unref (temp_pad_src);
+ continue;
- }
- else if(stream->percent >= 100)
- {
++ } else if (stream->percent >= 100) {
+ /* Most of the time buffering icon displays in rtsp playback.
- Optimizing the buffering updation code. Whenever any stream percentage
- reaches 100 do not post buffering messages.*/
- if(stream->prev_percent < 100)
- {
++ Optimizing the buffering updation code. Whenever any stream percentage
++ reaches 100 do not post buffering messages. */
++ if (stream->prev_percent < 100)
+ buffering_flag = TRUE;
- }
+ else
- {
+ update_buffering_status = FALSE;
- }
+ }
+ }
- gst_caps_unref( temp_caps_src );
++ gst_caps_unref (temp_caps_src);
+ }
- gst_object_unref( temp_pad_src );
++ gst_object_unref (temp_pad_src);
+#else
GST_DEBUG_OBJECT (bin, "stream %p percent %d", stream,
stream->percent);
-
+#endif
/* find min percent */
if (min_percent > stream->percent)
min_percent = stream->percent;
gst_message_unref (message);
- if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
- {
- if(update_buffering_status==FALSE)
- {
- break;
- }
- if(buffering_flag)
- {
- min_percent=100;
- GST_DEBUG_OBJECT (bin, "forcefully change min_percent to 100!!!");
- }
- }
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE) {
++ if (update_buffering_status == FALSE)
++ break;
++ if (buffering_flag) {
++ min_percent = 100;
++ GST_DEBUG_OBJECT (bin, "forcefully change min_percent to 100!!!");
++ }
++ }
+#endif
/* make a new buffering message with the min value */
message =
gst_message_new_buffering (GST_OBJECT_CAST (bin), min_percent);
#define DEFAULT_USER_AGENT "GStreamer/" PACKAGE_VERSION
#define DEFAULT_MAX_RTCP_RTP_TIME_DIFF 1000
#define DEFAULT_RFC7273_SYNC FALSE
+ #define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0)
+ #define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000)
+ #define DEFAULT_VERSION GST_RTSP_VERSION_1_0
+ #define DEFAULT_BACKCHANNEL GST_RTSP_BACKCHANNEL_NONE
+ #define DEFAULT_TEARDOWN_TIMEOUT (100 * GST_MSECOND)
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+#define DEFAULT_START_POSITION 0
+#endif
+
enum
{
PROP_0,
src->user_agent = g_strdup (DEFAULT_USER_AGENT);
src->max_rtcp_rtp_time_diff = DEFAULT_MAX_RTCP_RTP_TIME_DIFF;
src->rfc7273_sync = DEFAULT_RFC7273_SYNC;
+ src->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+ src->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ src->max_ts_offset_is_set = FALSE;
+ src->default_version = DEFAULT_VERSION;
+ src->version = GST_RTSP_VERSION_INVALID;
+ src->teardown_timeout = DEFAULT_TEARDOWN_TIMEOUT;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_mutex_init (&(src)->pause_lock);
+ g_cond_init (&(src)->open_end);
+#endif
/* get a list of all extensions */
src->extensions = gst_rtsp_ext_list_get ();
{
GList *walk;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_WARNING_OBJECT (src, "Setting [%s] element state to: %s \n",
+ GST_ELEMENT_NAME (GST_ELEMENT_CAST (src)),
+ gst_element_state_get_name (state));
++#endif
if (src->manager)
gst_element_set_state (GST_ELEMENT_CAST (src->manager), state);
static void
gst_rtspsrc_loop_complete_cmd (GstRTSPSrc * src, gint cmd)
{
- #endif
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstMessage *s;
+ GST_WARNING_OBJECT (src, "Got cmd %s", cmd_to_string (cmd));
++#endif
+
switch (cmd) {
case CMD_OPEN:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_DEBUG_OBJECT (src,
+ "rtsp_duration %" GST_TIME_FORMAT
+ ", rtsp_audio_codec %s , rtsp_video_codec %s , rtsp_video_frame_size %s",
+ GST_TIME_ARGS (src->segment.duration), src->audio_codec,
+ src->video_codec, src->video_frame_size);
+
+ /* post message */
+ s = gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_new ("rtspsrc_properties",
+ "rtsp_duration", G_TYPE_UINT64, src->segment.duration,
+ "rtsp_audio_codec", G_TYPE_STRING, src->audio_codec,
+ "rtsp_video_codec", G_TYPE_STRING, src->video_codec,
+ "rtsp_video_frame_size", G_TYPE_STRING, src->video_frame_size,
+ NULL));
+
+ gst_element_post_message (GST_ELEMENT_CAST (src), s);
+#endif
GST_ELEMENT_PROGRESS (src, COMPLETE, "open", ("Opened Stream"));
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* rtspsrc PAUSE state should be here for parsing sdp before PAUSE state changed. */
+ g_mutex_lock (&(src)->pause_lock);
+ g_cond_signal (&(src)->open_end);
+ g_mutex_unlock (&(src)->pause_lock);
+#endif
break;
case CMD_PLAY:
GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PLAY request"));
GstRTSPMessage * request, GstRTSPMessage * response,
GstRTSPStatusCode * code)
{
- GstRTSPStatusCode int_code = GST_RTSP_STS_OK;
- GstRTSPResult res = GST_RTSP_ERROR;
- gint count;
- gboolean retry;
- GstRTSPMethod method = GST_RTSP_INVALID;
+ GstRTSPResult res;
+ gint try = 0;
+ gboolean allow_send = TRUE;
- count = 0;
- do {
- retry = FALSE;
+ again:
+ if (!src->short_header)
+ gst_rtsp_ext_list_before_send (src->extensions, request);
- /* make sure we don't loop forever */
- if (count++ > 8)
- break;
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_BEFORE_SEND], 0,
+ request, &allow_send);
+ if (!allow_send) {
+ GST_DEBUG_OBJECT (src, "skipping message, disabled by signal");
+ return GST_RTSP_OK;
+ }
- /* save method so we can disable it when the server complains */
+ GST_DEBUG_OBJECT (src, "sending message");
+
+ DEBUG_RTSP (src, request);
+
+ res = gst_rtspsrc_connection_send (src, conninfo, request, src->ptcp_timeout);
+ if (res < 0)
+ goto send_error;
+
+ gst_rtsp_connection_reset_timeout (conninfo->connection);
+ if (!response)
+ return res;
+
+ res = gst_rtsp_src_receive_response (src, conninfo, response, code);
+ if (res == GST_RTSP_EEOF) {
+ GST_WARNING_OBJECT (src, "server closed connection");
+ /* only try once after reconnect, then fallthrough and error out */
+ if ((try == 0) && !src->interleaved && src->udp_reconnect) {
+ try++;
+ /* if reconnect succeeds, try again */
+ if ((res = gst_rtsp_conninfo_reconnect (src, &src->conninfo, FALSE)) == 0)
+ goto again;
+ }
+ }
+ gst_rtsp_ext_list_after_send (src->extensions, request, response);
+
+ return res;
+
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "send interrupted");
+ }
+ g_free (str);
+ return res;
+ }
+ }
+
+ /**
+ * gst_rtspsrc_send:
+ * @src: the rtsp source
+ * @conninfo: the connection information to send on
+ * @request: must point to a valid request
+ * @response: must point to an empty #GstRTSPMessage
+ * @code: an optional code result
+ * @versions: List of versions to try, setting it back onto the @request message
+ * if not set, `src->version` will be used as RTSP version.
+ *
+ * send @request and retrieve the response in @response. optionally @code can be
+ * non-NULL in which case it will contain the status code of the response.
+ *
+ * If This function returns #GST_RTSP_OK, @response will contain a valid response
+ * message that should be cleaned with gst_rtsp_message_unset() after usage.
+ *
+ * If @code is NULL, this function will return #GST_RTSP_ERROR (with an invalid
+ * @response message) if the response code was not 200 (OK).
+ *
+ * If the attempt results in an authentication failure, then this will attempt
+ * to retrieve authentication credentials via gst_rtspsrc_setup_auth and retry
+ * the request.
+ *
+ * Returns: #GST_RTSP_OK if the processing was successful.
+ */
+ static GstRTSPResult
+ gst_rtspsrc_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * request, GstRTSPMessage * response,
+ GstRTSPStatusCode * code, GstRTSPVersion * versions)
+ {
+ GstRTSPStatusCode int_code = GST_RTSP_STS_OK;
+ GstRTSPResult res = GST_RTSP_ERROR;
+ gint count;
+ gboolean retry;
+ GstRTSPMethod method = GST_RTSP_INVALID;
+ gint version_retry = 0;
+
+ count = 0;
+ do {
+ retry = FALSE;
+
+ /* make sure we don't loop forever */
+ if (count++ > 8)
+ break;
+
+ /* save method so we can disable it when the server complains */
method = request->type_data.request.method;
+ if (!versions)
+ request->type_data.request.version = src->version;
+
if ((res =
gst_rtspsrc_try_send (src, conninfo, request, response,
&int_code)) < 0)
}
src->state = GST_RTSP_STATE_INIT;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* Check for the support for the Media codecs */
+ if ((!src->is_audio_codec_supported) && (!src->is_video_codec_supported)) {
+ GST_ERROR_OBJECT (src, "UnSupported Media Type !!!! \n");
+ goto unsupported_file_type;
+ } else {
+ GST_DEBUG_OBJECT (src, "Supported Media Type. \n");
+ }
+#endif
/* setup streams */
- if ((res = gst_rtspsrc_setup_streams (src, async)) < 0)
+ if ((res = gst_rtspsrc_setup_streams_start (src, async)) < 0)
goto setup_failed;
/* reset our state */
if (res < 0)
goto create_request_failed;
- if (src->need_range) {
+ if (src->need_range && src->seekable >= 0.0) {
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
hval = gen_range_header (src, segment);
gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
- "Could not send message. ");
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "PAUSE interrupted");
}
{
GstRTSPSrc *rtspsrc;
GstStateChangeReturn ret;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ guint64 end_time;
+#endif
rtspsrc = GST_RTSPSRC (element);
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_WARNING_OBJECT (rtspsrc, "State change transition: %d \n", transition);
++#endif
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
gulong manager_ptmap_id;
gboolean use_buffering;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* media type */
+ gboolean is_audio_codec_supported;
+ gboolean is_video_codec_supported;
+ gchar *audio_codec;
+ gchar *video_codec;
+ gchar *video_frame_size;
+#endif
+
GstRTSPConnInfo conninfo;
+ /* SET/GET PARAMETER requests queue */
+ GQueue set_get_param_q;
+
/* a list of RTSP extensions as GstElement */
GstRTSPExtensionList *extensions;
- #endif
+ GstRTSPVersion default_version;
+ GstRTSPVersion version;
++
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GCond open_end;
+ GMutex pause_lock;
+ guint64 start_position;
++#endif
};
struct _GstRTSPSrcClass {
--- /dev/null
- Version: 1.12.2
- Release: 2
+%bcond_with x
+%define gst_branch 1.0
+
+Name: gst-plugins-good
++Version: 1.16.2
++Release: 1
+License: LGPL-2.1+
+Summary: GStreamer Streaming-Media Framework Plug-Ins
+Url: http://gstreamer.freedesktop.org/
+Group: Multimedia/Framework
+Source: http://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-%{version}.tar.xz
+Source100: common.tar.gz
+BuildRequires: gcc-c++
+BuildRequires: gettext-tools
+BuildRequires: pkgconfig(glib-2.0) >= 2.32
+BuildRequires: pkgconfig(gstreamer-1.0)
+BuildRequires: pkgconfig(gstreamer-plugins-base-1.0)
+BuildRequires: libjpeg-devel
+BuildRequires: orc >= 0.4.16
+BuildRequires: python
+BuildRequires: xsltproc
+BuildRequires: pkgconfig(bzip2)
+BuildRequires: pkgconfig(libpng) >= 1.2
+BuildRequires: pkgconfig(libpulse) >= 1.0
+BuildRequires: pkgconfig(libsoup-2.4)
+BuildRequires: pkgconfig(libxml-2.0) >= 2.4.9
+# TODO find where process.h comes from, not kernel-devel and not wxWidgets so far.
+%if %{with x}
+BuildRequires: pkgconfig(ice)
+BuildRequires: pkgconfig(sm)
+BuildRequires: pkgconfig(xdamage)
+BuildRequires: pkgconfig(xfixes)
+# used by libgstvideo4linux2.so
+BuildRequires: pkgconfig(xv)
+%endif
+
+BuildRequires: pkgconfig(zlib)
+%if "%{tizen_profile_name}" != "tv"
+BuildRequires: pkgconfig(libv4l2)
+%endif
+BuildRequires: pkgconfig(vconf)
+BuildRequires: pkgconfig(gio-2.0)
+Requires: gst-plugins-base >= 1.0.0
+Requires: gstreamer >= 1.0.5
+
+%description
+GStreamer is a streaming media framework based on graphs of filters
+that operate on media data. Applications using this library can do
+anything media-related, from real-time sound processing to playing
+videos. Its plug-in-based architecture means that new data types or
+processing capabilities can be added simply by installing new plug-ins.
+
+%package extra
+Summary: Complementary plugins for %{name}
+Group: Productivity/Multimedia/Other
+Requires: %{name} = %{version}
+Enhances: gst-plugins-good
+
+%description extra
+This package provides complementary plugins for %{name} and
+plugins not included in official Tizen images, which may be used for development / experimental purposes.
+
+%prep
+%setup -q -n gst-plugins-good-%{version}
+%setup -q -T -D -a 100
+
+%build
+# FIXME:
+# warning: failed to load external entity "xml/element-v4l2src-details.xml"
+# warning: failed to load external entity "xml/plugin-video4linux2.xml"
+export V=1
+NOCONFIGURE=1 ./autogen.sh
+export CFLAGS+=" -DTIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE\
+ -DTIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID\
+ -DTIZEN_FEATURE_WAVPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_MP3PARSE_MODIFICATION\
+ -DTIZEN_FEATURE_AACPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_QTDEMUX_MODIFICATION\
+ -DTIZEN_FEATURE_FLVDEMUX_MODIFICATION\
+ -DTIZEN_FEATURE_GST_UPSTREAM\
+ -DTIZEN_FEATURE_RTSP_MODIFICATION\
+ -DTIZEN_FEATURE_GST_MUX_ENHANCEMENT\
+ -DTIZEN_FEATURE_SOUP_MODIFICATION\
+ -DTIZEN_FEATURE_RGVOLUME_MODIFICATION\
+ -DTIZEN_FEATURE_BASEPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY\
+ -fstack-protector-strong\
+ -Wl,-z,relro\
+ -D_FORTIFY_SOURCE=2"
+%configure\
+%if ! 0%{?ENABLE_AALIB}
+ --disable-aalib\
+%endif
+%if "%{tizen_profile_name}" != "tv"
+ --with-libv4l2 \
+%endif
+ --disable-gtk-doc\
+ --with-gtk=3.0\
+ --disable-monoscope\
+ --disable-y4m\
+ --disable-taglib\
+ --disable-wavpack\
+ --enable-experimental\
+ --disable-equalizer\
+%if "%{tizen_profile_name}" == "tv"
+ --disable-flv\
+ --disable-videobox\
+ --disable-videomixer\
+%endif
+ --disable-effectv\
+ --disable-alpha\
+ --disable-auparse\
+ --disable-effectv\
+ --disable-flx\
+ --disable-goom\
+ --disable-goom2k1\
+ --disable-level\
+ --disable-multipart\
+ --disable-smpte\
+ --disable-spectrum\
+ --disable-cutter\
+ --disable-dtmf\
+ --disable-oss4\
+ --disable-oss\
+ --disable-shapewipe
+
+make %{?_smp_mflags} CFLAGS+="-Wno-error" CXXFLAGS+="-Wno-error"
+
+%install
+%make_install
+%find_lang %{name}-%{gst_branch}
+
+%lang_package -f %{name}-%{gst_branch}
+
+%files
+%manifest %{name}.manifest
+%defattr(-, root, root)
+%license COPYING
+%{_libdir}/gstreamer-%{gst_branch}/libgstalaw.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstalpha.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstalphacolor.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstapetag.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstaudiofx.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstaudioparsers.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstauparse.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstautodetect.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstavi.so
+# Not yet ported
+#%{_libdir}/gstreamer-%{gst_branch}/libgstcutter.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstdebug.so
+# Not yet ported
+%{_libdir}/gstreamer-%{gst_branch}/libgstdeinterlace.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgsteffectv.so
+
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstVP8Enc.prs
+
+#%{_libdir}/gstreamer-%{gst_branch}/libgstflxdec.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstgoom.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstgoom2k1.so
+%{_libdir}/gstreamer-%{gst_branch}/libgsticydemux.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstid3demux.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstinterleave.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstisomp4.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstjpeg.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstlevel.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmatroska.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstmonoscope.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmulaw.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmultifile.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstmultipart.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstnavigationtest.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstoss4audio.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstossaudio.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstpulseaudio.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstreplaygain.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtp.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtpmanager.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtsp.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstshapewipe.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstsmpte.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstspectrum.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstspeex.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstudp.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideo4linux2.so
+
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideocrop.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideofilter.so
+%if "%{tizen_profile_name}" != "tv"
+%{_libdir}/gstreamer-%{gst_branch}/libgstflv.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstequalizer.so
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstIirEqualizer10Bands.prs
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstIirEqualizer3Bands.prs
+%{_datadir}/gstreamer-%{gst_branch}/presets/GstQTMux.prs
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideobox.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideomixer.so
+%endif
+%{_libdir}/gstreamer-%{gst_branch}/libgstwavenc.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstwavparse.so
+%if %{with x}
+%{_libdir}/gstreamer-%{gst_branch}/libgstximagesrc.so
+%endif
+#%{_libdir}/gstreamer-%{gst_branch}/libgsty4menc.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstcairo.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstsoup.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstflac.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstvpx.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstdtmf.so
+
+
+%files extra
+%manifest %{name}.manifest
+%defattr(-, root, root)
+%if 0%{?ENABLE_AALIB}
+%{_libdir}/gstreamer-%{gst_branch}/libgstaasink.so
+%endif
+%{_libdir}/gstreamer-%{gst_branch}/libgstpng.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstimagefreeze.so
#, c-format
msgid "Could not get parameters on device '%s'"
- msgstr "Parametre uređaja ‘%s’ nije moguće dobiti"
+ msgstr "Nije moguće dobiti parametre uređaja „%s“"
msgid "Video device did not accept new frame rate setting."
- msgstr "Videouređaj nije prihvatio novu frekvenciju slika (ili poluslika)."
+ msgstr "Videouređaj nije prihvatio novu postavku frekvencije okvira (slika)."
msgid "Video device did not provide output format."
- msgstr "Videouređaj nije predočio izlazni format."
+ msgstr "Videouređaj nije dao/odredio izlazni format."
msgid "Video device returned invalid dimensions."
- msgstr "Videouređaj je uzvratio s neispravnim dimenzijama."
+ msgstr "Videouređaj nije vratio valjane dimenzije."
msgid "Video device uses an unsupported interlacing method."
- msgstr "Videouređaj koristi nepodržanu metodu poluslika."
+ msgstr "Videouređaj koristi nepodržanu metodu preplitanja (interlacing)."
msgid "Video device uses an unsupported pixel format."
-msgstr "Videouređaj koristi nepodržani format piksela."
+msgstr "Videouređaj koristi format piksela koji nije podržan."
msgid "Failed to configure internal buffer pool."
- msgstr "Nije uspjelo konfigurirati internu zalihu međuspremnika."
+ msgstr "Nije uspjelo konfigurirati interne međuspremnike (buffer pool)."
msgid "Video device did not suggest any buffer size."
- msgstr "Videouređaj nije predložio nijednu veličinu međuspremnika."
+ msgstr "Videouređaj nije naveo/zatražio bilo kakvu veličinu međuspremnika."
msgid "No downstream pool to import from."
- msgstr "Nema se od nikuda uvesti ‘downstream’ zaliha."
+ msgstr "Ne postoji mjesto (downstream pool) iz kojeg se može uvoziti."
# tuner > štelanje frekvencije, mijenjanje (biranje) kanala
#, c-format
#define V4L2_SDR_FMT_CS8 v4l2_fourcc('C', 'S', '0', '8') /* complex s8 */
#define V4L2_SDR_FMT_CS14LE v4l2_fourcc('C', 'S', '1', '4') /* complex s14le */
#define V4L2_SDR_FMT_RU12LE v4l2_fourcc('R', 'U', '1', '2') /* real u12le */
+ #define V4L2_SDR_FMT_PCU16BE v4l2_fourcc('P', 'C', '1', '6') /* planar complex u16be */
+ #define V4L2_SDR_FMT_PCU18BE v4l2_fourcc('P', 'C', '1', '8') /* planar complex u18be */
+ #define V4L2_SDR_FMT_PCU20BE v4l2_fourcc('P', 'C', '2', '0') /* planar complex u20be */
+
+ /* Touch formats - used for Touch devices */
+ #define V4L2_TCH_FMT_DELTA_TD16 v4l2_fourcc('T', 'D', '1', '6') /* 16-bit signed deltas */
+ #define V4L2_TCH_FMT_DELTA_TD08 v4l2_fourcc('T', 'D', '0', '8') /* 8-bit signed deltas */
+ #define V4L2_TCH_FMT_TU16 v4l2_fourcc('T', 'U', '1', '6') /* 16-bit unsigned touch data */
+ #define V4L2_TCH_FMT_TU08 v4l2_fourcc('T', 'U', '0', '8') /* 8-bit unsigned touch data */
+
+ /* Meta-data formats */
+ #define V4L2_META_FMT_VSP1_HGO v4l2_fourcc('V', 'S', 'P', 'H') /* R-Car VSP1 1-D Histogram */
+ #define V4L2_META_FMT_VSP1_HGT v4l2_fourcc('V', 'S', 'P', 'T') /* R-Car VSP1 2-D Histogram */
+ #define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header metadata */
+ #define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D4XX Payload Header metadata */
+#define V4L2_PIX_FMT_INVZ v4l2_fourcc('I', 'N', 'V', 'Z') /* Intel Planar Depth 16-bit */
+
/* priv field value to indicates that subsequent fields are valid. */
#define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe
gst_v4l2_open (GstV4l2Object * v4l2object)
{
struct stat st;
- int libv4l2_fd;
+ int libv4l2_fd = -1;
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ int error_type = V4L2_OPEN_ERROR_STAT_FAILED;
+ int device_index = 0;
+ glob_t glob_buf;
+
+ memset(&glob_buf, 0x0, sizeof(glob_t));
-
- GST_DEBUG_OBJECT (v4l2object->element, "Trying to open device %s",
+ if (!v4l2object) {
+ GST_ERROR("v4l2object is NULL");
+ return FALSE;
+ }
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to open device %s",
v4l2object->videodev);
GST_V4L2_CHECK_NOT_OPEN (v4l2object);
v4l2object->video_fd =
open (v4l2object->videodev, O_RDWR /* | O_NONBLOCK */ );
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!GST_V4L2_IS_OPEN (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR_NOT_OPEN;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!GST_V4L2_IS_OPEN (v4l2object))
goto not_open;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
- libv4l2_fd = v4l2_fd_open (v4l2object->video_fd,
- V4L2_ENABLE_ENUM_FMT_EMULATION);
+ #ifdef HAVE_LIBV4L2
+ if (v4l2object->fd_open)
+ libv4l2_fd = v4l2object->fd_open (v4l2object->video_fd,
+ V4L2_ENABLE_ENUM_FMT_EMULATION);
+ #endif
+
/* Note the v4l2_xxx functions are designed so that if they get passed an
unknown fd, the will behave exactly as their regular xxx counterparts, so
if v4l2_fd_open fails, we continue as normal (missing the libv4l2 custom
!(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
V4L2_CAP_VIDEO_OUTPUT_MPLANE)))
goto not_output;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (GST_IS_V4L2_VIDEO_DEC (v4l2object->element) &&
- /* Today's M2M device only expose M2M */
- !((v4l2object->device_caps & (V4L2_CAP_VIDEO_M2M |
- V4L2_CAP_VIDEO_M2M_MPLANE)) ||
- /* But legacy driver may expose both CAPTURE and OUTPUT */
- ((v4l2object->device_caps &
- (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) &&
- (v4l2object->device_caps &
- (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))))
+ !GST_V4L2_IS_M2M (v4l2object->device_caps))
goto not_m2m;
gst_v4l2_adjust_buf_type (v4l2object);
/* create enumerations, posts errors. */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!gst_v4l2_fill_lists (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!gst_v4l2_fill_lists (v4l2object))
goto error;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
- GST_INFO_OBJECT (v4l2object->element,
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
"Opened device '%s' (%s) successfully",
v4l2object->vcap.card, v4l2object->videodev);