#ifndef GST_DISABLE_GST_DEBUG
static const char *const snap_types[2][2] = {
- {"any", "before"},
- {"after", "nearest"},
+ {"any", "after"},
+ {"before", "nearest"},
};
#endif
static gboolean gst_avi_demux_src_convert (GstPad * pad, GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
-static gboolean gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment);
+static gboolean gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
+ GstSeekFlags flags);
static gboolean gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad,
GstEvent * event);
static gboolean gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf);
static void gst_avi_demux_parse_strd (GstAviDemux * avi, GstBuffer * buf);
+static void parse_tag_value (GstAviDemux * avi, GstTagList * taglist,
+ const gchar * type, guint8 * ptr, guint tsize);
+
/* GObject methods */
#define gst_avi_demux_parent_class parent_class
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GObjectClass *gobject_class = (GObjectClass *) klass;
GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl, *subpicsrctempl;
- GstCaps *audcaps, *vidcaps, *subcaps, *subpiccaps;;
+ GstCaps *audcaps, *vidcaps, *subcaps, *subpiccaps;
GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
0, "Demuxer for AVI streams");
gst_element_class_add_pad_template (gstelement_class, videosrctempl);
gst_element_class_add_pad_template (gstelement_class, subsrctempl);
gst_element_class_add_pad_template (gstelement_class, subpicsrctempl);
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&sink_templ));
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_templ);
gst_element_class_set_static_metadata (gstelement_class, "Avi demuxer",
"Codec/Demuxer",
gst_adapter_clear (avi->adapter);
gst_segment_init (&avi->segment, GST_FORMAT_TIME);
+ avi->segment_seqnum = 0;
}
}
#endif
-static guint
+static gint
gst_avi_demux_index_entry_offset_search (GstAviIndexEntry * entry,
guint64 * offset)
{
gst_segment_copy_into (&segment, &avi->segment);
GST_DEBUG_OBJECT (avi, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+ avi->segment_seqnum = gst_event_get_seqnum (event);
segment_event = gst_event_new_segment (&segment);
gst_event_set_seqnum (segment_event, gst_event_get_seqnum (event));
gst_avi_demux_push_event (avi, segment_event);
}
gst_event_unref (event);
break;
- case GST_EVENT_QOS:
- case GST_EVENT_NAVIGATION:
- res = FALSE;
- gst_event_unref (event);
- break;
default:
res = gst_pad_event_default (pad, parent, event);
break;
* Create and push a flushing seek event upstream
*/
static gboolean
-perform_seek_to_offset (GstAviDemux * demux, guint64 offset)
+perform_seek_to_offset (GstAviDemux * demux, guint64 offset, guint32 seqnum)
{
GstEvent *event;
gboolean res = 0;
gst_event_new_seek (1.0, GST_FORMAT_BYTES,
GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
GST_SEEK_TYPE_NONE, -1);
-
+ gst_event_set_seqnum (event, seqnum);
res = gst_pad_push_event (demux->sinkpad, event);
if (res)
}
/* seek to next index */
- return perform_seek_to_offset (avi, avi->odml_subidxs[avi->odml_subidx]);
+ return perform_seek_to_offset (avi, avi->odml_subidxs[avi->odml_subidx],
+ avi->segment_seqnum);
}
/*
s = gst_caps_get_structure (caps, 0);
if (gst_structure_has_name (s, "video/x-raw")) {
stream->is_raw = TRUE;
+ stream->alignment = 32;
if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1, 1, NULL);
GstEvent *event;
gchar *stream_id;
GstMapInfo map;
+ gboolean sparse = FALSE;
element = GST_ELEMENT_CAST (avi);
g_free (stream->name);
gst_buffer_map (sub, &map, GST_MAP_READ);
- stream->name = g_strndup ((gchar *) map.data, map.size);
- gst_buffer_unmap (sub, &map);
- gst_buffer_unref (sub);
- sub = NULL;
if (avi->globaltags == NULL)
avi->globaltags = gst_tag_list_new_empty ();
- gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE,
- GST_TAG_TITLE, stream->name, NULL);
- GST_DEBUG_OBJECT (avi, "stream name: %s", stream->name);
+ parse_tag_value (avi, avi->globaltags, GST_TAG_TITLE,
+ map.data, map.size);
+
+ if (gst_tag_list_get_string (avi->globaltags, GST_TAG_TITLE,
+ &stream->name))
+ GST_DEBUG_OBJECT (avi, "stream name: %s", stream->name);
+
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_unref (sub);
+ sub = NULL;
break;
case GST_RIFF_IDIT:
gst_avi_demux_parse_idit (avi, sub);
stream->strf.vids, stream->extradata, stream->initdata, &codec_name);
/* DXSB is XSUB, and it is placed inside a vids */
- if (!caps || fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'B')) {
+ if (!caps || (fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'B') &&
+ fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'A'))) {
padname = g_strdup_printf ("video_%u", avi->num_v_streams);
templ = gst_element_class_get_pad_template (klass, "video_%u");
if (!caps) {
if (n && d)
gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
n, d, NULL);
- /* very local, not needed elsewhere */
- g_free (vprp);
- vprp = NULL;
}
caps = gst_avi_demux_check_caps (avi, stream, caps);
tag_name = GST_TAG_VIDEO_CODEC;
templ = gst_element_class_get_pad_template (klass, "subpicture_%u");
tag_name = NULL;
avi->num_sp_streams++;
+ sparse = TRUE;
}
break;
}
caps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
tag_name = NULL;
avi->num_t_streams++;
+ sparse = TRUE;
break;
}
default:
event = gst_event_new_stream_start (stream_id);
if (avi->have_group_id)
gst_event_set_group_id (event, avi->group_id);
+ if (sparse)
+ gst_event_set_stream_flags (event, GST_STREAM_FLAG_SPARSE);
gst_pad_push_event (pad, event);
g_free (stream_id);
gst_tag_list_add (stream->taglist, GST_TAG_MERGE_APPEND, tag_name,
codec_name, NULL);
- g_free (codec_name);
}
+ g_free (vprp);
+ g_free (codec_name);
gst_buffer_unref (buf);
return TRUE;
* @avi: Avi object
* @stream: the stream
* @time: a time position
+ * @next: whether to look for entry before or after @time
*
- * Finds the index entry which time is less or equal than the requested time.
+ * Finds the index entry which time is less/more or equal than the requested time.
* Try to avoid binary search when we can convert the time to an index
* position directly (for example for video frames with a fixed duration).
*
*/
static guint
gst_avi_demux_index_for_time (GstAviDemux * avi,
- GstAviStream * stream, guint64 time)
+ GstAviStream * stream, guint64 time, gboolean next)
{
guint index = -1;
guint64 total;
total = avi_stream_convert_time_to_frames_unchecked (stream, time);
} else {
index = avi_stream_convert_time_to_frames_unchecked (stream, time);
+ /* this entry typically undershoots the target time,
+ * so check a bit more if next needed */
+ if (next) {
+ GstClockTime itime =
+ avi_stream_convert_frames_to_time_unchecked (stream, index);
+ if (itime < time && index + 1 < stream->idx_n)
+ index++;
+ }
}
} else if (stream->strh->type == GST_RIFF_FCC_auds) {
/* constant rate stream */
entry = gst_util_array_binary_search (stream->index,
stream->idx_n, sizeof (GstAviIndexEntry),
(GCompareDataFunc) gst_avi_demux_index_entry_search,
- GST_SEARCH_MODE_BEFORE, &total, NULL);
+ next ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, &total, NULL);
if (entry == NULL) {
GST_LOG_OBJECT (avi, "not found, assume index 0");
(8 + GST_ROUND_UP_2 (size)));
avi->idx1_offset = offset + 8 + GST_ROUND_UP_2 (size);
/* issue seek to allow chain function to handle it and return! */
- perform_seek_to_offset (avi, avi->idx1_offset);
+ perform_seek_to_offset (avi, avi->idx1_offset, avi->segment_seqnum);
return;
}
if (avi->globaltags) {
gst_tag_list_insert (avi->globaltags, tags,
GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
} else {
avi->globaltags = tags;
}
if (avi->seg_event)
gst_event_unref (avi->seg_event);
avi->seg_event = gst_event_new_segment (&avi->segment);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (avi->seg_event, avi->segment_seqnum);
gst_avi_demux_check_seekability (avi);
if (g_ascii_isdigit (ptr[0])) {
gst_avi_demux_parse_idit_nums_only (avi, safedata);
g_free (safedata);
+ gst_buffer_unmap (buf, &map);
return;
} else if (g_ascii_isalpha (ptr[0])) {
gst_avi_demux_parse_idit_text (avi, safedata);
g_free (safedata);
+ gst_buffer_unmap (buf, &map);
return;
}
avi->globaltags = gst_tag_list_new_empty ();
gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_APPEND,
- GST_TAG_DEVICE_MANUFACTURER, "FUJIFILM",
- GST_TAG_DEVICE_MODEL, ptr, NULL);
+ GST_TAG_DEVICE_MANUFACTURER, "FUJIFILM", NULL);
+ parse_tag_value (avi, avi->globaltags, GST_TAG_DEVICE_MODEL, ptr,
+ sub_size);
while (ptr[sub_size] == '\0' && sub_size < left)
sub_size++;
if (avi->globaltags) {
gst_tag_list_insert (avi->globaltags, tags,
GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
} else {
avi->globaltags = tags;
}
if (avi->globaltags) {
gst_tag_list_insert (avi->globaltags, tags,
GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
} else {
avi->globaltags = tags;
}
if (avi->globaltags) {
gst_tag_list_insert (avi->globaltags, tags,
GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
} else {
avi->globaltags = tags;
}
if (avi->globaltags) {
gst_tag_list_insert (avi->globaltags, tags,
GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
} else {
avi->globaltags = tags;
}
gst_avi_demux_expose_streams (avi, FALSE);
/* do initial seek to the default segment values */
- gst_avi_demux_do_seek (avi, &avi->segment);
+ gst_avi_demux_do_seek (avi, &avi->segment, 0);
/* create initial NEWSEGMENT event */
if (avi->seg_event)
gst_event_unref (avi->seg_event);
avi->seg_event = gst_event_new_segment (&avi->segment);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (avi->seg_event, avi->segment_seqnum);
stamp = gst_util_get_timestamp () - stamp;
GST_DEBUG_OBJECT (avi, "pulling header took %" GST_TIME_FORMAT,
}
pull_range_failed:
{
+ if (res == GST_FLOW_FLUSHING)
+ return res;
GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
("pull_range flow reading header: %s", gst_flow_get_name (res)));
- return GST_FLOW_ERROR;
+ return res;
}
}
* Do the actual seeking.
*/
static gboolean
-gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment)
+gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
+ GstSeekFlags flags)
{
GstClockTime seek_time;
gboolean keyframe, before, after;
guint i, index;
GstAviStream *stream;
+ gboolean next;
seek_time = segment->position;
- keyframe = ! !(segment->flags & GST_SEEK_FLAG_KEY_UNIT);
- before = ! !(segment->flags & GST_SEEK_FLAG_SNAP_BEFORE);
- after = ! !(segment->flags & GST_SEEK_FLAG_SNAP_AFTER);
+ keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+ after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
GST_DEBUG_OBJECT (avi, "seek to: %" GST_TIME_FORMAT
" keyframe seeking:%d, %s", GST_TIME_ARGS (seek_time), keyframe,
* which is mostly correct... */
stream = &avi->stream[avi->main_stream];
+ next = after && !before;
+ if (segment->rate < 0)
+ next = !next;
+
/* get the entry index for the requested position */
- index = gst_avi_demux_index_for_time (avi, stream, seek_time);
+ index = gst_avi_demux_index_for_time (avi, stream, seek_time, next);
GST_DEBUG_OBJECT (avi, "Got entry %u", index);
if (index == -1)
return FALSE;
/* check if we are already on a keyframe */
if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
- gboolean next;
-
- next = after && !before;
- if (segment->rate < 0)
- next = !next;
-
if (next) {
GST_DEBUG_OBJECT (avi, "not keyframe, searching forward");
/* now go to the next keyframe, this is where we should start
seek_time = stream->current_timestamp;
GST_DEBUG_OBJECT (avi, "keyframe adjusted to %" GST_TIME_FORMAT,
GST_TIME_ARGS (seek_time));
+ /* the seek time is always the position ... */
+ segment->position = seek_time;
+ /* ... and start and stream time when going forwards,
+ * otherwise only stop time */
+ if (segment->rate > 0.0)
+ segment->start = segment->time = seek_time;
+ else
+ segment->stop = seek_time;
}
- /* the seek time is also the position and stream time when going
- * forwards */
- segment->position = seek_time;
- if (segment->rate > 0.0)
- segment->time = seek_time;
-
/* now set DISCONT and align the other streams */
for (i = 0; i < avi->num_streams; i++) {
GstAviStream *ostream;
continue;
/* get the entry index for the requested position */
- index = gst_avi_demux_index_for_time (avi, ostream, seek_time);
+ index = gst_avi_demux_index_for_time (avi, ostream, seek_time, FALSE);
if (index == -1)
continue;
}
/* do the seek, seeksegment.position contains the new position, this
* actually never fails. */
- gst_avi_demux_do_seek (avi, &seeksegment);
+ gst_avi_demux_do_seek (avi, &seeksegment, flags);
if (flush) {
GstEvent *fevent = gst_event_new_flush_stop (TRUE);
avi->seg_event = gst_event_new_segment (&avi->segment);
if (seqnum)
gst_event_set_seqnum (avi->seg_event, seqnum);
+ avi->segment_seqnum = seqnum;
if (!avi->streaming) {
gst_pad_start_task (avi->sinkpad, (GstTaskFunction) gst_avi_demux_loop,
GstSeekFlags flags;
GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
gint64 cur, stop;
- gboolean keyframe, before, after;
+ gboolean keyframe, before, after, next;
GstAviStream *stream;
guint index;
guint n, str_num;
str_num = avi->main_stream;
stream = &avi->stream[str_num];
+ next = after && !before;
+ if (seeksegment.rate < 0)
+ next = !next;
+
/* get the entry index for the requested position */
- index = gst_avi_demux_index_for_time (avi, stream, cur);
+ index = gst_avi_demux_index_for_time (avi, stream, cur, next);
GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT,
str_num, index, GST_TIME_ARGS (cur));
if (index == -1)
/* check if we are already on a keyframe */
if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
- gboolean next;
-
- next = after && !before;
- if (seeksegment.rate < 0)
- next = !next;
-
if (next) {
GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
/* now go to the next keyframe, this is where we should start
continue;
/* get the entry index for the requested position */
- idx = gst_avi_demux_index_for_time (avi, str, cur);
+ idx = gst_avi_demux_index_for_time (avi, str, cur, FALSE);
GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT, n,
idx, GST_TIME_ARGS (cur));
if (idx == -1)
/* check if we are already on a keyframe */
if (!ENTRY_IS_KEYFRAME (&str->index[idx])) {
- if (after && !before) {
+ if (next) {
GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
/* now go to the next keyframe, this is where we should start
* decoding from. */
GST_DEBUG_OBJECT (avi, "seeking to chunk at offset %" G_GUINT64_FORMAT,
min_offset);
- if (!perform_seek_to_offset (avi, min_offset)) {
+ if (!perform_seek_to_offset (avi, min_offset, gst_event_get_seqnum (event))) {
GST_DEBUG_OBJECT (avi, "seek event failed!");
return FALSE;
}
GST_INFO_OBJECT (avi,
"Seeking to legacy index/first subindex at %" G_GUINT64_FORMAT,
offset);
- return perform_seek_to_offset (avi, offset);
+ return perform_seek_to_offset (avi, offset, gst_event_get_seqnum (event));
}
/* FIXME: we have to always return true so that we don't block the seek
return stream_num;
}
+/* gst_avi_demux_align_buffer:
+ * @demux: the demuxer instance (used only for debug logging)
+ * @buffer: (transfer full): buffer whose data pointer may be misaligned
+ * @alignment: required byte alignment; assumed to be a power of two
+ *  (the `alignment - 1` mask below relies on this)
+ *
+ * Returns @buffer unchanged when its mapped data is already aligned, or
+ * when the buffer is smaller than a pointer (too small to be worth
+ * realigning).  Otherwise allocates a new buffer with the requested
+ * alignment, copies the data and metadata across, unrefs the original
+ * and returns the aligned copy.
+ */
+static GstBuffer *
+gst_avi_demux_align_buffer (GstAviDemux * demux,
+    GstBuffer * buffer, gsize alignment)
+{
+  GstMapInfo map;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+  if (map.size < sizeof (guintptr)) {
+    gst_buffer_unmap (buffer, &map);
+    return buffer;
+  }
+
+  if (((guintptr) map.data) & (alignment - 1)) {
+    GstBuffer *new_buffer;
+    GstAllocationParams params = { 0, alignment - 1, 0, 0, };
+
+    new_buffer = gst_buffer_new_allocate (NULL,
+        gst_buffer_get_size (buffer), &params);
+
+    /* Copy data "by hand", so ensure alignment is kept: */
+    gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+    gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+    GST_DEBUG_OBJECT (demux,
+        "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+        alignment);
+
+    gst_buffer_unmap (buffer, &map);
+    gst_buffer_unref (buffer);
+
+    return new_buffer;
+  }
+
+  gst_buffer_unmap (buffer, &map);
+  return buffer;
+}
+
static GstFlowReturn
gst_avi_demux_loop_data (GstAviDemux * avi)
{
&& (timestamp > avi->segment.stop)) {
goto eos_stop;
}
+ } else {
+ if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.start)
+ && (timestamp < avi->segment.start))
+ goto eos_stop;
}
GST_LOG ("reading buffer (size=%" G_GUINT64_FORMAT "), stream %d, pos %"
gst_buffer_get_size (buf), GST_TIME_ARGS (timestamp),
GST_TIME_ARGS (duration), out_offset, out_offset_end);
+ if (stream->alignment > 1)
+ buf = gst_avi_demux_align_buffer (avi, buf, stream->alignment);
ret = gst_pad_push (stream->pad, buf);
/* mark as processed, we increment the frame and byte counters then
} else {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
}
+
+ if (stream->alignment > 1)
+ buf = gst_avi_demux_align_buffer (avi, buf, stream->alignment);
res = gst_pad_push (stream->pad, buf);
buf = NULL;
avi->segment.position = avi->segment.start;
if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gint64 stop;
+ GstEvent *event;
+ GstMessage *msg;
if ((stop = avi->segment.stop) == -1)
stop = avi->segment.duration;
GST_INFO_OBJECT (avi, "sending segment_done");
- gst_element_post_message
- (GST_ELEMENT_CAST (avi),
+ msg =
gst_message_new_segment_done (GST_OBJECT_CAST (avi),
- GST_FORMAT_TIME, stop));
- gst_avi_demux_push_event (avi,
- gst_event_new_segment_done (GST_FORMAT_TIME, stop));
+ GST_FORMAT_TIME, stop);
+ if (avi->segment_seqnum)
+ gst_message_set_seqnum (msg, avi->segment_seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (avi), msg);
+
+ event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (event, avi->segment_seqnum);
+ gst_avi_demux_push_event (avi, event);
} else {
push_eos = TRUE;
}
/* for fatal errors we post an error message, wrong-state is
* not fatal because it happens due to flushes and only means
* that we should stop now. */
- GST_ELEMENT_ERROR (avi, STREAM, FAILED,
- (_("Internal data stream error.")),
- ("streaming stopped, reason %s", gst_flow_get_name (res)));
+ GST_ELEMENT_FLOW_ERROR (avi, res);
push_eos = TRUE;
}
if (push_eos) {
+ GstEvent *event;
+
GST_INFO_OBJECT (avi, "sending eos");
- if (!gst_avi_demux_push_event (avi, gst_event_new_eos ()) &&
- (res == GST_FLOW_EOS)) {
+ event = gst_event_new_eos ();
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (event, avi->segment_seqnum);
+ if (!gst_avi_demux_push_event (avi, event) && (res == GST_FLOW_EOS)) {
GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
(NULL), ("got eos but no streams (yet)"));
}
GST_OBJECT_UNLOCK (avi);
/* calculate and perform seek */
- if (!avi_demux_handle_seek_push (avi, avi->sinkpad, event))
+ if (!avi_demux_handle_seek_push (avi, avi->sinkpad, event)) {
+ gst_event_unref (event);
goto seek_failed;
+ }
gst_event_unref (event);
avi->state = GST_AVI_DEMUX_MOVI;