2005-01-08 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * configure.ac:
+ * ext/ogg/gstoggdemux.c: (gst_ogg_pad_new):
+ * ext/ogg/gstogmparse.c: (gst_ogm_text_parse_get_type),
+ (gst_ogm_text_parse_base_init), (gst_ogm_text_parse_init),
+ (gst_ogm_parse_get_sink_querytypes), (gst_ogm_parse_sink_convert),
+ (gst_ogm_parse_sink_query), (gst_ogm_parse_chain),
+ (gst_ogm_parse_plugin_init):
+ * ext/pango/gsttextoverlay.c: (gst_textoverlay_linkedpads),
+ (gst_textoverlay_link), (gst_textoverlay_getcaps),
+ (gst_textoverlay_event), (gst_textoverlay_video_chain),
+ (gst_textoverlay_loop), (gst_textoverlay_init), (plugin_init):
+ * ext/pango/gsttextoverlay.h:
+ * gst/matroska/matroska-demux.c: (gst_matroska_demux_add_stream),
+ (gst_matroska_demux_handle_seek_event),
+ (gst_matroska_demux_sync_streams),
+ (gst_matroska_demux_parse_blockgroup),
+ (gst_matroska_demux_subtitle_caps),
+ (gst_matroska_demux_plugin_init):
+ * gst/matroska/matroska-ids.h:
+ * gst/playback/gstdecodebin.c: (close_pad_link):
+ * gst/playback/gstplaybasebin.c: (gst_play_base_bin_init),
+ (gen_preroll_element), (remove_groups), (add_stream),
+ (new_decoded_pad), (setup_subtitles), (gen_source_element),
+ (setup_source):
+ * gst/playback/gstplaybasebin.h:
+ * gst/playback/gstplaybin.c: (gen_text_element), (setup_sinks):
+ * gst/subparse/Makefile.am:
+ * gst/subparse/gstsubparse.c: (gst_subparse_get_type),
+ (gst_subparse_base_init), (gst_subparse_class_init),
+ (gst_subparse_init), (gst_subparse_formats),
+ (gst_subparse_eventmask), (gst_subparse_event),
+ (gst_subparse_handle_event), (convert_encoding), (get_next_line),
+ (parse_mdvdsub), (parse_mdvdsub_init), (parse_subrip),
+ (parse_subrip_deinit), (parse_subrip_init), (parse_mpsub),
+ (parse_mpsub_deinit), (parse_mpsub_init),
+ (gst_subparse_buffer_format_autodetect),
+ (gst_subparse_format_autodetect), (gst_subparse_loop),
+ (gst_subparse_change_state), (gst_subparse_type_find),
+ (plugin_init):
+ * gst/subparse/gstsubparse.h:
+ * gst/typefind/gsttypefindfunctions.c: (ogmtext_type_find),
+ (plugin_init):
+ Add subtitle support, .sub parser (supports SRT and MPsub),
+ OGM text support, Matroska UTF-8 text support, deadlock fixes
+ all over the place, subtitle awareness in decodebin/playbin
+ and some fixes to textoverlay to handle subtitles in a stream
+ correctly. Fixes #100931.
+
+2005-01-08 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+
* ext/vorbis/vorbisdec.c: (vorbis_dec_src_query):
Check for pad availability before doing a query on it.
spectrum \
speed \
stereo \
+ subparse \
switch \
synaesthesia \
tags \
gst/spectrum/Makefile
gst/speed/Makefile
gst/stereo/Makefile
+gst/subparse/Makefile
gst/switch/Makefile
gst/synaesthesia/Makefile
gst/tags/Makefile
GST_OGG_SET_STATE (ogg, GST_OGG_STATE_SEEK);
FOR_PAD_IN_CURRENT_CHAIN (ogg, pad,
- pad->flags |= GST_OGG_PAD_NEEDS_DISCONT;
- );
+ pad->flags |= GST_OGG_PAD_NEEDS_DISCONT;);
if (GST_EVENT_SEEK_FLAGS (event) & GST_SEEK_FLAG_FLUSH) {
FOR_PAD_IN_CURRENT_CHAIN (ogg, pad,
- pad->flags |= GST_OGG_PAD_NEEDS_FLUSH;
- );
+ pad->flags |= GST_OGG_PAD_NEEDS_FLUSH;);
}
GST_DEBUG_OBJECT (ogg,
"initiating seeking to format %d, offset %" G_GUINT64_FORMAT, format,
gst_event_unref (event);
GST_FLAG_UNSET (ogg, GST_OGG_FLAG_WAIT_FOR_DISCONT);
FOR_PAD_IN_CURRENT_CHAIN (ogg, pad,
- pad->flags |= GST_OGG_PAD_NEEDS_DISCONT;
- );
+ pad->flags |= GST_OGG_PAD_NEEDS_DISCONT;);
break;
default:
gst_pad_event_default (pad, event);
*end = G_MAXINT64;
g_assert (ogg->current_chain >= 0);
- FOR_PAD_IN_CURRENT_CHAIN (ogg, pad, *start = MAX (*start, pad->end_offset);
- );
+ FOR_PAD_IN_CURRENT_CHAIN (ogg, pad, *start = MAX (*start, pad->end_offset););
if (ogg->setup_state == SETUP_FIND_LAST_CHAIN) {
*end = gst_file_pad_get_length (ogg->sinkpad);
} else {
endpos = G_MAXINT64;
FOR_PAD_IN_CHAIN (ogg, pad, ogg->chains->len - 1,
- endpos = MIN (endpos, pad->start_offset);
- );
+ endpos = MIN (endpos, pad->start_offset););
}
if (!ogg->seek_skipped || gst_ogg_demux_position (ogg) >= endpos) {
/* have we found the endposition for all streams yet? */
ret->start_offset = ret->end_offset = -1;
ret->start = -1;
ret->start_found = ret->end_found = FALSE;
+ ret->offset = GST_BUFFER_OFFSET_NONE;
return ret;
}
cur->pad = NULL;
}
}
+
+static void
+gst_ogg_sync (GstOggDemux * ogg, GstOggPad * cur)
+{
+ gint64 bias, time;
+ GstFormat fmt = GST_FORMAT_TIME;
+
+ time = get_relative (ogg, cur, cur->offset, GST_FORMAT_TIME);
+ FOR_PAD_IN_CURRENT_CHAIN (ogg, pad,
+ if (pad->pad && GST_PAD_PEER (pad->pad) &&
+ pad->offset != GST_BUFFER_OFFSET_NONE) {
+ if (gst_pad_query (GST_PAD_PEER (pad->pad),
+ GST_QUERY_POSITION, &fmt, &bias) && bias + GST_SECOND < time) {
+ GstEvent * event;
+ gint64 val = 0;
+ GstFormat fmt = GST_FORMAT_DEFAULT;
+ event = gst_event_new_filler_stamped (bias,
+ time - bias - GST_SECOND / 2);
+ GST_DEBUG ("Syncing stream %d at time %" GST_TIME_FORMAT
+ " and duration %" GST_TIME_FORMAT,
+ pad->serial, GST_TIME_ARGS (bias),
+ GST_TIME_ARGS (time - bias - GST_SECOND / 2));
+ gst_pad_push (pad->pad, GST_DATA (event));
+ gst_pad_convert (GST_PAD_PEER (pad->pad),
+ GST_FORMAT_TIME, bias, &fmt, &val);
+ /* hmm... */
+ pad->offset = pad->start + val;}
+ }
+ );
+}
+
static void
gst_ogg_pad_push (GstOggDemux * ogg, GstOggPad * pad)
{
} else {
event = gst_event_new_discontinuous (FALSE,
GST_FORMAT_DEFAULT, discont, GST_FORMAT_UNDEFINED);
+ pad->offset = discont;
}
} else {
event = gst_event_new_discontinuous (FALSE,
if (packet.granulepos != -1 && pos != -1)
GST_BUFFER_OFFSET_END (buf) = pos;
pad->offset = packet.granulepos;
+ gst_ogg_sync (ogg, pad);
if (GST_PAD_IS_USABLE (pad->pad))
gst_pad_push (pad->pad, GST_DATA (buf));
break;
#define GST_IS_OGM_AUDIO_PARSE(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_OGM_AUDIO_PARSE))
+#define GST_TYPE_OGM_TEXT_PARSE (gst_ogm_text_parse_get_type())
+#define GST_IS_OGM_TEXT_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_OGM_TEXT_PARSE))
+
#define GST_TYPE_OGM_PARSE (gst_ogm_parse_get_type())
#define GST_OGM_PARSE(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_OGM_PARSE, GstOgmParse))
{
stream_header_video video;
stream_header_audio audio;
+ /* text has no additional data */
} s;
} stream_header;
static GstStaticPadTemplate ogm_audio_parse_sink_template_factory =
GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
GST_STATIC_CAPS ("application/x-ogm-audio"));
-static GstPadTemplate *video_src_templ, *audio_src_templ;
+static GstStaticPadTemplate ogm_text_parse_sink_template_factory =
+GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-ogm-text"));
+static GstPadTemplate *video_src_templ, *audio_src_templ, *text_src_templ;
static GType gst_ogm_audio_parse_get_type (void);
static GType gst_ogm_video_parse_get_type (void);
+static GType gst_ogm_text_parse_get_type (void);
static GType gst_ogm_parse_get_type (void);
static void gst_ogm_audio_parse_base_init (GstOgmParseClass * klass);
static void gst_ogm_video_parse_base_init (GstOgmParseClass * klass);
+static void gst_ogm_text_parse_base_init (GstOgmParseClass * klass);
static void gst_ogm_parse_class_init (GstOgmParseClass * klass);
static void gst_ogm_parse_init (GstOgmParse * ogm);
static void gst_ogm_video_parse_init (GstOgmParse * ogm);
static void gst_ogm_audio_parse_init (GstOgmParse * ogm);
+static void gst_ogm_text_parse_init (GstOgmParse * ogm);
static const GstFormat *gst_ogm_parse_get_sink_formats (GstPad * pad);
+static const GstQueryType *gst_ogm_parse_get_sink_querytypes (GstPad * pad);
static gboolean gst_ogm_parse_sink_convert (GstPad * pad, GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
+static gboolean gst_ogm_parse_sink_query (GstPad * pad, GstQueryType type,
+ GstFormat * fmt, gint64 * val);
static void gst_ogm_parse_chain (GstPad * pad, GstData * data);
return ogm_video_parse_type;
}
+GType
+gst_ogm_text_parse_get_type (void)
+{
+ static GType ogm_text_parse_type = 0;
+
+ if (!ogm_text_parse_type) {
+ static const GTypeInfo ogm_text_parse_info = {
+ sizeof (GstOgmParseClass),
+ (GBaseInitFunc) gst_ogm_text_parse_base_init,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ sizeof (GstOgmParse),
+ 0,
+ (GInstanceInitFunc) gst_ogm_text_parse_init,
+ };
+
+ ogm_text_parse_type =
+ g_type_register_static (GST_TYPE_OGM_PARSE,
+ "GstOgmTextParse", &ogm_text_parse_info, 0);
+ }
+
+ return ogm_text_parse_type;
+}
+
static void
gst_ogm_audio_parse_base_init (GstOgmParseClass * klass)
{
}
static void
+gst_ogm_text_parse_base_init (GstOgmParseClass * klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ static GstElementDetails gst_ogm_text_parse_details =
+ GST_ELEMENT_DETAILS ("OGM text stream parser",
+ "Codec/Decoder/Subtitle",
+ "parse an OGM text header and stream",
+ "Ronald Bultje <rbultje@ronald.bitfreak.net>");
+ GstCaps *caps = gst_caps_new_simple ("text/plain", NULL);
+
+ gst_element_class_set_details (element_class, &gst_ogm_text_parse_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&ogm_text_parse_sink_template_factory));
+ text_src_templ = gst_pad_template_new ("src",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, caps);
+ gst_element_class_add_pad_template (element_class, text_src_templ);
+}
+
+static void
gst_ogm_parse_class_init (GstOgmParseClass * klass)
{
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
ogm->srcpadtempl = video_src_templ;
}
+static void
+gst_ogm_text_parse_init (GstOgmParse * ogm)
+{
+ GstPadTemplate *templ;
+
+ /* create the pads */
+ templ = gst_static_pad_template_get (&ogm_text_parse_sink_template_factory);
+ ogm->sinkpad = gst_pad_new_from_template (templ, "sink");
+ gst_pad_set_convert_function (ogm->sinkpad, gst_ogm_parse_sink_convert);
+ gst_pad_set_formats_function (ogm->sinkpad, gst_ogm_parse_get_sink_formats);
+ gst_pad_set_query_type_function (ogm->sinkpad,
+ gst_ogm_parse_get_sink_querytypes);
+ gst_pad_set_query_function (ogm->sinkpad, gst_ogm_parse_sink_query);
+ gst_pad_set_chain_function (ogm->sinkpad, gst_ogm_parse_chain);
+ gst_element_add_pad (GST_ELEMENT (ogm), ogm->sinkpad);
+
+#if 0
+ ogm->srcpad = gst_pad_new_from_template (text_src_templ, "src");
+ gst_pad_use_explicit_caps (ogm->srcpad);
+ gst_element_add_pad (GST_ELEMENT (ogm), ogm->srcpad);
+#endif
+ ogm->srcpadtempl = text_src_templ;
+}
+
static const GstFormat *
gst_ogm_parse_get_sink_formats (GstPad * pad)
{
return formats;
}
+static const GstQueryType *
+gst_ogm_parse_get_sink_querytypes (GstPad * pad)
+{
+ static const GstQueryType types[] = {
+ GST_QUERY_POSITION,
+ 0
+ };
+
+ return types;
+}
+
static gboolean
gst_ogm_parse_sink_convert (GstPad * pad,
GstFormat src_format, gint64 src_value,
res = TRUE;
break;
case 'v':
+ case 't':
*dest_value = (GST_SECOND / 10000000) *
ogm->hdr.time_unit * src_value;
res = TRUE;
break;
}
break;
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_DEFAULT:
+ switch (ogm->hdr.streamtype[0]) {
+ case 'a':
+ *dest_value = ogm->hdr.samples_per_unit * src_value / GST_SECOND;
+ res = TRUE;
+ break;
+ case 'v':
+ case 't':
+ *dest_value = src_value /
+ ((GST_SECOND / 10000000) * ogm->hdr.time_unit);
+ res = TRUE;
+ break;
+ default:
+ break;
+ }
+ break;
+ default:
+ break;
+ }
+ break;
default:
break;
}
return res;
}
+static gboolean
+gst_ogm_parse_sink_query (GstPad * pad,
+ GstQueryType type, GstFormat * fmt, gint64 * val)
+{
+ GstOgmParse *ogm = GST_OGM_PARSE (gst_pad_get_parent (pad));
+
+ if (type != GST_QUERY_POSITION)
+ return FALSE;
+ if (*fmt != GST_FORMAT_DEFAULT && *fmt != GST_FORMAT_TIME)
+ return FALSE;
+
+ return gst_pad_convert (pad,
+ GST_FORMAT_DEFAULT, ogm->next_granulepos, fmt, val);
+}
+
static void
gst_ogm_parse_chain (GstPad * pad, GstData * dat)
{
ogm->hdr.s.audio.channels = GST_READ_UINT32_LE (&data[45]);
ogm->hdr.s.audio.blockalign = GST_READ_UINT32_LE (&data[47]);
ogm->hdr.s.audio.avgbytespersec = GST_READ_UINT32_LE (&data[49]);
+ } else if (!memcmp (&data[1], "text\000\000\000\000", 8)) {
+ /* nothing here */
} else {
GST_ELEMENT_ERROR (ogm, STREAM, WRONG_TYPE,
("Unknown stream type"), (NULL));
"framerate", G_TYPE_DOUBLE, 10000000. / ogm->hdr.time_unit, NULL);
break;
}
+ case 't':
+ GST_LOG_OBJECT (ogm, "Type: %s, s/u: %" G_GINT64_FORMAT
+ ", timeunit=%" G_GINT64_FORMAT,
+ ogm->hdr.streamtype, ogm->hdr.samples_per_unit,
+ ogm->hdr.time_unit);
+ caps = gst_caps_new_simple ("text/plain", NULL);
+ break;
default:
g_assert_not_reached ();
}
ogm->next_granulepos = GST_BUFFER_OFFSET_END (buf);
}
switch (ogm->hdr.streamtype[0]) {
- case 'v':
+ case 't':
+ case 'v':{
+ gint samples = (ogm->hdr.streamtype[0] == 'v') ? 1 : xsize;
+
if (keyframe)
GST_BUFFER_FLAG_SET (sbuf, GST_BUFFER_KEY_UNIT);
GST_BUFFER_TIMESTAMP (sbuf) = (GST_SECOND / 10000000) *
ogm->next_granulepos * ogm->hdr.time_unit;
GST_BUFFER_DURATION (sbuf) = (GST_SECOND / 10000000) *
- ogm->hdr.time_unit;
- ogm->next_granulepos++;
+ ogm->hdr.time_unit * samples;
+ ogm->next_granulepos += samples;
break;
+ }
case 'a':
GST_BUFFER_TIMESTAMP (sbuf) = GST_SECOND *
ogm->next_granulepos / ogm->hdr.samples_per_unit;
ogm->next_granulepos += xsize;
break;
default:
- g_assert_not_reached ();
+ gst_buffer_unref (sbuf);
+ sbuf = NULL;
+ GST_ELEMENT_ERROR (ogm, RESOURCE, SYNC, (NULL), (NULL));
+ break;
}
- gst_pad_push (ogm->srcpad, GST_DATA (sbuf));
+ if (sbuf)
+ gst_pad_push (ogm->srcpad, GST_DATA (sbuf));
} else {
GST_ELEMENT_ERROR (ogm, STREAM, WRONG_TYPE,
("Wrong packet startcode 0x%02x", data[0]), (NULL));
GST_DEBUG_CATEGORY_INIT (gst_ogm_parse_debug, "ogmparse", 0, "ogm parser");
return gst_element_register (plugin, "ogmaudioparse", GST_RANK_PRIMARY,
- GST_TYPE_OGM_AUDIO_PARSE)
- && gst_element_register (plugin, "ogmvideoparse", GST_RANK_PRIMARY,
- GST_TYPE_OGM_VIDEO_PARSE);
+ GST_TYPE_OGM_AUDIO_PARSE) &&
+ gst_element_register (plugin, "ogmvideoparse", GST_RANK_PRIMARY,
+ GST_TYPE_OGM_VIDEO_PARSE) &&
+ gst_element_register (plugin, "ogmtextparse", GST_RANK_PRIMARY,
+ GST_TYPE_OGM_TEXT_PARSE);
}
#include <gst/gst.h>
#include "gsttextoverlay.h"
+GST_DEBUG_CATEGORY_STATIC (pango_debug);
+#define GST_CAT_DEFAULT pango_debug
+
static GstElementDetails textoverlay_details = {
"Text Overlay",
"Filter/Editor/Video",
/* return GST_PAD_LINK_DONE; */
/* } */
+static GList *
+gst_textoverlay_linkedpads (GstPad * pad)
+{
+ GstPad *otherpad;
+ GstTextOverlay *overlay;
+
+ overlay = GST_TEXTOVERLAY (gst_pad_get_parent (pad));
+ if (pad == overlay->text_sinkpad)
+ return NULL;
+ otherpad = (pad == overlay->video_sinkpad) ?
+ overlay->srcpad : overlay->video_sinkpad;
+
+ return g_list_append (NULL, otherpad);
+}
static GstPadLinkReturn
-gst_textoverlay_video_sinkconnect (GstPad * pad, const GstCaps * caps)
+gst_textoverlay_link (GstPad * pad, const GstCaps * caps)
{
+ GstPad *otherpad;
GstTextOverlay *overlay;
GstStructure *structure;
+ GstPadLinkReturn ret;
overlay = GST_TEXTOVERLAY (gst_pad_get_parent (pad));
+ otherpad = (pad == overlay->video_sinkpad) ?
+ overlay->srcpad : overlay->video_sinkpad;
+
+ ret = gst_pad_try_set_caps (otherpad, caps);
+ if (GST_PAD_LINK_FAILED (ret))
+ return ret;
structure = gst_caps_get_structure (caps, 0);
overlay->width = overlay->height = 0;
gst_structure_get_int (structure, "width", &overlay->width);
gst_structure_get_int (structure, "height", &overlay->height);
- return gst_pad_try_set_caps (overlay->srcpad, caps);
+ return ret;
}
+static GstCaps *
+gst_textoverlay_getcaps (GstPad * pad)
+{
+ GstPad *otherpad;
+ GstTextOverlay *overlay;
+ GstCaps *caps, *rcaps;
+ const GstCaps *tcaps;
+
+ overlay = GST_TEXTOVERLAY (gst_pad_get_parent (pad));
+ otherpad = (pad == overlay->video_sinkpad) ?
+ overlay->srcpad : overlay->video_sinkpad;
+
+ caps = gst_pad_get_allowed_caps (otherpad);
+ tcaps = gst_pad_get_pad_template_caps (pad);
+ rcaps = gst_caps_intersect (caps, tcaps);
+ gst_caps_free (caps);
+
+ return rcaps;
+}
+
+static gboolean
+gst_textoverlay_event (GstPad * pad, GstEvent * event)
+{
+ GstTextOverlay *overlay = GST_TEXTOVERLAY (gst_pad_get_parent (pad));
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK &&
+ GST_PAD_IS_LINKED (overlay->text_sinkpad)) {
+ gst_event_ref (event);
+ gst_pad_send_event (GST_PAD_PEER (overlay->text_sinkpad), event);
+ }
+
+ return gst_pad_send_event (GST_PAD_PEER (overlay->video_sinkpad), event);
+}
static void
gst_text_overlay_blit_yuv420 (GstTextOverlay * overlay, FT_Bitmap * bitmap,
y0 = overlay->y0;
switch (overlay->valign) {
case GST_TEXT_OVERLAY_VALIGN_BOTTOM:
- y0 += overlay->bitmap.rows;
+ y0 = overlay->height - overlay->bitmap.rows - y0;
break;
- case GST_TEXT_OVERLAY_VALIGN_BASELINE:
+ case GST_TEXT_OVERLAY_VALIGN_BASELINE: /* ? */
y0 -= (overlay->bitmap.rows - overlay->baseline_y);
break;
case GST_TEXT_OVERLAY_VALIGN_TOP:
case GST_TEXT_OVERLAY_HALIGN_LEFT:
break;
case GST_TEXT_OVERLAY_HALIGN_RIGHT:
- x0 -= overlay->bitmap.width;
+ x0 = overlay->width - overlay->bitmap.width - x0;
break;
case GST_TEXT_OVERLAY_HALIGN_CENTER:
- x0 -= overlay->bitmap.width / 2;
+ x0 = (overlay->width - overlay->bitmap.width) / 2;
break;
}
gst_pad_push (overlay->srcpad, GST_DATA (buf));
}
-#define PAST_END(buffer, time) \
- (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE && \
- GST_BUFFER_DURATION (buffer) != GST_CLOCK_TIME_NONE && \
- GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) \
+#define GST_DATA_TIMESTAMP(data) \
+ (GST_IS_EVENT (data) ? \
+ GST_EVENT_TIMESTAMP (GST_EVENT (data)) : \
+ GST_BUFFER_TIMESTAMP (GST_BUFFER (data)))
+#define GST_DATA_DURATION(data) \
+ (GST_IS_EVENT (data) ? \
+ gst_event_filler_get_duration (GST_EVENT (data)) : \
+ GST_BUFFER_DURATION (GST_BUFFER (data)))
+
+#define PAST_END(data, time) \
+ (GST_DATA_TIMESTAMP (data) != GST_CLOCK_TIME_NONE && \
+ GST_DATA_DURATION (data) != GST_CLOCK_TIME_NONE && \
+ GST_DATA_TIMESTAMP (data) + GST_DATA_DURATION (data) \
< (time))
static void
g_return_if_fail (GST_IS_TEXTOVERLAY (element));
overlay = GST_TEXTOVERLAY (element);
- video_frame = GST_BUFFER (gst_pad_pull (overlay->video_sinkpad));
+ do {
+ GST_DEBUG ("Attempting to pull next video frame");
+ video_frame = GST_BUFFER (gst_pad_pull (overlay->video_sinkpad));
+ if (GST_IS_EVENT (video_frame)) {
+ GstEvent *event = GST_EVENT (video_frame);
+ GstEventType type = GST_EVENT_TYPE (event);
+
+ gst_pad_event_default (overlay->video_sinkpad, event);
+ GST_DEBUG ("Received event of type %d", type);
+ if (type == GST_EVENT_INTERRUPT)
+ return;
+ else if (type == GST_EVENT_EOS) {
+ /* EOS text stream */
+ GstData *data = NULL;
+
+ do {
+ if (data)
+ gst_data_unref (data);
+ data = gst_pad_pull (overlay->text_sinkpad);
+ } while (!GST_IS_EVENT (data) ||
+ GST_EVENT_TYPE (data) == GST_EVENT_EOS);
+ gst_data_unref (data);
+
+ return;
+ }
+ video_frame = NULL;
+ }
+ } while (!video_frame);
now = GST_BUFFER_TIMESTAMP (video_frame);
+ GST_DEBUG ("Got video frame, time=%" GST_TIME_FORMAT, GST_TIME_ARGS (now));
/*
* This state machine has a bug that can't be resolved easily.
* buffer timestamps and durations correctly. (I think)
*/
- while (overlay->next_buffer == NULL) {
- GST_DEBUG ("attempting to pull a buffer");
+ while ((!overlay->current_data ||
+ PAST_END (overlay->current_data, now)) &&
+ overlay->next_data == NULL) {
+ GST_DEBUG ("attempting to pull text data");
/* read all text buffers until we get one "in the future" */
if (!GST_PAD_IS_USABLE (overlay->text_sinkpad)) {
break;
}
- overlay->next_buffer = GST_BUFFER (gst_pad_pull (overlay->text_sinkpad));
- if (!overlay->next_buffer)
- break;
-
- if (PAST_END (overlay->next_buffer, now)) {
- gst_buffer_unref (overlay->next_buffer);
- overlay->next_buffer = NULL;
+ do {
+ overlay->next_data = gst_pad_pull (overlay->text_sinkpad);
+ if (GST_IS_EVENT (overlay->next_data) &&
+ GST_EVENT_TYPE (overlay->next_data) != GST_EVENT_FILLER) {
+ GstEvent *event = GST_EVENT (overlay->next_data);
+ GstEventType type = GST_EVENT_TYPE (event);
+
+ gst_event_unref (event);
+ if (type == GST_EVENT_EOS)
+ break;
+ else if (type == GST_EVENT_INTERRUPT)
+ return;
+ overlay->next_data = NULL;
+ }
+ } while (!overlay->next_data);
+
+ if (PAST_END (overlay->next_data, now)) {
+ GST_DEBUG ("Received %s is past end (%" GST_TIME_FORMAT " + %"
+ GST_TIME_FORMAT " < %" GST_TIME_FORMAT ")",
+ GST_IS_EVENT (overlay->next_data) ? "event" : "buffer",
+ GST_TIME_ARGS (GST_DATA_TIMESTAMP (overlay->next_data)),
+ GST_TIME_ARGS (GST_DATA_DURATION (overlay->next_data)),
+ GST_TIME_ARGS (now));
+ gst_data_unref (overlay->next_data);
+ overlay->next_data = NULL;
+ } else {
+ GST_DEBUG ("Received new text %s of time %" GST_TIME_FORMAT
+ "and duration %" GST_TIME_FORMAT,
+ GST_IS_EVENT (overlay->next_data) ? "event" : "buffer",
+ GST_TIME_ARGS (GST_DATA_TIMESTAMP (overlay->next_data)),
+ GST_TIME_ARGS (GST_DATA_DURATION (overlay->next_data)));
}
}
- if (overlay->next_buffer &&
- (GST_BUFFER_TIMESTAMP (overlay->next_buffer) <= now ||
- GST_BUFFER_TIMESTAMP (overlay->next_buffer) == GST_CLOCK_TIME_NONE)) {
- GST_DEBUG ("using new buffer");
+ if (overlay->next_data &&
+ (GST_DATA_TIMESTAMP (overlay->next_data) <= now ||
+ GST_DATA_TIMESTAMP (overlay->next_data) == GST_CLOCK_TIME_NONE)) {
+ GST_DEBUG ("using new %s",
+ GST_IS_EVENT (overlay->next_data) ? "event" : "buffer");
- if (overlay->current_buffer) {
- gst_buffer_unref (overlay->current_buffer);
+ if (overlay->current_data) {
+ gst_data_unref (overlay->current_data);
+ }
+ overlay->current_data = overlay->next_data;
+ overlay->next_data = NULL;
+
+ if (GST_IS_BUFFER (overlay->current_data)) {
+ guint size = GST_BUFFER_SIZE (overlay->current_data);
+ guint8 *data = GST_BUFFER_DATA (overlay->current_data);
+
+ while (size > 0 &&
+ (data[size - 1] == '\r' ||
+ data[size - 1] == '\n' || data[size - 1] == '\0'))
+ size--;
+
+ GST_DEBUG ("rendering '%*s'", size,
+ GST_BUFFER_DATA (overlay->current_data));
+ /* somehow pango barfs over "\0" buffers... */
+ pango_layout_set_markup (overlay->layout,
+ GST_BUFFER_DATA (overlay->current_data), size);
+ } else {
+ GST_DEBUG ("Filler - no data");
+ pango_layout_set_markup (overlay->layout, "", 0);
}
- overlay->current_buffer = overlay->next_buffer;
- overlay->next_buffer = NULL;
-
- GST_DEBUG ("rendering '%*s'",
- GST_BUFFER_SIZE (overlay->current_buffer),
- GST_BUFFER_DATA (overlay->current_buffer));
- pango_layout_set_markup (overlay->layout,
- GST_BUFFER_DATA (overlay->current_buffer),
- GST_BUFFER_SIZE (overlay->current_buffer));
render_text (overlay);
overlay->need_render = FALSE;
}
- if (overlay->current_buffer && PAST_END (overlay->current_buffer, now)) {
- GST_DEBUG ("dropping old buffer");
+ if (overlay->current_data && PAST_END (overlay->current_data, now)) {
+ GST_DEBUG ("dropping old %s",
+ GST_IS_EVENT (overlay->current_data) ? "event" : "buffer");
- gst_buffer_unref (overlay->current_buffer);
- overlay->current_buffer = NULL;
+ gst_buffer_unref (overlay->current_data);
+ overlay->current_data = NULL;
overlay->need_render = TRUE;
}
overlay->video_sinkpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&video_sink_template_factory), "video_sink");
-/* gst_pad_set_chain_function(overlay->video_sinkpad, gst_textoverlay_video_chain); */
- gst_pad_set_link_function (overlay->video_sinkpad,
- gst_textoverlay_video_sinkconnect);
+ gst_pad_set_link_function (overlay->video_sinkpad, gst_textoverlay_link);
+ gst_pad_set_getcaps_function (overlay->video_sinkpad,
+ gst_textoverlay_getcaps);
+ gst_pad_set_internal_link_function (overlay->video_sinkpad,
+ gst_textoverlay_linkedpads);
gst_element_add_pad (GST_ELEMENT (overlay), overlay->video_sinkpad);
/* text sink */
overlay->text_sinkpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&text_sink_template_factory), "text_sink");
-/* gst_pad_set_link_function(overlay->text_sinkpad, gst_textoverlay_text_sinkconnect); */
+ gst_pad_set_internal_link_function (overlay->text_sinkpad,
+ gst_textoverlay_linkedpads);
gst_element_add_pad (GST_ELEMENT (overlay), overlay->text_sinkpad);
/* (video) source */
overlay->srcpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&textoverlay_src_template_factory), "src");
+ gst_pad_set_link_function (overlay->srcpad, gst_textoverlay_link);
+ gst_pad_set_getcaps_function (overlay->srcpad, gst_textoverlay_getcaps);
+ gst_pad_set_internal_link_function (overlay->srcpad,
+ gst_textoverlay_linkedpads);
+ gst_pad_set_event_function (overlay->srcpad, gst_textoverlay_event);
gst_element_add_pad (GST_ELEMENT (overlay), overlay->srcpad);
overlay->layout =
overlay->halign = GST_TEXT_OVERLAY_HALIGN_CENTER;
overlay->valign = GST_TEXT_OVERLAY_VALIGN_BASELINE;
- overlay->x0 = overlay->y0 = 0;
+ overlay->x0 = overlay->y0 = 25;
overlay->default_text = g_strdup ("");
overlay->need_render = TRUE;
gst_element_set_loop_function (GST_ELEMENT (overlay), gst_textoverlay_loop);
+
+ GST_FLAG_SET (overlay, GST_ELEMENT_EVENT_AWARE);
}
/*texttestsrc_plugin_init(module, plugin); */
/*subparse_plugin_init(module, plugin); */
+
+ GST_DEBUG_CATEGORY_INIT (pango_debug, "pango", 0, "Pango elements");
+
return TRUE;
}
GstTextOverlayHAlign halign;
gint x0;
gint y0;
- GstBuffer *current_buffer;
- GstBuffer *next_buffer;
+ GstData *current_data;
+ GstData *next_data;
gchar *default_text;
gboolean need_render;
};
/* first see if this is raw. If the type is raw, we can
* create a ghostpad for this pad. */
if (g_str_has_prefix (mimetype, "video/x-raw") ||
- g_str_has_prefix (mimetype, "audio/x-raw")) {
+ g_str_has_prefix (mimetype, "audio/x-raw") ||
+ g_str_has_prefix (mimetype, "text/plain")) {
gchar *padname;
GstPad *ghost;
play_base_bin->need_rebuild = TRUE;
play_base_bin->source = NULL;
play_base_bin->decoder = NULL;
+ play_base_bin->subtitles = NULL;
play_base_bin->group_lock = g_mutex_new ();
play_base_bin->group_cond = g_cond_new ();
name = g_strdup_printf ("preroll_%s", gst_pad_get_name (pad));
element = gst_element_factory_make ("queue", name);
- g_object_set (G_OBJECT (element), "max-size-buffers", 0, NULL);
- g_object_set (G_OBJECT (element), "max-size-bytes", 0, NULL);
- g_object_set (G_OBJECT (element), "max-size-time", play_base_bin->queue_size,
- NULL);
- sig =
- g_signal_connect (G_OBJECT (element), "overrun",
+ g_object_set (G_OBJECT (element),
+ "max-size-buffers", 0, "max-size-bytes", 10 * 1024 * 1024,
+ "max-size-time", play_base_bin->queue_size, NULL);
+ sig = g_signal_connect (G_OBJECT (element), "overrun",
G_CALLBACK (queue_overrun), play_base_bin);
/* keep a ref to the signal id so that we can disconnect the signal callback
* when we are done with the preroll */
static void
remove_groups (GstPlayBaseBin * play_base_bin)
{
- GList *groups;
+ GList *groups, *item;
/* first destroy the group we were building if any */
if (play_base_bin->building_group) {
}
g_list_free (play_base_bin->queued_groups);
play_base_bin->queued_groups = NULL;
+
+ /* clear subs */
+ for (item = play_base_bin->subtitles; item; item = item->next) {
+ gst_bin_remove (GST_BIN (play_base_bin->thread), item->data);
+ }
+ g_list_free (play_base_bin->subtitles);
+ play_base_bin->subtitles = NULL;
}
/* Add/remove a single stream to current building group.
case GST_STREAM_TYPE_VIDEO:
group->nvideopads++;
break;
+ case GST_STREAM_TYPE_TEXT:
+ group->ntextpads++;
+ break;
default:
group->nunknownpads++;
break;
if (group->nvideopads == 0) {
need_preroll = TRUE;
}
+ } else if (g_str_has_prefix (mimetype, "text/")) {
+ type = GST_STREAM_TYPE_TEXT;
+ /* first text pad gets a preroll element */
+ if (group->ntextpads == 0) {
+ need_preroll = TRUE;
+ }
} else {
type = GST_STREAM_TYPE_UNKNOWN;
}
}
/*
+ * Generate source ! subparse bins.
+ */
+
+static GList *
+setup_subtitles (GstPlayBaseBin * play_base_bin, gchar * sub_uri[])
+{
+ GstElement *source, *subparse, *bin;
+ gint n;
+ gchar *name;
+ GList *subtitles = NULL;
+
+ for (n = 0; sub_uri[n]; n++) {
+ source = gst_element_make_from_uri (GST_URI_SRC, sub_uri[n], NULL);
+ if (!source)
+ continue;
+
+ subparse = gst_element_factory_make ("subparse", NULL);
+ name = g_strdup_printf ("subbin%d", n);
+ bin = gst_thread_new (name);
+ g_free (name);
+
+ gst_bin_add_many (GST_BIN (bin), source, subparse, NULL);
+ gst_element_link (source, subparse);
+ gst_element_add_ghost_pad (bin,
+ gst_element_get_pad (subparse, "src"), "src");
+ subtitles = g_list_append (subtitles, bin);
+ }
+
+ return subtitles;
+}
+
+/*
* Generate a source element that does caching for network streams.
*/
static GstElement *
-gen_source_element (GstPlayBaseBin * play_base_bin)
+gen_source_element (GstPlayBaseBin * play_base_bin, GList ** subbins)
{
GstElement *source, *queue, *bin;
GstProbe *probe;
gboolean is_stream;
+ gchar **src, **subs, *uri;
+
+ /* create subtitle elements */
+ src = g_strsplit (play_base_bin->uri, "#", 2);
+ if (!src[0])
+ return NULL;
+ if (src[1]) {
+ subs = g_strsplit (src[1], ",", 8);
+ *subbins = setup_subtitles (play_base_bin, subs);
+ g_strfreev (subs);
+ } else {
+ *subbins = NULL;
+ }
+ uri = src[0];
+ src[0] = NULL;
+ g_strfreev (src);
- source =
- gst_element_make_from_uri (GST_URI_SRC, play_base_bin->uri, "source");
+ source = gst_element_make_from_uri (GST_URI_SRC, uri, "source");
if (!source)
return NULL;
GstElement *old_src;
GstElement *old_dec;
GstPad *srcpad = NULL;
+ GList *new_subs, *item;
if (!play_base_bin->need_rebuild)
return TRUE;
old_src = play_base_bin->source;
/* create and configure an element that can handle the uri */
- play_base_bin->source = gen_source_element (play_base_bin);
+ play_base_bin->source = gen_source_element (play_base_bin, &new_subs);
if (!play_base_bin->source) {
/* whoops, could not create the source element */
/* remove our previous preroll queues */
remove_groups (play_base_bin);
+ /* do subs */
+ if (new_subs) {
+ play_base_bin->subtitles = new_subs;
+ for (item = play_base_bin->subtitles; item; item = item->next) {
+ GstElement *bin = item->data;
+
+ /* don't add yet, because we will preroll, and subs shouldn't
+       * preroll (we shouldn't preroll more than one source). */
+ new_decoded_pad (bin, gst_element_get_pad (bin, "src"), FALSE,
+ play_base_bin);
+ gst_element_set_state (bin, GST_STATE_PAUSED);
+ }
+ }
+
/* now see if the source element emits raw audio/video all by itself,
* if so, we can create streams for the pads and be done with it.
* Also check that is has source pads, if not, we assume it will
play_base_bin->need_rebuild = FALSE;
}
+ /* make subs iterate from now on */
+ for (item = play_base_bin->subtitles; item; item = item->next) {
+ gst_bin_add (GST_BIN (play_base_bin->thread), item->data);
+ }
+
return TRUE;
}
gint naudiopads;
gint nvideopads;
+ gint ntextpads;
gint nunknownpads;
GList *preroll_elems;
gchar *uri;
GstElement *source;
GstElement *decoder;
+ GList *subtitles; /* additional filesrc ! subparse bins */
gboolean need_rebuild;
/* group management */
return element;
}
+/* make an element for playback of video with subtitles embedded.
+ *
+ * +--------------------------------------------------+
+ * | tbin +-------------+ |
+ * | +-----+ | textoverlay | +------+ |
+ * | | csp | +--video_sink | | vbin | |
+ * video_sink-sink src+ +-text_sink src-sink | |
+ * | +-----+ | +-------------+ +------+ |
+ * text_sink-------------+ |
+ * +--------------------------------------------------+
+ */
+
+static GstElement *
+gen_text_element (GstPlayBin * play_bin)
+{
+ GstElement *element, *csp, *overlay, *vbin;
+
+ overlay = gst_element_factory_make ("textoverlay", "overlay");
+ g_object_set (G_OBJECT (overlay),
+ "halign", "center", "valign", "bottom", NULL);
+ vbin = gen_video_element (play_bin);
+ if (!overlay) {
+ g_warning ("No overlay (pango) element, subtitles disabled");
+ return vbin;
+ }
+ csp = gst_element_factory_make ("ffmpegcolorspace", "subtitlecsp");
+ element = gst_bin_new ("textbin");
+ gst_element_link_many (csp, overlay, vbin, NULL);
+ gst_bin_add_many (GST_BIN (element), csp, overlay, vbin, NULL);
+
+ gst_element_add_ghost_pad (element,
+ gst_element_get_pad (overlay, "text_sink"), "text_sink");
+ gst_element_add_ghost_pad (element,
+ gst_element_get_pad (csp, "sink"), "sink");
+
+ return element;
+}
+
/* make the element (bin) that contains the elements needed to perform
* audio playback.
*
* | | | | +-------------------+ |
* | | +------+ |
* sink-+ |
- * +--------------------------------------------------------------------------+
+ +--------------------------------------------------------------------------+
*/
static GstElement *
gen_vis_element (GstPlayBin * play_bin)
GList *s;
gint num_audio = 0;
gint num_video = 0;
+ gint num_text = 0;
gboolean need_vis = FALSE;
+ gboolean need_text = FALSE;
+ GstPad *textsrcpad = NULL, *textsinkpad = NULL;
/* FIXME: do this nicer, like taking a look at the installed
* bins and figuring out if we can simply reconnect them, remove
num_audio++;
} else if (type == 2) {
num_video++;
+ } else if (type == 3) {
+ num_text++;
}
}
/* no video, use vis */
if (num_video == 0 && num_audio > 0 && play_bin->visualisation) {
need_vis = TRUE;
+ } else if (num_video > 0 && num_text > 0) {
+ need_text = TRUE;
}
num_audio = 0;
num_video = 0;
+ num_text = 0;
/* now actually connect everything */
for (s = streaminfo; s; s = g_list_next (s)) {
g_warning ("two video streams found, playing first one");
mute = TRUE;
} else {
- sink = gen_video_element (play_bin);
+ if (need_text) {
+ sink = gen_text_element (play_bin);
+ textsinkpad = gst_element_get_pad (sink, "text_sink");
+ } else {
+ sink = gen_video_element (play_bin);
+ }
num_video++;
}
+ } else if (type == 3) {
+ if (num_text > 0) {
+ g_warning ("two subtitle streams found, playing first one");
+ mute = TRUE;
+ } else {
+ textsrcpad = srcpad;
+ num_text++;
+ }
} else if (type == 4) {
/* we can ignore these streams here */
} else {
g_object_set (G_OBJECT (obj), "mute", TRUE, NULL);
}
}
+
+ /* if subtitles, link */
+ if (textsrcpad && num_video > 0) {
+ gst_pad_link (textsrcpad, textsinkpad);
+ }
}
static GstElementStateReturn
--- /dev/null
# Build the subtitle-parser plugin as a loadable GStreamer module.
plugin_LTLIBRARIES = libgstsubparse.la

libgstsubparse_la_SOURCES = \
	gstsubparse.c

libgstsubparse_la_CFLAGS = $(GST_CFLAGS)
libgstsubparse_la_LIBADD =
libgstsubparse_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)

# Not installed; listed so the header is included in the dist tarball.
noinst_HEADERS = \
	gstsubparse.h
--- /dev/null
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (c) 2004 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <regex.h>
+
+#include "gstsubparse.h"
+
+GST_DEBUG_CATEGORY_STATIC (subparse_debug);
+#define GST_CAT_DEFAULT subparse_debug
+
/* format enum */
typedef enum
{
  GST_SUB_PARSE_FORMAT_UNKNOWN = 0,     /* autodetection failed / not yet run */
  GST_SUB_PARSE_FORMAT_MDVDSUB = 1,     /* MicroDVD: frame-based "{n}{m}text" */
  GST_SUB_PARSE_FORMAT_SUBRIP = 2,      /* SubRip (.srt): "hh:mm:ss,mmm --> ..." */
  GST_SUB_PARSE_FORMAT_MPSUB = 3        /* MPSub: "FORMAT=TIME" header, float pairs */
} GstSubParseFormat;
+
/* Sink pad: raw subtitle file data, as typefound upstream. */
static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-subtitle")
    );

/* Src pad: plain text (SubRip/MPSub) or Pango markup (MicroDVD);
 * the concrete caps are set explicitly after format autodetection. */
static GstStaticPadTemplate src_templ = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("text/plain; text/x-pango-markup")
    );
+
+static void gst_subparse_base_init (GstSubparseClass * klass);
+static void gst_subparse_class_init (GstSubparseClass * klass);
+static void gst_subparse_init (GstSubparse * subparse);
+
+static const GstFormat *gst_subparse_formats (GstPad * pad);
+static const GstEventMask *gst_subparse_eventmask (GstPad * pad);
+static gboolean gst_subparse_event (GstPad * pad, GstEvent * event);
+
+static GstElementStateReturn gst_subparse_change_state (GstElement * element);
+static void gst_subparse_loop (GstElement * element);
+
+#if 0
+static GstCaps *gst_subparse_type_find (GstBuffer * buf, gpointer private);
+#endif
+
+static GstElementClass *parent_class = NULL;
+
+GType
+gst_subparse_get_type (void)
+{
+ static GType subparse_type = 0;
+
+ if (!subparse_type) {
+ static const GTypeInfo subparse_info = {
+ sizeof (GstSubparseClass),
+ (GBaseInitFunc) gst_subparse_base_init,
+ NULL,
+ (GClassInitFunc) gst_subparse_class_init,
+ NULL,
+ NULL,
+ sizeof (GstSubparse),
+ 0,
+ (GInstanceInitFunc) gst_subparse_init,
+ };
+
+ subparse_type = g_type_register_static (GST_TYPE_ELEMENT,
+ "GstSubparse", &subparse_info, 0);
+ }
+
+ return subparse_type;
+}
+
+static void
+gst_subparse_base_init (GstSubparseClass * klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ static GstElementDetails subparse_details = {
+ "Subtitle parsers",
+ "Codec/Parser/Subtitle",
+ "Parses subtitle (.sub) files into text streams",
+ "Gustavo J. A. M. Carneiro <gjc@inescporto.pt>\n"
+ "Ronald S. Bultje <rbultje@ronald.bitfreak.net>"
+ };
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_templ));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_templ));
+ gst_element_class_set_details (element_class, &subparse_details);
+}
+
+static void
+gst_subparse_class_init (GstSubparseClass * klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
+
+ element_class->change_state = gst_subparse_change_state;
+}
+
/* Instance init: create sink/src pads, install pad functions and the
 * element loop function, and reset all parser state. */
static void
gst_subparse_init (GstSubparse * subparse)
{
  /* sink pad: raw subtitle data is pulled from here in the loop function */
  subparse->sinkpad =
      gst_pad_new_from_template (gst_static_pad_template_get (&sink_templ),
      "sink");
  gst_element_add_pad (GST_ELEMENT (subparse), subparse->sinkpad);

  /* src pad: explicit caps are set once the format has been detected */
  subparse->srcpad =
      gst_pad_new_from_template (gst_static_pad_template_get (&src_templ),
      "src");
  gst_pad_use_explicit_caps (subparse->srcpad);
  gst_pad_set_formats_function (subparse->srcpad, gst_subparse_formats);
  gst_pad_set_event_function (subparse->srcpad, gst_subparse_event);
  gst_pad_set_event_mask_function (subparse->srcpad, gst_subparse_eventmask);
  gst_element_add_pad (GST_ELEMENT (subparse), subparse->srcpad);

  gst_element_set_loop_function (GST_ELEMENT (subparse), gst_subparse_loop);

  /* no parser installed until the first buffer has been autodetected */
  subparse->textbuf = g_string_new (NULL);
  subparse->parser.type = GST_SUB_PARSE_FORMAT_UNKNOWN;
  subparse->parser_detected = FALSE;
  subparse->seek_time = GST_CLOCK_TIME_NONE;
  subparse->flush = FALSE;
}
+
+/*
+ * Source pad functions.
+ */
+
+static const GstFormat *
+gst_subparse_formats (GstPad * pad)
+{
+ static const GstFormat formats[] = {
+ GST_FORMAT_TIME,
+ 0
+ };
+
+ return formats;
+}
+
+static const GstEventMask *
+gst_subparse_eventmask (GstPad * pad)
+{
+ static const GstEventMask masks[] = {
+ {GST_EVENT_SEEK, GST_SEEK_METHOD_SET},
+ {0, 0}
+ };
+
+ return masks;
+}
+
+static gboolean
+gst_subparse_event (GstPad * pad, GstEvent * event)
+{
+ GstSubparse *self = GST_SUBPARSE (gst_pad_get_parent (pad));
+ gboolean res = FALSE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ if (!(GST_EVENT_SEEK_FORMAT (event) == GST_FORMAT_TIME &&
+ GST_EVENT_SEEK_METHOD (event) == GST_SEEK_METHOD_SET))
+ break;
+ self->seek_time = GST_EVENT_SEEK_OFFSET (event);
+ res = TRUE;
+ break;
+ default:
+ break;
+ }
+
+ gst_event_unref (event);
+
+ return res;
+}
+
+/*
+ * TRUE = continue, FALSE = stop.
+ */
+
+static gboolean
+gst_subparse_handle_event (GstSubparse * self, GstEvent * event)
+{
+ gboolean res = TRUE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_INTERRUPT:
+ gst_event_unref (event);
+ res = FALSE;
+ break;
+ case GST_EVENT_EOS:
+ res = FALSE;
+ /* fall-through */
+ default:
+ gst_pad_event_default (self->sinkpad, event);
+ break;
+ }
+
+ return res;
+}
+
+static gchar *
+convert_encoding (GstSubparse * self, const gchar * str, gsize len)
+{
+ gsize bytes_read, bytes_written;
+ gchar *rv;
+ GString *converted;
+
+ converted = g_string_new (NULL);
+ while (len) {
+ GST_DEBUG ("Trying to convert '%s'", g_strndup (str, len));
+ rv = g_locale_to_utf8 (str, len, &bytes_read, &bytes_written, NULL);
+ g_string_append_len (converted, rv, bytes_written);
+ len -= bytes_read;
+ str += bytes_read;
+ if (len) {
+ /* conversion error ocurred => skip one char */
+ len--;
+ str++;
+ g_string_append_c (converted, '?');
+ }
+ }
+ rv = converted->str;
+ g_string_free (converted, FALSE);
+ GST_DEBUG ("Converted to '%s'", rv);
+ return rv;
+}
+
+static gchar *
+get_next_line (GstSubparse * self)
+{
+ GstBuffer *buf;
+ const char *line_end;
+ int line_len;
+ gboolean have_r = FALSE;
+ gchar *line;
+
+ if ((line_end = strchr (self->textbuf->str, '\n')) == NULL) {
+ /* end-of-line not found; try to get more data */
+ buf = NULL;
+ do {
+ GstData *data = gst_pad_pull (self->sinkpad);
+
+ if (GST_IS_EVENT (data)) {
+ if (!gst_subparse_handle_event (self, GST_EVENT (data)))
+ return NULL;
+ } else {
+ buf = GST_BUFFER (data);
+ }
+ } while (!buf);
+ self->textbuf = g_string_append_len (self->textbuf,
+ GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ gst_buffer_unref (buf);
+ /* search for end-of-line again */
+ line_end = strchr (self->textbuf->str, '\n');
+ }
+ /* get rid of '\r' */
+ if ((int) (line_end - self->textbuf->str) > 0 &&
+ self->textbuf->str[(int) (line_end - self->textbuf->str) - 1] == '\r') {
+ line_end--;
+ have_r = TRUE;
+ }
+
+ if (line_end) {
+ line_len = line_end - self->textbuf->str;
+ line = convert_encoding (self, self->textbuf->str, line_len);
+ self->textbuf = g_string_erase (self->textbuf, 0,
+ line_len + (have_r ? 2 : 1));
+ return line;
+ }
+ return NULL;
+}
+
/* Parse one MicroDVD entry: "{start}{end}{style}text|text...", where
 * start/end are frame numbers.  Fills in start/end times (nanoseconds)
 * and returns newly-allocated Pango markup, or NULL on EOS/interrupt
 * or a malformed line. */
static gchar *
parse_mdvdsub (GstSubparse * self, guint64 * out_start_time,
    guint64 * out_end_time, gboolean after_seek)
{
  gchar *line, *line_start, *line_split, *line_chunk;
  guint start_frame, end_frame;

  /* FIXME: hardcoded for now, but detecting the correct value is
   * not going to be easy, I suspect... */
  const double frames_per_sec = 23.98;
  GString *markup;
  gchar *rv;

  /* style variables */
  gboolean italic;
  gboolean bold;
  guint fontsize;

  line = line_start = get_next_line (self);
  if (!line)
    return NULL;

  if (sscanf (line, "{%u}{%u}", &start_frame, &end_frame) != 2) {
    g_warning ("Parse of the following line, assumed to be in microdvd .sub"
        " format, failed:\n%s", line);
    g_free (line_start);
    return NULL;
  }
  /* NOTE(review): the "- 1000" frame offset is unexplained and wraps
   * (guint) for frames below 1000 -- TODO confirm intent. */
  *out_start_time = (start_frame - 1000) / frames_per_sec * GST_SECOND;
  *out_end_time = (end_frame - 1000) / frames_per_sec * GST_SECOND;
  /* skip the {%u}{%u} part */
  line = strchr (line, '}') + 1;
  line = strchr (line, '}') + 1;

  markup = g_string_new (NULL);
  while (1) {
    italic = FALSE;
    bold = FALSE;
    fontsize = 0;
    /* parse style markup */
    if (strncmp (line, "{y:i}", 5) == 0) {
      italic = TRUE;
      line = strchr (line, '}') + 1;
    }
    if (strncmp (line, "{y:b}", 5) == 0) {
      bold = TRUE;
      line = strchr (line, '}') + 1;
    }
    if (sscanf (line, "{s:%u}", &fontsize) == 1) {
      line = strchr (line, '}') + 1;
    }
    /* '|' separates sub-lines within one subtitle entry */
    if ((line_split = strchr (line, '|')))
      line_chunk = g_markup_escape_text (line, line_split - line);
    else
      line_chunk = g_markup_escape_text (line, strlen (line));
    markup = g_string_append (markup, "<span");
    if (italic)
      g_string_append (markup, " style=\"italic\"");
    if (bold)
      g_string_append (markup, " weight=\"bold\"");
    if (fontsize)
      g_string_append_printf (markup, " size=\"%u\"", fontsize * 1000);
    g_string_append_printf (markup, ">%s</span>", line_chunk);
    g_free (line_chunk);
    if (line_split) {
      g_string_append (markup, "\n");
      line = line_split + 1;
    } else
      break;
  }
  rv = markup->str;
  g_string_free (markup, FALSE);
  g_free (line_start);
  GST_DEBUG ("parse_mdvdsub returning (start=%f, end=%f): %s",
      *out_start_time / (double) GST_SECOND,
      *out_end_time / (double) GST_SECOND, rv);
  return rv;
}
+
/* Install the MicroDVD parser.  There is no per-parser state to free,
 * so deinit is deliberately left NULL -- anyone invoking parser.deinit
 * must NULL-check it first. */
static void
parse_mdvdsub_init (GstSubparse * self)
{
  self->parser.deinit = NULL;
  self->parser.parse = parse_mdvdsub;
}
+
+static gchar *
+parse_subrip (GstSubparse * self, guint64 * out_start_time,
+ guint64 * out_end_time, gboolean after_seek)
+{
+ gchar *line;
+ guint h1, m1, s1, ms1;
+ guint h2, m2, s2, ms2;
+ int subnum;
+
+ while (1) {
+ switch (self->state.subrip.state) {
+ case 0:
+ /* looking for a single integer */
+ line = get_next_line (self);
+ if (!line)
+ return NULL;
+ if (sscanf (line, "%u", &subnum) == 1)
+ self->state.subrip.state = 1;
+ g_free (line);
+ break;
+ case 1:
+ /* looking for start_time --> end_time */
+ line = get_next_line (self);
+ if (!line)
+ return NULL;
+ if (sscanf (line, "%u:%u:%u,%u --> %u:%u:%u,%u",
+ &h1, &m1, &s1, &ms1, &h2, &m2, &s2, &ms2) == 8) {
+ self->state.subrip.state = 2;
+ self->state.subrip.time1 =
+ (((guint64) h1) * 3600 + m1 * 60 + s1) * GST_SECOND +
+ ms1 * GST_MSECOND;
+ self->state.subrip.time2 =
+ (((guint64) h2) * 3600 + m2 * 60 + s2) * GST_SECOND +
+ ms2 * GST_MSECOND;
+ } else {
+ GST_DEBUG (0, "error parsing subrip time line");
+ self->state.subrip.state = 0;
+ }
+ g_free (line);
+ break;
+ case 2:
+ /* looking for subtitle text; empty line ends this
+ * subtitle entry */
+ line = get_next_line (self);
+ if (!line)
+ return NULL;
+ if (self->state.subrip.buf->len)
+ g_string_append_c (self->state.subrip.buf, '\n');
+ g_string_append (self->state.subrip.buf, line);
+ if (strlen (line) == 0) {
+ gchar *rv;
+
+ g_free (line);
+ *out_start_time = self->state.subrip.time1;
+ *out_end_time = self->state.subrip.time2;
+ rv = g_markup_escape_text (self->state.subrip.buf->str,
+ self->state.subrip.buf->len);
+ rv = g_strdup (self->state.subrip.buf->str);
+ g_string_truncate (self->state.subrip.buf, 0);
+ self->state.subrip.state = 0;
+ return rv;
+ }
+ g_free (line);
+ }
+ }
+}
+
/* Release the SubRip text-accumulation buffer allocated by
 * parse_subrip_init(). */
static void
parse_subrip_deinit (GstSubparse * self)
{
  g_string_free (self->state.subrip.buf, TRUE);
}
+
+static void
+parse_subrip_init (GstSubparse * self)
+{
+ self->state.subrip.state = 0;
+ self->state.subrip.buf = g_string_new (NULL);
+ self->parser.parse = parse_subrip;
+ self->parser.deinit = parse_subrip_deinit;
+}
+
+
+static gchar *
+parse_mpsub (GstSubparse * self, guint64 * out_start_time,
+ guint64 * out_end_time, gboolean after_seek)
+{
+ gchar *line;
+ float t1, t2;
+
+ if (after_seek) {
+ self->state.mpsub.time = 0;
+ }
+
+ while (1) {
+ switch (self->state.mpsub.state) {
+ case 0:
+ /* looking for two floats (offset, duration) */
+ line = get_next_line (self);
+ if (!line)
+ return NULL;
+ if (sscanf (line, "%f %f", &t1, &t2) == 2) {
+ self->state.mpsub.state = 1;
+ self->state.mpsub.time += GST_SECOND * t1;
+ }
+ g_free (line);
+ break;
+ case 1:
+ /* looking for subtitle text; empty line ends this
+ * subtitle entry */
+ line = get_next_line (self);
+ if (!line)
+ return NULL;
+ if (self->state.mpsub.buf->len)
+ g_string_append_c (self->state.mpsub.buf, '\n');
+ g_string_append (self->state.mpsub.buf, line);
+ if (strlen (line) == 0) {
+ gchar *rv;
+
+ g_free (line);
+ *out_start_time = self->state.mpsub.time;
+ *out_end_time = self->state.mpsub.time + t2 * GST_SECOND;
+ self->state.mpsub.time += t2 * GST_SECOND;
+ rv = g_markup_escape_text (self->state.mpsub.buf->str,
+ self->state.mpsub.buf->len);
+ rv = g_strdup (self->state.mpsub.buf->str);
+ g_string_truncate (self->state.mpsub.buf, 0);
+ self->state.mpsub.state = 0;
+ return rv;
+ }
+ g_free (line);
+ break;
+ }
+ }
+
+ return NULL;
+}
+
/* Release the MPSub text-accumulation buffer allocated by
 * parse_mpsub_init(). */
static void
parse_mpsub_deinit (GstSubparse * self)
{
  g_string_free (self->state.mpsub.buf, TRUE);
}
+
+static void
+parse_mpsub_init (GstSubparse * self)
+{
+ self->state.mpsub.state = 0;
+ self->state.mpsub.buf = g_string_new (NULL);
+ self->parser.deinit = parse_mpsub_deinit;
+ self->parser.parse = parse_mpsub;
+}
+
+/*
+ * FIXME: maybe we should pass along a second argument, the preceding
+ * text buffer, because that is how this originally worked, even though
+ * I don't really see the use of that.
+ */
+
+static GstSubParseFormat
+gst_subparse_buffer_format_autodetect (GstBuffer * buf)
+{
+ static gboolean need_init_regexps = TRUE;
+ static regex_t mdvd_rx;
+ static regex_t subrip_rx;
+ const gchar *str = GST_BUFFER_DATA (buf);
+
+ /* initialize the regexps used the first time around */
+ if (need_init_regexps) {
+ int err;
+ char errstr[128];
+
+ need_init_regexps = FALSE;
+ regcomp (&mdvd_rx, "^\\{[0-9]+\\}\\{[0-9]+\\}",
+ REG_EXTENDED | REG_NEWLINE | REG_NOSUB);
+ if ((err = regcomp (&subrip_rx, "^1\x0d\x0a"
+ "[0-9][0-9]:[0-9][0-9]:[0-9][0-9],[0-9]{3}"
+ " --> [0-9][0-9]:[0-9][0-9]:[0-9][0-9],[0-9]{3}",
+ REG_EXTENDED | REG_NEWLINE | REG_NOSUB)) != 0) {
+ regerror (err, &subrip_rx, errstr, 127);
+ GST_WARNING ("Compilation of subrip regex failed: %s", errstr);
+ }
+ }
+
+ if (regexec (&mdvd_rx, str, 0, NULL, 0) == 0) {
+ GST_LOG ("subparse: MicroDVD (frame based) format detected");
+ return GST_SUB_PARSE_FORMAT_MDVDSUB;
+ }
+ if (regexec (&subrip_rx, str, 0, NULL, 0) == 0) {
+ GST_LOG ("subparse: SubRip (time based) format detected");
+ return GST_SUB_PARSE_FORMAT_SUBRIP;
+ }
+ if (!strncmp (str, "FORMAT=TIME", 11)) {
+ GST_LOG ("subparse: MPSub (time based) format detected");
+ return GST_SUB_PARSE_FORMAT_MPSUB;
+ }
+ GST_WARNING ("subparse: subtitle format autodetection failed!");
+ return GST_SUB_PARSE_FORMAT_UNKNOWN;
+}
+
/* Pull the first buffer, autodetect the subtitle format from it,
 * install the matching parser and set the src pad caps accordingly.
 * Returns FALSE on EOS/interrupt or when no format could be detected. */
static gboolean
gst_subparse_format_autodetect (GstSubparse * self)
{
  GstBuffer *buf = NULL;
  GstSubParseFormat format;
  gboolean res = TRUE;

  do {
    GstData *data = gst_pad_pull (self->sinkpad);

    if (GST_IS_EVENT (data)) {
      if (!gst_subparse_handle_event (self, GST_EVENT (data)))
        return FALSE;
    } else {
      buf = GST_BUFFER (data);
    }
  } while (!buf);
  /* keep the data for the parser; detection only inspects the buffer */
  self->textbuf = g_string_append_len (self->textbuf, GST_BUFFER_DATA (buf),
      GST_BUFFER_SIZE (buf));
  format = gst_subparse_buffer_format_autodetect (buf);
  gst_buffer_unref (buf);
  self->parser_detected = TRUE;
  self->parser.type = format;
  switch (format) {
    case GST_SUB_PARSE_FORMAT_MDVDSUB:
      /* MicroDVD entries carry style markup, hence pango-markup caps */
      GST_DEBUG ("MicroDVD format detected");
      parse_mdvdsub_init (self);
      res = gst_pad_set_explicit_caps (self->srcpad,
          gst_caps_new_simple ("text/x-pango-markup", NULL));
      break;
    case GST_SUB_PARSE_FORMAT_SUBRIP:
      GST_DEBUG ("SubRip format detected");
      parse_subrip_init (self);
      res = gst_pad_set_explicit_caps (self->srcpad,
          gst_caps_new_simple ("text/plain", NULL));
      break;
    case GST_SUB_PARSE_FORMAT_MPSUB:
      GST_DEBUG ("MPSub format detected");
      parse_mpsub_init (self);
      res = gst_pad_set_explicit_caps (self->srcpad,
          gst_caps_new_simple ("text/plain", NULL));
      break;
    case GST_SUB_PARSE_FORMAT_UNKNOWN:
    default:
      GST_DEBUG ("no subtitle format detected");
      GST_ELEMENT_ERROR (self, STREAM, WRONG_TYPE,
          ("The input is not a valid/supported subtitle file"), (NULL));
      res = FALSE;
      break;
  }

  return res;
}
+
/*
 * parse input, getting a start and end time
 * then parse next input, and if next start time > current end time, send
 * clear buffer.
 */

static void
gst_subparse_loop (GstElement * element)
{
  GstSubparse *self;
  GstBuffer *buf;
  guint64 start_time, end_time, need_time = GST_CLOCK_TIME_NONE;
  gchar *subtitle;
  gboolean after_seek = FALSE;

  GST_DEBUG ("gst_subparse_loop");
  self = GST_SUBPARSE (element);

  /* make sure we know the format */
  if (!self->parser_detected) {
    if (!gst_subparse_format_autodetect (self))
      return;
  }

  /* handle seeks: seek upstream to byte 0, then skip parsed entries
   * below (need_time) until the requested time is reached again */
  if (GST_CLOCK_TIME_IS_VALID (self->seek_time)) {
    GstEvent *seek;

    seek = gst_event_new_seek (GST_SEEK_FLAG_FLUSH | GST_FORMAT_BYTES |
        GST_SEEK_METHOD_SET, 0);
    if (gst_pad_send_event (GST_PAD_PEER (self->sinkpad), seek)) {
      need_time = self->seek_time;
      after_seek = TRUE;

      if (self->flush) {
        gst_pad_push (self->srcpad, GST_DATA (gst_event_new (GST_EVENT_FLUSH)));
        self->flush = FALSE;
      }
      /* announce the new position downstream */
      gst_pad_push (self->srcpad,
          GST_DATA (gst_event_new_discontinuous (FALSE,
              GST_FORMAT_TIME, need_time, GST_FORMAT_UNDEFINED)));
    }

    self->seek_time = GST_CLOCK_TIME_NONE;
  }

  /* get a next buffer */
  GST_INFO ("getting text buffer");
  if (!self->parser.parse || self->parser.type == GST_SUB_PARSE_FORMAT_UNKNOWN) {
    GST_ELEMENT_ERROR (self, LIBRARY, INIT, (NULL), (NULL));
    return;
  }

  do {
    subtitle = self->parser.parse (self, &start_time, &end_time, after_seek);
    if (!subtitle)
      return;
    after_seek = FALSE;

    if (GST_CLOCK_TIME_IS_VALID (need_time) && end_time < need_time) {
      /* still before the seek target; drop this entry */
      g_free (subtitle);
    } else {
      need_time = GST_CLOCK_TIME_NONE;
      GST_DEBUG ("subparse: loop: text %s, start %lld, end %lld\n",
          subtitle, start_time, end_time);

      /* the buffer's data points at the malloc'd string (not copied) */
      buf = gst_buffer_new ();
      GST_BUFFER_DATA (buf) = subtitle;
      GST_BUFFER_SIZE (buf) = strlen (subtitle);
      GST_BUFFER_TIMESTAMP (buf) = start_time;
      GST_BUFFER_DURATION (buf) = end_time - start_time;
      GST_DEBUG ("sending text buffer %s at %lld", subtitle, start_time);
      gst_pad_push (self->srcpad, GST_DATA (buf));
    }
  } while (GST_CLOCK_TIME_IS_VALID (need_time));
}
+
+static GstElementStateReturn
+gst_subparse_change_state (GstElement * element)
+{
+ GstSubparse *self = GST_SUBPARSE (element);
+
+ switch (GST_STATE_TRANSITION (element)) {
+ case GST_STATE_PAUSED_TO_READY:
+ self->parser.deinit (self);
+ self->parser.type = GST_SUB_PARSE_FORMAT_UNKNOWN;
+ self->parser_detected = FALSE;
+ self->seek_time = GST_CLOCK_TIME_NONE;
+ self->flush = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return parent_class->change_state (element);
+}
+
#if 0
/* typefinding stuff -- disabled; typefinding for subtitle files is
 * registered from gsttypefindfunctions.c instead */
static GstTypeDefinition subparse_definition = {
  "subparse/x-text",
  "text/plain",
  ".sub",
  gst_subparse_type_find,
};
static GstCaps *
gst_subparse_type_find (GstBuffer * buf, gpointer private)
{
  GstSubParseFormat format;

  format = gst_subparse_buffer_format_autodetect (buf);
  switch (format) {
    case GST_SUB_PARSE_FORMAT_MDVDSUB:
      GST_DEBUG (GST_CAT_PLUGIN_INFO, "MicroDVD format detected");
      return gst_caps_new ("subparse_type_find", "text/plain", NULL);
    case GST_SUB_PARSE_FORMAT_SUBRIP:
      GST_DEBUG (GST_CAT_PLUGIN_INFO, "SubRip format detected");
      return gst_caps_new ("subparse_type_find", "text/plain", NULL);
    /* NOTE(review): GST_SUB_PARSE_FORMAT_MPSUB is not handled here */
    case GST_SUB_PARSE_FORMAT_UNKNOWN:
      GST_DEBUG (GST_CAT_PLUGIN_INFO, "no subtitle format detected");
      break;
  }
  /* don't know which this is */
  return NULL;
}
#endif
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ GST_DEBUG_CATEGORY_INIT (subparse_debug, "subparse", 0, ".sub parser");
+
+ return gst_element_register (plugin, "subparse",
+ GST_RANK_PRIMARY, GST_TYPE_SUBPARSE);
+}
+
/* Standard plugin boilerplate: exports the plugin descriptor. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "subparse",
    "Subtitle (.sub) file parsing",
    plugin_init, VERSION, "LGPL", GST_PACKAGE, GST_ORIGIN)
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2002> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_SUBPARSE_H__
+#define __GST_SUBPARSE_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
/* Standard GObject cast/type-check macros.  Fixed: the class-cast
 * previously cast to the instance struct (GstSubparse) instead of
 * GstSubparseClass, and GST_IS_SUBPARSE_CLASS named its parameter
 * "obj" while expanding "klass". */
#define GST_TYPE_SUBPARSE \
  (gst_subparse_get_type ())
#define GST_SUBPARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_SUBPARSE, GstSubparse))
#define GST_SUBPARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_SUBPARSE, GstSubparseClass))
#define GST_IS_SUBPARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_SUBPARSE))
#define GST_IS_SUBPARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_SUBPARSE))
+
typedef struct _GstSubparse GstSubparse;
typedef struct _GstSubparseClass GstSubparseClass;

/* Per-format hooks: the parser returns a newly-allocated subtitle
 * string (or NULL on EOS/interrupt) and fills in start/end times in
 * nanoseconds; the same signature-less hook type is reused for deinit. */
typedef void (* GstSubparseInit) (GstSubparse *self);
typedef gchar * (* GstSubparseParser) (GstSubparse *self,
                                       guint64 *out_start_time,
                                       guint64 *out_end_time,
                                       gboolean after_seek);

struct _GstSubparse {
  GstElement element;

  GstPad *sinkpad,*srcpad;

  /* accumulated raw input not yet consumed by the parser */
  GString *textbuf;
  /* active parser; type holds a GstSubParseFormat value and deinit
   * may be NULL (MicroDVD has no per-parser state to free) */
  struct {
    GstSubparseInit deinit;
    GstSubparseParser parse;
    gint type;
  } parser;
  gboolean parser_detected;

  /* per-format parser state (only one member active at a time) */
  union {
    struct {
      int state;
      GString *buf;
      guint64 time1, time2;
    } subrip;
    struct {
      int state;
      GString *buf;
      guint64 time;
    } mpsub;
  } state;

  /* seek: pending target time (GST_CLOCK_TIME_NONE when idle) and
   * whether a FLUSH event should be pushed downstream first */
  guint64 seek_time;
  gboolean flush;
};

struct _GstSubparseClass {
  GstElementClass parent_class;
};

GType gst_subparse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_SUBPARSE_H__ */
}
}
+static GstStaticCaps ogmtext_caps = GST_STATIC_CAPS ("application/x-ogm-text");
+
+#define OGMTEXT_CAPS (gst_static_caps_get(&ogmtext_caps))
+static void
+ogmtext_type_find (GstTypeFind * tf, gpointer private)
+{
+ guint8 *data = gst_type_find_peek (tf, 0, 9);
+
+ if (data) {
+ if (memcmp (data, "\001text\000\000\000\000", 9) != 0)
+ return;
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, OGMTEXT_CAPS);
+ }
+}
+
/*** audio/x-speex ***********************************************************/
static GstStaticCaps speex_caps = GST_STATIC_CAPS ("audio/x-speex");
ogmvideo_type_find, NULL, OGMVIDEO_CAPS, NULL);
TYPE_FIND_REGISTER (plugin, "application/x-ogm-audio", GST_RANK_PRIMARY,
ogmaudio_type_find, NULL, OGMAUDIO_CAPS, NULL);
+ TYPE_FIND_REGISTER (plugin, "application/x-ogm-text", GST_RANK_PRIMARY,
+ ogmtext_type_find, NULL, OGMTEXT_CAPS, NULL);
TYPE_FIND_REGISTER (plugin, "audio/x-speex", GST_RANK_PRIMARY,
speex_type_find, NULL, SPEEX_CAPS, NULL);
TYPE_FIND_REGISTER (plugin, "audio/x-m4a", GST_RANK_PRIMARY, m4a_type_find,