AVCodecContext *context;
AVFrame *picture;
gboolean opened;
+ union {
+ struct {
+ gint width, height, fps, fps_base;
+ } video;
+ struct {
+ gint channels, samplerate;
+ } audio;
+ } format;
/* parsing */
AVCodecParserContext *pctx;
static void gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec);
static void gst_ffmpegdec_dispose (GObject * object);
+static gboolean gst_ffmpegdec_query (GstPad * pad, GstQueryType type,
+ GstFormat * fmt, gint64 * value);
+static gboolean gst_ffmpegdec_event (GstPad * pad, GstEvent * event);
+
static GstPadLinkReturn gst_ffmpegdec_connect (GstPad * pad,
const GstCaps * caps);
static void gst_ffmpegdec_chain (GstPad * pad, GstData * data);
ffmpegdec->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
gst_pad_set_link_function (ffmpegdec->sinkpad, gst_ffmpegdec_connect);
gst_pad_set_chain_function (ffmpegdec->sinkpad, gst_ffmpegdec_chain);
+ gst_element_add_pad (GST_ELEMENT (ffmpegdec), ffmpegdec->sinkpad);
+
ffmpegdec->srcpad = gst_pad_new_from_template (oclass->srctempl, "src");
gst_pad_use_explicit_caps (ffmpegdec->srcpad);
-
- gst_element_add_pad (GST_ELEMENT (ffmpegdec), ffmpegdec->sinkpad);
+ gst_pad_set_event_function (ffmpegdec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_ffmpegdec_event));
+ gst_pad_set_query_function (ffmpegdec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_ffmpegdec_query));
gst_element_add_pad (GST_ELEMENT (ffmpegdec), ffmpegdec->srcpad);
/* some ffmpeg data */
av_free (ffmpegdec->picture);
}
+/* Source-pad query handler.
+ *
+ * Strategy: first forward the query unchanged to the element upstream of
+ * our sink pad (the demuxer usually answers position/duration natively).
+ * Only if that fails, fall back to a bit-rate estimate: re-issue the same
+ * query upstream in BYTES and scale the byte value into TIME using the
+ * codec's nominal bit_rate.  The fallback is attempted only for
+ * POSITION/TOTAL queries in GST_FORMAT_TIME and only when a non-zero
+ * bit_rate is known.
+ *
+ * NOTE(review): the scaling line multiplies by the pre-truncated integer
+ * quotient GST_SECOND / bit_rate and does not account for 8 bits per
+ * byte — presumably a rough estimate only; confirm the intended units
+ * (bytes vs. bits) before relying on the reported times.
+ */
+static gboolean
+gst_ffmpegdec_query (GstPad * pad, GstQueryType type,
+    GstFormat * fmt, gint64 * value)
+{
+ GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) gst_pad_get_parent (pad);
+ GstPad *peer = GST_PAD_PEER (ffmpegdec->sinkpad);
+ GstFormat bfmt = GST_FORMAT_BYTES;
+
+ /* no upstream element connected: nothing to ask */
+ if (!peer)
+ return FALSE;
+ /* preferred path: let upstream answer in the caller's format */
+ else if (gst_pad_query (peer, type, fmt, value))
+ return TRUE;
+ /* ok, do bitrate calc... */
+ else if ((type != GST_QUERY_POSITION && type != GST_QUERY_TOTAL) ||
+ *fmt != GST_FORMAT_TIME || ffmpegdec->context->bit_rate == 0 ||
+ !gst_pad_query (peer, type, &bfmt, value))
+ return FALSE;
+
+ /* bytes still sitting in the parser cache have not been decoded yet,
+ * so subtract them from the reported upstream byte position */
+ if (ffmpegdec->pcache && type == GST_QUERY_POSITION)
+ *value -= GST_BUFFER_SIZE (ffmpegdec->pcache);
+ /* convert bytes -> nanoseconds via the nominal stream bit-rate */
+ *value *= GST_SECOND / ffmpegdec->context->bit_rate;
+
+ return TRUE;
+}
+
+/* Source-pad event handler.
+ *
+ * Pure pass-through: forwards every event (seek, etc.) to the element
+ * upstream of our sink pad and reports that element's success/failure.
+ * Returns FALSE when no upstream peer is connected.  No event types are
+ * handled locally in this revision (the trailing comment hints at a
+ * planned fallback path that was never filled in).
+ */
+static gboolean
+gst_ffmpegdec_event (GstPad * pad, GstEvent * event)
+{
+ GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) gst_pad_get_parent (pad);
+ GstPad *peer = GST_PAD_PEER (ffmpegdec->sinkpad);
+
+ /* nobody upstream to deliver the event to */
+ if (!peer)
+ return FALSE;
+ /* upstream accepted and handled the event */
+ else if (gst_pad_send_event (peer, event))
+ return TRUE;
+ else
+ return FALSE; /* .. */
+}
+
static void
gst_ffmpegdec_close (GstFFMpegDec *ffmpegdec)
{
return FALSE;
}
- /* open a parser if we can - exclude mpeg4 for now... */
- if (oclass->in_plugin->id != CODEC_ID_MPEG4)
+ /* open a parser if we can - exclude mpeg4, because it is already
+ * framed (divx), mp3 because it doesn't work (?) and mjpeg because
+ * of $(see mpeg4)... */
+ if (oclass->in_plugin->id != CODEC_ID_MPEG4 &&
+ oclass->in_plugin->id != CODEC_ID_MJPEG &&
+ oclass->in_plugin->id != CODEC_ID_MP3)
ffmpegdec->pctx = av_parser_init (oclass->in_plugin->id);
+ switch (oclass->in_plugin->type) {
+ case CODEC_TYPE_VIDEO:
+ ffmpegdec->format.video.width = 0;
+ ffmpegdec->format.video.height = 0;
+ ffmpegdec->format.video.fps = 0;
+ ffmpegdec->format.video.fps_base = 0;
+ break;
+ case CODEC_TYPE_AUDIO:
+ ffmpegdec->format.audio.samplerate = 0;
+ ffmpegdec->format.audio.channels = 0;
+ break;
+ default:
+ break;
+ }
+
return TRUE;
}
(GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
GstCaps *caps;
+ switch (oclass->in_plugin->type) {
+ case CODEC_TYPE_VIDEO:
+ if (ffmpegdec->format.video.width == ffmpegdec->context->width &&
+ ffmpegdec->format.video.height == ffmpegdec->context->height &&
+ ffmpegdec->format.video.fps == ffmpegdec->context->frame_rate &&
+ ffmpegdec->format.video.fps_base ==
+ ffmpegdec->context->frame_rate_base)
+ return TRUE;
+ break;
+ case CODEC_TYPE_AUDIO:
+ if (ffmpegdec->format.audio.samplerate ==
+ ffmpegdec->context->sample_rate &&
+ ffmpegdec->format.audio.channels == ffmpegdec->context->channels)
+ return TRUE;
+ break;
+ default:
+ break;
+ }
+
caps = gst_ffmpeg_codectype_to_caps (oclass->in_plugin->type,
ffmpegdec->context);
/* parse cache joining */
if (ffmpegdec->pcache) {
-GST_LOG ("Joining %p[%lld/%d]&&%p[%lld/%d]",
- ffmpegdec->pcache, GST_BUFFER_OFFSET (ffmpegdec->pcache),
- GST_BUFFER_SIZE (ffmpegdec->pcache), inbuf,
- GST_BUFFER_OFFSET (inbuf), GST_BUFFER_SIZE (inbuf));
inbuf = gst_buffer_join (ffmpegdec->pcache, inbuf);
-GST_LOG ("done");
ffmpegdec->pcache = NULL;
bdata = GST_BUFFER_DATA (inbuf);
bsize = GST_BUFFER_SIZE (inbuf);
do {
/* parse, if at all possible */
- if (ffmpegdec->pctx && ffmpegdec->context->codec_id != CODEC_ID_MP3 &&
- ffmpegdec->context->codec_id != CODEC_ID_MJPEG) {
+ if (ffmpegdec->pctx) {
gint res;
res = av_parser_parse (ffmpegdec->pctx, ffmpegdec->context,
if (have_data) {
GST_DEBUG ("Decoded data, now pushing");
- if (!GST_PAD_CAPS (ffmpegdec->srcpad)) {
- if (!gst_ffmpegdec_negotiate (ffmpegdec)) {
- gst_buffer_unref (outbuf);
- return;
- }
+ if (!gst_ffmpegdec_negotiate (ffmpegdec)) {
+ gst_buffer_unref (outbuf);
+ return;
}
if (GST_PAD_IS_USABLE (ffmpegdec->srcpad))
}
} while (bsize > 0);
- if (ffmpegdec->pctx && bsize > 0) {
+ if ((ffmpegdec->pctx || oclass->in_plugin->id == CODEC_ID_MP3) &&
+ bsize > 0) {
GST_DEBUG ("Keeping %d bytes of data", bsize);
ffmpegdec->pcache = gst_buffer_create_sub (inbuf,
for (n = 0; n < MAX_STREAMS; n++) {
demux->srcpads[n] = NULL;
demux->handled[n] = FALSE;
- demux->last_ts[n] = 0;
+ demux->last_ts[n] = GST_CLOCK_TIME_NONE;
}
demux->videopads = 0;
demux->audiopads = 0;
demux->srcpads[n] = NULL;
}
demux->handled[n] = FALSE;
- demux->last_ts[n] = 0;
+ demux->last_ts[n] = GST_CLOCK_TIME_NONE;
}
demux->videopads = 0;
demux->audiopads = 0;
{
GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
- gboolean res = TRUE;
-
- if (!stream || (*fmt == GST_FORMAT_DEFAULT &&
- stream->codec.codec_type != CODEC_TYPE_VIDEO))
- return FALSE;
+ gboolean res = FALSE;
switch (type) {
case GST_QUERY_TOTAL:
switch (*fmt) {
case GST_FORMAT_TIME:
- *value = stream->duration * (GST_SECOND / AV_TIME_BASE);
+ if (stream) {
+ *value = stream->duration * (GST_SECOND / AV_TIME_BASE);
+ res = TRUE;
+ }
break;
case GST_FORMAT_DEFAULT:
- if (stream->codec_info_nb_frames) {
+ if (stream->codec_info_nb_frames &&
+ stream->codec.codec_type == CODEC_TYPE_VIDEO) {
*value = stream->codec_info_nb_frames;
- break;
- } /* else fall-through */
+ res = TRUE;
+ }
+ break;
+ case GST_FORMAT_BYTES:
+ if (demux->videopads + demux->audiopads == 1 &&
+ GST_PAD_PEER (demux->sinkpad) != NULL) {
+ res = gst_pad_query (GST_PAD_PEER (demux->sinkpad),
+ type, fmt, value);
+ }
+ break;
default:
- res = FALSE;
break;
}
break;
case GST_QUERY_POSITION:
switch (*fmt) {
case GST_FORMAT_TIME:
- *value = demux->last_ts[stream->index];
+ if (stream &&
+ GST_CLOCK_TIME_IS_VALID (demux->last_ts[stream->index])) {
+ *value = demux->last_ts[stream->index];
+ res = TRUE;
+ }
break;
case GST_FORMAT_DEFAULT:
- res = gst_pad_convert (pad, GST_FORMAT_TIME,
- demux->last_ts[stream->index], fmt, value);
+ if (stream && stream->codec.codec_type == CODEC_TYPE_VIDEO &&
+ GST_CLOCK_TIME_IS_VALID (demux->last_ts[stream->index])) {
+ res = gst_pad_convert (pad, GST_FORMAT_TIME,
+ demux->last_ts[stream->index], fmt, value);
+ }
break;
+ case GST_FORMAT_BYTES:
+ if (demux->videopads + demux->audiopads == 1 &&
+ GST_PAD_PEER (demux->sinkpad) != NULL) {
+ res = gst_pad_query (GST_PAD_PEER (demux->sinkpad),
+ type, fmt, value);
+ }
default:
- res = FALSE;
break;
}
break;
default:
- res = FALSE;
break;
}
if (pkt.pts != AV_NOPTS_VALUE) {
GST_BUFFER_TIMESTAMP (outbuf) = (gdouble) (pkt.pts +
stream->start_time) * GST_SECOND / AV_TIME_BASE;
+ demux->last_ts[stream->index] = GST_BUFFER_TIMESTAMP (outbuf);
}
if (pkt.flags & PKT_FLAG_KEY) {