From 7383aa1baa1b466bcc5ab339991b1de20ab572fa Mon Sep 17 00:00:00 2001 From: Thomas Vander Stichele Date: Mon, 29 Mar 2004 16:39:18 +0000 Subject: [PATCH] indent Original commit message from CVS: indent --- ChangeLog | 4 + ext/ffmpeg/gstffmpeg.c | 33 +- ext/ffmpeg/gstffmpegcodecmap.c | 633 +++++++++++++++++++-------------------- ext/ffmpeg/gstffmpegcolorspace.c | 169 +++++------ ext/ffmpeg/gstffmpegdec.c | 230 +++++++------- ext/ffmpeg/gstffmpegdemux.c | 242 +++++++-------- ext/ffmpeg/gstffmpegenc.c | 292 +++++++++--------- ext/ffmpeg/gstffmpegmux.c | 230 +++++++------- ext/ffmpeg/gstffmpegprotocol.c | 48 ++- 9 files changed, 894 insertions(+), 987 deletions(-) diff --git a/ChangeLog b/ChangeLog index 9488d8a..06682c6 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +2004-03-29 Thomas Vander Stichele + + * ext/ffmpeg/*.c: indent + 2004-03-25 Ronald Bultje * ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_base_init): diff --git a/ext/ffmpeg/gstffmpeg.c b/ext/ffmpeg/gstffmpeg.c index e10a274..36df0b2 100644 --- a/ext/ffmpeg/gstffmpeg.c +++ b/ext/ffmpeg/gstffmpeg.c @@ -33,16 +33,16 @@ #include #endif -extern gboolean gst_ffmpegdemux_register (GstPlugin *plugin); -extern gboolean gst_ffmpegdec_register (GstPlugin *plugin); -extern gboolean gst_ffmpegenc_register (GstPlugin *plugin); -extern gboolean gst_ffmpegmux_register (GstPlugin *plugin); -extern gboolean gst_ffmpegcsp_register (GstPlugin *plugin); - +extern gboolean gst_ffmpegdemux_register (GstPlugin * plugin); +extern gboolean gst_ffmpegdec_register (GstPlugin * plugin); +extern gboolean gst_ffmpegenc_register (GstPlugin * plugin); +extern gboolean gst_ffmpegmux_register (GstPlugin * plugin); +extern gboolean gst_ffmpegcsp_register (GstPlugin * plugin); + extern URLProtocol gstreamer_protocol; static gboolean -plugin_init (GstPlugin *plugin) +plugin_init (GstPlugin * plugin) { if (!gst_library_load ("gstbytestream")) return FALSE; @@ -54,7 +54,7 @@ plugin_init (GstPlugin *plugin) gst_ffmpegenc_register (plugin); gst_ffmpegdec_register (plugin); gst_ffmpegdemux_register (plugin); - /*gst_ffmpegmux_register (plugin);*/ + /*gst_ffmpegmux_register (plugin); */ gst_ffmpegcsp_register (plugin); register_protocol (&gstreamer_protocol); @@ -63,14 +63,9 @@ plugin_init (GstPlugin *plugin) return TRUE; } -GST_PLUGIN_DEFINE ( - GST_VERSION_MAJOR, - GST_VERSION_MINOR, - "ffmpeg", - "All FFMPEG codecs", - plugin_init, - FFMPEG_VERSION, - "LGPL", - "FFMpeg", - "http://ffmpeg.sourceforge.net/" -) +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "ffmpeg", + "All FFMPEG codecs", + plugin_init, + FFMPEG_VERSION, "LGPL", "FFMpeg", "http://ffmpeg.sourceforge.net/") diff --git a/ext/ffmpeg/gstffmpegcodecmap.c b/ext/ffmpeg/gstffmpegcodecmap.c index 837aec9..28a18510 100644 --- a/ext/ffmpeg/gstffmpegcodecmap.c +++ b/ext/ffmpeg/gstffmpegcodecmap.c @@ -86,9 +86,8 @@ */ GstCaps * -gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, - AVCodecContext *context, - gboolean encode) +gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, + AVCodecContext * context, gboolean encode) { GstCaps *caps = NULL; gboolean buildcaps = FALSE; @@ -99,9 +98,8 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, if (encode) { /* FIXME: bitrate */ caps = GST_FF_VID_CAPS_NEW ("video/mpeg", - "mpegversion", G_TYPE_INT, 1, - "systemstream", G_TYPE_BOOLEAN, FALSE, - NULL); + "mpegversion", G_TYPE_INT, 1, + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); } break; @@ -109,16 +107,14 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, if (encode) { /* FIXME: bitrate */ caps = 
GST_FF_VID_CAPS_NEW ("video/mpeg", - "mpegversion", G_TYPE_INT, 2, - "systemstream", G_TYPE_BOOLEAN, FALSE, - NULL); + "mpegversion", G_TYPE_INT, 2, + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); } else { /* decode both MPEG-1 and MPEG-2; width/height/fps are all in * the MPEG video stream headers, so may be omitted from caps. */ caps = gst_caps_new_simple ("video/mpeg", - "mpegversion", GST_TYPE_INT_RANGE, 1, 2, - "systemstream", G_TYPE_BOOLEAN, FALSE, - NULL); + "mpegversion", GST_TYPE_INT_RANGE, 1, 2, + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); } break; @@ -126,7 +122,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, /* this is a special ID - don't need it in GStreamer, I think */ break; - /* I don't know the exact differences between those... Anyone? */ + /* I don't know the exact differences between those... Anyone? */ case CODEC_ID_H263: case CODEC_ID_H263P: case CODEC_ID_H263I: @@ -140,9 +136,8 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, /* FIXME: context->sub_id must be filled in during decoding */ caps = GST_FF_VID_CAPS_NEW ("video/x-pn-realvideo", - "systemstream", G_TYPE_BOOLEAN, FALSE, - "rmversion", G_TYPE_INT, version, - NULL); + "systemstream", G_TYPE_BOOLEAN, FALSE, + "rmversion", G_TYPE_INT, version, NULL); } while (0); break; @@ -151,9 +146,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, if (encode) { /* FIXME: bitrate */ caps = GST_FF_AUD_CAPS_NEW ("audio/mpeg", - "mpegversion", G_TYPE_INT, 1, - "layer", G_TYPE_INT, 2, - NULL); + "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, 2, NULL); } break; @@ -161,16 +154,13 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, if (encode) { /* FIXME: bitrate */ caps = GST_FF_AUD_CAPS_NEW ("audio/mpeg", - "mpegversion", G_TYPE_INT, 1, - "layer", G_TYPE_INT, 3, - NULL); + "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, 3, NULL); } else { /* Decodes MPEG-1 layer 1/2/3. Samplerate, channels et al are * in the MPEG audio header, so may be omitted from caps. */ caps = gst_caps_new_simple ("audio/mpeg", - "mpegversion", G_TYPE_INT, 1, - "layer", GST_TYPE_INT_RANGE, 1, 3, - NULL); + "mpegversion", G_TYPE_INT, 1, + "layer", GST_TYPE_INT_RANGE, 1, 3, NULL); } break; @@ -195,11 +185,11 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, } break; - /* MJPEG is normal JPEG, Motion-JPEG and Quicktime MJPEG-A. MJPEGB - * is Quicktime's MJPEG-B. LJPEG is lossless JPEG. I don't know what - * sp5x is, but it's apparently something JPEG... We don't separate - * between those in GStreamer. Should we (at least between MJPEG, - * MJPEG-B and sp5x decoding...)? */ + /* MJPEG is normal JPEG, Motion-JPEG and Quicktime MJPEG-A. MJPEGB + * is Quicktime's MJPEG-B. LJPEG is lossless JPEG. I don't know what + * sp5x is, but it's apparently something JPEG... We don't separate + * between those in GStreamer. Should we (at least between MJPEG, + * MJPEG-B and sp5x decoding...)? */ case CODEC_ID_MJPEG: case CODEC_ID_MJPEGB: case CODEC_ID_LJPEG: @@ -213,18 +203,15 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, * the AVI fourcc 'DIVX', but 'mp4v' for Quicktime... 
*/ /* FIXME: bitrate */ caps = GST_FF_VID_CAPS_NEW ("video/mpeg", - "systemstream", G_TYPE_BOOLEAN, FALSE, - "mpegversion", G_TYPE_INT, 4, - NULL); + "systemstream", G_TYPE_BOOLEAN, FALSE, + "mpegversion", G_TYPE_INT, 4, NULL); } else { /* The trick here is to separate xvid, divx, mpeg4, 3ivx et al */ caps = GST_FF_VID_CAPS_NEW ("video/mpeg", - "mpegversion", G_TYPE_INT, 4, - "systemstream", G_TYPE_BOOLEAN, FALSE, - NULL); + "mpegversion", G_TYPE_INT, 4, + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); gst_caps_append (caps, GST_FF_VID_CAPS_NEW ("video/x-divx", - "divxversion", GST_TYPE_INT_RANGE, 4, 5, - NULL)); + "divxversion", GST_TYPE_INT_RANGE, 4, 5, NULL)); gst_caps_append (caps, GST_FF_VID_CAPS_NEW ("video/x-xvid")); gst_caps_append (caps, GST_FF_VID_CAPS_NEW ("video/x-3ivx")); } @@ -242,12 +229,10 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, /* encode-FIXME: bitrate */ caps = GST_FF_VID_CAPS_NEW ("video/x-msmpeg", - "msmpegversion", G_TYPE_INT, version, - NULL); + "msmpegversion", G_TYPE_INT, version, NULL); if (!encode && codec_id == CODEC_ID_MSMPEG4V3) { gst_caps_append (caps, GST_FF_VID_CAPS_NEW ("video/x-divx", - "divxversion", G_TYPE_INT, 3, - NULL)); + "divxversion", G_TYPE_INT, 3, NULL)); } } while (0); break; @@ -259,8 +244,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, /* encode-FIXME: bitrate */ caps = GST_FF_VID_CAPS_NEW ("video/x-wmv", - "wmvversion", G_TYPE_INT, version, - NULL); + "wmvversion", G_TYPE_INT, version, NULL); } while (0); break; @@ -270,18 +254,16 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, case CODEC_ID_SVQ1: caps = GST_FF_VID_CAPS_NEW ("video/x-svq", - "svqversion", G_TYPE_INT, 1, - NULL); + "svqversion", G_TYPE_INT, 1, NULL); break; case CODEC_ID_SVQ3: caps = GST_FF_VID_CAPS_NEW ("video/x-svq", - "svqversion", G_TYPE_INT, 3, - "halfpel_flag", GST_TYPE_INT_RANGE, 0, 1, - "thirdpel_flag", GST_TYPE_INT_RANGE, 0, 1, - "low_delay", GST_TYPE_INT_RANGE, 0, 1, - "unknown_svq3_flag", GST_TYPE_INT_RANGE, 0, 1, - NULL); + "svqversion", G_TYPE_INT, 3, + "halfpel_flag", GST_TYPE_INT_RANGE, 0, 1, + "thirdpel_flag", GST_TYPE_INT_RANGE, 0, 1, + "low_delay", GST_TYPE_INT_RANGE, 0, 1, + "unknown_svq3_flag", GST_TYPE_INT_RANGE, 0, 1, NULL); break; case CODEC_ID_DVAUDIO: @@ -298,12 +280,11 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, gint version = (codec_id == CODEC_ID_WMAV1) ? 1 : 2; caps = GST_FF_AUD_CAPS_NEW ("audio/x-wma", - "wmaversion", G_TYPE_INT, version, - "flags1", GST_TYPE_INT_RANGE, G_MININT, G_MAXINT, - "flags2", GST_TYPE_INT_RANGE, G_MININT, G_MAXINT, - "block_align", GST_TYPE_INT_RANGE, 0, G_MAXINT, - "bitrate", GST_TYPE_INT_RANGE, 0, G_MAXINT, - NULL); + "wmaversion", G_TYPE_INT, version, + "flags1", GST_TYPE_INT_RANGE, G_MININT, G_MAXINT, + "flags2", GST_TYPE_INT_RANGE, G_MININT, G_MAXINT, + "block_align", GST_TYPE_INT_RANGE, 0, G_MAXINT, + "bitrate", GST_TYPE_INT_RANGE, 0, G_MAXINT, NULL); } while (0); break; @@ -313,8 +294,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, gint version = (codec_id == CODEC_ID_MACE3) ? 
3 : 6; caps = GST_FF_AUD_CAPS_NEW ("audio/x-mace", - "maceversion", G_TYPE_INT, version, - NULL); + "maceversion", G_TYPE_INT, version, NULL); } while (0); break; @@ -332,8 +312,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, case CODEC_ID_INDEO3: caps = GST_FF_VID_CAPS_NEW ("video/x-indeo", - "indeoversion", G_TYPE_INT, 3, - NULL); + "indeoversion", G_TYPE_INT, 3, NULL); break; case CODEC_ID_VP3: @@ -358,8 +337,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, case CODEC_ID_FFV1: caps = GST_FF_VID_CAPS_NEW ("video/x-ffv", - "ffvversion", G_TYPE_INT, 1, - NULL); + "ffvversion", G_TYPE_INT, 1, NULL); break; case CODEC_ID_4XM: @@ -397,7 +375,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, buildcaps = TRUE; break; - /* weird quasi-codecs for the demuxers only */ + /* weird quasi-codecs for the demuxers only */ case CODEC_ID_PCM_S16LE: case CODEC_ID_PCM_S16BE: case CODEC_ID_PCM_U16LE: @@ -406,50 +384,55 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, case CODEC_ID_PCM_U8: do { gint width = 0, depth = 0, endianness = 0; - gboolean signedness = FALSE; /* blabla */ + gboolean signedness = FALSE; /* blabla */ switch (codec_id) { case CODEC_ID_PCM_S16LE: - width = 16; depth = 16; + width = 16; + depth = 16; endianness = G_LITTLE_ENDIAN; signedness = TRUE; break; case CODEC_ID_PCM_S16BE: - width = 16; depth = 16; + width = 16; + depth = 16; endianness = G_BIG_ENDIAN; signedness = TRUE; break; case CODEC_ID_PCM_U16LE: - width = 16; depth = 16; + width = 16; + depth = 16; endianness = G_LITTLE_ENDIAN; signedness = FALSE; break; case CODEC_ID_PCM_U16BE: - width = 16; depth = 16; + width = 16; + depth = 16; endianness = G_BIG_ENDIAN; signedness = FALSE; break; case CODEC_ID_PCM_S8: - width = 8; depth = 8; + width = 8; + depth = 8; endianness = G_BYTE_ORDER; signedness = TRUE; break; case CODEC_ID_PCM_U8: - width = 8; depth = 8; + width = 8; + depth = 8; endianness = G_BYTE_ORDER; signedness = FALSE; break; default: - g_assert(0); /* don't worry, we never get here */ + g_assert (0); /* don't worry, we never get here */ break; } caps = GST_FF_AUD_CAPS_NEW ("audio/x-raw-int", - "width", G_TYPE_INT, width, - "depth", G_TYPE_INT, depth, - "endianness", G_TYPE_INT, endianness, - "signed", G_TYPE_BOOLEAN, signedness, - NULL); + "width", G_TYPE_INT, width, + "depth", G_TYPE_INT, depth, + "endianness", G_TYPE_INT, endianness, + "signed", G_TYPE_BOOLEAN, signedness, NULL); } while (0); break; @@ -514,15 +497,14 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, layout = "g726"; break; default: - g_assert (0); /* don't worry, we never get here */ + g_assert (0); /* don't worry, we never get here */ break; } /* FIXME: someone please check whether we need additional properties * in this caps definition. */ caps = GST_FF_AUD_CAPS_NEW ("audio/x-adpcm", - "layout", G_TYPE_STRING, layout, - NULL); + "layout", G_TYPE_STRING, layout, NULL); } while (0); break; @@ -540,8 +522,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, /* FIXME: properties? */ caps = GST_FF_AUD_CAPS_NEW ("audio/x-pn-realaudio", - "raversion", G_TYPE_INT, version, - NULL); + "raversion", G_TYPE_INT, version, NULL); } while (0); break; @@ -562,18 +543,17 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, layout = "xan"; break; default: - g_assert (0); /* don't worry, we never get here */ + g_assert (0); /* don't worry, we never get here */ break; } /* FIXME: someone please check whether we need additional properties * in this caps definition. 
*/ caps = GST_FF_AUD_CAPS_NEW ("audio/x-dpcm", - "layout", G_TYPE_STRING, layout, - NULL); + "layout", G_TYPE_STRING, layout, NULL); } while (0); break; - + case CODEC_ID_FLAC: /* Note that ffmpeg has no encoder yet, but just for safety. In the * encoder case, we want to add things like samplerate, channels... */ @@ -613,6 +593,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, if (caps != NULL) { char *str = gst_caps_to_string (caps); + GST_DEBUG ("caps for codec_id=%d: %s", codec_id, str); g_free (str); } else { @@ -630,8 +611,7 @@ gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, */ static GstCaps * -gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, - AVCodecContext *context) +gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, AVCodecContext * context) { GstCaps *caps = NULL; @@ -641,51 +621,65 @@ gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, switch (pix_fmt) { case PIX_FMT_YUV420P: - fmt = GST_MAKE_FOURCC ('I','4','2','0'); + fmt = GST_MAKE_FOURCC ('I', '4', '2', '0'); break; case PIX_FMT_YUV422: - fmt = GST_MAKE_FOURCC ('Y','U','Y','2'); + fmt = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'); break; case PIX_FMT_RGB24: bpp = depth = 24; endianness = G_BIG_ENDIAN; - r_mask = 0xff0000; g_mask = 0x00ff00; b_mask = 0x0000ff; + r_mask = 0xff0000; + g_mask = 0x00ff00; + b_mask = 0x0000ff; break; case PIX_FMT_BGR24: bpp = depth = 24; endianness = G_BIG_ENDIAN; - r_mask = 0x0000ff; g_mask = 0x00ff00; b_mask = 0xff0000; + r_mask = 0x0000ff; + g_mask = 0x00ff00; + b_mask = 0xff0000; break; case PIX_FMT_YUV422P: - fmt = GST_MAKE_FOURCC ('Y','4','2','B'); + fmt = GST_MAKE_FOURCC ('Y', '4', '2', 'B'); break; case PIX_FMT_YUV444P: /* .. */ break; case PIX_FMT_RGBA32: - bpp = 32; depth = 24; + bpp = 32; + depth = 24; endianness = G_BIG_ENDIAN; #if (G_BYTE_ORDER == G_BIG_ENDIAN) - r_mask = 0x00ff0000; g_mask = 0x0000ff00; b_mask = 0x000000ff; -#else - r_mask = 0x0000ff00; g_mask = 0x00ff0000; b_mask = 0xff000000; + r_mask = 0x00ff0000; + g_mask = 0x0000ff00; + b_mask = 0x000000ff; +#else + r_mask = 0x0000ff00; + g_mask = 0x00ff0000; + b_mask = 0xff000000; #endif break; case PIX_FMT_YUV410P: - fmt = GST_MAKE_FOURCC ('Y','U','V','9'); + fmt = GST_MAKE_FOURCC ('Y', 'U', 'V', '9'); break; case PIX_FMT_YUV411P: - fmt = GST_MAKE_FOURCC ('Y','4','1','B'); + fmt = GST_MAKE_FOURCC ('Y', '4', '1', 'B'); break; case PIX_FMT_RGB565: bpp = depth = 16; endianness = G_BYTE_ORDER; - r_mask = 0xf800; g_mask = 0x07e0; b_mask = 0x001f; + r_mask = 0xf800; + g_mask = 0x07e0; + b_mask = 0x001f; break; case PIX_FMT_RGB555: - bpp = 16; depth = 15; + bpp = 16; + depth = 15; endianness = G_BYTE_ORDER; - r_mask = 0x7c00; g_mask = 0x03e0; b_mask = 0x001f; + r_mask = 0x7c00; + g_mask = 0x03e0; + b_mask = 0x001f; break; default: /* give up ... 
*/ @@ -694,23 +688,22 @@ gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, if (bpp != 0) { caps = GST_FF_VID_CAPS_NEW ("video/x-raw-rgb", - "bpp", G_TYPE_INT, bpp, - "depth", G_TYPE_INT, depth, - "red_mask", G_TYPE_INT, r_mask, + "bpp", G_TYPE_INT, bpp, + "depth", G_TYPE_INT, depth, + "red_mask", G_TYPE_INT, r_mask, "green_mask", G_TYPE_INT, g_mask, - "blue_mask", G_TYPE_INT, b_mask, - "endianness", G_TYPE_INT, endianness, - NULL); + "blue_mask", G_TYPE_INT, b_mask, + "endianness", G_TYPE_INT, endianness, NULL); } else if (fmt) { caps = GST_FF_VID_CAPS_NEW ("video/x-raw-yuv", - "format", GST_TYPE_FOURCC, fmt, - NULL); + "format", GST_TYPE_FOURCC, fmt, NULL); } if (caps != NULL) { char *str = gst_caps_to_string (caps); + GST_DEBUG ("caps for pix_fmt=%d: %s", pix_fmt, str); - g_free(str); + g_free (str); } else { GST_WARNING ("No caps found for pix_fmt=%d", pix_fmt); } @@ -726,8 +719,8 @@ gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, */ static GstCaps * -gst_ffmpeg_smpfmt_to_caps (enum SampleFormat sample_fmt, - AVCodecContext *context) +gst_ffmpeg_smpfmt_to_caps (enum SampleFormat sample_fmt, + AVCodecContext * context) { GstCaps *caps = NULL; @@ -747,17 +740,16 @@ gst_ffmpeg_smpfmt_to_caps (enum SampleFormat sample_fmt, if (bpp) { caps = GST_FF_AUD_CAPS_NEW ("audio/x-raw-int", - "signed", G_TYPE_BOOLEAN, signedness, - "endianness", G_TYPE_INT, G_BYTE_ORDER, - "width", G_TYPE_INT, bpp, - "depth", G_TYPE_INT, bpp, - NULL); + "signed", G_TYPE_BOOLEAN, signedness, + "endianness", G_TYPE_INT, G_BYTE_ORDER, + "width", G_TYPE_INT, bpp, "depth", G_TYPE_INT, bpp, NULL); } if (caps != NULL) { char *str = gst_caps_to_string (caps); + GST_DEBUG ("caps for sample_fmt=%d: %s", sample_fmt, str); - g_free(str); + g_free (str); } else { GST_WARNING ("No caps found for sample_fmt=%d", sample_fmt); } @@ -773,8 +765,8 @@ gst_ffmpeg_smpfmt_to_caps (enum SampleFormat sample_fmt, */ GstCaps * -gst_ffmpeg_codectype_to_caps (enum CodecType codec_type, - AVCodecContext *context) +gst_ffmpeg_codectype_to_caps (enum CodecType codec_type, + AVCodecContext * context) { GstCaps *caps; @@ -827,23 +819,22 @@ gst_ffmpeg_codectype_to_caps (enum CodecType codec_type, * when needed. */ static void -gst_ffmpeg_caps_to_extradata (const GstCaps *caps, - AVCodecContext *context) +gst_ffmpeg_caps_to_extradata (const GstCaps * caps, AVCodecContext * context) { GstStructure *structure; const gchar *mimetype; g_return_if_fail (gst_caps_get_size (caps) == 1); structure = gst_caps_get_structure (caps, 0); - + mimetype = gst_structure_get_name (structure); - if (!strcmp(mimetype, "audio/x-wma")) { + if (!strcmp (mimetype, "audio/x-wma")) { gint flags1, flags2, wmaversion = 0; if (!gst_structure_get_int (structure, "flags1", &flags1) || - !gst_structure_get_int (structure, "flags2", &flags2) || - !gst_structure_get_int (structure, "wmaversion", &wmaversion)) { + !gst_structure_get_int (structure, "flags2", &flags2) || + !gst_structure_get_int (structure, "wmaversion", &wmaversion)) { g_warning ("invalid caps for audio/x-wma"); return; } @@ -855,32 +846,32 @@ gst_ffmpeg_caps_to_extradata (const GstCaps *caps, gst_structure_get_int (structure, "wmaversion", &wmaversion); switch (wmaversion) { case 1: - /* FIXME: is this freed with g_free? If not, don't use g_malloc */ - context->extradata = (guint8 *) g_malloc0 (4); - ((guint8 *)context->extradata)[0] = flags1; - ((guint8 *)context->extradata)[2] = flags2; - context->extradata_size = 4; - break; + /* FIXME: is this freed with g_free? 
If not, don't use g_malloc */ + context->extradata = (guint8 *) g_malloc0 (4); + ((guint8 *) context->extradata)[0] = flags1; + ((guint8 *) context->extradata)[2] = flags2; + context->extradata_size = 4; + break; case 2: - /* FIXME: is this freed with g_free? If not, don't use g_malloc */ - context->extradata = (guint8 *) g_malloc0 (6); - ((guint8 *) context->extradata)[0] = flags1; - ((guint8 *) context->extradata)[1] = flags1 >> 8; - ((guint8 *) context->extradata)[2] = flags1 >> 16; - ((guint8 *) context->extradata)[3] = flags1 >> 24; - ((guint8 *) context->extradata)[4] = flags2; - ((guint8 *) context->extradata)[5] = flags2 >> 8; - context->extradata_size = 6; - break; + /* FIXME: is this freed with g_free? If not, don't use g_malloc */ + context->extradata = (guint8 *) g_malloc0 (6); + ((guint8 *) context->extradata)[0] = flags1; + ((guint8 *) context->extradata)[1] = flags1 >> 8; + ((guint8 *) context->extradata)[2] = flags1 >> 16; + ((guint8 *) context->extradata)[3] = flags1 >> 24; + ((guint8 *) context->extradata)[4] = flags2; + ((guint8 *) context->extradata)[5] = flags2 >> 8; + context->extradata_size = 6; + break; default: - g_warning ("Unknown wma version %d\n", wmaversion); - break; + g_warning ("Unknown wma version %d\n", wmaversion); + break; } return; } - if (!strcmp(mimetype, "video/x-svq")) { + if (!strcmp (mimetype, "video/x-svq")) { gint version = 0; if (!gst_structure_get_int (structure, "svqversion", &version)) { @@ -894,11 +885,12 @@ gst_ffmpeg_caps_to_extradata (const GstCaps *caps, if (!gst_structure_get_int (structure, "halfpel_flag", &halfpel_flag) || !gst_structure_get_int (structure, "thirdpel_flag", &thirdpel_flag) || - !gst_structure_get_int (structure, "low_delay", &low_delay) || - !gst_structure_get_int (structure, "unknown_svq3_flag", &unknown_svq3_flag)) { - g_warning ("invalid caps for video/x-svq"); - return; - } + !gst_structure_get_int (structure, "low_delay", &low_delay) || + !gst_structure_get_int (structure, "unknown_svq3_flag", + &unknown_svq3_flag)) { + g_warning ("invalid caps for video/x-svq"); + return; + } context->extradata = (guint8 *) g_malloc0 (0x64); g_stpcpy (context->extradata, "SVQ3"); @@ -913,7 +905,7 @@ gst_ffmpeg_caps_to_extradata (const GstCaps *caps, flags = flags << 3; flags = GUINT16_FROM_LE (flags); - + memcpy (context->extradata + 0x62, &flags, 2); context->extradata_size = 0x64; } @@ -928,8 +920,7 @@ gst_ffmpeg_caps_to_extradata (const GstCaps *caps, */ static void -gst_ffmpeg_caps_to_smpfmt (const GstCaps *caps, - AVCodecContext *context) +gst_ffmpeg_caps_to_smpfmt (const GstCaps * caps, AVCodecContext * context) { GstStructure *structure; gint depth = 0, width = 0, endianness = 0; @@ -937,7 +928,7 @@ gst_ffmpeg_caps_to_smpfmt (const GstCaps *caps, g_return_if_fail (gst_caps_get_size (caps) == 1); structure = gst_caps_get_structure (caps, 0); - + if (gst_structure_get_int (structure, "width", &width) && gst_structure_get_int (structure, "depth", &depth) && gst_structure_get_int (structure, "signed", &signedness) && @@ -964,15 +955,14 @@ gst_ffmpeg_caps_to_smpfmt (const GstCaps *caps, */ static void -gst_ffmpeg_caps_to_pixfmt (const GstCaps *caps, - AVCodecContext *context) +gst_ffmpeg_caps_to_pixfmt (const GstCaps * caps, AVCodecContext * context) { GstStructure *structure; gdouble fps; - + g_return_if_fail (gst_caps_get_size (caps) == 1); structure = gst_caps_get_structure (caps, 0); - + gst_structure_get_int (structure, "width", &context->width); gst_structure_get_int (structure, "height", &context->height); @@ -983,37 
+973,38 @@ gst_ffmpeg_caps_to_pixfmt (const GstCaps *caps, if (strcmp (gst_structure_get_name (structure), "video/x-raw-yuv") == 0) { guint32 fourcc; - + if (gst_structure_get_fourcc (structure, "format", &fourcc)) { switch (fourcc) { - case GST_MAKE_FOURCC ('Y','U','Y','2'): - context->pix_fmt = PIX_FMT_YUV422; - break; - case GST_MAKE_FOURCC ('I','4','2','0'): - context->pix_fmt = PIX_FMT_YUV420P; - break; - case GST_MAKE_FOURCC ('Y','4','1','B'): - context->pix_fmt = PIX_FMT_YUV411P; - break; - case GST_MAKE_FOURCC ('Y','4','2','B'): - context->pix_fmt = PIX_FMT_YUV422P; - break; - case GST_MAKE_FOURCC ('Y','U','V','9'): - context->pix_fmt = PIX_FMT_YUV410P; - break; + case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'): + context->pix_fmt = PIX_FMT_YUV422; + break; + case GST_MAKE_FOURCC ('I', '4', '2', '0'): + context->pix_fmt = PIX_FMT_YUV420P; + break; + case GST_MAKE_FOURCC ('Y', '4', '1', 'B'): + context->pix_fmt = PIX_FMT_YUV411P; + break; + case GST_MAKE_FOURCC ('Y', '4', '2', 'B'): + context->pix_fmt = PIX_FMT_YUV422P; + break; + case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'): + context->pix_fmt = PIX_FMT_YUV410P; + break; #if 0 - case FIXME: - context->pix_fmt = PIX_FMT_YUV444P; - break; + case FIXME: + context->pix_fmt = PIX_FMT_YUV444P; + break; #endif } } - } else if (strcmp (gst_structure_get_name (structure), "video/x-raw-rgb") == 0) { + } else if (strcmp (gst_structure_get_name (structure), + "video/x-raw-rgb") == 0) { gint bpp = 0, rmask = 0, endianness = 0; - + if (gst_structure_get_int (structure, "bpp", &bpp) && - gst_structure_get_int (structure, "endianness", &endianness) && - gst_structure_get_int (structure, "red_mask", &rmask)) { + gst_structure_get_int (structure, "endianness", &endianness) && + gst_structure_get_int (structure, "red_mask", &rmask)) { switch (bpp) { case 32: #if (G_BYTE_ORDER == G_BIG_ENDIAN) @@ -1028,18 +1019,18 @@ gst_ffmpeg_caps_to_pixfmt (const GstCaps *caps, context->pix_fmt = PIX_FMT_BGR24; else context->pix_fmt = PIX_FMT_RGB24; - break; + break; case 16: if (endianness == G_BYTE_ORDER) context->pix_fmt = PIX_FMT_RGB565; - break; + break; case 15: if (endianness == G_BYTE_ORDER) context->pix_fmt = PIX_FMT_RGB555; - break; + break; default: /* nothing */ - break; + break; } } } @@ -1055,9 +1046,8 @@ gst_ffmpeg_caps_to_pixfmt (const GstCaps *caps, */ void -gst_ffmpeg_caps_to_codectype (enum CodecType type, - const GstCaps *caps, - AVCodecContext *context) +gst_ffmpeg_caps_to_codectype (enum CodecType type, + const GstCaps * caps, AVCodecContext * context) { if (context == NULL) return; @@ -1088,50 +1078,38 @@ gst_ffmpeg_caps_to_codectype (enum CodecType type, */ GstCaps * -gst_ffmpeg_formatid_to_caps (const gchar *format_name) +gst_ffmpeg_formatid_to_caps (const gchar * format_name) { GstCaps *caps = NULL; if (!strcmp (format_name, "mpeg")) { caps = gst_caps_new_simple ("video/mpeg", - "systemstream", G_TYPE_BOOLEAN, TRUE, - NULL); + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); } else if (!strcmp (format_name, "mpegts")) { caps = gst_caps_new_simple ("video/mpegts", - "systemstream", G_TYPE_BOOLEAN, TRUE, - NULL); + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); } else if (!strcmp (format_name, "rm")) { caps = gst_caps_new_simple ("application/x-pn-realmedia", - "systemstream", G_TYPE_BOOLEAN, TRUE, - NULL); + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); } else if (!strcmp (format_name, "asf")) { - caps = gst_caps_new_simple ("video/x-ms-asf", - NULL); + caps = gst_caps_new_simple ("video/x-ms-asf", NULL); } else if (!strcmp (format_name, "avi")) { - caps = 
gst_caps_new_simple ("video/x-msvideo", - NULL); + caps = gst_caps_new_simple ("video/x-msvideo", NULL); } else if (!strcmp (format_name, "wav")) { - caps = gst_caps_new_simple ("video/x-wav", - NULL); + caps = gst_caps_new_simple ("video/x-wav", NULL); } else if (!strcmp (format_name, "swf")) { - caps = gst_caps_new_simple ("application/x-shockwave-flash", - NULL); + caps = gst_caps_new_simple ("application/x-shockwave-flash", NULL); } else if (!strcmp (format_name, "au")) { - caps = gst_caps_new_simple ("audio/x-au", - NULL); + caps = gst_caps_new_simple ("audio/x-au", NULL); } else if (!strcmp (format_name, "mov_mp4_m4a_3gp")) { - caps = gst_caps_new_simple ("video/quicktime", - NULL); + caps = gst_caps_new_simple ("video/quicktime", NULL); } else if (!strcmp (format_name, "dv")) { caps = gst_caps_new_simple ("video/x-dv", - "systemstream", G_TYPE_BOOLEAN, TRUE, - NULL); + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); } else if (!strcmp (format_name, "4xm")) { - caps = gst_caps_new_simple ("video/x-4xm", - NULL); + caps = gst_caps_new_simple ("video/x-4xm", NULL); } else if (!strcmp (format_name, "matroska")) { - caps = gst_caps_new_simple ("video/x-matroska", - NULL); + caps = gst_caps_new_simple ("video/x-matroska", NULL); } else { gchar *name; @@ -1154,18 +1132,17 @@ gst_ffmpeg_formatid_to_caps (const gchar *format_name) */ enum CodecID -gst_ffmpeg_caps_to_codecid (const GstCaps *caps, - AVCodecContext *context) +gst_ffmpeg_caps_to_codecid (const GstCaps * caps, AVCodecContext * context) { enum CodecID id = CODEC_ID_NONE; const gchar *mimetype; const GstStructure *structure; - gboolean video = FALSE, audio = FALSE; /* we want to be sure! */ + gboolean video = FALSE, audio = FALSE; /* we want to be sure! */ g_return_val_if_fail (caps != NULL, CODEC_ID_NONE); g_return_val_if_fail (gst_caps_get_size (caps) == 1, CODEC_ID_NONE); structure = gst_caps_get_structure (caps, 0); - + mimetype = gst_structure_get_name (structure); if (!strcmp (mimetype, "video/x-raw-rgb")) { @@ -1176,10 +1153,10 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, gint bpp = 0, endianness = 0, rmask = 0; enum PixelFormat pix_fmt = -1; - gst_structure_get_int (structure, "bpp", &bpp); + gst_structure_get_int (structure, "bpp", &bpp); gst_structure_get_int (structure, "endianness", &endianness); - gst_structure_get_int (structure, "rmask", &rmask); - + gst_structure_get_int (structure, "rmask", &rmask); + switch (bpp) { case 15: if (endianness == G_BYTE_ORDER) { @@ -1232,19 +1209,19 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, gst_structure_get_fourcc (structure, "format", &fmt_fcc); switch (fmt_fcc) { - case GST_MAKE_FOURCC ('Y','U','Y','2'): + case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'): pix_fmt = PIX_FMT_YUV422; break; - case GST_MAKE_FOURCC ('I','4','2','0'): + case GST_MAKE_FOURCC ('I', '4', '2', '0'): pix_fmt = PIX_FMT_YUV420P; break; - case GST_MAKE_FOURCC ('Y','4','1','B'): + case GST_MAKE_FOURCC ('Y', '4', '1', 'B'): pix_fmt = PIX_FMT_YUV411P; break; - case GST_MAKE_FOURCC ('Y','4','2','B'): + case GST_MAKE_FOURCC ('Y', '4', '2', 'B'): pix_fmt = PIX_FMT_YUV422P; break; - case GST_MAKE_FOURCC ('Y','U','V','9'): + case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'): pix_fmt = PIX_FMT_YUV410P; break; default: @@ -1264,7 +1241,7 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, } else if (!strcmp (mimetype, "audio/x-raw-int")) { gint depth = 0, width = 0, endianness = 0; - gboolean signedness = FALSE; /* bla default value */ + gboolean signedness = FALSE; /* bla default value */ gst_structure_get_int (structure, 
"endianness", &endianness); gst_structure_get_boolean (structure, "signed", &signedness); @@ -1280,67 +1257,68 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, if (depth == width) { switch (depth) { - case 8: - if (signedness) { - id = CODEC_ID_PCM_S8; - } else { - id = CODEC_ID_PCM_U8; - } - break; - case 16: - switch (endianness) { - case G_BIG_ENDIAN: - if (signedness) { - id = CODEC_ID_PCM_S16BE; - } else { - id = CODEC_ID_PCM_U16BE; - } - break; - case G_LITTLE_ENDIAN: - if (signedness) { - id = CODEC_ID_PCM_S16LE; - } else { - id = CODEC_ID_PCM_U16LE; - } - break; - } - break; + case 8: + if (signedness) { + id = CODEC_ID_PCM_S8; + } else { + id = CODEC_ID_PCM_U8; + } + break; + case 16: + switch (endianness) { + case G_BIG_ENDIAN: + if (signedness) { + id = CODEC_ID_PCM_S16BE; + } else { + id = CODEC_ID_PCM_U16BE; + } + break; + case G_LITTLE_ENDIAN: + if (signedness) { + id = CODEC_ID_PCM_S16LE; + } else { + id = CODEC_ID_PCM_U16LE; + } + break; + } + break; } if (id != CODEC_ID_NONE) { - audio = TRUE; + audio = TRUE; } } - } else if (!strcmp(mimetype, "audio/x-mulaw")) { + } else if (!strcmp (mimetype, "audio/x-mulaw")) { id = CODEC_ID_PCM_MULAW; audio = TRUE; - } else if (!strcmp(mimetype, "audio/x-alaw")) { + } else if (!strcmp (mimetype, "audio/x-alaw")) { id = CODEC_ID_PCM_ALAW; audio = TRUE; - } else if (!strcmp(mimetype, "video/x-dv")) { + } else if (!strcmp (mimetype, "video/x-dv")) { id = CODEC_ID_DVVIDEO; video = TRUE; - } else if (!strcmp(mimetype, "audio/x-dv")) { /* ??? */ + } else if (!strcmp (mimetype, "audio/x-dv")) { /* ??? */ id = CODEC_ID_DVAUDIO; audio = TRUE; - } else if (!strcmp(mimetype, "video/x-h263")) { + } else if (!strcmp (mimetype, "video/x-h263")) { - id = CODEC_ID_H263; /* or H263[IP] */ + id = CODEC_ID_H263; /* or H263[IP] */ video = TRUE; - } else if (!strcmp(mimetype, "video/mpeg")) { + } else if (!strcmp (mimetype, "video/mpeg")) { gboolean sys_strm = TRUE; gint mpegversion = 0; + gst_structure_get_boolean (structure, "systemstream", &sys_strm); gst_structure_get_int (structure, "mpegversion", &mpegversion); if (!sys_strm) { @@ -1354,7 +1332,7 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, case 4: id = CODEC_ID_MPEG4; if (context) { - context->codec_tag = GST_MAKE_FOURCC ('m','p','4','v'); + context->codec_tag = GST_MAKE_FOURCC ('m', 'p', '4', 'v'); } break; default: @@ -1367,43 +1345,43 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, video = TRUE; } - } else if (!strcmp(mimetype, "video/x-jpeg")) { + } else if (!strcmp (mimetype, "video/x-jpeg")) { - id = CODEC_ID_MJPEG; /* A... B... */ + id = CODEC_ID_MJPEG; /* A... B... */ video = TRUE; - } else if (!strcmp(mimetype, "video/x-wmv")) { + } else if (!strcmp (mimetype, "video/x-wmv")) { gint wmvversion = 0; gst_structure_get_int (structure, "wmvversion", &wmvversion); switch (wmvversion) { case 1: - id = CODEC_ID_WMV1; - break; + id = CODEC_ID_WMV1; + break; case 2: - id = CODEC_ID_WMV2; - break; + id = CODEC_ID_WMV2; + break; default: - /* ... */ - break; + /* ... */ + break; } if (id != CODEC_ID_NONE) { video = TRUE; } - } else if (!strcmp(mimetype, "application/ogg")) { + } else if (!strcmp (mimetype, "application/ogg")) { id = CODEC_ID_VORBIS; - } else if (!strcmp(mimetype, "audio/mpeg")) { + } else if (!strcmp (mimetype, "audio/mpeg")) { gint layer = 0; gint mpegversion = 0; if (gst_structure_get_int (structure, "mpegversion", &mpegversion)) { switch (mpegversion) { - case 2: /* ffmpeg uses faad for both... */ + case 2: /* ffmpeg uses faad for both... 
*/ case 4: id = CODEC_ID_MPEG4AAC; break; @@ -1432,70 +1410,70 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, audio = TRUE; } - } else if (!strcmp(mimetype, "audio/x-wma")) { + } else if (!strcmp (mimetype, "audio/x-wma")) { gint wmaversion = 0; gst_structure_get_int (structure, "wmaversion", &wmaversion); switch (wmaversion) { case 1: - id = CODEC_ID_WMAV1; - break; + id = CODEC_ID_WMAV1; + break; case 2: - id = CODEC_ID_WMAV2; - break; + id = CODEC_ID_WMAV2; + break; default: - /* ... */ - break; + /* ... */ + break; } if (id != CODEC_ID_NONE) { audio = TRUE; } - } else if (!strcmp(mimetype, "audio/x-ac3")) { + } else if (!strcmp (mimetype, "audio/x-ac3")) { id = CODEC_ID_AC3; - } else if (!strcmp(mimetype, "video/x-msmpeg")) { + } else if (!strcmp (mimetype, "video/x-msmpeg")) { gint msmpegversion = 0; gst_structure_get_int (structure, "msmpegversion", &msmpegversion); switch (msmpegversion) { case 41: - id = CODEC_ID_MSMPEG4V1; - break; + id = CODEC_ID_MSMPEG4V1; + break; case 42: - id = CODEC_ID_MSMPEG4V2; - break; + id = CODEC_ID_MSMPEG4V2; + break; case 43: - id = CODEC_ID_MSMPEG4V3; - break; + id = CODEC_ID_MSMPEG4V3; + break; default: - /* ... */ - break; + /* ... */ + break; } if (id != CODEC_ID_NONE) { video = TRUE; } - } else if (!strcmp(mimetype, "video/x-svq")) { + } else if (!strcmp (mimetype, "video/x-svq")) { gint svqversion = 0; gst_structure_get_int (structure, "svqversion", &svqversion); switch (svqversion) { case 1: - id = CODEC_ID_SVQ1; - break; + id = CODEC_ID_SVQ1; + break; case 3: - id = CODEC_ID_SVQ3; - break; + id = CODEC_ID_SVQ3; + break; default: - /* ... */ - break; + /* ... */ + break; } if (id != CODEC_ID_NONE) { @@ -1513,14 +1491,14 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, gst_structure_get_int (structure, "maceversion", &maceversion); switch (maceversion) { case 3: - id = CODEC_ID_MACE3; - break; + id = CODEC_ID_MACE3; + break; case 6: - id = CODEC_ID_MACE6; - break; + id = CODEC_ID_MACE6; + break; default: - /* ... */ - break; + /* ... */ + break; } if (id != CODEC_ID_NONE) { @@ -1543,11 +1521,11 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, gst_structure_get_int (structure, "indeoversion", &indeoversion); switch (indeoversion) { case 3: - id = CODEC_ID_INDEO3; - break; + id = CODEC_ID_INDEO3; + break; default: - /* ... */ - break; + /* ... */ + break; } if (id != CODEC_ID_NONE) { @@ -1560,19 +1538,19 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, gst_structure_get_int (structure, "divxversion", &divxversion); switch (divxversion) { case 3: - id = CODEC_ID_MSMPEG4V3; - break; + id = CODEC_ID_MSMPEG4V3; + break; case 4: case 5: - id = CODEC_ID_MPEG4; - break; + id = CODEC_ID_MPEG4; + break; default: - /* ... */ - break; + /* ... 
*/ + break; } if (context) { - context->codec_tag = GST_MAKE_FOURCC ('D','I','V','X'); + context->codec_tag = GST_MAKE_FOURCC ('D', 'I', 'V', 'X'); } if (id != CODEC_ID_NONE) { @@ -1585,7 +1563,7 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, video = TRUE; if (context) { - context->codec_tag = GST_MAKE_FOURCC ('3','I','V','X'); + context->codec_tag = GST_MAKE_FOURCC ('3', 'I', 'V', 'X'); } } else if (!strcmp (mimetype, "video/x-xvid")) { @@ -1594,7 +1572,7 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, video = TRUE; if (context) { - context->codec_tag = GST_MAKE_FOURCC ('X','V','I','D'); + context->codec_tag = GST_MAKE_FOURCC ('X', 'V', 'I', 'D'); } } else if (!strcmp (mimetype, "video/x-ffv")) { @@ -1603,11 +1581,11 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, gst_structure_get_int (structure, "ffvversion", &ffvversion); switch (ffvversion) { case 1: - id = CODEC_ID_FFV1; - break; + id = CODEC_ID_FFV1; + break; default: - /* ... */ - break; + /* ... */ + break; } if (id != CODEC_ID_NONE) { @@ -1649,7 +1627,7 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, if (id != CODEC_ID_NONE) { audio = TRUE; } - + } else if (!strcmp (mimetype, "video/x-4xm")) { id = CODEC_ID_4XM; @@ -1707,9 +1685,9 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, } else if (!strcmp (mimetype, "audio/x-pn-realaudio")) { gint raversion; - + gst_structure_get_int (structure, "raversion", &raversion); - + switch (raversion) { case 1: id = CODEC_ID_RA_144; @@ -1721,13 +1699,13 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, /* .. */ break; } - + if (id != CODEC_ID_NONE) { audio = TRUE; } } else if (!strncmp (mimetype, "audio/x-gst_ff-", 15) || - !strncmp (mimetype, "video/x-gst_ff-", 15)) { + !strncmp (mimetype, "video/x-gst_ff-", 15)) { gchar ext[16]; AVCodec *codec; @@ -1760,8 +1738,9 @@ gst_ffmpeg_caps_to_codecid (const GstCaps *caps, if (id != CODEC_ID_NONE) { char *str = gst_caps_to_string (caps); + GST_DEBUG ("The id=%d belongs to the caps %s", id, str); - g_free(str); + g_free (str); } return id; diff --git a/ext/ffmpeg/gstffmpegcolorspace.c b/ext/ffmpeg/gstffmpegcolorspace.c index ff00f79..ae4772a 100644 --- a/ext/ffmpeg/gstffmpegcolorspace.c +++ b/ext/ffmpeg/gstffmpegcolorspace.c @@ -46,22 +46,21 @@ typedef struct _GstFFMpegCsp GstFFMpegCsp; typedef struct _GstFFMpegCspClass GstFFMpegCspClass; -struct _GstFFMpegCsp { - GstElement element; - - GstPad *sinkpad, *srcpad; - - gint width, height; - gfloat fps; - enum PixelFormat - from_pixfmt, - to_pixfmt; - AVFrame *from_frame, - *to_frame; - GstCaps *sinkcaps; +struct _GstFFMpegCsp +{ + GstElement element; + + GstPad *sinkpad, *srcpad; + + gint width, height; + gfloat fps; + enum PixelFormat from_pixfmt, to_pixfmt; + AVFrame *from_frame, *to_frame; + GstCaps *sinkcaps; }; -struct _GstFFMpegCspClass { +struct _GstFFMpegCspClass +{ GstElementClass parent_class; }; @@ -75,72 +74,68 @@ static GstElementDetails ffmpegcsp_details = { /* Stereo signals and args */ -enum { +enum +{ /* FILL ME */ LAST_SIGNAL }; -enum { +enum +{ ARG_0, }; -static GType gst_ffmpegcsp_get_type (void); +static GType gst_ffmpegcsp_get_type (void); -static void gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass); -static void gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass); -static void gst_ffmpegcsp_init (GstFFMpegCsp *space); +static void gst_ffmpegcsp_base_init (GstFFMpegCspClass * klass); +static void gst_ffmpegcsp_class_init (GstFFMpegCspClass * klass); +static void gst_ffmpegcsp_init (GstFFMpegCsp * space); -static void gst_ffmpegcsp_set_property (GObject 
*object, - guint prop_id, - const GValue *value, - GParamSpec *pspec); -static void gst_ffmpegcsp_get_property (GObject *object, - guint prop_id, - GValue *value, - GParamSpec *pspec); +static void gst_ffmpegcsp_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_ffmpegcsp_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); static GstPadLinkReturn - gst_ffmpegcsp_pad_link (GstPad *pad, - const GstCaps *caps); +gst_ffmpegcsp_pad_link (GstPad * pad, const GstCaps * caps); -static void gst_ffmpegcsp_chain (GstPad *pad, - GstData *data); -static GstElementStateReturn - gst_ffmpegcsp_change_state (GstElement *element); +static void gst_ffmpegcsp_chain (GstPad * pad, GstData * data); +static GstElementStateReturn gst_ffmpegcsp_change_state (GstElement * element); static GstPadTemplate *srctempl, *sinktempl; static GstElementClass *parent_class = NULL; + /*static guint gst_ffmpegcsp_signals[LAST_SIGNAL] = { 0 }; */ static GstCaps * -gst_ffmpegcsp_caps_remove_format_info (GstCaps *caps) +gst_ffmpegcsp_caps_remove_format_info (GstCaps * caps) { int i; GstStructure *structure; GstCaps *rgbcaps; - for(i=0;isrcpad) ? space->sinkpad : space->srcpad; @@ -171,8 +166,7 @@ gst_ffmpegcsp_getcaps (GstPad *pad) } static GstPadLinkReturn -gst_ffmpegcsp_pad_link (GstPad *pad, - const GstCaps *caps) +gst_ffmpegcsp_pad_link (GstPad * pad, const GstCaps * caps) { GstStructure *structure; AVCodecContext *ctx; @@ -223,8 +217,7 @@ gst_ffmpegcsp_pad_link (GstPad *pad, gst_caps_set_simple (caps, "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, - "framerate", G_TYPE_DOUBLE, framerate, - NULL); + "framerate", G_TYPE_DOUBLE, framerate, NULL); ret = gst_pad_try_set_caps (otherpad, caps); if (GST_PAD_LINK_FAILED (ret)) { return ret; @@ -270,15 +263,14 @@ gst_ffmpegcsp_get_type (void) }; ffmpegcsp_type = g_type_register_static (GST_TYPE_ELEMENT, - "GstFFMpegCsp", - &ffmpegcsp_info, 0); + "GstFFMpegCsp", &ffmpegcsp_info, 0); } return ffmpegcsp_type; } static void -gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass) +gst_ffmpegcsp_base_init (GstFFMpegCspClass * klass) { GstElementClass *element_class = GST_ELEMENT_CLASS (klass); @@ -288,13 +280,13 @@ gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass) } static void -gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass) +gst_ffmpegcsp_class_init (GstFFMpegCspClass * klass) { GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass*) klass; - gstelement_class = (GstElementClass*) klass; + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; parent_class = g_type_class_ref (GST_TYPE_ELEMENT); @@ -305,13 +297,13 @@ gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass) } static void -gst_ffmpegcsp_init (GstFFMpegCsp *space) +gst_ffmpegcsp_init (GstFFMpegCsp * space) { space->sinkpad = gst_pad_new_from_template (sinktempl, "sink"); gst_pad_set_link_function (space->sinkpad, gst_ffmpegcsp_pad_link); gst_pad_set_getcaps_function (space->sinkpad, gst_ffmpegcsp_getcaps); - gst_pad_set_chain_function (space->sinkpad,gst_ffmpegcsp_chain); - gst_element_add_pad (GST_ELEMENT(space), space->sinkpad); + gst_pad_set_chain_function (space->sinkpad, gst_ffmpegcsp_chain); + gst_element_add_pad (GST_ELEMENT (space), space->sinkpad); space->srcpad = gst_pad_new_from_template (srctempl, "src"); gst_element_add_pad (GST_ELEMENT (space), space->srcpad); @@ -323,8 +315,7 @@ gst_ffmpegcsp_init (GstFFMpegCsp *space) } static void 
-gst_ffmpegcsp_chain (GstPad *pad, - GstData *data) +gst_ffmpegcsp_chain (GstPad * pad, GstData * data) { GstBuffer *inbuf = GST_BUFFER (data); GstFFMpegCsp *space; @@ -335,14 +326,13 @@ gst_ffmpegcsp_chain (GstPad *pad, g_return_if_fail (inbuf != NULL); space = GST_FFMPEGCSP (gst_pad_get_parent (pad)); - + g_return_if_fail (space != NULL); g_return_if_fail (GST_IS_FFMPEGCSP (space)); - if (space->from_pixfmt == PIX_FMT_NB || - space->to_pixfmt == PIX_FMT_NB) { + if (space->from_pixfmt == PIX_FMT_NB || space->to_pixfmt == PIX_FMT_NB) { GST_ELEMENT_ERROR (space, CORE, NOT_IMPLEMENTED, NULL, - ("attempting to convert colorspaces between unknown formats")); + ("attempting to convert colorspaces between unknown formats")); gst_buffer_unref (inbuf); return; } @@ -352,19 +342,19 @@ gst_ffmpegcsp_chain (GstPad *pad, } else { /* use bufferpool here */ guint size = avpicture_get_size (space->to_pixfmt, - space->width, - space->height); - outbuf = gst_pad_alloc_buffer (space->srcpad, - GST_BUFFER_OFFSET_NONE, size); + space->width, + space->height); + + outbuf = gst_pad_alloc_buffer (space->srcpad, GST_BUFFER_OFFSET_NONE, size); /* convert */ avpicture_fill ((AVPicture *) space->from_frame, GST_BUFFER_DATA (inbuf), - space->from_pixfmt, space->width, space->height); + space->from_pixfmt, space->width, space->height); avpicture_fill ((AVPicture *) space->to_frame, GST_BUFFER_DATA (outbuf), - space->to_pixfmt, space->width, space->height); + space->to_pixfmt, space->width, space->height); img_convert ((AVPicture *) space->to_frame, space->to_pixfmt, - (AVPicture *) space->from_frame, space->from_pixfmt, - space->width, space->height); + (AVPicture *) space->from_frame, space->from_pixfmt, + space->width, space->height); GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf); GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf); @@ -376,7 +366,7 @@ gst_ffmpegcsp_chain (GstPad *pad, } static GstElementStateReturn -gst_ffmpegcsp_change_state (GstElement *element) +gst_ffmpegcsp_change_state (GstElement * element) { GstFFMpegCsp *space; @@ -400,10 +390,8 @@ gst_ffmpegcsp_change_state (GstElement *element) } static void -gst_ffmpegcsp_set_property (GObject *object, - guint prop_id, - const GValue *value, - GParamSpec *pspec) +gst_ffmpegcsp_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) { GstFFMpegCsp *space; @@ -418,10 +406,8 @@ gst_ffmpegcsp_set_property (GObject *object, } static void -gst_ffmpegcsp_get_property (GObject *object, - guint prop_id, - GValue *value, - GParamSpec *pspec) +gst_ffmpegcsp_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) { GstFFMpegCsp *space; @@ -437,7 +423,7 @@ gst_ffmpegcsp_get_property (GObject *object, } gboolean -gst_ffmpegcsp_register (GstPlugin *plugin) +gst_ffmpegcsp_register (GstPlugin * plugin) { GstCaps *caps; @@ -445,15 +431,10 @@ gst_ffmpegcsp_register (GstPlugin *plugin) caps = gst_ffmpeg_codectype_to_caps (CODEC_TYPE_VIDEO, NULL); /* build templates */ - srctempl = gst_pad_template_new ("src", - GST_PAD_SRC, - GST_PAD_ALWAYS, - gst_caps_copy (caps)); - sinktempl = gst_pad_template_new ("sink", - GST_PAD_SINK, - GST_PAD_ALWAYS, - caps); + srctempl = gst_pad_template_new ("src", + GST_PAD_SRC, GST_PAD_ALWAYS, gst_caps_copy (caps)); + sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps); return gst_element_register (plugin, "ffcolorspace", - GST_RANK_NONE, GST_TYPE_FFMPEGCSP); + GST_RANK_NONE, GST_TYPE_FFMPEGCSP); } diff --git 
a/ext/ffmpeg/gstffmpegdec.c b/ext/ffmpeg/gstffmpegdec.c index 42da799..6726287 100644 --- a/ext/ffmpeg/gstffmpegdec.c +++ b/ext/ffmpeg/gstffmpegdec.c @@ -36,7 +36,8 @@ typedef struct _GstFFMpegDec GstFFMpegDec; -struct _GstFFMpegDec { +struct _GstFFMpegDec +{ GstElement element; /* We need to keep track of our pads, so we do so here. */ @@ -50,7 +51,8 @@ struct _GstFFMpegDec { typedef struct _GstFFMpegDecClass GstFFMpegDecClass; -struct _GstFFMpegDecClass { +struct _GstFFMpegDecClass +{ GstElementClass parent_class; AVCodec *in_plugin; @@ -59,7 +61,8 @@ struct _GstFFMpegDecClass { typedef struct _GstFFMpegDecClassParams GstFFMpegDecClassParams; -struct _GstFFMpegDecClassParams { +struct _GstFFMpegDecClassParams +{ AVCodec *in_plugin; GstCaps *srccaps, *sinkcaps; }; @@ -75,12 +78,14 @@ struct _GstFFMpegDecClassParams { #define GST_IS_FFMPEGDEC_CLASS(obj) \ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGDEC)) -enum { +enum +{ /* FILL ME */ LAST_SIGNAL }; -enum { +enum +{ ARG_0, /* FILL ME */ }; @@ -88,25 +93,23 @@ enum { static GHashTable *global_plugins; /* A number of functon prototypes are given so we can refer to them later. */ -static void gst_ffmpegdec_base_init (GstFFMpegDecClass *klass); -static void gst_ffmpegdec_class_init (GstFFMpegDecClass *klass); -static void gst_ffmpegdec_init (GstFFMpegDec *ffmpegdec); -static void gst_ffmpegdec_dispose (GObject *object); +static void gst_ffmpegdec_base_init (GstFFMpegDecClass * klass); +static void gst_ffmpegdec_class_init (GstFFMpegDecClass * klass); +static void gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec); +static void gst_ffmpegdec_dispose (GObject * object); -static GstPadLinkReturn gst_ffmpegdec_connect (GstPad *pad, - const GstCaps *caps); -static void gst_ffmpegdec_chain (GstPad *pad, - GstData *data); +static GstPadLinkReturn gst_ffmpegdec_connect (GstPad * pad, + const GstCaps * caps); +static void gst_ffmpegdec_chain (GstPad * pad, GstData * data); -static GstElementStateReturn - gst_ffmpegdec_change_state (GstElement *element); +static GstElementStateReturn gst_ffmpegdec_change_state (GstElement * element); #if 0 /* some sort of bufferpool handling, but different */ -static int gst_ffmpegdec_get_buffer (AVCodecContext *context, - AVFrame *picture); -static void gst_ffmpegdec_release_buffer (AVCodecContext *context, - AVFrame *picture); +static int gst_ffmpegdec_get_buffer (AVCodecContext * context, + AVFrame * picture); +static void gst_ffmpegdec_release_buffer (AVCodecContext * context, + AVFrame * picture); #endif static GstElementClass *parent_class = NULL; @@ -114,7 +117,7 @@ static GstElementClass *parent_class = NULL; /*static guint gst_ffmpegdec_signals[LAST_SIGNAL] = { 0 }; */ static void -gst_ffmpegdec_base_init (GstFFMpegDecClass *klass) +gst_ffmpegdec_base_init (GstFFMpegDecClass * klass) { GObjectClass *gobject_class = G_OBJECT_CLASS (klass); GstElementClass *element_class = GST_ELEMENT_CLASS (klass); @@ -123,22 +126,20 @@ gst_ffmpegdec_base_init (GstFFMpegDecClass *klass) GstPadTemplate *sinktempl, *srctempl; params = g_hash_table_lookup (global_plugins, - GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); + GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); if (!params) - params = g_hash_table_lookup (global_plugins, - GINT_TO_POINTER (0)); + params = g_hash_table_lookup (global_plugins, GINT_TO_POINTER (0)); g_assert (params); /* construct the element details struct */ - details.longname = g_strdup_printf("FFMPEG %s decoder", - params->in_plugin->name); - details.klass = 
g_strdup_printf("Codec/%s/Decoder", - (params->in_plugin->type == CODEC_TYPE_VIDEO) ? - "Video" : "Audio"); - details.description = g_strdup_printf("FFMPEG %s decoder", - params->in_plugin->name); + details.longname = g_strdup_printf ("FFMPEG %s decoder", + params->in_plugin->name); + details.klass = g_strdup_printf ("Codec/%s/Decoder", + (params->in_plugin->type == CODEC_TYPE_VIDEO) ? "Video" : "Audio"); + details.description = g_strdup_printf ("FFMPEG %s decoder", + params->in_plugin->name); details.author = "Wim Taymans , " - "Ronald Bultje "; + "Ronald Bultje "; gst_element_class_set_details (element_class, &details); g_free (details.longname); g_free (details.klass); @@ -146,9 +147,9 @@ gst_ffmpegdec_base_init (GstFFMpegDecClass *klass) /* pad templates */ sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK, - GST_PAD_ALWAYS, params->sinkcaps); + GST_PAD_ALWAYS, params->sinkcaps); srctempl = gst_pad_template_new ("src", GST_PAD_SRC, - GST_PAD_ALWAYS, params->srccaps); + GST_PAD_ALWAYS, params->srccaps); gst_element_class_add_pad_template (element_class, srctempl); gst_element_class_add_pad_template (element_class, sinktempl); @@ -159,13 +160,13 @@ gst_ffmpegdec_base_init (GstFFMpegDecClass *klass) } static void -gst_ffmpegdec_class_init (GstFFMpegDecClass *klass) +gst_ffmpegdec_class_init (GstFFMpegDecClass * klass) { GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass*)klass; - gstelement_class = (GstElementClass*)klass; + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; parent_class = g_type_class_peek_parent (klass); @@ -174,9 +175,10 @@ gst_ffmpegdec_class_init (GstFFMpegDecClass *klass) } static void -gst_ffmpegdec_init (GstFFMpegDec *ffmpegdec) +gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec) { - GstFFMpegDecClass *oclass = (GstFFMpegDecClass*)(G_OBJECT_GET_CLASS (ffmpegdec)); + GstFFMpegDecClass *oclass = + (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); /* setup pads */ ffmpegdec->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink"); @@ -189,14 +191,14 @@ gst_ffmpegdec_init (GstFFMpegDec *ffmpegdec) gst_element_add_pad (GST_ELEMENT (ffmpegdec), ffmpegdec->srcpad); /* some ffmpeg data */ - ffmpegdec->context = avcodec_alloc_context(); - ffmpegdec->picture = avcodec_alloc_frame(); + ffmpegdec->context = avcodec_alloc_context (); + ffmpegdec->picture = avcodec_alloc_frame (); ffmpegdec->opened = FALSE; } static void -gst_ffmpegdec_dispose (GObject *object) +gst_ffmpegdec_dispose (GObject * object) { GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) object; @@ -210,11 +212,11 @@ gst_ffmpegdec_dispose (GObject *object) } static GstPadLinkReturn -gst_ffmpegdec_connect (GstPad *pad, - const GstCaps *caps) +gst_ffmpegdec_connect (GstPad * pad, const GstCaps * caps) { - GstFFMpegDec *ffmpegdec = (GstFFMpegDec *)(gst_pad_get_parent (pad)); - GstFFMpegDecClass *oclass = (GstFFMpegDecClass*)(G_OBJECT_GET_CLASS (ffmpegdec)); + GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) (gst_pad_get_parent (pad)); + GstFFMpegDecClass *oclass = + (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); /* close old session */ if (ffmpegdec->opened) { @@ -233,13 +235,13 @@ gst_ffmpegdec_connect (GstPad *pad, /* get size and so */ gst_ffmpeg_caps_to_codectype (oclass->in_plugin->type, - caps, ffmpegdec->context); + caps, ffmpegdec->context); /* we dont send complete frames - FIXME: we need a 'framed' property * in caps */ if (oclass->in_plugin->capabilities & CODEC_CAP_TRUNCATED && (ffmpegdec->context->codec_id == 
CODEC_ID_MPEG1VIDEO || - ffmpegdec->context->codec_id == CODEC_ID_MPEG2VIDEO)) + ffmpegdec->context->codec_id == CODEC_ID_MPEG2VIDEO)) ffmpegdec->context->flags |= CODEC_FLAG_TRUNCATED; /* do *not* draw edges */ @@ -251,7 +253,7 @@ gst_ffmpegdec_connect (GstPad *pad, if (avcodec_open (ffmpegdec->context, oclass->in_plugin) < 0) { avcodec_close (ffmpegdec->context); GST_DEBUG ("ffdec_%s: Failed to open FFMPEG codec", - oclass->in_plugin->name); + oclass->in_plugin->name); return GST_PAD_LINK_REFUSED; } @@ -263,8 +265,7 @@ gst_ffmpegdec_connect (GstPad *pad, #if 0 static int -gst_ffmpegdec_get_buffer (AVCodecContext *context, - AVFrame *picture) +gst_ffmpegdec_get_buffer (AVCodecContext * context, AVFrame * picture) { GstBuffer *buf = NULL; gulong bufsize = 0; @@ -272,12 +273,10 @@ gst_ffmpegdec_get_buffer (AVCodecContext *context, switch (context->codec_type) { case CODEC_TYPE_VIDEO: bufsize = avpicture_get_size (context->pix_fmt, - context->width, - context->height); + context->width, context->height); buf = gst_buffer_new_and_alloc (bufsize); avpicture_fill ((AVPicture *) picture, GST_BUFFER_DATA (buf), - context->pix_fmt, - context->width, context->height); + context->pix_fmt, context->width, context->height); break; case CODEC_TYPE_AUDIO: @@ -301,15 +300,15 @@ gst_ffmpegdec_get_buffer (AVCodecContext *context, } static void -gst_ffmpegdec_release_buffer (AVCodecContext *context, - AVFrame *picture) +gst_ffmpegdec_release_buffer (AVCodecContext * context, AVFrame * picture) { gint i; GstBuffer *buf = GST_BUFFER (picture->base[0]); + gst_buffer_unref (buf); /* zero out the reference in ffmpeg */ - for (i=0;i<4;i++) { + for (i = 0; i < 4; i++) { picture->data[i] = NULL; picture->linesize[i] = 0; } @@ -317,21 +316,21 @@ gst_ffmpegdec_release_buffer (AVCodecContext *context, #endif static void -gst_ffmpegdec_chain (GstPad *pad, - GstData *_data) +gst_ffmpegdec_chain (GstPad * pad, GstData * _data) { GstBuffer *inbuf = GST_BUFFER (_data); GstBuffer *outbuf = NULL; - GstFFMpegDec *ffmpegdec = (GstFFMpegDec *)(gst_pad_get_parent (pad)); - GstFFMpegDecClass *oclass = (GstFFMpegDecClass*)(G_OBJECT_GET_CLASS (ffmpegdec)); + GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) (gst_pad_get_parent (pad)); + GstFFMpegDecClass *oclass = + (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); guchar *data; gint size, len = 0; gint have_data; if (!ffmpegdec->opened) { GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL), - ("ffdec_%s: input format was not set before data start", - oclass->in_plugin->name)); + ("ffdec_%s: input format was not set before data start", + oclass->in_plugin->name)); return; } @@ -348,38 +347,35 @@ gst_ffmpegdec_chain (GstPad *pad, switch (oclass->in_plugin->type) { case CODEC_TYPE_VIDEO: - /* workarounds, functions write to buffers: - * libavcodec/svq1.c:svq1_decode_frame writes to the given buffer. + /* workarounds, functions write to buffers: + * libavcodec/svq1.c:svq1_decode_frame writes to the given buffer. * libavcodec/svq3.c:svq3_decode_slice_header too. * ffmpeg devs know about it and will fix it (they said). 
*/ - if (oclass->in_plugin->id == CODEC_ID_SVQ1 || + if (oclass->in_plugin->id == CODEC_ID_SVQ1 || oclass->in_plugin->id == CODEC_ID_SVQ3) { - inbuf = gst_buffer_copy_on_write(inbuf); - data = GST_BUFFER_DATA (inbuf); - size = GST_BUFFER_SIZE (inbuf); - } + inbuf = gst_buffer_copy_on_write (inbuf); + data = GST_BUFFER_DATA (inbuf); + size = GST_BUFFER_SIZE (inbuf); + } len = avcodec_decode_video (ffmpegdec->context, - ffmpegdec->picture, - &have_data, - data, size); + ffmpegdec->picture, &have_data, data, size); if (have_data) { /* libavcodec constantly crashes on stupid buffer allocation * errors inside. This drives me crazy, so we let it allocate * it's own buffers and copy to our own buffer afterwards... */ AVPicture pic; gint size = avpicture_get_size (ffmpegdec->context->pix_fmt, - ffmpegdec->context->width, - ffmpegdec->context->height); + ffmpegdec->context->width, + ffmpegdec->context->height); + outbuf = gst_buffer_new_and_alloc (size); avpicture_fill (&pic, GST_BUFFER_DATA (outbuf), - ffmpegdec->context->pix_fmt, - ffmpegdec->context->width, - ffmpegdec->context->height); + ffmpegdec->context->pix_fmt, + ffmpegdec->context->width, ffmpegdec->context->height); img_convert (&pic, ffmpegdec->context->pix_fmt, - (AVPicture *) ffmpegdec->picture, - ffmpegdec->context->pix_fmt, - ffmpegdec->context->width, - ffmpegdec->context->height); + (AVPicture *) ffmpegdec->picture, + ffmpegdec->context->pix_fmt, + ffmpegdec->context->width, ffmpegdec->context->height); /* this isn't necessarily true, but it's better than nothing */ GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf); @@ -389,40 +385,38 @@ gst_ffmpegdec_chain (GstPad *pad, case CODEC_TYPE_AUDIO: outbuf = gst_buffer_new_and_alloc (AVCODEC_MAX_AUDIO_FRAME_SIZE); len = avcodec_decode_audio (ffmpegdec->context, - (int16_t *) GST_BUFFER_DATA (outbuf), - &have_data, - data, size); + (int16_t *) GST_BUFFER_DATA (outbuf), &have_data, data, size); if (have_data) { GST_BUFFER_SIZE (outbuf) = have_data; GST_BUFFER_DURATION (outbuf) = (have_data * GST_SECOND) / - (ffmpegdec->context->channels * - ffmpegdec->context->sample_rate); + (ffmpegdec->context->channels * ffmpegdec->context->sample_rate); } else { gst_buffer_unref (outbuf); - } + } break; default: - g_assert(0); + g_assert (0); break; } if (len < 0) { GST_ERROR_OBJECT (ffmpegdec, "ffdec_%s: decoding error", - oclass->in_plugin->name); + oclass->in_plugin->name); break; } if (have_data) { if (!GST_PAD_CAPS (ffmpegdec->srcpad)) { GstCaps *caps; + caps = gst_ffmpeg_codectype_to_caps (oclass->in_plugin->type, - ffmpegdec->context); + ffmpegdec->context); if (caps == NULL || !gst_pad_set_explicit_caps (ffmpegdec->srcpad, caps)) { GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL), - ("Failed to link ffmpeg decoder (%s) to next element", - oclass->in_plugin->name)); + ("Failed to link ffmpeg decoder (%s) to next element", + oclass->in_plugin->name)); return; } } @@ -430,7 +424,7 @@ gst_ffmpegdec_chain (GstPad *pad, GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf); gst_pad_push (ffmpegdec->srcpad, GST_DATA (outbuf)); - } + } size -= len; data += len; @@ -440,7 +434,7 @@ gst_ffmpegdec_chain (GstPad *pad, } static GstElementStateReturn -gst_ffmpegdec_change_state (GstElement *element) +gst_ffmpegdec_change_state (GstElement * element) { GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) element; gint transition = GST_STATE_TRANSITION (element); @@ -461,22 +455,22 @@ gst_ffmpegdec_change_state (GstElement *element) } gboolean -gst_ffmpegdec_register (GstPlugin *plugin) 
+gst_ffmpegdec_register (GstPlugin * plugin) { GTypeInfo typeinfo = { - sizeof(GstFFMpegDecClass), - (GBaseInitFunc)gst_ffmpegdec_base_init, + sizeof (GstFFMpegDecClass), + (GBaseInitFunc) gst_ffmpegdec_base_init, NULL, - (GClassInitFunc)gst_ffmpegdec_class_init, + (GClassInitFunc) gst_ffmpegdec_class_init, NULL, NULL, - sizeof(GstFFMpegDec), + sizeof (GstFFMpegDec), 0, - (GInstanceInitFunc)gst_ffmpegdec_init, + (GInstanceInitFunc) gst_ffmpegdec_init, }; GType type; AVCodec *in_plugin; - + in_plugin = first_avcodec; global_plugins = g_hash_table_new (NULL, NULL); @@ -488,8 +482,8 @@ gst_ffmpegdec_register (GstPlugin *plugin) /* no quasi-codecs, please */ if (in_plugin->id == CODEC_ID_RAWVIDEO || - (in_plugin->id >= CODEC_ID_PCM_S16LE && - in_plugin->id <= CODEC_ID_PCM_ALAW)) { + (in_plugin->id >= CODEC_ID_PCM_S16LE && + in_plugin->id <= CODEC_ID_PCM_ALAW)) { goto next; } @@ -500,16 +494,16 @@ gst_ffmpegdec_register (GstPlugin *plugin) /* first make sure we've got a supported type */ sinkcaps = gst_ffmpeg_codecid_to_caps (in_plugin->id, NULL, FALSE); - srccaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL); + srccaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL); if (!sinkcaps || !srccaps) goto next; /* construct the type */ - type_name = g_strdup_printf("ffdec_%s", in_plugin->name); + type_name = g_strdup_printf ("ffdec_%s", in_plugin->name); /* if it's already registered, drop it */ - if (g_type_from_name(type_name)) { - g_free(type_name); + if (g_type_from_name (type_name)) { + g_free (type_name); goto next; } @@ -517,28 +511,26 @@ gst_ffmpegdec_register (GstPlugin *plugin) params->in_plugin = in_plugin; params->srccaps = srccaps; params->sinkcaps = sinkcaps; - g_hash_table_insert (global_plugins, - GINT_TO_POINTER (0), - (gpointer) params); - + g_hash_table_insert (global_plugins, + GINT_TO_POINTER (0), (gpointer) params); + /* create the gtype now * (Ronald) MPEG-4 gets a higher priority because it has been well- * tested and by far outperforms divxdec/xviddec - so we prefer it. */ - type = g_type_register_static(GST_TYPE_ELEMENT, type_name , &typeinfo, 0); + type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0); if (!gst_element_register (plugin, type_name, - (in_plugin->id == CODEC_ID_MPEG4) ? - GST_RANK_PRIMARY : GST_RANK_MARGINAL, type)) { + (in_plugin->id == CODEC_ID_MPEG4) ? + GST_RANK_PRIMARY : GST_RANK_MARGINAL, type)) { g_free (type_name); return FALSE; } g_free (type_name); - g_hash_table_insert (global_plugins, - GINT_TO_POINTER (type), - (gpointer) params); + g_hash_table_insert (global_plugins, + GINT_TO_POINTER (type), (gpointer) params); -next: + next: in_plugin = in_plugin->next; } g_hash_table_remove (global_plugins, GINT_TO_POINTER (0)); diff --git a/ext/ffmpeg/gstffmpegdemux.c b/ext/ffmpeg/gstffmpegdemux.c index 1f4b55a..8b9a6a7 100644 --- a/ext/ffmpeg/gstffmpegdemux.c +++ b/ext/ffmpeg/gstffmpegdemux.c @@ -36,35 +36,38 @@ typedef struct _GstFFMpegDemux GstFFMpegDemux; -struct _GstFFMpegDemux { - GstElement element; +struct _GstFFMpegDemux +{ + GstElement element; /* We need to keep track of our pads, so we do so here. 
*/ - GstPad *sinkpad; + GstPad *sinkpad; - AVFormatContext *context; - gboolean opened; + AVFormatContext *context; + gboolean opened; - GstPad *srcpads[MAX_STREAMS]; - gboolean handled[MAX_STREAMS]; - guint64 last_ts[MAX_STREAMS]; - gint videopads, audiopads; + GstPad *srcpads[MAX_STREAMS]; + gboolean handled[MAX_STREAMS]; + guint64 last_ts[MAX_STREAMS]; + gint videopads, audiopads; }; -typedef struct _GstFFMpegDemuxClassParams { - AVInputFormat *in_plugin; - GstCaps *sinkcaps, *videosrccaps, *audiosrccaps; +typedef struct _GstFFMpegDemuxClassParams +{ + AVInputFormat *in_plugin; + GstCaps *sinkcaps, *videosrccaps, *audiosrccaps; } GstFFMpegDemuxClassParams; typedef struct _GstFFMpegDemuxClass GstFFMpegDemuxClass; -struct _GstFFMpegDemuxClass { - GstElementClass parent_class; +struct _GstFFMpegDemuxClass +{ + GstElementClass parent_class; - AVInputFormat *in_plugin; - GstPadTemplate *sinktempl; - GstPadTemplate *videosrctempl; - GstPadTemplate *audiosrctempl; + AVInputFormat *in_plugin; + GstPadTemplate *sinktempl; + GstPadTemplate *videosrctempl; + GstPadTemplate *audiosrctempl; }; #define GST_TYPE_FFMPEGDEC \ @@ -78,12 +81,14 @@ struct _GstFFMpegDemuxClass { #define GST_IS_FFMPEGDEC_CLASS(obj) \ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGDEC)) -enum { +enum +{ /* FILL ME */ LAST_SIGNAL }; -enum { +enum +{ ARG_0, /* FILL ME */ }; @@ -91,14 +96,14 @@ enum { static GHashTable *global_plugins; /* A number of functon prototypes are given so we can refer to them later. */ -static void gst_ffmpegdemux_class_init (GstFFMpegDemuxClass *klass); -static void gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass); -static void gst_ffmpegdemux_init (GstFFMpegDemux *demux); +static void gst_ffmpegdemux_class_init (GstFFMpegDemuxClass * klass); +static void gst_ffmpegdemux_base_init (GstFFMpegDemuxClass * klass); +static void gst_ffmpegdemux_init (GstFFMpegDemux * demux); -static void gst_ffmpegdemux_loop (GstElement *element); +static void gst_ffmpegdemux_loop (GstElement * element); static GstElementStateReturn - gst_ffmpegdemux_change_state (GstElement *element); +gst_ffmpegdemux_change_state (GstElement * element); static GstElementClass *parent_class = NULL; @@ -138,7 +143,7 @@ gst_ffmpegdemux_averror (gint av_errno) } static void -gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass) +gst_ffmpegdemux_base_init (GstFFMpegDemuxClass * klass) { GObjectClass *gobject_class = G_OBJECT_CLASS (klass); GstElementClass *element_class = GST_ELEMENT_CLASS (klass); @@ -147,37 +152,30 @@ gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass) GstPadTemplate *sinktempl, *audiosrctempl, *videosrctempl; params = g_hash_table_lookup (global_plugins, - GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); + GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); if (!params) - params = g_hash_table_lookup (global_plugins, - GINT_TO_POINTER (0)); + params = g_hash_table_lookup (global_plugins, GINT_TO_POINTER (0)); g_assert (params); /* construct the element details struct */ - details.longname = g_strdup_printf("FFMPEG %s demuxer", - params->in_plugin->long_name); + details.longname = g_strdup_printf ("FFMPEG %s demuxer", + params->in_plugin->long_name); details.klass = "Codec/Demuxer"; - details.description = g_strdup_printf("FFMPEG %s decoder", - params->in_plugin->long_name); + details.description = g_strdup_printf ("FFMPEG %s decoder", + params->in_plugin->long_name); details.author = "Wim Taymans , " - "Ronald Bultje "; + "Ronald Bultje "; gst_element_class_set_details (element_class, &details); 
g_free (details.longname); g_free (details.description); /* pad templates */ sinktempl = gst_pad_template_new ("sink", - GST_PAD_SINK, - GST_PAD_ALWAYS, - params->sinkcaps); + GST_PAD_SINK, GST_PAD_ALWAYS, params->sinkcaps); videosrctempl = gst_pad_template_new ("video_%02d", - GST_PAD_SRC, - GST_PAD_SOMETIMES, - params->videosrccaps); + GST_PAD_SRC, GST_PAD_SOMETIMES, params->videosrccaps); audiosrctempl = gst_pad_template_new ("audio_%02d", - GST_PAD_SRC, - GST_PAD_SOMETIMES, - params->audiosrccaps); + GST_PAD_SRC, GST_PAD_SOMETIMES, params->audiosrccaps); gst_element_class_add_pad_template (element_class, videosrctempl); gst_element_class_add_pad_template (element_class, audiosrctempl); @@ -190,30 +188,29 @@ gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass) } static void -gst_ffmpegdemux_class_init (GstFFMpegDemuxClass *klass) +gst_ffmpegdemux_class_init (GstFFMpegDemuxClass * klass) { GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass*)klass; - gstelement_class = (GstElementClass*)klass; + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; - parent_class = g_type_class_ref(GST_TYPE_ELEMENT); + parent_class = g_type_class_ref (GST_TYPE_ELEMENT); gstelement_class->change_state = gst_ffmpegdemux_change_state; } static void -gst_ffmpegdemux_init (GstFFMpegDemux *demux) +gst_ffmpegdemux_init (GstFFMpegDemux * demux) { - GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) (G_OBJECT_GET_CLASS (demux)); + GstFFMpegDemuxClass *oclass = + (GstFFMpegDemuxClass *) (G_OBJECT_GET_CLASS (demux)); gint n; - demux->sinkpad = gst_pad_new_from_template (oclass->sinktempl, - "sink"); + demux->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink"); gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad); - gst_element_set_loop_function (GST_ELEMENT (demux), - gst_ffmpegdemux_loop); + gst_element_set_loop_function (GST_ELEMENT (demux), gst_ffmpegdemux_loop); demux->opened = FALSE; demux->context = NULL; @@ -228,7 +225,7 @@ gst_ffmpegdemux_init (GstFFMpegDemux *demux) } static void -gst_ffmpegdemux_close (GstFFMpegDemux *demux) +gst_ffmpegdemux_close (GstFFMpegDemux * demux) { gint n; @@ -255,7 +252,7 @@ gst_ffmpegdemux_close (GstFFMpegDemux *demux) } static AVStream * -gst_ffmpegdemux_stream_from_pad (GstPad *pad) +gst_ffmpegdemux_stream_from_pad (GstPad * pad) { GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad); AVStream *stream = NULL; @@ -272,19 +269,18 @@ gst_ffmpegdemux_stream_from_pad (GstPad *pad) } static const GstEventMask * -gst_ffmpegdemux_src_event_mask (GstPad *pad) +gst_ffmpegdemux_src_event_mask (GstPad * pad) { static const GstEventMask masks[] = { - { GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT }, - { 0, } + {GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT}, + {0,} }; - + return masks; } static gboolean -gst_ffmpegdemux_src_event (GstPad *pad, - GstEvent *event) +gst_ffmpegdemux_src_event (GstPad * pad, GstEvent * event) { GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad); AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad); @@ -304,14 +300,15 @@ gst_ffmpegdemux_src_event (GstPad *pad, break; } else { GstFormat fmt = GST_FORMAT_TIME; + if (!(res = gst_pad_convert (pad, GST_FORMAT_DEFAULT, offset, - &fmt, &offset))) + &fmt, &offset))) break; } /* fall-through */ case GST_FORMAT_TIME: if (av_seek_frame (demux->context, stream->index, - offset / (GST_SECOND / AV_TIME_BASE))) + offset / (GST_SECOND / AV_TIME_BASE))) res = 
FALSE; break; default: @@ -328,7 +325,7 @@ gst_ffmpegdemux_src_event (GstPad *pad, } static const GstFormat * -gst_ffmpegdemux_src_format_list (GstPad *pad) +gst_ffmpegdemux_src_format_list (GstPad * pad) { AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad); static const GstFormat src_v_formats[] = { @@ -336,31 +333,27 @@ gst_ffmpegdemux_src_format_list (GstPad *pad) GST_FORMAT_DEFAULT, 0 }, src_a_formats[] = { - GST_FORMAT_TIME, - 0 - }; + GST_FORMAT_TIME, 0}; return (stream->codec.codec_type == CODEC_TYPE_VIDEO) ? - src_v_formats : src_a_formats; + src_v_formats : src_a_formats; } static const GstQueryType * -gst_ffmpegdemux_src_query_list (GstPad *pad) +gst_ffmpegdemux_src_query_list (GstPad * pad) { static const GstQueryType src_types[] = { GST_QUERY_TOTAL, GST_QUERY_POSITION, 0 }; - + return src_types; } static gboolean -gst_ffmpegdemux_src_query (GstPad *pad, - GstQueryType type, - GstFormat *fmt, - gint64 *value) +gst_ffmpegdemux_src_query (GstPad * pad, + GstQueryType type, GstFormat * fmt, gint64 * value) { GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad); AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad); @@ -368,7 +361,7 @@ gst_ffmpegdemux_src_query (GstPad *pad, gint n; if (!stream || (*fmt == GST_FORMAT_DEFAULT && - stream->codec.codec_type != CODEC_TYPE_VIDEO)) + stream->codec.codec_type != CODEC_TYPE_VIDEO)) return FALSE; switch (type) { @@ -381,7 +374,7 @@ gst_ffmpegdemux_src_query (GstPad *pad, if (stream->codec_info_nb_frames) { *value = stream->codec_info_nb_frames; break; - } /* else fall-through */ + } /* else fall-through */ default: res = FALSE; break; @@ -394,8 +387,7 @@ gst_ffmpegdemux_src_query (GstPad *pad, break; case GST_FORMAT_DEFAULT: res = gst_pad_convert (pad, GST_FORMAT_TIME, - demux->last_ts[stream->index], - fmt, value); + demux->last_ts[stream->index], fmt, value); break; default: res = FALSE; @@ -411,11 +403,9 @@ gst_ffmpegdemux_src_query (GstPad *pad, } static gboolean -gst_ffmpegdemux_src_convert (GstPad *pad, - GstFormat src_fmt, - gint64 src_value, - GstFormat *dest_fmt, - gint64 *dest_value) +gst_ffmpegdemux_src_convert (GstPad * pad, + GstFormat src_fmt, + gint64 src_value, GstFormat * dest_fmt, gint64 * dest_value) { GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad); AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad); @@ -429,7 +419,7 @@ gst_ffmpegdemux_src_convert (GstPad *pad, switch (*dest_fmt) { case GST_FORMAT_DEFAULT: *dest_value = src_value * stream->r_frame_rate / - (GST_SECOND * stream->r_frame_rate_base); + (GST_SECOND * stream->r_frame_rate_base); break; default: res = FALSE; @@ -440,7 +430,7 @@ gst_ffmpegdemux_src_convert (GstPad *pad, switch (*dest_fmt) { case GST_FORMAT_TIME: *dest_value = src_value * GST_SECOND * stream->r_frame_rate_base / - stream->r_frame_rate; + stream->r_frame_rate; break; default: res = FALSE; @@ -456,16 +446,16 @@ gst_ffmpegdemux_src_convert (GstPad *pad, } static gboolean -gst_ffmpegdemux_add (GstFFMpegDemux *demux, - AVStream *stream) +gst_ffmpegdemux_add (GstFFMpegDemux * demux, AVStream * stream) { - GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux); + GstFFMpegDemuxClass *oclass = + (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux); GstPadTemplate *templ = NULL; GstPad *pad; GstCaps *caps; gint num; gchar *padname; - + switch (stream->codec.codec_type) { case CODEC_TYPE_VIDEO: templ = oclass->videosrctempl; @@ -499,7 +489,8 @@ gst_ffmpegdemux_add (GstFFMpegDemux *demux, demux->srcpads[stream->index] = pad; /* get 
caps that belongs to this stream */ - caps = gst_ffmpeg_codecid_to_caps (stream->codec.codec_id, &stream->codec, TRUE); + caps = + gst_ffmpeg_codecid_to_caps (stream->codec.codec_id, &stream->codec, TRUE); gst_pad_set_explicit_caps (pad, caps); gst_element_add_pad (GST_ELEMENT (demux), pad); @@ -508,9 +499,10 @@ gst_ffmpegdemux_add (GstFFMpegDemux *demux, } static gboolean -gst_ffmpegdemux_open (GstFFMpegDemux *demux) +gst_ffmpegdemux_open (GstFFMpegDemux * demux) { - GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux); + GstFFMpegDemuxClass *oclass = + (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux); gchar *location; gint res; @@ -520,11 +512,11 @@ gst_ffmpegdemux_open (GstFFMpegDemux *demux) /* open via our input protocol hack */ location = g_strdup_printf ("gstreamer://%p", demux->sinkpad); res = av_open_input_file (&demux->context, location, - oclass->in_plugin, 0, NULL); + oclass->in_plugin, 0, NULL); g_free (location); if (res < 0) { GST_ELEMENT_ERROR (demux, LIBRARY, FAILED, (NULL), - (gst_ffmpegdemux_averror (res))); + (gst_ffmpegdemux_averror (res))); return FALSE; } @@ -542,13 +534,13 @@ gst_ffmpegdemux_open (GstFFMpegDemux *demux) #define GST_FFMPEG_TYPE_FIND_SIZE 4096 static void -gst_ffmpegdemux_type_find (GstTypeFind *tf, gpointer priv) +gst_ffmpegdemux_type_find (GstTypeFind * tf, gpointer priv) { guint8 *data; GstFFMpegDemuxClassParams *params = (GstFFMpegDemuxClassParams *) priv; AVInputFormat *in_plugin = params->in_plugin; gint res = 0; - + if (in_plugin->read_probe && (data = gst_type_find_peek (tf, 0, GST_FFMPEG_TYPE_FIND_SIZE)) != NULL) { AVProbeData probe_data; @@ -559,15 +551,15 @@ gst_ffmpegdemux_type_find (GstTypeFind *tf, gpointer priv) res = in_plugin->read_probe (&probe_data); res = res * GST_TYPE_FIND_MAXIMUM / AVPROBE_SCORE_MAX; - if (res > 0) + if (res > 0) gst_type_find_suggest (tf, res, params->sinkcaps); } } static void -gst_ffmpegdemux_loop (GstElement *element) +gst_ffmpegdemux_loop (GstElement * element) { - GstFFMpegDemux *demux = (GstFFMpegDemux *)(element); + GstFFMpegDemux *demux = (GstFFMpegDemux *) (element); gint res; AVPacket pkt; GstPad *pad; @@ -586,7 +578,7 @@ gst_ffmpegdemux_loop (GstElement *element) gst_ffmpegdemux_close (demux); } else { GST_ELEMENT_ERROR (demux, LIBRARY, FAILED, (NULL), - (gst_ffmpegdemux_averror (res))); + (gst_ffmpegdemux_averror (res))); } return; } @@ -610,7 +602,7 @@ gst_ffmpegdemux_loop (GstElement *element) if (pkt.pts != AV_NOPTS_VALUE && demux->context->pts_den) GST_BUFFER_TIMESTAMP (outbuf) = (double) pkt.pts * GST_SECOND * - demux->context->pts_num / demux->context->pts_den; + demux->context->pts_num / demux->context->pts_den; if (pkt.flags & PKT_FLAG_KEY) { GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_KEY_UNIT); @@ -623,9 +615,9 @@ gst_ffmpegdemux_loop (GstElement *element) } static GstElementStateReturn -gst_ffmpegdemux_change_state (GstElement *element) +gst_ffmpegdemux_change_state (GstElement * element) { - GstFFMpegDemux *demux = (GstFFMpegDemux *)(element); + GstFFMpegDemux *demux = (GstFFMpegDemux *) (element); gint transition = GST_STATE_TRANSITION (element); switch (transition) { @@ -641,7 +633,7 @@ gst_ffmpegdemux_change_state (GstElement *element) } gboolean -gst_ffmpegdemux_register (GstPlugin *plugin) +gst_ffmpegdemux_register (GstPlugin * plugin) { GType type; AVInputFormat *in_plugin; @@ -649,17 +641,17 @@ gst_ffmpegdemux_register (GstPlugin *plugin) AVCodec *in_codec; gchar **extensions; GTypeInfo typeinfo = { - sizeof(GstFFMpegDemuxClass), - 
(GBaseInitFunc)gst_ffmpegdemux_base_init, + sizeof (GstFFMpegDemuxClass), + (GBaseInitFunc) gst_ffmpegdemux_base_init, NULL, - (GClassInitFunc)gst_ffmpegdemux_class_init, + (GClassInitFunc) gst_ffmpegdemux_class_init, NULL, NULL, - sizeof(GstFFMpegDemux), + sizeof (GstFFMpegDemux), 0, - (GInstanceInitFunc)gst_ffmpegdemux_init, + (GInstanceInitFunc) gst_ffmpegdemux_init, }; - + in_plugin = first_iformat; global_plugins = g_hash_table_new (NULL, NULL); @@ -680,7 +672,8 @@ gst_ffmpegdemux_register (GstPlugin *plugin) p = name = g_strdup (in_plugin->name); while (*p) { - if (*p == '.' || *p == ',') *p = '_'; + if (*p == '.' || *p == ',') + *p = '_'; p++; } @@ -694,9 +687,9 @@ gst_ffmpegdemux_register (GstPlugin *plugin) * when we open the stream */ audiosrccaps = gst_caps_new_empty (); videosrccaps = gst_caps_new_empty (); - for (in_codec = first_avcodec; in_codec != NULL; - in_codec = in_codec->next) { + for (in_codec = first_avcodec; in_codec != NULL; in_codec = in_codec->next) { GstCaps *temp = gst_ffmpeg_codecid_to_caps (in_codec->id, NULL, TRUE); + if (!temp) { continue; } @@ -714,7 +707,7 @@ gst_ffmpegdemux_register (GstPlugin *plugin) } /* construct the type */ - type_name = g_strdup_printf("ffdemux_%s", name); + type_name = g_strdup_printf ("ffdemux_%s", name); /* if it's already registered, drop it */ if (g_type_from_name (type_name)) { @@ -722,8 +715,8 @@ gst_ffmpegdemux_register (GstPlugin *plugin) goto next; } - typefind_name = g_strdup_printf("fftype_%s", name); - + typefind_name = g_strdup_printf ("fftype_%s", name); + /* create a cache for these properties */ params = g_new0 (GstFFMpegDemuxClassParams, 1); params->in_plugin = in_plugin; @@ -731,16 +724,14 @@ gst_ffmpegdemux_register (GstPlugin *plugin) params->videosrccaps = videosrccaps; params->audiosrccaps = audiosrccaps; - g_hash_table_insert (global_plugins, - GINT_TO_POINTER (0), - (gpointer) params); + g_hash_table_insert (global_plugins, + GINT_TO_POINTER (0), (gpointer) params); /* create the type now */ - type = g_type_register_static (GST_TYPE_ELEMENT, type_name , &typeinfo, 0); + type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0); - g_hash_table_insert (global_plugins, - GINT_TO_POINTER (type), - (gpointer) params); + g_hash_table_insert (global_plugins, + GINT_TO_POINTER (type), (gpointer) params); if (in_plugin->extensions) extensions = g_strsplit (in_plugin->extensions, " ", 0); @@ -749,8 +740,7 @@ gst_ffmpegdemux_register (GstPlugin *plugin) if (!gst_element_register (plugin, type_name, GST_RANK_MARGINAL, type) || !gst_type_find_register (plugin, typefind_name, GST_RANK_MARGINAL, - gst_ffmpegdemux_type_find, - extensions, sinkcaps, params)) { + gst_ffmpegdemux_type_find, extensions, sinkcaps, params)) { g_warning ("Register of type ffdemux_%s failed", name); return FALSE; } @@ -758,7 +748,7 @@ gst_ffmpegdemux_register (GstPlugin *plugin) if (extensions) g_strfreev (extensions); -next: + next: g_free (name); in_plugin = in_plugin->next; } diff --git a/ext/ffmpeg/gstffmpegenc.c b/ext/ffmpeg/gstffmpegenc.c index 33c71a4..a3fd6e1 100644 --- a/ext/ffmpeg/gstffmpegenc.c +++ b/ext/ffmpeg/gstffmpegenc.c @@ -36,7 +36,8 @@ typedef struct _GstFFMpegEnc GstFFMpegEnc; -struct _GstFFMpegEnc { +struct _GstFFMpegEnc +{ GstElement element; /* We need to keep track of our pads, so we do so here. 
*/ @@ -57,14 +58,16 @@ struct _GstFFMpegEnc { typedef struct _GstFFMpegEncClass GstFFMpegEncClass; -struct _GstFFMpegEncClass { +struct _GstFFMpegEncClass +{ GstElementClass parent_class; AVCodec *in_plugin; GstPadTemplate *srctempl, *sinktempl; }; -typedef struct { +typedef struct +{ AVCodec *in_plugin; GstCaps *srccaps, *sinkcaps; } GstFFMpegEncClassParams; @@ -82,18 +85,20 @@ typedef struct { #define VIDEO_BUFFER_SIZE (1024*1024) -enum { +enum +{ /* FILL ME */ LAST_SIGNAL }; -enum { +enum +{ ARG_0, ARG_BIT_RATE, ARG_GOP_SIZE, ARG_ME_METHOD, ARG_BUFSIZE - /* FILL ME */ + /* FILL ME */ }; #define GST_TYPE_ME_METHOD (gst_ffmpegenc_me_method_get_type()) @@ -102,16 +107,17 @@ gst_ffmpegenc_me_method_get_type (void) { static GType ffmpegenc_me_method_type = 0; static GEnumValue ffmpegenc_me_methods[] = { - { ME_ZERO, "0", "zero" }, - { ME_FULL, "1", "full" }, - { ME_LOG, "2", "logarithmic" }, - { ME_PHODS, "3", "phods" }, - { ME_EPZS, "4", "epzs" }, - { ME_X1 , "5", "x1" }, - { 0, NULL, NULL }, + {ME_ZERO, "0", "zero"}, + {ME_FULL, "1", "full"}, + {ME_LOG, "2", "logarithmic"}, + {ME_PHODS, "3", "phods"}, + {ME_EPZS, "4", "epzs"}, + {ME_X1, "5", "x1"}, + {0, NULL, NULL}, }; if (!ffmpegenc_me_method_type) { - ffmpegenc_me_method_type = g_enum_register_static ("GstFFMpegEncMeMethod", ffmpegenc_me_methods); + ffmpegenc_me_method_type = + g_enum_register_static ("GstFFMpegEncMeMethod", ffmpegenc_me_methods); } return ffmpegenc_me_method_type; } @@ -119,34 +125,29 @@ gst_ffmpegenc_me_method_get_type (void) static GHashTable *enc_global_plugins; /* A number of functon prototypes are given so we can refer to them later. */ -static void gst_ffmpegenc_class_init (GstFFMpegEncClass *klass); -static void gst_ffmpegenc_base_init (GstFFMpegEncClass *klass); -static void gst_ffmpegenc_init (GstFFMpegEnc *ffmpegenc); -static void gst_ffmpegenc_dispose (GObject *object); +static void gst_ffmpegenc_class_init (GstFFMpegEncClass * klass); +static void gst_ffmpegenc_base_init (GstFFMpegEncClass * klass); +static void gst_ffmpegenc_init (GstFFMpegEnc * ffmpegenc); +static void gst_ffmpegenc_dispose (GObject * object); static GstPadLinkReturn - gst_ffmpegenc_connect (GstPad *pad, const GstCaps *caps); -static void gst_ffmpegenc_chain_video (GstPad *pad, GstData *_data); -static void gst_ffmpegenc_chain_audio (GstPad *pad, GstData *_data); - -static void gst_ffmpegenc_set_property (GObject *object, - guint prop_id, - const GValue *value, - GParamSpec *pspec); -static void gst_ffmpegenc_get_property (GObject *object, - guint prop_id, - GValue *value, - GParamSpec *pspec); +gst_ffmpegenc_connect (GstPad * pad, const GstCaps * caps); +static void gst_ffmpegenc_chain_video (GstPad * pad, GstData * _data); +static void gst_ffmpegenc_chain_audio (GstPad * pad, GstData * _data); -static GstElementStateReturn - gst_ffmpegenc_change_state (GstElement *element); +static void gst_ffmpegenc_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_ffmpegenc_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); + +static GstElementStateReturn gst_ffmpegenc_change_state (GstElement * element); static GstElementClass *parent_class = NULL; /*static guint gst_ffmpegenc_signals[LAST_SIGNAL] = { 0 }; */ static void -gst_ffmpegenc_base_init (GstFFMpegEncClass *klass) +gst_ffmpegenc_base_init (GstFFMpegEncClass * klass) { GObjectClass *gobject_class = G_OBJECT_CLASS (klass); GstElementClass *element_class = GST_ELEMENT_CLASS (klass); @@ -155,24 +156,22 
@@ gst_ffmpegenc_base_init (GstFFMpegEncClass *klass) GstPadTemplate *srctempl, *sinktempl; params = g_hash_table_lookup (enc_global_plugins, - GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); + GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); /* HACK: if we don't have a GType yet, our params are stored at position 0 */ if (!params) { - params = g_hash_table_lookup (enc_global_plugins, - GINT_TO_POINTER (0)); + params = g_hash_table_lookup (enc_global_plugins, GINT_TO_POINTER (0)); } g_assert (params); /* construct the element details struct */ - details.longname = g_strdup_printf("FFMPEG %s encoder", - params->in_plugin->name); - details.klass = g_strdup_printf("Codec/%s/Encoder", - (params->in_plugin->type == CODEC_TYPE_VIDEO) ? - "Video" : "Audio"); - details.description = g_strdup_printf("FFMPEG %s encoder", - params->in_plugin->name); + details.longname = g_strdup_printf ("FFMPEG %s encoder", + params->in_plugin->name); + details.klass = g_strdup_printf ("Codec/%s/Encoder", + (params->in_plugin->type == CODEC_TYPE_VIDEO) ? "Video" : "Audio"); + details.description = g_strdup_printf ("FFMPEG %s encoder", + params->in_plugin->name); details.author = "Wim Taymans , " - "Ronald Bultje "; + "Ronald Bultje "; gst_element_class_set_details (element_class, &details); g_free (details.longname); g_free (details.klass); @@ -180,9 +179,9 @@ gst_ffmpegenc_base_init (GstFFMpegEncClass *klass) /* pad templates */ sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK, - GST_PAD_ALWAYS, params->sinkcaps); + GST_PAD_ALWAYS, params->sinkcaps); srctempl = gst_pad_template_new ("src", GST_PAD_SRC, - GST_PAD_ALWAYS, params->srccaps); + GST_PAD_ALWAYS, params->srccaps); gst_element_class_add_pad_template (element_class, srctempl); gst_element_class_add_pad_template (element_class, sinktempl); @@ -193,39 +192,35 @@ gst_ffmpegenc_base_init (GstFFMpegEncClass *klass) } static void -gst_ffmpegenc_class_init (GstFFMpegEncClass *klass) +gst_ffmpegenc_class_init (GstFFMpegEncClass * klass) { GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass*)klass; - gstelement_class = (GstElementClass*)klass; + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; - parent_class = g_type_class_ref(GST_TYPE_ELEMENT); + parent_class = g_type_class_ref (GST_TYPE_ELEMENT); if (klass->in_plugin->type == CODEC_TYPE_VIDEO) { - g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_BIT_RATE, - g_param_spec_ulong ("bitrate","Bit Rate", - "Target Video Bitrate", - 0, G_MAXULONG, 300000, G_PARAM_READWRITE)); - g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_GOP_SIZE, - g_param_spec_int ("gop_size","GOP Size", - "Number of frames within one GOP", - 0, G_MAXINT, 15, G_PARAM_READWRITE)); - g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_ME_METHOD, - g_param_spec_enum ("me_method","ME Method", - "Motion Estimation Method", - GST_TYPE_ME_METHOD, ME_LOG, G_PARAM_READWRITE)); - g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BUFSIZE, - g_param_spec_ulong("buffer_size", "Buffer Size", - "Size of the video buffers", - 0,G_MAXULONG,0,G_PARAM_READWRITE)); - } - else if (klass->in_plugin->type == CODEC_TYPE_AUDIO) { - g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_BIT_RATE, - g_param_spec_ulong ("bitrate","Bit Rate", - "Target Audio Bitrate", - 0, G_MAXULONG, 128000, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BIT_RATE, + g_param_spec_ulong ("bitrate", "Bit Rate", + 
"Target Video Bitrate", 0, G_MAXULONG, 300000, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GOP_SIZE, + g_param_spec_int ("gop_size", "GOP Size", + "Number of frames within one GOP", + 0, G_MAXINT, 15, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ME_METHOD, + g_param_spec_enum ("me_method", "ME Method", + "Motion Estimation Method", + GST_TYPE_ME_METHOD, ME_LOG, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BUFSIZE, + g_param_spec_ulong ("buffer_size", "Buffer Size", + "Size of the video buffers", 0, G_MAXULONG, 0, G_PARAM_READWRITE)); + } else if (klass->in_plugin->type == CODEC_TYPE_AUDIO) { + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BIT_RATE, + g_param_spec_ulong ("bitrate", "Bit Rate", + "Target Audio Bitrate", 0, G_MAXULONG, 128000, G_PARAM_READWRITE)); } gobject_class->set_property = gst_ffmpegenc_set_property; @@ -237,9 +232,10 @@ gst_ffmpegenc_class_init (GstFFMpegEncClass *klass) } static void -gst_ffmpegenc_init(GstFFMpegEnc *ffmpegenc) +gst_ffmpegenc_init (GstFFMpegEnc * ffmpegenc) { - GstFFMpegEncClass *oclass = (GstFFMpegEncClass*)(G_OBJECT_GET_CLASS (ffmpegenc)); + GstFFMpegEncClass *oclass = + (GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc)); /* setup pads */ ffmpegenc->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink"); @@ -251,8 +247,8 @@ gst_ffmpegenc_init(GstFFMpegEnc *ffmpegenc) gst_element_add_pad (GST_ELEMENT (ffmpegenc), ffmpegenc->srcpad); /* ffmpeg objects */ - ffmpegenc->context = avcodec_alloc_context(); - ffmpegenc->picture = avcodec_alloc_frame(); + ffmpegenc->context = avcodec_alloc_context (); + ffmpegenc->picture = avcodec_alloc_frame (); ffmpegenc->opened = FALSE; ffmpegenc->cache = NULL; @@ -270,7 +266,7 @@ gst_ffmpegenc_init(GstFFMpegEnc *ffmpegenc) } static void -gst_ffmpegenc_dispose (GObject *object) +gst_ffmpegenc_dispose (GObject * object) { GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) object; @@ -286,15 +282,15 @@ gst_ffmpegenc_dispose (GObject *object) } static GstPadLinkReturn -gst_ffmpegenc_connect (GstPad *pad, - const GstCaps *caps) +gst_ffmpegenc_connect (GstPad * pad, const GstCaps * caps) { GstCaps *other_caps; GstCaps *allowed_caps; GstCaps *icaps; enum PixelFormat pix_fmt; GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) gst_pad_get_parent (pad); - GstFFMpegEncClass *oclass = (GstFFMpegEncClass *) G_OBJECT_GET_CLASS(ffmpegenc); + GstFFMpegEncClass *oclass = + (GstFFMpegEncClass *) G_OBJECT_GET_CLASS (ffmpegenc); /* close old session */ if (ffmpegenc->opened) { @@ -321,7 +317,7 @@ gst_ffmpegenc_connect (GstPad *pad, /* fetch pix_fmt and so on */ gst_ffmpeg_caps_to_codectype (oclass->in_plugin->type, - caps, ffmpegenc->context); + caps, ffmpegenc->context); pix_fmt = ffmpegenc->context->pix_fmt; @@ -329,7 +325,7 @@ gst_ffmpegenc_connect (GstPad *pad, if (avcodec_open (ffmpegenc->context, oclass->in_plugin) < 0) { avcodec_close (ffmpegenc->context); GST_DEBUG ("ffenc_%s: Failed to open FFMPEG codec", - oclass->in_plugin->name); + oclass->in_plugin->name); return GST_PAD_LINK_REFUSED; } @@ -337,13 +333,13 @@ gst_ffmpegenc_connect (GstPad *pad, if (pix_fmt != ffmpegenc->context->pix_fmt) { avcodec_close (ffmpegenc->context); GST_DEBUG ("ffenc_%s: AV wants different colourspace (%d given, %d wanted)", - oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt); + oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt); return GST_PAD_LINK_REFUSED; } /* try to set this caps on the other 
side */ other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id, - ffmpegenc->context, TRUE); + ffmpegenc->context, TRUE); if (!other_caps) { avcodec_close (ffmpegenc->context); GST_DEBUG ("Unsupported codec - no caps found"); @@ -363,7 +359,9 @@ gst_ffmpegenc_connect (GstPad *pad, if (gst_caps_get_size (icaps) > 1) { GstCaps *newcaps; - newcaps = gst_caps_new_full (gst_structure_copy (gst_caps_get_structure (icaps, 0)), NULL); + newcaps = + gst_caps_new_full (gst_structure_copy (gst_caps_get_structure (icaps, + 0)), NULL); gst_caps_free (icaps); icaps = newcaps; } @@ -384,32 +382,29 @@ gst_ffmpegenc_connect (GstPad *pad, } static void -gst_ffmpegenc_chain_video (GstPad *pad, - GstData *_data) +gst_ffmpegenc_chain_video (GstPad * pad, GstData * _data) { GstBuffer *inbuf = GST_BUFFER (_data); GstBuffer *outbuf = NULL; - GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *)(gst_pad_get_parent (pad)); - GstFFMpegEncClass *oclass = (GstFFMpegEncClass*)(G_OBJECT_GET_CLASS(ffmpegenc)); + GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) (gst_pad_get_parent (pad)); + GstFFMpegEncClass *oclass = + (GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc)); gint ret_size = 0; /* FIXME: events (discont (flush!) and eos (close down) etc.) */ outbuf = gst_buffer_new_and_alloc (ffmpegenc->buffer_size); avpicture_fill ((AVPicture *) ffmpegenc->picture, - GST_BUFFER_DATA (inbuf), - ffmpegenc->context->pix_fmt, - ffmpegenc->context->width, - ffmpegenc->context->height); + GST_BUFFER_DATA (inbuf), + ffmpegenc->context->pix_fmt, + ffmpegenc->context->width, ffmpegenc->context->height); ffmpegenc->picture->pts = GST_BUFFER_TIMESTAMP (inbuf) / 1000; ret_size = avcodec_encode_video (ffmpegenc->context, - GST_BUFFER_DATA (outbuf), - GST_BUFFER_MAXSIZE (outbuf), - ffmpegenc->picture); + GST_BUFFER_DATA (outbuf), + GST_BUFFER_MAXSIZE (outbuf), ffmpegenc->picture); if (ret_size < 0) { - g_warning("ffenc_%s: failed to encode buffer", - oclass->in_plugin->name); + g_warning ("ffenc_%s: failed to encode buffer", oclass->in_plugin->name); gst_buffer_unref (inbuf); return; } @@ -423,13 +418,13 @@ gst_ffmpegenc_chain_video (GstPad *pad, } static void -gst_ffmpegenc_chain_audio (GstPad *pad, - GstData *_data) +gst_ffmpegenc_chain_audio (GstPad * pad, GstData * _data) { GstBuffer *inbuf = GST_BUFFER (_data); GstBuffer *outbuf = NULL, *subbuf; - GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *)(gst_pad_get_parent (pad)); - GstFFMpegEncClass *oclass = (GstFFMpegEncClass*)(G_OBJECT_GET_CLASS(ffmpegenc)); + GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) (gst_pad_get_parent (pad)); + GstFFMpegEncClass *oclass = + (GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc)); gint size, ret_size = 0, in_size, frame_size; size = GST_BUFFER_SIZE (inbuf); @@ -437,7 +432,7 @@ gst_ffmpegenc_chain_audio (GstPad *pad, /* FIXME: events (discont (flush!) and eos (close down) etc.) */ frame_size = ffmpegenc->context->frame_size * 2 * - ffmpegenc->context->channels; + ffmpegenc->context->channels; in_size = size; if (ffmpegenc->cache) in_size += GST_BUFFER_SIZE (ffmpegenc->cache); @@ -445,7 +440,7 @@ gst_ffmpegenc_chain_audio (GstPad *pad, while (1) { /* do we have enough data for one frame? */ if (in_size / (2 * ffmpegenc->context->channels) < - ffmpegenc->context->frame_size) { + ffmpegenc->context->frame_size) { if (in_size > size) { /* this is panic! we got a buffer, but still don't have enough * data. Merge them and retry in the next cycle... 
*/ @@ -455,17 +450,18 @@ gst_ffmpegenc_chain_audio (GstPad *pad, ffmpegenc->cache = inbuf; } else if (in_size > 0) { ffmpegenc->cache = gst_buffer_create_sub (inbuf, size - in_size, - in_size); + in_size); GST_BUFFER_DURATION (ffmpegenc->cache) = - GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (ffmpegenc->cache) / size; + GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (ffmpegenc->cache) / + size; GST_BUFFER_TIMESTAMP (ffmpegenc->cache) = - GST_BUFFER_TIMESTAMP (inbuf) + (GST_BUFFER_DURATION (inbuf) * - (size - in_size) / size); + GST_BUFFER_TIMESTAMP (inbuf) + + (GST_BUFFER_DURATION (inbuf) * (size - in_size) / size); gst_buffer_unref (inbuf); } else { gst_buffer_unref (inbuf); } - + return; } @@ -474,28 +470,26 @@ gst_ffmpegenc_chain_audio (GstPad *pad, /* merge */ subbuf = gst_buffer_create_sub (inbuf, 0, frame_size - (in_size - size)); GST_BUFFER_DURATION (subbuf) = - GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size; + GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size; subbuf = gst_buffer_merge (ffmpegenc->cache, subbuf); ffmpegenc->cache = NULL; } else { subbuf = gst_buffer_create_sub (inbuf, size - in_size, frame_size); GST_BUFFER_DURATION (subbuf) = - GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size; + GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size; GST_BUFFER_TIMESTAMP (subbuf) = - GST_BUFFER_TIMESTAMP (inbuf) + (GST_BUFFER_DURATION (inbuf) * - (size - in_size) / size); + GST_BUFFER_TIMESTAMP (inbuf) + (GST_BUFFER_DURATION (inbuf) * + (size - in_size) / size); } outbuf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (inbuf)); ret_size = avcodec_encode_audio (ffmpegenc->context, - GST_BUFFER_DATA (outbuf), - GST_BUFFER_MAXSIZE (outbuf), - (const short int *) - GST_BUFFER_DATA (subbuf)); + GST_BUFFER_DATA (outbuf), + GST_BUFFER_MAXSIZE (outbuf), (const short int *) + GST_BUFFER_DATA (subbuf)); if (ret_size < 0) { - g_warning("ffenc_%s: failed to encode buffer", - oclass->in_plugin->name); + g_warning ("ffenc_%s: failed to encode buffer", oclass->in_plugin->name); gst_buffer_unref (inbuf); gst_buffer_unref (outbuf); gst_buffer_unref (subbuf); @@ -513,15 +507,13 @@ gst_ffmpegenc_chain_audio (GstPad *pad, } static void -gst_ffmpegenc_set_property (GObject *object, - guint prop_id, - const GValue *value, - GParamSpec *pspec) +gst_ffmpegenc_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) { GstFFMpegEnc *ffmpegenc; /* Get a pointer of the right type. */ - ffmpegenc = (GstFFMpegEnc *)(object); + ffmpegenc = (GstFFMpegEnc *) (object); /* Check the argument id to see which argument we're setting. */ switch (prop_id) { @@ -535,7 +527,7 @@ gst_ffmpegenc_set_property (GObject *object, ffmpegenc->me_method = g_value_get_enum (value); break; case ARG_BUFSIZE: - ffmpegenc->buffer_size = g_value_get_ulong(value); + ffmpegenc->buffer_size = g_value_get_ulong (value); break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); @@ -545,15 +537,13 @@ gst_ffmpegenc_set_property (GObject *object, /* The set function is simply the inverse of the get fuction. 
*/ static void -gst_ffmpegenc_get_property (GObject *object, - guint prop_id, - GValue *value, - GParamSpec *pspec) +gst_ffmpegenc_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) { GstFFMpegEnc *ffmpegenc; /* It's not null if we got it, but it might not be ours */ - ffmpegenc = (GstFFMpegEnc *)(object); + ffmpegenc = (GstFFMpegEnc *) (object); switch (prop_id) { case ARG_BIT_RATE: @@ -575,7 +565,7 @@ gst_ffmpegenc_get_property (GObject *object, } static GstElementStateReturn -gst_ffmpegenc_change_state (GstElement *element) +gst_ffmpegenc_change_state (GstElement * element) { GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) element; gint transition = GST_STATE_TRANSITION (element); @@ -600,22 +590,22 @@ gst_ffmpegenc_change_state (GstElement *element) } gboolean -gst_ffmpegenc_register (GstPlugin *plugin) +gst_ffmpegenc_register (GstPlugin * plugin) { GTypeInfo typeinfo = { - sizeof(GstFFMpegEncClass), - (GBaseInitFunc)gst_ffmpegenc_base_init, + sizeof (GstFFMpegEncClass), + (GBaseInitFunc) gst_ffmpegenc_base_init, NULL, - (GClassInitFunc)gst_ffmpegenc_class_init, + (GClassInitFunc) gst_ffmpegenc_class_init, NULL, NULL, - sizeof(GstFFMpegEnc), + sizeof (GstFFMpegEnc), 0, - (GInstanceInitFunc)gst_ffmpegenc_init, + (GInstanceInitFunc) gst_ffmpegenc_init, }; GType type; AVCodec *in_plugin; - + in_plugin = first_avcodec; enc_global_plugins = g_hash_table_new (NULL, NULL); @@ -627,8 +617,8 @@ gst_ffmpegenc_register (GstPlugin *plugin) /* no quasi codecs, please */ if (in_plugin->id == CODEC_ID_RAWVIDEO || - (in_plugin->id >= CODEC_ID_PCM_S16LE && - in_plugin->id <= CODEC_ID_PCM_ALAW)) { + (in_plugin->id >= CODEC_ID_PCM_S16LE && + in_plugin->id <= CODEC_ID_PCM_ALAW)) { goto next; } @@ -639,16 +629,16 @@ gst_ffmpegenc_register (GstPlugin *plugin) /* first make sure we've got a supported type */ srccaps = gst_ffmpeg_codecid_to_caps (in_plugin->id, NULL, TRUE); - sinkcaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL); + sinkcaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL); if (!sinkcaps || !srccaps) goto next; /* construct the type */ - type_name = g_strdup_printf("ffenc_%s", in_plugin->name); + type_name = g_strdup_printf ("ffenc_%s", in_plugin->name); /* if it's already registered, drop it */ - if (g_type_from_name(type_name)) { - g_free(type_name); + if (g_type_from_name (type_name)) { + g_free (type_name); goto next; } @@ -657,12 +647,11 @@ gst_ffmpegenc_register (GstPlugin *plugin) params->srccaps = srccaps; params->sinkcaps = sinkcaps; - g_hash_table_insert (enc_global_plugins, - GINT_TO_POINTER (0), - (gpointer) params); + g_hash_table_insert (enc_global_plugins, + GINT_TO_POINTER (0), (gpointer) params); /* create the glib type now */ - type = g_type_register_static(GST_TYPE_ELEMENT, type_name , &typeinfo, 0); + type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0); if (!gst_element_register (plugin, type_name, GST_RANK_NONE, type)) { g_free (type_name); return FALSE; @@ -670,11 +659,10 @@ gst_ffmpegenc_register (GstPlugin *plugin) g_free (type_name); - g_hash_table_insert (enc_global_plugins, - GINT_TO_POINTER (type), - (gpointer) params); + g_hash_table_insert (enc_global_plugins, + GINT_TO_POINTER (type), (gpointer) params); -next: + next: in_plugin = in_plugin->next; } g_hash_table_remove (enc_global_plugins, GINT_TO_POINTER (0)); diff --git a/ext/ffmpeg/gstffmpegmux.c b/ext/ffmpeg/gstffmpegmux.c index 15430d5..ab99d6a 100644 --- a/ext/ffmpeg/gstffmpegmux.c +++ b/ext/ffmpeg/gstffmpegmux.c @@ -34,32 +34,35 @@ typedef 
struct _GstFFMpegMux GstFFMpegMux; -struct _GstFFMpegMux { - GstElement element; +struct _GstFFMpegMux +{ + GstElement element; /* We need to keep track of our pads, so we do so here. */ - GstPad *srcpad; + GstPad *srcpad; - AVFormatContext *context; - gboolean opened; + AVFormatContext *context; + gboolean opened; - GstPad *sinkpads[MAX_STREAMS]; - gint videopads, audiopads; - GstBuffer *bufferqueue[MAX_STREAMS]; - gboolean eos[MAX_STREAMS]; + GstPad *sinkpads[MAX_STREAMS]; + gint videopads, audiopads; + GstBuffer *bufferqueue[MAX_STREAMS]; + gboolean eos[MAX_STREAMS]; }; -typedef struct _GstFFMpegMuxClassParams { - AVOutputFormat *in_plugin; - GstCaps *srccaps, *videosinkcaps, *audiosinkcaps; +typedef struct _GstFFMpegMuxClassParams +{ + AVOutputFormat *in_plugin; + GstCaps *srccaps, *videosinkcaps, *audiosinkcaps; } GstFFMpegMuxClassParams; typedef struct _GstFFMpegMuxClass GstFFMpegMuxClass; -struct _GstFFMpegMuxClass { - GstElementClass parent_class; +struct _GstFFMpegMuxClass +{ + GstElementClass parent_class; - AVOutputFormat *in_plugin; + AVOutputFormat *in_plugin; }; #define GST_TYPE_FFMPEGMUX \ @@ -73,12 +76,14 @@ struct _GstFFMpegMuxClass { #define GST_IS_FFMPEGMUX_CLASS(obj) \ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGMUX)) -enum { +enum +{ /* FILL ME */ LAST_SIGNAL }; -enum { +enum +{ ARG_0, /* FILL ME */ }; @@ -86,28 +91,25 @@ enum { static GHashTable *global_plugins; /* A number of functon prototypes are given so we can refer to them later. */ -static void gst_ffmpegmux_class_init (GstFFMpegMuxClass *klass); -static void gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass); -static void gst_ffmpegmux_init (GstFFMpegMux *ffmpegmux); -static void gst_ffmpegmux_dispose (GObject *object); +static void gst_ffmpegmux_class_init (GstFFMpegMuxClass * klass); +static void gst_ffmpegmux_base_init (GstFFMpegMuxClass * klass); +static void gst_ffmpegmux_init (GstFFMpegMux * ffmpegmux); +static void gst_ffmpegmux_dispose (GObject * object); static GstPadLinkReturn - gst_ffmpegmux_connect (GstPad *pad, - const GstCaps *caps); -static GstPad * gst_ffmpegmux_request_new_pad (GstElement *element, - GstPadTemplate *templ, - const gchar *name); -static void gst_ffmpegmux_loop (GstElement *element); +gst_ffmpegmux_connect (GstPad * pad, const GstCaps * caps); +static GstPad *gst_ffmpegmux_request_new_pad (GstElement * element, + GstPadTemplate * templ, const gchar * name); +static void gst_ffmpegmux_loop (GstElement * element); -static GstElementStateReturn - gst_ffmpegmux_change_state (GstElement *element); +static GstElementStateReturn gst_ffmpegmux_change_state (GstElement * element); static GstElementClass *parent_class = NULL; /*static guint gst_ffmpegmux_signals[LAST_SIGNAL] = { 0 }; */ static void -gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass) +gst_ffmpegmux_base_init (GstFFMpegMuxClass * klass) { GObjectClass *gobject_class = G_OBJECT_CLASS (klass); GstElementClass *element_class = GST_ELEMENT_CLASS (klass); @@ -116,20 +118,19 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass) GstPadTemplate *videosinktempl, *audiosinktempl, *srctempl; params = g_hash_table_lookup (global_plugins, - GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); + GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class))); if (!params) - params = g_hash_table_lookup (global_plugins, - GINT_TO_POINTER (0)); + params = g_hash_table_lookup (global_plugins, GINT_TO_POINTER (0)); g_assert (params); /* construct the element details struct */ details.longname = g_strdup_printf ("FFMPEG %s Muxer", - 
params->in_plugin->name); + params->in_plugin->name); details.klass = g_strdup ("Codec/Muxer"); details.description = g_strdup_printf ("FFMPEG %s Muxer", - params->in_plugin->name); + params->in_plugin->name); details.author = "Wim Taymans , " - "Ronald Bultje "; + "Ronald Bultje "; gst_element_class_set_details (element_class, &details); g_free (details.longname); g_free (details.klass); @@ -137,16 +138,11 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass) /* pad templates */ srctempl = gst_pad_template_new ("src", GST_PAD_SRC, - GST_PAD_ALWAYS, - params->srccaps); + GST_PAD_ALWAYS, params->srccaps); audiosinktempl = gst_pad_template_new ("audio_%d", - GST_PAD_SINK, - GST_PAD_REQUEST, - params->audiosinkcaps); + GST_PAD_SINK, GST_PAD_REQUEST, params->audiosinkcaps); videosinktempl = gst_pad_template_new ("video_%d", - GST_PAD_SINK, - GST_PAD_REQUEST, - params->videosinkcaps); + GST_PAD_SINK, GST_PAD_REQUEST, params->videosinkcaps); gst_element_class_add_pad_template (element_class, srctempl); gst_element_class_add_pad_template (element_class, videosinktempl); @@ -156,15 +152,15 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass) } static void -gst_ffmpegmux_class_init (GstFFMpegMuxClass *klass) +gst_ffmpegmux_class_init (GstFFMpegMuxClass * klass) { GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass*)klass; - gstelement_class = (GstElementClass*)klass; + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; - parent_class = g_type_class_ref(GST_TYPE_ELEMENT); + parent_class = g_type_class_ref (GST_TYPE_ELEMENT); gstelement_class->request_new_pad = gst_ffmpegmux_request_new_pad; gstelement_class->change_state = gst_ffmpegmux_change_state; @@ -172,24 +168,22 @@ gst_ffmpegmux_class_init (GstFFMpegMuxClass *klass) } static void -gst_ffmpegmux_init(GstFFMpegMux *ffmpegmux) +gst_ffmpegmux_init (GstFFMpegMux * ffmpegmux) { GstElementClass *klass = GST_ELEMENT_GET_CLASS (ffmpegmux); - GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*) klass; + GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass *) klass; GstPadTemplate *templ = gst_element_class_get_pad_template (klass, "src"); ffmpegmux->srcpad = gst_pad_new_from_template (templ, "src"); - gst_element_set_loop_function (GST_ELEMENT (ffmpegmux), - gst_ffmpegmux_loop); - gst_element_add_pad (GST_ELEMENT (ffmpegmux), - ffmpegmux->srcpad); + gst_element_set_loop_function (GST_ELEMENT (ffmpegmux), gst_ffmpegmux_loop); + gst_element_add_pad (GST_ELEMENT (ffmpegmux), ffmpegmux->srcpad); ffmpegmux->context = g_new0 (AVFormatContext, 1); ffmpegmux->context->oformat = oclass->in_plugin; ffmpegmux->context->nb_streams = 0; snprintf (ffmpegmux->context->filename, - sizeof (ffmpegmux->context->filename), - "gstreamer://%p", ffmpegmux->srcpad); + sizeof (ffmpegmux->context->filename), + "gstreamer://%p", ffmpegmux->srcpad); ffmpegmux->opened = FALSE; ffmpegmux->videopads = 0; @@ -197,7 +191,7 @@ gst_ffmpegmux_init(GstFFMpegMux *ffmpegmux) } static void -gst_ffmpegmux_dispose (GObject *object) +gst_ffmpegmux_dispose (GObject * object) { GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) object; @@ -210,13 +204,12 @@ gst_ffmpegmux_dispose (GObject *object) } static GstPad * -gst_ffmpegmux_request_new_pad (GstElement *element, - GstPadTemplate *templ, - const gchar *name) +gst_ffmpegmux_request_new_pad (GstElement * element, + GstPadTemplate * templ, const gchar * name) { GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) element; GstElementClass *klass = GST_ELEMENT_GET_CLASS (element); - 
GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*) klass; + GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass *) klass; gchar *padname; GstPad *pad; AVStream *st; @@ -229,18 +222,16 @@ gst_ffmpegmux_request_new_pad (GstElement *element, /* figure out a name that *we* like */ if (templ == gst_element_class_get_pad_template (klass, "video_%d")) { - padname = g_strdup_printf ("video_%d", - ffmpegmux->videopads++); + padname = g_strdup_printf ("video_%d", ffmpegmux->videopads++); type = CODEC_TYPE_VIDEO; bitrate = 64 * 1024; framesize = 1152; } else if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) { - padname = g_strdup_printf ("audio_%d", - ffmpegmux->audiopads++); + padname = g_strdup_printf ("audio_%d", ffmpegmux->audiopads++); type = CODEC_TYPE_AUDIO; bitrate = 285 * 1024; } else { - g_warning("ffmux: unknown pad template!"); + g_warning ("ffmux: unknown pad template!"); return NULL; } @@ -254,30 +245,29 @@ gst_ffmpegmux_request_new_pad (GstElement *element, /* AVStream needs to be created */ st = av_new_stream (ffmpegmux->context, padnum); st->codec.codec_type = type; - st->codec.codec_id = CODEC_ID_NONE; /* this is a check afterwards */ - st->stream_copy = 1; /* we're not the actual encoder */ + st->codec.codec_id = CODEC_ID_NONE; /* this is a check afterwards */ + st->stream_copy = 1; /* we're not the actual encoder */ st->codec.bit_rate = bitrate; st->codec.frame_size = framesize; /* we fill in codec during capsnego */ /* we love debug output (c) (tm) (r) */ GST_DEBUG ("Created %s pad for ffmux_%s element", - padname, oclass->in_plugin->name); + padname, oclass->in_plugin->name); g_free (padname); return pad; } static GstPadLinkReturn -gst_ffmpegmux_connect (GstPad *pad, - const GstCaps *caps) +gst_ffmpegmux_connect (GstPad * pad, const GstCaps * caps) { - GstFFMpegMux *ffmpegmux = (GstFFMpegMux *)(gst_pad_get_parent (pad)); + GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) (gst_pad_get_parent (pad)); gint i; AVStream *st; /*g_return_val_if_fail (ffmpegmux->opened == FALSE, - GST_PAD_LINK_REFUSED);*/ + GST_PAD_LINK_REFUSED); */ for (i = 0; i < ffmpegmux->context->nb_streams; i++) { if (pad == ffmpegmux->sinkpads[i]) { @@ -292,17 +282,16 @@ gst_ffmpegmux_connect (GstPad *pad, /* for the format-specific guesses, we'll go to * our famous codec mapper */ - if (gst_ffmpeg_caps_to_codecid (caps, - &st->codec) != CODEC_ID_NONE) { + if (gst_ffmpeg_caps_to_codecid (caps, &st->codec) != CODEC_ID_NONE) { ffmpegmux->eos[i] = FALSE; return GST_PAD_LINK_OK; - } + } return GST_PAD_LINK_REFUSED; } static void -gst_ffmpegmux_loop (GstElement *element) +gst_ffmpegmux_loop (GstElement * element) { GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) element; gint i, bufnum; @@ -313,25 +302,24 @@ gst_ffmpegmux_loop (GstElement *element) /* check for "pull'ability" */ while (pad != NULL && - GST_PAD_IS_USABLE (pad) && - ffmpegmux->eos[i] == FALSE && - ffmpegmux->bufferqueue[i] == NULL) { + GST_PAD_IS_USABLE (pad) && + ffmpegmux->eos[i] == FALSE && ffmpegmux->bufferqueue[i] == NULL) { GstData *data; /* we can pull a buffer! 
       */
       data = gst_pad_pull (pad);
       if (GST_IS_EVENT (data)) {
-        GstEvent *event = GST_EVENT (data);
+        GstEvent *event = GST_EVENT (data);
 
-        switch (GST_EVENT_TYPE (event)) {
+        switch (GST_EVENT_TYPE (event)) {
           case GST_EVENT_EOS:
             /* flag EOS on this stream */
             ffmpegmux->eos[i] = TRUE;
             gst_event_unref (event);
-            break;
-          default:
-            gst_pad_event_default (pad, event);
-            break;
+            break;
+          default:
+            gst_pad_event_default (pad, event);
+            break;
         }
       } else {
         ffmpegmux->bufferqueue[i] = GST_BUFFER (data);
@@ -346,27 +334,26 @@ gst_ffmpegmux_loop (GstElement *element)
     for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
       AVStream *st = ffmpegmux->context->streams[i];
 
-      /* check whether the pad has successfully completed capsnego */
+      /* check whether the pad has successfully completed capsnego */
       if (st->codec.codec_id == CODEC_ID_NONE) {
         GST_ELEMENT_ERROR (element, CORE, NEGOTIATION, (NULL),
-          ("no caps set on stream %d (%s)", i,
-           (st->codec.codec_type == CODEC_TYPE_VIDEO) ?
-           "video" : "audio"));
+            ("no caps set on stream %d (%s)", i,
+                (st->codec.codec_type == CODEC_TYPE_VIDEO) ?
+                "video" : "audio"));
         return;
       }
     }
 
     if (url_fopen (&ffmpegmux->context->pb,
-                   ffmpegmux->context->filename,
-                   URL_WRONLY) < 0) {
+            ffmpegmux->context->filename, URL_WRONLY) < 0) {
       GST_ELEMENT_ERROR (element, LIBRARY, TOO_LAZY, (NULL),
-        ("Failed to open stream context in ffmux"));
+          ("Failed to open stream context in ffmux"));
       return;
     }
 
     if (av_set_parameters (ffmpegmux->context, NULL)) {
       GST_ELEMENT_ERROR (element, LIBRARY, INIT, (NULL),
-        ("Failed to initialize muxer"));
+          ("Failed to initialize muxer"));
       return;
     }
 
@@ -394,7 +381,7 @@ gst_ffmpegmux_loop (GstElement *element)
 
       /* if we do have one, only use this one if it's older */
       if (GST_BUFFER_TIMESTAMP (ffmpegmux->bufferqueue[i]) <
-            GST_BUFFER_TIMESTAMP (ffmpegmux->bufferqueue[bufnum])) {
+          GST_BUFFER_TIMESTAMP (ffmpegmux->bufferqueue[bufnum])) {
         bufnum = i;
       }
     }
@@ -411,10 +398,10 @@ gst_ffmpegmux_loop (GstElement *element)
       ffmpegmux->context->streams[bufnum]->codec.frame_number++;
 
       /* set time */
-      ffmpegmux->context->streams[bufnum]->pts.val = (GST_BUFFER_TIMESTAMP (buf) * 90) / 1000000;
-      av_write_frame (ffmpegmux->context, bufnum,
-                      GST_BUFFER_DATA (buf),
-                      GST_BUFFER_SIZE (buf));
+      ffmpegmux->context->streams[bufnum]->pts.val =
+          (GST_BUFFER_TIMESTAMP (buf) * 90) / 1000000;
+      av_write_frame (ffmpegmux->context, bufnum, GST_BUFFER_DATA (buf),
+          GST_BUFFER_SIZE (buf));
       //ffmpegmux->context->streams[bufnum]->codec.real_pict_num++;
       gst_buffer_unref (buf);
     } else {
@@ -427,9 +414,9 @@ gst_ffmpegmux_loop (GstElement *element)
 }
 
 static GstElementStateReturn
-gst_ffmpegmux_change_state (GstElement *element)
+gst_ffmpegmux_change_state (GstElement * element)
 {
-  GstFFMpegMux *ffmpegmux = (GstFFMpegMux *)(element);
+  GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) (element);
   gint transition = GST_STATE_TRANSITION (element);
 
   switch (transition) {
@@ -449,24 +436,24 @@ gst_ffmpegmux_change_state (GstElement *element)
 
 
 gboolean
-gst_ffmpegmux_register (GstPlugin *plugin)
+gst_ffmpegmux_register (GstPlugin * plugin)
 {
   GTypeInfo typeinfo = {
-    sizeof(GstFFMpegMuxClass),
-    (GBaseInitFunc)gst_ffmpegmux_base_init,
+    sizeof (GstFFMpegMuxClass),
+    (GBaseInitFunc) gst_ffmpegmux_base_init,
     NULL,
-    (GClassInitFunc)gst_ffmpegmux_class_init,
+    (GClassInitFunc) gst_ffmpegmux_class_init,
     NULL,
     NULL,
-    sizeof(GstFFMpegMux),
+    sizeof (GstFFMpegMux),
    0,
-    (GInstanceInitFunc)gst_ffmpegmux_init,
+    (GInstanceInitFunc) gst_ffmpegmux_init,
   };
   GType type;
   AVOutputFormat *in_plugin;
   GstFFMpegMuxClassParams *params;
   AVCodec *in_codec;
-  
+
   in_plugin = first_oformat;
 
   global_plugins = g_hash_table_new (NULL, NULL);
@@ -486,9 +473,9 @@ gst_ffmpegmux_register (GstPlugin *plugin)
      * when we open the stream */
    audiosinkcaps = gst_caps_new_empty ();
    videosinkcaps = gst_caps_new_empty ();
-    for (in_codec = first_avcodec; in_codec != NULL;
-         in_codec = in_codec->next) {
+    for (in_codec = first_avcodec; in_codec != NULL; in_codec = in_codec->next) {
      GstCaps *temp = gst_ffmpeg_codecid_to_caps (in_codec->id, NULL, TRUE);
+
      if (!temp) {
        continue;
      }
@@ -506,18 +493,19 @@ gst_ffmpegmux_register (GstPlugin *plugin)
    }
 
    /* construct the type */
-    type_name = g_strdup_printf("ffmux_%s", in_plugin->name);
+    type_name = g_strdup_printf ("ffmux_%s", in_plugin->name);
 
    p = type_name;
 
    while (*p) {
-      if (*p == '.') *p = '_';
+      if (*p == '.')
+        *p = '_';
      p++;
    }
 
    /* if it's already registered, drop it */
-    if (g_type_from_name(type_name)) {
-      g_free(type_name);
+    if (g_type_from_name (type_name)) {
+      g_free (type_name);
      goto next;
    }
 
@@ -528,20 +516,18 @@ gst_ffmpegmux_register (GstPlugin *plugin)
    params->videosinkcaps = videosinkcaps;
    params->audiosinkcaps = audiosinkcaps;
 
-    g_hash_table_insert (global_plugins,
-                         GINT_TO_POINTER (0),
-                         (gpointer) params);
+    g_hash_table_insert (global_plugins,
+        GINT_TO_POINTER (0), (gpointer) params);
 
    /* create the type now */
-    type = g_type_register_static(GST_TYPE_ELEMENT, type_name , &typeinfo, 0);
+    type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
    if (!gst_element_register (plugin, type_name, GST_RANK_NONE, type))
      return FALSE;
 
-    g_hash_table_insert (global_plugins,
-                         GINT_TO_POINTER (type),
-                         (gpointer) params);
+    g_hash_table_insert (global_plugins,
+        GINT_TO_POINTER (type), (gpointer) params);
 
-next:
+  next:
    in_plugin = in_plugin->next;
  }
  g_hash_table_remove (global_plugins, GINT_TO_POINTER (0));
diff --git a/ext/ffmpeg/gstffmpegprotocol.c b/ext/ffmpeg/gstffmpegprotocol.c
index be95dcc..a03013f 100644
--- a/ext/ffmpeg/gstffmpegprotocol.c
+++ b/ext/ffmpeg/gstffmpegprotocol.c
@@ -34,18 +34,17 @@
 
 typedef struct _GstProtocolInfo GstProtocolInfo;
 
-struct _GstProtocolInfo {
-  GstPad *pad;
+struct _GstProtocolInfo
+{
+  GstPad *pad;
 
-  int flags;
+  int flags;
  GstByteStream *bs;
-  gboolean eos;
+  gboolean eos;
 };
 
-static int
-gst_ffmpegdata_open (URLContext *h,
-                     const char *filename,
-                     int flags)
+static int
+gst_ffmpegdata_open (URLContext * h, const char *filename, int flags)
 {
  GstProtocolInfo *info;
  GstPad *pad;
@@ -54,8 +53,7 @@ gst_ffmpegdata_open (URLContext *h,
  info->flags = flags;
 
  /* we don't support R/W together */
-  if (flags != URL_RDONLY &&
-      flags != URL_WRONLY) {
+  if (flags != URL_RDONLY && flags != URL_WRONLY) {
    g_warning ("Only read-only or write-only are supported");
    return -EINVAL;
  }
@@ -87,10 +85,8 @@ gst_ffmpegdata_open (URLContext *h,
  return 0;
 }
 
-static int
-gst_ffmpegdata_read (URLContext *h,
-                     unsigned char *buf,
-                     int size)
+static int
+gst_ffmpegdata_read (URLContext * h, unsigned char *buf, int size)
 {
  GstByteStream *bs;
  guint32 total, request;
@@ -144,7 +140,7 @@ gst_ffmpegdata_read (URLContext *h,
      }
    }
  } while (!info->eos && total != request);
-  
+
  memcpy (buf, data, total);
  gst_bytestream_flush (bs, total);
 
@@ -152,9 +148,7 @@ gst_ffmpegdata_read (URLContext *h,
 }
 
 static int
-gst_ffmpegdata_write (URLContext *h,
-                      unsigned char *buf,
-                      int size)
+gst_ffmpegdata_write (URLContext * h, unsigned char *buf, int size)
 {
  GstProtocolInfo *info;
  GstBuffer *outbuf;
@@ -174,9 +168,7 @@ gst_ffmpegdata_write (URLContext *h,
 }
 
 static offset_t
-gst_ffmpegdata_seek (URLContext *h,
-                     offset_t pos,
-                     int whence)
+gst_ffmpegdata_seek (URLContext * h, offset_t pos, int whence)
 {
  GstSeekType seek_type = 0;
  GstProtocolInfo *info;
@@ -216,16 +208,17 @@ gst_ffmpegdata_seek (URLContext *h,
 }
 
 static int
-gst_ffmpegdata_close (URLContext *h)
+gst_ffmpegdata_close (URLContext * h)
 {
  GstProtocolInfo *info;
 
  info = (GstProtocolInfo *) h->priv_data;
 
  switch (info->flags) {
-    case URL_WRONLY: {
+    case URL_WRONLY:{
      /* send EOS - that closes down the stream */
      GstEvent *event = gst_event_new (GST_EVENT_EOS);
+
      gst_pad_push (info->pad, GST_DATA (event));
    }
      break;
@@ -243,11 +236,10 @@ gst_ffmpegdata_close (URLContext *h)
 }
 
 URLProtocol gstreamer_protocol = {
-  .name      = "gstreamer",
-  .url_open  = gst_ffmpegdata_open,
-  .url_read  = gst_ffmpegdata_read,
+  .name = "gstreamer",
+  .url_open = gst_ffmpegdata_open,
+  .url_read = gst_ffmpegdata_read,
  .url_write = gst_ffmpegdata_write,
-  .url_seek  = gst_ffmpegdata_seek,
+  .url_seek = gst_ffmpegdata_seek,
  .url_close = gst_ffmpegdata_close,
 };
-
-- 
2.7.4