From ff2603a727eb7ef90cbed88e7ffd8fa8dff78857 Mon Sep 17 00:00:00 2001 From: =?utf8?q?Olivier=20Cr=C3=AAte?= Date: Thu, 12 Apr 2012 15:57:32 -0400 Subject: [PATCH] Port to Gst 0.11 --- common | 2 +- configure.ac | 29 +-- omx/Makefile.am | 6 +- omx/gstbasevideocodec.c | 180 +++++++++++++---- omx/gstbasevideocodec.h | 138 +++++++++++-- omx/gstbasevideodecoder.c | 503 +++++++++++++++++++++++++--------------------- omx/gstbasevideodecoder.h | 34 ++-- omx/gstbasevideoencoder.c | 365 ++++++++++++++++----------------- omx/gstbasevideoencoder.h | 21 +- omx/gstomx.c | 191 ++++++++++++++++-- omx/gstomx.h | 19 +- omx/gstomxaacenc.c | 79 +++----- omx/gstomxaudioenc.c | 267 ++++++------------------ omx/gstomxaudioenc.h | 11 +- omx/gstomxh263dec.c | 47 ++--- omx/gstomxh263enc.c | 84 ++++---- omx/gstomxh264dec.c | 47 ++--- omx/gstomxh264enc.c | 100 +++++---- omx/gstomxmpeg4videodec.c | 47 ++--- omx/gstomxmpeg4videoenc.c | 78 +++---- omx/gstomxvideodec.c | 307 ++++++++-------------------- omx/gstomxvideodec.h | 13 +- omx/gstomxvideoenc.c | 315 +++++++++-------------------- omx/gstomxvideoenc.h | 18 +- omx/gstomxwmvdec.c | 48 ++--- 25 files changed, 1407 insertions(+), 1542 deletions(-) diff --git a/common b/common index 116ba9b..4de86d2 160000 --- a/common +++ b/common @@ -1 +1 @@ -Subproject commit 116ba9b1446a420d0062b3a0d6178b424b6f8645 +Subproject commit 4de86d2d4abeb0f4a04eb7844dec163c7d011b37 diff --git a/configure.ac b/configure.ac index d4f7373..8a30952 100644 --- a/configure.ac +++ b/configure.ac @@ -5,7 +5,7 @@ dnl please read gstreamer/docs/random/autotools before changing this file dnl initialize autoconf dnl releases only do -Wall, git and prerelease does -Werror too dnl use a three digit version number for releases, and four for git/prerelease -AC_INIT(GStreamer OpenMAX Plug-ins, 0.10.0.1, +AC_INIT(GStreamer OpenMAX Plug-ins, 0.11.0.1, http://bugzilla.gnome.org/enter_bug.cgi?product=GStreamer, gst-omx) @@ -37,11 +37,12 @@ 
m4_ifdef([AM_SILENT_RULES],[AM_SILENT_RULES([yes])], [AM_DEFAULT_VERBOSITY=1 AC_SUBST(AM_DEFAULT_VERBOSITY)]) -dnl our libraries and install dirs use major.minor as a version -GST_MAJORMINOR=$PACKAGE_VERSION_MAJOR.$PACKAGE_VERSION_MINOR -dnl we override it here if we need to for the release candidate of new series -GST_MAJORMINOR=0.10 -AC_SUBST(GST_MAJORMINOR) +dnl our libraries and install dirs use GST_API_VERSION in the filename +dnl to allow side-by-side installation of different API versions +GST_API_VERSION=1.0 +AC_SUBST(GST_API_VERSION) +AC_DEFINE_UNQUOTED(GST_API_VERSION, "$GST_API_VERSION", + [GStreamer API Version]) AG_GST_LIBTOOL_PREPARE @@ -54,7 +55,7 @@ AC_LIBTOOL_WIN32_DLL AM_PROG_LIBTOOL dnl *** required versions of GStreamer stuff *** -GST_REQ=0.10.35.1 +GST_REQ=0.11.90 dnl *** autotools stuff **** @@ -141,20 +142,20 @@ AG_GST_CHECK_FUNCTION dnl *** checks for dependency libraries *** dnl GLib is required -AG_GST_GLIB_CHECK([2.16]) +AG_GST_GLIB_CHECK([2.32]) dnl checks for gstreamer dnl uninstalled is selected preferentially -- see pkg-config(1) -AG_GST_CHECK_GST($GST_MAJORMINOR, [$GST_REQ], yes) -AG_GST_CHECK_GST_BASE($GST_MAJORMINOR, [$GST_REQ], yes) -AG_GST_CHECK_GST_CONTROLLER($GST_MAJORMINOR, [$GST_REQ], yes) -AG_GST_CHECK_GST_CHECK($GST_MAJORMINOR, [$GST_REQ], no) -AG_GST_CHECK_GST_PLUGINS_BASE($GST_MAJORMINOR, [$GST_REQ], yes) +AG_GST_CHECK_GST($GST_API_VERSION, [$GST_REQ], yes) +AG_GST_CHECK_GST_BASE($GST_API_VERSION, [$GST_REQ], yes) +AG_GST_CHECK_GST_CONTROLLER($GST_API_VERSION, [$GST_REQ], yes) +AG_GST_CHECK_GST_CHECK($GST_API_VERSION, [$GST_REQ], no) +AG_GST_CHECK_GST_PLUGINS_BASE($GST_API_VERSION, [$GST_REQ], yes) AM_CONDITIONAL(HAVE_GST_CHECK, test "x$HAVE_GST_CHECK" = "xyes") dnl Check for documentation xrefs GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`" -GST_PREFIX="`$PKG_CONFIG --variable=prefix gstreamer-$GST_MAJORMINOR`" +GST_PREFIX="`$PKG_CONFIG --variable=prefix gstreamer-$GST_API_VERSION`" AC_SUBST(GLIB_PREFIX) 
AC_SUBST(GST_PREFIX) diff --git a/omx/Makefile.am b/omx/Makefile.am index c3ef484..2d6955a 100644 --- a/omx/Makefile.am +++ b/omx/Makefile.am @@ -53,9 +53,9 @@ libgstopenmax_la_CFLAGS = \ $(fixbaseclasses) libgstopenmax_la_LIBADD = \ $(GST_PLUGINS_BASE_LIBS) \ - -lgstaudio-@GST_MAJORMINOR@ \ - -lgstpbutils-@GST_MAJORMINOR@ \ - -lgstvideo-@GST_MAJORMINOR@ \ + -lgstaudio-@GST_API_VERSION@ \ + -lgstpbutils-@GST_API_VERSION@ \ + -lgstvideo-@GST_API_VERSION@ \ $(GST_BASE_LIBS) \ $(GST_LIBS) libgstopenmax_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) diff --git a/omx/gstbasevideocodec.c b/omx/gstbasevideocodec.c index a1dc91d..9f839cc 100644 --- a/omx/gstbasevideocodec.c +++ b/omx/gstbasevideocodec.c @@ -17,10 +17,26 @@ * Boston, MA 02111-1307, USA. */ +/** + * SECTION:gstbasevideocodec + * @short_description: Base class and objects for video codecs + * + **/ + #ifdef HAVE_CONFIG_H #include "config.h" #endif +/** + * SECTION:gstbasevideocodec + * @short_description: Base class for video codecs + * @see_also: #GstBaseVideoDecoder , #GstBaseVideoEncoder + */ + +/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex + * with newer GLib versions (>= 2.31.0) */ +#define GLIB_DISABLE_DEPRECATION_WARNINGS + #include "gstbasevideocodec.h" #include @@ -45,31 +61,44 @@ static void gst_base_video_codec_finalize (GObject * object); static GstStateChangeReturn gst_base_video_codec_change_state (GstElement * element, GstStateChange transition); -GType -gst_video_frame_get_type (void) -{ - static volatile gsize type = 0; +static GstElementClass *parent_class = NULL; - if (g_once_init_enter (&type)) { - GType _type; +G_DEFINE_BOXED_TYPE (GstVideoFrameState, gst_video_frame_state, + (GBoxedCopyFunc) gst_video_frame_state_ref, + (GBoxedFreeFunc) gst_video_frame_state_unref); - _type = g_boxed_type_register_static ("GstVideoFrame", - (GBoxedCopyFunc) gst_video_frame_ref, - (GBoxedFreeFunc) gst_video_frame_unref); - g_once_init_leave (&type, _type); - } - return (GType) type; -} - 
-GST_BOILERPLATE (GstBaseVideoCodec, gst_base_video_codec, GstElement, - GST_TYPE_ELEMENT); +/* NOTE (Edward): Do not use G_DEFINE_* because we need to have + * a GClassInitFunc called with the target class (which the macros + * don't handle). + */ +static void gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass); +static void gst_base_video_codec_init (GstBaseVideoCodec * dec, + GstBaseVideoCodecClass * klass); -static void -gst_base_video_codec_base_init (gpointer g_class) +GType +gst_base_video_codec_get_type (void) { - GST_DEBUG_CATEGORY_INIT (basevideocodec_debug, "basevideocodec", 0, - "Base Video Codec"); + static volatile gsize base_video_codec_type = 0; + if (g_once_init_enter (&base_video_codec_type)) { + GType _type; + static const GTypeInfo base_video_codec_info = { + sizeof (GstBaseVideoCodecClass), + NULL, + NULL, + (GClassInitFunc) gst_base_video_codec_class_init, + NULL, + NULL, + sizeof (GstBaseVideoCodec), + 0, + (GInstanceInitFunc) gst_base_video_codec_init, + }; + + _type = g_type_register_static (GST_TYPE_ELEMENT, + "GstBaseVideoCodec", &base_video_codec_info, G_TYPE_FLAG_ABSTRACT); + g_once_init_leave (&base_video_codec_type, _type); + } + return base_video_codec_type; } static void @@ -81,9 +110,14 @@ gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass) gobject_class = G_OBJECT_CLASS (klass); element_class = GST_ELEMENT_CLASS (klass); + parent_class = g_type_class_peek_parent (klass); + gobject_class->finalize = gst_base_video_codec_finalize; element_class->change_state = gst_base_video_codec_change_state; + + GST_DEBUG_CATEGORY_INIT (basevideocodec_debug, "basevideocodec", 0, + "Base Video Codec"); } static void @@ -112,7 +146,7 @@ gst_base_video_codec_init (GstBaseVideoCodec * base_video_codec, gst_segment_init (&base_video_codec->segment, GST_FORMAT_TIME); - g_static_rec_mutex_init (&base_video_codec->stream_lock); + g_rec_mutex_init (&base_video_codec->stream_lock); } static void @@ -124,7 +158,7 @@ 
gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec) GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_codec); for (g = base_video_codec->frames; g; g = g_list_next (g)) { - gst_video_frame_unref ((GstVideoFrame *) g->data); + gst_video_frame_state_unref ((GstVideoFrameState *) g->data); } g_list_free (base_video_codec->frames); base_video_codec->frames = NULL; @@ -134,6 +168,8 @@ gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec) gst_buffer_replace (&base_video_codec->state.codec_data, NULL); gst_caps_replace (&base_video_codec->state.caps, NULL); + memset (&base_video_codec->state, 0, sizeof (GstVideoState)); + base_video_codec->state.format = GST_VIDEO_FORMAT_UNKNOWN; GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec); } @@ -142,7 +178,7 @@ gst_base_video_codec_finalize (GObject * object) { GstBaseVideoCodec *base_video_codec = GST_BASE_VIDEO_CODEC (object); - g_static_rec_mutex_free (&base_video_codec->stream_lock); + g_rec_mutex_clear (&base_video_codec->stream_lock); G_OBJECT_CLASS (parent_class)->finalize (object); } @@ -183,31 +219,47 @@ gst_base_video_codec_change_state (GstElement * element, return ret; } -GstVideoFrame * -gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec) +/** + * gst_base_video_codec_append_frame: + * @codec: a #GstBaseVideoCodec + * @frame: the #GstVideoFrameState to append + * + * Appends a frame to the list of frames handled by the codec. + * + * Note: This should normally not be used by implementations. 
+ **/ +void +gst_base_video_codec_append_frame (GstBaseVideoCodec * codec, + GstVideoFrameState * frame) { - GstVideoFrame *frame; - - frame = g_slice_new0 (GstVideoFrame); + g_return_if_fail (frame != NULL); - frame->ref_count = 1; + gst_video_frame_state_ref (frame); + codec->frames = g_list_append (codec->frames, frame); +} - GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_codec); - frame->system_frame_number = base_video_codec->system_frame_number; - base_video_codec->system_frame_number++; - GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec); +void +gst_base_video_codec_remove_frame (GstBaseVideoCodec * codec, + GstVideoFrameState * frame) +{ + GList *link; - GST_LOG_OBJECT (base_video_codec, "Created new frame %p (sfn:%d)", - frame, frame->system_frame_number); + g_return_if_fail (frame != NULL); - return frame; + link = g_list_find (codec->frames, frame); + if (link) { + gst_video_frame_state_unref ((GstVideoFrameState *) link->data); + codec->frames = g_list_delete_link (codec->frames, link); + } } static void -_gst_video_frame_free (GstVideoFrame * frame) +_gst_video_frame_state_free (GstVideoFrameState * frame) { g_return_if_fail (frame != NULL); + GST_LOG ("Freeing frame %p (sfn:%d)", frame, frame->system_frame_number); + if (frame->sink_buffer) { gst_buffer_unref (frame->sink_buffer); } @@ -222,11 +274,48 @@ _gst_video_frame_free (GstVideoFrame * frame) if (frame->coder_hook_destroy_notify && frame->coder_hook) frame->coder_hook_destroy_notify (frame->coder_hook); - g_slice_free (GstVideoFrame, frame); + g_slice_free (GstVideoFrameState, frame); } -GstVideoFrame * -gst_video_frame_ref (GstVideoFrame * frame) +/** + * gst_base_video_codec_new_frame: + * @base_video_codec: a #GstBaseVideoCodec + * + * Creates a new #GstVideoFrameState for usage in decoders or encoders. + * + * Returns: (transfer full): The new #GstVideoFrameState, call + * #gst_video_frame_state_unref() when done with it. 
+ */ +GstVideoFrameState * +gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec) +{ + GstVideoFrameState *frame; + + frame = g_slice_new0 (GstVideoFrameState); + + frame->ref_count = 1; + + GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_codec); + frame->system_frame_number = base_video_codec->system_frame_number; + base_video_codec->system_frame_number++; + GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec); + + GST_LOG_OBJECT (base_video_codec, "Created new frame %p (sfn:%d)", + frame, frame->system_frame_number); + + return frame; +} + +/** + * gst_video_frame_state_ref: + * @frame: a #GstVideoFrameState + * + * Increases the refcount of the given frame by one. + * + * Returns: @buf + */ +GstVideoFrameState * +gst_video_frame_state_ref (GstVideoFrameState * frame) { g_return_val_if_fail (frame != NULL, NULL); @@ -235,13 +324,20 @@ gst_video_frame_ref (GstVideoFrame * frame) return frame; } +/** + * gst_video_frame_state_unref: + * @frame: a #GstVideoFrameState + * + * Decreases the refcount of the frame. If the refcount reaches 0, the frame + * will be freed. + */ void -gst_video_frame_unref (GstVideoFrame * frame) +gst_video_frame_state_unref (GstVideoFrameState * frame) { g_return_if_fail (frame != NULL); g_return_if_fail (frame->ref_count > 0); if (g_atomic_int_dec_and_test (&frame->ref_count)) { - _gst_video_frame_free (frame); + _gst_video_frame_state_free (frame); } } diff --git a/omx/gstbasevideocodec.h b/omx/gstbasevideocodec.h index 41c2282..256279d 100644 --- a/omx/gstbasevideocodec.h +++ b/omx/gstbasevideocodec.h @@ -28,6 +28,8 @@ #include #include #include +#include +#include G_BEGIN_DECLS @@ -76,17 +78,56 @@ G_BEGIN_DECLS /** * GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA: * + * Returned while parsing to indicate more data is needed. 
*/ #define GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS -#define GST_BASE_VIDEO_CODEC_STREAM_LOCK(codec) g_static_rec_mutex_lock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock) -#define GST_BASE_VIDEO_CODEC_STREAM_UNLOCK(codec) g_static_rec_mutex_unlock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock) +/** + * GST_BASE_VIDEO_CODEC_STREAM_LOCK: + * @codec: video codec instance + * + * Obtain a lock to protect the codec function from concurrent access. + * + * Since: 0.10.22 + */ +#define GST_BASE_VIDEO_CODEC_STREAM_LOCK(codec) g_rec_mutex_lock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock) +/** + * GST_BASE_VIDEO_CODEC_STREAM_UNLOCK: + * @codec: video codec instance + * + * Release the lock that protects the codec function from concurrent access. + * + * Since: 0.10.22 + */ +#define GST_BASE_VIDEO_CODEC_STREAM_UNLOCK(codec) g_rec_mutex_unlock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock) typedef struct _GstVideoState GstVideoState; -typedef struct _GstVideoFrame GstVideoFrame; +typedef struct _GstVideoFrameState GstVideoFrameState; typedef struct _GstBaseVideoCodec GstBaseVideoCodec; typedef struct _GstBaseVideoCodecClass GstBaseVideoCodecClass; +/* GstVideoState is only used on the compressed video pad */ +/** + * GstVideoState: + * @width: Width in pixels (including borders) + * @height: Height in pixels (including borders) + * @fps_n: Numerator of framerate + * @fps_d: Denominator of framerate + * @par_n: Numerator of Pixel Aspect Ratio + * @par_d: Denominator of Pixel Aspect Ratio + * @have_interlaced: The content of the @interlaced field is present and valid + * @interlaced: %TRUE if the stream is interlaced + * @top_field_first: %TRUE if the interlaced frame is top-field-first + * @clean_width: Useful width of video in pixels (i.e. without borders) + * @clean_height: Useful height of video in pixels (i.e. 
without borders) + * @clean_offset_left: Horizontal offset (from the left) of useful region in pixels + * @clean_offset_top: Vertical offset (from the top) of useful region in pixels + * @bytes_per_picture: Size in bytes of each picture + * @codec_data: Optional Codec Data for the stream + * + * Information about compressed video stream. + * FIXME: Re-use GstVideoInfo for more fields. + */ struct _GstVideoState { GstCaps *caps; @@ -105,13 +146,45 @@ struct _GstVideoState int bytes_per_picture; GstBuffer *codec_data; - }; -struct _GstVideoFrame +/** + * GstVideoFrameState: + * @decode_timestamp: Decoding timestamp (aka DTS) + * @presentation_timestamp: Presentation timestamp (aka PTS) + * @presentation_duration: Duration of frame + * @system_frame_number: unique ID attributed when #GstVideoFrameState is + * created + * @decode_frame_number: Decoded frame number, increases in decoding order + * @presentation_frame_number: Presentation frame number, increases in + * presentation order. + * @distance_from_sync: Distance of the frame from a sync point, in number + * of frames. + * @is_sync_point: #TRUE if the frame is a synchronization point (like a + * keyframe) + * @is_eos: #TRUE if the frame is the last one of a segment. 
+ * @decode_only: If #TRUE, the frame is only meant to be decoded but not + * pushed downstream + * @sink_buffer: input buffer + * @src_buffer: output buffer + * @field_index: Number of fields since beginning of stream + * @n_fields: Number of fields present in frame (default 2) + * @coder_hook: Private data called with @coder_hook_destroy_notify + * @coder_hook_destroy_notify: Called when frame is destroyed + * @deadline: Target clock time for display (running time) + * @force_keyframe: For encoders, if #TRUE a keyframe must be generated + * @force_keyframe_headers: For encoders, if #TRUE new headers must be generated + * @events: List of #GstEvent that must be pushed before the next @src_buffer + * + * State of a video frame going through the codec + **/ + +struct _GstVideoFrameState { + /*< private >*/ gint ref_count; + /*< public >*/ GstClockTime decode_timestamp; GstClockTime presentation_timestamp; GstClockTime presentation_duration; @@ -124,6 +197,10 @@ struct _GstVideoFrame gboolean is_sync_point; gboolean is_eos; + /* Frames that should not be pushed downstream and are + * not meant for display */ + gboolean decode_only; + GstBuffer *sink_buffer; GstBuffer *src_buffer; @@ -143,51 +220,68 @@ struct _GstVideoFrame GList *events; }; +/** + * GstBaseVideoCodec: + * + * The opaque #GstBaseVideoCodec data structure. + */ struct _GstBaseVideoCodec { - GstElement element; - /*< private >*/ - GstPad *sinkpad; - GstPad *srcpad; + GstElement element; + + /*< protected >*/ + GstPad *sinkpad; + GstPad *srcpad; /* protects all data processing, i.e. 
is locked * in the chain function, finish_frame and when * processing serialized events */ - GStaticRecMutex stream_lock; + GRecMutex stream_lock; - guint64 system_frame_number; + guint64 system_frame_number; GList *frames; /* Protected with OBJECT_LOCK */ - GstVideoState state; + GstVideoState state; /* Compressed video pad */ + GstVideoInfo info; /* Raw video pad */ GstSegment segment; - gdouble proportion; - GstClockTime earliest_time; - gboolean discont; + /* QoS properties */ + gdouble proportion; + GstClockTime earliest_time; + gboolean discont; - gint64 bytes; - gint64 time; + gint64 bytes; + gint64 time; /* FIXME before moving to base */ - void *padding[GST_PADDING_LARGE]; + void *padding[GST_PADDING_LARGE]; }; +/** + * GstBaseVideoCodecClass: + * + * The opaque #GstBaseVideoCodecClass data structure. + */ struct _GstBaseVideoCodecClass { + /*< private >*/ GstElementClass element_class; /* FIXME before moving to base */ void *padding[GST_PADDING_LARGE]; }; -GType gst_video_frame_get_type (void); +GType gst_video_frame_state_get_type (void); GType gst_base_video_codec_get_type (void); -GstVideoFrame * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec); +void gst_base_video_codec_append_frame (GstBaseVideoCodec *codec, GstVideoFrameState *frame); +void gst_base_video_codec_remove_frame (GstBaseVideoCodec *codec, GstVideoFrameState *frame); + +GstVideoFrameState * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec); -GstVideoFrame * gst_video_frame_ref (GstVideoFrame * frame); -void gst_video_frame_unref (GstVideoFrame * frame); +GstVideoFrameState * gst_video_frame_state_ref (GstVideoFrameState * frame); +void gst_video_frame_state_unref (GstVideoFrameState * frame); G_END_DECLS diff --git a/omx/gstbasevideodecoder.c b/omx/gstbasevideodecoder.c index 6a5554e..e3ec9aa 100644 --- a/omx/gstbasevideodecoder.c +++ b/omx/gstbasevideodecoder.c @@ -127,6 +127,10 @@ #include "config.h" #endif +/* FIXME 0.11: suppress warnings for 
deprecated API such as GStaticRecMutex + * with newer GLib versions (>= 2.31.0) */ +#define GLIB_DISABLE_DEPRECATION_WARNINGS + #include "gstbasevideodecoder.h" #include "gstbasevideoutils.h" @@ -137,22 +141,20 @@ GST_DEBUG_CATEGORY (basevideodecoder_debug); static void gst_base_video_decoder_finalize (GObject * object); -static gboolean gst_base_video_decoder_sink_setcaps (GstPad * pad, +static gboolean gst_base_video_decoder_setcaps (GstBaseVideoDecoder * vdec, GstCaps * caps); static gboolean gst_base_video_decoder_sink_event (GstPad * pad, - GstEvent * event); + GstObject * parent, GstEvent * event); static gboolean gst_base_video_decoder_src_event (GstPad * pad, - GstEvent * event); + GstObject * parent, GstEvent * event); static GstFlowReturn gst_base_video_decoder_chain (GstPad * pad, - GstBuffer * buf); + GstObject * parent, GstBuffer * buf); static gboolean gst_base_video_decoder_sink_query (GstPad * pad, - GstQuery * query); + GstObject * parent, GstQuery * query); static GstStateChangeReturn gst_base_video_decoder_change_state (GstElement * element, GstStateChange transition); -static const GstQueryType *gst_base_video_decoder_get_query_types (GstPad * - pad); static gboolean gst_base_video_decoder_src_query (GstPad * pad, - GstQuery * query); + GstObject * parent, GstQuery * query); static void gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder, gboolean full); @@ -167,21 +169,14 @@ gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder * base_video_decoder, int field_offset); static guint64 gst_base_video_decoder_get_field_duration (GstBaseVideoDecoder * base_video_decoder, int n_fields); -static GstVideoFrame *gst_base_video_decoder_new_frame (GstBaseVideoDecoder * - base_video_decoder); +static GstVideoFrameState *gst_base_video_decoder_new_frame (GstBaseVideoDecoder + * base_video_decoder); static void gst_base_video_decoder_clear_queues (GstBaseVideoDecoder * dec); -GST_BOILERPLATE (GstBaseVideoDecoder, 
gst_base_video_decoder, - GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC); - -static void -gst_base_video_decoder_base_init (gpointer g_class) -{ - GST_DEBUG_CATEGORY_INIT (basevideodecoder_debug, "basevideodecoder", 0, - "Base Video Decoder"); - -} +#define gst_base_video_decoder_parent_class parent_class +G_DEFINE_TYPE (GstBaseVideoDecoder, gst_base_video_decoder, + GST_TYPE_BASE_VIDEO_CODEC); static void gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass) @@ -196,11 +191,13 @@ gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass) gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_base_video_decoder_change_state); + + GST_DEBUG_CATEGORY_INIT (basevideodecoder_debug, "basevideodecoder", 0, + "Base Video Decoder"); } static void -gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder, - GstBaseVideoDecoderClass * klass) +gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder) { GstPad *pad; @@ -212,8 +209,6 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder, GST_DEBUG_FUNCPTR (gst_base_video_decoder_chain)); gst_pad_set_event_function (pad, GST_DEBUG_FUNCPTR (gst_base_video_decoder_sink_event)); - gst_pad_set_setcaps_function (pad, - GST_DEBUG_FUNCPTR (gst_base_video_decoder_sink_setcaps)); gst_pad_set_query_function (pad, GST_DEBUG_FUNCPTR (gst_base_video_decoder_sink_query)); @@ -221,8 +216,6 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder, gst_pad_set_event_function (pad, GST_DEBUG_FUNCPTR (gst_base_video_decoder_src_event)); - gst_pad_set_query_type_function (pad, - GST_DEBUG_FUNCPTR (gst_base_video_decoder_get_query_types)); gst_pad_set_query_function (pad, GST_DEBUG_FUNCPTR (gst_base_video_decoder_src_query)); gst_pad_use_fixed_caps (pad); @@ -264,16 +257,15 @@ gst_base_video_decoder_push_src_event (GstBaseVideoDecoder * decoder, } static gboolean -gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) +gst_base_video_decoder_setcaps 
(GstBaseVideoDecoder * base_video_decoder, + GstCaps * caps) { - GstBaseVideoDecoder *base_video_decoder; GstBaseVideoDecoderClass *base_video_decoder_class; GstStructure *structure; const GValue *codec_data; GstVideoState state; gboolean ret = TRUE; - base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); @@ -287,35 +279,42 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) structure = gst_caps_get_structure (caps, 0); - gst_video_format_parse_caps (caps, NULL, &state.width, &state.height); - /* this one fails if no framerate in caps */ - if (!gst_video_parse_caps_framerate (caps, &state.fps_n, &state.fps_d)) { + /* FIXME : Add have_{width_height|framerate|par} fields to + * GstVideoState so we can make better decisions + */ + + gst_structure_get_int (structure, "width", &state.width); + gst_structure_get_int (structure, "height", &state.height); + + if (!gst_structure_get_fraction (structure, "framerate", &state.fps_n, + &state.fps_d)) { state.fps_n = 0; state.fps_d = 1; } - /* but the p-a-r sets 1/1 instead, which is not quite informative ... 
*/ - if (!gst_structure_has_field (structure, "pixel-aspect-ratio") || - !gst_video_parse_caps_pixel_aspect_ratio (caps, + + if (!gst_structure_get_fraction (structure, "pixel-aspect-ratio", &state.par_n, &state.par_d)) { state.par_n = 0; state.par_d = 1; } state.have_interlaced = - gst_video_format_parse_caps_interlaced (caps, &state.interlaced); + gst_structure_get_boolean (structure, "interlaced", &state.interlaced); codec_data = gst_structure_get_value (structure, "codec_data"); if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) { - state.codec_data = GST_BUFFER (gst_value_dup_mini_object (codec_data)); + state.codec_data = GST_BUFFER (gst_value_get_buffer (codec_data)); + gst_buffer_ref (state.codec_data); } if (base_video_decoder_class->set_format) { + GST_LOG_OBJECT (base_video_decoder, "Calling ::set_format()"); ret = base_video_decoder_class->set_format (base_video_decoder, &state); } if (ret) { - gst_buffer_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)-> - state.codec_data, NULL); + gst_buffer_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->state. 
+ codec_data, NULL); gst_caps_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->state.caps, NULL); GST_BASE_VIDEO_CODEC (base_video_decoder)->state = state; @@ -325,7 +324,6 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) } GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder); - g_object_unref (base_video_decoder); return ret; } @@ -348,6 +346,11 @@ gst_base_video_decoder_finalize (GObject * object) base_video_decoder->output_adapter = NULL; } + if (base_video_decoder->pool) { + g_object_unref (base_video_decoder->pool); + base_video_decoder->pool = NULL; + } + G_OBJECT_CLASS (parent_class)->finalize (object); } @@ -389,13 +392,14 @@ gst_base_video_decoder_flush (GstBaseVideoDecoder * dec, gboolean hard) } static gboolean -gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) +gst_base_video_decoder_sink_event (GstPad * pad, GstObject * parent, + GstEvent * event) { GstBaseVideoDecoder *base_video_decoder; GstBaseVideoDecoderClass *base_video_decoder_class; gboolean ret = FALSE; - base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder = GST_BASE_VIDEO_DECODER (parent); base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); @@ -404,6 +408,15 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) GST_EVENT_TYPE_NAME (event)); switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_CAPS: + { + GstCaps *caps; + + gst_event_parse_caps (event, &caps); + ret = gst_base_video_decoder_setcaps (base_video_decoder, caps); + gst_event_unref (event); + break; + } case GST_EVENT_EOS: { GstFlowReturn flow_ret; @@ -427,66 +440,52 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder); break; } - case GST_EVENT_NEWSEGMENT: + case GST_EVENT_SEGMENT: { - gboolean update; - double rate, arate; - GstFormat format; - gint64 start; - gint64 stop; - gint64 pos; + GstSegment seg; GstSegment *segment = 
&GST_BASE_VIDEO_CODEC (base_video_decoder)->segment; GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); - gst_event_parse_new_segment_full (event, &update, &rate, - &arate, &format, &start, &stop, &pos); + gst_event_copy_segment (event, &seg); - if (format == GST_FORMAT_TIME) { + if (seg.format == GST_FORMAT_TIME) { GST_DEBUG_OBJECT (base_video_decoder, - "received TIME NEW_SEGMENT %" GST_TIME_FORMAT - " -- %" GST_TIME_FORMAT ", pos %" GST_TIME_FORMAT - ", rate %g, applied_rate %g", - GST_TIME_ARGS (start), GST_TIME_ARGS (stop), GST_TIME_ARGS (pos), - rate, arate); + "received TIME SEGMENT %" GST_SEGMENT_FORMAT, &seg); } else { GstFormat dformat = GST_FORMAT_TIME; + gint64 start; GST_DEBUG_OBJECT (base_video_decoder, - "received NEW_SEGMENT %" G_GINT64_FORMAT - " -- %" G_GINT64_FORMAT ", time %" G_GINT64_FORMAT - ", rate %g, applied_rate %g", start, stop, pos, rate, arate); + "received SEGMENT %" GST_SEGMENT_FORMAT, &seg); /* handle newsegment as a result from our legacy simple seeking */ /* note that initial 0 should convert to 0 in any case */ if (base_video_decoder->do_byte_time && gst_pad_query_convert (GST_BASE_VIDEO_CODEC_SINK_PAD - (base_video_decoder), GST_FORMAT_BYTES, start, &dformat, + (base_video_decoder), GST_FORMAT_BYTES, seg.start, dformat, &start)) { /* best attempt convert */ /* as these are only estimates, stop is kept open-ended to avoid * premature cutting */ GST_DEBUG_OBJECT (base_video_decoder, "converted to TIME start %" GST_TIME_FORMAT, - GST_TIME_ARGS (start)); - pos = start; - stop = GST_CLOCK_TIME_NONE; + GST_TIME_ARGS (seg.start)); + seg.start = start; + seg.stop = GST_CLOCK_TIME_NONE; + seg.time = start; /* replace event */ gst_event_unref (event); - event = gst_event_new_new_segment_full (update, rate, arate, - GST_FORMAT_TIME, start, stop, pos); + event = gst_event_new_segment (&seg); } else { GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder); goto newseg_wrong_format; } } - if (!update) { - gst_base_video_decoder_flush 
(base_video_decoder, FALSE); - } + gst_base_video_decoder_flush (base_video_decoder, FALSE); - base_video_decoder->timestamp_offset = start; + base_video_decoder->timestamp_offset = seg.start; - gst_segment_set_newsegment_full (segment, - update, rate, arate, format, start, stop, pos); + *segment = seg; ret = gst_base_video_decoder_push_src_event (base_video_decoder, event); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder); @@ -506,7 +505,6 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) } done: - gst_object_unref (base_video_decoder); return ret; newseg_wrong_format: @@ -565,13 +563,12 @@ gst_base_video_decoder_do_seek (GstBaseVideoDecoder * dec, GstEvent * event) } memcpy (&seek_segment, &codec->segment, sizeof (seek_segment)); - gst_segment_set_seek (&seek_segment, rate, format, flags, start_type, + gst_segment_do_seek (&seek_segment, rate, format, flags, start_type, start_time, end_type, end_time, NULL); - start_time = seek_segment.last_stop; + start_time = seek_segment.position; - format = GST_FORMAT_BYTES; if (!gst_pad_query_convert (codec->sinkpad, GST_FORMAT_TIME, start_time, - &format, &start)) { + GST_FORMAT_BYTES, &start)) { GST_DEBUG_OBJECT (dec, "conversion failed"); return FALSE; } @@ -588,12 +585,13 @@ gst_base_video_decoder_do_seek (GstBaseVideoDecoder * dec, GstEvent * event) } static gboolean -gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event) +gst_base_video_decoder_src_event (GstPad * pad, GstObject * parent, + GstEvent * event) { GstBaseVideoDecoder *base_video_decoder; gboolean res = FALSE; - base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder = GST_BASE_VIDEO_DECODER (parent); GST_DEBUG_OBJECT (base_video_decoder, "received event %d, %s", GST_EVENT_TYPE (event), @@ -630,9 +628,9 @@ gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event) /* ... 
though a non-time seek can be aided as well */ /* First bring the requested format to time */ tformat = GST_FORMAT_TIME; - if (!(res = gst_pad_query_convert (pad, format, cur, &tformat, &tcur))) + if (!(res = gst_pad_query_convert (pad, format, cur, tformat, &tcur))) goto convert_error; - if (!(res = gst_pad_query_convert (pad, format, stop, &tformat, &tstop))) + if (!(res = gst_pad_query_convert (pad, format, stop, tformat, &tstop))) goto convert_error; /* then seek with time on the peer */ @@ -647,12 +645,13 @@ gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event) } case GST_EVENT_QOS: { + GstQOSType type; gdouble proportion; GstClockTimeDiff diff; GstClockTime timestamp; GstClockTime duration; - gst_event_parse_qos (event, &proportion, &diff, ×tamp); + gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp); GST_OBJECT_LOCK (base_video_decoder); GST_BASE_VIDEO_CODEC (base_video_decoder)->proportion = proportion; @@ -693,7 +692,6 @@ gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event) break; } done: - gst_object_unref (base_video_decoder); return res; convert_error: @@ -701,26 +699,14 @@ convert_error: goto done; } -static const GstQueryType * -gst_base_video_decoder_get_query_types (GstPad * pad) -{ - static const GstQueryType query_types[] = { - GST_QUERY_POSITION, - GST_QUERY_DURATION, - GST_QUERY_CONVERT, - 0 - }; - - return query_types; -} - static gboolean -gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) +gst_base_video_decoder_src_query (GstPad * pad, GstObject * parent, + GstQuery * query) { GstBaseVideoDecoder *dec; gboolean res = TRUE; - dec = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + dec = GST_BASE_VIDEO_DECODER (parent); GST_LOG_OBJECT (dec, "handling query: %" GST_PTR_FORMAT, query); @@ -750,7 +736,7 @@ gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) /* and convert to the final format */ gst_query_parse_position (query, &format, NULL); if (!(res = gst_pad_query_convert 
(pad, GST_FORMAT_TIME, time, - &format, &value))) + format, &value))) break; gst_query_set_position (query, format, value); @@ -765,7 +751,7 @@ gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) GstFormat format; /* upstream in any case */ - if ((res = gst_pad_query_default (pad, query))) + if ((res = gst_pad_query_default (pad, parent, query))) break; gst_query_parse_duration (query, &format, NULL); @@ -774,12 +760,12 @@ gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) gint64 value; format = GST_FORMAT_BYTES; - if (gst_pad_query_peer_duration (GST_BASE_VIDEO_CODEC_SINK_PAD (dec), - &format, &value)) { + if (gst_pad_peer_query_duration (GST_BASE_VIDEO_CODEC_SINK_PAD (dec), + format, &value)) { GST_LOG_OBJECT (dec, "upstream size %" G_GINT64_FORMAT, value); format = GST_FORMAT_TIME; if (gst_pad_query_convert (GST_BASE_VIDEO_CODEC_SINK_PAD (dec), - GST_FORMAT_BYTES, value, &format, &value)) { + GST_FORMAT_BYTES, value, format, &value)) { gst_query_set_duration (query, GST_FORMAT_TIME, value); res = TRUE; } @@ -803,24 +789,26 @@ gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) break; } default: - res = gst_pad_query_default (pad, query); + res = gst_pad_query_default (pad, parent, query); } - gst_object_unref (dec); return res; + /* ERRORS */ error: - GST_ERROR_OBJECT (dec, "query failed"); - gst_object_unref (dec); - return res; + { + GST_ERROR_OBJECT (dec, "query failed"); + return res; + } } static gboolean -gst_base_video_decoder_sink_query (GstPad * pad, GstQuery * query) +gst_base_video_decoder_sink_query (GstPad * pad, GstObject * parent, + GstQuery * query) { GstBaseVideoDecoder *base_video_decoder; gboolean res = FALSE; - base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder = GST_BASE_VIDEO_DECODER (parent); GST_LOG_OBJECT (base_video_decoder, "handling query: %" GST_PTR_FORMAT, query); @@ -841,16 +829,18 @@ gst_base_video_decoder_sink_query (GstPad * pad, GstQuery * 
query) break; } default: - res = gst_pad_query_default (pad, query); + res = gst_pad_query_default (pad, parent, query); break; } done: - gst_object_unref (base_video_decoder); - return res; + + /* ERRORS */ error: - GST_DEBUG_OBJECT (base_video_decoder, "query failed"); - goto done; + { + GST_DEBUG_OBJECT (base_video_decoder, "query failed"); + goto done; + } } typedef struct _Timestamp Timestamp; @@ -922,17 +912,17 @@ gst_base_video_decoder_clear_queues (GstBaseVideoDecoder * dec) g_list_foreach (dec->gather, (GFunc) gst_mini_object_unref, NULL); g_list_free (dec->gather); dec->gather = NULL; - g_list_foreach (dec->decode, (GFunc) gst_video_frame_unref, NULL); + g_list_foreach (dec->decode, (GFunc) gst_video_frame_state_unref, NULL); g_list_free (dec->decode); dec->decode = NULL; g_list_foreach (dec->parse, (GFunc) gst_mini_object_unref, NULL); g_list_free (dec->parse); dec->parse = NULL; - g_list_foreach (dec->parse_gather, (GFunc) gst_video_frame_unref, NULL); + g_list_foreach (dec->parse_gather, (GFunc) gst_video_frame_state_unref, NULL); g_list_free (dec->parse_gather); dec->parse_gather = NULL; g_list_foreach (GST_BASE_VIDEO_CODEC (dec)->frames, - (GFunc) gst_video_frame_unref, NULL); + (GFunc) gst_video_frame_state_unref, NULL); g_list_free (GST_BASE_VIDEO_CODEC (dec)->frames); GST_BASE_VIDEO_CODEC (dec)->frames = NULL; } @@ -968,7 +958,7 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder, base_video_decoder->timestamps = NULL; if (base_video_decoder->current_frame) { - gst_video_frame_unref (base_video_decoder->current_frame); + gst_video_frame_state_unref (base_video_decoder->current_frame); base_video_decoder->current_frame = NULL; } @@ -1006,7 +996,7 @@ gst_base_video_decoder_chain_forward (GstBaseVideoDecoder * base_video_decoder, if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { gst_base_video_decoder_add_timestamp (base_video_decoder, buf); } - base_video_decoder->input_offset += GST_BUFFER_SIZE (buf); + 
base_video_decoder->input_offset += gst_buffer_get_size (buf); if (base_video_decoder->packetized) { base_video_decoder->current_frame->sink_buffer = buf; @@ -1060,6 +1050,7 @@ gst_base_video_decoder_chain_forward (GstBaseVideoDecoder * base_video_decoder, } do { + GST_LOG_OBJECT (base_video_decoder, "Calling ::parse_data()"); ret = klass->parse_data (base_video_decoder, FALSE); } while (ret == GST_FLOW_OK); @@ -1089,7 +1080,7 @@ gst_base_video_decoder_flush_decode (GstBaseVideoDecoder * dec) while (walk) { GList *next; - GstVideoFrame *frame = (GstVideoFrame *) (walk->data); + GstVideoFrameState *frame = (GstVideoFrameState *) (walk->data); GstBuffer *buf = frame->sink_buffer; GST_DEBUG_OBJECT (dec, "decoding frame %p, ts %" GST_TIME_FORMAT, @@ -1097,9 +1088,9 @@ gst_base_video_decoder_flush_decode (GstBaseVideoDecoder * dec) next = g_list_next (walk); if (dec->current_frame) - gst_video_frame_unref (dec->current_frame); + gst_video_frame_state_unref (dec->current_frame); dec->current_frame = frame; - gst_video_frame_ref (dec->current_frame); + gst_video_frame_state_ref (dec->current_frame); /* decode buffer, resulting data prepended to queue */ res = gst_base_video_decoder_have_frame_2 (dec); @@ -1152,9 +1143,9 @@ gst_base_video_decoder_flush_parse (GstBaseVideoDecoder * dec) /* now we can process frames */ GST_DEBUG_OBJECT (dec, "checking frames"); while (dec->parse_gather) { - GstVideoFrame *frame; + GstVideoFrameState *frame; - frame = (GstVideoFrame *) (dec->parse_gather->data); + frame = (GstVideoFrameState *) (dec->parse_gather->data); /* remove from the gather list */ dec->parse_gather = g_list_delete_link (dec->parse_gather, dec->parse_gather); @@ -1173,12 +1164,12 @@ gst_base_video_decoder_flush_parse (GstBaseVideoDecoder * dec) GstBuffer *buf = GST_BUFFER_CAST (dec->queued->data); if (G_LIKELY (res == GST_FLOW_OK)) { - GST_DEBUG_OBJECT (dec, "pushing buffer %p of size %u, " + GST_DEBUG_OBJECT (dec, "pushing buffer %p of size %" G_GSIZE_FORMAT ", " "time 
%" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf, - GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), + gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); /* should be already, but let's be sure */ - buf = gst_buffer_make_metadata_writable (buf); + buf = gst_buffer_make_writable (buf); /* avoid stray DISCONT from forward processing, * which have no meaning in reverse pushing */ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT); @@ -1216,9 +1207,9 @@ gst_base_video_decoder_chain_reverse (GstBaseVideoDecoder * dec, } if (G_LIKELY (buf)) { - GST_DEBUG_OBJECT (dec, "gathering buffer %p of size %u, " + GST_DEBUG_OBJECT (dec, "gathering buffer %p of size %" G_GSIZE_FORMAT ", " "time %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf, - GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), + gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); /* add buffer to gather queue */ @@ -1229,17 +1220,17 @@ gst_base_video_decoder_chain_reverse (GstBaseVideoDecoder * dec, } static GstFlowReturn -gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) +gst_base_video_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstBaseVideoDecoder *base_video_decoder; GstFlowReturn ret = GST_FLOW_OK; - base_video_decoder = GST_BASE_VIDEO_DECODER (GST_PAD_PARENT (pad)); + base_video_decoder = GST_BASE_VIDEO_DECODER (parent); GST_LOG_OBJECT (base_video_decoder, - "chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT " size %d", - GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), - GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_SIZE (buf)); + "chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT " size %" + G_GSIZE_FORMAT "", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), gst_buffer_get_size (buf)); GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); @@ -1251,17 
+1242,15 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) GST_FORMAT_UNDEFINED) { GstEvent *event; GstFlowReturn ret; + GstSegment *segment = &GST_BASE_VIDEO_CODEC (base_video_decoder)->segment; GST_WARNING_OBJECT (base_video_decoder, "Received buffer without a new-segment. " "Assuming timestamps start from 0."); - gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC - (base_video_decoder)->segment, FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, - GST_CLOCK_TIME_NONE, 0); + gst_segment_init (segment, GST_FORMAT_TIME); - event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, - GST_CLOCK_TIME_NONE, 0); + event = gst_event_new_segment (segment); ret = gst_base_video_decoder_push_src_event (base_video_decoder, event); if (!ret) { @@ -1347,10 +1336,10 @@ gst_base_video_decoder_change_state (GstElement * element, return ret; } -static GstVideoFrame * +static GstVideoFrameState * gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) { - GstVideoFrame *frame; + GstVideoFrameState *frame; GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); frame = @@ -1375,12 +1364,13 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) static void gst_base_video_decoder_prepare_finish_frame (GstBaseVideoDecoder * - base_video_decoder, GstVideoFrame * frame) + base_video_decoder, GstVideoFrameState * frame) { GList *l, *events = NULL; #ifndef GST_DISABLE_GST_DEBUG - GST_LOG_OBJECT (base_video_decoder, "n %d in %d out %d", + GST_LOG_OBJECT (base_video_decoder, + "n %d in %" G_GSIZE_FORMAT " out %" G_GSIZE_FORMAT, g_list_length (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames), gst_adapter_available (base_video_decoder->input_adapter), gst_adapter_available (base_video_decoder->output_adapter)); @@ -1392,7 +1382,7 @@ gst_base_video_decoder_prepare_finish_frame (GstBaseVideoDecoder * /* Push all pending events that arrived before this frame */ for (l = base_video_decoder->base_video_codec.frames; l; l = l->next) { - 
GstVideoFrame *tmp = l->data; + GstVideoFrameState *tmp = l->data; if (tmp->events) { events = tmp->events; @@ -1411,6 +1401,11 @@ gst_base_video_decoder_prepare_finish_frame (GstBaseVideoDecoder * } g_list_free (events); + /* Check if the data should not be displayed. For example altref/invisible + * frame in vp8. In this case we should not update the timestamps. */ + if (frame->decode_only) + return; + if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { GST_DEBUG_OBJECT (base_video_decoder, @@ -1471,12 +1466,14 @@ gst_base_video_decoder_prepare_finish_frame (GstBaseVideoDecoder * static void gst_base_video_decoder_do_finish_frame (GstBaseVideoDecoder * dec, - GstVideoFrame * frame) + GstVideoFrameState * frame) { - GST_BASE_VIDEO_CODEC (dec)->frames = - g_list_remove (GST_BASE_VIDEO_CODEC (dec)->frames, frame); + gst_base_video_codec_remove_frame (GST_BASE_VIDEO_CODEC (dec), frame); + + if (frame->src_buffer) + gst_buffer_unref (frame->src_buffer); - gst_video_frame_unref (frame); + gst_video_frame_state_unref (frame); } /** @@ -1494,7 +1491,7 @@ gst_base_video_decoder_do_finish_frame (GstBaseVideoDecoder * dec, */ GstFlowReturn gst_base_video_decoder_drop_frame (GstBaseVideoDecoder * dec, - GstVideoFrame * frame) + GstVideoFrameState * frame) { GstClockTime stream_time, jitter, earliest_time, qostime, timestamp; GstSegment *segment; @@ -1539,7 +1536,7 @@ gst_base_video_decoder_drop_frame (GstBaseVideoDecoder * dec, /** * gst_base_video_decoder_finish_frame: * @base_video_decoder: a #GstBaseVideoDecoder - * @frame: a decoded #GstVideoFrame + * @frame: a decoded #GstVideoFrameState * * @frame should have a valid decoded data buffer, whose metadata fields * are then appropriately set according to frame data and pushed downstream. 
@@ -1550,7 +1547,7 @@ gst_base_video_decoder_drop_frame (GstBaseVideoDecoder * dec, */ GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, - GstVideoFrame * frame) + GstVideoFrameState * frame) { GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; GstBuffer *src_buffer; @@ -1564,13 +1561,13 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, base_video_decoder->processed++; /* no buffer data means this frame is skipped */ - if (!frame->src_buffer) { + if (!frame->src_buffer || frame->decode_only) { GST_DEBUG_OBJECT (base_video_decoder, "skipping frame %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->presentation_timestamp)); goto done; } - src_buffer = gst_buffer_make_metadata_writable (frame->src_buffer); + src_buffer = gst_buffer_make_writable (frame->src_buffer); frame->src_buffer = NULL; GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); @@ -1581,16 +1578,16 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, tff ^= 1; } if (tff) { - GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF); + GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_FLAG_TFF); } else { - GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF); + GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_FLAG_TFF); } - GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF); - GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD); + GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_FLAG_RFF); + GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_FLAG_ONEFIELD); if (frame->n_fields == 3) { - GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF); + GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_FLAG_RFF); } else if (frame->n_fields == 1) { - GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD); + GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_FLAG_ONEFIELD); } } if (GST_BASE_VIDEO_CODEC (base_video_decoder)->discont) { @@ -1605,7 +1602,7 @@ 
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, /* update rate estimate */ GST_BASE_VIDEO_CODEC (base_video_decoder)->bytes += - GST_BUFFER_SIZE (src_buffer); + gst_buffer_get_size (src_buffer); if (GST_CLOCK_TIME_IS_VALID (frame->presentation_duration)) { GST_BASE_VIDEO_CODEC (base_video_decoder)->time += frame->presentation_duration; @@ -1614,17 +1611,14 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, GST_BASE_VIDEO_CODEC (base_video_decoder)->time = GST_CLOCK_TIME_NONE; } - gst_buffer_set_caps (src_buffer, - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder))); - GST_LOG_OBJECT (base_video_decoder, "pushing frame ts %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)), GST_TIME_ARGS (GST_BUFFER_DURATION (src_buffer))); if (base_video_decoder->sink_clipping) { - gint64 start = GST_BUFFER_TIMESTAMP (src_buffer); - gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) + + guint64 start = GST_BUFFER_TIMESTAMP (src_buffer); + guint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) + GST_BUFFER_DURATION (src_buffer); GstSegment *segment = &GST_BASE_VIDEO_CODEC (base_video_decoder)->segment; @@ -1681,9 +1675,9 @@ done: } /** - * gst_base_video_decoder_finish_frame: + * gst_base_video_decoder_add_to_frame: * @base_video_decoder: a #GstBaseVideoDecoder - * @n_bytes: an encoded #GstVideoFrame + * @n_bytes: number of bytes of input data to add to the current frame * * Removes next @n_bytes of input data and adds it to currently parsed frame.
*/ @@ -1817,7 +1811,7 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder) static GstFlowReturn gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder) { - GstVideoFrame *frame = base_video_decoder->current_frame; + GstVideoFrameState *frame = base_video_decoder->current_frame; GstBaseVideoDecoderClass *base_video_decoder_class; GstFlowReturn ret = GST_FLOW_OK; @@ -1847,8 +1841,8 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder) GST_TIME_ARGS (frame->decode_timestamp)); GST_LOG_OBJECT (base_video_decoder, "dist %d", frame->distance_from_sync); - GST_BASE_VIDEO_CODEC (base_video_decoder)->frames = - g_list_append (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame); + gst_base_video_codec_append_frame (GST_BASE_VIDEO_CODEC (base_video_decoder), + frame); frame->deadline = gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC @@ -1856,6 +1850,7 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder) frame->presentation_timestamp); /* do something with frame */ + GST_LOG_OBJECT (base_video_decoder, "Calling ::handle_frame()"); ret = base_video_decoder_class->handle_frame (base_video_decoder, frame); if (ret != GST_FLOW_OK) { GST_DEBUG_OBJECT (base_video_decoder, "flow error %s", @@ -1877,11 +1872,15 @@ exit: * gst_base_video_decoder_get_state: * @base_video_decoder: a #GstBaseVideoDecoder * + * Get the current #GstVideoState + * * Returns: #GstVideoState describing format of video data. */ GstVideoState * gst_base_video_decoder_get_state (GstBaseVideoDecoder * base_video_decoder) { + /* FIXME : Move to base codec class */ + return &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; } @@ -1930,21 +1929,25 @@ gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder * base_video_decoder) * gst_base_video_decoder_get_oldest_frame: * @base_video_decoder: a #GstBaseVideoDecoder * - * Returns: oldest pending unfinished #GstVideoFrame. 
+ * Get the oldest pending unfinished #GstVideoFrameState + * + * Returns: oldest pending unfinished #GstVideoFrameState. */ -GstVideoFrame * +GstVideoFrameState * gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder * base_video_decoder) { GList *g; + /* FIXME : Move to base codec class */ + GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder); if (g == NULL) return NULL; - return (GstVideoFrame *) (g->data); + return (GstVideoFrameState *) (g->data); } /** @@ -1952,19 +1955,21 @@ gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder * * @base_video_decoder: a #GstBaseVideoDecoder * @frame_number: system_frame_number of a frame * - * Returns: pending unfinished #GstVideoFrame identified by @frame_number. + * Get a pending unfinished #GstVideoFrameState + * + * Returns: pending unfinished #GstVideoFrameState identified by @frame_number. */ -GstVideoFrame * +GstVideoFrameState * gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder, int frame_number) { GList *g; - GstVideoFrame *frame = NULL; + GstVideoFrameState *frame = NULL; GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); for (g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames); g; g = g_list_next (g)) { - GstVideoFrame *tmp = g->data; + GstVideoFrameState *tmp = g->data; if (frame->system_frame_number == frame_number) { frame = tmp; @@ -1980,14 +1985,22 @@ gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder, * gst_base_video_decoder_set_src_caps: * @base_video_decoder: a #GstBaseVideoDecoder * - * Sets src pad caps according to currently configured #GstVideoState. + * The #GstVideoInfo and #GstBufferPool will be created and negotiated + * according to those values. * + * Returns: %TRUE if the format was properly negotiated, else %FALSE. 
*/ gboolean gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) { GstCaps *caps; - GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; + GstBaseVideoCodec *codec = GST_BASE_VIDEO_CODEC (base_video_decoder); + GstVideoState *state = &codec->state; + GstVideoInfo *info = &codec->info; + GstQuery *query; + GstBufferPool *pool; + GstStructure *config; + guint size, min, max; gboolean ret; /* minimum sense */ @@ -1997,6 +2010,8 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); + gst_video_info_set_format (info, state->format, state->width, state->height); + /* sanitize */ if (state->fps_n == 0 || state->fps_d == 0) { state->fps_n = 0; @@ -2007,11 +2022,22 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) state->par_d = 1; } - caps = gst_video_format_new_caps (state->format, - state->width, state->height, - state->fps_n, state->fps_d, state->par_n, state->par_d); - gst_caps_set_simple (caps, "interlaced", - G_TYPE_BOOLEAN, state->interlaced, NULL); + info->par_n = state->par_n; + info->par_d = state->par_d; + info->fps_n = state->fps_n; + info->fps_d = state->fps_d; + + if (state->have_interlaced) { + if (state->interlaced) + GST_VIDEO_INFO_FLAG_SET (info, GST_VIDEO_FLAG_INTERLACED); + if (state->top_field_first) + GST_VIDEO_INFO_FLAG_SET (info, GST_VIDEO_FLAG_TFF); + } + + /* FIXME : Handle chroma site */ + /* FIXME : Handle colorimetry */ + + caps = gst_video_info_to_caps (info); GST_DEBUG_OBJECT (base_video_decoder, "setting caps %" GST_PTR_FORMAT, caps); @@ -2020,9 +2046,55 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) caps); gst_caps_unref (caps); - /* arrange for derived info */ - state->bytes_per_picture = - gst_video_format_get_size (state->format, state->width, state->height); + /* Negotiate pool */ + query = gst_query_new_allocation (caps, TRUE); + + if 
(!gst_pad_peer_query (codec->srcpad, query)) { + GST_DEBUG_OBJECT (codec, "didn't get downstream ALLOCATION hints"); + } + + if (gst_query_get_n_allocation_pools (query) > 0) { + /* we got configuration from our peer, parse them */ + gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max); + size = MAX (size, info->size); + } else { + pool = NULL; + size = info->size; + min = max = 0; + } + + if (pool == NULL) { + /* we did not get a pool, make one ourselves then */ + pool = gst_video_buffer_pool_new (); + } + + if (base_video_decoder->pool) { + gst_buffer_pool_set_active (base_video_decoder->pool, FALSE); + gst_object_unref (base_video_decoder->pool); + } + base_video_decoder->pool = pool; + + config = gst_buffer_pool_get_config (pool); + gst_buffer_pool_config_set_params (config, caps, size, min, max); + state->bytes_per_picture = size; + + if (gst_query_has_allocation_meta (query, GST_VIDEO_META_API_TYPE)) { + /* just set the option, if the pool can support it we will transparently use + * it through the video info API. We could also see if the pool support this + * option and only activate it then. */ + gst_buffer_pool_config_add_option (config, + GST_BUFFER_POOL_OPTION_VIDEO_META); + } + + /* check if downstream supports cropping */ + base_video_decoder->use_cropping = + gst_query_has_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE); + + gst_buffer_pool_set_config (pool, config); + /* and activate */ + gst_buffer_pool_set_active (pool, TRUE); + + gst_query_unref (query); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder); @@ -2033,48 +2105,32 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) * gst_base_video_decoder_alloc_src_buffer: * @base_video_decoder: a #GstBaseVideoDecoder * - * Helper function that uses gst_pad_alloc_buffer_and_set_caps - * to allocate a buffer to hold a video frame for @base_video_decoder's - * current #GstVideoState. 
+ * Helper function that returns a buffer from the decoder's configured + * #GstBufferPool. * - * Returns: allocated buffer + * Returns: (transfer full): allocated buffer */ GstBuffer * gst_base_video_decoder_alloc_src_buffer (GstBaseVideoDecoder * base_video_decoder) { - GstBuffer *buffer; - GstFlowReturn flow_ret; - GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; - int num_bytes = state->bytes_per_picture; - - GST_DEBUG ("alloc src buffer caps=%" GST_PTR_FORMAT, - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder))); + GstBuffer *buffer = NULL; GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); - flow_ret = - gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD - (base_video_decoder), GST_BUFFER_OFFSET_NONE, num_bytes, - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)), - &buffer); - - if (flow_ret != GST_FLOW_OK) { - buffer = gst_buffer_new_and_alloc (num_bytes); - gst_buffer_set_caps (buffer, - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder))); - } + gst_buffer_pool_acquire_buffer (base_video_decoder->pool, &buffer, NULL); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder); + return buffer; } /** * gst_base_video_decoder_alloc_src_frame: * @base_video_decoder: a #GstBaseVideoDecoder - * @frame: a #GstVideoFrame + * @frame: a #GstVideoFrameState * - * Helper function that uses gst_pad_alloc_buffer_and_set_caps + * Helper function that uses @gst_buffer_pool_acquire_buffer() * to allocate a buffer to hold a video frame for @base_video_decoder's * current #GstVideoState. Subclass should already have configured video state * and set src pad caps.
@@ -2083,24 +2139,17 @@ gst_base_video_decoder_alloc_src_buffer (GstBaseVideoDecoder * */ GstFlowReturn gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder * - base_video_decoder, GstVideoFrame * frame) + base_video_decoder, GstVideoFrameState * frame) { GstFlowReturn flow_ret; - GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; - int num_bytes = state->bytes_per_picture; - g_return_val_if_fail (state->bytes_per_picture != 0, GST_FLOW_ERROR); - g_return_val_if_fail (GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD - (base_video_decoder)) != NULL, GST_FLOW_ERROR); + GST_LOG_OBJECT (base_video_decoder, "alloc buffer"); - GST_LOG_OBJECT (base_video_decoder, "alloc buffer size %d", num_bytes); GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder); flow_ret = - gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD - (base_video_decoder), GST_BUFFER_OFFSET_NONE, num_bytes, - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)), - &frame->src_buffer); + gst_buffer_pool_acquire_buffer (base_video_decoder->pool, + &frame->src_buffer, NULL); if (flow_ret != GST_FLOW_OK) { GST_WARNING_OBJECT (base_video_decoder, "failed to get buffer %s", @@ -2115,7 +2164,7 @@ gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder * /** * gst_base_video_decoder_get_max_decode_time: * @base_video_decoder: a #GstBaseVideoDecoder - * @frame: a #GstVideoFrame + * @frame: a #GstVideoFrameState * * Determines maximum possible decoding time for @frame that will * allow it to decode and arrive in time (as determined by QoS events). 
@@ -2126,7 +2175,7 @@ gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder * */ GstClockTimeDiff gst_base_video_decoder_get_max_decode_time (GstBaseVideoDecoder * - base_video_decoder, GstVideoFrame * frame) + base_video_decoder, GstVideoFrameState * frame) { GstClockTimeDiff deadline; GstClockTime earliest_time; @@ -2149,8 +2198,10 @@ gst_base_video_decoder_get_max_decode_time (GstBaseVideoDecoder * } /** - * gst_base_video_decoder_get_oldest_frame: + * gst_base_video_decoder_class_set_capture_pattern: * @base_video_decoder_class: a #GstBaseVideoDecoderClass + * @mask: The mask used for scanning + * @pattern: The pattern used for matching * * Sets the mask and pattern that will be scanned for to obtain parse sync. * Note that a non-zero @mask implies that @scan_for_sync will be ignored. diff --git a/omx/gstbasevideodecoder.h b/omx/gstbasevideodecoder.h index 226dc38..53ba565 100644 --- a/omx/gstbasevideodecoder.h +++ b/omx/gstbasevideodecoder.h @@ -95,7 +95,7 @@ GstFlowReturn _gst_base_video_decoder_error (GstBaseVideoDecoder *dec, gint weig * enclosed in parentheses) * @ret: variable to receive return value * - * Utility function that audio decoder elements can use in case they encountered + * Utility function that video decoder elements can use in case they encountered * a data processing error that may be fatal for the current "data unit" but * need not prevent subsequent decoding. Such errors are counted and if there * are too many, as configured in the context's max_errors, the pipeline will @@ -104,7 +104,7 @@ GstFlowReturn _gst_base_video_decoder_error (GstBaseVideoDecoder *dec, gint weig * is logged. In either case, @ret is set to the proper value to * return to upstream/caller (indicating either GST_FLOW_ERROR or GST_FLOW_OK). 
*/ -#define GST_BASE_AUDIO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \ +#define GST_BASE_VIDEO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \ G_STMT_START { \ gchar *__txt = _gst_element_error_printf text; \ gchar *__dbg = _gst_element_error_printf debug; \ @@ -122,6 +122,7 @@ G_STMT_START { \ */ struct _GstBaseVideoDecoder { + /*< private >*/ GstBaseVideoCodec base_video_codec; /*< protected >*/ @@ -142,7 +143,7 @@ struct _GstBaseVideoDecoder /* ... being tracked here; * only available during parsing */ /* FIXME remove and add parameter to method */ - GstVideoFrame *current_frame; + GstVideoFrameState *current_frame; /* events that should apply to the current frame */ GList *current_frame_events; /* relative offset of input data */ @@ -182,6 +183,12 @@ struct _GstBaseVideoDecoder int reorder_depth; int distance_from_sync; + /* Raw video bufferpool */ + GstBufferPool *pool; + /* Indicates whether downstream can handle + * GST_META_API_VIDEO_CROP */ + gboolean use_cropping; + /* qos messages: frames dropped/processed */ guint dropped; guint processed; @@ -191,7 +198,7 @@ struct _GstBaseVideoDecoder }; /** - * GstBaseAudioDecoderClass: + * GstBaseVideoDecoderClass: * @start: Optional. * Called when the element starts processing. * Allows opening external resources. 
@@ -220,8 +227,10 @@ struct _GstBaseVideoDecoder */ struct _GstBaseVideoDecoderClass { + /*< private >*/ GstBaseVideoCodecClass base_video_codec_class; + /*< public >*/ gboolean (*start) (GstBaseVideoDecoder *coder); gboolean (*stop) (GstBaseVideoDecoder *coder); @@ -237,7 +246,7 @@ struct _GstBaseVideoDecoderClass GstFlowReturn (*finish) (GstBaseVideoDecoder *coder); - GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame); + GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrameState *frame); /*< private >*/ @@ -248,12 +257,12 @@ struct _GstBaseVideoDecoderClass void *padding[GST_PADDING_LARGE]; }; -void gst_base_video_decoder_class_set_capture_pattern (GstBaseVideoDecoderClass *klass, +void gst_base_video_decoder_class_set_capture_pattern (GstBaseVideoDecoderClass *base_video_decoder_class, guint32 mask, guint32 pattern); -GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder, +GstVideoFrameState *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder, int frame_number); -GstVideoFrame *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder); +GstVideoFrameState *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder); void gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder *base_video_decoder, int n_bytes); @@ -264,15 +273,16 @@ void gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder *bas gboolean gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder *base_video_decoder); GstBuffer *gst_base_video_decoder_alloc_src_buffer (GstBaseVideoDecoder * base_video_decoder); GstFlowReturn gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder *base_video_decoder, - GstVideoFrame *frame); + GstVideoFrameState *frame); GstVideoState *gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder); GstClockTimeDiff gst_base_video_decoder_get_max_decode_time ( GstBaseVideoDecoder *base_video_decoder, - GstVideoFrame *frame); + GstVideoFrameState 
*frame); GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder, - GstVideoFrame *frame); + GstVideoFrameState *frame); GstFlowReturn gst_base_video_decoder_drop_frame (GstBaseVideoDecoder *dec, - GstVideoFrame *frame); + GstVideoFrameState *frame); + GType gst_base_video_decoder_get_type (void); G_END_DECLS diff --git a/omx/gstbasevideoencoder.c b/omx/gstbasevideoencoder.c index e4fe35f..36248bb 100644 --- a/omx/gstbasevideoencoder.c +++ b/omx/gstbasevideoencoder.c @@ -105,6 +105,10 @@ #include "config.h" #endif +/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex + * with newer GLib versions (>= 2.31.0) */ +#define GLIB_DISABLE_DEPRECATION_WARNINGS + #include "gstbasevideoencoder.h" #include "gstbasevideoutils.h" @@ -143,45 +147,24 @@ forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers, static void gst_base_video_encoder_finalize (GObject * object); -static gboolean gst_base_video_encoder_sink_setcaps (GstPad * pad, - GstCaps * caps); -static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad); +static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad, + GstCaps * filter); static gboolean gst_base_video_encoder_src_event (GstPad * pad, - GstEvent * event); + GstObject * parent, GstEvent * event); static gboolean gst_base_video_encoder_sink_event (GstPad * pad, - GstEvent * event); + GstObject * parent, GstEvent * event); +static gboolean gst_base_video_encoder_sink_query (GstPad * pad, + GstObject * parent, GstQuery * query); static GstFlowReturn gst_base_video_encoder_chain (GstPad * pad, - GstBuffer * buf); + GstObject * parent, GstBuffer * buf); static GstStateChangeReturn gst_base_video_encoder_change_state (GstElement * element, GstStateChange transition); -static const GstQueryType *gst_base_video_encoder_get_query_types (GstPad * - pad); static gboolean gst_base_video_encoder_src_query (GstPad * pad, - GstQuery * query); - -static void -_do_init (GType 
object_type) -{ - const GInterfaceInfo preset_interface_info = { - NULL, /* interface_init */ - NULL, /* interface_finalize */ - NULL /* interface_data */ - }; - - g_type_add_interface_static (object_type, GST_TYPE_PRESET, - &preset_interface_info); -} + GstObject * parent, GstQuery * query); -GST_BOILERPLATE_FULL (GstBaseVideoEncoder, gst_base_video_encoder, - GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC, _do_init); - -static void -gst_base_video_encoder_base_init (gpointer g_class) -{ - GST_DEBUG_CATEGORY_INIT (basevideoencoder_debug, "basevideoencoder", 0, - "Base Video Encoder"); - -} +#define gst_base_video_encoder_parent_class parent_class +G_DEFINE_TYPE_WITH_CODE (GstBaseVideoEncoder, gst_base_video_encoder, + GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL);); static void gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass) @@ -189,6 +172,9 @@ gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass) GObjectClass *gobject_class; GstElementClass *gstelement_class; + GST_DEBUG_CATEGORY_INIT (basevideoencoder_debug, "basevideoencoder", 0, + "Base Video Encoder"); + gobject_class = G_OBJECT_CLASS (klass); gstelement_class = GST_ELEMENT_CLASS (klass); @@ -226,8 +212,7 @@ gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder) } static void -gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder, - GstBaseVideoEncoderClass * klass) +gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder) { GstPad *pad; @@ -239,15 +224,11 @@ gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder, GST_DEBUG_FUNCPTR (gst_base_video_encoder_chain)); gst_pad_set_event_function (pad, GST_DEBUG_FUNCPTR (gst_base_video_encoder_sink_event)); - gst_pad_set_setcaps_function (pad, - GST_DEBUG_FUNCPTR (gst_base_video_encoder_sink_setcaps)); - gst_pad_set_getcaps_function (pad, - GST_DEBUG_FUNCPTR (gst_base_video_encoder_sink_getcaps)); + gst_pad_set_query_function (pad, + 
GST_DEBUG_FUNCPTR (gst_base_video_encoder_sink_query)); pad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder); - gst_pad_set_query_type_function (pad, - GST_DEBUG_FUNCPTR (gst_base_video_encoder_get_query_types)); gst_pad_set_query_function (pad, GST_DEBUG_FUNCPTR (gst_base_video_encoder_src_query)); gst_pad_set_event_function (pad, @@ -260,6 +241,13 @@ gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder, base_video_encoder->sink_clipping = TRUE; } +/** + * gst_base_video_encoder_set_headers: + * @base_video_encoder: a #GstBaseVideoEncoder + * @headers: (transfer full): the #GstBuffer containing the codec header + * + * Set the codec headers to be sent downstream whenever requested. + */ void gst_base_video_encoder_set_headers (GstBaseVideoEncoder * base_video_encoder, GstBuffer * headers) @@ -292,7 +280,7 @@ gst_base_video_encoder_drain (GstBaseVideoEncoder * enc) /* everything should be away now */ if (codec->frames) { /* not fatal/impossible though if subclass/codec eats stuff */ - g_list_foreach (codec->frames, (GFunc) gst_video_frame_unref, NULL); + g_list_foreach (codec->frames, (GFunc) gst_video_frame_state_unref, NULL); g_list_free (codec->frames); codec->frames = NULL; } @@ -301,70 +289,71 @@ gst_base_video_encoder_drain (GstBaseVideoEncoder * enc) } static gboolean -gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps) +gst_base_video_encoder_sink_setcaps (GstBaseVideoEncoder * base_video_encoder, + GstCaps * caps) { - GstBaseVideoEncoder *base_video_encoder; GstBaseVideoEncoderClass *base_video_encoder_class; - GstStructure *structure; + GstBaseVideoCodec *codec = GST_BASE_VIDEO_CODEC (base_video_encoder); + GstVideoInfo *info, tmp_info; GstVideoState *state, tmp_state; - gboolean ret; - gboolean changed = FALSE; + gboolean ret = FALSE; + gboolean changed = TRUE; + + GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps); - base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); 
base_video_encoder_class = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); /* subclass should do something here ... */ g_return_val_if_fail (base_video_encoder_class->set_format != NULL, FALSE); - GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps); - GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder); - state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state; + /* Get GstVideoInfo from upstream caps */ + info = &codec->info; + if (!gst_video_info_from_caps (&tmp_info, caps)) + goto exit; + + state = &codec->state; memset (&tmp_state, 0, sizeof (tmp_state)); tmp_state.caps = gst_caps_ref (caps); - structure = gst_caps_get_structure (caps, 0); - - ret = - gst_video_format_parse_caps (caps, &tmp_state.format, &tmp_state.width, - &tmp_state.height); - if (!ret) - goto exit; - changed = (tmp_state.format != state->format - || tmp_state.width != state->width || tmp_state.height != state->height); - - if (!gst_video_parse_caps_framerate (caps, &tmp_state.fps_n, - &tmp_state.fps_d)) { - tmp_state.fps_n = 0; - tmp_state.fps_d = 1; + /* Check if input caps changed */ + if (info->finfo) { + /* Check if anything changed */ + changed = GST_VIDEO_INFO_FORMAT (&tmp_info) != GST_VIDEO_INFO_FORMAT (info); + changed |= GST_VIDEO_INFO_FLAGS (&tmp_info) != GST_VIDEO_INFO_FLAGS (info); + changed |= GST_VIDEO_INFO_WIDTH (&tmp_info) != GST_VIDEO_INFO_WIDTH (info); + changed |= + GST_VIDEO_INFO_HEIGHT (&tmp_info) != GST_VIDEO_INFO_HEIGHT (info); + changed |= GST_VIDEO_INFO_SIZE (&tmp_info) != GST_VIDEO_INFO_SIZE (info); + changed |= GST_VIDEO_INFO_VIEWS (&tmp_info) != GST_VIDEO_INFO_VIEWS (info); + changed |= GST_VIDEO_INFO_FPS_N (&tmp_info) != GST_VIDEO_INFO_FPS_N (info); + changed |= GST_VIDEO_INFO_FPS_D (&tmp_info) != GST_VIDEO_INFO_FPS_D (info); + changed |= GST_VIDEO_INFO_PAR_N (&tmp_info) != GST_VIDEO_INFO_PAR_N (info); + changed |= GST_VIDEO_INFO_PAR_D (&tmp_info) != GST_VIDEO_INFO_PAR_D (info); } - changed = changed || (tmp_state.fps_n != 
state->fps_n - || tmp_state.fps_d != state->fps_d); - if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp_state.par_n, - &tmp_state.par_d)) { - tmp_state.par_n = 1; - tmp_state.par_d = 1; - } - changed = changed || (tmp_state.par_n != state->par_n - || tmp_state.par_d != state->par_d); - - tmp_state.have_interlaced = - gst_structure_get_boolean (structure, "interlaced", - &tmp_state.interlaced); - changed = changed || (tmp_state.have_interlaced != state->have_interlaced - || tmp_state.interlaced != state->interlaced); - - tmp_state.bytes_per_picture = - gst_video_format_get_size (tmp_state.format, tmp_state.width, - tmp_state.height); - tmp_state.clean_width = tmp_state.width; - tmp_state.clean_height = tmp_state.height; + /* Copy over info from input GstVideoInfo into output GstVideoFrameState */ + tmp_state.format = GST_VIDEO_INFO_FORMAT (&tmp_info); + tmp_state.bytes_per_picture = tmp_info.size; + tmp_state.width = tmp_info.width; + tmp_state.height = tmp_info.height; + tmp_state.fps_n = tmp_info.fps_n; + tmp_state.fps_d = tmp_info.fps_d; + tmp_state.par_n = tmp_info.par_n; + tmp_state.par_d = tmp_info.par_d; + tmp_state.clean_width = tmp_info.width; + tmp_state.clean_height = tmp_info.height; tmp_state.clean_offset_left = 0; tmp_state.clean_offset_top = 0; + /* FIXME (Edward): We need flags in GstVideoInfo to know whether + * interlaced field was present in input caps */ + tmp_state.have_interlaced = tmp_state.interlaced = + GST_VIDEO_INFO_FLAG_IS_SET (&tmp_info, GST_VIDEO_FLAG_INTERLACED); + tmp_state.top_field_first = + GST_VIDEO_INFO_FLAG_IS_SET (&tmp_info, GST_VIDEO_FLAG_TFF); if (changed) { /* arrange draining pending frames */ @@ -373,10 +362,11 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps) /* and subclass should be ready to configure format at any time around */ if (base_video_encoder_class->set_format) ret = - base_video_encoder_class->set_format (base_video_encoder, &tmp_state); + base_video_encoder_class->set_format 
(base_video_encoder, &tmp_info); if (ret) { gst_caps_replace (&state->caps, NULL); *state = tmp_state; + *info = tmp_info; } } else { /* no need to stir things up */ @@ -394,16 +384,14 @@ exit: caps); } - g_object_unref (base_video_encoder); - return ret; } static GstCaps * -gst_base_video_encoder_sink_getcaps (GstPad * pad) +gst_base_video_encoder_sink_getcaps (GstPad * pad, GstCaps * filter) { GstBaseVideoEncoder *base_video_encoder; - const GstCaps *templ_caps; + GstCaps *templ_caps; GstCaps *allowed; GstCaps *fcaps, *filter_caps; gint i, j; @@ -422,7 +410,7 @@ gst_base_video_encoder_sink_getcaps (GstPad * pad) gst_pad_get_allowed_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)); if (!allowed || gst_caps_is_empty (allowed) || gst_caps_is_any (allowed)) { - fcaps = gst_caps_copy (templ_caps); + fcaps = templ_caps; goto done; } @@ -441,7 +429,7 @@ gst_base_video_encoder_sink_getcaps (GstPad * pad) const GValue *val; GstStructure *s; - s = gst_structure_id_empty_new (q_name); + s = gst_structure_new_id_empty (q_name); if ((val = gst_structure_get_value (allowed_s, "width"))) gst_structure_set_value (s, "width", val); if ((val = gst_structure_get_value (allowed_s, "height"))) @@ -451,13 +439,25 @@ gst_base_video_encoder_sink_getcaps (GstPad * pad) if ((val = gst_structure_get_value (allowed_s, "pixel-aspect-ratio"))) gst_structure_set_value (s, "pixel-aspect-ratio", val); - gst_caps_merge_structure (filter_caps, s); + filter_caps = gst_caps_merge_structure (filter_caps, s); } } + GST_LOG_OBJECT (base_video_encoder, "filtered caps (first) %" GST_PTR_FORMAT, + filter_caps); + fcaps = gst_caps_intersect (filter_caps, templ_caps); + gst_caps_unref (templ_caps); gst_caps_unref (filter_caps); + if (filter) { + GST_LOG_OBJECT (base_video_encoder, "intersecting with %" GST_PTR_FORMAT, + filter); + filter_caps = gst_caps_intersect (fcaps, filter); + gst_caps_unref (fcaps); + fcaps = filter_caps; + } + done: gst_caps_replace (&allowed, NULL); @@ -468,14 +468,37 @@ 
done: return fcaps; } +static gboolean +gst_base_video_encoder_sink_query (GstPad * pad, GstObject * parent, + GstQuery * query) +{ + gboolean res = FALSE; + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CAPS: + { + GstCaps *filter, *caps; + + gst_query_parse_caps (query, &filter); + caps = gst_base_video_encoder_sink_getcaps (pad, filter); + gst_query_set_caps_result (query, caps); + gst_caps_unref (caps); + res = TRUE; + break; + } + default: + res = gst_pad_query_default (pad, parent, query); + break; + } + return res; +} + static void gst_base_video_encoder_finalize (GObject * object) { - GstBaseVideoEncoder *base_video_encoder; - + GstBaseVideoEncoder *base_video_encoder = (GstBaseVideoEncoder *) object; GST_DEBUG_OBJECT (object, "finalize"); - base_video_encoder = GST_BASE_VIDEO_ENCODER (object); gst_buffer_replace (&base_video_encoder->headers, NULL); G_OBJECT_CLASS (parent_class)->finalize (object); @@ -492,6 +515,15 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder, GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_CAPS: + { + GstCaps *caps; + + gst_event_parse_caps (event, &caps); + ret = gst_base_video_encoder_sink_setcaps (base_video_encoder, caps); + gst_event_unref (event); + } + break; case GST_EVENT_EOS: { GstFlowReturn flow_ret; @@ -509,27 +541,20 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder, GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder); break; } - case GST_EVENT_NEWSEGMENT: + case GST_EVENT_SEGMENT: { - gboolean update; - double rate; - double applied_rate; - GstFormat format; - gint64 start; - gint64 stop; - gint64 position; + const GstSegment *segment; GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder); - gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate, - &format, &start, &stop, &position); + gst_event_parse_segment (event, &segment); GST_DEBUG_OBJECT (base_video_encoder, 
"newseg rate %g, applied rate %g, " "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT - ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format, - GST_TIME_ARGS (start), GST_TIME_ARGS (stop), - GST_TIME_ARGS (position)); + ", pos = %" GST_TIME_FORMAT, segment->rate, segment->applied_rate, + segment->format, GST_TIME_ARGS (segment->start), + GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->position)); - if (format != GST_FORMAT_TIME) { + if (segment->format != GST_FORMAT_TIME) { GST_DEBUG_OBJECT (base_video_encoder, "received non TIME newsegment"); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder); break; @@ -537,9 +562,8 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder, base_video_encoder->at_eos = FALSE; - gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC - (base_video_encoder)->segment, update, rate, applied_rate, format, - start, stop, position); + gst_segment_copy_into (segment, &GST_BASE_VIDEO_CODEC + (base_video_encoder)->segment); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder); break; } @@ -578,14 +602,15 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder, } static gboolean -gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event) +gst_base_video_encoder_sink_event (GstPad * pad, GstObject * parent, + GstEvent * event) { GstBaseVideoEncoder *enc; GstBaseVideoEncoderClass *klass; gboolean handled = FALSE; gboolean ret = TRUE; - enc = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); + enc = GST_BASE_VIDEO_ENCODER (parent); klass = GST_BASE_VIDEO_ENCODER_GET_CLASS (enc); GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event), @@ -623,17 +648,17 @@ gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event) GST_DEBUG_OBJECT (enc, "event handled"); - gst_object_unref (enc); return ret; } static gboolean -gst_base_video_encoder_src_event (GstPad * pad, GstEvent * event) +gst_base_video_encoder_src_event (GstPad * pad, 
GstObject * parent, + GstEvent * event) { GstBaseVideoEncoder *base_video_encoder; gboolean ret = FALSE; - base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); + base_video_encoder = GST_BASE_VIDEO_ENCODER (parent); GST_LOG_OBJECT (base_video_encoder, "handling event: %" GST_PTR_FORMAT, event); @@ -677,31 +702,17 @@ gst_base_video_encoder_src_event (GstPad * pad, GstEvent * event) break; } - gst_object_unref (base_video_encoder); return ret; } -static const GstQueryType * -gst_base_video_encoder_get_query_types (GstPad * pad) -{ - static const GstQueryType query_types[] = { - GST_QUERY_CONVERT, - GST_QUERY_LATENCY, - 0 - }; - - return query_types; -} - static gboolean -gst_base_video_encoder_src_query (GstPad * pad, GstQuery * query) +gst_base_video_encoder_src_query (GstPad * pad, GstObject * parent, + GstQuery * query) { GstBaseVideoEncoder *enc; gboolean res; - GstPad *peerpad; - enc = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); - peerpad = gst_pad_get_peer (GST_BASE_VIDEO_CODEC_SINK_PAD (enc)); + enc = GST_BASE_VIDEO_ENCODER (parent); GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query); @@ -725,7 +736,7 @@ gst_base_video_encoder_src_query (GstPad * pad, GstQuery * query) gboolean live; GstClockTime min_latency, max_latency; - res = gst_pad_query (peerpad, query); + res = gst_pad_peer_query (GST_BASE_VIDEO_CODEC_SINK_PAD (enc), query); if (res) { gst_query_parse_latency (query, &live, &min_latency, &max_latency); GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %" @@ -744,55 +755,49 @@ gst_base_video_encoder_src_query (GstPad * pad, GstQuery * query) } break; default: - res = gst_pad_query_default (pad, query); + res = gst_pad_query_default (pad, parent, query); } - gst_object_unref (peerpad); - gst_object_unref (enc); return res; + /* ERRORS */ error: - GST_DEBUG_OBJECT (enc, "query failed"); - gst_object_unref (peerpad); - gst_object_unref (enc); - return res; + { + GST_DEBUG_OBJECT (enc, "query failed"); + return res; + 
} } static GstFlowReturn -gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf) +gst_base_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstBaseVideoEncoder *base_video_encoder; GstBaseVideoEncoderClass *klass; - GstVideoFrame *frame; + GstVideoFrameState *frame; GstFlowReturn ret = GST_FLOW_OK; - base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); + base_video_encoder = GST_BASE_VIDEO_ENCODER (parent); klass = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR); GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder); - if (!GST_PAD_CAPS (pad)) { - ret = GST_FLOW_NOT_NEGOTIATED; - goto done; - } - GST_LOG_OBJECT (base_video_encoder, - "received buffer of size %d with ts %" GST_TIME_FORMAT - ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf), + "received buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT + ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); if (base_video_encoder->at_eos) { - ret = GST_FLOW_UNEXPECTED; + ret = GST_FLOW_EOS; goto done; } if (base_video_encoder->sink_clipping) { - gint64 start = GST_BUFFER_TIMESTAMP (buf); - gint64 stop = start + GST_BUFFER_DURATION (buf); - gint64 clip_start; - gint64 clip_stop; + guint64 start = GST_BUFFER_TIMESTAMP (buf); + guint64 stop = start + GST_BUFFER_DURATION (buf); + guint64 clip_start; + guint64 clip_stop; if (!gst_segment_clip (&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment, GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop)) { @@ -874,8 +879,6 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf) done: GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder); - g_object_unref (base_video_encoder); - return ret; } @@ -930,7 +933,7 @@ stop_error: /** * gst_base_video_encoder_finish_frame: * @base_video_encoder: a #GstBaseVideoEncoder - * @frame: an encoded 
#GstVideoFrame + * @frame: an encoded #GstVideoFrameState * * @frame must have a valid encoded data buffer, whose metadata fields * are then appropriately set according to frame data or no buffer at @@ -942,7 +945,7 @@ stop_error: */ GstFlowReturn gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, - GstVideoFrame * frame) + GstVideoFrameState * frame) { GstFlowReturn ret = GST_FLOW_OK; GstBaseVideoEncoderClass *base_video_encoder_class; @@ -959,7 +962,7 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, /* Push all pending events that arrived before this frame */ for (l = base_video_encoder->base_video_codec.frames; l; l = l->next) { - GstVideoFrame *tmp = l->data; + GstVideoFrameState *tmp = l->data; if (tmp->events) { GList *k; @@ -992,6 +995,8 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, (base_video_encoder)->segment, GST_FORMAT_TIME, frame->presentation_timestamp); + /* re-use upstream event if any so it also conveys any additional + * info upstream arranged in there */ GST_OBJECT_LOCK (base_video_encoder); for (l = base_video_encoder->force_key_unit; l; l = l->next) { ForcedKeyUnitEvent *tmp = l->data; @@ -1035,7 +1040,7 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, if (fevt->all_headers) { if (base_video_encoder->headers) { headers = gst_buffer_ref (base_video_encoder->headers); - headers = gst_buffer_make_metadata_writable (headers); + headers = gst_buffer_make_writable (headers); } } @@ -1048,6 +1053,7 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, } if (frame->is_sync_point) { + GST_LOG_OBJECT (base_video_encoder, "key frame"); base_video_encoder->distance_from_sync = 0; GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); } else { @@ -1078,7 +1084,7 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, /* update rate estimate */ 
GST_BASE_VIDEO_CODEC (base_video_encoder)->bytes += - GST_BUFFER_SIZE (frame->src_buffer); + gst_buffer_get_size (frame->src_buffer); if (GST_CLOCK_TIME_IS_VALID (frame->presentation_duration)) { GST_BASE_VIDEO_CODEC (base_video_encoder)->time += frame->presentation_duration; @@ -1093,15 +1099,6 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = FALSE; } - gst_buffer_set_caps (GST_BUFFER (frame->src_buffer), - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder))); - - if (G_UNLIKELY (headers)) { - gst_buffer_set_caps (headers, - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder))); - gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), headers); - } - if (base_video_encoder_class->shape_output) { ret = base_video_encoder_class->shape_output (base_video_encoder, frame); } else { @@ -1116,7 +1113,7 @@ done: GST_BASE_VIDEO_CODEC (base_video_encoder)->frames = g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame); - gst_video_frame_unref (frame); + gst_video_frame_state_unref (frame); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder); @@ -1127,11 +1124,15 @@ done: * gst_base_video_encoder_get_state: * @base_video_encoder: a #GstBaseVideoEncoder * + * Get the current #GstVideoState + * * Returns: #GstVideoState describing format of video data. 
*/ const GstVideoState * gst_base_video_encoder_get_state (GstBaseVideoEncoder * base_video_encoder) { + /* FIXME : Move to base codec class */ + return &GST_BASE_VIDEO_CODEC (base_video_encoder)->state; } @@ -1147,7 +1148,7 @@ void gst_base_video_encoder_set_latency (GstBaseVideoEncoder * base_video_encoder, GstClockTime min_latency, GstClockTime max_latency) { - g_return_if_fail (min_latency >= 0); + g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency)); g_return_if_fail (max_latency >= min_latency); GST_OBJECT_LOCK (base_video_encoder); @@ -1162,7 +1163,7 @@ gst_base_video_encoder_set_latency (GstBaseVideoEncoder * base_video_encoder, /** * gst_base_video_encoder_set_latency_fields: * @base_video_encoder: a #GstBaseVideoEncoder - * @fields: latency in fields + * @n_fields: latency in fields * * Informs baseclass of encoding latency in terms of fields (both min * and max latency). @@ -1189,21 +1190,25 @@ gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder * * gst_base_video_encoder_get_oldest_frame: * @base_video_encoder: a #GstBaseVideoEncoder * - * Returns: oldest unfinished pending #GstVideoFrame + * Get the oldest unfinished pending #GstVideoFrameState + * + * Returns: oldest unfinished pending #GstVideoFrameState */ -GstVideoFrame * +GstVideoFrameState * gst_base_video_encoder_get_oldest_frame (GstBaseVideoEncoder * base_video_encoder) { GList *g; + /* FIXME : Move to base codec class */ + GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder); g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames); GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder); if (g == NULL) return NULL; - return (GstVideoFrame *) (g->data); + return (GstVideoFrameState *) (g->data); } /* FIXME there could probably be more of these; diff --git a/omx/gstbasevideoencoder.h b/omx/gstbasevideoencoder.h index de52102..5a34d4b 100644 --- a/omx/gstbasevideoencoder.h +++ b/omx/gstbasevideoencoder.h @@ -70,12 +70,12 @@ typedef struct _GstBaseVideoEncoderClass 
GstBaseVideoEncoderClass; /** * GstBaseVideoEncoder: - * @element: the parent element. * * The opaque #GstBaseVideoEncoder data structure. */ struct _GstBaseVideoEncoder { + /*< private >*/ GstBaseVideoCodec base_video_codec; /*< protected >*/ @@ -112,9 +112,11 @@ struct _GstBaseVideoEncoder * Allows closing external resources. * @set_format: Optional. * Notifies subclass of incoming data format. - * GstVideoState fields have already been + * GstVideoInfo fields have already been * set according to provided caps. * @handle_frame: Provides input frame to subclass. + * @reset: Optional. + * Allows subclass (codec) to perform post-seek semantics reset. * @finish: Optional. * Called to request subclass to dispatch any pending remaining * data (e.g. at EOS). @@ -133,6 +135,7 @@ struct _GstBaseVideoEncoder */ struct _GstBaseVideoEncoderClass { + /*< private >*/ GstBaseVideoCodecClass base_video_codec_class; /*< public >*/ @@ -143,16 +146,16 @@ struct _GstBaseVideoEncoderClass gboolean (*stop) (GstBaseVideoEncoder *coder); gboolean (*set_format) (GstBaseVideoEncoder *coder, - GstVideoState *state); + GstVideoInfo *info); GstFlowReturn (*handle_frame) (GstBaseVideoEncoder *coder, - GstVideoFrame *frame); + GstVideoFrameState *frame); gboolean (*reset) (GstBaseVideoEncoder *coder); GstFlowReturn (*finish) (GstBaseVideoEncoder *coder); GstFlowReturn (*shape_output) (GstBaseVideoEncoder *coder, - GstVideoFrame *frame); + GstVideoFrameState *frame); gboolean (*event) (GstBaseVideoEncoder *coder, GstEvent *event); @@ -164,18 +167,18 @@ struct _GstBaseVideoEncoderClass GType gst_base_video_encoder_get_type (void); -const GstVideoState* gst_base_video_encoder_get_state (GstBaseVideoEncoder *coder); +const GstVideoState* gst_base_video_encoder_get_state (GstBaseVideoEncoder *base_video_encoder); -GstVideoFrame* gst_base_video_encoder_get_oldest_frame (GstBaseVideoEncoder *coder); +GstVideoFrameState* gst_base_video_encoder_get_oldest_frame (GstBaseVideoEncoder *coder); 
GstFlowReturn gst_base_video_encoder_finish_frame (GstBaseVideoEncoder *base_video_encoder, - GstVideoFrame *frame); + GstVideoFrameState *frame); void gst_base_video_encoder_set_latency (GstBaseVideoEncoder *base_video_encoder, GstClockTime min_latency, GstClockTime max_latency); void gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder *base_video_encoder, int n_fields); void gst_base_video_encoder_set_headers (GstBaseVideoEncoder *base_video_encoder, - GstBuffer *headers); + GstBuffer *headers); G_END_DECLS #endif diff --git a/omx/gstomx.c b/omx/gstomx.c index c70d980..426e953 100644 --- a/omx/gstomx.c +++ b/omx/gstomx.c @@ -393,14 +393,13 @@ static OMX_CALLBACKTYPE callbacks = { EventHandler, EmptyBufferDone, FillBufferDone }; GstOMXComponent * -gst_omx_component_new (GstObject * parent, const gchar * core_name, - const gchar * component_name, const gchar * component_role, guint64 hacks) +gst_omx_component_new (GstObject * parent, const GstOMXClassData * cdata) { OMX_ERRORTYPE err; GstOMXCore *core; GstOMXComponent *comp; - core = gst_omx_core_acquire (core_name); + core = gst_omx_core_acquire (cdata->core_name); if (!core) return NULL; @@ -408,21 +407,21 @@ gst_omx_component_new (GstObject * parent, const gchar * core_name, comp->core = core; err = - core->get_handle (&comp->handle, (OMX_STRING) component_name, comp, + core->get_handle (&comp->handle, (OMX_STRING) cdata->component_name, comp, &callbacks); if (err != OMX_ErrorNone) { GST_ERROR_OBJECT (parent, "Failed to get component handle '%s' from core '%s': 0x%08x", - component_name, core_name, err); + cdata->component_name, cdata->core_name, err); gst_omx_core_release (core); g_slice_free (GstOMXComponent, comp); return NULL; } GST_DEBUG_OBJECT (parent, "Successfully got component handle %p (%s) from core '%s'", comp->handle, - component_name, core_name); + cdata->component_name, cdata->core_name); comp->parent = gst_object_ref (parent); - comp->hacks = hacks; + comp->hacks = cdata->hacks; 
comp->ports = g_ptr_array_new (); comp->n_in_ports = 0; @@ -434,18 +433,19 @@ gst_omx_component_new (GstObject * parent, const gchar * core_name, comp->last_error = OMX_ErrorNone; /* Set component role if any */ - if (component_role) { + if (cdata->component_role) { OMX_PARAM_COMPONENTROLETYPE param; GST_OMX_INIT_STRUCT (¶m); - g_strlcpy ((gchar *) param.cRole, component_role, sizeof (param.cRole)); + g_strlcpy ((gchar *) param.cRole, cdata->component_role, + sizeof (param.cRole)); err = gst_omx_component_set_parameter (comp, OMX_IndexParamStandardComponentRole, ¶m); GST_DEBUG_OBJECT (parent, "Setting component role to '%s': %s (0x%08x)", - component_role, gst_omx_error_to_string (err), err); + cdata->component_role, gst_omx_error_to_string (err), err); /* If setting the role failed this component is unusable */ if (err != OMX_ErrorNone) { @@ -1830,14 +1830,24 @@ done: return err; } -GQuark gst_omx_element_name_quark = 0; - static GType (*types[]) (void) = { gst_omx_mpeg4_video_dec_get_type, gst_omx_h264_dec_get_type, gst_omx_h263_dec_get_type, gst_omx_wmv_dec_get_type, gst_omx_mpeg4_video_enc_get_type, gst_omx_h264_enc_get_type, gst_omx_h263_enc_get_type, gst_omx_aac_enc_get_type}; +struct TypeOffest +{ + GType (*get_type) (void); + glong offset; +}; + +static struct TypeOffest base_types[] = { + {gst_omx_video_dec_get_type, G_STRUCT_OFFSET (GstOMXVideoDecClass, cdata)}, + {gst_omx_video_enc_get_type, G_STRUCT_OFFSET (GstOMXVideoEncClass, cdata)}, + {gst_omx_audio_enc_get_type, G_STRUCT_OFFSET (GstOMXAudioEncClass, cdata)}, +}; + static GKeyFile *config = NULL; GKeyFile * gst_omx_get_configuration (void) @@ -1969,6 +1979,156 @@ gst_omx_parse_hacks (gchar ** hacks) return hacks_flags; } + +void +gst_omx_set_default_role (GstOMXClassData * class_data, + const gchar * default_role) +{ + if (!class_data->component_role) + class_data->component_role = default_role; +} + +static void +_class_init (gpointer g_class, gpointer data) +{ + GstElementClass *element_class = 
GST_ELEMENT_CLASS (g_class); + GstOMXClassData *class_data = NULL; + GKeyFile *config; + const gchar *element_name = data; + GError *err; + gchar *core_name, *component_name, *component_role; + gint in_port_index, out_port_index; + gchar *template_caps; + GstPadTemplate *templ; + GstCaps *caps; + gchar **hacks; + int i; + + if (!element_name) + return; + + /* Find the GstOMXClassData for this class */ + for (i = 0; i < G_N_ELEMENTS (base_types); i++) { + GType gtype = base_types[i].get_type (); + + if (G_TYPE_CHECK_CLASS_TYPE (g_class, gtype)) { + class_data = (GstOMXClassData *) + (((guint8 *) g_class) + base_types[i].offset); + break; + } + } + + g_assert (class_data != NULL); + + config = gst_omx_get_configuration (); + + /* This will always succeed, see check in plugin_init */ + core_name = g_key_file_get_string (config, element_name, "core-name", NULL); + g_assert (core_name != NULL); + class_data->core_name = core_name; + component_name = + g_key_file_get_string (config, element_name, "component-name", NULL); + g_assert (component_name != NULL); + class_data->component_name = component_name; + + /* If this fails we simply don't set a role */ + if ((component_role = + g_key_file_get_string (config, element_name, "component-role", + NULL))) { + GST_DEBUG ("Using component-role '%s' for element '%s'", component_role, + element_name); + class_data->component_role = component_role; + } + + + /* Now set the inport/outport indices and assume sane defaults */ + err = NULL; + in_port_index = + g_key_file_get_integer (config, element_name, "in-port-index", &err); + if (err != NULL) { + GST_DEBUG ("No 'in-port-index' set for element '%s', assuming 0: %s", + element_name, err->message); + in_port_index = 0; + g_error_free (err); + } + class_data->in_port_index = in_port_index; + + err = NULL; + out_port_index = + g_key_file_get_integer (config, element_name, "out-port-index", &err); + if (err != NULL) { + GST_DEBUG ("No 'out-port-index' set for element '%s', assuming 1: 
%s", + element_name, err->message); + out_port_index = 1; + g_error_free (err); + } + class_data->out_port_index = out_port_index; + + /* Add pad templates */ + err = NULL; + if (!(template_caps = + g_key_file_get_string (config, element_name, "sink-template-caps", + &err))) { + GST_DEBUG + ("No sink template caps specified for element '%s', using default '%s'", + element_name, class_data->default_sink_template_caps); + caps = gst_caps_from_string (class_data->default_sink_template_caps); + g_assert (caps != NULL); + g_error_free (err); + } else { + caps = gst_caps_from_string (template_caps); + if (!caps) { + GST_DEBUG + ("Could not parse sink template caps '%s' for element '%s', using default '%s'", + template_caps, element_name, class_data->default_sink_template_caps); + caps = gst_caps_from_string (class_data->default_sink_template_caps); + g_assert (caps != NULL); + } + } + templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps); + g_free (template_caps); + gst_element_class_add_pad_template (element_class, templ); + + err = NULL; + if (!(template_caps = + g_key_file_get_string (config, element_name, "src-template-caps", + &err))) { + GST_DEBUG + ("No src template caps specified for element '%s', using default '%s'", + element_name, class_data->default_src_template_caps); + caps = gst_caps_from_string (class_data->default_src_template_caps); + g_assert (caps != NULL); + g_error_free (err); + } else { + caps = gst_caps_from_string (template_caps); + if (!caps) { + GST_DEBUG + ("Could not parse src template caps '%s' for element '%s', using default '%s'", + template_caps, element_name, class_data->default_src_template_caps); + caps = gst_caps_from_string (class_data->default_src_template_caps); + g_assert (caps != NULL); + } + } + templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps); + g_free (template_caps); + gst_element_class_add_pad_template (element_class, templ); + + if ((hacks = + g_key_file_get_string_list (config, 
element_name, "hacks", NULL, + NULL))) { +#ifndef GST_DISABLE_GST_DEBUG + gchar **walk = hacks; + + while (*walk) { + GST_DEBUG ("Using hack: %s", *walk); + walk++; + } +#endif + + class_data->hacks = gst_omx_parse_hacks (hacks); + } +} + static gboolean plugin_init (GstPlugin * plugin) { @@ -1986,9 +2146,6 @@ plugin_init (GstPlugin * plugin) GST_DEBUG_CATEGORY_INIT (gstomx_debug, "omx", 0, "gst-omx"); - gst_omx_element_name_quark = - g_quark_from_static_string ("gst-omx-element-name"); - /* Read configuration file gstomx.conf from the preferred * configuration directories */ env_config_dir = g_strdup (g_getenv (*env_config_name)); @@ -2101,6 +2258,8 @@ plugin_init (GstPlugin * plugin) memset (&type_info, 0, sizeof (type_info)); type_info.class_size = type_query.class_size; type_info.instance_size = type_query.instance_size; + type_info.class_init = _class_init; + type_info.class_data = g_strdup (elements[i]); type_name = g_strdup_printf ("%s-%s", g_type_name (type), elements[i]); if (g_type_from_name (type_name) != G_TYPE_INVALID) { GST_ERROR ("Type '%s' already exists for element '%s'", type_name, @@ -2110,8 +2269,6 @@ plugin_init (GstPlugin * plugin) } subtype = g_type_register_static (type, type_name, &type_info, 0); g_free (type_name); - g_type_set_qdata (subtype, gst_omx_element_name_quark, - g_strdup (elements[i])); ret |= gst_element_register (plugin, elements[i], rank, subtype); } g_strfreev (elements); diff --git a/omx/gstomx.h b/omx/gstomx.h index 6ef3beb..84b3bea 100644 --- a/omx/gstomx.h +++ b/omx/gstomx.h @@ -71,6 +71,7 @@ typedef struct _GstOMXPort GstOMXPort; typedef enum _GstOMXPortDirection GstOMXPortDirection; typedef struct _GstOMXComponent GstOMXComponent; typedef struct _GstOMXBuffer GstOMXBuffer; +typedef struct _GstOMXClassData GstOMXClassData; typedef enum { /* Everything good and the buffer is valid */ @@ -184,7 +185,18 @@ struct _GstOMXBuffer { gint settings_cookie; }; -extern GQuark gst_omx_element_name_quark; +struct _GstOMXClassData { 
+ const gchar *core_name; + const gchar *component_name; + const gchar *component_role; + + const gchar *default_src_template_caps; + const gchar *default_sink_template_caps; + + guint32 in_port_index, out_port_index; + + guint64 hacks; +}; GKeyFile * gst_omx_get_configuration (void); @@ -195,7 +207,7 @@ GstOMXCore * gst_omx_core_acquire (const gchar * filename); void gst_omx_core_release (GstOMXCore * core); -GstOMXComponent * gst_omx_component_new (GstObject *parent, const gchar * core_name, const gchar * component_name, const gchar *component_role, guint64 hacks); +GstOMXComponent * gst_omx_component_new (GstObject * parent, const GstOMXClassData *cdata); void gst_omx_component_free (GstOMXComponent * comp); OMX_ERRORTYPE gst_omx_component_set_state (GstOMXComponent * comp, OMX_STATETYPE state); @@ -236,6 +248,9 @@ gboolean gst_omx_port_is_enabled (GstOMXPort * port); OMX_ERRORTYPE gst_omx_port_manual_reconfigure (GstOMXPort * port, gboolean start); +void gst_omx_set_default_role (GstOMXClassData *class_data, const gchar *default_role); + + G_END_DECLS #endif /* __GST_OMX_H__ */ diff --git a/omx/gstomxaacenc.c b/omx/gstomxaacenc.c index 79e1046..3f77935 100644 --- a/omx/gstomxaacenc.c +++ b/omx/gstomxaacenc.c @@ -30,7 +30,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_aac_enc_debug_category); #define GST_CAT_DEFAULT gst_omx_aac_enc_debug_category /* prototypes */ -static void gst_omx_aac_enc_finalize (GObject * object); static void gst_omx_aac_enc_set_property (GObject * object, guint prop_id, const GValue * value, GParamSpec * pspec); static void gst_omx_aac_enc_get_property (GObject * object, guint prop_id, @@ -98,38 +97,21 @@ gst_omx_aac_er_tools_get_type (void) /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_aac_enc_debug_category, "omxaacenc", 0, \ "debug category for gst-omx audio encoder base class"); -GST_BOILERPLATE_FULL (GstOMXAACEnc, gst_omx_aac_enc, - GstOMXAudioEnc, GST_TYPE_OMX_AUDIO_ENC, 
DEBUG_INIT); +G_DEFINE_TYPE_WITH_CODE (GstOMXAACEnc, gst_omx_aac_enc, + GST_TYPE_OMX_AUDIO_ENC, DEBUG_INIT); -static void -gst_omx_aac_enc_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXAudioEncClass *audioenc_class = GST_OMX_AUDIO_ENC_CLASS (g_class); - - gst_element_class_set_details_simple (element_class, - "OpenMAX AAC Audio Encoder", - "Codec/Encoder/Audio", - "Encode AAC audio streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * default AAC audio encoder role */ - if (!audioenc_class->component_role) - audioenc_class->component_role = "audio_encoder.aac"; -} static void gst_omx_aac_enc_class_init (GstOMXAACEncClass * klass) { GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); GstOMXAudioEncClass *audioenc_class = GST_OMX_AUDIO_ENC_CLASS (klass); - gobject_class->finalize = gst_omx_aac_enc_finalize; gobject_class->set_property = gst_omx_aac_enc_set_property; gobject_class->get_property = gst_omx_aac_enc_get_property; @@ -161,13 +143,22 @@ gst_omx_aac_enc_class_init (GstOMXAACEncClass * klass) audioenc_class->get_num_samples = GST_DEBUG_FUNCPTR (gst_omx_aac_enc_get_num_samples); - audioenc_class->default_src_template_caps = "audio/mpeg, " + audioenc_class->cdata.default_src_template_caps = "audio/mpeg, " "mpegversion=(int){2, 4}, " "stream-format=(string){raw, adts, adif, loas, latm}"; + + + gst_element_class_set_details_simple (element_class, + "OpenMAX AAC Audio Encoder", + "Codec/Encoder/Audio", + "Encode AAC audio streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&audioenc_class->cdata, "audio_encoder.aac"); } static void -gst_omx_aac_enc_init (GstOMXAACEnc * self, GstOMXAACEncClass * klass) +gst_omx_aac_enc_init (GstOMXAACEnc * self) { self->bitrate = DEFAULT_BITRATE; self->aac_tools = DEFAULT_AAC_TOOLS; @@ -175,14 +166,6 @@ gst_omx_aac_enc_init (GstOMXAACEnc * self, 
GstOMXAACEncClass * klass) } static void -gst_omx_aac_enc_finalize (GObject * object) -{ - /* GstOMXAACEnc *self = GST_OMX_AAC_ENC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); -} - -static void gst_omx_aac_enc_set_property (GObject * object, guint prop_id, const GValue * value, GParamSpec * pspec) { @@ -250,24 +233,20 @@ gst_omx_aac_enc_set_format (GstOMXAudioEnc * enc, GstOMXPort * port, return FALSE; } - peercaps = gst_pad_peer_get_caps (GST_AUDIO_ENCODER_SRC_PAD (self)); + peercaps = gst_pad_peer_query_caps (GST_AUDIO_ENCODER_SRC_PAD (self), + gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (self))); if (peercaps) { - GstCaps *intersection; GstStructure *s; gint mpegversion = 0; const gchar *profile_string, *stream_format_string; - intersection = - gst_caps_intersect (peercaps, - gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (self))); - gst_caps_unref (peercaps); - if (gst_caps_is_empty (intersection)) { - gst_caps_unref (intersection); + if (gst_caps_is_empty (peercaps)) { + gst_caps_unref (peercaps); GST_ERROR_OBJECT (self, "Empty caps"); return FALSE; } - s = gst_caps_get_structure (intersection, 0); + s = gst_caps_get_structure (peercaps, 0); if (gst_structure_get_int (s, "mpegversion", &mpegversion)) { profile_string = @@ -285,7 +264,7 @@ gst_omx_aac_enc_set_format (GstOMXAudioEnc * enc, GstOMXPort * port, profile = OMX_AUDIO_AACObjectLTP; } else { GST_ERROR_OBJECT (self, "Unsupported profile '%s'", profile_string); - gst_caps_unref (intersection); + gst_caps_unref (peercaps); return FALSE; } } @@ -310,12 +289,12 @@ gst_omx_aac_enc_set_format (GstOMXAudioEnc * enc, GstOMXPort * port, } else { GST_ERROR_OBJECT (self, "Unsupported stream-format '%s'", stream_format_string); - gst_caps_unref (intersection); + gst_caps_unref (peercaps); return FALSE; } } - gst_caps_unref (intersection); + gst_caps_unref (peercaps); } aac_profile.eAACProfile = profile; @@ -487,7 +466,7 @@ gst_omx_aac_enc_get_caps (GstOMXAudioEnc * enc, 
GstOMXPort * port, break; } - caps = gst_caps_new_simple ("audio/mpeg", NULL); + caps = gst_caps_new_empty_simple ("audio/mpeg"); if (mpegversion != 0) gst_caps_set_simple (caps, "mpegversion", G_TYPE_INT, mpegversion, @@ -505,14 +484,16 @@ gst_omx_aac_enc_get_caps (GstOMXAudioEnc * enc, GstOMXPort * port, if (aac_profile.eAACStreamFormat == OMX_AUDIO_AACStreamFormatRAW) { GstBuffer *codec_data; - guint8 *cdata; adts_sample_index sr_idx; + GstMapInfo map = GST_MAP_INFO_INIT; codec_data = gst_buffer_new_and_alloc (2); - cdata = GST_BUFFER_DATA (codec_data); + gst_buffer_map (codec_data, &map, GST_MAP_WRITE); sr_idx = map_adts_sample_index (aac_profile.nSampleRate); - cdata[0] = ((aac_profile.eAACProfile & 0x1F) << 3) | ((sr_idx & 0xE) >> 1); - cdata[1] = ((sr_idx & 0x1) << 7) | ((aac_profile.nChannels & 0xF) << 3); + map.data[0] = ((aac_profile.eAACProfile & 0x1F) << 3) | + ((sr_idx & 0xE) >> 1); + map.data[1] = ((sr_idx & 0x1) << 7) | ((aac_profile.nChannels & 0xF) << 3); + gst_buffer_unmap (codec_data, &map); GST_DEBUG_OBJECT (enc, "setting new codec_data"); gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data, NULL); diff --git a/omx/gstomxaudioenc.c b/omx/gstomxaudioenc.c index 18beb26..b278efa 100644 --- a/omx/gstomxaudioenc.c +++ b/omx/gstomxaudioenc.c @@ -41,7 +41,7 @@ static gboolean gst_omx_audio_enc_start (GstAudioEncoder * encoder); static gboolean gst_omx_audio_enc_stop (GstAudioEncoder * encoder); static gboolean gst_omx_audio_enc_set_format (GstAudioEncoder * encoder, GstAudioInfo * info); -static gboolean gst_omx_audio_enc_event (GstAudioEncoder * encoder, +static gboolean gst_omx_audio_enc_sink_event (GstAudioEncoder * encoder, GstEvent * event); static GstFlowReturn gst_omx_audio_enc_handle_frame (GstAudioEncoder * encoder, GstBuffer * buffer); @@ -56,149 +56,14 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_audio_enc_debug_category, "omxaudioenc", 0, \ "debug 
category for gst-omx audio encoder base class"); -GST_BOILERPLATE_FULL (GstOMXAudioEnc, gst_omx_audio_enc, GstAudioEncoder, +G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstOMXAudioEnc, gst_omx_audio_enc, GST_TYPE_AUDIO_ENCODER, DEBUG_INIT); static void -gst_omx_audio_enc_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXAudioEncClass *audioenc_class = GST_OMX_AUDIO_ENC_CLASS (g_class); - GKeyFile *config; - const gchar *element_name; - GError *err; - gchar *core_name, *component_name, *component_role; - gint in_port_index, out_port_index; - gchar *template_caps; - GstPadTemplate *templ; - GstCaps *caps; - gchar **hacks; - - element_name = - g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), - gst_omx_element_name_quark); - /* This happens for the base class and abstract subclasses */ - if (!element_name) - return; - - config = gst_omx_get_configuration (); - - /* This will always succeed, see check in plugin_init */ - core_name = g_key_file_get_string (config, element_name, "core-name", NULL); - g_assert (core_name != NULL); - audioenc_class->core_name = core_name; - component_name = - g_key_file_get_string (config, element_name, "component-name", NULL); - g_assert (component_name != NULL); - audioenc_class->component_name = component_name; - - /* If this fails we simply don't set a role */ - if ((component_role = - g_key_file_get_string (config, element_name, "component-role", - NULL))) { - GST_DEBUG ("Using component-role '%s' for element '%s'", component_role, - element_name); - audioenc_class->component_role = component_role; - } - - - /* Now set the inport/outport indizes and assume sane defaults */ - err = NULL; - in_port_index = - g_key_file_get_integer (config, element_name, "in-port-index", &err); - if (err != NULL) { - GST_DEBUG ("No 'in-port-index' set for element '%s', assuming 0: %s", - element_name, err->message); - in_port_index = 0; - g_error_free (err); - } - audioenc_class->in_port_index = in_port_index; - - 
err = NULL; - out_port_index = - g_key_file_get_integer (config, element_name, "out-port-index", &err); - if (err != NULL) { - GST_DEBUG ("No 'out-port-index' set for element '%s', assuming 1: %s", - element_name, err->message); - out_port_index = 1; - g_error_free (err); - } - audioenc_class->out_port_index = out_port_index; - - /* Add pad templates */ - err = NULL; - if (!(template_caps = - g_key_file_get_string (config, element_name, "sink-template-caps", - &err))) { - GST_DEBUG - ("No sink template caps specified for element '%s', using default '%s'", - element_name, audioenc_class->default_sink_template_caps); - caps = gst_caps_from_string (audioenc_class->default_sink_template_caps); - g_assert (caps != NULL); - g_error_free (err); - } else { - caps = gst_caps_from_string (template_caps); - if (!caps) { - GST_DEBUG - ("Could not parse sink template caps '%s' for element '%s', using default '%s'", - template_caps, element_name, - audioenc_class->default_sink_template_caps); - caps = gst_caps_from_string (audioenc_class->default_sink_template_caps); - g_assert (caps != NULL); - } - } - templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps); - g_free (template_caps); - gst_element_class_add_pad_template (element_class, templ); - gst_object_unref (templ); - - err = NULL; - if (!(template_caps = - g_key_file_get_string (config, element_name, "src-template-caps", - &err))) { - GST_DEBUG - ("No src template caps specified for element '%s', using default '%s'", - element_name, audioenc_class->default_src_template_caps); - caps = gst_caps_from_string (audioenc_class->default_src_template_caps); - g_assert (caps != NULL); - g_error_free (err); - } else { - caps = gst_caps_from_string (template_caps); - if (!caps) { - GST_DEBUG - ("Could not parse src template caps '%s' for element '%s', using default '%s'", - template_caps, element_name, - audioenc_class->default_src_template_caps); - caps = gst_caps_from_string 
(audioenc_class->default_src_template_caps); - g_assert (caps != NULL); - } - } - templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps); - g_free (template_caps); - gst_element_class_add_pad_template (element_class, templ); - gst_object_unref (templ); - - if ((hacks = - g_key_file_get_string_list (config, element_name, "hacks", NULL, - NULL))) { -#ifndef GST_DISABLE_GST_DEBUG - gchar **walk = hacks; - - while (*walk) { - GST_DEBUG ("Using hack: %s", *walk); - walk++; - } -#endif - - audioenc_class->hacks = gst_omx_parse_hacks (hacks); - } -} - -static void gst_omx_audio_enc_class_init (GstOMXAudioEncClass * klass) { GObjectClass *gobject_class = G_OBJECT_CLASS (klass); @@ -217,40 +82,18 @@ gst_omx_audio_enc_class_init (GstOMXAudioEncClass * klass) GST_DEBUG_FUNCPTR (gst_omx_audio_enc_set_format); audio_encoder_class->handle_frame = GST_DEBUG_FUNCPTR (gst_omx_audio_enc_handle_frame); - audio_encoder_class->event = GST_DEBUG_FUNCPTR (gst_omx_audio_enc_event); + audio_encoder_class->sink_event = + GST_DEBUG_FUNCPTR (gst_omx_audio_enc_sink_event); - klass->default_sink_template_caps = "audio/x-raw-int, " - "rate = (int) [ 1, MAX ], " - "channels = (int) [ 1, " G_STRINGIFY (OMX_AUDIO_MAXCHANNELS) " ], " - "endianness = (int) { LITTLE_ENDIAN, BIG_ENDIAN }, " - "width = (int) 8, " - "depth = (int) 8, " - "signed = (boolean) { true, false }; " - "audio/x-raw-int, " - "rate = (int) [ 1, MAX ], " - "channels = (int) [ 1, " G_STRINGIFY (OMX_AUDIO_MAXCHANNELS) " ], " - "endianness = (int) { LITTLE_ENDIAN, BIG_ENDIAN }, " - "width = (int) 16, " - "depth = (int) 16, " - "signed = (boolean) { true, false }; " - "audio/x-raw-int, " - "rate = (int) [ 1, MAX ], " - "channels = (int) [ 1, " G_STRINGIFY (OMX_AUDIO_MAXCHANNELS) " ], " - "endianness = (int) { LITTLE_ENDIAN, BIG_ENDIAN }, " - "width = (int) 24, " - "depth = (int) 24, " - "signed = (boolean) { true, false }; " - "audio/x-raw-int, " + klass->cdata.default_sink_template_caps = "audio/x-raw, " "rate = (int) 
[ 1, MAX ], " "channels = (int) [ 1, " G_STRINGIFY (OMX_AUDIO_MAXCHANNELS) " ], " - "endianness = (int) { LITTLE_ENDIAN, BIG_ENDIAN }, " - "width = (int) 32, " - "depth = (int) 32, " "signed = (boolean) { true, false }"; - + "format = (string) { S8, U8, S16LE, S16BE, U16LE, U16BE, " + "S24LE, S24BE, U24LE, U24BE, S32LE, S32BE, U32LE, U32BE }"; } static void -gst_omx_audio_enc_init (GstOMXAudioEnc * self, GstOMXAudioEncClass * klass) +gst_omx_audio_enc_init (GstOMXAudioEnc * self) { self->drain_lock = g_mutex_new (); self->drain_cond = g_cond_new (); @@ -262,8 +105,7 @@ gst_omx_audio_enc_open (GstOMXAudioEnc * self) GstOMXAudioEncClass *klass = GST_OMX_AUDIO_ENC_GET_CLASS (self); self->component = - gst_omx_component_new (GST_OBJECT_CAST (self), klass->core_name, - klass->component_name, klass->component_role, klass->hacks); + gst_omx_component_new (GST_OBJECT_CAST (self), &klass->cdata); self->started = FALSE; if (!self->component) @@ -274,9 +116,9 @@ gst_omx_audio_enc_open (GstOMXAudioEnc * self) return FALSE; self->in_port = - gst_omx_component_add_port (self->component, klass->in_port_index); + gst_omx_component_add_port (self->component, klass->cdata.in_port_index); self->out_port = - gst_omx_component_add_port (self->component, klass->out_port_index); + gst_omx_component_add_port (self->component, klass->cdata.out_port_index); if (!self->in_port || !self->out_port) return FALSE; @@ -333,7 +175,7 @@ gst_omx_audio_enc_finalize (GObject * object) g_mutex_free (self->drain_lock); g_cond_free (self->drain_cond); - G_OBJECT_CLASS (parent_class)->finalize (object); + G_OBJECT_CLASS (gst_omx_audio_enc_parent_class)->finalize (object); } static GstStateChangeReturn @@ -381,7 +223,9 @@ gst_omx_audio_enc_change_state (GstElement * element, GstStateChange transition) if (ret == GST_STATE_CHANGE_FAILURE) return ret; - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + ret = + GST_ELEMENT_CLASS (gst_omx_audio_enc_parent_class)->change_state 
(element, + transition); if (ret == GST_STATE_CHANGE_FAILURE) return ret; @@ -390,7 +234,7 @@ gst_omx_audio_enc_change_state (GstElement * element, GstStateChange transition) case GST_STATE_CHANGE_PLAYING_TO_PAUSED: break; case GST_STATE_CHANGE_PAUSED_TO_READY: - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; if (!gst_omx_audio_enc_shutdown (self)) @@ -431,7 +275,7 @@ gst_omx_audio_enc_loop (GstOMXAudioEnc * self) return; } - if (!GST_PAD_CAPS (GST_AUDIO_ENCODER_SRC_PAD (self)) + if (!gst_pad_has_current_caps (GST_AUDIO_ENCODER_SRC_PAD (self)) || acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURED) { GstAudioInfo *info = gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (self)); @@ -487,12 +331,18 @@ gst_omx_audio_enc_loop (GstOMXAudioEnc * self) && buf->omx_buf->nFilledLen > 0) { GstCaps *caps; GstBuffer *codec_data; + GstMapInfo map = GST_MAP_INFO_INIT; - caps = gst_caps_copy (GST_PAD_CAPS (GST_AUDIO_ENCODER_SRC_PAD (self))); + caps = + gst_caps_copy (gst_pad_get_current_caps (GST_AUDIO_ENCODER_SRC_PAD + (self))); codec_data = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen); - memcpy (GST_BUFFER_DATA (codec_data), + + gst_buffer_map (codec_data, &map, GST_MAP_WRITE); + memcpy (map.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset, buf->omx_buf->nFilledLen); + gst_buffer_unmap (codec_data, &map); gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data, NULL); @@ -514,18 +364,20 @@ gst_omx_audio_enc_loop (GstOMXAudioEnc * self) gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (self)), buf); if (buf->omx_buf->nFilledLen > 0) { + GstMapInfo map = GST_MAP_INFO_INIT; outbuf = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen); - memcpy (GST_BUFFER_DATA (outbuf), + gst_buffer_map (outbuf, &map, GST_MAP_WRITE); + + memcpy (map.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset, buf->omx_buf->nFilledLen); + gst_buffer_unmap (outbuf, &map); + } else { outbuf = gst_buffer_new 
(); } - gst_buffer_set_caps (outbuf, - GST_PAD_CAPS (GST_AUDIO_ENCODER_SRC_PAD (self))); - GST_BUFFER_TIMESTAMP (outbuf) = gst_util_uint64_scale (buf->omx_buf->nTimeStamp, GST_SECOND, OMX_TICKS_PER_SECOND); @@ -539,7 +391,7 @@ gst_omx_audio_enc_loop (GstOMXAudioEnc * self) outbuf, n_samples); } - if (is_eos || flow_ret == GST_FLOW_UNEXPECTED) { + if (is_eos || flow_ret == GST_FLOW_EOS) { g_mutex_lock (self->drain_lock); if (self->draining) { GST_DEBUG_OBJECT (self, "Drained"); @@ -547,7 +399,7 @@ gst_omx_audio_enc_loop (GstOMXAudioEnc * self) g_cond_broadcast (self->drain_cond); } else if (flow_ret == GST_FLOW_OK) { GST_DEBUG_OBJECT (self, "Component signalled EOS"); - flow_ret = GST_FLOW_UNEXPECTED; + flow_ret = GST_FLOW_EOS; } g_mutex_unlock (self->drain_lock); } else { @@ -559,9 +411,9 @@ gst_omx_audio_enc_loop (GstOMXAudioEnc * self) self->downstream_flow_ret = flow_ret; } else { - g_assert ((klass->hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)); + g_assert ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)); GST_AUDIO_ENCODER_STREAM_LOCK (self); - flow_ret = GST_FLOW_UNEXPECTED; + flow_ret = GST_FLOW_EOS; } if (flow_ret != GST_FLOW_OK) @@ -587,20 +439,19 @@ flushing: { GST_DEBUG_OBJECT (self, "Flushing -- stopping task"); gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self)); - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; return; } flow_error: { - if (flow_ret == GST_FLOW_UNEXPECTED) { + if (flow_ret == GST_FLOW_EOS) { GST_DEBUG_OBJECT (self, "EOS"); gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self)); - } else if (flow_ret == GST_FLOW_NOT_LINKED - || flow_ret < GST_FLOW_UNEXPECTED) { + } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) { GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."), ("stream stopped, reason %s", gst_flow_get_name (flow_ret))); @@ -668,7 
+519,7 @@ gst_omx_audio_enc_stop (GstAudioEncoder * encoder) if (gst_omx_component_get_state (self->component, 0) > OMX_StateIdle) gst_omx_component_set_state (self->component, OMX_StateIdle); - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; self->eos = FALSE; @@ -746,7 +597,7 @@ gst_omx_audio_enc_set_format (GstAudioEncoder * encoder, GstAudioInfo * info) OMX_AUDIO_CHANNELTYPE pos; switch (info->position[i]) { - case GST_AUDIO_CHANNEL_POSITION_FRONT_MONO: + case GST_AUDIO_CHANNEL_POSITION_MONO: case GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER: pos = OMX_AUDIO_ChannelCF; break; @@ -762,7 +613,7 @@ gst_omx_audio_enc_set_format (GstAudioEncoder * encoder, GstAudioInfo * info) case GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT: pos = OMX_AUDIO_ChannelRS; break; - case GST_AUDIO_CHANNEL_POSITION_LFE: + case GST_AUDIO_CHANNEL_POSITION_LFE1: pos = OMX_AUDIO_ChannelLFE; break; case GST_AUDIO_CHANNEL_POSITION_REAR_CENTER: @@ -884,12 +735,13 @@ gst_omx_audio_enc_handle_frame (GstAudioEncoder * encoder, GstBuffer * inbuf) GstOMXBuffer *buf; guint offset = 0; GstClockTime timestamp, duration, timestamp_offset = 0; + GstMapInfo map = GST_MAP_INFO_INIT; self = GST_OMX_AUDIO_ENC (encoder); if (self->eos) { GST_WARNING_OBJECT (self, "Got frame after EOS"); - return GST_FLOW_UNEXPECTED; + return GST_FLOW_EOS; } if (self->downstream_flow_ret != GST_FLOW_OK) { @@ -907,7 +759,9 @@ gst_omx_audio_enc_handle_frame (GstAudioEncoder * encoder, GstBuffer * inbuf) timestamp = GST_BUFFER_TIMESTAMP (inbuf); duration = GST_BUFFER_DURATION (inbuf); - while (offset < GST_BUFFER_SIZE (inbuf)) { + gst_buffer_map (inbuf, &map, GST_MAP_READ); + + while (offset < map.size) { /* Make sure to release the base class stream lock, otherwise * _loop() can't call _finish_frame() and we might block forever * because no input buffers are released */ @@ -947,16 +801,15 @@ gst_omx_audio_enc_handle_frame (GstAudioEncoder * encoder, GstBuffer * inbuf) 
/* Copy the buffer content in chunks of size as requested * by the port */ buf->omx_buf->nFilledLen = - MIN (GST_BUFFER_SIZE (inbuf) - offset, + MIN (map.size - offset, buf->omx_buf->nAllocLen - buf->omx_buf->nOffset); memcpy (buf->omx_buf->pBuffer + buf->omx_buf->nOffset, - GST_BUFFER_DATA (inbuf) + offset, buf->omx_buf->nFilledLen); + map.data + offset, buf->omx_buf->nFilledLen); /* Interpolate timestamps if we're passing the buffer * in multiple chunks */ if (offset != 0 && duration != GST_CLOCK_TIME_NONE) { - timestamp_offset = - gst_util_uint64_scale (offset, duration, GST_BUFFER_SIZE (inbuf)); + timestamp_offset = gst_util_uint64_scale (offset, duration, map.size); } if (timestamp != GST_CLOCK_TIME_NONE) { @@ -967,8 +820,7 @@ gst_omx_audio_enc_handle_frame (GstAudioEncoder * encoder, GstBuffer * inbuf) } if (duration != GST_CLOCK_TIME_NONE) { buf->omx_buf->nTickCount = - gst_util_uint64_scale (buf->omx_buf->nFilledLen, duration, - GST_BUFFER_SIZE (inbuf)); + gst_util_uint64_scale (buf->omx_buf->nFilledLen, duration, map.size); self->last_upstream_ts += duration; } @@ -977,10 +829,14 @@ gst_omx_audio_enc_handle_frame (GstAudioEncoder * encoder, GstBuffer * inbuf) gst_omx_port_release_buffer (self->in_port, buf); } + gst_buffer_unmap (inbuf, &map); + + return self->downstream_flow_ret; full_buffer: { + gst_buffer_unmap (inbuf, &map); GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Got OpenMAX buffer with no free space (%p, %u/%u)", buf, buf->omx_buf->nOffset, buf->omx_buf->nAllocLen)); @@ -988,6 +844,7 @@ full_buffer: } component_error: { + gst_buffer_unmap (inbuf, &map); GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("OpenMAX component in error state %s (0x%08x)", gst_omx_component_get_last_error_string (self->component), @@ -997,11 +854,13 @@ component_error: flushing: { - GST_DEBUG_OBJECT (self, "Flushing -- returning WRONG_STATE"); - return GST_FLOW_WRONG_STATE; + gst_buffer_unmap (inbuf, &map); + GST_DEBUG_OBJECT (self, "Flushing -- returning 
FLUSHING"); + return GST_FLOW_FLUSHING; } reconfigure_error: { + gst_buffer_unmap (inbuf, &map); GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ("Unable to reconfigure input port")); return GST_FLOW_ERROR; @@ -1009,7 +868,7 @@ reconfigure_error: } static gboolean -gst_omx_audio_enc_event (GstAudioEncoder * encoder, GstEvent * event) +gst_omx_audio_enc_sink_event (GstAudioEncoder * encoder, GstEvent * event) { GstOMXAudioEnc *self; GstOMXAudioEncClass *klass; @@ -1030,7 +889,7 @@ gst_omx_audio_enc_event (GstAudioEncoder * encoder, GstEvent * event) } self->eos = TRUE; - if ((klass->hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { + if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers"); /* Insert a NULL into the queue to signal EOS */ @@ -1094,7 +953,7 @@ gst_omx_audio_enc_drain (GstOMXAudioEnc * self) return GST_FLOW_OK; } - if ((klass->hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { + if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers"); return GST_FLOW_OK; } diff --git a/omx/gstomxaudioenc.h b/omx/gstomxaudioenc.h index 28828f7..8986e57 100644 --- a/omx/gstomxaudioenc.h +++ b/omx/gstomxaudioenc.h @@ -76,16 +76,7 @@ struct _GstOMXAudioEncClass { GstAudioEncoderClass parent_class; - const gchar *core_name; - const gchar *component_name; - const gchar *component_role; - - const gchar *default_src_template_caps; - const gchar *default_sink_template_caps; - - guint32 in_port_index, out_port_index; - - guint64 hacks; + GstOMXClassData cdata; gboolean (*set_format) (GstOMXAudioEnc * self, GstOMXPort * port, GstAudioInfo * info); GstCaps *(*get_caps) (GstOMXAudioEnc * self, GstOMXPort * port, GstAudioInfo * info); diff --git a/omx/gstomxh263dec.c b/omx/gstomxh263dec.c index a294c3a..d135529 100644 --- a/omx/gstomxh263dec.c +++ b/omx/gstomxh263dec.c @@ -30,7 +30,6 @@ GST_DEBUG_CATEGORY_STATIC 
(gst_omx_h263_dec_debug_category); #define GST_CAT_DEFAULT gst_omx_h263_dec_debug_category /* prototypes */ -static void gst_omx_h263_dec_finalize (GObject * object); static gboolean gst_omx_h263_dec_is_format_change (GstOMXVideoDec * dec, GstOMXPort * port, GstVideoState * state); static gboolean gst_omx_h263_dec_set_format (GstOMXVideoDec * dec, @@ -43,58 +42,38 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_h263_dec_debug_category, "omxh263dec", 0, \ "debug category for gst-omx video decoder base class"); -GST_BOILERPLATE_FULL (GstOMXH263Dec, gst_omx_h263_dec, - GstOMXVideoDec, GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); - -static void -gst_omx_h263_dec_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (g_class); - - gst_element_class_set_details_simple (element_class, - "OpenMAX H.263 Video Decoder", - "Codec/Decoder/Video", - "Decode H.263 video streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * default H.263 video decoder role */ - if (!videodec_class->component_role) - videodec_class->component_role = "video_decoder.h263"; -} +G_DEFINE_TYPE_WITH_CODE (GstOMXH263Dec, gst_omx_h263_dec, + GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); static void gst_omx_h263_dec_class_init (GstOMXH263DecClass * klass) { - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (klass); - gobject_class->finalize = gst_omx_h263_dec_finalize; - videodec_class->is_format_change = GST_DEBUG_FUNCPTR (gst_omx_h263_dec_is_format_change); videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_h263_dec_set_format); - videodec_class->default_sink_template_caps = "video/x-h263, " + videodec_class->cdata.default_sink_template_caps = "video/x-h263, " 
"parsed=(boolean) true"; -} -static void -gst_omx_h263_dec_init (GstOMXH263Dec * self, GstOMXH263DecClass * klass) -{ + gst_element_class_set_details_simple (element_class, + "OpenMAX H.263 Video Decoder", + "Codec/Decoder/Video", + "Decode H.263 video streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&videodec_class->cdata, "video_decoder.h263"); } static void -gst_omx_h263_dec_finalize (GObject * object) +gst_omx_h263_dec_init (GstOMXH263Dec * self) { - /* GstOMXH263Dec *self = GST_OMX_H263_DEC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); } static gboolean diff --git a/omx/gstomxh263enc.c b/omx/gstomxh263enc.c index 1a32c75..372d5b4 100644 --- a/omx/gstomxh263enc.c +++ b/omx/gstomxh263enc.c @@ -30,9 +30,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_h263_enc_debug_category); #define GST_CAT_DEFAULT gst_omx_h263_enc_debug_category /* prototypes */ -static void gst_omx_h263_enc_finalize (GObject * object); static gboolean gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, - GstOMXPort * port, GstVideoState * state); + GstOMXPort * port, GstVideoInfo * state); static GstCaps *gst_omx_h263_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port, GstVideoState * state); @@ -43,62 +42,42 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_h263_enc_debug_category, "omxh263enc", 0, \ "debug category for gst-omx video encoder base class"); -GST_BOILERPLATE_FULL (GstOMXH263Enc, gst_omx_h263_enc, - GstOMXVideoEnc, GST_TYPE_OMX_VIDEO_ENC, DEBUG_INIT); - -static void -gst_omx_h263_enc_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoEncClass *videoenc_class = GST_OMX_VIDEO_ENC_CLASS (g_class); - - gst_element_class_set_details_simple (element_class, - "OpenMAX H.263 Video Encoder", - "Codec/Encoder/Video", - "Encode H.263 video streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * 
default H263 video encoder role */ - if (!videoenc_class->component_role) - videoenc_class->component_role = "video_encoder.h263"; -} +G_DEFINE_TYPE_WITH_CODE (GstOMXH263Enc, gst_omx_h263_enc, + GST_TYPE_OMX_VIDEO_ENC, DEBUG_INIT); static void gst_omx_h263_enc_class_init (GstOMXH263EncClass * klass) { - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); GstOMXVideoEncClass *videoenc_class = GST_OMX_VIDEO_ENC_CLASS (klass); - gobject_class->finalize = gst_omx_h263_enc_finalize; - videoenc_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_h263_enc_set_format); videoenc_class->get_caps = GST_DEBUG_FUNCPTR (gst_omx_h263_enc_get_caps); - videoenc_class->default_src_template_caps = "video/x-h263, " + videoenc_class->cdata.default_src_template_caps = "video/x-h263, " "width=(int) [ 16, 4096 ], " "height=(int) [ 16, 4096 ]"; -} -static void -gst_omx_h263_enc_init (GstOMXH263Enc * self, GstOMXH263EncClass * klass) -{ + gst_element_class_set_details_simple (element_class, + "OpenMAX H.263 Video Encoder", + "Codec/Encoder/Video", + "Encode H.263 video streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&videoenc_class->cdata, "video_encoder.h263"); } static void -gst_omx_h263_enc_finalize (GObject * object) +gst_omx_h263_enc_init (GstOMXH263Enc * self) { - /* GstOMXH263Enc *self = GST_OMX_H263_VIDEO_ENC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); } static gboolean gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, - GstVideoState * state) + GstVideoInfo * info) { GstOMXH263Enc *self = GST_OMX_H263_ENC (enc); GstCaps *peercaps; @@ -106,24 +85,20 @@ gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, OMX_VIDEO_H263LEVELTYPE level = OMX_VIDEO_H263Level10; OMX_VIDEO_PARAM_PROFILELEVELTYPE param; OMX_ERRORTYPE err; + guint profile_id, level_id; - peercaps = gst_pad_peer_get_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc)); + peercaps = 
gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc), + gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc))); if (peercaps) { GstStructure *s; - GstCaps *intersection; - guint profile_id, level_id; - intersection = - gst_caps_intersect (peercaps, - gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc))); - gst_caps_unref (peercaps); - if (gst_caps_is_empty (intersection)) { - gst_caps_unref (intersection); + if (gst_caps_is_empty (peercaps)) { + gst_caps_unref (peercaps); GST_ERROR_OBJECT (self, "Empty caps"); return FALSE; } - s = gst_caps_get_structure (intersection, 0); + s = gst_caps_get_structure (peercaps, 0); if (gst_structure_get_uint (s, "profile", &profile_id)) { switch (profile_id) { case 0: @@ -154,8 +129,7 @@ gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, profile = OMX_VIDEO_H263ProfileHighLatency; break; default: - GST_ERROR_OBJECT (self, "Invalid profile %u", profile_id); - return FALSE; + goto unsupported_profile; } } if (gst_structure_get_uint (s, "level", &level_id)) { @@ -182,10 +156,10 @@ gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, level = OMX_VIDEO_H263Level70; break; default: - GST_ERROR_OBJECT (self, "Unsupported level %u", level_id); - return FALSE; + goto unsupported_level; } } + gst_caps_unref (peercaps); } GST_OMX_INIT_STRUCT (&param); @@ -207,6 +181,16 @@ gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, } return TRUE; + +unsupported_profile: + gst_caps_unref (peercaps); + GST_ERROR_OBJECT (self, "Unsupported profile %u", profile_id); + return FALSE; + +unsupported_level: + gst_caps_unref (peercaps); + GST_ERROR_OBJECT (self, "Unsupported level %u", level_id); + return FALSE; } static GstCaps * diff --git a/omx/gstomxh264dec.c b/omx/gstomxh264dec.c index b9b1af2..105fa47 100644 --- a/omx/gstomxh264dec.c +++ b/omx/gstomxh264dec.c @@ -30,7 +30,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_h264_dec_debug_category); #define GST_CAT_DEFAULT 
gst_omx_h264_dec_debug_category /* prototypes */ -static void gst_omx_h264_dec_finalize (GObject * object); static gboolean gst_omx_h264_dec_is_format_change (GstOMXVideoDec * dec, GstOMXPort * port, GstVideoState * state); static gboolean gst_omx_h264_dec_set_format (GstOMXVideoDec * dec, @@ -43,59 +42,39 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_h264_dec_debug_category, "omxh264dec", 0, \ "debug category for gst-omx video decoder base class"); -GST_BOILERPLATE_FULL (GstOMXH264Dec, gst_omx_h264_dec, - GstOMXVideoDec, GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); - -static void -gst_omx_h264_dec_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (g_class); - - gst_element_class_set_details_simple (element_class, - "OpenMAX H.264 Video Decoder", - "Codec/Decoder/Video", - "Decode H.264 video streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * default H.264 video decoder role */ - if (!videodec_class->component_role) - videodec_class->component_role = "video_decoder.avc"; -} +G_DEFINE_TYPE_WITH_CODE (GstOMXH264Dec, gst_omx_h264_dec, + GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); static void gst_omx_h264_dec_class_init (GstOMXH264DecClass * klass) { - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (klass); - - gobject_class->finalize = gst_omx_h264_dec_finalize; + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); videodec_class->is_format_change = GST_DEBUG_FUNCPTR (gst_omx_h264_dec_is_format_change); videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_h264_dec_set_format); - videodec_class->default_sink_template_caps = "video/x-h264, " + videodec_class->cdata.default_sink_template_caps = "video/x-h264, " "parsed=(boolean) true, " "alignment=(string)au, " "stream-format=(string) 
byte-stream"; -} -static void -gst_omx_h264_dec_init (GstOMXH264Dec * self, GstOMXH264DecClass * klass) -{ + gst_element_class_set_details_simple (element_class, + "OpenMAX H.264 Video Decoder", + "Codec/Decoder/Video", + "Decode H.264 video streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&videodec_class->cdata, "video_decoder.avc"); } static void -gst_omx_h264_dec_finalize (GObject * object) +gst_omx_h264_dec_init (GstOMXH264Dec * self) { - /* GstOMXH264Dec *self = GST_OMX_H264_DEC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); } static gboolean diff --git a/omx/gstomxh264enc.c b/omx/gstomxh264enc.c index 95e8f2e..8f0d646 100644 --- a/omx/gstomxh264enc.c +++ b/omx/gstomxh264enc.c @@ -30,13 +30,12 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_h264_enc_debug_category); #define GST_CAT_DEFAULT gst_omx_h264_enc_debug_category /* prototypes */ -static void gst_omx_h264_enc_finalize (GObject * object); static gboolean gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, - GstOMXPort * port, GstVideoState * state); + GstOMXPort * port, GstVideoInfo * info); static GstCaps *gst_omx_h264_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port, GstVideoState * state); static GstFlowReturn gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc * - self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoFrame * frame); + self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoFrameState * frame); enum { @@ -45,64 +44,44 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_h264_enc_debug_category, "omxh264enc", 0, \ "debug category for gst-omx video encoder base class"); -GST_BOILERPLATE_FULL (GstOMXH264Enc, gst_omx_h264_enc, - GstOMXVideoEnc, GST_TYPE_OMX_VIDEO_ENC, DEBUG_INIT); - -static void -gst_omx_h264_enc_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoEncClass *videoenc_class = GST_OMX_VIDEO_ENC_CLASS (g_class); - - 
gst_element_class_set_details_simple (element_class, - "OpenMAX H.264 Video Encoder", - "Codec/Encoder/Video", - "Encode H.264 video streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * default H264 video encoder role */ - if (!videoenc_class->component_role) - videoenc_class->component_role = "video_encoder.avc"; -} +G_DEFINE_TYPE_WITH_CODE (GstOMXH264Enc, gst_omx_h264_enc, + GST_TYPE_OMX_VIDEO_ENC, DEBUG_INIT); static void gst_omx_h264_enc_class_init (GstOMXH264EncClass * klass) { - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); GstOMXVideoEncClass *videoenc_class = GST_OMX_VIDEO_ENC_CLASS (klass); - gobject_class->finalize = gst_omx_h264_enc_finalize; - videoenc_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_h264_enc_set_format); videoenc_class->get_caps = GST_DEBUG_FUNCPTR (gst_omx_h264_enc_get_caps); - videoenc_class->default_src_template_caps = "video/x-h264, " + videoenc_class->cdata.default_src_template_caps = "video/x-h264, " "width=(int) [ 16, 4096 ], " "height=(int) [ 16, 4096 ]"; videoenc_class->handle_output_frame = GST_DEBUG_FUNCPTR (gst_omx_h264_enc_handle_output_frame); -} -static void -gst_omx_h264_enc_init (GstOMXH264Enc * self, GstOMXH264EncClass * klass) -{ + gst_element_class_set_details_simple (element_class, + "OpenMAX H.264 Video Encoder", + "Codec/Encoder/Video", + "Encode H.264 video streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&videoenc_class->cdata, "video_encoder.avc"); } static void -gst_omx_h264_enc_finalize (GObject * object) +gst_omx_h264_enc_init (GstOMXH264Enc * self) { - /* GstOMXH264Enc *self = GST_OMX_H264_VIDEO_ENC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); } static gboolean gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, - GstVideoState * state) + GstVideoInfo * info) { GstOMXH264Enc *self = GST_OMX_H264_ENC (enc); GstCaps *peercaps; @@ -110,24 +89,20 @@ 
gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, OMX_VIDEO_AVCLEVELTYPE level = OMX_VIDEO_AVCLevel11; OMX_VIDEO_PARAM_PROFILELEVELTYPE param; OMX_ERRORTYPE err; + const gchar *profile_string, *level_string; - peercaps = gst_pad_peer_get_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc)); + peercaps = gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc), + gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc))); if (peercaps) { GstStructure *s; - GstCaps *intersection; - const gchar *profile_string, *level_string; - intersection = - gst_caps_intersect (peercaps, - gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc))); - gst_caps_unref (peercaps); - if (gst_caps_is_empty (intersection)) { - gst_caps_unref (intersection); + if (gst_caps_is_empty (peercaps)) { + gst_caps_unref (peercaps); GST_ERROR_OBJECT (self, "Empty caps"); return FALSE; } - s = gst_caps_get_structure (intersection, 0); + s = gst_caps_get_structure (peercaps, 0); profile_string = gst_structure_get_string (s, "profile"); if (profile_string) { if (g_str_equal (profile_string, "baseline")) { @@ -145,8 +120,7 @@ gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, } else if (g_str_equal (profile_string, "high-4:4:4")) { profile = OMX_VIDEO_AVCProfileHigh444; } else { - GST_ERROR_OBJECT (self, "Unsupported profile %s", profile_string); - return FALSE; + goto unsupported_profile; } } level_string = gst_structure_get_string (s, "level"); @@ -184,10 +158,10 @@ gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, } else if (g_str_equal (level_string, "5.1")) { level = OMX_VIDEO_AVCLevel51; } else { - GST_ERROR_OBJECT (self, "Unsupported level %s", level_string); - return FALSE; + goto unsupported_level; } } + gst_caps_unref (peercaps); } GST_OMX_INIT_STRUCT (&param); @@ -209,6 +183,16 @@ gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, } return TRUE; + +unsupported_profile: + gst_caps_unref (peercaps); + 
GST_ERROR_OBJECT (self, "Unsupported profile %s", profile_string); + return FALSE; + +unsupported_level: + gst_caps_unref (peercaps); + GST_ERROR_OBJECT (self, "Unsupported level %s", level_string); + return FALSE; } static GstCaps * @@ -331,7 +315,7 @@ gst_omx_h264_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port, static GstFlowReturn gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port, - GstOMXBuffer * buf, GstVideoFrame * frame) + GstOMXBuffer * buf, GstVideoFrameState * frame) { if (buf->omx_buf->nFlags & OMX_BUFFERFLAG_CODECCONFIG) { /* The codec data is SPS/PPS with a startcode => bytestream stream format @@ -342,19 +326,25 @@ gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port, GST_READ_UINT32_BE (buf->omx_buf->pBuffer + buf->omx_buf->nOffset) == 0x00000001) { GstBuffer *hdrs; + GstMapInfo map = GST_MAP_INFO_INIT; GST_DEBUG_OBJECT (self, "got codecconfig in byte-stream format"); buf->omx_buf->nFlags &= ~OMX_BUFFERFLAG_CODECCONFIG; hdrs = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen); - memcpy (GST_BUFFER_DATA (hdrs), + + gst_buffer_map (hdrs, &map, GST_MAP_WRITE); + memcpy (map.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset, buf->omx_buf->nFilledLen); + gst_buffer_unmap (hdrs, &map); gst_base_video_encoder_set_headers (GST_BASE_VIDEO_ENCODER (self), hdrs); gst_buffer_unref (hdrs); } } - return GST_OMX_VIDEO_ENC_CLASS (parent_class)->handle_output_frame (self, - port, buf, frame); + return + GST_OMX_VIDEO_ENC_CLASS + (gst_omx_h264_enc_parent_class)->handle_output_frame (self, port, buf, + frame); } diff --git a/omx/gstomxmpeg4videodec.c b/omx/gstomxmpeg4videodec.c index 12c2f66..341401f 100644 --- a/omx/gstomxmpeg4videodec.c +++ b/omx/gstomxmpeg4videodec.c @@ -30,7 +30,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_mpeg4_video_dec_debug_category); #define GST_CAT_DEFAULT gst_omx_mpeg4_video_dec_debug_category /* prototypes */ -static void gst_omx_mpeg4_video_dec_finalize (GObject * object); static 
gboolean gst_omx_mpeg4_video_dec_is_format_change (GstOMXVideoDec * dec, GstOMXPort * port, GstVideoState * state); static gboolean gst_omx_mpeg4_video_dec_set_format (GstOMXVideoDec * dec, @@ -43,61 +42,41 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_mpeg4_video_dec_debug_category, "omxmpeg4videodec", 0, \ "debug category for gst-omx video decoder base class"); -GST_BOILERPLATE_FULL (GstOMXMPEG4VideoDec, gst_omx_mpeg4_video_dec, - GstOMXVideoDec, GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); +G_DEFINE_TYPE_WITH_CODE (GstOMXMPEG4VideoDec, gst_omx_mpeg4_video_dec, + GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); -static void -gst_omx_mpeg4_video_dec_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (g_class); - - gst_element_class_set_details_simple (element_class, - "OpenMAX MPEG4 Video Decoder", - "Codec/Decoder/Video", - "Decode MPEG4 video streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * default MPEG4 video decoder role */ - if (!videodec_class->component_role) - videodec_class->component_role = "video_decoder.mpeg4"; -} static void gst_omx_mpeg4_video_dec_class_init (GstOMXMPEG4VideoDecClass * klass) { - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (klass); - gobject_class->finalize = gst_omx_mpeg4_video_dec_finalize; - videodec_class->is_format_change = GST_DEBUG_FUNCPTR (gst_omx_mpeg4_video_dec_is_format_change); videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_mpeg4_video_dec_set_format); - videodec_class->default_sink_template_caps = "video/mpeg, " + videodec_class->cdata.default_sink_template_caps = "video/mpeg, " "mpegversion=(int) 4, " "systemstream=(boolean) false, " "parsed=(boolean) true"; -} -static void 
-gst_omx_mpeg4_video_dec_init (GstOMXMPEG4VideoDec * self, - GstOMXMPEG4VideoDecClass * klass) -{ + gst_element_class_set_details_simple (element_class, + "OpenMAX MPEG4 Video Decoder", + "Codec/Decoder/Video", + "Decode MPEG4 video streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&videodec_class->cdata, "video_decoder.mpeg4"); } static void -gst_omx_mpeg4_video_dec_finalize (GObject * object) +gst_omx_mpeg4_video_dec_init (GstOMXMPEG4VideoDec * self) { - /* GstOMXMPEG4VideoDec *self = GST_OMX_MPEG4_VIDEO_DEC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); } static gboolean diff --git a/omx/gstomxmpeg4videoenc.c b/omx/gstomxmpeg4videoenc.c index 04971ff..927abe7 100644 --- a/omx/gstomxmpeg4videoenc.c +++ b/omx/gstomxmpeg4videoenc.c @@ -30,9 +30,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_mpeg4_video_enc_debug_category); #define GST_CAT_DEFAULT gst_omx_mpeg4_video_enc_debug_category /* prototypes */ -static void gst_omx_mpeg4_video_enc_finalize (GObject * object); static gboolean gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc, - GstOMXPort * port, GstVideoState * state); + GstOMXPort * port, GstVideoInfo * info); static GstCaps *gst_omx_mpeg4_video_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port, GstVideoState * state); @@ -43,84 +42,63 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_mpeg4_video_enc_debug_category, "omxmpeg4videoenc", 0, \ "debug category for gst-omx video encoder base class"); -GST_BOILERPLATE_FULL (GstOMXMPEG4VideoEnc, gst_omx_mpeg4_video_enc, - GstOMXVideoEnc, GST_TYPE_OMX_VIDEO_ENC, DEBUG_INIT); - -static void -gst_omx_mpeg4_video_enc_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoEncClass *videoenc_class = GST_OMX_VIDEO_ENC_CLASS (g_class); - - gst_element_class_set_details_simple (element_class, - "OpenMAX MPEG4 Video Encoder", - "Codec/Encoder/Video", - "Encode MPEG4 
video streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * default MPEG4 video encoder role */ - if (!videoenc_class->component_role) - videoenc_class->component_role = "video_encoder.mpeg4"; -} +G_DEFINE_TYPE_WITH_CODE (GstOMXMPEG4VideoEnc, gst_omx_mpeg4_video_enc, + GST_TYPE_OMX_VIDEO_ENC, DEBUG_INIT); static void gst_omx_mpeg4_video_enc_class_init (GstOMXMPEG4VideoEncClass * klass) { - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); GstOMXVideoEncClass *videoenc_class = GST_OMX_VIDEO_ENC_CLASS (klass); - gobject_class->finalize = gst_omx_mpeg4_video_enc_finalize; - videoenc_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_mpeg4_video_enc_set_format); videoenc_class->get_caps = GST_DEBUG_FUNCPTR (gst_omx_mpeg4_video_enc_get_caps); - videoenc_class->default_src_template_caps = "video/mpeg, " + videoenc_class->cdata.default_src_template_caps = "video/mpeg, " "mpegversion=(int) 4, " "systemstream=(boolean) false, " "width=(int) [ 16, 4096 ], " "height=(int) [ 16, 4096 ]"; -} -static void -gst_omx_mpeg4_video_enc_init (GstOMXMPEG4VideoEnc * self, - GstOMXMPEG4VideoEncClass * klass) -{ + gst_element_class_set_details_simple (element_class, + "OpenMAX MPEG4 Video Encoder", + "Codec/Encoder/Video", + "Encode MPEG4 video streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&videoenc_class->cdata, "video_encoder.mpeg4"); } static void -gst_omx_mpeg4_video_enc_finalize (GObject * object) +gst_omx_mpeg4_video_enc_init (GstOMXMPEG4VideoEnc * self) { - /* GstOMXMPEG4VideoEnc *self = GST_OMX_MPEG4_VIDEO_ENC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); } static gboolean gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, - GstVideoState * state) + GstVideoInfo * info) { GstOMXMPEG4VideoEnc *self = GST_OMX_MPEG4_VIDEO_ENC (enc); - GstCaps *peercaps; + GstCaps *peercaps, *intersection; OMX_VIDEO_MPEG4PROFILETYPE 
profile = OMX_VIDEO_MPEG4ProfileSimple; OMX_VIDEO_MPEG4LEVELTYPE level = OMX_VIDEO_MPEG4Level1; OMX_VIDEO_PARAM_PROFILELEVELTYPE param; OMX_ERRORTYPE err; + const gchar *profile_string, *level_string; - peercaps = gst_pad_peer_get_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc)); + peercaps = gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc), NULL); if (peercaps) { GstStructure *s; - GstCaps *intersection; - const gchar *profile_string, *level_string; intersection = gst_caps_intersect (peercaps, gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc))); + gst_caps_unref (peercaps); if (gst_caps_is_empty (intersection)) { gst_caps_unref (intersection); @@ -164,8 +142,7 @@ gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, } else if (g_str_equal (profile_string, "advanced-simple")) { profile = OMX_VIDEO_MPEG4ProfileAdvancedSimple; } else { - GST_ERROR_OBJECT (self, "Unsupported profile %s", profile_string); - return FALSE; + goto unsupported_profile; } } level_string = gst_structure_get_string (s, "level"); @@ -187,10 +164,11 @@ gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, } else if (g_str_equal (level_string, "5")) { level = OMX_VIDEO_MPEG4Level5; } else { - GST_ERROR_OBJECT (self, "Unsupported level %s", level_string); - return FALSE; + goto unsupported_level; } } + + gst_caps_unref (intersection); } GST_OMX_INIT_STRUCT (&param); @@ -212,6 +190,16 @@ gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port, } return TRUE; + +unsupported_profile: + gst_caps_unref (intersection); + GST_ERROR_OBJECT (self, "Unsupported profile %s", profile_string); + return FALSE; + +unsupported_level: + gst_caps_unref (intersection); + GST_ERROR_OBJECT (self, "Unsupported level %s", level_string); + return FALSE; } static GstCaps * diff --git a/omx/gstomxvideodec.c b/omx/gstomxvideodec.c index 561bf98..85a83c2 100644 --- a/omx/gstomxvideodec.c +++ b/omx/gstomxvideodec.c @@ -57,7 +57,7 @@ static 
gboolean gst_omx_video_dec_reset (GstBaseVideoDecoder * decoder); static GstFlowReturn gst_omx_video_dec_parse_data (GstBaseVideoDecoder * decoder, gboolean at_eos); static GstFlowReturn gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * - decoder, GstVideoFrame * frame); + decoder, GstVideoFrameState * frame); static GstFlowReturn gst_omx_video_dec_finish (GstBaseVideoDecoder * decoder); static GstFlowReturn gst_omx_video_dec_drain (GstOMXVideoDec * self); @@ -69,147 +69,13 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_video_dec_debug_category, "omxvideodec", 0, \ "debug category for gst-omx video decoder base class"); -GST_BOILERPLATE_FULL (GstOMXVideoDec, gst_omx_video_dec, GstBaseVideoDecoder, - GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT); - -static void -gst_omx_video_dec_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (g_class); - GKeyFile *config; - const gchar *element_name; - GError *err; - gchar *core_name, *component_name, *component_role; - gint in_port_index, out_port_index; - gchar *template_caps; - GstPadTemplate *templ; - GstCaps *caps; - gchar **hacks; - - element_name = - g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), - gst_omx_element_name_quark); - /* This happens for the base class and abstract subclasses */ - if (!element_name) - return; - - config = gst_omx_get_configuration (); - - /* This will always succeed, see check in plugin_init */ - core_name = g_key_file_get_string (config, element_name, "core-name", NULL); - g_assert (core_name != NULL); - videodec_class->core_name = core_name; - component_name = - g_key_file_get_string (config, element_name, "component-name", NULL); - g_assert (component_name != NULL); - videodec_class->component_name = component_name; - - /* If this fails we simply don't set a role */ - if ((component_role = - g_key_file_get_string 
(config, element_name, "component-role", - NULL))) { - GST_DEBUG ("Using component-role '%s' for element '%s'", component_role, - element_name); - videodec_class->component_role = component_role; - } - - /* Now set the inport/outport indizes and assume sane defaults */ - err = NULL; - in_port_index = - g_key_file_get_integer (config, element_name, "in-port-index", &err); - if (err != NULL) { - GST_DEBUG ("No 'in-port-index' set for element '%s', assuming 0: %s", - element_name, err->message); - in_port_index = 0; - g_error_free (err); - } - videodec_class->in_port_index = in_port_index; - - err = NULL; - out_port_index = - g_key_file_get_integer (config, element_name, "out-port-index", &err); - if (err != NULL) { - GST_DEBUG ("No 'out-port-index' set for element '%s', assuming 1: %s", - element_name, err->message); - out_port_index = 1; - g_error_free (err); - } - videodec_class->out_port_index = out_port_index; - - /* Add pad templates */ - err = NULL; - if (!(template_caps = - g_key_file_get_string (config, element_name, "sink-template-caps", - &err))) { - GST_DEBUG - ("No sink template caps specified for element '%s', using default '%s'", - element_name, videodec_class->default_sink_template_caps); - caps = gst_caps_from_string (videodec_class->default_sink_template_caps); - g_assert (caps != NULL); - g_error_free (err); - } else { - caps = gst_caps_from_string (template_caps); - if (!caps) { - GST_DEBUG - ("Could not parse sink template caps '%s' for element '%s', using default '%s'", - template_caps, element_name, - videodec_class->default_sink_template_caps); - caps = gst_caps_from_string (videodec_class->default_sink_template_caps); - g_assert (caps != NULL); - } - } - templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps); - g_free (template_caps); - gst_element_class_add_pad_template (element_class, templ); - gst_object_unref (templ); - - err = NULL; - if (!(template_caps = - g_key_file_get_string (config, element_name, 
"src-template-caps", - &err))) { - GST_DEBUG - ("No src template caps specified for element '%s', using default '%s'", - element_name, videodec_class->default_src_template_caps); - caps = gst_caps_from_string (videodec_class->default_src_template_caps); - g_assert (caps != NULL); - g_error_free (err); - } else { - caps = gst_caps_from_string (template_caps); - if (!caps) { - GST_DEBUG - ("Could not parse src template caps '%s' for element '%s', using default '%s'", - template_caps, element_name, - videodec_class->default_src_template_caps); - caps = gst_caps_from_string (videodec_class->default_src_template_caps); - g_assert (caps != NULL); - } - } - templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps); - g_free (template_caps); - gst_element_class_add_pad_template (element_class, templ); - gst_object_unref (templ); - - if ((hacks = - g_key_file_get_string_list (config, element_name, "hacks", NULL, - NULL))) { -#ifndef GST_DISABLE_GST_DEBUG - gchar **walk = hacks; - - while (*walk) { - GST_DEBUG ("Using hack: %s", *walk); - walk++; - } -#endif - - videodec_class->hacks = gst_omx_parse_hacks (hacks); - } -} +G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstOMXVideoDec, gst_omx_video_dec, + GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT); static void gst_omx_video_dec_class_init (GstOMXVideoDecClass * klass) @@ -236,13 +102,13 @@ gst_omx_video_dec_class_init (GstOMXVideoDecClass * klass) base_video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_omx_video_dec_finish); - klass->default_src_template_caps = "video/x-raw-yuv, " + klass->cdata.default_src_template_caps = "video/x-raw, " "width = " GST_VIDEO_SIZE_RANGE ", " "height = " GST_VIDEO_SIZE_RANGE ", " "framerate = " GST_VIDEO_FPS_RANGE; } static void -gst_omx_video_dec_init (GstOMXVideoDec * self, GstOMXVideoDecClass * klass) +gst_omx_video_dec_init (GstOMXVideoDec * self) { GST_BASE_VIDEO_DECODER (self)->packetized = TRUE; @@ -258,8 +124,7 @@ gst_omx_video_dec_open (GstOMXVideoDec * self) GST_DEBUG_OBJECT (self, 
"Opening decoder"); self->component = - gst_omx_component_new (GST_OBJECT_CAST (self), klass->core_name, - klass->component_name, klass->component_role, klass->hacks); + gst_omx_component_new (GST_OBJECT_CAST (self), &klass->cdata); self->started = FALSE; if (!self->component) @@ -270,9 +135,9 @@ gst_omx_video_dec_open (GstOMXVideoDec * self) return FALSE; self->in_port = - gst_omx_component_add_port (self->component, klass->in_port_index); + gst_omx_component_add_port (self->component, klass->cdata.in_port_index); self->out_port = - gst_omx_component_add_port (self->component, klass->out_port_index); + gst_omx_component_add_port (self->component, klass->cdata.out_port_index); if (!self->in_port || !self->out_port) return FALSE; @@ -334,7 +199,7 @@ gst_omx_video_dec_finalize (GObject * object) g_mutex_free (self->drain_lock); g_cond_free (self->drain_cond); - G_OBJECT_CLASS (parent_class)->finalize (object); + G_OBJECT_CLASS (gst_omx_video_dec_parent_class)->finalize (object); } static GstStateChangeReturn @@ -381,7 +246,9 @@ gst_omx_video_dec_change_state (GstElement * element, GstStateChange transition) if (ret == GST_STATE_CHANGE_FAILURE) return ret; - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + ret = + GST_ELEMENT_CLASS (gst_omx_video_dec_parent_class)->change_state (element, + transition); if (ret == GST_STATE_CHANGE_FAILURE) return ret; @@ -390,7 +257,7 @@ gst_omx_video_dec_change_state (GstElement * element, GstStateChange transition) case GST_STATE_CHANGE_PLAYING_TO_PAUSED: break; case GST_STATE_CHANGE_PAUSED_TO_READY: - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; if (!gst_omx_video_dec_shutdown (self)) @@ -410,18 +277,18 @@ gst_omx_video_dec_change_state (GstElement * element, GstStateChange transition) #define MAX_FRAME_DIST_TICKS (5 * OMX_TICKS_PER_SECOND) #define MAX_FRAME_DIST_FRAMES (100) -static GstVideoFrame * +static GstVideoFrameState * 
_find_nearest_frame (GstOMXVideoDec * self, GstOMXBuffer * buf) { GList *l, *best_l = NULL; GList *finish_frames = NULL; - GstVideoFrame *best = NULL; + GstVideoFrameState *best = NULL; guint64 best_timestamp = 0; guint64 best_diff = G_MAXUINT64; BufferIdentification *best_id = NULL; for (l = GST_BASE_VIDEO_CODEC (self)->frames; l; l = l->next) { - GstVideoFrame *tmp = l->data; + GstVideoFrameState *tmp = l->data; BufferIdentification *id = tmp->coder_hook; guint64 timestamp, diff; @@ -454,7 +321,7 @@ _find_nearest_frame (GstOMXVideoDec * self, GstOMXBuffer * buf) if (best_id) { for (l = GST_BASE_VIDEO_CODEC (self)->frames; l && l != best_l; l = l->next) { - GstVideoFrame *tmp = l->data; + GstVideoFrameState *tmp = l->data; BufferIdentification *id = tmp->coder_hook; guint64 diff_ticks, diff_frames; @@ -492,6 +359,8 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf, GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state; OMX_PARAM_PORTDEFINITIONTYPE *port_def = &self->out_port->port_def; gboolean ret = FALSE; + GstVideoInfo vinfo; + GstVideoFrame frame; if (state->width != port_def->format.video.nFrameWidth || state->height != port_def->format.video.nFrameHeight) { @@ -500,15 +369,22 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf, } /* Same strides and everything */ - if (GST_BUFFER_SIZE (outbuf) == inbuf->omx_buf->nFilledLen) { - memcpy (GST_BUFFER_DATA (outbuf), + if (gst_buffer_get_size (outbuf) == inbuf->omx_buf->nFilledLen) { + GstMapInfo map = GST_MAP_INFO_INIT; + + gst_buffer_map (outbuf, &map, GST_MAP_WRITE); + memcpy (map.data, inbuf->omx_buf->pBuffer + inbuf->omx_buf->nOffset, inbuf->omx_buf->nFilledLen); + gst_buffer_unmap (outbuf, &map); ret = TRUE; goto done; } + /* Different strides */ + gst_video_info_from_caps (&vinfo, state->caps); + switch (state->format) { case GST_VIDEO_FORMAT_I420:{ gint i, j, height; @@ -518,16 +394,14 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, 
GstOMXBuffer * inbuf, for (i = 0; i < 3; i++) { if (i == 0) { src_stride = port_def->format.video.nStride; - dest_stride = - gst_video_format_get_row_stride (state->format, 0, state->width); + dest_stride = vinfo.stride[0]; /* XXX: Try this if no stride was set */ if (src_stride == 0) src_stride = dest_stride; } else { src_stride = port_def->format.video.nStride / 2; - dest_stride = - gst_video_format_get_row_stride (state->format, 1, state->width); + dest_stride = vinfo.stride[1]; /* XXX: Try this if no stride was set */ if (src_stride == 0) @@ -544,20 +418,16 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf, (port_def->format.video.nSliceHeight / 2) * (port_def->format.video.nStride / 2); - dest = - GST_BUFFER_DATA (outbuf) + - gst_video_format_get_component_offset (state->format, i, - state->width, state->height); - - height = - gst_video_format_get_component_height (state->format, i, - state->height); + gst_video_frame_map (&frame, &vinfo, outbuf, GST_MAP_WRITE); + dest = GST_VIDEO_FRAME_COMP_DATA (&frame, i); + height = GST_VIDEO_FRAME_HEIGHT (&frame); for (j = 0; j < height; j++) { memcpy (dest, src, MIN (src_stride, dest_stride)); src += src_stride; dest += dest_stride; } + gst_video_frame_unmap (&frame); } ret = TRUE; break; @@ -570,16 +440,14 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf, for (i = 0; i < 2; i++) { if (i == 0) { src_stride = port_def->format.video.nStride; - dest_stride = - gst_video_format_get_row_stride (state->format, 0, state->width); + dest_stride = vinfo.stride[0]; /* XXX: Try this if no stride was set */ if (src_stride == 0) src_stride = dest_stride; } else { src_stride = port_def->format.video.nStride; - dest_stride = - gst_video_format_get_row_stride (state->format, 1, state->width); + dest_stride = vinfo.stride[1]; /* XXX: Try this if no stride was set */ if (src_stride == 0) @@ -592,19 +460,16 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * 
inbuf, port_def->format.video.nSliceHeight * port_def->format.video.nStride; - dest = - GST_BUFFER_DATA (outbuf) + - gst_video_format_get_component_offset (state->format, i, - state->width, state->height); + gst_video_frame_map (&frame, &vinfo, outbuf, GST_MAP_WRITE); + dest = GST_VIDEO_FRAME_COMP_DATA (&frame, i); + height = GST_VIDEO_FRAME_HEIGHT (&frame); - height = - gst_video_format_get_component_height (state->format, i, - state->height); for (j = 0; j < height; j++) { memcpy (dest, src, MIN (src_stride, dest_stride)); src += src_stride; dest += dest_stride; } + gst_video_frame_unmap (&frame); } ret = TRUE; break; @@ -618,7 +483,7 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf, done: if (ret) { - GST_BUFFER_TIMESTAMP (outbuf) = + GST_BUFFER_PTS (outbuf) = gst_util_uint64_scale (inbuf->omx_buf->nTimeStamp, GST_SECOND, OMX_TICKS_PER_SECOND); if (inbuf->omx_buf->nTickCount != 0) @@ -635,7 +500,7 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self) { GstOMXPort *port = self->out_port; GstOMXBuffer *buf = NULL; - GstVideoFrame *frame; + GstVideoFrameState *frame; GstFlowReturn flow_ret = GST_FLOW_OK; GstOMXAcquireBufferReturn acq_return; GstClockTimeDiff deadline; @@ -653,7 +518,7 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self) return; } - if (!GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (self)) + if (!gst_pad_has_current_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self)) || acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURED) { GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state; OMX_PARAM_PORTDEFINITIONTYPE port_def; @@ -783,7 +648,7 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self) frame); } - if (is_eos || flow_ret == GST_FLOW_UNEXPECTED) { + if (is_eos || flow_ret == GST_FLOW_EOS) { g_mutex_lock (self->drain_lock); if (self->draining) { GST_DEBUG_OBJECT (self, "Drained"); @@ -791,7 +656,7 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self) g_cond_broadcast (self->drain_cond); } else if (flow_ret == GST_FLOW_OK) { GST_DEBUG_OBJECT 
(self, "Component signalled EOS"); - flow_ret = GST_FLOW_UNEXPECTED; + flow_ret = GST_FLOW_EOS; } g_mutex_unlock (self->drain_lock); } else { @@ -827,21 +692,20 @@ flushing: { GST_DEBUG_OBJECT (self, "Flushing -- stopping task"); gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self)); - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; return; } flow_error: { - if (flow_ret == GST_FLOW_UNEXPECTED) { + if (flow_ret == GST_FLOW_EOS) { GST_DEBUG_OBJECT (self, "EOS"); gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self)); - } else if (flow_ret == GST_FLOW_NOT_LINKED - || flow_ret < GST_FLOW_UNEXPECTED) { + } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) { GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."), ("stream stopped, reason %s", gst_flow_get_name (flow_ret))); @@ -926,7 +790,7 @@ gst_omx_video_dec_stop (GstBaseVideoDecoder * decoder) if (gst_omx_component_get_state (self->component, 0) > OMX_StateIdle) gst_omx_component_set_state (self->component, OMX_StateIdle); - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; self->eos = FALSE; @@ -952,21 +816,23 @@ gst_omx_video_dec_negotiate (GstOMXVideoDec * self) OMX_VIDEO_PARAM_PORTFORMATTYPE param; OMX_ERRORTYPE err; GstCaps *comp_supported_caps; - const GstCaps *templ_caps; + GstCaps *templ_caps; GstCaps *peer_caps, *intersection; GstVideoFormat format; gint old_index; GstStructure *s; - guint32 fourcc; + const gchar *format_str; templ_caps = - gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self)); - peer_caps = gst_pad_peer_get_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self)); + gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD + (self))); + peer_caps = + gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD 
(self), templ_caps); if (peer_caps) { - intersection = gst_caps_intersect (templ_caps, peer_caps); - gst_caps_unref (peer_caps); + intersection = peer_caps; + gst_caps_unref (templ_caps); } else { - intersection = gst_caps_copy (templ_caps); + intersection = templ_caps; } GST_OMX_INIT_STRUCT (¶m); @@ -995,15 +861,13 @@ gst_omx_video_dec_negotiate (GstOMXVideoDec * self) switch (param.eColorFormat) { case OMX_COLOR_FormatYUV420Planar: gst_caps_append_structure (comp_supported_caps, - gst_structure_new ("video/x-raw-yuv", - "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', - '0'), NULL)); + gst_structure_new ("video/x-raw", + "format", G_TYPE_STRING, "I420", NULL)); break; case OMX_COLOR_FormatYUV420SemiPlanar: gst_caps_append_structure (comp_supported_caps, - gst_structure_new ("video/x-raw-yuv", - "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', - '2'), NULL)); + gst_structure_new ("video/x-raw", + "format", G_TYPE_STRING, "NV12", NULL)); break; default: break; @@ -1020,17 +884,21 @@ gst_omx_video_dec_negotiate (GstOMXVideoDec * self) intersection = tmp; } + if (gst_caps_is_empty (intersection)) { + gst_caps_unref (intersection); GST_ERROR_OBJECT (self, "Empty caps"); return FALSE; } - gst_caps_truncate (intersection); + intersection = gst_caps_truncate (intersection); s = gst_caps_get_structure (intersection, 0); - if (!gst_structure_get_fourcc (s, "format", &fourcc) || + format_str = gst_structure_get_string (s, "format"); + if (!format_str || (format = - gst_video_format_from_fourcc (fourcc)) == GST_VIDEO_FORMAT_UNKNOWN) { + gst_video_format_from_string (format_str)) == + GST_VIDEO_FORMAT_UNKNOWN) { GST_ERROR_OBJECT (self, "Invalid caps: %" GST_PTR_FORMAT, intersection); return FALSE; } @@ -1108,7 +976,7 @@ gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder, if (needs_disable && is_format_change) { gst_omx_video_dec_drain (self); - if (klass->hacks & GST_OMX_HACK_NO_COMPONENT_RECONFIGURE) { + if (klass->cdata.hacks & 
GST_OMX_HACK_NO_COMPONENT_RECONFIGURE) { GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self); gst_omx_video_dec_stop (GST_BASE_VIDEO_DECODER (self)); gst_omx_video_dec_close (self); @@ -1245,7 +1113,7 @@ gst_omx_video_dec_parse_data (GstBaseVideoDecoder * decoder, gboolean at_eos) static GstFlowReturn gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder, - GstVideoFrame * frame) + GstVideoFrameState * frame) { GstOMXAcquireBufferReturn acq_ret = GST_OMX_ACQUIRE_BUFFER_ERROR; GstOMXVideoDec *self; @@ -1262,7 +1130,7 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder, if (self->eos) { GST_WARNING_OBJECT (self, "Got frame after EOS"); - return GST_FLOW_UNEXPECTED; + return GST_FLOW_EOS; } timestamp = frame->presentation_timestamp; @@ -1286,7 +1154,7 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder, } } - while (offset < GST_BUFFER_SIZE (frame->sink_buffer)) { + while (offset < gst_buffer_get_size (frame->sink_buffer)) { /* Make sure to release the base class stream lock, otherwise * _loop() can't call _finish_frame() and we might block forever * because no input buffers are released */ @@ -1327,15 +1195,16 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder, codec_data = self->codec_data; if (buf->omx_buf->nAllocLen - buf->omx_buf->nOffset < - GST_BUFFER_SIZE (codec_data)) { + gst_buffer_get_size (codec_data)) { gst_omx_port_release_buffer (self->in_port, buf); goto too_large_codec_data; } buf->omx_buf->nFlags |= OMX_BUFFERFLAG_CODECCONFIG; - buf->omx_buf->nFilledLen = GST_BUFFER_SIZE (codec_data); - memcpy (buf->omx_buf->pBuffer + buf->omx_buf->nOffset, - GST_BUFFER_DATA (codec_data), GST_BUFFER_SIZE (codec_data)); + buf->omx_buf->nFilledLen = gst_buffer_get_size (codec_data);; + gst_buffer_extract (codec_data, 0, + buf->omx_buf->pBuffer + buf->omx_buf->nOffset, + buf->omx_buf->nFilledLen); self->started = TRUE; gst_omx_port_release_buffer (self->in_port, buf); @@ -1348,11 +1217,8 @@ gst_omx_video_dec_handle_frame 
(GstBaseVideoDecoder * decoder, /* Copy the buffer content in chunks of size as requested * by the port */ - buf->omx_buf->nFilledLen = - MIN (GST_BUFFER_SIZE (frame->sink_buffer) - offset, - buf->omx_buf->nAllocLen - buf->omx_buf->nOffset); - memcpy (buf->omx_buf->pBuffer + buf->omx_buf->nOffset, - GST_BUFFER_DATA (frame->sink_buffer) + offset, + gst_buffer_extract (codec_data, offset, + buf->omx_buf->pBuffer + buf->omx_buf->nOffset, buf->omx_buf->nFilledLen); /* Interpolate timestamps if we're passing the buffer @@ -1360,7 +1226,7 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder, if (offset != 0 && duration != GST_CLOCK_TIME_NONE) { timestamp_offset = gst_util_uint64_scale (offset, duration, - GST_BUFFER_SIZE (frame->sink_buffer)); + gst_buffer_get_size (frame->sink_buffer)); } if (timestamp != GST_CLOCK_TIME_NONE) { @@ -1372,7 +1238,7 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder, if (duration != GST_CLOCK_TIME_NONE) { buf->omx_buf->nTickCount = gst_util_uint64_scale (buf->omx_buf->nFilledLen, duration, - GST_BUFFER_SIZE (frame->sink_buffer)); + gst_buffer_get_size (frame->sink_buffer)); self->last_upstream_ts += duration; } @@ -1414,7 +1280,8 @@ too_large_codec_data: { GST_ELEMENT_ERROR (self, STREAM, FORMAT, (NULL), ("codec_data larger than supported by OpenMAX port (%u > %u)", - GST_BUFFER_SIZE (codec_data), self->in_port->port_def.nBufferSize)); + gst_buffer_get_size (codec_data), + self->in_port->port_def.nBufferSize)); return GST_FLOW_ERROR; } @@ -1429,8 +1296,8 @@ component_error: flushing: { - GST_DEBUG_OBJECT (self, "Flushing -- returning WRONG_STATE"); - return GST_FLOW_WRONG_STATE; + GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING"); + return GST_FLOW_FLUSHING; } reconfigure_error: { diff --git a/omx/gstomxvideodec.h b/omx/gstomxvideodec.h index 661957a..a22b7b2 100644 --- a/omx/gstomxvideodec.h +++ b/omx/gstomxvideodec.h @@ -77,20 +77,11 @@ struct _GstOMXVideoDecClass { GstBaseVideoDecoderClass parent_class; 
- const gchar *core_name; - const gchar *component_name; - const gchar *component_role; - - const gchar *default_src_template_caps; - const gchar *default_sink_template_caps; - - guint32 in_port_index, out_port_index; - - guint64 hacks; + GstOMXClassData cdata; gboolean (*is_format_change) (GstOMXVideoDec * self, GstOMXPort * port, GstVideoState * state); gboolean (*set_format) (GstOMXVideoDec * self, GstOMXPort * port, GstVideoState * state); - GstFlowReturn (*prepare_frame) (GstOMXVideoDec * self, GstVideoFrame *frame); + GstFlowReturn (*prepare_frame) (GstOMXVideoDec * self, GstVideoFrameState *frame); }; GType gst_omx_video_dec_get_type (void); diff --git a/omx/gstomxvideoenc.c b/omx/gstomxvideoenc.c index 1d6db82..78df4c5 100644 --- a/omx/gstomxvideoenc.c +++ b/omx/gstomxvideoenc.c @@ -81,16 +81,16 @@ gst_omx_video_enc_change_state (GstElement * element, static gboolean gst_omx_video_enc_start (GstBaseVideoEncoder * encoder); static gboolean gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder); static gboolean gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder, - GstVideoState * state); + GstVideoInfo * info); static gboolean gst_omx_video_enc_reset (GstBaseVideoEncoder * encoder); static GstFlowReturn gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * - encoder, GstVideoFrame * frame); + encoder, GstVideoFrameState * frame); static gboolean gst_omx_video_enc_finish (GstBaseVideoEncoder * encoder); static GstFlowReturn gst_omx_video_enc_drain (GstOMXVideoEnc * self); static GstFlowReturn gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * - self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoFrame * frame); + self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoFrameState * frame); enum { @@ -111,149 +111,14 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_video_enc_debug_category, "omxvideoenc", 0, \ "debug category for gst-omx video encoder base class"); 
-GST_BOILERPLATE_FULL (GstOMXVideoEnc, gst_omx_video_enc, GstBaseVideoEncoder, +G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstOMXVideoEnc, gst_omx_video_enc, GST_TYPE_BASE_VIDEO_ENCODER, DEBUG_INIT); static void -gst_omx_video_enc_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoEncClass *videoenc_class = GST_OMX_VIDEO_ENC_CLASS (g_class); - GKeyFile *config; - const gchar *element_name; - GError *err; - gchar *core_name, *component_name, *component_role; - gint in_port_index, out_port_index; - gchar *template_caps; - GstPadTemplate *templ; - GstCaps *caps; - gchar **hacks; - - element_name = - g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), - gst_omx_element_name_quark); - /* This happens for the base class and abstract subclasses */ - if (!element_name) - return; - - config = gst_omx_get_configuration (); - - /* This will always succeed, see check in plugin_init */ - core_name = g_key_file_get_string (config, element_name, "core-name", NULL); - g_assert (core_name != NULL); - videoenc_class->core_name = core_name; - component_name = - g_key_file_get_string (config, element_name, "component-name", NULL); - g_assert (component_name != NULL); - videoenc_class->component_name = component_name; - - /* If this fails we simply don't set a role */ - if ((component_role = - g_key_file_get_string (config, element_name, "component-role", - NULL))) { - GST_DEBUG ("Using component-role '%s' for element '%s'", component_role, - element_name); - videoenc_class->component_role = component_role; - } - - - /* Now set the inport/outport indizes and assume sane defaults */ - err = NULL; - in_port_index = - g_key_file_get_integer (config, element_name, "in-port-index", &err); - if (err != NULL) { - GST_DEBUG ("No 'in-port-index' set for element '%s', assuming 0: %s", - element_name, err->message); - in_port_index = 0; - g_error_free (err); - } - videoenc_class->in_port_index = in_port_index; - - err = NULL; - out_port_index = - 
g_key_file_get_integer (config, element_name, "out-port-index", &err); - if (err != NULL) { - GST_DEBUG ("No 'out-port-index' set for element '%s', assuming 1: %s", - element_name, err->message); - out_port_index = 1; - g_error_free (err); - } - videoenc_class->out_port_index = out_port_index; - - /* Add pad templates */ - err = NULL; - if (!(template_caps = - g_key_file_get_string (config, element_name, "sink-template-caps", - &err))) { - GST_DEBUG - ("No sink template caps specified for element '%s', using default '%s'", - element_name, videoenc_class->default_sink_template_caps); - caps = gst_caps_from_string (videoenc_class->default_sink_template_caps); - g_assert (caps != NULL); - g_error_free (err); - } else { - caps = gst_caps_from_string (template_caps); - if (!caps) { - GST_DEBUG - ("Could not parse sink template caps '%s' for element '%s', using default '%s'", - template_caps, element_name, - videoenc_class->default_sink_template_caps); - caps = gst_caps_from_string (videoenc_class->default_sink_template_caps); - g_assert (caps != NULL); - } - } - templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps); - g_free (template_caps); - gst_element_class_add_pad_template (element_class, templ); - gst_object_unref (templ); - - err = NULL; - if (!(template_caps = - g_key_file_get_string (config, element_name, "src-template-caps", - &err))) { - GST_DEBUG - ("No src template caps specified for element '%s', using default '%s'", - element_name, videoenc_class->default_src_template_caps); - caps = gst_caps_from_string (videoenc_class->default_src_template_caps); - g_assert (caps != NULL); - g_error_free (err); - } else { - caps = gst_caps_from_string (template_caps); - if (!caps) { - GST_DEBUG - ("Could not parse src template caps '%s' for element '%s', using default '%s'", - template_caps, element_name, - videoenc_class->default_src_template_caps); - caps = gst_caps_from_string (videoenc_class->default_src_template_caps); - g_assert (caps != NULL); 
- } - } - templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps); - g_free (template_caps); - gst_element_class_add_pad_template (element_class, templ); - gst_object_unref (templ); - - if ((hacks = - g_key_file_get_string_list (config, element_name, "hacks", NULL, - NULL))) { -#ifndef GST_DISABLE_GST_DEBUG - gchar **walk = hacks; - - while (*walk) { - GST_DEBUG ("Using hack: %s", *walk); - walk++; - } -#endif - - videoenc_class->hacks = gst_omx_parse_hacks (hacks); - } -} - -static void gst_omx_video_enc_class_init (GstOMXVideoEncClass * klass) { GObjectClass *gobject_class = G_OBJECT_CLASS (klass); @@ -261,6 +126,7 @@ gst_omx_video_enc_class_init (GstOMXVideoEncClass * klass) GstBaseVideoEncoderClass *base_video_encoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass); + gobject_class->finalize = gst_omx_video_enc_finalize; gobject_class->set_property = gst_omx_video_enc_set_property; gobject_class->get_property = gst_omx_video_enc_get_property; @@ -314,7 +180,7 @@ gst_omx_video_enc_class_init (GstOMXVideoEncClass * klass) base_video_encoder_class->finish = GST_DEBUG_FUNCPTR (gst_omx_video_enc_finish); - klass->default_sink_template_caps = "video/x-raw-yuv, " + klass->cdata.default_sink_template_caps = "video/x-raw, " "width = " GST_VIDEO_SIZE_RANGE ", " "height = " GST_VIDEO_SIZE_RANGE ", " "framerate = " GST_VIDEO_FPS_RANGE; @@ -323,7 +189,7 @@ gst_omx_video_enc_class_init (GstOMXVideoEncClass * klass) } static void -gst_omx_video_enc_init (GstOMXVideoEnc * self, GstOMXVideoEncClass * klass) +gst_omx_video_enc_init (GstOMXVideoEnc * self) { self->control_rate = GST_OMX_VIDEO_ENC_CONTROL_RATE_DEFAULT; self->target_bitrate = GST_OMX_VIDEO_ENC_TARGET_BITRATE_DEFAULT; @@ -341,8 +207,7 @@ gst_omx_video_enc_open (GstOMXVideoEnc * self) GstOMXVideoEncClass *klass = GST_OMX_VIDEO_ENC_GET_CLASS (self); self->component = - gst_omx_component_new (GST_OBJECT_CAST (self), klass->core_name, - klass->component_name, klass->component_role, klass->hacks); + 
gst_omx_component_new (GST_OBJECT_CAST (self), &klass->cdata); self->started = FALSE; if (!self->component) @@ -353,9 +218,9 @@ gst_omx_video_enc_open (GstOMXVideoEnc * self) return FALSE; self->in_port = - gst_omx_component_add_port (self->component, klass->in_port_index); + gst_omx_component_add_port (self->component, klass->cdata.in_port_index); self->out_port = - gst_omx_component_add_port (self->component, klass->out_port_index); + gst_omx_component_add_port (self->component, klass->cdata.out_port_index); if (!self->in_port || !self->out_port) return FALSE; @@ -497,7 +362,7 @@ gst_omx_video_enc_finalize (GObject * object) g_mutex_free (self->drain_lock); g_cond_free (self->drain_cond); - G_OBJECT_CLASS (parent_class)->finalize (object); + G_OBJECT_CLASS (gst_omx_video_enc_parent_class)->finalize (object); } static void @@ -616,7 +481,9 @@ gst_omx_video_enc_change_state (GstElement * element, GstStateChange transition) if (ret == GST_STATE_CHANGE_FAILURE) return ret; - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + ret = + GST_ELEMENT_CLASS (gst_omx_video_enc_parent_class)->change_state (element, + transition); if (ret == GST_STATE_CHANGE_FAILURE) return ret; @@ -625,7 +492,7 @@ gst_omx_video_enc_change_state (GstElement * element, GstStateChange transition) case GST_STATE_CHANGE_PLAYING_TO_PAUSED: break; case GST_STATE_CHANGE_PAUSED_TO_READY: - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; if (!gst_omx_video_enc_shutdown (self)) @@ -645,18 +512,18 @@ gst_omx_video_enc_change_state (GstElement * element, GstStateChange transition) #define MAX_FRAME_DIST_TICKS (5 * OMX_TICKS_PER_SECOND) #define MAX_FRAME_DIST_FRAMES (100) -static GstVideoFrame * +static GstVideoFrameState * _find_nearest_frame (GstOMXVideoEnc * self, GstOMXBuffer * buf) { GList *l, *best_l = NULL; GList *finish_frames = NULL; - GstVideoFrame *best = NULL; + GstVideoFrameState *best = NULL; 
guint64 best_timestamp = 0; guint64 best_diff = G_MAXUINT64; BufferIdentification *best_id = NULL; for (l = GST_BASE_VIDEO_CODEC (self)->frames; l; l = l->next) { - GstVideoFrame *tmp = l->data; + GstVideoFrameState *tmp = l->data; BufferIdentification *id = tmp->coder_hook; guint64 timestamp, diff; @@ -689,7 +556,7 @@ _find_nearest_frame (GstOMXVideoEnc * self, GstOMXBuffer * buf) if (best_id) { for (l = GST_BASE_VIDEO_CODEC (self)->frames; l && l != best_l; l = l->next) { - GstVideoFrame *tmp = l->data; + GstVideoFrameState *tmp = l->data; BufferIdentification *id = tmp->coder_hook; guint64 diff_ticks, diff_frames; @@ -722,7 +589,7 @@ _find_nearest_frame (GstOMXVideoEnc * self, GstOMXBuffer * buf) static GstFlowReturn gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port, - GstOMXBuffer * buf, GstVideoFrame * frame) + GstOMXBuffer * buf, GstVideoFrameState * frame) { GstOMXVideoEncClass *klass = GST_OMX_VIDEO_ENC_GET_CLASS (self); GstFlowReturn flow_ret = GST_FLOW_OK; @@ -731,12 +598,18 @@ gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port, && buf->omx_buf->nFilledLen > 0) { GstCaps *caps; GstBuffer *codec_data; + GstMapInfo map = GST_MAP_INFO_INIT; - caps = gst_caps_copy (GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (self))); + caps = + gst_caps_copy (gst_pad_get_current_caps (GST_BASE_VIDEO_CODEC_SRC_PAD + (self))); codec_data = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen); - memcpy (GST_BUFFER_DATA (codec_data), + + gst_buffer_map (codec_data, &map, GST_MAP_WRITE); + memcpy (map.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset, buf->omx_buf->nFilledLen); + gst_buffer_unmap (codec_data, &map); gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data, NULL); if (!gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self), caps)) { @@ -747,20 +620,20 @@ gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port, flow_ret = GST_FLOW_OK; } else if (buf->omx_buf->nFilledLen > 
0) { GstBuffer *outbuf; + GstMapInfo map = GST_MAP_INFO_INIT; if (buf->omx_buf->nFilledLen > 0) { outbuf = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen); - memcpy (GST_BUFFER_DATA (outbuf), + gst_buffer_map (outbuf, &map, GST_MAP_WRITE); + memcpy (map.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset, buf->omx_buf->nFilledLen); + gst_buffer_unmap (outbuf, &map); } else { outbuf = gst_buffer_new (); } - gst_buffer_set_caps (outbuf, - GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (self))); - GST_BUFFER_TIMESTAMP (outbuf) = gst_util_uint64_scale (buf->omx_buf->nTimeStamp, GST_SECOND, OMX_TICKS_PER_SECOND); @@ -769,7 +642,7 @@ gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port, gst_util_uint64_scale (buf->omx_buf->nTickCount, GST_SECOND, OMX_TICKS_PER_SECOND); - if ((klass->hacks & GST_OMX_HACK_SYNCFRAME_FLAG_NOT_USED) + if ((klass->cdata.hacks & GST_OMX_HACK_SYNCFRAME_FLAG_NOT_USED) || (buf->omx_buf->nFlags & OMX_BUFFERFLAG_SYNCFRAME)) { if (frame) frame->is_sync_point = TRUE; @@ -806,7 +679,7 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self) GstOMXVideoEncClass *klass; GstOMXPort *port = self->out_port; GstOMXBuffer *buf = NULL; - GstVideoFrame *frame; + GstVideoFrameState *frame; GstFlowReturn flow_ret = GST_FLOW_OK; GstOMXAcquireBufferReturn acq_return; gboolean is_eos; @@ -825,7 +698,7 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self) return; } - if (!GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (self)) + if (!gst_pad_has_current_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self)) || acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURED) { GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state; GstCaps *caps; @@ -882,7 +755,7 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self) g_assert (klass->handle_output_frame); flow_ret = klass->handle_output_frame (self, self->out_port, buf, frame); - if (is_eos || flow_ret == GST_FLOW_UNEXPECTED) { + if (is_eos || flow_ret == GST_FLOW_EOS) { g_mutex_lock (self->drain_lock); if (self->draining) { 
GST_DEBUG_OBJECT (self, "Drained"); @@ -890,7 +763,7 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self) g_cond_broadcast (self->drain_cond); } else if (flow_ret == GST_FLOW_OK) { GST_DEBUG_OBJECT (self, "Component signalled EOS"); - flow_ret = GST_FLOW_UNEXPECTED; + flow_ret = GST_FLOW_EOS; } g_mutex_unlock (self->drain_lock); } else { @@ -903,9 +776,9 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self) self->downstream_flow_ret = flow_ret; } else { - g_assert ((klass->hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)); + g_assert ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)); GST_BASE_VIDEO_CODEC_STREAM_LOCK (self); - flow_ret = GST_FLOW_UNEXPECTED; + flow_ret = GST_FLOW_EOS; } if (flow_ret != GST_FLOW_OK) @@ -932,20 +805,19 @@ flushing: { GST_DEBUG_OBJECT (self, "Flushing -- stopping task"); gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self)); - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; return; } flow_error: { - if (flow_ret == GST_FLOW_UNEXPECTED) { + if (flow_ret == GST_FLOW_EOS) { GST_DEBUG_OBJECT (self, "EOS"); gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self)); - } else if (flow_ret == GST_FLOW_NOT_LINKED - || flow_ret < GST_FLOW_UNEXPECTED) { + } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) { GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."), ("stream stopped, reason %s", gst_flow_get_name (flow_ret))); @@ -1015,7 +887,7 @@ gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder) if (gst_omx_component_get_state (self->component, 0) > OMX_StateIdle) gst_omx_component_set_state (self->component, OMX_StateIdle); - self->downstream_flow_ret = GST_FLOW_WRONG_STATE; + self->downstream_flow_ret = GST_FLOW_FLUSHING; self->started = FALSE; self->eos = FALSE; @@ -1031,7 +903,7 @@ gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder) static gboolean 
gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder, - GstVideoState * state) + GstVideoInfo * info) { GstOMXVideoEnc *self; GstOMXVideoEncClass *klass; @@ -1041,7 +913,8 @@ gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder, self = GST_OMX_VIDEO_ENC (encoder); klass = GST_OMX_VIDEO_ENC_GET_CLASS (encoder); - GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, state->caps); + GST_DEBUG_OBJECT (self, "Setting new format %s", + gst_video_format_to_string (info->finfo->format)); gst_omx_port_get_port_definition (self->in_port, &port_def); @@ -1061,7 +934,7 @@ gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder, return FALSE; } - switch (state->format) { + switch (info->finfo->format) { case GST_VIDEO_FORMAT_I420: port_def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar; break; @@ -1069,19 +942,20 @@ gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder, port_def.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar; break; default: - GST_ERROR_OBJECT (self, "Unsupported caps %" GST_PTR_FORMAT, state->caps); + GST_ERROR_OBJECT (self, "Unsupported format %s", + gst_video_format_to_string (info->finfo->format)); return FALSE; break; } - port_def.format.video.nFrameWidth = state->width; - port_def.format.video.nFrameHeight = state->height; - if (state->fps_n == 0) { + port_def.format.video.nFrameWidth = info->width; + port_def.format.video.nFrameHeight = info->height; + if (info->fps_n == 0) { port_def.format.video.xFramerate = 0; } else { - if (!(klass->hacks & GST_OMX_HACK_VIDEO_FRAMERATE_INTEGER)) - port_def.format.video.xFramerate = (state->fps_n << 16) / (state->fps_d); + if (!(klass->cdata.hacks & GST_OMX_HACK_VIDEO_FRAMERATE_INTEGER)) + port_def.format.video.xFramerate = (info->fps_n << 16) / (info->fps_d); else - port_def.format.video.xFramerate = (state->fps_n) / (state->fps_d); + port_def.format.video.xFramerate = (info->fps_n) / (info->fps_d); } if (!gst_omx_port_update_port_definition (self->in_port, 
&port_def)) @@ -1090,7 +964,7 @@ gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder, return FALSE; if (klass->set_format) { - if (!klass->set_format (self, self->in_port, state)) { + if (!klass->set_format (self, self->in_port, info)) { GST_ERROR_OBJECT (self, "Subclass failed to set the new format"); return FALSE; } @@ -1186,6 +1060,8 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state; OMX_PARAM_PORTDEFINITIONTYPE *port_def = &self->in_port->port_def; gboolean ret = FALSE; + GstVideoInfo vinfo; + GstVideoFrame frame; if (state->width != port_def->format.video.nFrameWidth || state->height != port_def->format.video.nFrameHeight) { @@ -1194,16 +1070,21 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, } /* Same strides and everything */ - if (GST_BUFFER_SIZE (inbuf) == + if (gst_buffer_get_size (inbuf) == outbuf->omx_buf->nAllocLen - outbuf->omx_buf->nOffset) { - outbuf->omx_buf->nFilledLen = GST_BUFFER_SIZE (inbuf); - memcpy (outbuf->omx_buf->pBuffer + outbuf->omx_buf->nOffset, - GST_BUFFER_DATA (inbuf), outbuf->omx_buf->nFilledLen); + outbuf->omx_buf->nFilledLen = gst_buffer_get_size (inbuf); + + gst_buffer_extract (inbuf, 0, + outbuf->omx_buf->pBuffer + outbuf->omx_buf->nOffset, + outbuf->omx_buf->nFilledLen); ret = TRUE; goto done; } /* Different strides */ + + gst_video_info_from_caps (&vinfo, state->caps); + switch (state->format) { case GST_VIDEO_FORMAT_I420:{ gint i, j, height; @@ -1215,16 +1096,15 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, for (i = 0; i < 3; i++) { if (i == 0) { dest_stride = port_def->format.video.nStride; - src_stride = - gst_video_format_get_row_stride (state->format, 0, state->width); + src_stride = vinfo.stride[0]; /* XXX: Try this if no stride was set */ if (dest_stride == 0) dest_stride = src_stride; } else { dest_stride = port_def->format.video.nStride / 2; - src_stride = - 
gst_video_format_get_row_stride (state->format, 1, state->width); + src_stride = vinfo.stride[1]; + /* XXX: Try this if no stride was set */ if (dest_stride == 0) dest_stride = src_stride; @@ -1240,21 +1120,14 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, (port_def->format.video.nSliceHeight / 2) * (port_def->format.video.nStride / 2); - src = - GST_BUFFER_DATA (inbuf) + - gst_video_format_get_component_offset (state->format, i, - state->width, state->height); - - height = - gst_video_format_get_component_height (state->format, i, - state->height); - - if (src + src_stride * height > - GST_BUFFER_DATA (inbuf) + GST_BUFFER_SIZE (inbuf)) { + if (!gst_video_frame_map (&frame, &vinfo, inbuf, GST_MAP_READ)) { GST_ERROR_OBJECT (self, "Invalid input buffer size"); ret = FALSE; break; } + src = GST_VIDEO_FRAME_COMP_DATA (&frame, i); + height = GST_VIDEO_FRAME_HEIGHT (&frame); + if (dest + dest_stride * height > outbuf->omx_buf->pBuffer + outbuf->omx_buf->nAllocLen) { GST_ERROR_OBJECT (self, "Invalid output buffer size"); @@ -1268,6 +1141,8 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, src += src_stride; dest += dest_stride; } + + gst_video_frame_unmap (&frame); } ret = TRUE; break; @@ -1282,15 +1157,14 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, for (i = 0; i < 2; i++) { if (i == 0) { dest_stride = port_def->format.video.nStride; - src_stride = - gst_video_format_get_row_stride (state->format, 0, state->width); + src_stride = vinfo.stride[0]; /* XXX: Try this if no stride was set */ if (dest_stride == 0) dest_stride = src_stride; } else { dest_stride = port_def->format.video.nStride; - src_stride = - gst_video_format_get_row_stride (state->format, 1, state->width); + src_stride = vinfo.stride[1]; + /* XXX: Try this if no stride was set */ if (dest_stride == 0) dest_stride = src_stride; @@ -1302,21 +1176,16 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, 
port_def->format.video.nSliceHeight * port_def->format.video.nStride; - src = - GST_BUFFER_DATA (inbuf) + - gst_video_format_get_component_offset (state->format, i, - state->width, state->height); - height = - gst_video_format_get_component_height (state->format, i, - state->height); - if (src + src_stride * height > - GST_BUFFER_DATA (inbuf) + GST_BUFFER_SIZE (inbuf)) { + if (!gst_video_frame_map (&frame, &vinfo, inbuf, GST_MAP_READ)) { GST_ERROR_OBJECT (self, "Invalid input buffer size"); ret = FALSE; break; } + src = GST_VIDEO_FRAME_COMP_DATA (&frame, i); + height = GST_VIDEO_FRAME_HEIGHT (&frame); + if (dest + dest_stride * height > outbuf->omx_buf->pBuffer + outbuf->omx_buf->nAllocLen) { GST_ERROR_OBJECT (self, "Invalid output buffer size"); @@ -1330,6 +1199,8 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf, src += src_stride; dest += dest_stride; } + + gst_video_frame_unmap (&frame); } ret = TRUE; break; @@ -1346,7 +1217,7 @@ done: static GstFlowReturn gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * encoder, - GstVideoFrame * frame) + GstVideoFrameState * frame) { GstOMXAcquireBufferReturn acq_ret = GST_OMX_ACQUIRE_BUFFER_ERROR; GstOMXVideoEnc *self; @@ -1358,7 +1229,7 @@ gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * encoder, if (self->eos) { GST_WARNING_OBJECT (self, "Got frame after EOS"); - return GST_FLOW_UNEXPECTED; + return GST_FLOW_EOS; } if (self->downstream_flow_ret != GST_FLOW_OK) { @@ -1445,7 +1316,7 @@ gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * encoder, if (duration != GST_CLOCK_TIME_NONE) { buf->omx_buf->nTickCount = gst_util_uint64_scale (buf->omx_buf->nFilledLen, duration, - GST_BUFFER_SIZE (frame->sink_buffer)); + gst_buffer_get_size (frame->sink_buffer)); self->last_upstream_ts += duration; } @@ -1480,8 +1351,8 @@ component_error: flushing: { - GST_DEBUG_OBJECT (self, "Flushing -- returning WRONG_STATE"); - return GST_FLOW_WRONG_STATE; + GST_DEBUG_OBJECT (self, "Flushing -- returning 
FLUSHING"); + return GST_FLOW_FLUSHING; } reconfigure_error: { @@ -1517,7 +1388,7 @@ gst_omx_video_enc_finish (GstBaseVideoEncoder * encoder) } self->eos = TRUE; - if ((klass->hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { + if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers"); /* Insert a NULL into the queue to signal EOS */ @@ -1579,7 +1450,7 @@ gst_omx_video_enc_drain (GstOMXVideoEnc * self) return GST_FLOW_OK; } - if ((klass->hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { + if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) { GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers"); return GST_FLOW_OK; } diff --git a/omx/gstomxvideoenc.h b/omx/gstomxvideoenc.h index e728d06..e87e98f 100644 --- a/omx/gstomxvideoenc.h +++ b/omx/gstomxvideoenc.h @@ -49,6 +49,9 @@ struct _GstOMXVideoEnc GstBaseVideoEncoder parent; /* < protected > */ + + GstOMXClassData cdata; + GstOMXCore *core; GstOMXComponent *component; GstOMXPort *in_port, *out_port; @@ -83,20 +86,11 @@ struct _GstOMXVideoEncClass { GstBaseVideoEncoderClass parent_class; - const gchar *core_name; - const gchar *component_name; - const gchar *component_role; - - const gchar *default_src_template_caps; - const gchar *default_sink_template_caps; - - guint32 in_port_index, out_port_index; - - guint64 hacks; + GstOMXClassData cdata; - gboolean (*set_format) (GstOMXVideoEnc * self, GstOMXPort * port, GstVideoState * state); + gboolean (*set_format) (GstOMXVideoEnc * self, GstOMXPort * port, GstVideoInfo * info ); GstCaps *(*get_caps) (GstOMXVideoEnc * self, GstOMXPort * port, GstVideoState * state); - GstFlowReturn (*handle_output_frame) (GstOMXVideoEnc * self, GstOMXPort * port, GstOMXBuffer * buffer, GstVideoFrame * frame); + GstFlowReturn (*handle_output_frame) (GstOMXVideoEnc * self, GstOMXPort * port, GstOMXBuffer * buffer, GstVideoFrameState * frame); }; GType gst_omx_video_enc_get_type (void); diff --git 
a/omx/gstomxwmvdec.c b/omx/gstomxwmvdec.c index e86a656..ac646f4 100644 --- a/omx/gstomxwmvdec.c +++ b/omx/gstomxwmvdec.c @@ -30,7 +30,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_wmv_dec_debug_category); #define GST_CAT_DEFAULT gst_omx_wmv_dec_debug_category /* prototypes */ -static void gst_omx_wmv_dec_finalize (GObject * object); static gboolean gst_omx_wmv_dec_is_format_change (GstOMXVideoDec * dec, GstOMXPort * port, GstVideoState * state); static gboolean gst_omx_wmv_dec_set_format (GstOMXVideoDec * dec, @@ -43,57 +42,38 @@ enum /* class initialization */ -#define DEBUG_INIT(bla) \ +#define DEBUG_INIT \ GST_DEBUG_CATEGORY_INIT (gst_omx_wmv_dec_debug_category, "omxwmvdec", 0, \ "debug category for gst-omx video decoder base class"); -GST_BOILERPLATE_FULL (GstOMXWMVDec, gst_omx_wmv_dec, - GstOMXVideoDec, GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); - -static void -gst_omx_wmv_dec_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (g_class); - - gst_element_class_set_details_simple (element_class, - "OpenMAX WMV Video Decoder", - "Codec/Decoder/Video", - "Decode WMV video streams", - "Sebastian Dröge "); - - /* If no role was set from the config file we set the - * default WMV video decoder role */ - if (!videodec_class->component_role) - videodec_class->component_role = "video_decoder.wmv"; -} +G_DEFINE_TYPE_WITH_CODE (GstOMXWMVDec, gst_omx_wmv_dec, + GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT); static void gst_omx_wmv_dec_class_init (GstOMXWMVDecClass * klass) { - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); GstOMXVideoDecClass *videodec_class = GST_OMX_VIDEO_DEC_CLASS (klass); - gobject_class->finalize = gst_omx_wmv_dec_finalize; - videodec_class->is_format_change = GST_DEBUG_FUNCPTR (gst_omx_wmv_dec_is_format_change); videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_wmv_dec_set_format); - 
videodec_class->default_sink_template_caps = "video/x-wmv"; -} + videodec_class->cdata.default_sink_template_caps = "video/x-wmv"; -static void -gst_omx_wmv_dec_init (GstOMXWMVDec * self, GstOMXWMVDecClass * klass) -{ + + gst_element_class_set_details_simple (element_class, + "OpenMAX WMV Video Decoder", + "Codec/Decoder/Video", + "Decode WMV video streams", + "Sebastian Dröge "); + + gst_omx_set_default_role (&videodec_class->cdata, "video_decoder.wmv"); } static void -gst_omx_wmv_dec_finalize (GObject * object) +gst_omx_wmv_dec_init (GstOMXWMVDec * self) { - /* GstOMXWMVDec *self = GST_OMX_WMV_DEC (object); */ - - G_OBJECT_CLASS (parent_class)->finalize (object); } static gboolean -- 2.7.4