/* GStreamer
 * Copyright (C) 2008 David Schleef <ds@schleef.org>
 * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
 * Copyright (C) 2011 Nokia Corporation. All rights reserved.
 *   Contact: Stefan Kost <stefan.kost@nokia.com>
 * Copyright (C) 2012 Collabora Ltd.
 *   Author : Edward Hervey <edward@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
25 #ifndef _GST_VIDEO_DECODER_H_
26 #define _GST_VIDEO_DECODER_H_
28 #include <gst/base/gstadapter.h>
29 #include <gst/video/gstvideoutils.h>
/* Standard GObject boilerplate: type getter, cast and type-check macros
 * for the GstVideoDecoder instance and class structures. */
#define GST_TYPE_VIDEO_DECODER \
  (gst_video_decoder_get_type())
#define GST_VIDEO_DECODER(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_DECODER,GstVideoDecoder))
#define GST_VIDEO_DECODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_DECODER,GstVideoDecoderClass))
#define GST_VIDEO_DECODER_GET_CLASS(obj) \
  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_DECODER,GstVideoDecoderClass))
#define GST_IS_VIDEO_DECODER(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_DECODER))
#define GST_IS_VIDEO_DECODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_DECODER))
/* Unchecked cast — cheaper than GST_VIDEO_DECODER() but performs no type check. */
#define GST_VIDEO_DECODER_CAST(obj) ((GstVideoDecoder *)(obj))
/**
 * GST_VIDEO_DECODER_SINK_NAME:
 *
 * The name of the templates for the sink pad.
 */
#define GST_VIDEO_DECODER_SINK_NAME    "sink"
/**
 * GST_VIDEO_DECODER_SRC_NAME:
 *
 * The name of the templates for the source pad.
 */
#define GST_VIDEO_DECODER_SRC_NAME     "src"

/**
 * GST_VIDEO_DECODER_SRC_PAD:
 * @obj: a #GstVideoDecoder
 *
 * Gives the pointer to the source #GstPad object of the element.
 */
#define GST_VIDEO_DECODER_SRC_PAD(obj)         (((GstVideoDecoder *) (obj))->srcpad)

/**
 * GST_VIDEO_DECODER_SINK_PAD:
 * @obj: a #GstVideoDecoder
 *
 * Gives the pointer to the sink #GstPad object of the element.
 */
#define GST_VIDEO_DECODER_SINK_PAD(obj)        (((GstVideoDecoder *) (obj))->sinkpad)

/**
 * GST_VIDEO_DECODER_FLOW_NEED_DATA:
 *
 * Returned while parsing to indicate more data is needed.
 */
#define GST_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS

/**
 * GST_VIDEO_DECODER_INPUT_SEGMENT:
 * @obj: base decoder instance
 *
 * Gives the segment of the element.
 */
#define GST_VIDEO_DECODER_INPUT_SEGMENT(obj)     (GST_VIDEO_DECODER_CAST (obj)->input_segment)

/**
 * GST_VIDEO_DECODER_OUTPUT_SEGMENT:
 * @obj: base decoder instance
 *
 * Gives the segment of the element.
 */
#define GST_VIDEO_DECODER_OUTPUT_SEGMENT(obj)     (GST_VIDEO_DECODER_CAST (obj)->output_segment)

/**
 * GST_VIDEO_DECODER_STREAM_LOCK:
 * @decoder: video decoder instance
 *
 * Obtain a lock to protect the decoder function from concurrent access.
 */
#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) g_rec_mutex_lock (&GST_VIDEO_DECODER (decoder)->stream_lock)

/**
 * GST_VIDEO_DECODER_STREAM_UNLOCK:
 * @decoder: video decoder instance
 *
 * Release the lock that protects the decoder function from concurrent access.
 */
#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) g_rec_mutex_unlock (&GST_VIDEO_DECODER (decoder)->stream_lock)
/* Forward declarations of the instance, class and opaque private structures. */
typedef struct _GstVideoDecoder GstVideoDecoder;
typedef struct _GstVideoDecoderClass GstVideoDecoderClass;
typedef struct _GstVideoDecoderPrivate GstVideoDecoderPrivate;
119 /* do not use this one, use macro below */
122 GstFlowReturn _gst_video_decoder_error (GstVideoDecoder *dec, gint weight,
123 GQuark domain, gint code,
124 gchar *txt, gchar *debug,
125 const gchar *file, const gchar *function,
/**
 * GST_VIDEO_DECODER_ERROR:
 * @el:     the base video decoder element that generates the error
 * @w:      element defined weight of the error, added to error count
 * @domain: like CORE, LIBRARY, RESOURCE or STREAM (see #gstreamer-GstGError)
 * @code:   error code defined for that domain (see #gstreamer-GstGError)
 * @text:   the message to display (format string and args enclosed in
 *          parentheses)
 * @debug:  debugging information for the message (format string and args
 *          enclosed in parentheses)
 * @ret:    variable to receive return value
 *
 * Utility function that video decoder elements can use in case they encountered
 * a data processing error that may be fatal for the current "data unit" but
 * need not prevent subsequent decoding. Such errors are counted and if there
 * are too many, as configured in the context's max_errors, the pipeline will
 * post an error message and the application will be requested to stop further
 * media processing. Otherwise, it is considered a "glitch" and only a warning
 * is logged. In either case, @ret is set to the proper value to
 * return to upstream/caller (indicating either GST_FLOW_ERROR or GST_FLOW_OK).
 */
#define GST_VIDEO_DECODER_ERROR(el, w, domain, code, text, debug, ret)      \
G_STMT_START {                                                              \
  gchar *__txt = _gst_element_error_printf text;                            \
  gchar *__dbg = _gst_element_error_printf debug;                           \
  GstVideoDecoder *__dec = GST_VIDEO_DECODER (el);                          \
  ret = _gst_video_decoder_error (__dec, w, GST_ ## domain ## _ERROR,       \
      GST_ ## domain ## _ERROR_ ## code, __txt, __dbg, __FILE__,            \
      GST_FUNCTION, __LINE__);                                              \
} G_STMT_END

/**
 * GST_VIDEO_DECODER_MAX_ERRORS:
 *
 * Default maximum number of errors tolerated before signaling error.
 */
#define GST_VIDEO_DECODER_MAX_ERRORS     10
170 * The opaque #GstVideoDecoder data structure.
172 struct _GstVideoDecoder
181 /* protects all data processing, i.e. is locked
182 * in the chain function, finish_frame and when
183 * processing serialized events */
184 GRecMutex stream_lock;
186 /* MT-protected (with STREAM_LOCK) */
187 GstSegment input_segment;
188 GstSegment output_segment;
190 GstVideoDecoderPrivate *priv;
193 gpointer padding[GST_PADDING_LARGE];
197 * GstVideoDecoderClass:
199 * Called when the element changes to GST_STATE_READY.
200 * Allows opening external resources.
202 * Called when the element changes to GST_STATE_NULL.
203 * Allows closing external resources.
205 * Called when the element starts processing.
206 * Allows opening external resources.
208 * Called when the element stops processing.
209 * Allows closing external resources.
210 * @set_format: Notifies subclass of incoming data format (caps).
211 * @parse: Required for non-packetized input.
212 * Allows chopping incoming data into manageable units (frames)
213 * for subsequent decoding.
215 * Allows subclass (decoder) to perform post-seek semantics reset.
217 * @handle_frame: Provides input data frame to subclass.
219 * Called to request subclass to dispatch any pending remaining
220 * data at EOS. Sub-classes can refuse to decode new data after.
222 * Called to request subclass to decode any data it can at this
223 * point, but that more data may arrive after. (e.g. at segment end).
224 * Sub-classes should be prepared to handle new data afterward,
225 * or seamless segment processing will break. Since: 1.6
226 * @sink_event: Optional.
227 * Event handler on the sink pad. This function should return
228 * TRUE if the event was handled and should be discarded
229 * (i.e. not unref'ed).
230 * Subclasses should chain up to the parent implementation to
231 * invoke the default handler.
232 * @src_event: Optional.
233 * Event handler on the source pad. This function should return
234 * TRUE if the event was handled and should be discarded
235 * (i.e. not unref'ed).
236 * Subclasses should chain up to the parent implementation to
237 * invoke the default handler.
238 * @negotiate: Optional.
239 * Negotiate with downstream and configure buffer pools, etc.
240 * Subclasses should chain up to the parent implementation to
241 * invoke the default handler.
242 * @decide_allocation: Optional.
243 * Setup the allocation parameters for allocating output
244 * buffers. The passed in query contains the result of the
245 * downstream allocation query.
246 * Subclasses should chain up to the parent implementation to
247 * invoke the default handler.
248 * @propose_allocation: Optional.
249 * Propose buffer allocation parameters for upstream elements.
250 * Subclasses should chain up to the parent implementation to
251 * invoke the default handler.
253 * Flush all remaining data from the decoder without
254 * pushing it downstream. Since: 1.2
255 * @sink_query: Optional.
256 * Query handler on the sink pad. This function should
257 * return TRUE if the query could be performed. Subclasses
258 * should chain up to the parent implementation to invoke the
259 * default handler. Since: 1.4
260 * @src_query: Optional.
261 * Query handler on the source pad. This function should
262 * return TRUE if the query could be performed. Subclasses
263 * should chain up to the parent implementation to invoke the
264 * default handler. Since: 1.4
265 * @getcaps: Optional.
266 * Allows for a custom sink getcaps implementation.
267 * If not implemented, default returns
268 * gst_video_decoder_proxy_getcaps
269 * applied to sink template caps.
270 * @transform_meta: Optional. Transform the metadata on the input buffer to the
271 * output buffer. By default this method is copies all meta without
272 * tags and meta with only the "video" tag. subclasses can
273 * implement this method and return %TRUE if the metadata is to be
276 * Subclasses can override any of the available virtual methods or not, as
277 * needed. At minimum @handle_frame needs to be overridden, and @set_format
278 * and likely as well. If non-packetized input is supported or expected,
279 * @parse needs to be overridden as well.
281 struct _GstVideoDecoderClass
284 GstElementClass element_class;
287 gboolean (*open) (GstVideoDecoder *decoder);
289 gboolean (*close) (GstVideoDecoder *decoder);
291 gboolean (*start) (GstVideoDecoder *decoder);
293 gboolean (*stop) (GstVideoDecoder *decoder);
295 GstFlowReturn (*parse) (GstVideoDecoder *decoder,
296 GstVideoCodecFrame *frame,
300 gboolean (*set_format) (GstVideoDecoder *decoder,
301 GstVideoCodecState * state);
303 gboolean (*reset) (GstVideoDecoder *decoder,
306 GstFlowReturn (*finish) (GstVideoDecoder *decoder);
308 GstFlowReturn (*handle_frame) (GstVideoDecoder *decoder,
309 GstVideoCodecFrame *frame);
311 gboolean (*sink_event) (GstVideoDecoder *decoder,
314 gboolean (*src_event) (GstVideoDecoder *decoder,
317 gboolean (*negotiate) (GstVideoDecoder *decoder);
319 gboolean (*decide_allocation) (GstVideoDecoder *decoder, GstQuery *query);
321 gboolean (*propose_allocation) (GstVideoDecoder *decoder, GstQuery * query);
323 gboolean (*flush) (GstVideoDecoder *decoder);
325 gboolean (*sink_query) (GstVideoDecoder *decoder,
328 gboolean (*src_query) (GstVideoDecoder *decoder,
331 GstCaps* (*getcaps) (GstVideoDecoder *decoder,
334 GstFlowReturn (*drain) (GstVideoDecoder *decoder);
336 gboolean (*transform_meta) (GstVideoDecoder *decoder,
337 GstVideoCodecFrame *frame,
341 gpointer padding[GST_PADDING_LARGE-6];
/**
 * GstVideoDecoderRequestSyncPointFlags:
 * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT: discard all following
 *     input until the next sync point.
 * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT: discard all following
 *     output until the next sync point.
 *
 * Flags to be used in combination with gst_video_decoder_request_sync_point().
 * See the function documentation for more details.
 *
 * Since: 1.20
 */
typedef enum
{
  GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT  = (1<<0),
  GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT = (1<<1),
} GstVideoDecoderRequestSyncPointFlags;
362 GType gst_video_decoder_get_type (void);
364 /* Context parameters */
367 void gst_video_decoder_set_packetized (GstVideoDecoder * decoder,
368 gboolean packetized);
371 gboolean gst_video_decoder_get_packetized (GstVideoDecoder * decoder);
374 void gst_video_decoder_set_estimate_rate (GstVideoDecoder * dec,
378 gint gst_video_decoder_get_estimate_rate (GstVideoDecoder * dec);
381 void gst_video_decoder_set_max_errors (GstVideoDecoder * dec,
385 gint gst_video_decoder_get_max_errors (GstVideoDecoder * dec);
388 void gst_video_decoder_set_needs_format (GstVideoDecoder * dec,
392 gboolean gst_video_decoder_get_needs_format (GstVideoDecoder * dec);
395 void gst_video_decoder_set_needs_sync_point (GstVideoDecoder * dec,
399 gboolean gst_video_decoder_get_needs_sync_point (GstVideoDecoder * dec);
402 void gst_video_decoder_set_latency (GstVideoDecoder *decoder,
403 GstClockTime min_latency,
404 GstClockTime max_latency);
407 void gst_video_decoder_get_latency (GstVideoDecoder *decoder,
408 GstClockTime *min_latency,
409 GstClockTime *max_latency);
412 void gst_video_decoder_get_allocator (GstVideoDecoder *decoder,
413 GstAllocator **allocator,
414 GstAllocationParams *params);
417 GstBufferPool *gst_video_decoder_get_buffer_pool (GstVideoDecoder *decoder);
422 GstVideoCodecFrame *gst_video_decoder_get_frame (GstVideoDecoder *decoder,
426 GstVideoCodecFrame *gst_video_decoder_get_oldest_frame (GstVideoDecoder *decoder);
429 GList * gst_video_decoder_get_frames (GstVideoDecoder *decoder);
431 /* Parsing related methods */
434 void gst_video_decoder_add_to_frame (GstVideoDecoder *decoder,
438 GstFlowReturn gst_video_decoder_have_frame (GstVideoDecoder *decoder);
441 gsize gst_video_decoder_get_pending_frame_size (GstVideoDecoder *decoder);
444 GstBuffer *gst_video_decoder_allocate_output_buffer (GstVideoDecoder * decoder);
447 GstFlowReturn gst_video_decoder_allocate_output_frame_with_params (GstVideoDecoder *decoder,
448 GstVideoCodecFrame * frame,
449 GstBufferPoolAcquireParams *params);
452 GstFlowReturn gst_video_decoder_allocate_output_frame (GstVideoDecoder *decoder,
453 GstVideoCodecFrame *frame);
456 GstVideoCodecState *gst_video_decoder_set_output_state (GstVideoDecoder *decoder,
457 GstVideoFormat fmt, guint width, guint height,
458 GstVideoCodecState *reference);
461 GstVideoCodecState *gst_video_decoder_set_interlaced_output_state (GstVideoDecoder *decoder,
462 GstVideoFormat fmt, GstVideoInterlaceMode interlace_mode,
463 guint width, guint height, GstVideoCodecState *reference);
466 GstVideoCodecState *gst_video_decoder_get_output_state (GstVideoDecoder *decoder);
469 gboolean gst_video_decoder_negotiate (GstVideoDecoder * decoder);
472 GstClockTimeDiff gst_video_decoder_get_max_decode_time (GstVideoDecoder *decoder,
473 GstVideoCodecFrame *frame);
476 gdouble gst_video_decoder_get_qos_proportion (GstVideoDecoder * decoder);
479 GstFlowReturn gst_video_decoder_finish_frame (GstVideoDecoder *decoder,
480 GstVideoCodecFrame *frame);
483 GstFlowReturn gst_video_decoder_drop_frame (GstVideoDecoder *dec,
484 GstVideoCodecFrame *frame);
487 void gst_video_decoder_request_sync_point (GstVideoDecoder *dec,
488 GstVideoCodecFrame *frame,
489 GstVideoDecoderRequestSyncPointFlags flags);
492 void gst_video_decoder_release_frame (GstVideoDecoder * dec,
493 GstVideoCodecFrame * frame);
496 void gst_video_decoder_merge_tags (GstVideoDecoder *decoder,
497 const GstTagList *tags,
498 GstTagMergeMode mode);
501 GstCaps * gst_video_decoder_proxy_getcaps (GstVideoDecoder * decoder,
506 void gst_video_decoder_set_use_default_pad_acceptcaps (GstVideoDecoder * decoder,
509 G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoDecoder, gst_object_unref)