#include "gstvideoencoder.h"
#include "gstvideoutils.h"
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
+
#include <string.h>
GST_DEBUG_CATEGORY (videoencoder_debug);
/* FIXME : (and introduce a context ?) */
gboolean drained;
gboolean at_eos;
+ gboolean do_caps;
gint64 min_latency;
gint64 max_latency;
GList *force_key_unit; /* List of pending forced keyunits */
- guint64 system_frame_number;
+ guint32 system_frame_number;
GList *frames; /* Protected with OBJECT_LOCK */
GstVideoCodecState *input_state;
gint64 bytes;
gint64 time;
+
+ GstAllocator *allocator;
+ GstAllocationParams params;
};
typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
return evt;
}
+static GstElementClass *parent_class = NULL;
+static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
+static void gst_video_encoder_init (GstVideoEncoder * enc,
+ GstVideoEncoderClass * klass);
+
static void gst_video_encoder_finalize (GObject * object);
-static gboolean gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps);
-static GstCaps *gst_video_encoder_sink_getcaps (GstPad * pad);
-static gboolean gst_video_encoder_src_event (GstPad * pad, GstEvent * event);
-static gboolean gst_video_encoder_sink_event (GstPad * pad, GstEvent * event);
-static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstBuffer * buf);
+static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
+ GstCaps * caps);
+static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
+ GstCaps * filter);
+static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
element, GstStateChange transition);
-static const GstQueryType *gst_video_encoder_get_query_types (GstPad * pad);
-static gboolean gst_video_encoder_src_query (GstPad * pad, GstQuery * query);
+static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
- encoder, GstBuffer * buf, GstClockTime timestamp, GstClockTime duration);
-
-static void
-_do_init (GType object_type)
+ encoder, GstBuffer * buf, GstClockTime pts, GstClockTime dts,
+ GstClockTime duration);
+
+static gboolean gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
+ GstEvent * event);
+static gboolean gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
+ GstEvent * event);
+static gboolean gst_video_encoder_decide_allocation_default (GstVideoEncoder *
+ encoder, GstQuery * query);
+static gboolean gst_video_encoder_propose_allocation_default (GstVideoEncoder *
+ encoder, GstQuery * query);
+
+/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
+ * method to get to the padtemplates */
+GType
+gst_video_encoder_get_type (void)
{
-  const GInterfaceInfo preset_interface_info = {
-    NULL, /* interface_init */
-    NULL, /* interface_finalize */
-    NULL /* interface_data */
-  };
-
-  g_type_add_interface_static (object_type, GST_TYPE_PRESET,
-      &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstVideoEncoder, gst_video_encoder,
-    GstElement, GST_TYPE_ELEMENT, _do_init);
-
-static void
-gst_video_encoder_base_init (gpointer g_class)
-{
-  GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
-      "Base Video Encoder");
+  /* g_once_init_enter()/g_once_init_leave() make the registration
+   * thread-safe and guarantee it runs exactly once */
+  static volatile gsize type = 0;
+
+  if (g_once_init_enter (&type)) {
+    GType _type;
+    static const GTypeInfo info = {
+      sizeof (GstVideoEncoderClass),
+      NULL,                     /* base_init */
+      NULL,                     /* base_finalize */
+      (GClassInitFunc) gst_video_encoder_class_init,
+      NULL,                     /* class_finalize */
+      NULL,                     /* class_data */
+      sizeof (GstVideoEncoder),
+      0,                        /* n_preallocs */
+      (GInstanceInitFunc) gst_video_encoder_init,
+    };
+    const GInterfaceInfo preset_interface_info = {
+      NULL, /* interface_init */
+      NULL, /* interface_finalize */
+      NULL /* interface_data */
+    };
+
+    /* abstract: only subclasses may be instantiated */
+    _type = g_type_register_static (GST_TYPE_ELEMENT,
+        "GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
+    /* implement GstPreset so encoder settings can be saved/restored */
+    g_type_add_interface_static (_type, GST_TYPE_PRESET,
+        &preset_interface_info);
+    g_once_init_leave (&type, _type);
+  }
+  return type;
}
static void
gobject_class = G_OBJECT_CLASS (klass);
gstelement_class = GST_ELEMENT_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
+ "Base Video Encoder");
+
+ parent_class = g_type_class_peek_parent (klass);
+
g_type_class_add_private (klass, sizeof (GstVideoEncoderPrivate));
gobject_class->finalize = gst_video_encoder_finalize;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);
+
+ klass->sink_event = gst_video_encoder_sink_event_default;
+ klass->src_event = gst_video_encoder_src_event_default;
+ klass->propose_allocation = gst_video_encoder_propose_allocation_default;
+ klass->decide_allocation = gst_video_encoder_decide_allocation_default;
}
static void
gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
gst_pad_set_event_function (pad,
GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
- gst_pad_set_setcaps_function (pad,
- GST_DEBUG_FUNCPTR (gst_video_encoder_sink_setcaps));
- gst_pad_set_getcaps_function (pad,
- GST_DEBUG_FUNCPTR (gst_video_encoder_sink_getcaps));
+ gst_pad_set_query_function (pad,
+ GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);
pad_template =
encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");
- gst_pad_set_query_type_function (pad,
- GST_DEBUG_FUNCPTR (gst_video_encoder_get_query_types));
gst_pad_set_query_function (pad,
GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
gst_pad_set_event_function (pad,
gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
- g_static_rec_mutex_init (&encoder->stream_lock);
+ g_rec_mutex_init (&encoder->stream_lock);
priv->at_eos = FALSE;
priv->headers = NULL;
* @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
*
* Set the codec headers to be sent downstream whenever requested.
- *
- * Since: 0.10.36
*/
void
gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
}
static gboolean
-gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
{
- GstVideoEncoder *encoder;
GstVideoEncoderClass *encoder_class;
GstVideoCodecState *state;
gboolean ret;
gboolean samecaps = FALSE;
- encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
/* subclass should do something here ... */
if (!ret)
GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);
- gst_object_unref (encoder);
-
return ret;
parse_fail:
{
GST_WARNING_OBJECT (encoder, "Failed to parse caps");
- gst_object_unref (encoder);
return FALSE;
}
}
* gst_video_encoder_proxy_getcaps:
* @enc: a #GstVideoEncoder
* @caps: initial caps
+ * @filter: filter caps
*
* Returns caps that express @caps (or sink template caps if @caps == NULL)
* restricted to resolution/format/... combinations supported by downstream
* elements (e.g. muxers).
*
* Returns: a #GstCaps owned by caller
- *
- * Since: 0.10.36
*/
GstCaps *
-gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps)
+gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
+ GstCaps * filter)
{
- const GstCaps *templ_caps;
+ GstCaps *templ_caps;
GstCaps *allowed;
GstCaps *fcaps, *filter_caps;
gint i, j;
/* Allow downstream to specify width/height/framerate/PAR constraints
* and forward them upstream for video converters to handle
*/
- templ_caps = caps ? caps : gst_pad_get_pad_template_caps (encoder->sinkpad);
+ templ_caps =
+ caps ? gst_caps_ref (caps) :
+ gst_pad_get_pad_template_caps (encoder->sinkpad);
allowed = gst_pad_get_allowed_caps (encoder->srcpad);
if (!allowed || gst_caps_is_empty (allowed) || gst_caps_is_any (allowed)) {
- fcaps = gst_caps_copy (templ_caps);
+ fcaps = templ_caps;
goto done;
}
const GValue *val;
GstStructure *s;
- s = gst_structure_id_empty_new (q_name);
+ s = gst_structure_new_id_empty (q_name);
if ((val = gst_structure_get_value (allowed_s, "width")))
gst_structure_set_value (s, "width", val);
if ((val = gst_structure_get_value (allowed_s, "height")))
if ((val = gst_structure_get_value (allowed_s, "pixel-aspect-ratio")))
gst_structure_set_value (s, "pixel-aspect-ratio", val);
- gst_caps_merge_structure (filter_caps, s);
+ filter_caps = gst_caps_merge_structure (filter_caps, s);
}
}
fcaps = gst_caps_intersect (filter_caps, templ_caps);
gst_caps_unref (filter_caps);
+ gst_caps_unref (templ_caps);
+
+ if (filter) {
+ GST_LOG_OBJECT (encoder, "intersecting with %" GST_PTR_FORMAT, filter);
+ filter_caps = gst_caps_intersect (fcaps, filter);
+ gst_caps_unref (fcaps);
+ fcaps = filter_caps;
+ }
done:
gst_caps_replace (&allowed, NULL);
}
static GstCaps *
-gst_video_encoder_sink_getcaps (GstPad * pad)
+gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
{
- GstVideoEncoder *encoder;
GstVideoEncoderClass *klass;
GstCaps *caps;
- encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
if (klass->getcaps)
- caps = klass->getcaps (encoder);
+ caps = klass->getcaps (encoder, filter);
else
- caps = gst_video_encoder_proxy_getcaps (encoder, NULL);
- gst_object_unref (encoder);
+ caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
return caps;
}
+/* Default GstVideoEncoderClass::decide_allocation implementation.
+ * Takes the first allocator proposed by downstream (if any), otherwise
+ * falls back to the default allocator with default parameters, and
+ * writes the decision back into @query.  Always returns TRUE. */
+static gboolean
+gst_video_encoder_decide_allocation_default (GstVideoEncoder * encoder,
+    GstQuery * query)
+{
+  GstAllocator *allocator = NULL;
+  GstAllocationParams params;
+  gboolean update_allocator;
+
+  /* we got configuration from our peer or the decide_allocation method,
+   * parse them */
+  if (gst_query_get_n_allocation_params (query) > 0) {
+    /* try the allocator */
+    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+    update_allocator = TRUE;
+  } else {
+    allocator = NULL;
+    gst_allocation_params_init (&params);
+    update_allocator = FALSE;
+  }
+
+  /* update the query in place so callers see the chosen allocator/params */
+  if (update_allocator)
+    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
+  else
+    gst_query_add_allocation_param (query, allocator, &params);
+  /* gst_query_parse_nth_allocation_param () returned a ref on the allocator */
+  if (allocator)
+    gst_object_unref (allocator);
+
+  return TRUE;
+}
+
+/* Default GstVideoEncoderClass::propose_allocation implementation.
+ * Answers an upstream ALLOCATION query on the sink pad: if no pool was
+ * proposed yet, offer a GstVideoBufferPool sized for one full raw video
+ * frame of the queried caps and advertise GstVideoMeta support. */
+static gboolean
+gst_video_encoder_propose_allocation_default (GstVideoEncoder * encoder,
+    GstQuery * query)
+{
+  GstCaps *caps;
+  GstVideoInfo info;
+  GstBufferPool *pool;
+  guint size;
+
+  gst_query_parse_allocation (query, &caps, NULL);
+
+  if (caps == NULL)
+    return FALSE;
+
+  if (!gst_video_info_from_caps (&info, caps))
+    return FALSE;
+
+  /* one buffer must hold a complete raw video frame */
+  size = GST_VIDEO_INFO_SIZE (&info);
+
+  if (gst_query_get_n_allocation_pools (query) == 0) {
+    GstStructure *structure;
+    GstAllocator *allocator = NULL;
+    GstAllocationParams params = { 0, 15, 0, 0 };       /* align=15 -> 16-byte */
+
+    if (gst_query_get_n_allocation_params (query) > 0)
+      gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+    else
+      gst_query_add_allocation_param (query, allocator, &params);
+
+    pool = gst_video_buffer_pool_new ();
+
+    structure = gst_buffer_pool_get_config (pool);
+    gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
+    gst_buffer_pool_config_set_allocator (structure, allocator, &params);
+
+    /* gst_query_parse_nth_allocation_param () returned a ref on the allocator */
+    if (allocator)
+      gst_object_unref (allocator);
+
+    if (!gst_buffer_pool_set_config (pool, structure))
+      goto config_failed;
+
+    /* min = max = 0: no constraint on the number of buffers */
+    gst_query_add_allocation_pool (query, pool, size, 0, 0);
+    gst_object_unref (pool);
+    gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
+  }
+
+  return TRUE;
+
+  /* ERRORS */
+config_failed:
+  {
+    GST_ERROR_OBJECT (encoder, "failed to set config");
+    gst_object_unref (pool);
+    return FALSE;
+  }
+}
+
+/* Sink pad query handler: answers CAPS queries via the getcaps machinery
+ * and hands ALLOCATION queries to the propose_allocation vmethod. */
+static gboolean
+gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  GstVideoEncoder *encoder;
+  gboolean res = FALSE;
+
+  encoder = GST_VIDEO_ENCODER (parent);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CAPS:
+    {
+      GstCaps *filter, *caps;
+
+      gst_query_parse_caps (query, &filter);
+      /* sink_getcaps returns a ref we own; the query takes its own ref */
+      caps = gst_video_encoder_sink_getcaps (encoder, filter);
+      gst_query_set_caps_result (query, caps);
+      gst_caps_unref (caps);
+      res = TRUE;
+      break;
+    }
+    case GST_QUERY_ALLOCATION:
+    {
+      GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+      /* res stays FALSE when the subclass cleared the vmethod */
+      if (klass->propose_allocation)
+        res = klass->propose_allocation (encoder, query);
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, parent, query);
+      break;
+  }
+  return res;
+}
+
static void
gst_video_encoder_finalize (GObject * object)
{
g_list_foreach (encoder->priv->headers, (GFunc) gst_buffer_unref, NULL);
g_list_free (encoder->priv->headers);
}
- g_static_rec_mutex_free (&encoder->stream_lock);
+ g_rec_mutex_clear (&encoder->stream_lock);
+
+ if (encoder->priv->allocator) {
+ gst_object_unref (encoder->priv->allocator);
+ encoder->priv->allocator = NULL;
+ }
G_OBJECT_CLASS (parent_class)->finalize (object);
}
gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
{
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gboolean update;
- double rate;
- double applied_rate;
- GstFormat format;
- gint64 start;
- gint64 stop;
- gint64 position;
+ GstSegment segment;
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
- gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
- &format, &start, &stop, &position);
- GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, "
- "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
- ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
- GST_TIME_ARGS (position));
+ gst_event_copy_segment (event, &segment);
- if (format != GST_FORMAT_TIME) {
- GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
+ GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
+
+ if (segment.format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (encoder, "received non TIME segment");
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
break;
}
- gst_segment_set_newsegment_full (&encoder->output_segment, update, rate,
- applied_rate, format, start, stop, position);
+ encoder->output_segment = segment;
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
break;
}
}
static gboolean
-gst_video_encoder_sink_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
+gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
+ GstEvent * event)
{
GstVideoEncoderClass *encoder_class;
gboolean ret = FALSE;
encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ ret = TRUE;
+ encoder->priv->do_caps = TRUE;
+ gst_event_unref (event);
+ event = NULL;
+ break;
+ }
case GST_EVENT_EOS:
{
GstFlowReturn flow_ret;
flow_ret = GST_FLOW_OK;
}
- ret = (flow_ret == GST_VIDEO_ENCODER_FLOW_DROPPED);
+ ret = (flow_ret == GST_FLOW_OK);
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
break;
}
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gboolean update;
- double rate;
- double applied_rate;
- GstFormat format;
- gint64 start;
- gint64 stop;
- gint64 position;
+ GstSegment segment;
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
- gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
- &format, &start, &stop, &position);
- GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, "
- "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
- ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
- GST_TIME_ARGS (position));
+ gst_event_copy_segment (event, &segment);
+
+ GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
- if (format != GST_FORMAT_TIME) {
+ if (segment.format != GST_FORMAT_TIME) {
GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
break;
encoder->priv->at_eos = FALSE;
- gst_segment_set_newsegment_full (&encoder->input_segment, update, rate,
- applied_rate, format, start, stop, position);
+ encoder->input_segment = segment;
+ ret = TRUE;
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
break;
}
GST_TIME_ARGS (running_time), all_headers, count);
}
gst_event_unref (event);
+ event = NULL;
ret = TRUE;
}
break;
break;
}
+ /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
+ * For EOS this is required because no buffer or serialized event
+ * will come after EOS and nothing could trigger another
+   * _finish_frame() call.
+ * If the subclass handles sending of EOS manually it can simply
+ * not chain up to the parent class' event handler
+ *
+ * For FLUSH_STOP this is required because it is expected
+ * to be forwarded immediately and no buffers are queued anyway.
+ */
+ if (event) {
+ if (!GST_EVENT_IS_SERIALIZED (event)
+ || GST_EVENT_TYPE (event) == GST_EVENT_EOS
+ || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
+ ret = gst_video_encoder_push_event (encoder, event);
+ } else {
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+ encoder->priv->current_frame_events =
+ g_list_prepend (encoder->priv->current_frame_events, event);
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+ ret = TRUE;
+ }
+ }
+
return ret;
}
static gboolean
-gst_video_encoder_sink_event (GstPad * pad, GstEvent * event)
+gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstVideoEncoder *enc;
GstVideoEncoderClass *klass;
- gboolean handled = FALSE;
gboolean ret = TRUE;
- enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ enc = GST_VIDEO_ENCODER (parent);
klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
GST_EVENT_TYPE_NAME (event));
if (klass->sink_event)
- handled = klass->sink_event (enc, event);
-
- if (!handled)
- handled = gst_video_encoder_sink_eventfunc (enc, event);
-
- if (!handled) {
- /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
- * For EOS this is required because no buffer or serialized event
- * will come after EOS and nothing could trigger another
- * _finish_frame() call. *
- * If the subclass handles sending of EOS manually it can return
- * _DROPPED from ::finish() and all other subclasses should have
- * decoded/flushed all remaining data before this
- *
- * For FLUSH_STOP this is required because it is expected
- * to be forwarded immediately and no buffers are queued anyway.
- */
- if (!GST_EVENT_IS_SERIALIZED (event)
- || GST_EVENT_TYPE (event) == GST_EVENT_EOS
- || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
- ret = gst_video_encoder_push_event (enc, event);
- } else {
- GST_VIDEO_ENCODER_STREAM_LOCK (enc);
- enc->priv->current_frame_events =
- g_list_prepend (enc->priv->current_frame_events, event);
- GST_VIDEO_ENCODER_STREAM_UNLOCK (enc);
- }
- }
-
- GST_DEBUG_OBJECT (enc, "event handled");
+ ret = klass->sink_event (enc, event);
- gst_object_unref (enc);
return ret;
}
static gboolean
-gst_video_encoder_src_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
+gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
+ GstEvent * event)
{
- gboolean handled = FALSE;
+ gboolean ret = FALSE;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
GST_TIME_ARGS (running_time), all_headers, count);
}
gst_event_unref (event);
- handled = TRUE;
+ event = NULL;
+ ret = TRUE;
}
break;
}
break;
}
- return handled;
+ if (event)
+ ret =
+ gst_pad_event_default (encoder->srcpad, GST_OBJECT_CAST (encoder),
+ event);
+
+ return ret;
}
static gboolean
-gst_video_encoder_src_event (GstPad * pad, GstEvent * event)
+gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstVideoEncoder *encoder;
GstVideoEncoderClass *klass;
gboolean ret = FALSE;
- gboolean handled = FALSE;
- encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ encoder = GST_VIDEO_ENCODER (parent);
klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
if (klass->src_event)
- handled = klass->src_event (encoder, event);
-
- if (!handled)
- handled = gst_video_encoder_src_eventfunc (encoder, event);
-
- if (!handled)
- ret = gst_pad_event_default (pad, event);
-
- gst_object_unref (encoder);
+ ret = klass->src_event (encoder, event);
return ret;
}
-static const GstQueryType *
-gst_video_encoder_get_query_types (GstPad * pad)
-{
- static const GstQueryType query_types[] = {
- GST_QUERY_CONVERT,
- GST_QUERY_LATENCY,
- 0
- };
-
- return query_types;
-}
-
static gboolean
-gst_video_encoder_src_query (GstPad * pad, GstQuery * query)
+gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
GstVideoEncoderPrivate *priv;
GstVideoEncoder *enc;
gboolean res;
- GstPad *peerpad;
- enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ enc = GST_VIDEO_ENCODER (parent);
priv = enc->priv;
- peerpad = gst_pad_get_peer (enc->sinkpad);
GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);
gboolean live;
GstClockTime min_latency, max_latency;
- res = gst_pad_query (peerpad, query);
+ res = gst_pad_peer_query (enc->sinkpad, query);
if (res) {
gst_query_parse_latency (query, &live, &min_latency, &max_latency);
GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
GST_OBJECT_LOCK (enc);
min_latency += priv->min_latency;
- if (max_latency != GST_CLOCK_TIME_NONE) {
- max_latency += priv->max_latency;
+ if (enc->priv->max_latency == GST_CLOCK_TIME_NONE) {
+ max_latency = GST_CLOCK_TIME_NONE;
+ } else if (max_latency != GST_CLOCK_TIME_NONE) {
+ max_latency += enc->priv->max_latency;
}
GST_OBJECT_UNLOCK (enc);
}
break;
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
}
- gst_object_unref (peerpad);
- gst_object_unref (enc);
return res;
error:
GST_DEBUG_OBJECT (enc, "query failed");
- gst_object_unref (peerpad);
- gst_object_unref (enc);
return res;
}
static GstVideoCodecFrame *
gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
- GstClockTime timestamp, GstClockTime duration)
+ GstClockTime pts, GstClockTime dts, GstClockTime duration)
{
GstVideoEncoderPrivate *priv = encoder->priv;
GstVideoCodecFrame *frame;
frame->events = priv->current_frame_events;
priv->current_frame_events = NULL;
frame->input_buffer = buf;
- frame->pts = timestamp;
+ frame->pts = pts;
+ frame->dts = dts;
frame->duration = duration;
return frame;
static GstFlowReturn
-gst_video_encoder_chain (GstPad * pad, GstBuffer * buf)
+gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstVideoEncoder *encoder;
GstVideoEncoderPrivate *priv;
GstVideoEncoderClass *klass;
GstVideoCodecFrame *frame;
+ GstClockTime pts, dts, duration;
GstFlowReturn ret = GST_FLOW_OK;
- gint64 start, stop = GST_CLOCK_TIME_NONE, cstart, cstop;
+ guint64 start, stop, cstart, cstop;
- encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ encoder = GST_VIDEO_ENCODER (parent);
priv = encoder->priv;
klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);
- GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
-
- /* .... ?? */
- if (!GST_PAD_CAPS (pad)) {
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto done;
+ if (G_UNLIKELY (encoder->priv->do_caps)) {
+ GstCaps *caps = gst_pad_get_current_caps (encoder->sinkpad);
+ if (!caps)
+ goto not_negotiated;
+ if (!gst_video_encoder_setcaps (encoder, caps)) {
+ gst_caps_unref (caps);
+ goto not_negotiated;
+ }
+ encoder->priv->do_caps = FALSE;
}
- start = GST_BUFFER_TIMESTAMP (buf);
- if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
- stop = start + GST_BUFFER_DURATION (buf);
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ pts = GST_BUFFER_PTS (buf);
+ dts = GST_BUFFER_DTS (buf);
+ duration = GST_BUFFER_DURATION (buf);
GST_LOG_OBJECT (encoder,
- "received buffer of size %d with ts %" GST_TIME_FORMAT
- ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
- GST_TIME_ARGS (start), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
+ "received buffer of size %" G_GSIZE_FORMAT " with PTS %" GST_TIME_FORMAT
+      ", DTS %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
+ gst_buffer_get_size (buf), GST_TIME_ARGS (pts), GST_TIME_ARGS (dts),
+ GST_TIME_ARGS (duration));
if (priv->at_eos) {
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto done;
}
+ start = pts;
+ if (GST_CLOCK_TIME_IS_VALID (duration))
+ stop = start + duration;
+ else
+ stop = GST_CLOCK_TIME_NONE;
+
/* Drop buffers outside of segment */
if (!gst_segment_clip (&encoder->output_segment,
GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
goto done;
}
- frame = gst_video_encoder_new_frame (encoder, buf, cstart, cstop - cstart);
+ frame =
+ gst_video_encoder_new_frame (encoder, buf, cstart, dts, cstop - cstart);
GST_OBJECT_LOCK (encoder);
if (priv->force_key_unit) {
running_time =
gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (buf));
+ cstart);
for (l = priv->force_key_unit; l; l = l->next) {
ForcedKeyUnitEvent *tmp = l->data;
}
GST_OBJECT_UNLOCK (encoder);
+ gst_video_codec_frame_ref (frame);
priv->frames = g_list_append (priv->frames, frame);
/* new data, more finish needed */
done:
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
- gst_object_unref (encoder);
-
return ret;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ELEMENT_ERROR (encoder, CORE, NEGOTIATION, (NULL),
+ ("encoder not initialized"));
+ gst_buffer_unref (buf);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
}
static GstStateChangeReturn
}
}
-static gboolean
-gst_video_encoder_set_src_caps (GstVideoEncoder * encoder)
+/**
+ * gst_video_encoder_negotiate:
+ * @encoder: a #GstVideoEncoder
+ *
+ * Negotiate with downstream elements based on the currently configured
+ * #GstVideoCodecState.
+ *
+ * Returns: %TRUE if the negotiation succeeded, else %FALSE.
+ */
+gboolean
+gst_video_encoder_negotiate (GstVideoEncoder * encoder)
{
+ GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+ GstAllocator *allocator;
+ GstAllocationParams params;
gboolean ret;
GstVideoCodecState *state = encoder->priv->output_state;
GstVideoInfo *info = &state->info;
+ GstQuery *query = NULL;
g_return_val_if_fail (state->caps != NULL, FALSE);
}
ret = gst_pad_set_caps (encoder->srcpad, state->caps);
+ if (!ret)
+ goto done;
+
+ query = gst_query_new_allocation (state->caps, TRUE);
+ if (!gst_pad_peer_query (encoder->srcpad, query)) {
+ GST_DEBUG_OBJECT (encoder, "didn't get downstream ALLOCATION hints");
+ }
+
+ g_assert (klass->decide_allocation != NULL);
+ ret = klass->decide_allocation (encoder, query);
+
+ GST_DEBUG_OBJECT (encoder, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, ret,
+ query);
+
+ if (!ret)
+ goto no_decide_allocation;
+
+ /* we got configuration from our peer or the decide_allocation method,
+ * parse them */
+ if (gst_query_get_n_allocation_params (query) > 0) {
+    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+  } else {
+    allocator = NULL;
+    gst_allocation_params_init (&params);
+ }
+
+ if (encoder->priv->allocator)
+ gst_object_unref (encoder->priv->allocator);
+ encoder->priv->allocator = allocator;
+ encoder->priv->params = params;
+
+done:
+ if (query)
+ gst_query_unref (query);
return ret;
+
+ /* Errors */
+no_decide_allocation:
+ {
+ GST_WARNING_OBJECT (encoder, "Subclass failed to decide allocation");
+ goto done;
+ }
+}
+
+/**
+ * gst_video_encoder_allocate_output_buffer:
+ * @encoder: a #GstVideoEncoder
+ * @size: size of the buffer
+ *
+ * Helper function that allocates a buffer to hold an encoded video frame
+ * for @encoder's current #GstVideoCodecState.
+ *
+ * Returns: (transfer full): allocated buffer
+ */
+GstBuffer *
+gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder, gsize size)
+{
+  GstBuffer *buffer;
+
+  g_return_val_if_fail (size > 0, NULL);
+
+  /* NOTE(review): bare GST_DEBUG, not GST_DEBUG_OBJECT — the message is not
+   * associated with this encoder instance in the logs; confirm intended */
+  GST_DEBUG ("alloc src buffer");
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  /* renegotiate first if the output format changed or downstream requested
+   * reconfiguration, so priv->allocator/params are up to date */
+  if (G_UNLIKELY (encoder->priv->output_state_changed
+          || (encoder->priv->output_state
+              && gst_pad_check_reconfigure (encoder->srcpad))))
+    gst_video_encoder_negotiate (encoder);
+
+  /* allocate with the allocator/params decided during negotiation */
+  buffer =
+      gst_buffer_new_allocate (encoder->priv->allocator, size,
+      &encoder->priv->params);
+
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return buffer;
+}
+
+/**
+ * gst_video_encoder_allocate_output_frame:
+ * @encoder: a #GstVideoEncoder
+ * @frame: a #GstVideoCodecFrame
+ * @size: size of the buffer
+ *
+ * Helper function that allocates a buffer to hold an encoded video frame for @encoder's
+ * current #GstVideoCodecState. Subclass should already have configured video
+ * state and set src pad caps.
+ *
+ * The buffer allocated here is owned by the frame and you should only
+ * keep references to the frame, not the buffer.
+ *
+ * Returns: %GST_FLOW_OK if an output buffer could be allocated
+ */
+GstFlowReturn
+gst_video_encoder_allocate_output_frame (GstVideoEncoder *
+    encoder, GstVideoCodecFrame * frame, gsize size)
+{
+  /* the frame must not already own an output buffer */
+  g_return_val_if_fail (frame->output_buffer == NULL, GST_FLOW_ERROR);
+  g_return_val_if_fail (size > 0, GST_FLOW_ERROR);
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  /* renegotiate first if the output format changed or downstream requested
+   * reconfiguration, so priv->allocator/params are up to date */
+  if (G_UNLIKELY (encoder->priv->output_state_changed
+          || (encoder->priv->output_state
+              && gst_pad_check_reconfigure (encoder->srcpad))))
+    gst_video_encoder_negotiate (encoder);
+
+  GST_LOG_OBJECT (encoder, "alloc buffer size %" G_GSIZE_FORMAT, size);
+
+  /* the allocated buffer is owned by the frame, not the caller */
+  frame->output_buffer =
+      gst_buffer_new_allocate (encoder->priv->allocator, size,
+      &encoder->priv->params);
+
+  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+  return frame->output_buffer ? GST_FLOW_OK : GST_FLOW_ERROR;
}
/**
* It is subsequently pushed downstream or provided to @pre_push.
* In any case, the frame is considered finished and released.
*
- * Returns: a #GstFlowReturn resulting from sending data downstream
+ * After calling this function the output buffer of the frame is to be
+ * considered read-only. This function will also change the metadata
+ * of the buffer.
*
- * Since: 0.10.36
+ * Returns: a #GstFlowReturn resulting from sending data downstream
*/
GstFlowReturn
gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
- if (G_UNLIKELY (priv->output_state_changed))
- gst_video_encoder_set_src_caps (encoder);
+ if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
+ && gst_pad_check_reconfigure (encoder->srcpad))))
+ gst_video_encoder_negotiate (encoder);
+
if (G_UNLIKELY (priv->output_state == NULL))
goto no_output_state;
frame->distance_from_sync = priv->distance_from_sync;
priv->distance_from_sync++;
- GST_BUFFER_TIMESTAMP (frame->output_buffer) = frame->pts;
+ GST_BUFFER_PTS (frame->output_buffer) = frame->pts;
+ GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
/* update rate estimate */
- priv->bytes += GST_BUFFER_SIZE (frame->output_buffer);
+ priv->bytes += gst_buffer_get_size (frame->output_buffer);
if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
priv->time += frame->duration;
} else {
for (tmp = priv->headers; tmp; tmp = tmp->next) {
GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
- copy = g_list_append (copy, gst_buffer_make_metadata_writable (tmpbuf));
+ copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
}
g_list_free (priv->headers);
priv->headers = copy;
for (tmp = priv->headers; tmp; tmp = tmp->next) {
GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
- gst_buffer_set_caps (tmpbuf, GST_PAD_CAPS (encoder->srcpad));
gst_buffer_ref (tmpbuf);
- priv->bytes += GST_BUFFER_SIZE (tmpbuf);
+ priv->bytes += gst_buffer_get_size (tmpbuf);
if (G_UNLIKELY (discont)) {
GST_LOG_OBJECT (encoder, "marking discont");
GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
}
- gst_buffer_set_caps (GST_BUFFER (frame->output_buffer),
- GST_PAD_CAPS (encoder->srcpad));
-
if (encoder_class->pre_push)
ret = encoder_class->pre_push (encoder, frame);
+ /* A reference always needs to be owned by the frame on the buffer.
+ * For that reason, we use a complete sub-buffer (zero-cost) to push
+ * downstream.
+ * The original buffer will be free-ed only when downstream AND the
+ * current implementation are done with the frame. */
if (ret == GST_FLOW_OK)
- ret = gst_pad_push (encoder->srcpad, frame->output_buffer);
-
- frame->output_buffer = NULL;
+ ret = gst_pad_push (encoder->srcpad, gst_buffer_ref (frame->output_buffer));
done:
/* handed out */
- priv->frames = g_list_remove (priv->frames, frame);
+ /* unref once from the list */
+ l = g_list_find (priv->frames, frame);
+ if (l) {
+ gst_video_codec_frame_unref (frame);
+ priv->frames = g_list_delete_link (priv->frames, l);
+ }
+ /* unref because this function takes ownership */
gst_video_codec_frame_unref (frame);
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
* Get the current #GstVideoCodecState
*
* Returns: (transfer full): #GstVideoCodecState describing format of video data.
- *
- * Since: 0.10.36
*/
GstVideoCodecState *
gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
* from the next call to #gst_video_encoder_finish_frame().
*
* Returns: (transfer full): the newly configured output state.
- *
- * Since: 0.10.36
*/
GstVideoCodecState *
gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
* @max_latency: maximum latency
*
* Informs baseclass of encoding latency.
- *
- * Since: 0.10.36
*/
void
gst_video_encoder_set_latency (GstVideoEncoder * encoder,
/**
* gst_video_encoder_get_latency:
* @encoder: a #GstVideoEncoder
- * @min_latency: (out) (allow-none): the configured minimum latency
- * @max_latency: (out) (allow-none): the configured maximum latency
+ * @min_latency: (out) (allow-none): address of variable in which to store the
+ * configured minimum latency, or %NULL
+ * @max_latency: (out) (allow-none): address of variable in which to store the
+ * configured maximum latency, or %NULL
*
- * Returns the configured encoding latency.
- *
- * Since: 0.10.36
+ * Query the configured encoding latency. Results will be returned via
+ * @min_latency and @max_latency.
*/
void
gst_video_encoder_get_latency (GstVideoEncoder * encoder,
*
* Get the oldest unfinished pending #GstVideoCodecFrame
*
- * Returns: oldest unfinished pending #GstVideoCodecFrame
- *
- * Since: 0.10.36
+ * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame
*/
GstVideoCodecFrame *
gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
{
- GList *g;
+ GstVideoCodecFrame *frame = NULL;
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
- g = encoder->priv->frames;
+ if (encoder->priv->frames)
+ frame = gst_video_codec_frame_ref (encoder->priv->frames->data);
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
- if (g == NULL)
- return NULL;
- return (GstVideoCodecFrame *) (g->data);
+ return (GstVideoCodecFrame *) frame;
}
/**
*
* Get a pending unfinished #GstVideoCodecFrame
*
- * Returns: (transfer none): pending unfinished #GstVideoCodecFrame identified by @frame_number.
- *
- * Since: 0.10.36
+ * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
*/
GstVideoCodecFrame *
gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
GstVideoCodecFrame *tmp = g->data;
if (tmp->system_frame_number == frame_number) {
- frame = tmp;
+ frame = gst_video_codec_frame_ref (tmp);
break;
}
}