{
PROP_0,
V4L2_STD_OBJECT_PROPS
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ , PROP_TBM_OUTPUT
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
};
#define gst_v4l2_video_dec_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoDec, gst_v4l2_video_dec,
GST_TYPE_VIDEO_DECODER);
+static GstFlowReturn gst_v4l2_video_dec_finish (GstVideoDecoder * decoder);
+
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+/* Push a serialized custom downstream "tizen/flush-buffer" event on the
+ * decoder source pad.  NOTE(review): presumably this tells downstream Tizen
+ * elements to release their TBM output buffers before the capture pool is
+ * stopped/flushed — confirm against the downstream sink implementation.
+ *
+ * @decoder: the video decoder; NULL is tolerated (logged and ignored).
+ *
+ * The push result is only logged (at WARNING so it shows in default Tizen
+ * log levels); it is not propagated to the caller. */
+static void gst_v4l2_video_dec_flush_buffer_event (GstVideoDecoder * decoder)
+{
+  gboolean ret = FALSE;
+
+  if (!decoder) {
+    GST_ERROR("no decoder");
+    return;
+  }
+
+  ret = gst_pad_push_event (decoder->srcpad,
+      gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM | GST_EVENT_TYPE_SERIALIZED,
+          gst_structure_new_empty("tizen/flush-buffer")));
+
+  GST_WARNING_OBJECT(decoder, "event push ret[%d] for flush-buffer", ret);
+}
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
static void
gst_v4l2_video_dec_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
switch (prop_id) {
- case PROP_OUTPUT_IO_MODE:
- gst_v4l2_object_set_property_helper (self->v4l2output, prop_id, value,
- pspec);
- break;
case PROP_CAPTURE_IO_MODE:
- gst_v4l2_object_set_property_helper (self->v4l2capture, prop_id, value,
- pspec);
+ if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ case PROP_TBM_OUTPUT:
+ self->v4l2capture->tbm_output = g_value_get_boolean (value);
+ GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
break;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
+
/* By default, only set on output */
default:
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
switch (prop_id) {
- case PROP_OUTPUT_IO_MODE:
- gst_v4l2_object_get_property_helper (self->v4l2output, prop_id, value,
- pspec);
- break;
case PROP_CAPTURE_IO_MODE:
- gst_v4l2_object_get_property_helper (self->v4l2capture, prop_id, value,
- pspec);
+ if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ case PROP_TBM_OUTPUT:
+ GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
+ g_value_set_boolean (value, self->v4l2capture->tbm_output);
break;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
/* By default read from output */
default:
if (gst_caps_is_empty (self->probed_sinkcaps))
goto no_encoded_format;
- self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
- gst_v4l2_object_get_raw_caps ());
-
- if (gst_caps_is_empty (self->probed_srccaps))
- goto no_raw_format;
-
return TRUE;
no_encoded_format:
GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
- (_("Encoder on device %s has no supported input format"),
- self->v4l2output->videodev), (NULL));
- goto failure;
-
-
-no_raw_format:
- GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
- (_("Encoder on device %s has no supported output format"),
+ (_("Decoder on device %s has no supported input format"),
self->v4l2output->videodev), (NULL));
goto failure;
/* Should have been flushed already */
g_assert (g_atomic_int_get (&self->active) == FALSE);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
gst_v4l2_object_stop (self->v4l2output);
gst_v4l2_object_stop (self->v4l2capture);
gst_video_codec_state_unref (self->input_state);
self->input_state = NULL;
- /* FIXME we probably need to do more work if pools are active */
+ gst_v4l2_video_dec_finish (decoder);
+ gst_v4l2_object_stop (self->v4l2output);
+
+ /* The renegotiation flow don't blend with the base class flow. To properly
+ * stop the capture pool, if the buffers can't be orphaned, we need to
+ * reclaim our buffers, which will happend through the allocation query.
+ * The allocation query is triggered by gst_video_decoder_negotiate() which
+ * requires the output caps to be set, but we can't know this information
+ * as we rely on the decoder, which requires the capture queue to be
+ * stopped.
+ *
+ * To workaround this issue, we simply run an allocation query with the
+ * old negotiated caps in order to drain/reclaim our buffers. That breaks
+ * the complexity and should not have much impact in performance since the
+ * following allocation query will happen on a drained pipeline and won't
+ * block. */
+ if (self->v4l2capture->pool &&
+ !gst_v4l2_buffer_pool_orphan (&self->v4l2capture->pool)) {
+ GstCaps *caps = gst_pad_get_current_caps (decoder->srcpad);
+ if (caps) {
+ GstQuery *query = gst_query_new_allocation (caps, FALSE);
+ gst_pad_peer_query (decoder->srcpad, query);
+ gst_query_unref (query);
+ gst_caps_unref (caps);
+ }
+ }
+
+ gst_v4l2_object_stop (self->v4l2capture);
+ self->output_flow = GST_FLOW_OK;
}
ret = gst_v4l2_object_set_format (self->v4l2output, state->caps, &error);
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
+ gst_v4l2_object_get_raw_caps ());
+
+ if (gst_caps_is_empty (self->probed_srccaps))
+ goto no_raw_format;
+
if (ret)
self->input_state = gst_video_codec_state_ref (state);
else
done:
  return ret;
+
+no_raw_format:
+  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+      (_("Decoder on device %s has no supported output format"),
+          self->v4l2output->videodev), (NULL));
+  /* set_format() returns gboolean.  Returning GST_FLOW_ERROR (-5) here is
+   * non-zero and therefore read as TRUE by the GstVideoDecoder base class,
+   * silently masking the failure — return FALSE instead. */
+  return FALSE;
}
static gboolean
gst_v4l2_object_unlock_stop (self->v4l2output);
gst_v4l2_object_unlock_stop (self->v4l2capture);
+ if (self->v4l2output->pool)
+ gst_v4l2_buffer_pool_flush (self->v4l2output->pool);
+
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
+ /* gst_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
+ * called after gst_v4l2_object_unlock_stop() stopped flushing the buffer
+ * pool. */
+ if (self->v4l2capture->pool)
+ gst_v4l2_buffer_pool_flush (self->v4l2capture->pool);
+
return TRUE;
}
/* If the decoder stop command succeeded, just wait until processing is
* finished */
+ GST_DEBUG_OBJECT (self, "Waiting for decoder stop");
GST_OBJECT_LOCK (task);
while (GST_TASK_STATE (task) == GST_TASK_STARTED)
GST_TASK_WAIT (task);
GST_DEBUG_OBJECT (decoder, "Done draining buffers");
+ /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */
+
done:
return ret;
}
+/* GstVideoDecoderClass::drain implementation.
+ *
+ * Drains by first finishing (gst_v4l2_video_dec_finish() waits for all
+ * queued input to be decoded and output) and then flushing internal state
+ * via gst_v4l2_video_dec_flush(), so the element is ready for more data.
+ *
+ * NOTE(review): the return values of finish/flush are deliberately ignored
+ * and GST_FLOW_OK is always reported — confirm this is intentional rather
+ * than an oversight. */
+static GstFlowReturn
+gst_v4l2_video_dec_drain (GstVideoDecoder * decoder)
+{
+  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+  GST_DEBUG_OBJECT (self, "Draining...");
+  gst_v4l2_video_dec_finish (decoder);
+  gst_v4l2_video_dec_flush (decoder);
+
+  return GST_FLOW_OK;
+}
+
+
static GstVideoCodecFrame *
gst_v4l2_video_dec_get_oldest_frame (GstVideoDecoder * decoder)
{
return TRUE;
if (align->padding_left != 0 || align->padding_top != 0 ||
- width != info->width + align->padding_right ||
height != info->height + align->padding_bottom)
return TRUE;
- gst_structure_set (structure,
- "width", G_TYPE_INT, width - align->padding_right,
- "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ if (height == info->height + align->padding_bottom) {
+ /* Some drivers may round up width to the padded with */
+ if (width == info->width + align->padding_right)
+ gst_structure_set (structure,
+ "width", G_TYPE_INT, width - align->padding_right,
+ "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ /* Some drivers may keep visible width and only round up bytesperline */
+ else if (width == info->width)
+ gst_structure_set (structure,
+ "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ }
return TRUE;
}
GstFlowReturn ret = GST_FLOW_OK;
gboolean processed = FALSE;
GstBuffer *tmp;
+ GstTaskState task_state;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ GstStructure *structure = NULL;
+ const gchar *caps_format = NULL;
+ GstMessage *msg = NULL;
+ GstV4l2BufferPool *capture_pool = NULL;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);
acquired_caps = gst_video_info_to_caps (&info);
GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
st = gst_caps_get_structure (acquired_caps, 0);
- gst_structure_remove_field (st, "format");
+ gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
+ NULL);
/* Probe currently available pixel formats */
- available_caps = gst_v4l2_object_probe_caps (self->v4l2capture, NULL);
- available_caps = gst_caps_make_writable (available_caps);
+ available_caps = gst_caps_copy (self->probed_srccaps);
GST_DEBUG_OBJECT (self, "Available caps: %" GST_PTR_FORMAT, available_caps);
/* Replace coded size with visible size, we want to negotiate visible size
caps = gst_caps_fixate (caps);
GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+  /* Rewrite the negotiated raw format to its Tizen TBM variant
+   * (I420 -> S420, NV12 -> SN12) before setting it on the capture queue. */
+  structure = gst_caps_get_structure (caps, 0);
+  caps_format = gst_structure_get_string (structure, "format");
+
+  /* gst_structure_get_string() returns NULL when the field is absent or is
+   * not a string; guard before strcmp() — strcmp(NULL, ...) is undefined
+   * behaviour and would crash here. */
+  if (!caps_format) {
+    GST_WARNING_OBJECT (self, "fixated caps have no format field");
+  } else if (!strcmp (caps_format, "I420")) {
+    GST_INFO_OBJECT (self, "I420 -> S420");
+    gst_caps_set_simple (caps, "format", G_TYPE_STRING, "S420", NULL);
+  } else if (!strcmp (caps_format, "NV12")) {
+    GST_INFO_OBJECT (self, "NV12 -> SN12");
+    gst_caps_set_simple (caps, "format", G_TYPE_STRING, "SN12", NULL);
+  }
+  GST_INFO_OBJECT (self, "Updated decoded caps: %" GST_PTR_FORMAT, caps);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
/* Try to set negotiated format, on success replace acquired format */
if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
TRUE))
goto activate_failed;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ capture_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
+
+ msg = gst_message_new_element (GST_OBJECT_CAST (decoder),
+ gst_structure_new ("prepare-decode-buffers",
+ "num_buffers", G_TYPE_INT, capture_pool->num_allocated,
+ "extra_num_buffers", G_TYPE_INT, capture_pool->num_allocated - 2, NULL));
+
+ gst_element_post_message (GST_ELEMENT_CAST (decoder), msg);
+
+ GST_WARNING_OBJECT (self, "output buffer[%d]", capture_pool->num_allocated);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
}
- if (gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self)) ==
- GST_TASK_STOPPED) {
+ task_state = gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self));
+ if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
/* It's possible that the processing thread stopped due to an error */
if (self->output_flow != GST_FLOW_OK &&
self->output_flow != GST_FLOW_FLUSHING) {
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
gboolean ret;
+ GstEventType type = GST_EVENT_TYPE (event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
GST_DEBUG_OBJECT (self, "flush start");
gst_v4l2_object_unlock (self->v4l2output);
ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
/* The processing thread should stop now, wait for it */
gst_pad_stop_task (decoder->srcpad);
gst_video_decoder_set_packetized (decoder, TRUE);
self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_DECODER_SINK_PAD (self)),
V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
gst_v4l2_get_output, gst_v4l2_set_output, NULL);
self->v4l2output->no_initial_format = TRUE;
self->v4l2output->keep_aspect = FALSE;
self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_DECODER_SRC_PAD (self)),
V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
gst_v4l2_get_input, gst_v4l2_set_input, NULL);
- self->v4l2capture->no_initial_format = TRUE;
- self->v4l2output->keep_aspect = FALSE;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ self->v4l2capture->tbm_output = TRUE;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
}
static void
video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_stop);
video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finish);
video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_flush);
+ video_decoder_class->drain = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_drain);
video_decoder_class->set_format =
GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_set_format);
video_decoder_class->negotiate =
GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_change_state);
gst_v4l2_object_install_m2m_properties_helper (gobject_class);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ g_object_class_install_property (gobject_class, PROP_TBM_OUTPUT,
+ g_param_spec_boolean ("tbm-output", "Enable TBM for output buffer",
+ "It works for only DMABUF mode.",
+ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
}
static void
cdata->src_caps));
gst_element_class_set_static_metadata (element_class, cdata->longname,
- "Codec/Decoder/Video", cdata->description,
+ "Codec/Decoder/Video/Hardware", cdata->description,
"Nicolas Dufresne <nicolas.dufresne@collabora.com>");
+ gst_caps_unref (cdata->sink_caps);
+ gst_caps_unref (cdata->src_caps);
g_free (cdata);
}
}
} else if (gst_structure_has_name (s, "video/x-h263")) {
SET_META ("H263");
+ } else if (gst_structure_has_name (s, "video/x-fwht")) {
+ SET_META ("FWHT");
} else if (gst_structure_has_name (s, "video/x-h264")) {
SET_META ("H264");
+ } else if (gst_structure_has_name (s, "video/x-h265")) {
+ SET_META ("H265");
} else if (gst_structure_has_name (s, "video/x-wmv")) {
SET_META ("VC1");
} else if (gst_structure_has_name (s, "video/x-vp8")) {
SET_META ("VP8");
+ } else if (gst_structure_has_name (s, "video/x-vp9")) {
+ SET_META ("VP9");
} else if (gst_structure_has_name (s, "video/x-bayer")) {
SET_META ("BAYER");
} else if (gst_structure_has_name (s, "video/x-sonix")) {
g_free (type_name);
type_name = g_strdup_printf ("v4l2%s%sdec", basename, codec_name);
}
+
+ g_free (codec_name);
}
return type_name;
#undef SET_META
}
-gboolean
+void
gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
subtype = g_type_register_static (type, type_name, &type_info, 0);
- gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype);
+#ifdef TIZEN_FEATURE_V4L2VIDEO_ADJ_RANK
+ if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY,
+ subtype))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+#else
+ if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1,
+ subtype))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+#endif
g_free (type_name);
}
-
- return TRUE;
}