/*
* Copyright (C) 2014 Collabora Ltd.
- * Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#include <unistd.h>
#include <string.h>
+#include "gstv4l2object.h"
#include "gstv4l2videodec.h"
-#include "v4l2_calls.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_dec_debug);
#define GST_CAT_DEFAULT gst_v4l2_video_dec_debug
-static gboolean gst_v4l2_video_dec_flush (GstVideoDecoder * decoder);
-
typedef struct
{
gchar *device;
GstCaps *sink_caps;
GstCaps *src_caps;
+ const gchar *longname;
+ const gchar *description;
} GstV4l2VideoDecCData;
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ , PROP_TBM_OUTPUT
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
};
#define gst_v4l2_video_dec_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoDec, gst_v4l2_video_dec,
GST_TYPE_VIDEO_DECODER);
+static GstFlowReturn gst_v4l2_video_dec_finish (GstVideoDecoder * decoder);
+
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+static void gst_v4l2_video_dec_flush_buffer_event (GstVideoDecoder * decoder)
+{
+ gboolean ret = FALSE;
+
+ if (!decoder) {
+ GST_ERROR("no decoder");
+ return;
+ }
+
+ ret = gst_pad_push_event (decoder->srcpad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM | GST_EVENT_TYPE_SERIALIZED,
+ gst_structure_new_empty("tizen/flush-buffer")));
+
+ GST_WARNING_OBJECT(decoder, "event push ret[%d] for flush-buffer", ret);
+}
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
static void
gst_v4l2_video_dec_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
switch (prop_id) {
- case PROP_OUTPUT_IO_MODE:
- gst_v4l2_object_set_property_helper (self->v4l2output, prop_id, value,
- pspec);
- break;
case PROP_CAPTURE_IO_MODE:
- gst_v4l2_object_set_property_helper (self->v4l2capture, prop_id, value,
- pspec);
+ if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ case PROP_TBM_OUTPUT:
+ self->v4l2capture->tbm_output = g_value_get_boolean (value);
+ GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
break;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
+
/* By default, only set on output */
default:
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
switch (prop_id) {
- case PROP_OUTPUT_IO_MODE:
- gst_v4l2_object_get_property_helper (self->v4l2output, prop_id, value,
- pspec);
- break;
case PROP_CAPTURE_IO_MODE:
- gst_v4l2_object_get_property_helper (self->v4l2capture, prop_id, value,
- pspec);
+ if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ case PROP_TBM_OUTPUT:
+ GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
+ g_value_set_boolean (value, self->v4l2capture->tbm_output);
break;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
/* By default read from output */
default:
gst_v4l2_video_dec_open (GstVideoDecoder * decoder)
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ GstCaps *codec_caps;
GST_DEBUG_OBJECT (self, "Opening");
if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
goto failure;
- self->probed_sinkcaps = gst_v4l2_object_get_caps (self->v4l2output,
- gst_v4l2_object_get_codec_caps ());
+ codec_caps = gst_pad_get_pad_template_caps (decoder->sinkpad);
+ self->probed_sinkcaps = gst_v4l2_object_probe_caps (self->v4l2output,
+ codec_caps);
+ gst_caps_unref (codec_caps);
if (gst_caps_is_empty (self->probed_sinkcaps))
goto no_encoded_format;
- self->probed_srccaps = gst_v4l2_object_get_caps (self->v4l2capture,
- gst_v4l2_object_get_raw_caps ());
-
- if (gst_caps_is_empty (self->probed_srccaps))
- goto no_raw_format;
-
return TRUE;
no_encoded_format:
GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
- (_("Encoder on device %s has no supported input format"),
- self->v4l2output->videodev), (NULL));
- goto failure;
-
-
-no_raw_format:
- GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
- (_("Encoder on device %s has no supported output format"),
+ (_("Decoder on device %s has no supported input format"),
self->v4l2output->videodev), (NULL));
goto failure;
/* Should have been flushed already */
g_assert (g_atomic_int_get (&self->active) == FALSE);
- g_assert (g_atomic_int_get (&self->processing) == FALSE);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
gst_v4l2_object_stop (self->v4l2output);
gst_v4l2_object_stop (self->v4l2capture);
gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
GstVideoCodecState * state)
{
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
gboolean ret = TRUE;
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
gst_video_codec_state_unref (self->input_state);
self->input_state = NULL;
- /* FIXME we probably need to do more work if pools are active */
+ gst_v4l2_video_dec_finish (decoder);
+ gst_v4l2_object_stop (self->v4l2output);
+
+ /* The renegotiation flow doesn't blend with the base class flow. To properly
+ * stop the capture pool, if the buffers can't be orphaned, we need to
+ * reclaim our buffers, which will happen through the allocation query.
+ * The allocation query is triggered by gst_video_decoder_negotiate() which
+ * requires the output caps to be set, but we can't know this information
+ * as we rely on the decoder, which requires the capture queue to be
+ * stopped.
+ *
+ * To work around this issue, we simply run an allocation query with the
+ * old negotiated caps in order to drain/reclaim our buffers. That breaks
+ * the complexity and should not have much impact in performance since the
+ * following allocation query will happen on a drained pipeline and won't
+ * block. */
+ if (self->v4l2capture->pool &&
+ !gst_v4l2_buffer_pool_orphan (&self->v4l2capture->pool)) {
+ GstCaps *caps = gst_pad_get_current_caps (decoder->srcpad);
+ if (caps) {
+ GstQuery *query = gst_query_new_allocation (caps, FALSE);
+ gst_pad_peer_query (decoder->srcpad, query);
+ gst_query_unref (query);
+ gst_caps_unref (caps);
+ }
+ }
+
+ gst_v4l2_object_stop (self->v4l2capture);
+ self->output_flow = GST_FLOW_OK;
}
- ret = gst_v4l2_object_set_format (self->v4l2output, state->caps);
+ ret = gst_v4l2_object_set_format (self->v4l2output, state->caps, &error);
+
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
+ gst_v4l2_object_get_raw_caps ());
+
+ if (gst_caps_is_empty (self->probed_srccaps))
+ goto no_raw_format;
if (ret)
self->input_state = gst_video_codec_state_ref (state);
+ else
+ gst_v4l2_error (self, &error);
done:
return ret;
+
+no_raw_format:
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Decoder on device %s has no supported output format"),
+ self->v4l2output->videodev), (NULL));
+ return GST_FLOW_ERROR;
}
static gboolean
/* Ensure the processing thread has stopped for the reverse playback
* discount case */
- if (g_atomic_int_get (&self->processing)) {
+ if (gst_pad_get_task_state (decoder->srcpad) == GST_TASK_STARTED) {
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
gst_v4l2_object_unlock (self->v4l2output);
gst_v4l2_object_unlock_stop (self->v4l2output);
gst_v4l2_object_unlock_stop (self->v4l2capture);
+ if (self->v4l2output->pool)
+ gst_v4l2_buffer_pool_flush (self->v4l2output->pool);
+
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
+ /* gst_v4l2_buffer_pool_flush() calls streamon on the capture pool and must be
+ * called after gst_v4l2_object_unlock_stop() stopped flushing the buffer
+ * pool. */
+ if (self->v4l2capture->pool)
+ gst_v4l2_buffer_pool_flush (self->v4l2capture->pool);
+
return TRUE;
}
dcmd.cmd = cmd;
dcmd.flags = flags;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
goto dcmd_failed;
return TRUE;
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *buffer;
- if (!g_atomic_int_get (&self->processing))
+ if (gst_pad_get_task_state (decoder->srcpad) != GST_TASK_STARTED)
goto done;
GST_DEBUG_OBJECT (self, "Finishing decoding");
/* If the decoder stop command succeeded, just wait until processing is
* finished */
+ GST_DEBUG_OBJECT (self, "Waiting for decoder stop");
GST_OBJECT_LOCK (task);
while (GST_TASK_STATE (task) == GST_TASK_STARTED)
GST_TASK_WAIT (task);
GST_DEBUG_OBJECT (decoder, "Done draining buffers");
+ /* TODO Shall we clean up any reffed frames to work around broken decoders? */
+
done:
return ret;
}
+static GstFlowReturn
+gst_v4l2_video_dec_drain (GstVideoDecoder * decoder)
+{
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ GST_DEBUG_OBJECT (self, "Draining...");
+ gst_v4l2_video_dec_finish (decoder);
+ gst_v4l2_video_dec_flush (decoder);
+
+ return GST_FLOW_OK;
+}
+
static GstVideoCodecFrame *
gst_v4l2_video_dec_get_oldest_frame (GstVideoDecoder * decoder)
{
GST_LOG_OBJECT (decoder, "Allocate output buffer");
+ self->output_flow = GST_FLOW_OK;
do {
/* We cannot use the base class allotate helper since it taking the internal
* stream lock. we know that the acquire may need to poll until more frames
gst_buffer_replace (&buffer, NULL);
self->output_flow = ret;
- g_atomic_int_set (&self->processing, FALSE);
gst_v4l2_object_unlock (self->v4l2output);
gst_pad_pause_task (decoder->srcpad);
}
-static void
-gst_v4l2_video_dec_loop_stopped (GstV4l2VideoDec * self)
-{
- /* When flushing, decoding thread may never run */
- if (g_atomic_int_get (&self->processing)) {
- GST_DEBUG_OBJECT (self, "Early stop of decoding thread");
- self->output_flow = GST_FLOW_FLUSHING;
- g_atomic_int_set (&self->processing, FALSE);
- }
-
- GST_DEBUG_OBJECT (self, "Decoding task destroyed: %s",
- gst_flow_get_name (self->output_flow));
-}
-
static gboolean
-gst_v4l2_video_remove_padding(GstCapsFeatures * features,
+gst_v4l2_video_remove_padding (GstCapsFeatures * features,
GstStructure * structure, gpointer user_data)
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (user_data);
GstVideoInfo *info = &self->v4l2capture->info;
int width, height;
- if (!gst_structure_get_int(structure, "width", &width))
+ if (!gst_structure_get_int (structure, "width", &width))
return TRUE;
- if (!gst_structure_get_int(structure, "height", &height))
+ if (!gst_structure_get_int (structure, "height", &height))
return TRUE;
if (align->padding_left != 0 || align->padding_top != 0 ||
- width != info->width + align->padding_right ||
height != info->height + align->padding_bottom)
return TRUE;
- gst_structure_set(structure,
- "width", G_TYPE_INT, width - align->padding_right,
- "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ if (height == info->height + align->padding_bottom) {
+ /* Some drivers may round up width to the padded width */
+ if (width == info->width + align->padding_right)
+ gst_structure_set (structure,
+ "width", G_TYPE_INT, width - align->padding_right,
+ "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ /* Some drivers may keep visible width and only round up bytesperline */
+ else if (width == info->width)
+ gst_structure_set (structure,
+ "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ }
return TRUE;
}
gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
GstFlowReturn ret = GST_FLOW_OK;
+ gboolean processed = FALSE;
+ GstBuffer *tmp;
+ GstTaskState task_state;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ GstStructure *structure = NULL;
+ const gchar *caps_format = NULL;
+ GstMessage *msg = NULL;
+ GstV4l2BufferPool *capture_pool = NULL;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);
if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
if (!self->input_state)
goto not_negotiated;
- if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps))
+ if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps,
+ &error))
goto not_negotiated;
}
if (codec_data) {
gst_buffer_ref (codec_data);
} else {
- codec_data = frame->input_buffer;
- frame->input_buffer = NULL;
+ codec_data = gst_buffer_ref (frame->input_buffer);
+ processed = TRUE;
}
/* Ensure input internal pool is active */
/* Create caps from the acquired format, remove the format field */
acquired_caps = gst_video_info_to_caps (&info);
+ GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
st = gst_caps_get_structure (acquired_caps, 0);
- gst_structure_remove_field (st, "format");
+ gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
+ NULL);
/* Probe currently available pixel formats */
- available_caps = gst_v4l2_object_probe_caps (self->v4l2capture, NULL);
- available_caps = gst_caps_make_writable (available_caps);
+ available_caps = gst_caps_copy (self->probed_srccaps);
+ GST_DEBUG_OBJECT (self, "Available caps: %" GST_PTR_FORMAT, available_caps);
/* Replace coded size with visible size, we want to negotiate visible size
* with downstream, not coded size. */
filter = gst_caps_intersect_full (available_caps, acquired_caps,
GST_CAPS_INTERSECT_FIRST);
+ GST_DEBUG_OBJECT (self, "Filtered caps: %" GST_PTR_FORMAT, filter);
gst_caps_unref (acquired_caps);
gst_caps_unref (available_caps);
caps = gst_pad_peer_query_caps (decoder->srcpad, filter);
caps = gst_caps_fixate (caps);
GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ structure = gst_caps_get_structure (caps, 0);
+ caps_format = gst_structure_get_string (structure, "format");
+
+ if (!strcmp (caps_format, "I420")) {
+ GST_INFO_OBJECT (self, "I420 -> S420");
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "S420", NULL);
+ } else if (!strcmp (caps_format, "NV12")) {
+ GST_INFO_OBJECT (self, "NV12 -> SN12");
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "SN12", NULL);
+ }
+ GST_INFO_OBJECT (self, "Updated decoded caps: %" GST_PTR_FORMAT, caps);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
/* Try to set negotiated format, on success replace acquired format */
- if (gst_v4l2_object_set_format (self->v4l2capture, caps))
+ if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
gst_video_info_from_caps (&info, caps);
+ else
+ gst_v4l2_clear_error (&error);
gst_caps_unref (caps);
output_state = gst_video_decoder_set_output_state (decoder,
if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
TRUE))
goto activate_failed;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ capture_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
+
+ msg = gst_message_new_element (GST_OBJECT_CAST (decoder),
+ gst_structure_new ("prepare-decode-buffers",
+ "num_buffers", G_TYPE_INT, capture_pool->num_allocated,
+ "extra_num_buffers", G_TYPE_INT, capture_pool->num_allocated - 2, NULL));
+
+ gst_element_post_message (GST_ELEMENT_CAST (decoder), msg);
+
+ GST_WARNING_OBJECT (self, "output buffer[%d]", capture_pool->num_allocated);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
}
- if (g_atomic_int_get (&self->processing) == FALSE) {
+ task_state = gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self));
+ if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
/* It's possible that the processing thread stopped due to an error */
if (self->output_flow != GST_FLOW_OK &&
self->output_flow != GST_FLOW_FLUSHING) {
/* Start the processing task, when it quits, the task will disable input
* processing to unlock input if draining, or prevent potential block */
- g_atomic_int_set (&self->processing, TRUE);
+ self->output_flow = GST_FLOW_FLUSHING;
if (!gst_pad_start_task (decoder->srcpad,
- (GstTaskFunction) gst_v4l2_video_dec_loop, self,
- (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
+ (GstTaskFunction) gst_v4l2_video_dec_loop, self, NULL))
goto start_task_failed;
}
- if (frame->input_buffer) {
+ if (!processed) {
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
ret =
gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
if (ret == GST_FLOW_FLUSHING) {
- if (g_atomic_int_get (&self->processing) == FALSE)
+ if (gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self)) !=
+ GST_TASK_STARTED)
ret = self->output_flow;
goto drop;
} else if (ret != GST_FLOW_OK) {
goto process_failed;
}
-
- /* No need to keep input arround */
- gst_buffer_replace (&frame->input_buffer, NULL);
}
+ /* No need to keep input around */
+ tmp = frame->input_buffer;
+ frame->input_buffer = gst_buffer_new ();
+ gst_buffer_copy_into (frame->input_buffer, tmp,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_META, 0, 0);
+ gst_buffer_unref (tmp);
+
gst_video_codec_frame_unref (frame);
return ret;
{
GST_ERROR_OBJECT (self, "not negotiated");
ret = GST_FLOW_NOT_NEGOTIATED;
+ gst_v4l2_error (self, &error);
goto drop;
}
activate_failed:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
(_("Failed to start decoding thread.")), (NULL));
- g_atomic_int_set (&self->processing, FALSE);
ret = GST_FLOW_ERROR;
goto drop;
}
ret = GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
query);
- latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
- gst_video_decoder_set_latency (decoder, latency, latency);
+ if (GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration)) {
+ latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
+ GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT " (%"
+ G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS (latency),
+ self->v4l2capture->min_buffers, self->v4l2capture->duration);
+ gst_video_decoder_set_latency (decoder, latency, latency);
+ } else {
+ GST_WARNING_OBJECT (self, "Duration invalid, not setting latency");
+ }
return ret;
}
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
gboolean ret;
+ GstEventType type = GST_EVENT_TYPE (event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
GST_DEBUG_OBJECT (self, "flush start");
gst_v4l2_object_unlock (self->v4l2output);
ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
/* The processing thread should stop now, wait for it */
gst_pad_stop_task (decoder->srcpad);
gst_video_decoder_set_packetized (decoder, TRUE);
self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_DECODER_SINK_PAD (self)),
V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
gst_v4l2_get_output, gst_v4l2_set_output, NULL);
self->v4l2output->no_initial_format = TRUE;
self->v4l2output->keep_aspect = FALSE;
self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_DECODER_SRC_PAD (self)),
V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
gst_v4l2_get_input, gst_v4l2_set_input, NULL);
- self->v4l2capture->no_initial_format = TRUE;
- self->v4l2output->keep_aspect = FALSE;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ self->v4l2capture->tbm_output = TRUE;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
}
static void
GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_dec_debug, "v4l2videodec", 0,
"V4L2 Video Decoder");
- gst_element_class_set_static_metadata (element_class,
- "V4L2 Video Decoder",
- "Codec/Decoder/Video",
- "Decode video streams via V4L2 API",
- "Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>");
-
gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_dispose);
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finalize);
gobject_class->set_property =
video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_stop);
video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finish);
video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_flush);
+ video_decoder_class->drain = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_drain);
video_decoder_class->set_format =
GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_set_format);
video_decoder_class->negotiate =
GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_change_state);
gst_v4l2_object_install_m2m_properties_helper (gobject_class);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ g_object_class_install_property (gobject_class, PROP_TBM_OUTPUT,
+ g_param_spec_boolean ("tbm-output", "Enable TBM for output buffer",
+ "It works for only DMABUF mode.",
+ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
}
static void
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
cdata->src_caps));
+ gst_element_class_set_static_metadata (element_class, cdata->longname,
+ "Codec/Decoder/Video/Hardware", cdata->description,
+ "Nicolas Dufresne <nicolas.dufresne@collabora.com>");
+
+ gst_caps_unref (cdata->sink_caps);
+ gst_caps_unref (cdata->src_caps);
g_free (cdata);
}
return ret;
}
-gboolean
-gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
- const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
+static gchar *
+gst_v4l2_video_dec_set_metadata (GstStructure * s, GstV4l2VideoDecCData * cdata,
+ const gchar * basename)
{
- GTypeQuery type_query;
- GTypeInfo type_info = { 0, };
- GType type, subtype;
- gchar *type_name;
- GstV4l2VideoDecCData *cdata;
+ gchar *codec_name = NULL;
+ gchar *type_name = NULL;
+
+#define SET_META(codec) \
+G_STMT_START { \
+ cdata->longname = "V4L2 " codec " Decoder"; \
+ cdata->description = "Decodes " codec " streams via V4L2 API"; \
+ codec_name = g_ascii_strdown (codec, -1); \
+} G_STMT_END
+
+ if (gst_structure_has_name (s, "image/jpeg")) {
+ SET_META ("JPEG");
+ } else if (gst_structure_has_name (s, "video/mpeg")) {
+ gint mpegversion = 0;
+ gst_structure_get_int (s, "mpegversion", &mpegversion);
+
+ if (mpegversion == 2) {
+ SET_META ("MPEG2");
+ } else {
+ SET_META ("MPEG4");
+ }
+ } else if (gst_structure_has_name (s, "video/x-h263")) {
+ SET_META ("H263");
+ } else if (gst_structure_has_name (s, "video/x-fwht")) {
+ SET_META ("FWHT");
+ } else if (gst_structure_has_name (s, "video/x-h264")) {
+ SET_META ("H264");
+ } else if (gst_structure_has_name (s, "video/x-h265")) {
+ SET_META ("H265");
+ } else if (gst_structure_has_name (s, "video/x-wmv")) {
+ SET_META ("VC1");
+ } else if (gst_structure_has_name (s, "video/x-vp8")) {
+ SET_META ("VP8");
+ } else if (gst_structure_has_name (s, "video/x-vp9")) {
+ SET_META ("VP9");
+ } else if (gst_structure_has_name (s, "video/x-bayer")) {
+ SET_META ("BAYER");
+ } else if (gst_structure_has_name (s, "video/x-sonix")) {
+ SET_META ("SONIX");
+ } else if (gst_structure_has_name (s, "video/x-pwc1")) {
+ SET_META ("PWC1");
+ } else if (gst_structure_has_name (s, "video/x-pwc2")) {
+ SET_META ("PWC2");
+ } else {
+ /* This code should be kept in sync with the exposed CODEC type of format
+ * from gstv4l2object.c. This warning will only occur in case we forget
+ * to also add a format here. */
+ gchar *s_str = gst_structure_to_string (s);
+ g_warning ("Missing fixed name mapping for caps '%s', this is a GStreamer "
+ "bug, please report at https://bugs.gnome.org", s_str);
+ g_free (s_str);
+ }
- cdata = g_new0 (GstV4l2VideoDecCData, 1);
- cdata->device = g_strdup (device_path);
- cdata->sink_caps = gst_caps_ref (sink_caps);
- cdata->src_caps = gst_caps_ref (src_caps);
+ if (codec_name) {
+ type_name = g_strdup_printf ("v4l2%sdec", codec_name);
+ if (g_type_from_name (type_name) != 0) {
+ g_free (type_name);
+ type_name = g_strdup_printf ("v4l2%s%sdec", basename, codec_name);
+ }
- type = gst_v4l2_video_dec_get_type ();
- g_type_query (type, &type_query);
- memset (&type_info, 0, sizeof (type_info));
- type_info.class_size = type_query.class_size;
- type_info.instance_size = type_query.instance_size;
- type_info.class_init = gst_v4l2_video_dec_subclass_init;
- type_info.class_data = cdata;
- type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
+ g_free (codec_name);
+ }
- type_name = g_strdup_printf ("v4l2%sdec", basename);
- subtype = g_type_register_static (type, type_name, &type_info, 0);
+ return type_name;
+#undef SET_META
+}
- gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype);
+void
+gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
+ const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
+{
+ gint i;
+
+ for (i = 0; i < gst_caps_get_size (sink_caps); i++) {
+ GstV4l2VideoDecCData *cdata;
+ GstStructure *s;
+ GTypeQuery type_query;
+ GTypeInfo type_info = { 0, };
+ GType type, subtype;
+ gchar *type_name;
+
+ s = gst_caps_get_structure (sink_caps, i);
+
+ cdata = g_new0 (GstV4l2VideoDecCData, 1);
+ cdata->device = g_strdup (device_path);
+ cdata->sink_caps = gst_caps_new_empty ();
+ gst_caps_append_structure (cdata->sink_caps, gst_structure_copy (s));
+ cdata->src_caps = gst_caps_ref (src_caps);
+ type_name = gst_v4l2_video_dec_set_metadata (s, cdata, basename);
+
+ /* Skip over if we hit an unmapped type */
+ if (!type_name) {
+ g_free (cdata);
+ continue;
+ }
- g_free (type_name);
+ type = gst_v4l2_video_dec_get_type ();
+ g_type_query (type, &type_query);
+ memset (&type_info, 0, sizeof (type_info));
+ type_info.class_size = type_query.class_size;
+ type_info.instance_size = type_query.instance_size;
+ type_info.class_init = gst_v4l2_video_dec_subclass_init;
+ type_info.class_data = cdata;
+ type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
+
+ subtype = g_type_register_static (type, type_name, &type_info, 0);
+#ifdef TIZEN_FEATURE_V4L2VIDEO_ADJ_RANK
+ if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY,
+ subtype))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+#else
+ if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1,
+ subtype))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+#endif
- return TRUE;
+ g_free (type_name);
+ }
}