static GstFlowReturn gst_v4l2_video_dec_finish (GstVideoDecoder * decoder);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+/* Push a custom, serialized "tizen/flush-buffer" event downstream on the
+ * decoder's src pad.  NOTE(review): presumably this tells Tizen/TBM-aware
+ * downstream elements to release the decoder's buffers before the capture
+ * queue is stopped or flushed — confirm against the downstream handler.
+ * The push result is only logged; the function returns nothing and is a
+ * no-op when called with a NULL decoder. */
+static void gst_v4l2_video_dec_flush_buffer_event (GstVideoDecoder * decoder)
+{
+ gboolean ret = FALSE;
+
+ /* Defensive NULL check; the call sites in this patch pass the decoder
+  * instance from the stop/flush vmethods. */
+ if (!decoder) {
+ GST_ERROR("no decoder");
+ return;
+ }
+
+ ret = gst_pad_push_event (decoder->srcpad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM | GST_EVENT_TYPE_SERIALIZED,
+ gst_structure_new_empty("tizen/flush-buffer")));
+
+ GST_WARNING_OBJECT(decoder, "event push ret[%d] for flush-buffer", ret);
+}
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
static void
gst_v4l2_video_dec_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
switch (prop_id) {
- case PROP_OUTPUT_IO_MODE:
- gst_v4l2_object_set_property_helper (self->v4l2output, prop_id, value,
- pspec);
- break;
case PROP_CAPTURE_IO_MODE:
- gst_v4l2_object_set_property_helper (self->v4l2capture, prop_id, value,
- pspec);
+ if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
break;
/* By default, only set on output */
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
switch (prop_id) {
- case PROP_OUTPUT_IO_MODE:
- gst_v4l2_object_get_property_helper (self->v4l2output, prop_id, value,
- pspec);
- break;
case PROP_CAPTURE_IO_MODE:
- gst_v4l2_object_get_property_helper (self->v4l2capture, prop_id, value,
- pspec);
+ if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
break;
/* By default read from output */
if (gst_caps_is_empty (self->probed_sinkcaps))
goto no_encoded_format;
- self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
- gst_v4l2_object_get_raw_caps ());
-
- if (gst_caps_is_empty (self->probed_srccaps))
- goto no_raw_format;
-
return TRUE;
no_encoded_format:
GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
- (_("Encoder on device %s has no supported input format"),
- self->v4l2output->videodev), (NULL));
- goto failure;
-
-
-no_raw_format:
- GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
- (_("Encoder on device %s has no supported output format"),
+ (_("Decoder on device %s has no supported input format"),
self->v4l2output->videodev), (NULL));
goto failure;
/* Should have been flushed already */
g_assert (g_atomic_int_get (&self->active) == FALSE);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
gst_v4l2_object_stop (self->v4l2output);
gst_v4l2_object_stop (self->v4l2capture);
gst_v4l2_video_dec_finish (decoder);
gst_v4l2_object_stop (self->v4l2output);
- /* The renegotiation flow don't blend with the base class flow. To
- * properly stop the capture pool we need to reclaim our buffers, which
- * will happend through the allocation query. The allocation query is
- * triggered by gst_video_decoder_negotiate() which requires the output
- * caps to be set, but we can't know this information as we rely on the
- * decoder, which requires the capture queue to be stopped.
+ /* The renegotiation flow doesn't blend with the base class flow. To properly
+ * stop the capture pool, if the buffers can't be orphaned, we need to
+ * reclaim our buffers, which will happen through the allocation query.
+ * The allocation query is triggered by gst_video_decoder_negotiate() which
+ * requires the output caps to be set, but we can't know this information
+ * as we rely on the decoder, which requires the capture queue to be
+ * stopped.
*
* To workaround this issue, we simply run an allocation query with the
* old negotiated caps in order to drain/reclaim our buffers. That breaks
* the complexity and should not have much impact in performance since the
* following allocation query will happen on a drained pipeline and won't
* block. */
- {
+ if (self->v4l2capture->pool &&
+ !gst_v4l2_buffer_pool_orphan (&self->v4l2capture->pool)) {
GstCaps *caps = gst_pad_get_current_caps (decoder->srcpad);
- GstQuery *query = gst_query_new_allocation (caps, FALSE);
- gst_pad_peer_query (decoder->srcpad, query);
- gst_query_unref (query);
- gst_caps_unref (caps);
+ if (caps) {
+ GstQuery *query = gst_query_new_allocation (caps, FALSE);
+ gst_pad_peer_query (decoder->srcpad, query);
+ gst_query_unref (query);
+ gst_caps_unref (caps);
+ }
}
gst_v4l2_object_stop (self->v4l2capture);
ret = gst_v4l2_object_set_format (self->v4l2output, state->caps, &error);
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
+ gst_v4l2_object_get_raw_caps ());
+
+ if (gst_caps_is_empty (self->probed_srccaps))
+ goto no_raw_format;
+
if (ret)
self->input_state = gst_video_codec_state_ref (state);
else
done:
return ret;
+
+no_raw_format:
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Decoder on device %s has no supported output format"),
+ self->v4l2output->videodev), (NULL));
+ return GST_FLOW_ERROR;
}
static gboolean
self->output_flow = GST_FLOW_OK;
+ gst_v4l2_object_unlock_stop (self->v4l2output);
+ gst_v4l2_object_unlock_stop (self->v4l2capture);
+
if (self->v4l2output->pool)
gst_v4l2_buffer_pool_flush (self->v4l2output->pool);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
+ /* gst_v4l2_buffer_pool_flush() calls streamon on the capture pool and must be
+ * called after gst_v4l2_object_unlock_stop() stopped flushing the buffer
+ * pool. */
if (self->v4l2capture->pool)
gst_v4l2_buffer_pool_flush (self->v4l2capture->pool);
- gst_v4l2_object_unlock_stop (self->v4l2output);
- gst_v4l2_object_unlock_stop (self->v4l2capture);
-
return TRUE;
}
/* If the decoder stop command succeeded, just wait until processing is
* finished */
+ GST_DEBUG_OBJECT (self, "Waiting for decoder stop");
GST_OBJECT_LOCK (task);
while (GST_TASK_STATE (task) == GST_TASK_STARTED)
GST_TASK_WAIT (task);
return ret;
}
+/* Drain: finish (push out all pending frames) then flush internal state.
+ * NOTE(review): the return type is widened from gboolean to GstFlowReturn —
+ * presumably to match an updated drain() vfunc signature; confirm against
+ * the GstVideoDecoder base class this patch targets. */
-static gboolean
+static GstFlowReturn
gst_v4l2_video_dec_drain (GstVideoDecoder * decoder)
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
gst_v4l2_video_dec_finish (decoder);
gst_v4l2_video_dec_flush (decoder);
- return TRUE;
+ return GST_FLOW_OK;
}
static GstVideoCodecFrame *
gboolean processed = FALSE;
GstBuffer *tmp;
GstTaskState task_state;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ GstStructure *structure = NULL;
+ const gchar *caps_format = NULL;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);
acquired_caps = gst_video_info_to_caps (&info);
GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
st = gst_caps_get_structure (acquired_caps, 0);
- gst_structure_remove_field (st, "format");
+ gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
+ NULL);
/* Probe currently available pixel formats */
- available_caps = gst_v4l2_object_probe_caps (self->v4l2capture, NULL);
- available_caps = gst_caps_make_writable (available_caps);
+ available_caps = gst_caps_copy (self->probed_srccaps);
GST_DEBUG_OBJECT (self, "Available caps: %" GST_PTR_FORMAT, available_caps);
/* Replace coded size with visible size, we want to negotiate visible size
caps = gst_caps_fixate (caps);
GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ structure = gst_caps_get_structure (caps, 0);
+ caps_format = gst_structure_get_string (structure, "format");
+
+ if (!strcmp (caps_format, "I420")) {
+ GST_INFO_OBJECT (self, "I420 -> S420");
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "S420", NULL);
+ } else if (!strcmp (caps_format, "NV12")) {
+ GST_INFO_OBJECT (self, "NV12 -> SN12");
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "SN12", NULL);
+ }
+ GST_INFO_OBJECT (self, "Updated decoded caps: %" GST_PTR_FORMAT, caps);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
/* Try to set negotiated format, on success replace acquired format */
if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
gboolean ret;
+ GstEventType type = GST_EVENT_TYPE (event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
GST_DEBUG_OBJECT (self, "flush start");
gst_v4l2_object_unlock (self->v4l2output);
ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
- switch (GST_EVENT_TYPE (event)) {
+ switch (type) {
case GST_EVENT_FLUSH_START:
/* The processing thread should stop now, wait for it */
gst_pad_stop_task (decoder->srcpad);
GST_OBJECT (GST_VIDEO_DECODER_SRC_PAD (self)),
V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
gst_v4l2_get_input, gst_v4l2_set_input, NULL);
- self->v4l2capture->no_initial_format = TRUE;
- self->v4l2output->keep_aspect = FALSE;
}
static void
cdata->src_caps));
gst_element_class_set_static_metadata (element_class, cdata->longname,
- "Codec/Decoder/Video", cdata->description,
+ "Codec/Decoder/Video/Hardware", cdata->description,
"Nicolas Dufresne <nicolas.dufresne@collabora.com>");
gst_caps_unref (cdata->sink_caps);
}
} else if (gst_structure_has_name (s, "video/x-h263")) {
SET_META ("H263");
+ } else if (gst_structure_has_name (s, "video/x-fwht")) {
+ SET_META ("FWHT");
} else if (gst_structure_has_name (s, "video/x-h264")) {
SET_META ("H264");
+ } else if (gst_structure_has_name (s, "video/x-h265")) {
+ SET_META ("H265");
} else if (gst_structure_has_name (s, "video/x-wmv")) {
SET_META ("VC1");
} else if (gst_structure_has_name (s, "video/x-vp8")) {
type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
subtype = g_type_register_static (type, type_name, &type_info, 0);
+#ifdef TIZEN_FEATURE_V4L2VIDEO_ADJ_RANK
+ if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY,
+ subtype))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+#else
if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1,
subtype))
GST_WARNING ("Failed to register plugin '%s'", type_name);
+#endif
g_free (type_name);
}