X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=sys%2Fv4l2%2Fgstv4l2videodec.c;h=f1c32e81e9889051886b321efb390551d1117603;hb=63f9f02c627a2bf40051222e0238deda912e68de;hp=d148b66ad5a320577cb558484010a0a83998b10e;hpb=462800e9c632fe28bcce2d5523e3b05090052057;p=platform%2Fupstream%2Fgst-plugins-good.git

diff --git a/sys/v4l2/gstv4l2videodec.c b/sys/v4l2/gstv4l2videodec.c
index d148b66..f1c32e8 100644
--- a/sys/v4l2/gstv4l2videodec.c
+++ b/sys/v4l2/gstv4l2videodec.c
@@ -1,6 +1,6 @@
 /*
  * Copyright (C) 2014 Collabora Ltd.
- *     Author: Nicolas Dufresne
+ *     Author: Nicolas Dufresne
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
@@ -29,8 +29,8 @@
 #include 
 #include 
 
+#include "gstv4l2object.h"
 #include "gstv4l2videodec.h"
-#include "v4l2_calls.h"
 
 #include 
 #include 
@@ -38,25 +38,47 @@ GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_dec_debug);
 #define GST_CAT_DEFAULT gst_v4l2_video_dec_debug
 
-static gboolean gst_v4l2_video_dec_flush (GstVideoDecoder * decoder);
-
 typedef struct
 {
   gchar *device;
   GstCaps *sink_caps;
   GstCaps *src_caps;
+  const gchar *longname;
+  const gchar *description;
 } GstV4l2VideoDecCData;
 
 enum
 {
   PROP_0,
   V4L2_STD_OBJECT_PROPS
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+  , PROP_TBM_OUTPUT
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
 };
 
 #define gst_v4l2_video_dec_parent_class parent_class
 G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoDec, gst_v4l2_video_dec,
     GST_TYPE_VIDEO_DECODER);
 
+static GstFlowReturn gst_v4l2_video_dec_finish (GstVideoDecoder * decoder);
+
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+static void gst_v4l2_video_dec_flush_buffer_event (GstVideoDecoder * decoder)
+{
+  gboolean ret = FALSE;
+
+  if (!decoder) {
+    GST_ERROR("no decoder");
+    return;
+  }
+
+  ret = gst_pad_push_event (decoder->srcpad,
+      gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM | GST_EVENT_TYPE_SERIALIZED,
+          gst_structure_new_empty("tizen/flush-buffer")));
+
+  GST_WARNING_OBJECT(decoder, "event push ret[%d] for flush-buffer", ret);
+}
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
 
 static void
 gst_v4l2_video_dec_set_property (GObject * object,
     guint prop_id, const GValue * value, GParamSpec * pspec)
@@ -64,14 +86,19 @@ gst_v4l2_video_dec_set_property (GObject * object,
   GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
 
   switch (prop_id) {
-    case PROP_OUTPUT_IO_MODE:
-      gst_v4l2_object_set_property_helper (self->v4l2output, prop_id, value,
-          pspec);
-      break;
     case PROP_CAPTURE_IO_MODE:
-      gst_v4l2_object_set_property_helper (self->v4l2capture, prop_id, value,
-          pspec);
+      if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
+              prop_id, value, pspec)) {
+        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      }
+      break;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+    case PROP_TBM_OUTPUT:
+      self->v4l2capture->tbm_output = g_value_get_boolean (value);
+      GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
       break;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
+
     /* By default, only set on output */
     default:
@@ -90,14 +117,18 @@ gst_v4l2_video_dec_get_property (GObject * object,
   GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
 
   switch (prop_id) {
-    case PROP_OUTPUT_IO_MODE:
-      gst_v4l2_object_get_property_helper (self->v4l2output, prop_id, value,
-          pspec);
-      break;
     case PROP_CAPTURE_IO_MODE:
-      gst_v4l2_object_get_property_helper (self->v4l2capture, prop_id, value,
-          pspec);
+      if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
+              prop_id, value, pspec)) {
+        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      }
+      break;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+    case PROP_TBM_OUTPUT:
+      GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
+      g_value_set_boolean (value, self->v4l2capture->tbm_output);
       break;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
 
     /* By default read from output */
     default:
@@ -113,6 +144,7 @@ static gboolean
 gst_v4l2_video_dec_open (GstVideoDecoder * decoder)
 {
   GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+  GstCaps *codec_caps;
 
   GST_DEBUG_OBJECT (self, "Opening");
 
@@ -122,30 +154,19 @@ gst_v4l2_video_dec_open (GstVideoDecoder * decoder)
   if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
     goto failure;
 
-  self->probed_sinkcaps = gst_v4l2_object_get_caps (self->v4l2output,
-      gst_v4l2_object_get_codec_caps ());
+  codec_caps = gst_pad_get_pad_template_caps (decoder->sinkpad);
+  self->probed_sinkcaps = gst_v4l2_object_probe_caps (self->v4l2output,
+      codec_caps);
+  gst_caps_unref (codec_caps);
 
   if (gst_caps_is_empty (self->probed_sinkcaps))
     goto no_encoded_format;
 
-  self->probed_srccaps = gst_v4l2_object_get_caps (self->v4l2capture,
-      gst_v4l2_object_get_raw_caps ());
-
-  if (gst_caps_is_empty (self->probed_srccaps))
-    goto no_raw_format;
-
   return TRUE;
 
 no_encoded_format:
   GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
-      (_("Encoder on device %s has no supported input format"),
-          self->v4l2output->videodev), (NULL));
-  goto failure;
-
-
-no_raw_format:
-  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
-      (_("Encoder on device %s has no supported output format"),
+      (_("Decoder on device %s has no supported input format"),
           self->v4l2output->videodev), (NULL));
   goto failure;
 
@@ -210,7 +231,9 @@ gst_v4l2_video_dec_stop (GstVideoDecoder * decoder)
 
   /* Should have been flushed already */
   g_assert (g_atomic_int_get (&self->active) == FALSE);
-  g_assert (g_atomic_int_get (&self->processing) == FALSE);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+  gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
 
   gst_v4l2_object_stop (self->v4l2output);
   gst_v4l2_object_stop (self->v4l2capture);
@@ -243,11 +266,46 @@ gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
     gst_video_codec_state_unref (self->input_state);
     self->input_state = NULL;
 
-    /* FIXME we probably need to do more work if pools are active */
+    gst_v4l2_video_dec_finish (decoder);
+    gst_v4l2_object_stop (self->v4l2output);
+
+    /* The renegotiation flow doesn't blend with the base class flow. To properly
+     * stop the capture pool, if the buffers can't be orphaned, we need to
+     * reclaim our buffers, which will happen through the allocation query.
+     * The allocation query is triggered by gst_video_decoder_negotiate() which
+     * requires the output caps to be set, but we can't know this information
+     * as we rely on the decoder, which requires the capture queue to be
+     * stopped.
+     *
+     * To work around this issue, we simply run an allocation query with the
+     * old negotiated caps in order to drain/reclaim our buffers. That breaks
+     * the complexity and should not have much impact in performance since the
+     * following allocation query will happen on a drained pipeline and won't
+     * block. */
+    if (self->v4l2capture->pool &&
+        !gst_v4l2_buffer_pool_orphan (&self->v4l2capture->pool)) {
+      GstCaps *caps = gst_pad_get_current_caps (decoder->srcpad);
+      if (caps) {
+        GstQuery *query = gst_query_new_allocation (caps, FALSE);
+        gst_pad_peer_query (decoder->srcpad, query);
+        gst_query_unref (query);
+        gst_caps_unref (caps);
+      }
+    }
+
+    gst_v4l2_object_stop (self->v4l2capture);
+    self->output_flow = GST_FLOW_OK;
   }
 
   ret = gst_v4l2_object_set_format (self->v4l2output, state->caps, &error);
+  gst_caps_replace (&self->probed_srccaps, NULL);
+  self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
+      gst_v4l2_object_get_raw_caps ());
+
+  if (gst_caps_is_empty (self->probed_srccaps))
+    goto no_raw_format;
+
   if (ret)
     self->input_state = gst_video_codec_state_ref (state);
   else
@@ -255,6 +313,12 @@ gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
 
 done:
   return ret;
+
+no_raw_format:
+  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+      (_("Decoder on device %s has no supported output format"),
+          self->v4l2output->videodev), (NULL));
+  return GST_FLOW_ERROR;
 }
 
 static gboolean
@@ -266,7 +330,7 @@ gst_v4l2_video_dec_flush (GstVideoDecoder * decoder)
 
   /* Ensure the processing thread has stopped for the reverse playback
    * discont case */
-  if (g_atomic_int_get (&self->processing)) {
+  if (gst_pad_get_task_state (decoder->srcpad) == GST_TASK_STARTED) {
     GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
 
     gst_v4l2_object_unlock (self->v4l2output);
@@ -280,6 +344,18 @@ gst_v4l2_video_dec_flush (GstVideoDecoder * decoder)
   gst_v4l2_object_unlock_stop (self->v4l2output);
   gst_v4l2_object_unlock_stop (self->v4l2capture);
 
+  if (self->v4l2output->pool)
+    gst_v4l2_buffer_pool_flush (self->v4l2output->pool);
+
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+  gst_v4l2_video_dec_flush_buffer_event (decoder);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
+  /* gst_v4l2_buffer_pool_flush() calls streamon on the capture pool and must be
+   * called after gst_v4l2_object_unlock_stop() stopped flushing the buffer
+   * pool. */
+  if (self->v4l2capture->pool)
+    gst_v4l2_buffer_pool_flush (self->v4l2capture->pool);
+
   return TRUE;
 }
 
@@ -309,7 +385,7 @@ gst_v4l2_decoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
   dcmd.cmd = cmd;
   dcmd.flags = flags;
-  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
+  if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
     goto dcmd_failed;
 
   return TRUE;
@@ -334,7 +410,7 @@ gst_v4l2_video_dec_finish (GstVideoDecoder * decoder)
   GstFlowReturn ret = GST_FLOW_OK;
   GstBuffer *buffer;
 
-  if (!g_atomic_int_get (&self->processing))
+  if (gst_pad_get_task_state (decoder->srcpad) != GST_TASK_STARTED)
     goto done;
 
   GST_DEBUG_OBJECT (self, "Finishing decoding");
@@ -346,6 +422,7 @@ gst_v4l2_video_dec_finish (GstVideoDecoder * decoder)
 
     /* If the decoder stop command succeeded, just wait until processing is
      * finished */
+    GST_DEBUG_OBJECT (self, "Waiting for decoder stop");
     GST_OBJECT_LOCK (task);
     while (GST_TASK_STATE (task) == GST_TASK_STARTED)
       GST_TASK_WAIT (task);
@@ -374,10 +451,24 @@ gst_v4l2_video_dec_finish (GstVideoDecoder * decoder)
 
   GST_DEBUG_OBJECT (decoder, "Done draining buffers");
 
+  /* TODO Shall we clean up any reffed frame to work around broken decoders? */
+
 done:
   return ret;
 }
 
+static GstFlowReturn
+gst_v4l2_video_dec_drain (GstVideoDecoder * decoder)
+{
+  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+  GST_DEBUG_OBJECT (self, "Draining...");
+  gst_v4l2_video_dec_finish (decoder);
+  gst_v4l2_video_dec_flush (decoder);
+
+  return GST_FLOW_OK;
+}
+
 static GstVideoCodecFrame *
 gst_v4l2_video_dec_get_oldest_frame (GstVideoDecoder * decoder)
 {
@@ -420,6 +511,7 @@ gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
 
   GST_LOG_OBJECT (decoder, "Allocate output buffer");
 
+  self->output_flow = GST_FLOW_OK;
   do {
    /* We cannot use the base class allocate helper since it is taking the internal
     * stream lock. We know that the acquire may need to poll until more frames
@@ -469,25 +561,10 @@ beach:
   gst_buffer_replace (&buffer, NULL);
 
   self->output_flow = ret;
-  g_atomic_int_set (&self->processing, FALSE);
   gst_v4l2_object_unlock (self->v4l2output);
   gst_pad_pause_task (decoder->srcpad);
 }
 
-static void
-gst_v4l2_video_dec_loop_stopped (GstV4l2VideoDec * self)
-{
-  /* When flushing, decoding thread may never run */
-  if (g_atomic_int_get (&self->processing)) {
-    GST_DEBUG_OBJECT (self, "Early stop of decoding thread");
-    self->output_flow = GST_FLOW_FLUSHING;
-    g_atomic_int_set (&self->processing, FALSE);
-  }
-
-  GST_DEBUG_OBJECT (self, "Decoding task destroyed: %s",
-      gst_flow_get_name (self->output_flow));
-}
-
 static gboolean
 gst_v4l2_video_remove_padding (GstCapsFeatures * features,
     GstStructure * structure, gpointer user_data)
@@ -504,13 +581,20 @@ gst_v4l2_video_remove_padding (GstCapsFeatures * features,
     return TRUE;
 
   if (align->padding_left != 0 || align->padding_top != 0 ||
-      width != info->width + align->padding_right ||
      height != info->height + align->padding_bottom)
    return TRUE;
 
-  gst_structure_set (structure,
-      "width", G_TYPE_INT, width - align->padding_right,
-      "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+  if (height == info->height + align->padding_bottom) {
+    /* Some drivers may round up width to the padded width */
+    if (width == info->width + align->padding_right)
+      gst_structure_set (structure,
+          "width", G_TYPE_INT, width - align->padding_right,
+          "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+    /* Some drivers may keep visible width and only round up bytesperline */
+    else if (width == info->width)
+      gst_structure_set (structure,
+          "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+  }
 
   return TRUE;
 }
@@ -524,6 +608,13 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
   GstFlowReturn ret = GST_FLOW_OK;
   gboolean processed = FALSE;
   GstBuffer *tmp;
+  GstTaskState task_state;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+  GstStructure *structure = NULL;
+  const gchar *caps_format = NULL;
+  GstMessage *msg = NULL;
+  GstV4l2BufferPool *capture_pool = NULL;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
 
   GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);
 
@@ -592,12 +683,14 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
 
     /* Create caps from the acquired format, remove the format field */
     acquired_caps = gst_video_info_to_caps (&info);
+    GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
     st = gst_caps_get_structure (acquired_caps, 0);
-    gst_structure_remove_field (st, "format");
+    gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
+        NULL);
 
     /* Probe currently available pixel formats */
-    available_caps = gst_v4l2_object_probe_caps (self->v4l2capture, NULL);
-    available_caps = gst_caps_make_writable (available_caps);
+    available_caps = gst_caps_copy (self->probed_srccaps);
+    GST_DEBUG_OBJECT (self, "Available caps: %" GST_PTR_FORMAT, available_caps);
 
     /* Replace coded size with visible size, we want to negotiate visible size
     * with downstream, not coded size. */
@@ -605,6 +698,7 @@
     filter = gst_caps_intersect_full (available_caps, acquired_caps,
         GST_CAPS_INTERSECT_FIRST);
+    GST_DEBUG_OBJECT (self, "Filtered caps: %" GST_PTR_FORMAT, filter);
     gst_caps_unref (acquired_caps);
     gst_caps_unref (available_caps);
     caps = gst_pad_peer_query_caps (decoder->srcpad, filter);
@@ -620,6 +714,19 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
     caps = gst_caps_fixate (caps);
     GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+    structure = gst_caps_get_structure (caps, 0);
+    caps_format = gst_structure_get_string (structure, "format");
+
+    if (!strcmp (caps_format, "I420")) {
+      GST_INFO_OBJECT (self, "I420 -> S420");
+      gst_caps_set_simple (caps, "format", G_TYPE_STRING, "S420", NULL);
+    } else if (!strcmp (caps_format, "NV12")) {
+      GST_INFO_OBJECT (self, "NV12 -> SN12");
+      gst_caps_set_simple (caps, "format", G_TYPE_STRING, "SN12", NULL);
+    }
+    GST_INFO_OBJECT (self, "Updated decoded caps: %" GST_PTR_FORMAT, caps);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
     /* Try to set negotiated format, on success replace acquired format */
     if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
@@ -646,9 +753,22 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
     if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
             TRUE))
       goto activate_failed;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+    capture_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
+
+    msg = gst_message_new_element (GST_OBJECT_CAST (decoder),
+        gst_structure_new ("prepare-decode-buffers",
+            "num_buffers", G_TYPE_INT, capture_pool->num_allocated,
+            "extra_num_buffers", G_TYPE_INT, capture_pool->num_allocated - 2, NULL));
+
+    gst_element_post_message (GST_ELEMENT_CAST (decoder), msg);
+
+    GST_WARNING_OBJECT (self, "output buffer[%d]", capture_pool->num_allocated);
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
   }
 
-  if (g_atomic_int_get (&self->processing) == FALSE) {
+  task_state = gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self));
+  if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
     /* It's possible that the processing thread stopped due to an error */
     if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
@@ -661,10 +781,9 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
 
     /* Start the processing task, when it quits, the task will disable input
     * processing to unlock input if draining, or prevent potential block */
-    g_atomic_int_set (&self->processing, TRUE);
+    self->output_flow = GST_FLOW_FLUSHING;
     if (!gst_pad_start_task (decoder->srcpad,
-            (GstTaskFunction) gst_v4l2_video_dec_loop, self,
-            (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
+            (GstTaskFunction) gst_v4l2_video_dec_loop, self, NULL))
      goto start_task_failed;
   }
 
@@ -676,7 +795,8 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
     GST_VIDEO_DECODER_STREAM_LOCK (decoder);
 
     if (ret == GST_FLOW_FLUSHING) {
-      if (g_atomic_int_get (&self->processing) == FALSE)
+      if (gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self)) !=
+          GST_TASK_STARTED)
        ret = self->output_flow;
      goto drop;
     } else if (ret != GST_FLOW_OK) {
@@ -721,7 +841,6 @@ start_task_failed:
 {
   GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
       (_("Failed to start decoding thread.")), (NULL));
-  g_atomic_int_set (&self->processing, FALSE);
   ret = GST_FLOW_ERROR;
   goto drop;
 }
@@ -752,8 +871,15 @@ gst_v4l2_video_dec_decide_allocation (GstVideoDecoder * decoder,
   ret = GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
       query);
 
-  latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
-  gst_video_decoder_set_latency (decoder, latency, latency);
+  if (GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration)) {
+    latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
+    GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT " (%"
+        G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS (latency),
+        self->v4l2capture->min_buffers, self->v4l2capture->duration);
+    gst_video_decoder_set_latency (decoder, latency, latency);
+  } else {
+    GST_WARNING_OBJECT (self, "Duration invalid, not setting latency");
+  }
 
   return ret;
 }
@@ -817,8 +943,9 @@ gst_v4l2_video_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
 {
   GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
   gboolean ret;
+  GstEventType type = GST_EVENT_TYPE (event);
 
-  switch (GST_EVENT_TYPE (event)) {
+  switch (type) {
     case GST_EVENT_FLUSH_START:
       GST_DEBUG_OBJECT (self, "flush start");
       gst_v4l2_object_unlock (self->v4l2output);
@@ -830,7 +957,7 @@ gst_v4l2_video_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
 
   ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
 
-  switch (GST_EVENT_TYPE (event)) {
+  switch (type) {
     case GST_EVENT_FLUSH_START:
       /* The processing thread should stop now, wait for it */
       gst_pad_stop_task (decoder->srcpad);
@@ -898,16 +1025,19 @@ gst_v4l2_video_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
   gst_video_decoder_set_packetized (decoder, TRUE);
 
   self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
+      GST_OBJECT (GST_VIDEO_DECODER_SINK_PAD (self)),
       V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
       gst_v4l2_get_output, gst_v4l2_set_output, NULL);
   self->v4l2output->no_initial_format = TRUE;
   self->v4l2output->keep_aspect = FALSE;
 
   self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
+      GST_OBJECT (GST_VIDEO_DECODER_SRC_PAD (self)),
       V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
       gst_v4l2_get_input, gst_v4l2_set_input, NULL);
-  self->v4l2capture->no_initial_format = TRUE;
-  self->v4l2output->keep_aspect = FALSE;
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+  self->v4l2capture->tbm_output = TRUE;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
 }
 
 static void
@@ -926,12 +1056,6 @@ gst_v4l2_video_dec_class_init (GstV4l2VideoDecClass * klass)
   GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_dec_debug, "v4l2videodec", 0,
       "V4L2 Video Decoder");
 
-  gst_element_class_set_static_metadata (element_class,
-      "V4L2 Video Decoder",
-      "Codec/Decoder/Video",
-      "Decode video streams via V4L2 API",
-      "Nicolas Dufresne ");
-
   gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_dispose);
   gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finalize);
   gobject_class->set_property =
@@ -945,6 +1069,7 @@ gst_v4l2_video_dec_class_init (GstV4l2VideoDecClass * klass)
   video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_stop);
   video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finish);
   video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_flush);
+  video_decoder_class->drain = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_drain);
   video_decoder_class->set_format =
       GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_set_format);
   video_decoder_class->negotiate =
@@ -965,6 +1090,12 @@ gst_v4l2_video_dec_class_init (GstV4l2VideoDecClass * klass)
       GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_change_state);
 
   gst_v4l2_object_install_m2m_properties_helper (gobject_class);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+  g_object_class_install_property (gobject_class, PROP_TBM_OUTPUT,
+      g_param_spec_boolean ("tbm-output", "Enable TBM for output buffer",
+          "It works only for DMABUF mode.",
+          TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
 }
 
 static void
@@ -984,6 +1115,12 @@ gst_v4l2_video_dec_subclass_init (gpointer g_class, gpointer data)
       gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
          cdata->src_caps));
 
+  gst_element_class_set_static_metadata (element_class, cdata->longname,
+      "Codec/Decoder/Video/Hardware", cdata->description,
+      "Nicolas Dufresne ");
+
+  gst_caps_unref (cdata->sink_caps);
+  gst_caps_unref (cdata->src_caps);
   g_free (cdata);
 }
 
@@ -1000,36 +1137,126 @@ gst_v4l2_is_video_dec (GstCaps * sink_caps, GstCaps * src_caps)
 
   return ret;
 }
 
-gboolean
-gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
-    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
+static gchar *
+gst_v4l2_video_dec_set_metadata (GstStructure * s, GstV4l2VideoDecCData * cdata,
+    const gchar * basename)
 {
-  GTypeQuery type_query;
-  GTypeInfo type_info = { 0, };
-  GType type, subtype;
-  gchar *type_name;
-  GstV4l2VideoDecCData *cdata;
+  gchar *codec_name = NULL;
+  gchar *type_name = NULL;
+
+#define SET_META(codec) \
+G_STMT_START { \
+  cdata->longname = "V4L2 " codec " Decoder"; \
+  cdata->description = "Decodes " codec " streams via V4L2 API"; \
+  codec_name = g_ascii_strdown (codec, -1); \
+} G_STMT_END
+
+  if (gst_structure_has_name (s, "image/jpeg")) {
+    SET_META ("JPEG");
+  } else if (gst_structure_has_name (s, "video/mpeg")) {
+    gint mpegversion = 0;
+    gst_structure_get_int (s, "mpegversion", &mpegversion);
+
+    if (mpegversion == 2) {
+      SET_META ("MPEG2");
+    } else {
+      SET_META ("MPEG4");
+    }
+  } else if (gst_structure_has_name (s, "video/x-h263")) {
+    SET_META ("H263");
+  } else if (gst_structure_has_name (s, "video/x-fwht")) {
+    SET_META ("FWHT");
+  } else if (gst_structure_has_name (s, "video/x-h264")) {
+    SET_META ("H264");
+  } else if (gst_structure_has_name (s, "video/x-h265")) {
+    SET_META ("H265");
+  } else if (gst_structure_has_name (s, "video/x-wmv")) {
+    SET_META ("VC1");
+  } else if (gst_structure_has_name (s, "video/x-vp8")) {
+    SET_META ("VP8");
+  } else if (gst_structure_has_name (s, "video/x-vp9")) {
+    SET_META ("VP9");
+  } else if (gst_structure_has_name (s, "video/x-bayer")) {
+    SET_META ("BAYER");
+  } else if (gst_structure_has_name (s, "video/x-sonix")) {
+    SET_META ("SONIX");
+  } else if (gst_structure_has_name (s, "video/x-pwc1")) {
+    SET_META ("PWC1");
+  } else if (gst_structure_has_name (s, "video/x-pwc2")) {
+    SET_META ("PWC2");
+  } else {
+    /* This code should be kept in sync with the exposed CODEC type of format
+     * from gstv4l2object.c. This warning will only occur in case we forget
+     * to also add a format here. */
+    gchar *s_str = gst_structure_to_string (s);
+    g_warning ("Missing fixed name mapping for caps '%s', this is a GStreamer "
+        "bug, please report at https://bugs.gnome.org", s_str);
+    g_free (s_str);
+  }
 
-  cdata = g_new0 (GstV4l2VideoDecCData, 1);
-  cdata->device = g_strdup (device_path);
-  cdata->sink_caps = gst_caps_ref (sink_caps);
-  cdata->src_caps = gst_caps_ref (src_caps);
+  if (codec_name) {
+    type_name = g_strdup_printf ("v4l2%sdec", codec_name);
+    if (g_type_from_name (type_name) != 0) {
+      g_free (type_name);
+      type_name = g_strdup_printf ("v4l2%s%sdec", basename, codec_name);
+    }
-  type = gst_v4l2_video_dec_get_type ();
-  g_type_query (type, &type_query);
-  memset (&type_info, 0, sizeof (type_info));
-  type_info.class_size = type_query.class_size;
-  type_info.instance_size = type_query.instance_size;
-  type_info.class_init = gst_v4l2_video_dec_subclass_init;
-  type_info.class_data = cdata;
-  type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
+    g_free (codec_name);
+  }
 
-  type_name = g_strdup_printf ("v4l2%sdec", basename);
-  subtype = g_type_register_static (type, type_name, &type_info, 0);
+  return type_name;
+#undef SET_META
+}
 
-  gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype);
+void
+gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
+    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
+{
+  gint i;
+
+  for (i = 0; i < gst_caps_get_size (sink_caps); i++) {
+    GstV4l2VideoDecCData *cdata;
+    GstStructure *s;
+    GTypeQuery type_query;
+    GTypeInfo type_info = { 0, };
+    GType type, subtype;
+    gchar *type_name;
+
+    s = gst_caps_get_structure (sink_caps, i);
+
+    cdata = g_new0 (GstV4l2VideoDecCData, 1);
+    cdata->device = g_strdup (device_path);
+    cdata->sink_caps = gst_caps_new_empty ();
+    gst_caps_append_structure (cdata->sink_caps, gst_structure_copy (s));
+    cdata->src_caps = gst_caps_ref (src_caps);
+    type_name = gst_v4l2_video_dec_set_metadata (s, cdata, basename);
+
+    /* Skip over if we hit an unmapped type */
+    if (!type_name) {
+      g_free (cdata);
+      continue;
+    }
-  g_free (type_name);
+    type = gst_v4l2_video_dec_get_type ();
+    g_type_query (type, &type_query);
+    memset (&type_info, 0, sizeof (type_info));
+    type_info.class_size = type_query.class_size;
+    type_info.instance_size = type_query.instance_size;
+    type_info.class_init = gst_v4l2_video_dec_subclass_init;
+    type_info.class_data = cdata;
+    type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
+
+    subtype = g_type_register_static (type, type_name, &type_info, 0);
+#ifdef TIZEN_FEATURE_V4L2VIDEO_ADJ_RANK
+    if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY,
+            subtype))
+      GST_WARNING ("Failed to register plugin '%s'", type_name);
+#else
+    if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1,
+            subtype))
+      GST_WARNING ("Failed to register plugin '%s'", type_name);
+#endif
-  return TRUE;
+    g_free (type_name);
+  }
 }