*
* Copyright (C) 2010-2011 Splitted-Desktop Systems
* Author: Gwenole Beauchesne <gwenole.beauchesne@splitted-desktop.com>
- * Copyright (C) 2011-2013 Intel Corporation
+ * Copyright (C) 2011-2014 Intel Corporation
* Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
*
* This library is free software; you can redistribute it and/or
#include "gstvaapidecode.h"
#include "gstvaapipluginutil.h"
-#include "gstvaapivideocontext.h"
#include "gstvaapivideobuffer.h"
#if GST_CHECK_VERSION(1,1,0) && USE_GLX
#include "gstvaapivideometa_texture.h"
#include <gst/vaapi/gstvaapidecoder_mpeg2.h>
#include <gst/vaapi/gstvaapidecoder_mpeg4.h>
#include <gst/vaapi/gstvaapidecoder_vc1.h>
+#include <gst/vaapi/gstvaapidecoder_vp8.h>
#define GST_PLUGIN_NAME "vaapidecode"
#define GST_PLUGIN_DESC "A VA-API based video decoder"
+#define GST_VAAPI_DECODE_FLOW_PARSE_DATA GST_FLOW_CUSTOM_SUCCESS_2
+
GST_DEBUG_CATEGORY_STATIC(gst_debug_vaapidecode);
#define GST_CAT_DEFAULT gst_debug_vaapidecode
GST_CAPS_CODEC("video/x-h263")
GST_CAPS_CODEC("video/x-h264")
GST_CAPS_CODEC("video/x-wmv")
+ GST_CAPS_CODEC("video/x-vp8")
GST_CAPS_CODEC("image/jpeg")
;
GST_VIDEO_CAPS_MAKE_WITH_FEATURES(
GST_CAPS_FEATURE_MEMORY_VAAPI_SURFACE, "{ ENCODED, NV12, I420, YV12 }") ";"
GST_VIDEO_CAPS_MAKE_WITH_FEATURES(
- GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, "RGBA");
+ GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, "RGBA") ";"
+ GST_VIDEO_CAPS_MAKE("{ NV12, I420, YV12 }");
#else
GST_VAAPI_SURFACE_CAPS;
#endif
GST_PAD_ALWAYS,
GST_STATIC_CAPS(gst_vaapidecode_src_caps_str));
-/* GstImplementsInterface interface */
-#if !GST_CHECK_VERSION(1,0,0)
-static gboolean
-gst_vaapidecode_implements_interface_supported(
- GstImplementsInterface *iface,
- GType type
-)
-{
- return (type == GST_TYPE_VIDEO_CONTEXT);
-}
-
-static void
-gst_vaapidecode_implements_iface_init(GstImplementsInterfaceClass *iface)
-{
- iface->supported = gst_vaapidecode_implements_interface_supported;
-}
-#endif
-
-/* GstVideoContext interface */
-#if !GST_CHECK_VERSION(1,1,0)
-static void
-gst_vaapidecode_set_video_context(GstVideoContext *context, const gchar *type,
- const GValue *value)
-{
- GstVaapiDecode *decode = GST_VAAPIDECODE (context);
- gst_vaapi_set_display (type, value, &GST_VAAPI_PLUGIN_BASE_DISPLAY(decode));
-}
-
-static void
-gst_video_context_interface_init(GstVideoContextInterface *iface)
-{
- iface->set_context = gst_vaapidecode_set_video_context;
-}
-
-#define GstVideoContextClass GstVideoContextInterface
-#endif
-
G_DEFINE_TYPE_WITH_CODE(
GstVaapiDecode,
gst_vaapidecode,
GST_TYPE_VIDEO_DECODER,
-#if !GST_CHECK_VERSION(1,0,0)
- G_IMPLEMENT_INTERFACE(GST_TYPE_IMPLEMENTS_INTERFACE,
- gst_vaapidecode_implements_iface_init);
-#endif
-#if !GST_CHECK_VERSION(1,1,0)
- G_IMPLEMENT_INTERFACE(GST_TYPE_VIDEO_CONTEXT,
- gst_video_context_interface_init)
-#endif
- )
+ GST_VAAPI_PLUGIN_BASE_INIT_INTERFACES)
static gboolean
gst_vaapidecode_update_src_caps(GstVaapiDecode *decode,
const GstVideoCodecState *codec_state, gpointer user_data)
{
GstVaapiDecode * const decode = GST_VAAPIDECODE(user_data);
+ GstVideoDecoder * const vdec = GST_VIDEO_DECODER(decode);
g_assert(decode->decoder == decoder);
gst_vaapidecode_update_src_caps(decode, codec_state);
+ gst_video_decoder_negotiate(vdec);
}
static inline gboolean
return TRUE;
}
+#if GST_CHECK_VERSION(1,1,0)
+/* Changes the format/size of @info in place while preserving the
+ * stream attributes (interlace mode, flags, views, pixel-aspect-ratio
+ * and framerate) that gst_video_info_set_format() would otherwise
+ * reset to defaults. */
+static void
+gst_vaapidecode_video_info_change_format(GstVideoInfo *info,
+ GstVideoFormat format, guint width, guint height)
+{
+ GstVideoInfo vi = *info;
+
+ gst_video_info_set_format (info, format, width, height);
+
+ /* Restore the fields clobbered by gst_video_info_set_format() */
+ info->interlace_mode = vi.interlace_mode;
+ info->flags = vi.flags;
+ info->views = vi.views;
+ info->par_n = vi.par_n;
+ info->par_d = vi.par_d;
+ info->fps_n = vi.fps_n;
+ info->fps_d = vi.fps_d;
+}
+#endif
+
static gboolean
gst_vaapidecode_update_src_caps(GstVaapiDecode *decode,
const GstVideoCodecState *ref_state)
GstVideoDecoder * const vdec = GST_VIDEO_DECODER(decode);
GstVideoCodecState *state;
GstVideoInfo *vi, vis;
+#if GST_CHECK_VERSION(1,1,0)
+ GstCapsFeatures *features = NULL;
+ GstVaapiCapsFeature feature;
+
+ feature = gst_vaapi_find_preferred_caps_feature(
+ GST_VIDEO_DECODER_SRC_PAD(vdec),
+ GST_VIDEO_INFO_FORMAT(&ref_state->info));
+#endif
state = gst_video_decoder_set_output_state(vdec,
GST_VIDEO_INFO_FORMAT(&ref_state->info),
#if GST_CHECK_VERSION(1,1,0)
vis = *vi;
- if (GST_VIDEO_INFO_FORMAT(vi) == GST_VIDEO_FORMAT_ENCODED) {
- /* XXX: this is a workaround until auto-plugging is fixed when
- format=ENCODED + memory:VASurface caps feature are provided.
- Meanwhile, providing a random format here works but this is
- a terribly wrong thing per se. */
- gst_video_info_set_format(&vis, GST_VIDEO_FORMAT_NV12,
+ switch (feature) {
+ case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META:
+ gst_vaapidecode_video_info_change_format(&vis, GST_VIDEO_FORMAT_RGBA,
GST_VIDEO_INFO_WIDTH(vi), GST_VIDEO_INFO_HEIGHT(vi));
+ features = gst_caps_features_new(
+ GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, NULL);
+ break;
+ default:
+ if (GST_VIDEO_INFO_FORMAT(vi) == GST_VIDEO_FORMAT_ENCODED) {
+ /* XXX: this is a workaround until auto-plugging is fixed when
+ format=ENCODED + memory:VASurface caps feature are provided.
+ Meanwhile, providing a random format here works but this is
+ a terribly wrong thing per se. */
+ gst_vaapidecode_video_info_change_format(&vis, GST_VIDEO_FORMAT_NV12,
+ GST_VIDEO_INFO_WIDTH(vi), GST_VIDEO_INFO_HEIGHT(vi));
+#if GST_CHECK_VERSION(1,3,0)
+ if (feature == GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE)
+ features = gst_caps_features_new(
+ GST_CAPS_FEATURE_MEMORY_VAAPI_SURFACE, NULL);
+#endif
+ }
+ break;
}
state->caps = gst_video_info_to_caps(&vis);
+ if (features)
+ gst_caps_set_features(state->caps, 0, features);
#else
/* XXX: gst_video_info_to_caps() from GStreamer 0.10 does not
reconstruct suitable caps for "encoded" video formats */
/* Suspend the task if an error occurred */
if (ret != GST_VIDEO_DECODER_FLOW_NEED_DATA)
- gst_pad_pause_task(decode->srcpad);
+ gst_pad_pause_task(GST_VAAPI_PLUGIN_BASE_SRC_PAD(decode));
}
static gboolean
decode->decoder_finish = TRUE;
g_cond_wait(&decode->decoder_finish_done, &decode->decoder_mutex);
g_mutex_unlock(&decode->decoder_mutex);
- gst_pad_stop_task(decode->srcpad);
+ gst_pad_stop_task(GST_VAAPI_PLUGIN_BASE_SRC_PAD(decode));
GST_VIDEO_DECODER_STREAM_LOCK(vdec);
return ret;
}
GstVideoInfo vi;
guint size, min, max;
gboolean need_pool, update_pool;
+ gboolean has_video_meta = FALSE;
+ GstVideoCodecState *state;
+#if GST_CHECK_VERSION(1,1,0) && USE_GLX
+ GstCapsFeatures *features, *features2;
+#endif
gst_query_parse_allocation(query, &caps, &need_pool);
if (!caps)
goto error_no_caps;
+ state = gst_video_decoder_get_output_state(vdec);
+
+ decode->has_texture_upload_meta = FALSE;
+ has_video_meta = gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
+#if GST_CHECK_VERSION(1,1,0) && USE_GLX
+ if (has_video_meta)
+ decode->has_texture_upload_meta = gst_query_find_allocation_meta(query,
+ GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, NULL);
+
+ features = gst_caps_get_features(state->caps, 0);
+ features2 = gst_caps_features_new(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, NULL);
+
+ /* Update src caps if feature is not handled downstream */
+ if (!decode->has_texture_upload_meta &&
+ gst_caps_features_is_equal(features, features2))
+ gst_vaapidecode_update_src_caps (decode, state);
+ gst_caps_features_free(features2);
+#endif
+
+ gst_video_codec_state_unref(state);
+
gst_video_info_init(&vi);
gst_video_info_from_caps(&vi, caps);
if (GST_VIDEO_INFO_FORMAT(&vi) == GST_VIDEO_FORMAT_ENCODED)
GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
GST_INFO("no pool or doesn't support GstVaapiVideoMeta, "
"making new pool");
+ if (pool)
+ gst_object_unref(pool);
pool = gst_vaapi_video_buffer_pool_new(
GST_VAAPI_PLUGIN_BASE_DISPLAY(decode));
if (!pool)
gst_buffer_pool_set_config(pool, config);
}
- decode->has_texture_upload_meta = FALSE;
- if (gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL)) {
+ if (has_video_meta) {
config = gst_buffer_pool_get_config(pool);
gst_buffer_pool_config_add_option(config,
GST_BUFFER_POOL_OPTION_VIDEO_META);
#if GST_CHECK_VERSION(1,1,0) && USE_GLX
- decode->has_texture_upload_meta = gst_query_find_allocation_meta(query,
- GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, NULL);
if (decode->has_texture_upload_meta)
gst_buffer_pool_config_add_option(config,
GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META);
}
#endif
-#if GST_CHECK_VERSION(1,1,0)
-static void
-gst_vaapidecode_set_context(GstElement *element, GstContext *context)
-{
- GstVaapiDecode * const decode = GST_VAAPIDECODE(element);
- GstVaapiDisplay *display = NULL;
-
- if (gst_vaapi_video_context_get_display(context, &display)) {
- GST_INFO_OBJECT(element, "set display %p", display);
- GST_VAAPI_PLUGIN_BASE_DISPLAY_REPLACE(decode, display);
- gst_vaapi_display_unref(display);
- }
-}
-#endif
-
static inline gboolean
gst_vaapidecode_ensure_display(GstVaapiDecode *decode)
{
break;
case GST_VAAPI_CODEC_H264:
decode->decoder = gst_vaapi_decoder_h264_new(dpy, caps);
+
+ /* Set the stream buffer alignment for better optimizations */
+ if (decode->decoder && caps) {
+ GstStructure * const structure = gst_caps_get_structure(caps, 0);
+ const gchar *str = NULL;
+
+ if ((str = gst_structure_get_string(structure, "alignment"))) {
+ GstVaapiStreamAlignH264 alignment;
+ if (g_strcmp0(str, "au") == 0)
+ alignment = GST_VAAPI_STREAM_ALIGN_H264_AU;
+ else if (g_strcmp0(str, "nal") == 0)
+ alignment = GST_VAAPI_STREAM_ALIGN_H264_NALU;
+ else
+ alignment = GST_VAAPI_STREAM_ALIGN_H264_NONE;
+ gst_vaapi_decoder_h264_set_alignment(
+ GST_VAAPI_DECODER_H264(decode->decoder), alignment);
+ }
+ }
break;
case GST_VAAPI_CODEC_WMV3:
case GST_VAAPI_CODEC_VC1:
decode->decoder = gst_vaapi_decoder_jpeg_new(dpy, caps);
break;
#endif
+#if USE_VP8_DECODER
+ case GST_VAAPI_CODEC_VP8:
+ decode->decoder = gst_vaapi_decoder_vp8_new(dpy, caps);
+ break;
+#endif
default:
decode->decoder = NULL;
break;
gst_vaapi_decoder_state_changed, decode);
decode->decoder_caps = gst_caps_ref(caps);
- return gst_pad_start_task(decode->srcpad,
+ return gst_pad_start_task(GST_VAAPI_PLUGIN_BASE_SRC_PAD(decode),
(GstTaskFunction)gst_vaapidecode_decode_loop, decode, NULL);
}
static void
gst_vaapidecode_destroy(GstVaapiDecode *decode)
{
- gst_pad_stop_task(decode->srcpad);
+ gst_pad_stop_task(GST_VAAPI_PLUGIN_BASE_SRC_PAD(decode));
gst_vaapi_decoder_replace(&decode->decoder, NULL);
gst_caps_replace(&decode->decoder_caps, NULL);
gst_vaapidecode_release(decode);
gst_vaapi_decoder_flush(decode->decoder);
GST_VIDEO_DECODER_STREAM_UNLOCK(vdec);
- gst_pad_stop_task(decode->srcpad);
+ gst_pad_stop_task(GST_VAAPI_PLUGIN_BASE_SRC_PAD(decode));
GST_VIDEO_DECODER_STREAM_LOCK(vdec);
decode->decoder_loop_status = GST_FLOW_OK;
static gboolean
gst_vaapidecode_set_format(GstVideoDecoder *vdec, GstVideoCodecState *state)
{
+ GstVaapiPluginBase * const plugin = GST_VAAPI_PLUGIN_BASE(vdec);
GstVaapiDecode * const decode = GST_VAAPIDECODE(vdec);
if (!gst_vaapidecode_update_sink_caps(decode, state->caps))
return FALSE;
if (!gst_vaapidecode_update_src_caps(decode, state))
return FALSE;
+ if (!gst_video_decoder_negotiate(vdec))
+ return FALSE;
+ if (!gst_vaapi_plugin_base_set_caps(plugin, decode->sinkpad_caps,
+ decode->srcpad_caps))
+ return FALSE;
if (!gst_vaapidecode_reset_full(decode, decode->sinkpad_caps, FALSE))
return FALSE;
return TRUE;
}
static GstFlowReturn
-gst_vaapidecode_parse(GstVideoDecoder *vdec,
+gst_vaapidecode_parse_frame(GstVideoDecoder *vdec,
GstVideoCodecFrame *frame, GstAdapter *adapter, gboolean at_eos)
{
GstVaapiDecode * const decode = GST_VAAPIDECODE(vdec);
decode->current_frame_size = 0;
}
else
- ret = GST_FLOW_OK;
+ ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
break;
case GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA:
ret = GST_VIDEO_DECODER_FLOW_NEED_DATA;
return ret;
}
+/* GstVideoDecoder::parse() vmethod. Repeatedly invokes
+ * gst_vaapidecode_parse_frame() while it reports the internal
+ * GST_VAAPI_DECODE_FLOW_PARSE_DATA status (i.e. a unit was consumed
+ * but more parsing is possible), and returns the first other flow
+ * status to the base class. */
+static GstFlowReturn
+gst_vaapidecode_parse(GstVideoDecoder *vdec,
+ GstVideoCodecFrame *frame, GstAdapter *adapter, gboolean at_eos)
+{
+ GstFlowReturn ret;
+
+ do {
+ ret = gst_vaapidecode_parse_frame(vdec, frame, adapter, at_eos);
+ } while (ret == GST_VAAPI_DECODE_FLOW_PARSE_DATA);
+ return ret;
+}
+
+/* GstElement::change_state() vmethod. Stops the decode-loop task
+ * running on the source pad when going PAUSED -> READY, before
+ * chaining up to the parent class so the task is not left running
+ * across deactivation. */
+static GstStateChangeReturn
+gst_vaapidecode_change_state (GstElement * element, GstStateChange transition)
+{
+ GstVaapiDecode * const decode = GST_VAAPIDECODE(element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_pad_stop_task(GST_VAAPI_PLUGIN_BASE_SRC_PAD(decode));
+ break;
+ default:
+ break;
+ }
+ return GST_ELEMENT_CLASS(gst_vaapidecode_parent_class)->change_state(
+ element, transition);
+}
+
static void
gst_vaapidecode_class_init(GstVaapiDecodeClass *klass)
{
object_class->finalize = gst_vaapidecode_finalize;
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR(gst_vaapidecode_change_state);
+
vdec_class->open = GST_DEBUG_FUNCPTR(gst_vaapidecode_open);
vdec_class->close = GST_DEBUG_FUNCPTR(gst_vaapidecode_close);
vdec_class->set_format = GST_DEBUG_FUNCPTR(gst_vaapidecode_set_format);
GST_DEBUG_FUNCPTR(gst_vaapidecode_decide_allocation);
#endif
-#if GST_CHECK_VERSION(1,1,0)
- element_class->set_context = GST_DEBUG_FUNCPTR(gst_vaapidecode_set_context);
-#endif
-
gst_element_class_set_static_metadata(element_class,
"VA-API decoder",
"Codec/Decoder/Video",
static gboolean
gst_vaapidecode_ensure_allowed_caps(GstVaapiDecode *decode)
{
- GstCaps *decode_caps;
- guint i, n_decode_caps;
+ GstCaps *caps, *allowed_caps;
+ GArray *profiles;
+ guint i;
if (decode->allowed_caps)
return TRUE;
if (!gst_vaapidecode_ensure_display(decode))
goto error_no_display;
- decode_caps = gst_vaapi_display_get_decode_caps(
+ profiles = gst_vaapi_display_get_decode_profiles(
GST_VAAPI_PLUGIN_BASE_DISPLAY(decode));
- if (!decode_caps)
- goto error_no_decode_caps;
- n_decode_caps = gst_caps_get_size(decode_caps);
+ if (!profiles)
+ goto error_no_profiles;
- decode->allowed_caps = gst_caps_new_empty();
- if (!decode->allowed_caps)
+ allowed_caps = gst_caps_new_empty();
+ if (!allowed_caps)
goto error_no_memory;
- for (i = 0; i < n_decode_caps; i++) {
- GstStructure *structure;
- structure = gst_caps_get_structure(decode_caps, i);
- if (!structure)
+ for (i = 0; i < profiles->len; i++) {
+ const GstVaapiProfile profile =
+ g_array_index(profiles, GstVaapiProfile, i);
+ const gchar *media_type_name;
+
+ media_type_name = gst_vaapi_profile_get_media_type_name(profile);
+ if (!media_type_name)
continue;
- structure = gst_structure_copy(structure);
- if (!structure)
+
+ caps = gst_caps_from_string(media_type_name);
+ if (!caps)
continue;
- gst_structure_remove_field(structure, "profile");
- decode->allowed_caps =
- gst_caps_merge_structure(decode->allowed_caps, structure);
+ allowed_caps = gst_caps_merge(allowed_caps, caps);
}
+ decode->allowed_caps = allowed_caps;
- gst_caps_unref(decode_caps);
+ g_array_unref(profiles);
return TRUE;
/* ERRORS */
GST_ERROR("failed to retrieve VA display");
return FALSE;
}
-error_no_decode_caps:
+error_no_profiles:
{
- GST_ERROR("failed to retrieve VA decode caps");
+ GST_ERROR("failed to retrieve VA decode profiles");
return FALSE;
}
error_no_memory:
{
GST_ERROR("failed to allocate allowed-caps set");
- gst_caps_unref(decode_caps);
+ g_array_unref(profiles);
return FALSE;
}
}
{
GstVaapiDecode * const decode =
GST_VAAPIDECODE(gst_pad_get_parent_element(pad));
+ GstVaapiPluginBase * const plugin = GST_VAAPI_PLUGIN_BASE(decode);
gboolean res;
GST_INFO_OBJECT(decode, "query type %s", GST_QUERY_TYPE_NAME(query));
- if (gst_vaapi_reply_to_query(query, GST_VAAPI_PLUGIN_BASE_DISPLAY(decode))) {
- GST_DEBUG("sharing display %p", GST_VAAPI_PLUGIN_BASE_DISPLAY(decode));
+ if (gst_vaapi_reply_to_query(query, plugin->display)) {
+ GST_DEBUG("sharing display %p", plugin->display);
res = TRUE;
}
else if (GST_PAD_IS_SINK(pad)) {
}
#endif
default:
- res = GST_PAD_QUERY_FUNCTION_CALL(decode->sinkpad_query,
- decode->sinkpad, parent, query);
+ res = GST_PAD_QUERY_FUNCTION_CALL(plugin->sinkpad_query, pad,
+ parent, query);
break;
}
}
else
- res = GST_PAD_QUERY_FUNCTION_CALL(decode->srcpad_query,
- decode->srcpad, parent, query);
+ res = GST_PAD_QUERY_FUNCTION_CALL(plugin->srcpad_query, pad,
+ parent, query);
gst_object_unref(decode);
return res;
gst_vaapidecode_init(GstVaapiDecode *decode)
{
GstVideoDecoder * const vdec = GST_VIDEO_DECODER(decode);
+ GstPad *pad;
gst_vaapi_plugin_base_init(GST_VAAPI_PLUGIN_BASE(decode), GST_CAT_DEFAULT);
gst_video_decoder_set_packetized(vdec, FALSE);
/* Pad through which data comes in to the element */
- decode->sinkpad = GST_VIDEO_DECODER_SINK_PAD(vdec);
- decode->sinkpad_query = GST_PAD_QUERYFUNC(decode->sinkpad);
- gst_pad_set_query_function(decode->sinkpad, gst_vaapidecode_query);
+ pad = GST_VAAPI_PLUGIN_BASE_SINK_PAD(decode);
+ gst_pad_set_query_function(pad, gst_vaapidecode_query);
#if !GST_CHECK_VERSION(1,0,0)
- gst_pad_set_getcaps_function(decode->sinkpad, gst_vaapidecode_get_caps);
+ gst_pad_set_getcaps_function(pad, gst_vaapidecode_get_caps);
#endif
/* Pad through which data goes out of the element */
- decode->srcpad = GST_VIDEO_DECODER_SRC_PAD(vdec);
- decode->srcpad_query = GST_PAD_QUERYFUNC(decode->srcpad);
- gst_pad_set_query_function(decode->srcpad, gst_vaapidecode_query);
+ pad = GST_VAAPI_PLUGIN_BASE_SRC_PAD(decode);
+ gst_pad_set_query_function(pad, gst_vaapidecode_query);
}