From afe87231968ba3a58356d0211590ccb93aade242 Mon Sep 17 00:00:00 2001
From: "U. Artie Eoff"
Date: Tue, 26 Apr 2022 12:06:17 -0400
Subject: [PATCH] va: vpp: add compositor

New implementation of a VA-API compositor with blending support.

Various chunks of code were taken from vavpp.

Part-of:
---
 .../gst-plugins-bad/sys/va/gstvacompositor.c     | 1510 ++++++++++++++++++++
 .../gst-plugins-bad/sys/va/gstvacompositor.h     |   36 +
 subprojects/gst-plugins-bad/sys/va/gstvafilter.c |  181 ++-
 subprojects/gst-plugins-bad/sys/va/gstvafilter.h |   35 +
 subprojects/gst-plugins-bad/sys/va/meson.build   |    1 +
 5 files changed, 1754 insertions(+), 9 deletions(-)
 create mode 100644 subprojects/gst-plugins-bad/sys/va/gstvacompositor.c
 create mode 100644 subprojects/gst-plugins-bad/sys/va/gstvacompositor.h

diff --git a/subprojects/gst-plugins-bad/sys/va/gstvacompositor.c b/subprojects/gst-plugins-bad/sys/va/gstvacompositor.c
new file mode 100644
index 0000000..c2ce8dc
--- /dev/null
+++ b/subprojects/gst-plugins-bad/sys/va/gstvacompositor.c
@@ -0,0 +1,1510 @@
+/* GStreamer
+ * Copyright (C) 2022 Intel Corporation
+ *     Author: U. Artie Eoff
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-vacompositor
+ * @title: vacompositor
+ * @short_description: A VA-API based video compositing element
+ *
+ * A video compositing element that uses VA-API VPP to accelerate the
+ * composition, blending, and scaling of multiple inputs into one output.
+ *
+ * ## Example launch line
+ * ```
+ * gst-launch-1.0 videotestsrc \
+ *   ! "video/x-raw,format=(string)NV12,width=640,height=480" \
+ *   ! tee name=testsrc ! queue ! vacompositor name=comp \
+ *       sink_1::width=160 sink_1::height=120 sink_1::xpos=480 \
+ *       sink_1::ypos=360 sink_1::alpha=0.75 \
+ *   ! autovideosink testsrc. ! queue ! comp.
+ * ``` + * + * Since: 1.22 + * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstvacompositor.h" + +#include +#include +#include + +#include "gstvacaps.h" +#include "gstvadisplay_priv.h" +#include "gstvafilter.h" + +GST_DEBUG_CATEGORY_STATIC (gst_va_compositor_debug); +#define GST_CAT_DEFAULT gst_va_compositor_debug + +struct _GstVaCompositorPad +{ + GstVideoAggregatorPad parent; + + GstBufferPool *pool; + + gint xpos; + gint ypos; + gint width; + gint height; + gdouble alpha; +}; + +enum +{ + PROP_PAD_0, + PROP_PAD_XPOS, + PROP_PAD_YPOS, + PROP_PAD_WIDTH, + PROP_PAD_HEIGHT, + PROP_PAD_ALPHA, +}; + +#define DEFAULT_PAD_XPOS 0 +#define DEFAULT_PAD_YPOS 0 +#define DEFAULT_PAD_WIDTH 0 +#define DEFAULT_PAD_HEIGHT 0 +#define DEFAULT_PAD_ALPHA 1.0 + +G_DEFINE_TYPE (GstVaCompositorPad, gst_va_compositor_pad, + GST_TYPE_VIDEO_AGGREGATOR_PAD); + +static void +gst_va_compositor_pad_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstVaCompositorPad *const self = GST_VA_COMPOSITOR_PAD (object); + + switch (prop_id) { + case PROP_PAD_XPOS: + g_value_set_int (value, self->xpos); + break; + case PROP_PAD_YPOS: + g_value_set_int (value, self->ypos); + break; + case PROP_PAD_WIDTH: + g_value_set_int (value, self->width); + break; + case PROP_PAD_HEIGHT: + g_value_set_int (value, self->height); + break; + case PROP_PAD_ALPHA: + g_value_set_double (value, self->alpha); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_va_compositor_pad_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstVaCompositorPad *const self = GST_VA_COMPOSITOR_PAD (object); + + GST_OBJECT_LOCK (object); + switch (prop_id) { + case PROP_PAD_XPOS: + self->xpos = g_value_get_int (value); + break; + case PROP_PAD_YPOS: + self->ypos = g_value_get_int (value); + break; + case PROP_PAD_WIDTH: + self->width = g_value_get_int (value); + break; + case PROP_PAD_HEIGHT: + self->height = g_value_get_int (value); + break; + case PROP_PAD_ALPHA: + self->alpha = g_value_get_double (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } + GST_OBJECT_UNLOCK (object); +} + +static void +gst_va_compositor_pad_finalize (GObject * object) +{ + GstVaCompositorPad *const self = GST_VA_COMPOSITOR_PAD (object); + + if (self->pool) { + gst_buffer_pool_set_active (self->pool, FALSE); + gst_clear_object (&self->pool); + } + + G_OBJECT_CLASS (gst_va_compositor_pad_parent_class)->finalize (object); +} + +static void +gst_va_compositor_pad_init (GstVaCompositorPad * self) +{ + self->pool = NULL; + self->xpos = DEFAULT_PAD_XPOS; + self->ypos = DEFAULT_PAD_YPOS; + self->width = DEFAULT_PAD_WIDTH; + self->height = DEFAULT_PAD_HEIGHT; + self->alpha = DEFAULT_PAD_ALPHA; +} + +static void +gst_va_compositor_pad_class_init (GstVaCompositorPadClass * klass) +{ + GObjectClass *const gobject_class = G_OBJECT_CLASS (klass); + GstVideoAggregatorPadClass *const vaggpad_class = + GST_VIDEO_AGGREGATOR_PAD_CLASS (klass); + + gobject_class->finalize = gst_va_compositor_pad_finalize; + gobject_class->get_property = gst_va_compositor_pad_get_property; + gobject_class->set_property = gst_va_compositor_pad_set_property; + + g_object_class_install_property (gobject_class, PROP_PAD_XPOS, + g_param_spec_int ("xpos", "X Position", "X Position of the picture", + G_MININT, G_MAXINT, DEFAULT_PAD_XPOS, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | 
G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_PAD_YPOS, + g_param_spec_int ("ypos", "Y Position", "Y Position of the picture", + G_MININT, G_MAXINT, DEFAULT_PAD_YPOS, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_PAD_WIDTH, + g_param_spec_int ("width", "Width", + "Width of the picture (0, to use the width of the input frame)", + 0, G_MAXINT, DEFAULT_PAD_WIDTH, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_PAD_HEIGHT, + g_param_spec_int ("height", "Height", + "Height of the picture (0, to use the height of the input frame)", + 0, G_MAXINT, DEFAULT_PAD_HEIGHT, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_PAD_ALPHA, + g_param_spec_double ("alpha", "Alpha", "Alpha of the picture", 0.0, 1.0, + DEFAULT_PAD_ALPHA, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS)); + + /* Don't use mapped video frames. Handle video buffers directly */ + vaggpad_class->prepare_frame = NULL; + vaggpad_class->clean_frame = NULL; +} + +#define GST_VA_COMPOSITOR(obj) ((GstVaCompositor *) obj) +#define GST_VA_COMPOSITOR_CLASS(klass) ((GstVaCompositorClass *) klass) +#define GST_VA_COMPOSITOR_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS ((obj), G_TYPE_FROM_INSTANCE (obj), GstVaCompositorClass)) + +typedef struct _GstVaCompositor GstVaCompositor; +typedef struct _GstVaCompositorClass GstVaCompositorClass; + +struct _GstVaCompositorClass +{ + GstVideoAggregatorClass parent_class; + + /*< private > */ + gchar *render_device_path; +}; + +struct _GstVaCompositor +{ + GstVideoAggregator parent; + + GstVaDisplay *display; + GstVaFilter *filter; + + GstVideoInfo other_info; /* downstream info */ + GstBufferPool *other_pool; /* downstream pool */ + + guint32 scale_method; +}; + +struct CData +{ + gchar *render_device_path; + gchar *description; +}; + +enum +{ + PROP_DEVICE_PATH = 1, + PROP_SCALE_METHOD, + N_PROPERTIES +}; + +static GParamSpec *properties[N_PROPERTIES]; +static GstElementClass *parent_class = NULL; + +static void +gst_va_compositor_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (object); + + switch (prop_id) { + case PROP_SCALE_METHOD: + { + GST_OBJECT_LOCK (object); + self->scale_method = g_value_get_enum (value); + GST_OBJECT_UNLOCK (object); + break; + } + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +gst_va_compositor_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (object); + + switch (prop_id) { + case PROP_DEVICE_PATH: + { + if (!(self->display && GST_IS_VA_DISPLAY_DRM (self->display))) { + g_value_set_string (value, NULL); + return; + } + g_object_get_property (G_OBJECT (self->display), "path", value); + break; + } + case PROP_SCALE_METHOD: + { + GST_OBJECT_LOCK (object); + g_value_set_enum (value, self->scale_method); + GST_OBJECT_UNLOCK (object); + break; + } + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static gboolean +gst_va_compositor_start (GstAggregator * agg) +{ + GstElement *const element = GST_ELEMENT (agg); + GstVaCompositor *const self = GST_VA_COMPOSITOR (agg); + GstVaCompositorClass *const klass = GST_VA_COMPOSITOR_GET_CLASS 
(agg); + + if (!gst_va_ensure_element_data (element, klass->render_device_path, + &self->display)) + return FALSE; + g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_DEVICE_PATH]); + + self->filter = gst_va_filter_new (self->display); + if (!gst_va_filter_open (self->filter)) + return FALSE; + + return GST_AGGREGATOR_CLASS (parent_class)->start (agg); +} + +static gboolean +gst_va_compositor_stop (GstAggregator * agg) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (agg); + + gst_va_filter_close (self->filter); + gst_clear_object (&self->filter); + gst_clear_object (&self->display); + g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_DEVICE_PATH]); + + return GST_AGGREGATOR_CLASS (parent_class)->stop (agg); +} + +static void +gst_va_compositor_dispose (GObject * object) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (object); + + if (self->other_pool) { + gst_buffer_pool_set_active (self->other_pool, FALSE); + gst_clear_object (&self->other_pool); + } + + gst_clear_object (&self->display); + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static GstPad * +gst_va_compositor_request_new_pad (GstElement * element, GstPadTemplate * templ, + const gchar * req_name, const GstCaps * caps) +{ + GstPad *newpad = GST_PAD (GST_ELEMENT_CLASS + (parent_class)->request_new_pad (element, templ, req_name, caps)); + + if (!newpad) + GST_DEBUG_OBJECT (element, "could not create/add pad"); + else + gst_child_proxy_child_added (GST_CHILD_PROXY (element), G_OBJECT (newpad), + GST_OBJECT_NAME (newpad)); + + return newpad; +} + +static void +gst_va_compositor_release_pad (GstElement * element, GstPad * pad) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (element); + + gst_child_proxy_child_removed (GST_CHILD_PROXY (self), G_OBJECT (pad), + GST_OBJECT_NAME (pad)); + + GST_ELEMENT_CLASS (parent_class)->release_pad (element, pad); +} + +static void +gst_va_compositor_set_context (GstElement * element, GstContext * context) +{ + GstVaDisplay *old_display, *new_display; + GstVaCompositor *const self = GST_VA_COMPOSITOR (element); + GstVaCompositorClass *const klass = GST_VA_COMPOSITOR_GET_CLASS (self); + gboolean ret; + + old_display = self->display ? gst_object_ref (self->display) : NULL; + ret = gst_va_handle_set_context (element, context, klass->render_device_path, + &self->display); + new_display = self->display ? 
gst_object_ref (self->display) : NULL; + + if (!ret + || (old_display && new_display && old_display != new_display + && self->filter)) { + GST_ELEMENT_WARNING (element, RESOURCE, BUSY, + ("Can't replace VA display while operating"), (NULL)); + } + + gst_clear_object (&old_display); + gst_clear_object (&new_display); + + GST_ELEMENT_CLASS (parent_class)->set_context (element, context); +} + +static gboolean +_handle_context_query (GstVaCompositor * const self, GstQuery * query) +{ + GstVaDisplay *display = NULL; + gboolean ret = FALSE; + + gst_object_replace ((GstObject **) & display, (GstObject *) self->display); + ret = gst_va_handle_context_query (GST_ELEMENT_CAST (self), query, display); + gst_clear_object (&display); + + return ret; +} + +static GstCaps * +gst_va_compositor_sink_getcaps (GstPad * pad, GstCaps * filter) +{ + GstCaps *sinkcaps; + GstCaps *template_caps; + GstCaps *filtered_caps; + GstCaps *returned_caps; + + template_caps = gst_pad_get_pad_template_caps (pad); + + sinkcaps = gst_pad_get_current_caps (pad); + if (!sinkcaps) { + sinkcaps = gst_caps_ref (template_caps); + } else { + sinkcaps = gst_caps_merge (sinkcaps, gst_caps_ref (template_caps)); + } + + if (filter) { + filtered_caps = gst_caps_intersect (sinkcaps, filter); + gst_caps_unref (sinkcaps); + } else { + filtered_caps = sinkcaps; + } + + returned_caps = gst_caps_intersect (filtered_caps, template_caps); + + gst_caps_unref (template_caps); + gst_caps_unref (filtered_caps); + + GST_DEBUG_OBJECT (pad, "returning %" GST_PTR_FORMAT, returned_caps); + + return returned_caps; +} + +static gboolean +gst_va_compositor_sink_acceptcaps (GstPad * pad, GstCaps * caps) +{ + gboolean ret; + GstCaps *template_caps; + + template_caps = gst_pad_get_pad_template_caps (pad); + template_caps = gst_caps_make_writable (template_caps); + + ret = gst_caps_can_intersect (caps, template_caps); + GST_DEBUG_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT, + (ret ? 
"" : "not "), caps); + gst_caps_unref (template_caps); + + return ret; +} + +static gboolean +gst_va_compositor_sink_query (GstAggregator * agg, GstAggregatorPad * pad, + GstQuery * query) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (agg); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONTEXT: + { + if (_handle_context_query (self, query)) + return TRUE; + break; + } + case GST_QUERY_CAPS: + { + GstCaps *filter, *caps; + + gst_query_parse_caps (query, &filter); + caps = gst_va_compositor_sink_getcaps (GST_PAD (pad), filter); + gst_query_set_caps_result (query, caps); + gst_caps_unref (caps); + return TRUE; + } + case GST_QUERY_ACCEPT_CAPS: + { + GstCaps *caps; + gboolean ret; + + gst_query_parse_accept_caps (query, &caps); + ret = gst_va_compositor_sink_acceptcaps (GST_PAD (pad), caps); + gst_query_set_accept_caps_result (query, ret); + return TRUE; + } + default: + break; + } + + return GST_AGGREGATOR_CLASS (parent_class)->sink_query (agg, pad, query); +} + +static gboolean +gst_va_compositor_src_query (GstAggregator * agg, GstQuery * query) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (agg); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONTEXT: + if (_handle_context_query (self, query)) + return TRUE; + break; + default: + break; + } + + return GST_AGGREGATOR_CLASS (parent_class)->src_query (agg, query); +} + +static GstAllocator * +gst_va_compositor_allocator_from_caps (GstVaCompositor * const self, + GstCaps * caps) +{ + GstAllocator *allocator = NULL; + + if (gst_caps_is_dmabuf (caps)) { + allocator = gst_va_dmabuf_allocator_new (self->display); + } else { + GArray *surface_formats = gst_va_filter_get_surface_formats (self->filter); + allocator = gst_va_allocator_new (self->display, surface_formats); + } + + return allocator; +} + +/* Answer upstream allocation query. 
+ */
+static gboolean
+gst_va_compositor_propose_allocation (GstAggregator * agg,
+    GstAggregatorPad * aggpad, GstQuery * decide_query, GstQuery * query)
+{
+  GstVaCompositor *const self = GST_VA_COMPOSITOR (agg);
+  GstAllocator *allocator = NULL;
+  GstAllocationParams params = { 0, };
+  GstBufferPool *pool;
+  GstCaps *caps;
+  GstVideoInfo info;
+  gboolean update_allocator = FALSE;
+  guint size, usage_hint = VA_SURFACE_ATTRIB_USAGE_HINT_GENERIC;
+
+  gst_query_parse_allocation (query, &caps, NULL);
+
+  if (!caps)
+    return FALSE;
+
+  if (!gst_video_info_from_caps (&info, caps))
+    return FALSE;
+
+  if (gst_query_get_n_allocation_pools (query) > 0)
+    return TRUE;
+
+  size = GST_VIDEO_INFO_SIZE (&info);
+
+  if (gst_query_get_n_allocation_params (query) > 0) {
+    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+    if (!GST_IS_VA_DMABUF_ALLOCATOR (allocator)
+        && !GST_IS_VA_ALLOCATOR (allocator))
+      gst_clear_object (&allocator);
+    update_allocator = TRUE;
+  } else {
+    gst_allocation_params_init (&params);
+  }
+
+  if (!allocator) {
+    if (!(allocator = gst_va_compositor_allocator_from_caps (self, caps)))
+      return FALSE;
+  }
+
+  /* Now we have a VA-based allocator */
+
+  pool = gst_va_pool_new_with_config (caps, size, 1, 0, usage_hint,
+      GST_VA_FEATURE_AUTO, allocator, &params);
+  if (!pool) {
+    gst_object_unref (allocator);
+    goto config_failed;
+  }
+
+  if (update_allocator)
+    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
+  else
+    gst_query_add_allocation_param (query, allocator, &params);
+
+  gst_query_add_allocation_pool (query, pool, size, 1, 0);
+
+  GST_DEBUG_OBJECT (self,
+      "proposing %" GST_PTR_FORMAT " with allocator %" GST_PTR_FORMAT,
+      pool, allocator);
+
+  gst_object_unref (allocator);
+  gst_object_unref (pool);
+
+  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
+
+  return TRUE;
+
+config_failed:
+  {
+    GST_ERROR_OBJECT (self, "failed to set config");
+    return FALSE;
+  }
+}
+
+static GstBufferPool *
+_create_other_pool (GstAllocator * allocator, GstAllocationParams * params,
+    GstCaps * caps, guint size)
+{
+  GstBufferPool *pool = NULL;
+  GstStructure *config;
+
+  pool = gst_video_buffer_pool_new ();
+  config = gst_buffer_pool_get_config (pool);
+
+  gst_buffer_pool_config_set_params (config, caps, size, 0, 0);
+  gst_buffer_pool_config_set_allocator (config, allocator, params);
+  if (!gst_buffer_pool_set_config (pool, config)) {
+    gst_clear_object (&pool);
+  }
+
+  return pool;
+}
+
+/* configure the allocation query that was answered downstream */
+static gboolean
+gst_va_compositor_decide_allocation (GstAggregator * agg, GstQuery * query)
+{
+  GstVaCompositor *const self = GST_VA_COMPOSITOR (agg);
+  GstVideoAggregator *const vagg = GST_VIDEO_AGGREGATOR (agg);
+
+  GstAllocator *allocator = NULL, *other_allocator = NULL;
+  GstAllocationParams params, other_params;
+  GstBufferPool *pool = NULL, *other_pool = NULL;
+  GstCaps *caps = NULL;
+  GstStructure *config;
+  GstVideoInfo info;
+  guint min, max, size = 0, usage_hint = VA_SURFACE_ATTRIB_USAGE_HINT_VPP_WRITE;
+  gboolean update_pool, update_allocator, has_videometa, copy_frames;
+
+  gst_query_parse_allocation (query, &caps, NULL);
+
+  gst_allocation_params_init (&other_params);
+  gst_allocation_params_init (&params);
+
+  if (!gst_video_info_from_caps (&info, caps)) {
+    GST_ERROR_OBJECT (self, "Cannot parse caps %" GST_PTR_FORMAT, caps);
+    return FALSE;
+  }
+
+  if (gst_query_get_n_allocation_params (query) > 0) {
+    gst_query_parse_nth_allocation_param (query, 0, &allocator, &other_params);
+    if (allocator
+        && !(GST_IS_VA_DMABUF_ALLOCATOR (allocator)
+            || GST_IS_VA_ALLOCATOR (allocator))) {
+      /* save the allocator for the other pool */
+      other_allocator = allocator;
+      allocator = NULL;
+    }
+    update_allocator = TRUE;
+  } else {
+    update_allocator = FALSE;
+  }
+
+  if (gst_query_get_n_allocation_pools (query) > 0) {
+    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
+
+    if (pool) {
+      if (!GST_IS_VA_POOL (pool)) {
+        GST_DEBUG_OBJECT (self,
+            "may need other pool for copy frames %" GST_PTR_FORMAT, pool);
+        other_pool = pool;
+        pool = NULL;
+      }
+    }
+
+    update_pool = TRUE;
+  } else {
+    size = GST_VIDEO_INFO_SIZE (&info);
+    min = 1;
+    max = 0;
+    update_pool = FALSE;
+  }
+
+  if (!allocator) {
+    if (gst_caps_is_dmabuf (caps) && GST_VIDEO_INFO_IS_RGB (&info))
+      usage_hint = VA_SURFACE_ATTRIB_USAGE_HINT_GENERIC;
+    if (!(allocator = gst_va_compositor_allocator_from_caps (self, caps)))
+      return FALSE;
+  }
+
+  if (!pool)
+    pool = gst_va_pool_new ();
+
+  config = gst_buffer_pool_get_config (pool);
+  gst_buffer_pool_config_set_allocator (config, allocator, &params);
+  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+  gst_buffer_pool_config_set_params (config, caps, size, min, max);
+  gst_buffer_pool_config_set_va_allocation_params (config, usage_hint,
+      GST_VA_FEATURE_AUTO);
+  if (!gst_buffer_pool_set_config (pool, config)) {
+    gst_object_unref (allocator);
+    gst_object_unref (pool);
+    return FALSE;
+  }
+
+  if (GST_IS_VA_DMABUF_ALLOCATOR (allocator)) {
+    gst_va_dmabuf_allocator_get_format (allocator, &vagg->info, NULL);
+  } else if (GST_IS_VA_ALLOCATOR (allocator)) {
+    gst_va_allocator_get_format (allocator, &vagg->info, NULL, NULL);
+  }
+
+  if (update_allocator)
+    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
+  else
+    gst_query_add_allocation_param (query, allocator, &params);
+
+  if (update_pool)
+    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
+  else
+    gst_query_add_allocation_pool (query, pool, size, min, max);
+
+  has_videometa = gst_query_find_allocation_meta (query,
+      GST_VIDEO_META_API_TYPE, NULL);
+
+  copy_frames = (!has_videometa && gst_va_pool_requires_video_meta (pool)
+      && gst_caps_is_raw (caps));
+  if (copy_frames) {
+    if (other_pool) {
+      gst_object_replace ((GstObject **) & self->other_pool,
+          (GstObject *) other_pool);
+    } else {
+      self->other_pool =
+          _create_other_pool (other_allocator, &other_params, caps, size);
+    }
+    GST_DEBUG_OBJECT (self, "Use the other pool for copy %" GST_PTR_FORMAT,
+        self->other_pool);
+  } else {
+    gst_clear_object (&self->other_pool);
+  }
+
+  GST_DEBUG_OBJECT (self,
+      "decided pool %" GST_PTR_FORMAT " with allocator %" GST_PTR_FORMAT,
+      pool, allocator);
+
+  gst_object_unref (allocator);
+  gst_object_unref (pool);
+  gst_clear_object (&other_allocator);
+  gst_clear_object (&other_pool);
+
+  return TRUE;
+}
+
+static GstBufferPool *
+_get_sinkpad_pool (GstVaCompositor * const self, GstVaCompositorPad * const pad)
+{
+  GstAllocator *allocator;
+  GstAllocationParams params = { 0, };
+  GstCaps *caps;
+  GstVideoInfo info;
+  guint size, usage_hint = VA_SURFACE_ATTRIB_USAGE_HINT_VPP_READ;
+
+  if (pad->pool)
+    return pad->pool;
+
+  gst_allocation_params_init (&params);
+
+  caps = gst_pad_get_current_caps (GST_PAD (pad));
+  gst_video_info_from_caps (&info, caps);
+
+  size = GST_VIDEO_INFO_SIZE (&info);
+
+  allocator = gst_va_compositor_allocator_from_caps (self, caps);
+  pad->pool = gst_va_pool_new_with_config (caps, size, 1, 0, usage_hint,
+      GST_VA_FEATURE_AUTO, allocator, &params);
+
+  gst_caps_unref (caps);
+
+  if (!pad->pool) {
+    gst_object_unref (allocator);
+    return NULL;
+  }
+
+  if (GST_IS_VA_DMABUF_ALLOCATOR (allocator)) {
+    gst_va_dmabuf_allocator_get_format (allocator, &info, NULL);
+  } else if (GST_IS_VA_ALLOCATOR (allocator)) {
+    gst_va_allocator_get_format (allocator, &info, NULL, NULL);
+  }
+
+  gst_object_unref (allocator);
+
+  if (!gst_buffer_pool_set_active (pad->pool, TRUE)) {
+    GST_WARNING_OBJECT (self, "failed to activate the sinkpad pool %"
+        GST_PTR_FORMAT, pad->pool);
+    return NULL;
+  }
+
+  return pad->pool;
+}
+
+static inline gsize
+_get_plane_data_size (GstVideoInfo * info, guint plane)
+{
+  gint comp[GST_VIDEO_MAX_COMPONENTS];
+  gint height, padded_height;
+
+  gst_video_format_info_component (info->finfo, plane, comp);
+
+  height = GST_VIDEO_INFO_HEIGHT (info);
+  padded_height =
+      GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info->finfo, comp[0], height);
+
+  return GST_VIDEO_INFO_PLANE_STRIDE (info, plane) * padded_height;
+}
+
+static gboolean
+_try_import_dmabuf_unlocked (GstVaCompositor * const self, GstVideoInfo * info,
+    GstBuffer * inbuf)
+{
+  GstVideoMeta *meta;
+  GstMemory *mems[GST_VIDEO_MAX_PLANES];
+  guint i, n_mem, n_planes;
+  gsize offset[GST_VIDEO_MAX_PLANES];
+  uintptr_t fd[GST_VIDEO_MAX_PLANES];
+
+  n_planes = GST_VIDEO_INFO_N_PLANES (info);
+  n_mem = gst_buffer_n_memory (inbuf);
+  meta = gst_buffer_get_video_meta (inbuf);
+
+  /* This will eliminate most non-dmabuf out there */
+  if (!gst_is_dmabuf_memory (gst_buffer_peek_memory (inbuf, 0)))
+    return FALSE;
+
+  /* We cannot have multiple dmabuf per plane */
+  if (n_mem > n_planes)
+    return FALSE;
+
+  /* Update video info based on video meta */
+  if (meta) {
+    GST_VIDEO_INFO_WIDTH (info) = meta->width;
+    GST_VIDEO_INFO_HEIGHT (info) = meta->height;
+
+    for (i = 0; i < meta->n_planes; i++) {
+      GST_VIDEO_INFO_PLANE_OFFSET (info, i) = meta->offset[i];
+      GST_VIDEO_INFO_PLANE_STRIDE (info, i) = meta->stride[i];
+    }
+  }
+
+  /* Find and validate all memories */
+  for (i = 0; i < n_planes; i++) {
+    guint plane_size;
+    guint length;
+    guint mem_idx;
+    gsize mem_skip;
+
+    plane_size = _get_plane_data_size (info, i);
+
+    if (!gst_buffer_find_memory (inbuf, info->offset[i], plane_size,
+            &mem_idx, &length, &mem_skip))
+      return FALSE;
+
+    /* We can't have more than one dmabuf per plane */
+    if (length != 1)
+      return FALSE;
+
+    mems[i] = gst_buffer_peek_memory (inbuf, mem_idx);
+
+    /* And all memory found must be dmabuf */
+    if (!gst_is_dmabuf_memory (mems[i]))
+      return FALSE;
+
+    offset[i] = mems[i]->offset + mem_skip;
+    fd[i] = gst_dmabuf_memory_get_fd (mems[i]);
+  }
+
+  /* Now create a VASurfaceID for the buffer */
+  return gst_va_dmabuf_memories_setup (self->display, info, n_planes,
+      mems, fd, offset, VA_SURFACE_ATTRIB_USAGE_HINT_VPP_READ);
+}
+
+extern GRecMutex GST_VA_SHARED_LOCK;
+
+static gboolean
+_try_import_buffer (GstVaCompositor * const self,
+    GstVaCompositorPad * const pad, GstBuffer * inbuf)
+{
+  VASurfaceID surface;
+  GstCaps *caps;
+  GstVideoInfo info;
+  gboolean ret;
+
+  surface = gst_va_buffer_get_surface (inbuf);
+  if (surface != VA_INVALID_ID)
+    return TRUE;
+
+  caps = gst_pad_get_current_caps (GST_PAD (pad));
+  gst_video_info_from_caps (&info, caps);
+  gst_caps_unref (caps);
+
+  g_rec_mutex_lock (&GST_VA_SHARED_LOCK);
+  ret = _try_import_dmabuf_unlocked (self, &info, inbuf);
+  g_rec_mutex_unlock (&GST_VA_SHARED_LOCK);
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_va_compositor_import_buffer (GstVaCompositor * const self,
+    GstVaCompositorPad * const pad, GstBuffer * inbuf,
GstBuffer ** buf) +{ + GstBuffer *buffer = NULL; + GstBufferPool *pool; + GstFlowReturn ret; + GstCaps *caps; + GstVideoInfo info; + GstVideoFrame in_frame, out_frame; + gboolean imported, copied; + + imported = _try_import_buffer (self, pad, inbuf); + if (imported) { + *buf = gst_buffer_ref (inbuf); + return GST_FLOW_OK; + } + + /* input buffer doesn't come from a vapool, thus it is required to + * have a pool, grab from it a new buffer and copy the input + * buffer to the new one */ + if (!(pool = _get_sinkpad_pool (self, pad))) + return GST_FLOW_ERROR; + + ret = gst_buffer_pool_acquire_buffer (pool, &buffer, NULL); + if (ret != GST_FLOW_OK) + return ret; + + GST_LOG_OBJECT (self, "copying input frame"); + + caps = gst_pad_get_current_caps (GST_PAD (pad)); + gst_video_info_from_caps (&info, caps); + gst_caps_unref (caps); + + if (!gst_video_frame_map (&in_frame, &info, inbuf, GST_MAP_READ)) + goto invalid_buffer; + + if (!gst_video_frame_map (&out_frame, &info, buffer, GST_MAP_WRITE)) { + gst_video_frame_unmap (&in_frame); + goto invalid_buffer; + } + + copied = gst_video_frame_copy (&out_frame, &in_frame); + + gst_video_frame_unmap (&out_frame); + gst_video_frame_unmap (&in_frame); + + if (!copied) + goto invalid_buffer; + + *buf = buffer; + + return GST_FLOW_OK; + +invalid_buffer: + { + GST_ELEMENT_WARNING (self, CORE, NOT_IMPLEMENTED, (NULL), + ("invalid video buffer received")); + if (buffer) + gst_buffer_unref (buffer); + return GST_FLOW_OK; + } +} + +typedef struct _GstVaCompositorSampleGenerator GstVaCompositorSampleGenerator; +struct _GstVaCompositorSampleGenerator +{ + GstVaCompositor *comp; + GList *current; + GstVaComposeSample sample; +}; + +static GstVaComposeSample * +gst_va_compositor_sample_next (gpointer data) +{ + GstVaCompositorSampleGenerator *generator; + GstVideoAggregatorPad *vaggpad; + GstVaCompositorPad *pad; + GstBuffer *inbuf; + GstBuffer *buf; + GstFlowReturn res; + GstVideoCropMeta *crop = NULL; + + generator = (GstVaCompositorSampleGenerator *) data; + + /* at the end of the generator? */ + while (generator->current) { + /* get the current sinkpad for processing */ + vaggpad = GST_VIDEO_AGGREGATOR_PAD (generator->current->data); + + /* increment to next sinkpad */ + generator->current = generator->current->next; + + /* reset sample */ + /* *INDENT-OFF* */ + generator->sample = (GstVaComposeSample) { 0, }; + /* *INDENT-ON* */ + + /* current sinkpad may not be queueing buffers yet (e.g. timestamp-offset) + * or it may have reached EOS */ + if (!gst_video_aggregator_pad_has_current_buffer (vaggpad)) + continue; + + inbuf = gst_video_aggregator_pad_get_current_buffer (vaggpad); + pad = GST_VA_COMPOSITOR_PAD (vaggpad); + + res = gst_va_compositor_import_buffer (generator->comp, pad, inbuf, &buf); + if (res != GST_FLOW_OK) + return &generator->sample; + + crop = gst_buffer_get_video_crop_meta (buf); + + GST_OBJECT_LOCK (vaggpad); + /* *INDENT-OFF* */ + generator->sample = (GstVaComposeSample) { + .buffer = buf, + .input_region = (VARectangle) { + .x = crop ? crop->x : 0, + .y = crop ? crop->y : 0, + .width = crop ? crop->width : GST_VIDEO_INFO_WIDTH (&vaggpad->info), + .height = crop ? crop->height : GST_VIDEO_INFO_HEIGHT (&vaggpad->info), + }, + .output_region = (VARectangle) { + .x = pad->xpos, + .y = pad->ypos, + .width = (pad->width == DEFAULT_PAD_WIDTH) + ? GST_VIDEO_INFO_WIDTH (&vaggpad->info) : pad->width, + .height = (pad->height == DEFAULT_PAD_HEIGHT) + ? 
GST_VIDEO_INFO_HEIGHT (&vaggpad->info) : pad->height, + }, + .flags = generator->comp->scale_method, + .alpha = pad->alpha, + }; + /* *INDENT-ON* */ + GST_OBJECT_UNLOCK (vaggpad); + + return &generator->sample; + } + + return NULL; +} + +static gboolean +gst_va_compositor_copy_output_buffer (GstVaCompositor * const self, + GstBuffer * src_buf, GstBuffer * dst_buf) +{ + GstVideoAggregator *const vagg = GST_VIDEO_AGGREGATOR (self); + GstVideoFrame src_frame, dst_frame; + + GST_LOG_OBJECT (self, "copying output buffer"); + + if (!gst_video_frame_map (&src_frame, &vagg->info, src_buf, GST_MAP_READ)) { + GST_ERROR_OBJECT (self, "couldn't map source buffer"); + return FALSE; + } + + if (!gst_video_frame_map (&dst_frame, &self->other_info, dst_buf, + GST_MAP_WRITE)) { + GST_ERROR_OBJECT (self, "couldn't map output buffer"); + gst_video_frame_unmap (&src_frame); + return FALSE; + } + + if (!gst_video_frame_copy (&dst_frame, &src_frame)) { + GST_ERROR_OBJECT (self, "couldn't copy output buffer"); + gst_video_frame_unmap (&src_frame); + gst_video_frame_unmap (&dst_frame); + return FALSE; + } + + gst_video_frame_unmap (&src_frame); + gst_video_frame_unmap (&dst_frame); + + return TRUE; +} + +static GstFlowReturn +gst_va_compositor_aggregate_frames (GstVideoAggregator * vagg, + GstBuffer * outbuf) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (vagg); + GstVaCompositorSampleGenerator generator; + GstVaComposeTransaction tx; + GstBuffer *vabuffer; + gboolean need_copy = FALSE; + GstFlowReturn ret = GST_FLOW_OK; + + if (self->other_pool) { + /* create a va buffer for filter */ + ret = GST_VIDEO_AGGREGATOR_CLASS (parent_class)->create_output_buffer + (vagg, &vabuffer); + if (ret != GST_FLOW_OK) + return ret; + + need_copy = TRUE; + } else { + /* already a va buffer */ + vabuffer = gst_buffer_ref (outbuf); + } + + /* *INDENT-OFF* */ + generator = (GstVaCompositorSampleGenerator) { + .comp = self, + .current = GST_ELEMENT (self)->sinkpads, + }; + tx = (GstVaComposeTransaction) { + .next = gst_va_compositor_sample_next, + .output = vabuffer, + .user_data = (gpointer) &generator, + }; + /* *INDENT-ON* */ + + GST_OBJECT_LOCK (self); + + if (!gst_va_filter_compose (self->filter, &tx)) { + GST_ERROR_OBJECT (self, "couldn't apply filter"); + ret = GST_FLOW_ERROR; + } + + GST_OBJECT_UNLOCK (self); + + if (ret != GST_FLOW_OK) + goto done; + + if (need_copy && !gst_va_compositor_copy_output_buffer (self, vabuffer, + outbuf)) { + GST_ERROR_OBJECT (self, "couldn't copy va buffer to output buffer"); + ret = GST_FLOW_ERROR; + } + +done: + gst_buffer_unref (vabuffer); + return ret; +} + +static GstFlowReturn +gst_va_compositor_create_output_buffer (GstVideoAggregator * vagg, + GstBuffer ** outbuf) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (vagg); + GstFlowReturn ret; + + *outbuf = NULL; + + if (!self->other_pool) + /* no copy necessary, so use a va buffer directly */ + return GST_VIDEO_AGGREGATOR_CLASS (parent_class)->create_output_buffer + (vagg, outbuf); + + /* use output buffers from downstream pool for copy */ + if (!gst_buffer_pool_is_active (self->other_pool) && + !gst_buffer_pool_set_active (self->other_pool, TRUE)) { + GST_ERROR_OBJECT (self, "failed to activate other pool %" + GST_PTR_FORMAT, self->other_pool); + return GST_FLOW_ERROR; + } + + /* acquire a buffer from downstream pool for copy */ + ret = gst_buffer_pool_acquire_buffer (self->other_pool, outbuf, NULL); + if (ret != GST_FLOW_OK || !*outbuf) { + GST_ERROR_OBJECT (self, "failed to acquire output buffer"); + return GST_FLOW_ERROR; + 
} + + return GST_FLOW_OK; +} + +static gboolean +gst_va_compositor_negotiated_src_caps (GstAggregator * agg, GstCaps * caps) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (agg); + + if (!gst_video_info_from_caps (&self->other_info, caps)) { + GST_ERROR_OBJECT (self, "invalid caps"); + return FALSE; + } + + if (self->other_pool) { + gst_buffer_pool_set_active (self->other_pool, FALSE); + gst_clear_object (&self->other_pool); + } + + return GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps); +} + +static void +gst_va_compositor_pad_get_output_size (GstVaCompositorPad * const pad, + gint * width, gint * height) +{ + GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad); + *width = (pad->width == DEFAULT_PAD_WIDTH) + ? GST_VIDEO_INFO_WIDTH (&vaggpad->info) : pad->width; + *height = (pad->height == DEFAULT_PAD_HEIGHT) + ? GST_VIDEO_INFO_HEIGHT (&vaggpad->info) : pad->height; + + *width += MAX (pad->xpos, 0); + *height += MAX (pad->ypos, 0); +} + +static GstCaps * +gst_va_compositor_fixate_src_caps (GstAggregator * agg, GstCaps * caps) +{ + GstVideoAggregator *const vagg = GST_VIDEO_AGGREGATOR (agg); + GList *l; + gint best_width = -1, best_height = -1; + gint best_fps_n = -1, best_fps_d = -1; + gdouble best_fps = 0.; + GstCaps *ret = NULL; + GstStructure *s; + + ret = gst_caps_make_writable (caps); + + GST_OBJECT_LOCK (vagg); + for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) { + GstVideoAggregatorPad *const vaggpad = l->data; + GstVaCompositorPad *const pad = GST_VA_COMPOSITOR_PAD (vaggpad); + gint this_width, this_height; + gint fps_n, fps_d; + gdouble cur_fps; + + fps_n = GST_VIDEO_INFO_FPS_N (&vaggpad->info); + fps_d = GST_VIDEO_INFO_FPS_D (&vaggpad->info); + + gst_va_compositor_pad_get_output_size (pad, &this_width, &this_height); + + if (best_width < this_width) + best_width = this_width; + if (best_height < this_height) + best_height = this_height; + + if (fps_d == 0) + cur_fps = 0.0; + else + gst_util_fraction_to_double (fps_n, fps_d, &cur_fps); + + if (best_fps < cur_fps) { + best_fps = cur_fps; + best_fps_n = fps_n; + best_fps_d = fps_d; + } + } + GST_OBJECT_UNLOCK (vagg); + + if (best_fps_n <= 0 || best_fps_d <= 0 || best_fps == 0.0) { + best_fps_n = 25; + best_fps_d = 1; + best_fps = 25.0; + } + + s = gst_caps_get_structure (ret, 0); + gst_structure_fixate_field_nearest_int (s, "width", best_width); + gst_structure_fixate_field_nearest_int (s, "height", best_height); + if (gst_structure_has_field (s, "framerate")) { + gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n, + best_fps_d); + } else { + gst_structure_set (s, "framerate", GST_TYPE_FRACTION, best_fps_n, + best_fps_d, NULL); + } + + return gst_caps_fixate (ret); +} + +/* *INDENT-OFF* */ +static const gchar *caps_str = + GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_VA, + "{ NV12, I420, YV12, YUY2, RGBA, BGRA, P010_10LE, ARGB, ABGR }") " ;" + GST_VIDEO_CAPS_MAKE ("{ VUYA, GRAY8, NV12, NV21, YUY2, UYVY, YV12, " + "I420, P010_10LE, RGBA, BGRA, ARGB, ABGR }"); +/* *INDENT-ON* */ + +static void +gst_va_compositor_class_init (gpointer g_class, gpointer class_data) +{ + GstCaps *doc_caps, *caps = NULL; + GstPadTemplate *sink_pad_templ, *src_pad_templ; + GObjectClass *const object_class = G_OBJECT_CLASS (g_class); + GstElementClass *const element_class = GST_ELEMENT_CLASS (g_class); + GstAggregatorClass *const agg_class = GST_AGGREGATOR_CLASS (g_class); + GstVideoAggregatorClass *const vagg_class = + GST_VIDEO_AGGREGATOR_CLASS (g_class); + GstVaCompositorClass 
*const klass = GST_VA_COMPOSITOR_CLASS (g_class); + GstVaDisplay *display; + GstVaFilter *filter; + struct CData *cdata = class_data; + gchar *long_name; + + parent_class = g_type_class_peek_parent (g_class); + + klass->render_device_path = g_strdup (cdata->render_device_path); + + if (cdata->description) { + long_name = g_strdup_printf ("VA-API Video Compositor in %s", + cdata->description); + } else { + long_name = g_strdup ("VA-API Video Compositor"); + } + + display = gst_va_display_drm_new_from_path (klass->render_device_path); + filter = gst_va_filter_new (display); + + if (gst_va_filter_open (filter)) { + caps = gst_va_filter_get_caps (filter); + } else { + caps = gst_caps_from_string (caps_str); + } + + object_class->dispose = GST_DEBUG_FUNCPTR (gst_va_compositor_dispose); + object_class->get_property = + GST_DEBUG_FUNCPTR (gst_va_compositor_get_property); + object_class->set_property = + GST_DEBUG_FUNCPTR (gst_va_compositor_set_property); + + gst_element_class_set_static_metadata (element_class, long_name, + "Filter/Editor/Video/Compositor/Hardware", + "VA-API based video compositor", + "U. Artie Eoff "); + + element_class->request_new_pad = + GST_DEBUG_FUNCPTR (gst_va_compositor_request_new_pad); + element_class->release_pad = + GST_DEBUG_FUNCPTR (gst_va_compositor_release_pad); + element_class->set_context = + GST_DEBUG_FUNCPTR (gst_va_compositor_set_context); + + doc_caps = gst_caps_from_string (caps_str); + + sink_pad_templ = gst_pad_template_new_with_gtype ("sink_%u", GST_PAD_SINK, + GST_PAD_REQUEST, caps, GST_TYPE_VA_COMPOSITOR_PAD); + gst_element_class_add_pad_template (element_class, sink_pad_templ); + gst_pad_template_set_documentation_caps (sink_pad_templ, + gst_caps_ref (doc_caps)); + + src_pad_templ = gst_pad_template_new_with_gtype ("src", GST_PAD_SRC, + GST_PAD_ALWAYS, caps, GST_TYPE_AGGREGATOR_PAD); + gst_element_class_add_pad_template (element_class, src_pad_templ); + gst_pad_template_set_documentation_caps (src_pad_templ, + gst_caps_ref (doc_caps)); + + gst_caps_unref (doc_caps); + gst_caps_unref (caps); + + agg_class->sink_query = GST_DEBUG_FUNCPTR (gst_va_compositor_sink_query); + agg_class->src_query = GST_DEBUG_FUNCPTR (gst_va_compositor_src_query); + agg_class->start = GST_DEBUG_FUNCPTR (gst_va_compositor_start); + agg_class->stop = GST_DEBUG_FUNCPTR (gst_va_compositor_stop); + agg_class->propose_allocation = + GST_DEBUG_FUNCPTR (gst_va_compositor_propose_allocation); + agg_class->fixate_src_caps = + GST_DEBUG_FUNCPTR (gst_va_compositor_fixate_src_caps); + agg_class->negotiated_src_caps = + GST_DEBUG_FUNCPTR (gst_va_compositor_negotiated_src_caps); + agg_class->decide_allocation = + GST_DEBUG_FUNCPTR (gst_va_compositor_decide_allocation); + + vagg_class->aggregate_frames = + GST_DEBUG_FUNCPTR (gst_va_compositor_aggregate_frames); + vagg_class->create_output_buffer = + GST_DEBUG_FUNCPTR (gst_va_compositor_create_output_buffer); + + properties[PROP_DEVICE_PATH] = g_param_spec_string ("device-path", + "Device Path", "DRM device path", NULL, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS); + + properties[PROP_SCALE_METHOD] = g_param_spec_enum ("scale-method", + "Scale Method", "Scale method to use", GST_TYPE_VA_SCALE_METHOD, + VA_FILTER_SCALING_DEFAULT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + + g_object_class_install_properties (object_class, N_PROPERTIES, properties); + + g_free (long_name); + g_free (cdata->description); + g_free (cdata->render_device_path); + g_free (cdata); + gst_object_unref (filter); + gst_object_unref (display); +} + +static GObject * 
+gst_va_compositor_child_proxy_get_child_by_index (GstChildProxy * proxy, + guint index) +{ + GstVaCompositor *self = GST_VA_COMPOSITOR (proxy); + GObject *obj = NULL; + + GST_OBJECT_LOCK (self); + obj = g_list_nth_data (GST_ELEMENT_CAST (self)->sinkpads, index); + if (obj) + gst_object_ref (obj); + GST_OBJECT_UNLOCK (self); + + return obj; +} + +static guint +gst_va_compositor_child_proxy_get_children_count (GstChildProxy * proxy) +{ + GstVaCompositor *self = GST_VA_COMPOSITOR (proxy); + guint count = 0; + + GST_OBJECT_LOCK (self); + count = GST_ELEMENT_CAST (self)->numsinkpads; + GST_OBJECT_UNLOCK (self); + GST_INFO_OBJECT (self, "Children Count: %d", count); + + return count; +} + +static void +gst_va_compositor_child_proxy_init (gpointer g_iface, gpointer iface_data) +{ + GstChildProxyInterface *iface = (GstChildProxyInterface *) g_iface; + + iface->get_child_by_index = gst_va_compositor_child_proxy_get_child_by_index; + iface->get_children_count = gst_va_compositor_child_proxy_get_children_count; +} + +static void +gst_va_compositor_init (GTypeInstance * instance, gpointer g_class) +{ + GstVaCompositor *const self = GST_VA_COMPOSITOR (instance); + + self->other_pool = NULL; +} + +static gpointer +_register_debug_category (gpointer data) +{ + GST_DEBUG_CATEGORY_INIT (gst_va_compositor_debug, "vacompositor", 0, + "VA Video Compositor"); + + return NULL; +} + +gboolean +gst_va_compositor_register (GstPlugin * plugin, GstVaDevice * device, + guint rank) +{ + static GOnce debug_once = G_ONCE_INIT; + GType type; + GTypeInfo type_info = { + .class_size = sizeof (GstVaCompositorClass), + .class_init = gst_va_compositor_class_init, + .instance_size = sizeof (GstVaCompositor), + .instance_init = gst_va_compositor_init, + }; + GInterfaceInfo interface_info = { + (GInterfaceInitFunc) gst_va_compositor_child_proxy_init, + }; + struct CData *cdata; + gboolean ret; + gchar *type_name, *feature_name; + + g_return_val_if_fail (GST_IS_PLUGIN (plugin), FALSE); + g_return_val_if_fail (GST_IS_VA_DEVICE (device), FALSE); + + cdata = g_new (struct CData, 1); + cdata->description = NULL; + cdata->render_device_path = g_strdup (device->render_device_path); + + type_info.class_data = cdata; + + type_name = g_strdup ("GstVaCompositor"); + feature_name = g_strdup ("vacompositor"); + + /* The first compositor to be registered should use a constant + * name, like vacompositor, for any additional compositors, we + * create unique names, using the render device name. 
+ */
+  if (g_type_from_name (type_name)) {
+    gchar *basename = g_path_get_basename (device->render_device_path);
+    g_free (type_name);
+    g_free (feature_name);
+    type_name = g_strdup_printf ("GstVa%sCompositor", basename);
+    feature_name = g_strdup_printf ("va%scompositor", basename);
+    cdata->description = basename;
+
+    /* lower rank for non-first device */
+    if (rank > 0)
+      rank--;
+  }
+
+  g_once (&debug_once, _register_debug_category, NULL);
+
+  type = g_type_register_static (GST_TYPE_VIDEO_AGGREGATOR, type_name,
+      &type_info, 0);
+  g_type_add_interface_static (type, GST_TYPE_CHILD_PROXY, &interface_info);
+
+  ret = gst_element_register (plugin, feature_name, rank, type);
+
+  g_free (type_name);
+  g_free (feature_name);
+
+  return ret;
+}
diff --git a/subprojects/gst-plugins-bad/sys/va/gstvacompositor.h b/subprojects/gst-plugins-bad/sys/va/gstvacompositor.h
new file mode 100644
index 0000000..ad59d12
--- /dev/null
+++ b/subprojects/gst-plugins-bad/sys/va/gstvacompositor.h
@@ -0,0 +1,36 @@
+/* GStreamer
+ * Copyright (C) 2022 Intel Corporation
+ *     Author: U. Artie Eoff
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */ + +#pragma once + +#include "gstvadevice.h" +#include + +G_BEGIN_DECLS + +#define GST_TYPE_VA_COMPOSITOR_PAD (gst_va_compositor_pad_get_type()) +G_DECLARE_FINAL_TYPE (GstVaCompositorPad, gst_va_compositor_pad, + GST, VA_COMPOSITOR_PAD, GstVideoAggregatorPad) + +gboolean gst_va_compositor_register (GstPlugin * plugin, + GstVaDevice * device, + guint rank); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/va/gstvafilter.c b/subprojects/gst-plugins-bad/sys/va/gstvafilter.c index b536b6e..59de2e8 100644 --- a/subprojects/gst-plugins-bad/sys/va/gstvafilter.c +++ b/subprojects/gst-plugins-bad/sys/va/gstvafilter.c @@ -1518,24 +1518,35 @@ gst_va_filter_drop_filter_buffers (GstVaFilter * self) return ret; } +static VASurfaceID +_get_surface_from_buffer (GstVaFilter * self, GstBuffer * buffer) +{ + VASurfaceID surface = VA_INVALID_ID; + + if (buffer) + surface = gst_va_buffer_get_surface (buffer); + + if (surface != VA_INVALID_ID) { + /* @FIXME: in gallium vaQuerySurfaceStatus only seems to work with + * encoder's surfaces */ + if (!GST_VA_DISPLAY_IS_IMPLEMENTATION (self->display, MESA_GALLIUM)) + if (!va_check_surface (self->display, surface)) + surface = VA_INVALID_ID; + } + + return surface; +} + static gboolean _fill_va_sample (GstVaFilter * self, GstVaSample * sample, GstPadDirection direction) { GstVideoCropMeta *crop = NULL; - if (sample->buffer) - sample->surface = gst_va_buffer_get_surface (sample->buffer); + sample->surface = _get_surface_from_buffer (self, sample->buffer); if (sample->surface == VA_INVALID_ID) return FALSE; - /* @FIXME: in gallium vaQuerySurfaceStatus only seems to work with - * encoder's surfaces */ - if (!GST_VA_DISPLAY_IS_IMPLEMENTATION (self->display, MESA_GALLIUM)) { - if (!va_check_surface (self->display, sample->surface)) - return FALSE; - } - /* XXX: cropping occurs only in input frames */ if (direction == GST_PAD_SRC) { GST_OBJECT_LOCK (self); @@ -1703,6 +1714,140 @@ fail_end_pic: } } +gboolean +gst_va_filter_has_compose (GstVaFilter * self) +{ + g_return_val_if_fail (GST_IS_VA_FILTER (self), FALSE); + + if (!gst_va_filter_is_open (self)) + return FALSE; + + /* HACK(uartie): i965 can't do composition */ + if (gst_va_display_is_implementation (self->display, + GST_VA_IMPLEMENTATION_INTEL_I965)) + return FALSE; + + /* some drivers can compose, but may not support blending (e.g. GALLIUM) */ +#ifndef GST_DISABLE_GST_DEBUG + if (self->pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA) + GST_WARNING_OBJECT (self, "VPP does not support alpha blending"); +#endif + + return TRUE; +} + +/** + * gst_va_filter_compose: + * @tx: the #GstVaComposeTransaction for input samples and output. + * + * Iterates over all inputs via #GstVaComposeTransaction:next and composes + * them onto the #GstVaComposeTransaction:output. + * + * Only csc, scaling and blending filters are applied during composition. + * All other filters are ignored here. Use #gst_va_filter_process to apply + * other filters. + * + * Returns: TRUE on successful compose, FALSE otherwise. 
+ *
+ * Since: 1.22
+ */
+gboolean
+gst_va_filter_compose (GstVaFilter * self, GstVaComposeTransaction * tx)
+{
+  VADisplay dpy;
+  VAStatus status;
+  VASurfaceID out_surface;
+  GstVaComposeSample *sample;
+
+  g_return_val_if_fail (GST_IS_VA_FILTER (self), FALSE);
+  g_return_val_if_fail (tx, FALSE);
+  g_return_val_if_fail (tx->next, FALSE);
+  g_return_val_if_fail (tx->output, FALSE);
+
+  if (!gst_va_filter_is_open (self))
+    return FALSE;
+
+  out_surface = _get_surface_from_buffer (self, tx->output);
+  if (out_surface == VA_INVALID_ID)
+    return FALSE;
+
+  dpy = gst_va_display_get_va_dpy (self->display);
+
+  status = vaBeginPicture (dpy, self->context, out_surface);
+  if (status != VA_STATUS_SUCCESS) {
+    GST_ERROR_OBJECT (self, "vaBeginPicture: %s", vaErrorStr (status));
+    return FALSE;
+  }
+
+  sample = tx->next (tx->user_data);
+  for (; sample; sample = tx->next (tx->user_data)) {
+    VAProcPipelineParameterBuffer params = { 0, };
+    VABufferID buffer;
+    VASurfaceID in_surface;
+    VABlendState blend = { 0, };
+
+    in_surface = _get_surface_from_buffer (self, sample->buffer);
+    if (in_surface == VA_INVALID_ID)
+      return FALSE;
+
+    /* (transfer full), unref it */
+    gst_buffer_unref (sample->buffer);
+
+    /* *INDENT-OFF* */
+    params = (VAProcPipelineParameterBuffer) {
+      .surface = in_surface,
+      .surface_region = &sample->input_region,
+      .output_region = &sample->output_region,
+      .output_background_color = 0xff000000,
+      .filter_flags = sample->flags,
+    };
+    /* *INDENT-ON* */
+
+    /* only send blend state when sample is not fully opaque */
+    if ((self->pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA)
+        && sample->alpha < 1.0) {
+      /* *INDENT-OFF* */
+      blend = (VABlendState) {
+        .flags = VA_BLEND_GLOBAL_ALPHA,
+        .global_alpha = sample->alpha,
+      };
+      /* *INDENT-ON* */
+      params.blend_state = &blend;
+    }
+
+    status = vaCreateBuffer (dpy, self->context,
+        VAProcPipelineParameterBufferType, sizeof (params), 1, &params,
+        &buffer);
+    if (status != VA_STATUS_SUCCESS) {
+      GST_ERROR_OBJECT (self, "vaCreateBuffer: %s", vaErrorStr (status));
+      goto fail_end_pic;
+    }
+
+    status = vaRenderPicture (dpy, self->context, &buffer, 1);
+    vaDestroyBuffer (dpy, buffer);
+    if (status != VA_STATUS_SUCCESS) {
+      GST_ERROR_OBJECT (self, "vaRenderPicture: %s", vaErrorStr (status));
+      goto fail_end_pic;
+    }
+  }
+
+  status = vaEndPicture (dpy, self->context);
+  if (status != VA_STATUS_SUCCESS) {
+    GST_ERROR_OBJECT (self, "vaEndPicture: %s", vaErrorStr (status));
+    return FALSE;
+  }
+
+  return TRUE;
+
+fail_end_pic:
+  {
+    status = vaEndPicture (dpy, self->context);
+    if (status != VA_STATUS_SUCCESS)
+      GST_ERROR_OBJECT (self, "vaEndPicture: %s", vaErrorStr (status));
+    return FALSE;
+  }
+}
+
 /**
  * gst_va_buffer_get_surface_flags:
  * @buffer: the #GstBuffer to check.
@@ -1782,3 +1927,21 @@ gst_va_filter_has_video_format (GstVaFilter * self, GstVideoFormat format, return FALSE; } + +GType +gst_va_scale_method_get_type (void) +{ + static gsize type = 0; + static const GEnumValue values[] = { + {VA_FILTER_SCALING_DEFAULT, "Default scaling method", "default"}, + {VA_FILTER_SCALING_FAST, "Fast scaling method", "fast"}, + {VA_FILTER_SCALING_HQ, "High quality scaling method", "hq"}, + {0, NULL, NULL}, + }; + + if (g_once_init_enter (&type)) { + const GType _type = g_enum_register_static ("GstVaScaleMethod", values); + g_once_init_leave (&type, _type); + } + return type; +} diff --git a/subprojects/gst-plugins-bad/sys/va/gstvafilter.h b/subprojects/gst-plugins-bad/sys/va/gstvafilter.h index 4de1f64..0c93233 100644 --- a/subprojects/gst-plugins-bad/sys/va/gstvafilter.h +++ b/subprojects/gst-plugins-bad/sys/va/gstvafilter.h @@ -57,6 +57,9 @@ enum { GST_VA_FILTER_PROP_LAST }; +#define GST_TYPE_VA_SCALE_METHOD gst_va_scale_method_get_type() +GType gst_va_scale_method_get_type (void) G_GNUC_CONST; + typedef struct _GstVaSample GstVaSample; struct _GstVaSample { @@ -78,6 +81,34 @@ struct _GstVaSample VARectangle rect; }; +typedef struct _GstVaComposeSample GstVaComposeSample; +struct _GstVaComposeSample +{ + /* input buffer (transfer full) */ + GstBuffer *buffer; + + /* scale method flags */ + guint32 flags; + + VARectangle input_region; + VARectangle output_region; + + gdouble alpha; +}; + +typedef struct _GstVaComposeTransaction GstVaComposeTransaction; +struct _GstVaComposeTransaction +{ + /* input sample iterator function */ + GstVaComposeSample* (*next) (gpointer user_data); + + /* the output buffer to compose onto */ + GstBuffer *output; + + /* user data parameter for "next" function */ + gpointer user_data; +}; + GstVaFilter * gst_va_filter_new (GstVaDisplay * display); gboolean gst_va_filter_open (GstVaFilter * self); gboolean gst_va_filter_close (GstVaFilter * self); @@ -123,4 +154,8 @@ gboolean gst_va_filter_has_video_format (GstVaFilter * self, GstVideoFormat format, GstCapsFeatures * feature); +gboolean gst_va_filter_has_compose (GstVaFilter * self); +gboolean gst_va_filter_compose (GstVaFilter * self, + GstVaComposeTransaction * tx); + G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/va/meson.build b/subprojects/gst-plugins-bad/sys/va/meson.build index 0b82527..18d1ca3 100644 --- a/subprojects/gst-plugins-bad/sys/va/meson.build +++ b/subprojects/gst-plugins-bad/sys/va/meson.build @@ -4,6 +4,7 @@ va_sources = [ 'gstvabasetransform.c', 'gstvabaseenc.c', 'gstvacaps.c', + 'gstvacompositor.c', 'gstvadecoder.c', 'gstvadeinterlace.c', 'gstvaencoder.c', -- 2.7.4
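Editor's note for reviewers: the sketch below is not part of the patch. It is a minimal, hypothetical illustration of how a caller can drive the new gst_va_filter_compose() API through a GstVaComposeTransaction; the SampleIter, next_sample and compose_buffers names are invented for this example. The in-tree caller is gst_va_compositor_aggregate_frames() above. The sketch assumes every buffer is VA-backed (or already imported), since the filter resolves surfaces with gst_va_buffer_get_surface(), and that each sample's buffer carries a reference the transaction may consume, because gst_va_filter_compose() unrefs sample->buffer (transfer full).

```c
/* Illustrative only -- not part of the patch. Assumes the private
 * sys/va headers introduced/extended here are available. */
#include "gstvafilter.h"

typedef struct
{
  GstVaComposeSample *samples;  /* caller-filled array */
  guint count;
  guint index;
} SampleIter;

/* GstVaComposeTransaction::next: return one sample per call, NULL to stop.
 * gst_va_filter_compose() unrefs each sample->buffer (transfer full). */
static GstVaComposeSample *
next_sample (gpointer user_data)
{
  SampleIter *iter = user_data;

  if (iter->index >= iter->count)
    return NULL;
  return &iter->samples[iter->index++];
}

static gboolean
compose_buffers (GstVaFilter * filter, GstVaComposeSample * samples,
    guint count, GstBuffer * output)
{
  SampleIter iter = { samples, count, 0 };
  /* *INDENT-OFF* */
  GstVaComposeTransaction tx = {
    .next = next_sample,
    .output = output,
    .user_data = &iter,
  };
  /* *INDENT-ON* */

  /* composition may be available without alpha blending; in that case
   * gst_va_filter_compose() simply never sends a VABlendState */
  if (!gst_va_filter_has_compose (filter))
    return FALSE;

  return gst_va_filter_compose (filter, &tx);
}
```

Each GstVaComposeSample carries the per-input input_region/output_region rectangles, the scale-method flags and the global alpha, mirroring what vacompositor fills in from its pad properties.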