From 64ed6075b782578d57edd09b3463071d6e5a4342 Mon Sep 17 00:00:00 2001 From: Seungha Yang Date: Sun, 6 Feb 2022 21:34:43 +0900 Subject: [PATCH] qsv: Introduce H.264 Intel Quick Sync Video Encoder A new implementation of the Intel Quick Sync Video plugin. This plugin supports both Windows and Linux, but the VA/DMABuf optimization is not implemented yet. This new plugin has some notable differences compared with the existing MSDK plugin. * The encoder exposes only formats that can be supported natively, without internal conversion. This makes the encoder control/negotiation flow much simpler and cleaner than that of the MSDK plugin. * This plugin bundles the QSV-specific library loading helper (the dispatcher) together with the QSV SDK headers. As a result, there are no more SDK-version-dependent #ifdefs in the code and no more build-time MSDK/oneVPL SDK dependency. * Memory allocator interop between GStreamer and QSV is re-designed and decoupled. Instead of implementing a QSV-specific allocator/bufferpool, this plugin makes use of generic GStreamer memory allocators/bufferpools (e.g., GstD3D11Allocator and GstD3D11BufferPool). Specifically, the GstQsvAllocator object bridges the GstMemory and mfxFrameAllocator memory abstraction layers. Note that because of this design decision, VA/DMABuf support is not part of this initial commit. We can add the Linux optimization later, once the GstVA library exposes an allocator/bufferpool implementation as an API like GstD3D11 does. * The initial encoder implementation supports interop with the GstD3D11 infrastructure, including zero-copy encoding with upstream D3D11 elements. Part-of: --- subprojects/gst-plugins-bad/meson_options.txt | 5 + subprojects/gst-plugins-bad/sys/meson.build | 1 + .../gst-plugins-bad/sys/qsv/gstqsvallocator.cpp | 506 ++++++ .../gst-plugins-bad/sys/qsv/gstqsvallocator.h | 106 ++ .../sys/qsv/gstqsvallocator_d3d11.cpp | 417 +++++ .../sys/qsv/gstqsvallocator_d3d11.h | 34 + .../gst-plugins-bad/sys/qsv/gstqsvallocator_va.cpp | 104 ++ .../gst-plugins-bad/sys/qsv/gstqsvallocator_va.h | 34 + .../gst-plugins-bad/sys/qsv/gstqsvencoder.cpp | 1452 +++++++++++++++ .../gst-plugins-bad/sys/qsv/gstqsvencoder.h | 95 + .../gst-plugins-bad/sys/qsv/gstqsvh264enc.cpp | 1846 ++++++++++++++++++++ .../gst-plugins-bad/sys/qsv/gstqsvh264enc.h | 34 + .../gst-plugins-bad/sys/qsv/gstqsvutils.cpp | 203 +++ subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.h | 57 + .../gst-plugins-bad/sys/qsv/libmfx/meson.build | 100 ++ subprojects/gst-plugins-bad/sys/qsv/meson.build | 92 + subprojects/gst-plugins-bad/sys/qsv/plugin.cpp | 266 +++ 17 files changed, 5352 insertions(+) create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.cpp create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.h create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.cpp create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.h create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.cpp create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.h create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.cpp create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.h create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.cpp create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.h create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.cpp create mode 100644 subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.h create mode 
100644 subprojects/gst-plugins-bad/sys/qsv/libmfx/meson.build create mode 100644 subprojects/gst-plugins-bad/sys/qsv/meson.build create mode 100644 subprojects/gst-plugins-bad/sys/qsv/plugin.cpp diff --git a/subprojects/gst-plugins-bad/meson_options.txt b/subprojects/gst-plugins-bad/meson_options.txt index b347dcb..fc77967 100644 --- a/subprojects/gst-plugins-bad/meson_options.txt +++ b/subprojects/gst-plugins-bad/meson_options.txt @@ -140,6 +140,7 @@ option('openni2', type : 'feature', value : 'auto', description : 'OpenNI2 libra option('opensles', type : 'feature', value : 'auto', description : 'OpenSL ES audio source/sink plugin') option('opus', type : 'feature', value : 'auto', description : 'OPUS audio parser plugin') option('qroverlay', type : 'feature', value : 'auto', description : 'Element to set random data on a qroverlay') +option('qsv', type : 'feature', value : 'auto', description : 'Intel Quick Sync Video plugin') option('resindvd', type : 'feature', value : 'auto', description : 'Resin DVD playback plugin (GPL - only built if gpl option is also enabled!)') option('rsvg', type : 'feature', value : 'auto', description : 'SVG overlayer and image decoder plugin') option('rtmp', type : 'feature', value : 'auto', description : 'RTMP video network source and sink plugin') @@ -191,6 +192,10 @@ option('sctp-internal-usrsctp', type: 'feature', value : 'enabled', option('mfx_api', type : 'combo', choices : ['MSDK', 'oneVPL', 'auto'], value : 'auto', description : 'Select MFX API to build against') +# QSV plugin options +option('mfx-modules-dir', type: 'string', value : '', + description : 'libmfx runtime module dir, linux only') + # License-related feature options option('gpl', type: 'feature', value: 'disabled', yield: true, description: 'Allow build plugins that have (A)GPL-licensed dependencies') diff --git a/subprojects/gst-plugins-bad/sys/meson.build b/subprojects/gst-plugins-bad/sys/meson.build index f635077..2d12a6d 100644 --- a/subprojects/gst-plugins-bad/sys/meson.build +++ b/subprojects/gst-plugins-bad/sys/meson.build @@ -18,6 +18,7 @@ subdir('mediafoundation') subdir('msdk') subdir('nvcodec') subdir('opensles') +subdir('qsv') subdir('shm') subdir('tinyalsa') subdir('uvch264') diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.cpp b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.cpp new file mode 100644 index 0000000..5d9fd5f --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.cpp @@ -0,0 +1,506 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstqsvallocator.h" + +GST_DEBUG_CATEGORY_EXTERN (gst_qsv_allocator_debug); +#define GST_CAT_DEFAULT gst_qsv_allocator_debug + +/* Both d3d11 and va use (GST_MAP_FLAG_LAST << 1) value + * for GPU access */ +#define GST_MAP_QSV (GST_MAP_FLAG_LAST << 1) + +struct _GstQsvFrame +{ + GstMiniObject parent; + + GstQsvAllocator *allocator; + + GMutex lock; + + guint map_count; + GstBuffer *buffer; + GstVideoInfo info; + GstVideoFrame frame; + GstQsvMemoryType mem_type; + + /* For direct GPU access */ + GstMapInfo map_info; +}; + +GST_DEFINE_MINI_OBJECT_TYPE (GstQsvFrame, gst_qsv_frame); + +static void +_gst_qsv_frame_free (GstQsvFrame * frame) +{ + g_mutex_clear (&frame->lock); + gst_clear_buffer (&frame->buffer); + gst_clear_object (&frame->allocator); + g_free (frame); +} + +static GstQsvFrame * +gst_qsv_frame_new (void) +{ + GstQsvFrame *self; + + self = g_new0 (GstQsvFrame, 1); + g_mutex_init (&self->lock); + + gst_mini_object_init (GST_MINI_OBJECT_CAST (self), 0, + GST_TYPE_QSV_FRAME, nullptr, nullptr, + (GstMiniObjectFreeFunction) _gst_qsv_frame_free); + + return self; +} + +GstBuffer * +gst_qsv_frame_peek_buffer (GstQsvFrame * frame) +{ + g_return_val_if_fail (GST_IS_QSV_FRAME (frame), nullptr); + + return frame->buffer; +} + +struct _GstQsvAllocatorPrivate +{ + GstAtomicQueue *queue; + + mfxFrameAllocator allocator; +}; + +#define gst_qsv_allocator_parent_class parent_class +G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE (GstQsvAllocator, + gst_qsv_allocator, GST_TYPE_OBJECT); + +static void gst_qsv_allocator_finalize (GObject * object); +static mfxStatus gst_qsv_allocator_alloc (mfxHDL pthis, + mfxFrameAllocRequest * request, mfxFrameAllocResponse * response); +static mfxStatus gst_qsv_allocator_lock (mfxHDL pthis, mfxMemId mid, + mfxFrameData * ptr); +static mfxStatus gst_qsv_allocator_unlock (mfxHDL pthis, mfxMemId mid, + mfxFrameData * ptr); +static mfxStatus gst_qsv_allocator_get_hdl (mfxHDL pthis, mfxMemId mid, + mfxHDL * handle); +static mfxStatus gst_qsv_allocator_free (mfxHDL pthis, + mfxFrameAllocResponse * response); + +static void +gst_qsv_allocator_class_init (GstQsvAllocatorClass * klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + + object_class->finalize = gst_qsv_allocator_finalize; +} + +static void +gst_qsv_allocator_init (GstQsvAllocator * self) +{ + GstQsvAllocatorPrivate *priv; + + priv = self->priv = (GstQsvAllocatorPrivate *) + gst_qsv_allocator_get_instance_private (self); + + priv->queue = gst_atomic_queue_new (16); + + priv->allocator.pthis = self; + priv->allocator.Alloc = gst_qsv_allocator_alloc; + priv->allocator.Lock = gst_qsv_allocator_lock; + priv->allocator.Unlock = gst_qsv_allocator_unlock; + priv->allocator.GetHDL = gst_qsv_allocator_get_hdl; + priv->allocator.Free = gst_qsv_allocator_free; +} + +static void +gst_qsv_allocator_finalize (GObject * object) +{ + GstQsvAllocator *self = GST_QSV_ALLOCATOR (object); + GstQsvAllocatorPrivate *priv = self->priv; + GstQsvFrame *frame; + + GST_DEBUG_OBJECT (object, "finalize"); + + while ((frame = (GstQsvFrame *) gst_atomic_queue_pop (priv->queue))) + gst_qsv_frame_unref (frame); + + gst_atomic_queue_unref (priv->queue); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static mfxStatus +gst_qsv_allocator_alloc_default (GstQsvAllocator * self, + mfxFrameAllocRequest * request, mfxFrameAllocResponse * response) +{ + GstQsvFrame **mids = nullptr; + GstVideoInfo info; + GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN; + + 
GST_TRACE_OBJECT (self, "Alloc"); + + /* Something unexpected and went wrong */ + if ((request->Type & MFX_MEMTYPE_SYSTEM_MEMORY) == 0) { + GST_ERROR_OBJECT (self, + "MFX is requesting system memory, type 0x%x", request->Type); + return MFX_ERR_UNSUPPORTED; + } + + switch (request->Info.FourCC) { + case MFX_FOURCC_NV12: + format = GST_VIDEO_FORMAT_NV12; + break; + default: + /* TODO: add more formats */ + break; + } + + if (format == GST_VIDEO_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT (self, "Unknown MFX format fourcc %" GST_FOURCC_FORMAT, + GST_FOURCC_ARGS (request->Info.FourCC)); + + return MFX_ERR_UNSUPPORTED; + } + + mids = g_new0 (GstQsvFrame *, request->NumFrameSuggested); + response->NumFrameActual = request->NumFrameSuggested; + + gst_video_info_set_format (&info, + format, request->Info.Width, request->Info.Height); + for (guint i = 0; i < request->NumFrameSuggested; i++) { + GstBuffer *buffer; + + buffer = gst_buffer_new_and_alloc (info.size); + mids[i] = gst_qsv_allocator_acquire_frame (self, + GST_QSV_SYSTEM_MEMORY, &info, buffer, nullptr); + gst_buffer_unref (buffer); + } + + response->mids = (mfxMemId *) mids; + + return MFX_ERR_NONE; +} + +static mfxStatus +gst_qsv_allocator_alloc (mfxHDL pthis, + mfxFrameAllocRequest * request, mfxFrameAllocResponse * response) +{ + GstQsvAllocator *self = GST_QSV_ALLOCATOR (pthis); + GstQsvAllocatorClass *klass; + + if ((request->Type & MFX_MEMTYPE_SYSTEM_MEMORY) != 0) + return gst_qsv_allocator_alloc_default (self, request, response); + + klass = GST_QSV_ALLOCATOR_GET_CLASS (self); + + g_assert (klass->alloc); + + return klass->alloc (self, request, response); +} + +static mfxStatus +gst_qsv_allocator_lock (mfxHDL pthis, mfxMemId mid, mfxFrameData * ptr) +{ + GstQsvAllocator *self = GST_QSV_ALLOCATOR (pthis); + GstQsvFrame *frame = (GstQsvFrame *) mid; + + GST_TRACE_OBJECT (self, "Lock mfxMemId %p", mid); + + g_mutex_lock (&frame->lock); + if (frame->map_count == 0) { + gst_video_frame_map (&frame->frame, &frame->info, frame->buffer, + GST_MAP_READ); + } + + frame->map_count++; + + ptr->Pitch = (mfxU16) GST_VIDEO_FRAME_PLANE_STRIDE (&frame->frame, 0); + ptr->Y = (mfxU8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame->frame, 0); + + /* FIXME: check and handle other formats */ + if (GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12) + ptr->UV = (mfxU8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame->frame, 1); + + g_mutex_unlock (&frame->lock); + + return MFX_ERR_NONE; +} + +static mfxStatus +gst_qsv_allocator_unlock (mfxHDL pthis, mfxMemId mid, mfxFrameData * ptr) +{ + GstQsvAllocator *self = GST_QSV_ALLOCATOR (pthis); + GstQsvFrame *frame = (GstQsvFrame *) mid; + + GST_TRACE_OBJECT (self, "Unlock mfxMemId %p", mid); + + g_mutex_lock (&frame->lock); + + if (frame->map_count > 0) { + frame->map_count--; + + if (frame->map_count == 0) + gst_video_frame_unmap (&frame->frame); + } else { + GST_WARNING_OBJECT (self, "Unlock request for non-locked memory"); + } + + g_mutex_unlock (&frame->lock); + + return MFX_ERR_NONE; +} + +static mfxStatus +gst_qsv_allocator_get_hdl (mfxHDL pthis, mfxMemId mid, mfxHDL * handle) +{ + GstQsvAllocator *self = GST_QSV_ALLOCATOR (pthis); + GstQsvFrame *frame = GST_QSV_FRAME_CAST (mid); + + if (frame->mem_type != GST_QSV_VIDEO_MEMORY) { + GST_ERROR_OBJECT (self, "Unexpected call"); + + return MFX_ERR_UNSUPPORTED; + } + + if (!frame->map_info.data) { + GST_ERROR_OBJECT (self, "No mapped data"); + return MFX_ERR_UNSUPPORTED; + } + + GST_TRACE_OBJECT (self, "Get handle for mfxMemId %p", mid); + +#ifdef G_OS_WIN32 + mfxHDLPair 
*pair = (mfxHDLPair *) handle; + pair->first = (mfxHDL) frame->map_info.data; + + /* GstD3D11 will fill user_data[0] with subresource index */ + pair->second = (mfxHDL) frame->map_info.user_data[0]; +#else + *handle = (mfxHDL) frame->map_info.data; +#endif + + return MFX_ERR_NONE; +} + +static mfxStatus +gst_qsv_allocator_free (mfxHDL pthis, mfxFrameAllocResponse * response) +{ + GstQsvFrame **frames = (GstQsvFrame **) response->mids; + + for (guint i = 0; i < response->NumFrameActual; i++) + gst_clear_qsv_frame (&frames[i]); + + g_clear_pointer (&response->mids, g_free); + + return MFX_ERR_NONE; +} + +static void +gst_qsv_frame_release (GstQsvFrame * frame) +{ + GstQsvAllocator *allocator = frame->allocator; + + g_mutex_lock (&frame->lock); + if (frame->map_count > 0) { + GST_WARNING_OBJECT (allocator, "Releasing mapped frame %p", frame); + gst_video_frame_unmap (&frame->frame); + } + frame->map_count = 0; + g_mutex_unlock (&frame->lock); + + if (frame->mem_type == GST_QSV_VIDEO_MEMORY && frame->map_info.data) + gst_buffer_unmap (frame->buffer, &frame->map_info); + + memset (&frame->map_info, 0, sizeof (GstMapInfo)); + + gst_clear_buffer (&frame->buffer); + GST_MINI_OBJECT_CAST (frame)->dispose = nullptr; + frame->allocator = nullptr; + + GST_TRACE_OBJECT (allocator, "Moving frame %p back to pool", frame); + + gst_atomic_queue_push (allocator->priv->queue, frame); + gst_object_unref (allocator); +} + +static gboolean +gst_qsv_frame_dispose (GstQsvFrame * frame) +{ + g_assert (frame->allocator); + + gst_qsv_frame_ref (frame); + gst_qsv_frame_release (frame); + + return FALSE; +} + +static GstBuffer * +gst_qsv_allocator_upload_default (GstQsvAllocator * allocator, + const GstVideoInfo * info, GstBuffer * buffer, GstBufferPool * pool) +{ + GstBuffer *dst_buf; + GstFlowReturn flow_ret; + GstVideoFrame src_frame, dst_frame; + + flow_ret = gst_buffer_pool_acquire_buffer (pool, &dst_buf, nullptr); + if (flow_ret != GST_FLOW_OK) { + GST_WARNING ("Failed to acquire buffer from pool, return %s", + gst_flow_get_name (flow_ret)); + return nullptr; + } + + gst_video_frame_map (&src_frame, info, buffer, GST_MAP_READ); + gst_video_frame_map (&dst_frame, info, dst_buf, GST_MAP_WRITE); + + if (GST_VIDEO_FRAME_WIDTH (&src_frame) == GST_VIDEO_FRAME_WIDTH (&dst_frame) + && GST_VIDEO_FRAME_HEIGHT (&src_frame) == + GST_VIDEO_FRAME_HEIGHT (&dst_frame)) { + gst_video_frame_unmap (&src_frame); + gst_video_frame_unmap (&dst_frame); + + gst_buffer_unref (dst_buf); + return gst_buffer_ref (buffer); + } + + for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES (&src_frame); i++) { + guint src_width_in_bytes, src_height; + guint dst_width_in_bytes, dst_height; + guint width_in_bytes, height; + guint src_stride, dst_stride; + guint8 *src_data, *dst_data; + + src_width_in_bytes = GST_VIDEO_FRAME_COMP_WIDTH (&src_frame, i) * + GST_VIDEO_FRAME_COMP_PSTRIDE (&src_frame, i); + src_height = GST_VIDEO_FRAME_COMP_HEIGHT (&src_frame, i); + src_stride = GST_VIDEO_FRAME_COMP_STRIDE (&src_frame, i); + + dst_width_in_bytes = GST_VIDEO_FRAME_COMP_WIDTH (&dst_frame, i) * + GST_VIDEO_FRAME_COMP_PSTRIDE (&dst_frame, i); + dst_height = GST_VIDEO_FRAME_COMP_HEIGHT (&dst_frame, i); + dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&dst_frame, i); + + width_in_bytes = MIN (src_width_in_bytes, dst_width_in_bytes); + height = MIN (src_height, dst_height); + + src_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&src_frame, i); + dst_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&dst_frame, i); + + for (guint j = 0; j < height; j++) { + memcpy (dst_data, 
src_data, width_in_bytes); + dst_data += dst_stride; + src_data += src_stride; + } + } + + gst_video_frame_unmap (&dst_frame); + gst_video_frame_unmap (&src_frame); + + return dst_buf; +} + +/** + * gst_qsv_allocator_acquire_frame: + * @allocator: a #GstQsvAllocator + * @mem_type: a memory type + * @info: a #GstVideoInfo + * @buffer: (transfer none): a #GstBuffer + * @pool: (nullable): a #GstBufferPool + * + * Uploads @buffer to video memory if required, and wraps GstBuffer using + * #GstQsvFrame object so that QSV API can access native memory handle + * via mfxFrameAllocator interface. + * + * Returns: a #GstQsvFrame object + */ +GstQsvFrame * +gst_qsv_allocator_acquire_frame (GstQsvAllocator * allocator, + GstQsvMemoryType mem_type, const GstVideoInfo * info, GstBuffer * buffer, + GstBufferPool * pool) +{ + GstQsvAllocatorPrivate *priv; + GstQsvFrame *frame; + + g_return_val_if_fail (GST_IS_QSV_ALLOCATOR (allocator), nullptr); + + priv = allocator->priv; + frame = (GstQsvFrame *) gst_atomic_queue_pop (priv->queue); + + if (!frame) + frame = gst_qsv_frame_new (); + + frame->mem_type = mem_type; + frame->allocator = (GstQsvAllocator *) gst_object_ref (allocator); + GST_MINI_OBJECT_CAST (frame)->dispose = + (GstMiniObjectDisposeFunction) gst_qsv_frame_dispose; + + if (!pool) { + frame->buffer = gst_buffer_ref (buffer); + frame->info = *info; + } else { + GstBuffer *upload_buf; + + if (mem_type == GST_QSV_SYSTEM_MEMORY) { + upload_buf = gst_qsv_allocator_upload_default (allocator, info, buffer, + pool); + } else { + GstQsvAllocatorClass *klass; + + klass = GST_QSV_ALLOCATOR_GET_CLASS (allocator); + g_assert (klass->upload); + + upload_buf = klass->upload (allocator, info, buffer, pool); + } + + if (!upload_buf) { + GST_WARNING_OBJECT (allocator, "Failed to upload buffer"); + gst_qsv_frame_unref (frame); + + return nullptr; + } + + frame->buffer = upload_buf; + frame->info = *info; + } + + if (mem_type == GST_QSV_VIDEO_MEMORY) { + /* TODO: we need to know context whether this memory is for + * output (e.g., decoder or vpp), but we have only encoder + * implementation at the moment, so GST_MAP_READ should be fine */ + if (!gst_buffer_map (frame->buffer, &frame->map_info, + (GstMapFlags) (GST_MAP_READ | GST_MAP_QSV))) { + GST_ERROR_OBJECT (allocator, "Failed to map video buffer"); + gst_qsv_frame_unref (frame); + + return nullptr; + } + } + + return frame; +} + +mfxFrameAllocator * +gst_qsv_allocator_get_allocator_handle (GstQsvAllocator * allocator) +{ + g_return_val_if_fail (GST_IS_QSV_ALLOCATOR (allocator), nullptr); + + return &allocator->priv->allocator; +} diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.h b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.h new file mode 100644 index 0000000..4024862 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator.h @@ -0,0 +1,106 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_QSV_FRAME (gst_qsv_frame_get_type()) +#define GST_IS_QSV_FRAME(obj) (GST_IS_MINI_OBJECT_TYPE(obj, GST_TYPE_QSV_FRAME)) +#define GST_QSV_FRAME_CAST(obj) ((GstQsvFrame *) obj) + +#define GST_TYPE_QSV_ALLOCATOR (gst_qsv_allocator_get_type()) +#define GST_QSV_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_QSV_ALLOCATOR, GstQsvAllocator)) +#define GST_QSV_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_QSV_ALLOCATOR, GstQsvAllocatorClass)) +#define GST_IS_QSV_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_QSV_ALLOCATOR)) +#define GST_IS_QSV_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_QSV_ALLOCATOR)) +#define GST_QSV_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_QSV_ALLOCATOR, GstQsvAllocatorClass)) +#define GST_QSV_ALLOCATOR_CAST(obj) ((GstQsvAllocator *)obj) + +typedef struct _GstQsvFrame GstQsvFrame; +typedef struct _GstQsvAllocator GstQsvAllocator; +typedef struct _GstQsvAllocatorClass GstQsvAllocatorClass; +typedef struct _GstQsvAllocatorPrivate GstQsvAllocatorPrivate; + +GType gst_qsv_frame_get_type (void); + +GstBuffer * gst_qsv_frame_peek_buffer (GstQsvFrame * frame); + +static inline GstQsvFrame * +gst_qsv_frame_ref (GstQsvFrame * frame) +{ + return (GstQsvFrame *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (frame)); +} + +static inline void +gst_qsv_frame_unref (GstQsvFrame * frame) +{ + gst_mini_object_unref (GST_MINI_OBJECT_CAST (frame)); +} + +static inline void +gst_clear_qsv_frame (GstQsvFrame ** frame) +{ + gst_clear_mini_object ((GstMiniObject **) frame); +} + +typedef enum +{ + GST_QSV_SYSTEM_MEMORY, + GST_QSV_VIDEO_MEMORY, +} GstQsvMemoryType; + +struct _GstQsvAllocator +{ + GstObject parent; + + GstQsvAllocatorPrivate *priv; +}; + +struct _GstQsvAllocatorClass +{ + GstObjectClass parent_class; + + mfxStatus (*alloc) (GstQsvAllocator * allocator, + mfxFrameAllocRequest * request, + mfxFrameAllocResponse * response); + + GstBuffer * (*upload) (GstQsvAllocator * allocator, + const GstVideoInfo * info, + GstBuffer * buffer, + GstBufferPool * pool); +}; + +GType gst_qsv_allocator_get_type (void); + +GstQsvFrame * gst_qsv_allocator_acquire_frame (GstQsvAllocator * allocator, + GstQsvMemoryType mem_type, + const GstVideoInfo * info, + GstBuffer * buffer, + GstBufferPool * pool); + +mfxFrameAllocator * gst_qsv_allocator_get_allocator_handle (GstQsvAllocator * allocator); + +G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstQsvAllocator, gst_object_unref) + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.cpp b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.cpp new file mode 100644 index 0000000..0d4e627 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.cpp @@ -0,0 +1,417 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstqsvallocator_d3d11.h" +#include + +GST_DEBUG_CATEGORY_EXTERN (gst_qsv_allocator_debug); +#define GST_CAT_DEFAULT gst_qsv_allocator_debug + +struct _GstQsvD3D11Allocator +{ + GstQsvAllocator parent; + + GstD3D11Device *device; +}; + +#define gst_qsv_d3d11_allocator_parent_class parent_class +G_DEFINE_TYPE (GstQsvD3D11Allocator, gst_qsv_d3d11_allocator, + GST_TYPE_QSV_ALLOCATOR); + +static void gst_qsv_d3d11_allocator_dispose (GObject * object); +static mfxStatus gst_qsv_d3d11_allocator_alloc (GstQsvAllocator * allocator, + mfxFrameAllocRequest * request, mfxFrameAllocResponse * response); +static GstBuffer *gst_qsv_d3d11_allocator_upload (GstQsvAllocator * allocator, + const GstVideoInfo * info, GstBuffer * buffer, GstBufferPool * pool); + +static void +gst_qsv_d3d11_allocator_class_init (GstQsvD3D11AllocatorClass * klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + GstQsvAllocatorClass *alloc_class = GST_QSV_ALLOCATOR_CLASS (klass); + + object_class->dispose = gst_qsv_d3d11_allocator_dispose; + + alloc_class->alloc = GST_DEBUG_FUNCPTR (gst_qsv_d3d11_allocator_alloc); + alloc_class->upload = GST_DEBUG_FUNCPTR (gst_qsv_d3d11_allocator_upload); +} + +static void +gst_qsv_d3d11_allocator_init (GstQsvD3D11Allocator * self) +{ +} + +static void +gst_qsv_d3d11_allocator_dispose (GObject * object) +{ + GstQsvD3D11Allocator *self = GST_QSV_D3D11_ALLOCATOR (object); + + gst_clear_object (&self->device); + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static mfxStatus +gst_qsv_d3d11_allocator_alloc (GstQsvAllocator * allocator, + mfxFrameAllocRequest * request, mfxFrameAllocResponse * response) +{ + GstQsvD3D11Allocator *self = GST_QSV_D3D11_ALLOCATOR (allocator); + DXGI_FORMAT dxgi_format = DXGI_FORMAT_UNKNOWN; + GstQsvFrame **mids = nullptr; + + GST_TRACE_OBJECT (self, "Alloc"); + + /* Something unexpected went wrong */ + if ((request->Type & MFX_MEMTYPE_SYSTEM_MEMORY) != 0) { + GST_ERROR_OBJECT (self, + "MFX is requesting system memory, type 0x%x", request->Type); + return MFX_ERR_UNSUPPORTED; + } + + switch (request->Info.FourCC) { + case MFX_FOURCC_NV12: + dxgi_format = DXGI_FORMAT_NV12; + break; + default: + /* TODO: add more formats */ + break; + } + + if (dxgi_format == DXGI_FORMAT_UNKNOWN && + request->Info.FourCC != MFX_FOURCC_P8) { + GST_ERROR_OBJECT (self, "Failed to convert %d to DXGI format", + request->Info.FourCC); + + return MFX_ERR_UNSUPPORTED; + } + + if (request->Info.FourCC == MFX_FOURCC_P8) { + GstD3D11Allocator *d3d11_alloc = nullptr; + D3D11_BUFFER_DESC desc; + GstVideoInfo info; + GstMemory *mem; + GstBuffer *buffer; + gsize offset[GST_VIDEO_MAX_PLANES] = { 0, }; + gint stride[GST_VIDEO_MAX_PLANES] = { 0, }; + guint size; + + d3d11_alloc = + (GstD3D11Allocator *) gst_allocator_find (GST_D3D11_MEMORY_NAME); + if (!d3d11_alloc) { + GST_ERROR_OBJECT (self, "D3D11 allocator is unavailable"); + + return MFX_ERR_MEMORY_ALLOC; + } + + memset (&desc, 0, sizeof (D3D11_BUFFER_DESC)); + + desc.ByteWidth 
= request->Info.Width * request->Info.Height; + desc.Usage = D3D11_USAGE_STAGING; + desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ; + + mem = gst_d3d11_allocator_alloc_buffer (d3d11_alloc, self->device, &desc); + gst_object_unref (d3d11_alloc); + + if (!mem) { + GST_ERROR_OBJECT (self, "Failed to allocate buffer"); + + return MFX_ERR_MEMORY_ALLOC; + } + + size = request->Info.Width * request->Info.Height; + stride[0] = size; + + gst_video_info_set_format (&info, GST_VIDEO_FORMAT_GRAY8, size, 1); + + buffer = gst_buffer_new (); + gst_buffer_append_memory (buffer, mem); + gst_buffer_add_video_meta_full (buffer, GST_VIDEO_FRAME_FLAG_NONE, + GST_VIDEO_FORMAT_GRAY8, size, 1, 1, offset, stride); + + mids = g_new0 (GstQsvFrame *, 1); + response->NumFrameActual = 1; + mids[0] = gst_qsv_allocator_acquire_frame (allocator, + GST_QSV_VIDEO_MEMORY, &info, buffer, nullptr); + gst_buffer_unref (buffer); + } else { + GstBufferPool *pool; + GstVideoFormat format; + GstVideoInfo info; + GstCaps *caps; + GstStructure *config; + GstD3D11AllocationParams *params; + guint bind_flags = 0; + + if ((request->Type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) != 0) + bind_flags |= D3D11_BIND_VIDEO_ENCODER; + + format = gst_d3d11_dxgi_format_to_gst (dxgi_format); + gst_video_info_set_format (&info, + format, request->Info.Width, request->Info.Height); + caps = gst_video_info_to_caps (&info); + + pool = gst_d3d11_buffer_pool_new (self->device); + params = gst_d3d11_allocation_params_new (self->device, &info, + (GstD3D11AllocationFlags) 0, bind_flags); + + config = gst_buffer_pool_get_config (pool); + gst_buffer_pool_config_set_d3d11_allocation_params (config, params); + gst_d3d11_allocation_params_free (params); + gst_buffer_pool_config_set_params (config, caps, + GST_VIDEO_INFO_SIZE (&info), 0, 0); + gst_caps_unref (caps); + gst_buffer_pool_set_config (pool, config); + gst_buffer_pool_set_active (pool, TRUE); + + mids = g_new0 (GstQsvFrame *, request->NumFrameSuggested); + response->NumFrameActual = request->NumFrameSuggested; + for (guint i = 0; i < request->NumFrameSuggested; i++) { + GstBuffer *buffer; + + if (gst_buffer_pool_acquire_buffer (pool, &buffer, nullptr) != + GST_FLOW_OK) { + GST_ERROR_OBJECT (self, "Failed to allocate texture buffer"); + gst_buffer_pool_set_active (pool, FALSE); + gst_object_unref (pool); + goto error; + } + + mids[i] = gst_qsv_allocator_acquire_frame (allocator, + GST_QSV_VIDEO_MEMORY, &info, buffer, nullptr); + gst_buffer_unref (buffer); + } + gst_buffer_pool_set_active (pool, FALSE); + gst_object_unref (pool); + } + + response->mids = (mfxMemId *) mids; + + return MFX_ERR_NONE; + +error: + if (mids) { + for (guint i = 0; i < response->NumFrameActual; i++) + gst_clear_qsv_frame (&mids[i]); + + g_free (mids); + } + + response->NumFrameActual = 0; + + return MFX_ERR_MEMORY_ALLOC; +} + +static GstBuffer * +gst_qsv_frame_copy_d3d11 (const GstVideoInfo * info, GstBuffer * src_buf, + GstBuffer * dst_buf) +{ + D3D11_TEXTURE2D_DESC src_desc, dst_desc; + D3D11_BOX src_box; + guint subresource_idx; + GstMemory *src_mem, *dst_mem; + GstMapInfo src_info, dst_info; + ID3D11Texture2D *src_tex, *dst_tex; + GstD3D11Device *device; + ID3D11DeviceContext *device_context; + + GST_TRACE ("Copying D3D11 buffer %" GST_PTR_FORMAT, src_buf); + + src_mem = gst_buffer_peek_memory (src_buf, 0); + dst_mem = gst_buffer_peek_memory (dst_buf, 0); + + device = GST_D3D11_MEMORY_CAST (dst_mem)->device; + device_context = gst_d3d11_device_get_device_context_handle (device); + + if (!gst_memory_map (src_mem, + &src_info, 
(GstMapFlags) (GST_MAP_READ | GST_MAP_D3D11))) { + GST_WARNING ("Failed to map src memory"); + gst_buffer_unref (dst_buf); + return nullptr; + } + + if (!gst_memory_map (dst_mem, + &dst_info, (GstMapFlags) (GST_MAP_WRITE | GST_MAP_D3D11))) { + GST_WARNING ("Failed to map dst memory"); + gst_memory_unmap (src_mem, &src_info); + gst_buffer_unref (dst_buf); + return nullptr; + } + + src_tex = (ID3D11Texture2D *) src_info.data; + dst_tex = (ID3D11Texture2D *) dst_info.data; + + src_tex->GetDesc (&src_desc); + dst_tex->GetDesc (&dst_desc); + + subresource_idx = + gst_d3d11_memory_get_subresource_index (GST_D3D11_MEMORY_CAST (src_mem)); + + src_box.left = 0; + src_box.top = 0; + src_box.front = 0; + src_box.back = 1; + src_box.right = MIN (src_desc.Width, dst_desc.Width); + src_box.bottom = MIN (src_desc.Height, dst_desc.Height); + + gst_d3d11_device_lock (device); + device_context->CopySubresourceRegion (dst_tex, 0, + 0, 0, 0, src_tex, subresource_idx, &src_box); + gst_d3d11_device_unlock (device); + + gst_memory_unmap (dst_mem, &dst_info); + gst_memory_unmap (src_mem, &src_info); + + return dst_buf; +} + +static GstBuffer * +gst_qsv_frame_upload_sysmem (const GstVideoInfo * info, GstBuffer * src_buf, + GstBuffer * dst_buf) +{ + GstVideoFrame src_frame, dst_frame; + + GST_TRACE ("Uploading sysmem buffer %" GST_PTR_FORMAT, src_buf); + + if (!gst_video_frame_map (&src_frame, info, src_buf, GST_MAP_READ)) { + GST_WARNING ("Failed to map src frame"); + gst_buffer_unref (dst_buf); + return nullptr; + } + + if (!gst_video_frame_map (&dst_frame, info, dst_buf, GST_MAP_WRITE)) { + GST_WARNING ("Failed to map dst frame"); + gst_video_frame_unmap (&src_frame); + gst_buffer_unref (dst_buf); + return nullptr; + } + + for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES (&src_frame); i++) { + guint src_width_in_bytes, src_height; + guint dst_width_in_bytes, dst_height; + guint width_in_bytes, height; + guint src_stride, dst_stride; + guint8 *src_data, *dst_data; + + src_width_in_bytes = GST_VIDEO_FRAME_COMP_WIDTH (&src_frame, i) * + GST_VIDEO_FRAME_COMP_PSTRIDE (&src_frame, i); + src_height = GST_VIDEO_FRAME_COMP_HEIGHT (&src_frame, i); + src_stride = GST_VIDEO_FRAME_COMP_STRIDE (&src_frame, i); + + dst_width_in_bytes = GST_VIDEO_FRAME_COMP_WIDTH (&dst_frame, i) * + GST_VIDEO_FRAME_COMP_PSTRIDE (&dst_frame, i); + dst_height = GST_VIDEO_FRAME_COMP_HEIGHT (&dst_frame, i); + dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&dst_frame, i); + + width_in_bytes = MIN (src_width_in_bytes, dst_width_in_bytes); + height = MIN (src_height, dst_height); + + src_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&src_frame, i); + dst_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&dst_frame, i); + + for (guint j = 0; j < height; j++) { + memcpy (dst_data, src_data, width_in_bytes); + dst_data += dst_stride; + src_data += src_stride; + } + } + + gst_video_frame_unmap (&dst_frame); + gst_video_frame_unmap (&src_frame); + + return dst_buf; +} + +static GstBuffer * +gst_qsv_d3d11_allocator_upload (GstQsvAllocator * allocator, + const GstVideoInfo * info, GstBuffer * buffer, GstBufferPool * pool) +{ + GstMemory *mem; + GstD3D11Memory *dmem, *dst_dmem; + D3D11_TEXTURE2D_DESC desc, dst_desc; + GstBuffer *dst_buf; + GstFlowReturn flow_ret; + + /* 1) D3D11 buffer from the same d3d11 device as ours + * 1-1) Same resolution + * -> Increase refcount and wrap with GstQsvFrame + * 1-2) Different resolution + * -> GPU copy + * 2) non-D3D11 buffer, or buffer from another d3d11 device + * -> Always CPU copy + */ + + if (!GST_IS_D3D11_BUFFER_POOL (pool)) { + 
GST_ERROR_OBJECT (allocator, "Not a d3d11 buffer pool"); + return nullptr; + } + + flow_ret = gst_buffer_pool_acquire_buffer (pool, &dst_buf, nullptr); + if (flow_ret != GST_FLOW_OK) { + GST_WARNING ("Failed to acquire buffer from pool, return %s", + gst_flow_get_name (flow_ret)); + return nullptr; + } + + mem = gst_buffer_peek_memory (buffer, 0); + if (!gst_is_d3d11_memory (mem) || gst_buffer_n_memory (buffer) > 1) { + /* d3d11 buffer should hold single memory object */ + return gst_qsv_frame_upload_sysmem (info, buffer, dst_buf); + } + + /* FIXME: Add support for shared texture for GPU copy or wrapping + * texture from different device */ + dmem = GST_D3D11_MEMORY_CAST (mem); + if (dmem->device != GST_D3D11_BUFFER_POOL (pool)->device) + return gst_qsv_frame_upload_sysmem (info, buffer, dst_buf); + + dst_dmem = (GstD3D11Memory *) gst_buffer_peek_memory (dst_buf, 0); + gst_d3d11_memory_get_texture_desc (dmem, &desc); + gst_d3d11_memory_get_texture_desc (dst_dmem, &dst_desc); + + if (desc.Width == dst_desc.Width && desc.Height == dst_desc.Height && + desc.Usage == D3D11_USAGE_DEFAULT) { + /* Identical size and non-staging texture, wrap without copying */ + GST_TRACE ("Wrapping D3D11 buffer without copy"); + gst_buffer_unref (dst_buf); + + return gst_buffer_ref (buffer); + } + + return gst_qsv_frame_copy_d3d11 (info, buffer, dst_buf); +} + +GstQsvAllocator * +gst_qsv_d3d11_allocator_new (GstD3D11Device * device) +{ + GstQsvD3D11Allocator *self; + + g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), nullptr); + + self = (GstQsvD3D11Allocator *) + g_object_new (GST_TYPE_QSV_D3D11_ALLOCATOR, nullptr); + self->device = (GstD3D11Device *) gst_object_ref (device); + + gst_object_ref_sink (self); + + return GST_QSV_ALLOCATOR (self); +} diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.h b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.h new file mode 100644 index 0000000..94b242f --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_d3d11.h @@ -0,0 +1,34 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include +#include "gstqsvallocator.h" + +G_BEGIN_DECLS + +#define GST_TYPE_QSV_D3D11_ALLOCATOR (gst_qsv_d3d11_allocator_get_type()) +G_DECLARE_FINAL_TYPE (GstQsvD3D11Allocator, gst_qsv_d3d11_allocator, + GST, QSV_D3D11_ALLOCATOR, GstQsvAllocator); + +GstQsvAllocator * gst_qsv_d3d11_allocator_new (GstD3D11Device * device); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.cpp b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.cpp new file mode 100644 index 0000000..afb3be5 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.cpp @@ -0,0 +1,104 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstqsvallocator_va.h" + +GST_DEBUG_CATEGORY_EXTERN (gst_qsv_allocator_debug); +#define GST_CAT_DEFAULT gst_qsv_allocator_debug + +struct _GstQsvVaAllocator +{ + GstQsvAllocator parent; + + GstVaDisplay *display; +}; + +#define gst_qsv_va_allocator_parent_class parent_class +G_DEFINE_TYPE (GstQsvVaAllocator, gst_qsv_va_allocator, GST_TYPE_QSV_ALLOCATOR); + +static void gst_qsv_va_allocator_dispose (GObject * object); +static mfxStatus gst_qsv_va_allocator_alloc (GstQsvAllocator * allocator, + mfxFrameAllocRequest * request, mfxFrameAllocResponse * response); +static GstBuffer *gst_qsv_va_allocator_upload (GstQsvAllocator * allocator, + const GstVideoInfo * info, GstBuffer * buffer, GstBufferPool * pool); + +static void +gst_qsv_va_allocator_class_init (GstQsvVaAllocatorClass * klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + GstQsvAllocatorClass *alloc_class = GST_QSV_ALLOCATOR_CLASS (klass); + + object_class->dispose = gst_qsv_va_allocator_dispose; + + alloc_class->alloc = GST_DEBUG_FUNCPTR (gst_qsv_va_allocator_alloc); + alloc_class->upload = GST_DEBUG_FUNCPTR (gst_qsv_va_allocator_upload); +} + +static void +gst_qsv_va_allocator_init (GstQsvVaAllocator * self) +{ +} + +static void +gst_qsv_va_allocator_dispose (GObject * object) +{ + GstQsvVaAllocator *self = GST_QSV_VA_ALLOCATOR (object); + + gst_clear_object (&self->display); + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static mfxStatus +gst_qsv_va_allocator_alloc (GstQsvAllocator * allocator, + mfxFrameAllocRequest * request, mfxFrameAllocResponse * response) +{ + GST_ERROR_OBJECT (allocator, "Not implemented"); + + return MFX_ERR_UNSUPPORTED; +} + +static GstBuffer * +gst_qsv_va_allocator_upload (GstQsvAllocator * allocator, + const GstVideoInfo * info, GstBuffer * buffer, GstBufferPool * pool) +{ + GST_ERROR_OBJECT (allocator, "Not implemented"); + + return nullptr; +} + +GstQsvAllocator * +gst_qsv_va_allocator_new (GstVaDisplay * display) +{ + GstQsvVaAllocator *self; + + g_return_val_if_fail 
(GST_IS_VA_DISPLAY (display), nullptr); + + self = (GstQsvVaAllocator *) + g_object_new (GST_TYPE_QSV_VA_ALLOCATOR, nullptr); + self->display = (GstVaDisplay *) gst_object_ref (display); + + gst_object_ref_sink (self); + + return GST_QSV_ALLOCATOR (self); +} diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.h b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.h new file mode 100644 index 0000000..8c6e6e0 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvallocator_va.h @@ -0,0 +1,34 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include "gstqsvallocator.h" + +G_BEGIN_DECLS + +#define GST_TYPE_QSV_VA_ALLOCATOR (gst_qsv_va_allocator_get_type()) +G_DECLARE_FINAL_TYPE (GstQsvVaAllocator, gst_qsv_va_allocator, + GST, QSV_VA_ALLOCATOR, GstQsvAllocator); + +GstQsvAllocator * gst_qsv_va_allocator_new (GstVaDisplay * display); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.cpp b/subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.cpp new file mode 100644 index 0000000..1a395fc --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.cpp @@ -0,0 +1,1452 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstqsvencoder.h" +#include +#include +#include + +#ifdef G_OS_WIN32 +#include +#include "gstqsvallocator_d3d11.h" + +#include + +/* *INDENT-OFF* */ +using namespace Microsoft::WRL; +/* *INDENT-ON* */ +#else +#include +#include "gstqsvallocator_va.h" +#endif /* G_OS_WIN32 */ + +GST_DEBUG_CATEGORY_EXTERN (gst_qsv_encoder_debug); +#define GST_CAT_DEFAULT gst_qsv_encoder_debug + +GType +gst_qsv_coding_option_get_type (void) +{ + static GType coding_opt_type = 0; + static const GEnumValue coding_opts[] = { + {MFX_CODINGOPTION_UNKNOWN, "Unknown", "unknown"}, + {MFX_CODINGOPTION_ON, "On", "on"}, + {MFX_CODINGOPTION_OFF, "Off", "off"}, + {0, nullptr, nullptr} + }; + + if (g_once_init_enter (&coding_opt_type)) { + GType type = g_enum_register_static ("GstQsvCodingOption", + coding_opts); + g_once_init_leave (&coding_opt_type, type); + } + + return coding_opt_type; +} + +enum +{ + PROP_0, + PROP_TARGET_USAGE, + PROP_LOW_LATENCY, +}; + +#define DEFAULT_TARGET_USAGE MFX_TARGETUSAGE_BALANCED +#define DEFAULT_LOW_LATENCY FALSE + +typedef struct _GstQsvEncoderSurface +{ + mfxFrameSurface1 surface; + mfxEncodeCtrl encode_control; + + /* array of mfxPayload (e.g., SEI data) associated with this surface */ + GPtrArray *payload; + + /* holds ownership */ + GstQsvFrame *qsv_frame; +} GstQsvEncoderSurface; + +typedef struct _GstQsvEncoderTask +{ + mfxSyncPoint sync_point; + mfxBitstream bitstream; +} GstQsvEncoderTask; + +struct _GstQsvEncoderPrivate +{ + GstObject *device; + + GstVideoCodecState *input_state; + GstQsvAllocator *allocator; + + /* API specific alignment requirement (multiple of 16 or 32) */ + GstVideoInfo aligned_info; + + mfxSession session; + mfxVideoParam video_param; + + /* List of mfxExtBuffer configured by subclass, subclass will hold + * allocated memory for each mfxExtBuffer */ + GPtrArray *extra_params; + + MFXVideoENCODE *encoder; + GstQsvMemoryType mem_type; + + /* Internal buffer pool used to allocate fallback buffer when input buffer + * is not compatible with expected format/type/resolution etc */ + GstBufferPool *internal_pool; + + /* Array of GstQsvEncoderSurface, holding ownership */ + GArray *surface_pool; + guint next_surface_index; + + /* Array of GstQsvEncoderTask, holding ownership */ + GArray *task_pool; + + GQueue free_tasks; + GQueue pending_tasks; + + /* Properties */ + guint target_usage; + gboolean low_latency; +}; + +#define gst_qsv_encoder_parent_class parent_class +G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE (GstQsvEncoder, gst_qsv_encoder, + GST_TYPE_VIDEO_ENCODER); + +static void gst_qsv_encoder_dispose (GObject * object); +static void gst_qsv_encoder_finalize (GObject * object); +static void gst_qsv_encoder_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_qsv_encoder_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + +static void gst_qsv_encoder_set_context (GstElement * element, + GstContext * context); + +static gboolean gst_qsv_encoder_open (GstVideoEncoder * encoder); +static gboolean gst_qsv_encoder_stop (GstVideoEncoder * encoder); +static gboolean gst_qsv_encoder_close (GstVideoEncoder * encoder); +static gboolean gst_qsv_encoder_set_format (GstVideoEncoder * encoder, + GstVideoCodecState * state); +static GstFlowReturn gst_qsv_encoder_handle_frame (GstVideoEncoder * encoder, + GstVideoCodecFrame * frame); +static GstFlowReturn gst_qsv_encoder_finish (GstVideoEncoder * encoder); +static gboolean 
gst_qsv_encoder_flush (GstVideoEncoder * encoder); +static gboolean gst_qsv_encoder_sink_query (GstVideoEncoder * encoder, + GstQuery * query); +static gboolean gst_qsv_encoder_src_query (GstVideoEncoder * encoder, + GstQuery * query); +static gboolean gst_qsv_encoder_propose_allocation (GstVideoEncoder * encoder, + GstQuery * query); + +static void gst_qsv_encoder_surface_clear (GstQsvEncoderSurface * task); +static void gst_qsv_encoder_task_clear (GstQsvEncoderTask * task); + +static void +gst_qsv_encoder_class_init (GstQsvEncoderClass * klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + GstVideoEncoderClass *videoenc_class = GST_VIDEO_ENCODER_CLASS (klass); + + object_class->dispose = gst_qsv_encoder_dispose; + object_class->finalize = gst_qsv_encoder_finalize; + object_class->set_property = gst_qsv_encoder_set_property; + object_class->get_property = gst_qsv_encoder_get_property; + + g_object_class_install_property (object_class, PROP_TARGET_USAGE, + g_param_spec_uint ("target-usage", "Target Usage", + "1: Best quality, 4: Balanced, 7: Best speed", + 1, 7, DEFAULT_TARGET_USAGE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property (object_class, PROP_LOW_LATENCY, + g_param_spec_boolean ("low-latency", "Low Latency", + "Enables low-latency encoding", DEFAULT_LOW_LATENCY, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + element_class->set_context = GST_DEBUG_FUNCPTR (gst_qsv_encoder_set_context); + + videoenc_class->open = GST_DEBUG_FUNCPTR (gst_qsv_encoder_open); + videoenc_class->stop = GST_DEBUG_FUNCPTR (gst_qsv_encoder_stop); + videoenc_class->close = GST_DEBUG_FUNCPTR (gst_qsv_encoder_close); + videoenc_class->set_format = GST_DEBUG_FUNCPTR (gst_qsv_encoder_set_format); + videoenc_class->handle_frame = + GST_DEBUG_FUNCPTR (gst_qsv_encoder_handle_frame); + videoenc_class->finish = GST_DEBUG_FUNCPTR (gst_qsv_encoder_finish); + videoenc_class->flush = GST_DEBUG_FUNCPTR (gst_qsv_encoder_flush); + videoenc_class->sink_query = GST_DEBUG_FUNCPTR (gst_qsv_encoder_sink_query); + videoenc_class->src_query = GST_DEBUG_FUNCPTR (gst_qsv_encoder_src_query); + videoenc_class->propose_allocation = + GST_DEBUG_FUNCPTR (gst_qsv_encoder_propose_allocation); +} + +static void +gst_qsv_encoder_init (GstQsvEncoder * self) +{ + GstQsvEncoderPrivate *priv; + + priv = self->priv = + (GstQsvEncoderPrivate *) gst_qsv_encoder_get_instance_private (self); + + priv->extra_params = g_ptr_array_sized_new (8); + + priv->surface_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvEncoderSurface)); + g_array_set_clear_func (priv->surface_pool, + (GDestroyNotify) gst_qsv_encoder_surface_clear); + + priv->task_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvEncoderTask)); + g_array_set_clear_func (priv->task_pool, + (GDestroyNotify) gst_qsv_encoder_task_clear); + + g_queue_init (&priv->free_tasks); + g_queue_init (&priv->pending_tasks); + + priv->target_usage = DEFAULT_TARGET_USAGE; + priv->low_latency = DEFAULT_LOW_LATENCY; +} + +static void +gst_qsv_encoder_dispose (GObject * object) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (object); + GstQsvEncoderPrivate *priv = self->priv; + + gst_clear_object (&priv->device); + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static void +gst_qsv_encoder_finalize (GObject * object) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (object); + GstQsvEncoderPrivate *priv = self->priv; + + g_ptr_array_unref (priv->extra_params); + g_array_unref 
(priv->task_pool); + g_array_unref (priv->surface_pool); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static void +gst_qsv_encoder_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (object); + GstQsvEncoderPrivate *priv = self->priv; + + switch (prop_id) { + case PROP_TARGET_USAGE: + priv->target_usage = g_value_get_uint (value); + break; + case PROP_LOW_LATENCY: + priv->low_latency = g_value_get_boolean (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_qsv_encoder_get_property (GObject * object, guint prop_id, GValue * value, + GParamSpec * pspec) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (object); + GstQsvEncoderPrivate *priv = self->priv; + + switch (prop_id) { + case PROP_TARGET_USAGE: + g_value_set_uint (value, priv->target_usage); + break; + case PROP_LOW_LATENCY: + g_value_set_boolean (value, priv->low_latency); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_qsv_encoder_set_context (GstElement * element, GstContext * context) +{ +#ifdef G_OS_WIN32 + GstQsvEncoder *self = GST_QSV_ENCODER (element); + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (element); + GstQsvEncoderPrivate *priv = self->priv; + + gst_d3d11_handle_set_context_for_adapter_luid (element, + context, klass->adapter_luid, (GstD3D11Device **) & priv->device); +#endif + + GST_ELEMENT_CLASS (parent_class)->set_context (element, context); +} + +#ifdef G_OS_WIN32 +static gboolean +gst_qsv_encoder_open_platform_device (GstQsvEncoder * self) +{ + GstQsvEncoderPrivate *priv = self->priv; + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + ComPtr < ID3D10Multithread > multi_thread; + HRESULT hr; + ID3D11Device *device_handle; + mfxStatus status; + GstD3D11Device *device; + + if (!gst_d3d11_ensure_element_data_for_adapter_luid (GST_ELEMENT (self), + klass->adapter_luid, (GstD3D11Device **) & priv->device)) { + GST_ERROR_OBJECT (self, "d3d11 device is unavailable"); + return FALSE; + } + + device = GST_D3D11_DEVICE_CAST (priv->device); + priv->allocator = gst_qsv_d3d11_allocator_new (device); + + /* For D3D11 device handle to be used by QSV, multithread protection layer + * must be enabled before the MFXVideoCORE_SetHandle() call. + * + * TODO: Need to check the performance impact of this multithread protection layer, + * since it may have a negative impact on overall pipeline performance. 
+ * If so, we should create encoding session dedicated d3d11 device and + * make use of shared resource */ + device_handle = gst_d3d11_device_get_device_handle (device); + hr = device_handle->QueryInterface (IID_PPV_ARGS (&multi_thread)); + if (!gst_d3d11_result (hr, device)) { + GST_ERROR_OBJECT (self, "ID3D10Multithread interface is unavailable"); + return FALSE; + } + + multi_thread->SetMultithreadProtected (TRUE); + status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_D3D11_DEVICE, + device_handle); + if (status != MFX_ERR_NONE) { + GST_ERROR_OBJECT (self, "Failed to set d3d11 device handle"); + return FALSE; + } + + /* NOTE: We never use this mfxFrameAllocator to allocate memory from our side, + * but required for QSV because: + * 1) QSV may request memory allocation for encoder's internal usage, + * MFX_FOURCC_P8 for example + * 2) Our mfxFrameAllocator provides bridge layer for + * gst_video_frame_{map,unmap} and mfxFrameAllocator::{Lock,Unlock}, + * including mfxFrameAllocator::GetHDL. + * 3) GstQsvAllocator provides GstQsvFrame pool, and therefore allocated + * GstQsvFrame struct can be re-used without per-frame malloc/free + */ + status = MFXVideoCORE_SetFrameAllocator (priv->session, + gst_qsv_allocator_get_allocator_handle (priv->allocator)); + if (status != MFX_ERR_NONE) { + GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status); + return FALSE; + } + + return TRUE; +} +#else +static gboolean +gst_qsv_encoder_open_platform_device (GstQsvEncoder * self) +{ + GstQsvEncoderPrivate *priv = self->priv; + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + mfxStatus status; + GstVaDisplay *display; + + /* GstVADisplay context sharing is not public yet (VA plugin internal) */ + if (!priv->device) { + display = gst_va_display_drm_new_from_path (klass->display_path); + if (!display) { + GST_ERROR_OBJECT (self, "VA display is unavailable"); + return FALSE; + } + + priv->device = GST_OBJECT (display); + } else { + display = GST_VA_DISPLAY (priv->device); + } + + priv->allocator = gst_qsv_va_allocator_new (display); + + status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_VA_DISPLAY, + gst_va_display_get_va_dpy (display)); + if (status != MFX_ERR_NONE) { + GST_ERROR_OBJECT (self, "Failed to set VA display handle"); + return FALSE; + } + + status = MFXVideoCORE_SetFrameAllocator (priv->session, + gst_qsv_allocator_get_allocator_handle (priv->allocator)); + if (status != MFX_ERR_NONE) { + GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status); + return FALSE; + } + + return TRUE; +} +#endif + +static gboolean +gst_qsv_encoder_open (GstVideoEncoder * encoder) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + GstQsvEncoderPrivate *priv = self->priv; + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + mfxStatus status; + + status = MFXCreateSession (gst_qsv_get_loader (), klass->impl_index, + &priv->session); + if (status != MFX_ERR_NONE) { + GST_ERROR_OBJECT (self, "Failed to create session"); + return FALSE; + } + + if (!gst_qsv_encoder_open_platform_device (self)) { + g_clear_pointer (&priv->session, MFXClose); + gst_clear_object (&priv->allocator); + gst_clear_object (&priv->device); + + return FALSE; + } + + return TRUE; +} + +static gboolean +gst_qsv_encoder_reset (GstQsvEncoder * self) +{ + GstQsvEncoderPrivate *priv = self->priv; + + if (priv->encoder) { + delete priv->encoder; + priv->encoder = nullptr; + } + + if (priv->internal_pool) { + gst_buffer_pool_set_active (priv->internal_pool, FALSE); + gst_clear_object 
(&priv->internal_pool); + } + + g_array_set_size (priv->surface_pool, 0); + g_array_set_size (priv->task_pool, 0); + g_queue_clear (&priv->free_tasks); + g_queue_clear (&priv->pending_tasks); + g_clear_pointer (&priv->input_state, gst_video_codec_state_unref); + + return TRUE; +} + +static gboolean +gst_qsv_encoder_stop (GstVideoEncoder * encoder) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + + return gst_qsv_encoder_reset (self); +} + +static gboolean +gst_qsv_encoder_close (GstVideoEncoder * encoder) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + GstQsvEncoderPrivate *priv = self->priv; + + g_clear_pointer (&priv->session, MFXClose); + gst_clear_object (&priv->allocator); + gst_clear_object (&priv->device); + + return TRUE; +} + +static void +gst_qsv_encoder_payload_clear (mfxPayload * payload) +{ + if (!payload) + return; + + g_free (payload->Data); + g_free (payload); +} + +static void +gst_qsv_encoder_surface_reset (GstQsvEncoderSurface * surface) +{ + if (!surface) + return; + + gst_clear_qsv_frame (&surface->qsv_frame); + g_ptr_array_set_size (surface->payload, 0); + memset (&surface->encode_control, 0, sizeof (mfxEncodeCtrl)); +} + +static void +gst_qsv_encoder_surface_clear (GstQsvEncoderSurface * surface) +{ + if (!surface) + return; + + gst_qsv_encoder_surface_reset (surface); + g_clear_pointer (&surface->payload, g_ptr_array_unref); + memset (&surface->surface, 0, sizeof (mfxFrameSurface1)); +} + +static void +gst_qsv_encoder_task_reset (GstQsvEncoder * self, GstQsvEncoderTask * task) +{ + GstQsvEncoderPrivate *priv = self->priv; + + if (!task) + return; + + task->sync_point = nullptr; + task->bitstream.DataLength = 0; + g_queue_push_head (&priv->free_tasks, task); +} + +static void +gst_qsv_encoder_task_clear (GstQsvEncoderTask * task) +{ + if (!task) + return; + + g_clear_pointer (&task->bitstream.Data, g_free); + memset (&task->bitstream, 0, sizeof (mfxBitstream)); +} + +static GstQsvEncoderSurface * +gst_qsv_encoder_get_next_surface (GstQsvEncoder * self) +{ + GstQsvEncoderPrivate *priv = self->priv; + GstQsvEncoderSurface *surface = nullptr; + + for (guint i = priv->next_surface_index; i < priv->surface_pool->len; i++) { + GstQsvEncoderSurface *iter = + &g_array_index (priv->surface_pool, GstQsvEncoderSurface, i); + + /* This means surface is still being used by QSV */ + if (iter->surface.Data.Locked > 0) + continue; + + surface = iter; + priv->next_surface_index = i; + goto out; + } + + for (guint i = 0; i < priv->next_surface_index; i++) { + GstQsvEncoderSurface *iter = + &g_array_index (priv->surface_pool, GstQsvEncoderSurface, i); + + /* This means surface is still being used by QSV */ + if (iter->surface.Data.Locked > 0) + continue; + + surface = iter; + priv->next_surface_index = i; + goto out; + } + + /* Magic number to avoid too large pool size */ + if (priv->surface_pool->len > 64) { + GST_ERROR_OBJECT (self, + "No available surface but pool size is too large already"); + return nullptr; + } + + /* Something went wrong, increase surface pool size */ + GST_INFO_OBJECT (self, "No usable surfaces, increasing pool size to %d", + priv->surface_pool->len + 1); + + g_array_set_size (priv->surface_pool, priv->surface_pool->len + 1); + surface = &g_array_index (priv->surface_pool, GstQsvEncoderSurface, + priv->surface_pool->len - 1); + + memset (surface, 0, sizeof (GstQsvEncoderSurface)); + surface->surface.Info = + g_array_index (priv->surface_pool, GstQsvEncoderSurface, 0).surface.Info; + surface->payload = g_ptr_array_new_with_free_func
((GDestroyNotify) + gst_qsv_encoder_payload_clear); + +out: + priv->next_surface_index++; + priv->next_surface_index %= priv->surface_pool->len; + + gst_qsv_encoder_surface_reset (surface); + return surface; +} + +static mfxStatus +gst_qsv_encoder_encode_frame (GstQsvEncoder * self, + GstQsvEncoderSurface * surface, GstQsvEncoderTask * task, mfxU64 timestamp) +{ + mfxFrameSurface1 *s; + GstQsvEncoderPrivate *priv = self->priv; + mfxStatus status; + guint retry_count = 0; + /* magic number */ + const guint retry_threshold = 100; + mfxEncodeCtrl *encode_ctrl; + + if (surface) { + s = &surface->surface; + s->Data.MemId = (mfxMemId) surface->qsv_frame; + s->Data.TimeStamp = timestamp; + encode_ctrl = &surface->encode_control; + } else { + /* draining */ + s = nullptr; + encode_ctrl = nullptr; + } + + do { + status = priv->encoder->EncodeFrameAsync (encode_ctrl, + s, &task->bitstream, &task->sync_point); + + /* XXX: probably we should try to drain pending tasks if any in this case + * as documented? */ + if (status == MFX_WRN_DEVICE_BUSY && retry_count < retry_threshold) { + GST_INFO_OBJECT (self, "GPU is busy, retry count (%d/%d)", + retry_count, retry_threshold); + retry_count++; + + /* Magic number 10ms */ + g_usleep (10000); + continue; + } + + break; + } while (TRUE); + + return status; +} + +static GstVideoCodecFrame * +gst_qsv_encoder_find_output_frame (GstQsvEncoder * self, GstClockTime pts) +{ + GList *frames, *iter; + GstVideoCodecFrame *ret = nullptr; + GstVideoCodecFrame *closest = nullptr; + guint64 min_pts_abs_diff = 0; + + /* give up, just returns the oldest frame */ + if (!GST_CLOCK_TIME_IS_VALID (pts)) + return gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (self)); + + frames = gst_video_encoder_get_frames (GST_VIDEO_ENCODER (self)); + + for (iter = frames; iter; iter = g_list_next (iter)) { + GstVideoCodecFrame *frame = (GstVideoCodecFrame *) iter->data; + guint64 abs_diff; + + if (!GST_CLOCK_TIME_IS_VALID (frame->pts)) + continue; + + if (pts == frame->pts) { + ret = frame; + break; + } + + if (pts >= frame->pts) + abs_diff = pts - frame->pts; + else + abs_diff = frame->pts - pts; + + if (!closest || abs_diff < min_pts_abs_diff) { + closest = frame; + min_pts_abs_diff = abs_diff; + } + } + + if (!ret && closest) + ret = closest; + + if (ret) { + gst_video_codec_frame_ref (ret); + } else { + ret = gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (self)); + } + + if (frames) + g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref); + + return ret; +} + +static GstFlowReturn +gst_qsv_encoder_finish_frame (GstQsvEncoder * self, GstQsvEncoderTask * task, + gboolean discard) +{ + GstQsvEncoderPrivate *priv = self->priv; + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + mfxStatus status; + mfxBitstream *bs; + GstVideoCodecFrame *frame; + GstClockTime pts = GST_CLOCK_TIME_NONE; + GstClockTime dts = GST_CLOCK_TIME_NONE; + GstBuffer *buffer; + gboolean keyframe = FALSE; + guint retry_count = 0; + /* magic number */ + const guint retry_threshold = 100; + + status = MFX_ERR_NONE; + do { + /* magic number 100 ms */ + status = MFXVideoCORE_SyncOperation (priv->session, task->sync_point, 100); + + /* Retry up to 10 sec (100 ms x 100 times), that should be enough time for + * encoding a frame using hardware */ + if (status == MFX_WRN_IN_EXECUTION && retry_count < retry_threshold) { + GST_DEBUG_OBJECT (self, + "Operation is still in execution, retry count (%d/%d)", + retry_count, retry_threshold); + retry_count++; + continue; + } + + break; + } 
while (TRUE); + + if (discard) { + gst_qsv_encoder_task_reset (self, task); + return GST_FLOW_OK; + } + + if (status != MFX_ERR_NONE && status != MFX_ERR_NONE_PARTIAL_OUTPUT) { + gst_qsv_encoder_task_reset (self, task); + + if (status == MFX_ERR_ABORTED) { + GST_INFO_OBJECT (self, "Operation was aborted"); + return GST_FLOW_FLUSHING; + } + + GST_WARNING_OBJECT (self, "SyncOperation returned %d (%s)", + QSV_STATUS_ARGS (status)); + + return GST_FLOW_ERROR; + } + + bs = &task->bitstream; + pts = gst_qsv_timestamp_to_gst (bs->TimeStamp); + dts = gst_qsv_timestamp_to_gst ((mfxU64) bs->DecodeTimeStamp); + + if ((bs->FrameType & MFX_FRAMETYPE_IDR) != 0) + keyframe = TRUE; + + if (klass->create_output_buffer) { + buffer = klass->create_output_buffer (self, bs); + } else { + buffer = gst_buffer_new_memdup (bs->Data + bs->DataOffset, bs->DataLength); + } + gst_qsv_encoder_task_reset (self, task); + + if (!buffer) { + GST_ERROR_OBJECT (self, "No output buffer"); + return GST_FLOW_ERROR; + } + + frame = gst_qsv_encoder_find_output_frame (self, pts); + if (frame) { + frame->pts = pts; + frame->dts = dts; + frame->output_buffer = buffer; + + if (keyframe) + GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame); + + return gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), frame); + } + + /* No corresponding frame is available. Something went wrong, but we can + * still push this buffer */ + GST_WARNING_OBJECT (self, "Failed to find corresponding frame"); + GST_BUFFER_PTS (buffer) = pts; + GST_BUFFER_DTS (buffer) = dts; + + if (!keyframe) + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); + + return gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (self), buffer); +} + +static GstFlowReturn +gst_qsv_encoder_drain (GstQsvEncoder * self, gboolean discard) +{ + GstQsvEncoderPrivate *priv = self->priv; + mfxStatus status = MFX_ERR_NONE; + GstFlowReturn ret = GST_FLOW_OK; + GstQsvEncoderTask *task; + + if (!priv->session || !priv->encoder) + return GST_FLOW_OK; + + GST_DEBUG_OBJECT (self, "Drain"); + + /* Drain pending tasks first if any */ + while (g_queue_get_length (&priv->pending_tasks) > 0) { + task = (GstQsvEncoderTask *) g_queue_pop_tail (&priv->pending_tasks); + ret = gst_qsv_encoder_finish_frame (self, task, discard); + } + + while (status == MFX_ERR_NONE) { + task = (GstQsvEncoderTask *) g_queue_pop_tail (&priv->free_tasks); + status = gst_qsv_encoder_encode_frame (self, + nullptr, task, MFX_TIMESTAMP_UNKNOWN); + + /* The encoder keeps returning buffered output until it is fully drained, + * at which point it returns MFX_ERR_MORE_DATA */ + if (status == MFX_ERR_NONE && task->sync_point) { + ret = gst_qsv_encoder_finish_frame (self, task, discard); + continue; + } + + if (status != MFX_ERR_MORE_DATA) + GST_WARNING_OBJECT (self, "Unexpected return status %d (%s)", + QSV_STATUS_ARGS (status)); + + g_queue_push_head (&priv->free_tasks, task); + } + + /* Release GstQsvFrame objects */ + for (guint i = 0; i < priv->surface_pool->len; i++) { + GstQsvEncoderSurface *iter = + &g_array_index (priv->surface_pool, GstQsvEncoderSurface, i); + + if (iter->surface.Data.Locked > 0) { + GST_WARNING_OBJECT (self, + "Encoder was drained but QSV is holding surface %d", i); + continue; + } + + gst_qsv_encoder_surface_reset (iter); + } + + return ret; +} + +#ifdef G_OS_WIN32 +static gboolean +gst_qsv_encoder_prepare_d3d11_pool (GstQsvEncoder * self, + GstCaps * caps, GstVideoInfo * aligned_info) +{ + GstQsvEncoderPrivate *priv = self->priv; + GstStructure *config; + GstD3D11AllocationParams *params; + GstD3D11Device *device = GST_D3D11_DEVICE_CAST (priv->device); + + GST_DEBUG_OBJECT (self, "Use 
d3d11 memory pool"); + + priv->internal_pool = gst_d3d11_buffer_pool_new (device); + config = gst_buffer_pool_get_config (priv->internal_pool); + params = gst_d3d11_allocation_params_new (device, aligned_info, + (GstD3D11AllocationFlags) 0, 0); + + gst_buffer_pool_config_set_d3d11_allocation_params (config, params); + gst_d3d11_allocation_params_free (params); + gst_buffer_pool_config_set_params (config, caps, + GST_VIDEO_INFO_SIZE (aligned_info), 0, 0); + gst_buffer_pool_set_config (priv->internal_pool, config); + gst_buffer_pool_set_active (priv->internal_pool, TRUE); + + return TRUE; +} +#endif + +static gboolean +gst_qsv_encoder_prepare_system_pool (GstQsvEncoder * self, + GstCaps * caps, GstVideoInfo * aligned_info) +{ + GstQsvEncoderPrivate *priv = self->priv; + GstStructure *config; + + GST_DEBUG_OBJECT (self, "Use system memory pool"); + + priv->internal_pool = gst_video_buffer_pool_new (); + config = gst_buffer_pool_get_config (priv->internal_pool); + caps = gst_video_info_to_caps (aligned_info); + gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META); + gst_buffer_pool_config_set_params (config, + caps, GST_VIDEO_INFO_SIZE (aligned_info), 0, 0); + + gst_buffer_pool_set_config (priv->internal_pool, config); + gst_buffer_pool_set_active (priv->internal_pool, TRUE); + + return TRUE; +} + +/* Prepare internal pool, which is used to allocate fallback buffer + * when upstream buffer is not directly accessible by QSV */ +static gboolean +gst_qsv_encoder_prepare_pool (GstQsvEncoder * self, GstCaps * caps, + GstVideoInfo * aligned_info, mfxU16 * io_pattern) +{ + GstQsvEncoderPrivate *priv = self->priv; + gboolean ret = FALSE; + GstCaps *aligned_caps; + + if (priv->internal_pool) { + gst_buffer_pool_set_active (priv->internal_pool, FALSE); + gst_clear_object (&priv->internal_pool); + } + + aligned_caps = gst_video_info_to_caps (aligned_info); + + /* TODO: Add Linux video memory (VA/DMABuf) support */ +#ifdef G_OS_WIN32 + priv->mem_type = GST_QSV_VIDEO_MEMORY; + *io_pattern = MFX_IOPATTERN_IN_VIDEO_MEMORY; + + ret = gst_qsv_encoder_prepare_d3d11_pool (self, aligned_caps, aligned_info); +#endif + + if (!ret) { + priv->mem_type = GST_QSV_SYSTEM_MEMORY; + *io_pattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY; + + ret = gst_qsv_encoder_prepare_system_pool (self, + aligned_caps, aligned_info); + } + gst_caps_unref (aligned_caps); + + return ret; +} + +static gboolean +gst_qsv_encoder_set_format (GstVideoEncoder * encoder, + GstVideoCodecState * state) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + GstQsvEncoderPrivate *priv = self->priv; + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + GstVideoInfo *info; + mfxVideoParam param; + mfxFrameInfo *frame_info; + mfxFrameAllocRequest alloc_request; + mfxStatus status; + MFXVideoENCODE *encoder_handle = nullptr; + guint bitstream_size; + gboolean ret; + guint64 min_delay_frames, max_delay_frames; + GstClockTime min_latency, max_latency; + + gst_qsv_encoder_drain (self, FALSE); + gst_qsv_encoder_reset (self); + + priv->input_state = gst_video_codec_state_ref (state); + + info = &priv->input_state->info; + + encoder_handle = new MFXVideoENCODE (priv->session); + + memset (¶m, 0, sizeof (mfxVideoParam)); + + g_ptr_array_set_size (priv->extra_params, 0); + g_assert (klass->set_format); + if (!klass->set_format (self, priv->input_state, ¶m, priv->extra_params)) { + GST_ERROR_OBJECT (self, "Subclass failed to set format"); + goto error; + } + + /* LowPower mode supports smaller set of features, don't enable it for now 
*/ + param.mfx.LowPower = MFX_CODINGOPTION_OFF; + if (priv->low_latency) + param.AsyncDepth = 1; + else + param.AsyncDepth = 4; + + param.mfx.TargetUsage = priv->target_usage; + + frame_info = &param.mfx.FrameInfo; + + gst_video_info_set_interlaced_format (&priv->aligned_info, + GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_INTERLACE_MODE (info), + frame_info->Width, frame_info->Height); + + if (!gst_qsv_encoder_prepare_pool (self, state->caps, &priv->aligned_info, + &param.IOPattern)) { + GST_ERROR_OBJECT (self, "Failed to prepare pool"); + goto error; + } + +#define CHECK_STATUS(s,func) G_STMT_START { \ + if (s < MFX_ERR_NONE) { \ + GST_ERROR_OBJECT (self, G_STRINGIFY (func) " failed %d (%s)", \ + QSV_STATUS_ARGS (s)); \ + goto error; \ + } else if (s != MFX_ERR_NONE) { \ + GST_WARNING_OBJECT (self, G_STRINGIFY (func) " returned warning %d (%s)", \ + QSV_STATUS_ARGS (s)); \ + } \ +} G_STMT_END + + status = encoder_handle->Query (&param, &param); + /* If device is unhappy with LowPower = OFF, try again with unknown */ + if (status < MFX_ERR_NONE) { + GST_INFO_OBJECT (self, "LowPower - OFF returned %d (%s)", + QSV_STATUS_ARGS (status)); + param.mfx.LowPower = MFX_CODINGOPTION_UNKNOWN; + } + + status = encoder_handle->Query (&param, &param); + CHECK_STATUS (status, MFXVideoENCODE::Query); + + status = encoder_handle->QueryIOSurf (&param, &alloc_request); + CHECK_STATUS (status, MFXVideoENCODE::QueryIOSurf); + + status = encoder_handle->Init (&param); + CHECK_STATUS (status, MFXVideoENCODE::Init); + + status = encoder_handle->GetVideoParam (&param); + CHECK_STATUS (status, MFXVideoENCODE::GetVideoParam); + +#undef CHECK_STATUS + + GST_DEBUG_OBJECT (self, "NumFrameSuggested: %d, AsyncDepth %d", + alloc_request.NumFrameSuggested, param.AsyncDepth); + + g_assert (klass->set_output_state); + ret = klass->set_output_state (self, priv->input_state, priv->session); + if (!ret) { + GST_ERROR_OBJECT (self, "Subclass failed to set output state"); + goto error; + } + + /* Prepare surface pool with size NumFrameSuggested, then if it's not + * sufficient while encoding, we can increase the pool size dynamically + * if needed */ + g_array_set_size (priv->surface_pool, alloc_request.NumFrameSuggested); + for (guint i = 0; i < priv->surface_pool->len; i++) { + GstQsvEncoderSurface *surface = &g_array_index (priv->surface_pool, + GstQsvEncoderSurface, i); + + surface->surface.Info = param.mfx.FrameInfo; + surface->payload = g_ptr_array_new_with_free_func ((GDestroyNotify) + gst_qsv_encoder_payload_clear); + } + priv->next_surface_index = 0; + + g_array_set_size (priv->task_pool, param.AsyncDepth); + bitstream_size = + (guint) param.mfx.BufferSizeInKB * param.mfx.BRCParamMultiplier * 1024; + + for (guint i = 0; i < priv->task_pool->len; i++) { + GstQsvEncoderTask *task = &g_array_index (priv->task_pool, + GstQsvEncoderTask, i); + + task->bitstream.Data = (mfxU8 *) g_malloc (bitstream_size); + task->bitstream.MaxLength = bitstream_size; + + g_queue_push_head (&priv->free_tasks, task); + } + + min_delay_frames = priv->task_pool->len; + max_delay_frames = priv->surface_pool->len + min_delay_frames; + + min_latency = gst_util_uint64_scale (min_delay_frames * GST_SECOND, + param.mfx.FrameInfo.FrameRateExtD, param.mfx.FrameInfo.FrameRateExtN); + max_latency = gst_util_uint64_scale (max_delay_frames * GST_SECOND, + param.mfx.FrameInfo.FrameRateExtD, param.mfx.FrameInfo.FrameRateExtN); + gst_video_encoder_set_latency (encoder, min_latency, max_latency); + + priv->video_param = param; + priv->encoder = encoder_handle; + + return TRUE; + +error: + if 
(encoder_handle) + delete encoder_handle; + + gst_qsv_encoder_reset (self); + + return FALSE; +} + +static mfxU16 +gst_qsv_encoder_get_pic_struct (GstQsvEncoder * self, + GstVideoCodecFrame * frame) +{ + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + GstQsvEncoderPrivate *priv = self->priv; + GstVideoInfo *info = &priv->input_state->info; + + if (klass->codec_id != MFX_CODEC_AVC) + return MFX_PICSTRUCT_PROGRESSIVE; + + if (!GST_VIDEO_INFO_IS_INTERLACED (info)) + return MFX_PICSTRUCT_PROGRESSIVE; + + if (GST_VIDEO_INFO_INTERLACE_MODE (info) == GST_VIDEO_INTERLACE_MODE_MIXED) { + if (!GST_BUFFER_FLAG_IS_SET (frame->input_buffer, + GST_VIDEO_BUFFER_FLAG_INTERLACED)) { + return MFX_PICSTRUCT_PROGRESSIVE; + } + + if (GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_VIDEO_BUFFER_FLAG_TFF)) + return MFX_PICSTRUCT_FIELD_TFF; + + return MFX_PICSTRUCT_FIELD_BFF; + } + + switch (GST_VIDEO_INFO_FIELD_ORDER (info)) { + case GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST: + return MFX_PICSTRUCT_FIELD_TFF; + break; + case GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST: + return MFX_PICSTRUCT_FIELD_BFF; + break; + default: + break; + } + + if (GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_VIDEO_BUFFER_FLAG_TFF)) + return MFX_PICSTRUCT_FIELD_TFF; + + return MFX_PICSTRUCT_FIELD_BFF; +} + +static GstFlowReturn +gst_qsv_encoder_handle_frame (GstVideoEncoder * encoder, + GstVideoCodecFrame * frame) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + GstQsvEncoderPrivate *priv = self->priv; + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + GstFlowReturn ret = GST_FLOW_ERROR; + GstQsvEncoderSurface *surface; + GstQsvEncoderTask *task; + mfxU64 timestamp; + mfxStatus status; + + if (klass->check_reconfigure) { + GstQsvEncoderReconfigure reconfigure; + + reconfigure = klass->check_reconfigure (self, &priv->video_param); + switch (reconfigure) { + case GST_QSV_ENCODER_RECONFIGURE_BITRATE: + /* TODO: In case of bitrate change, we can query whether we need to + * start from a new sequence or soft-reset is possible + * via MFXVideoENCODE_Query() with mfxExtEncoderResetOption struct, + * and then if soft-reset is allowed, we can avoid inefficient full-reset + * (including IDR insertion) by using MFXVideoENCODE_Reset() */ + /* fallthrough */ + case GST_QSV_ENCODER_RECONFIGURE_FULL: + { + GstVideoCodecState *state = + gst_video_codec_state_ref (priv->input_state); + gboolean rst; + + GST_INFO_OBJECT (self, "Configure encoder again"); + rst = gst_qsv_encoder_set_format (encoder, state); + gst_video_codec_state_unref (state); + + if (!rst) + return GST_FLOW_NOT_NEGOTIATED; + break; + } + default: + break; + } + } + + if (!priv->encoder) { + GST_ERROR_OBJECT (self, "Encoder object was not configured"); + return GST_FLOW_NOT_NEGOTIATED; + } + + surface = gst_qsv_encoder_get_next_surface (self); + if (!surface) { + GST_ERROR_OBJECT (self, "No available surface"); + goto out; + } + + task = (GstQsvEncoderTask *) g_queue_pop_tail (&priv->free_tasks); + g_assert (task); + + surface->qsv_frame = + gst_qsv_allocator_acquire_frame (priv->allocator, priv->mem_type, + &priv->input_state->info, frame->input_buffer, priv->internal_pool); + if (!surface->qsv_frame) { + GST_ERROR_OBJECT (self, "Failed to wrap buffer with qsv frame"); + gst_qsv_encoder_task_reset (self, task); + goto out; + } + + surface->surface.Info.PicStruct = + gst_qsv_encoder_get_pic_struct (self, frame); + + if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) { + surface->encode_control.FrameType = + MFX_FRAMETYPE_IDR | MFX_FRAMETYPE_I | 
MFX_FRAMETYPE_REF; + } else { + surface->encode_control.FrameType = MFX_FRAMETYPE_UNKNOWN; + } + + if (klass->attach_payload) { + klass->attach_payload (self, frame, surface->payload); + if (surface->payload->len > 0) { + surface->encode_control.NumPayload = surface->payload->len; + surface->encode_control.Payload = (mfxPayload **) surface->payload->pdata; + } + } + + timestamp = gst_qsv_timestamp_from_gst (frame->pts); + status = gst_qsv_encoder_encode_frame (self, surface, task, timestamp); + if (status != MFX_ERR_NONE && status != MFX_ERR_MORE_DATA) { + GST_ERROR_OBJECT (self, "Failed to encode frame, ret %d (%s)", + QSV_STATUS_ARGS (status)); + gst_qsv_encoder_task_reset (self, task); + goto out; + } + + if (status == MFX_ERR_NONE && task->sync_point) { + g_queue_push_head (&priv->pending_tasks, task); + } else { + gst_qsv_encoder_task_reset (self, task); + } + + ret = GST_FLOW_OK; + /* Do not sync immediately, but record tasks which have output buffer here + * to improve throughput. + * In this way, hardware may be able to run encoding job from its background + * threads (if any). We will do sync only when there's no more free task item + */ + while (g_queue_get_length (&priv->pending_tasks) >= priv->task_pool->len) { + GstQsvEncoderTask *task = + (GstQsvEncoderTask *) g_queue_pop_tail (&priv->pending_tasks); + ret = gst_qsv_encoder_finish_frame (self, task, FALSE); + } + +out: + gst_video_codec_frame_unref (frame); + + return ret; +} + +static GstFlowReturn +gst_qsv_encoder_finish (GstVideoEncoder * encoder) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + + return gst_qsv_encoder_drain (self, FALSE); +} + +static gboolean +gst_qsv_encoder_flush (GstVideoEncoder * encoder) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + + gst_qsv_encoder_drain (self, TRUE); + + return TRUE; +} + +static gboolean +gst_qsv_encoder_handle_context_query (GstQsvEncoder * self, GstQuery * query) +{ +#ifdef G_OS_WIN32 + GstQsvEncoderPrivate *priv = self->priv; + + return gst_d3d11_handle_context_query (GST_ELEMENT (self), query, + (GstD3D11Device *) priv->device); +#endif + + return FALSE; +} + +static gboolean +gst_qsv_encoder_sink_query (GstVideoEncoder * encoder, GstQuery * query) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONTEXT: + if (gst_qsv_encoder_handle_context_query (self, query)) + return TRUE; + break; + default: + break; + } + + return GST_VIDEO_ENCODER_CLASS (parent_class)->sink_query (encoder, query); +} + +static gboolean +gst_qsv_encoder_src_query (GstVideoEncoder * encoder, GstQuery * query) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONTEXT: + if (gst_qsv_encoder_handle_context_query (self, query)) + return TRUE; + break; + default: + break; + } + + return GST_VIDEO_ENCODER_CLASS (parent_class)->src_query (encoder, query); +} + +#ifdef G_OS_WIN32 +static gboolean +gst_qsv_encoder_propose_allocation (GstVideoEncoder * encoder, GstQuery * query) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + GstQsvEncoderPrivate *priv = self->priv; + GstD3D11Device *device = GST_D3D11_DEVICE (priv->device); + GstVideoInfo info; + GstBufferPool *pool; + GstCaps *caps; + guint size; + GstStructure *config; + GstCapsFeatures *features; + gboolean is_d3d11 = FALSE; + + gst_query_parse_allocation (query, &caps, nullptr); + if (!caps) { + GST_WARNING_OBJECT (self, "null caps in query"); + return FALSE; + } + + if (!gst_video_info_from_caps (&info, caps)) { + 
GST_WARNING_OBJECT (self, "Failed to convert caps into info"); + return FALSE; + } + + features = gst_caps_get_features (caps, 0); + if (features && gst_caps_features_contains (features, + GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY)) { + GST_DEBUG_OBJECT (self, "upstream support d3d11 memory"); + pool = gst_d3d11_buffer_pool_new (device); + is_d3d11 = TRUE; + } else { + pool = gst_d3d11_staging_buffer_pool_new (device); + } + + config = gst_buffer_pool_get_config (pool); + gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META); + + if (is_d3d11) { + GstD3D11AllocationParams *d3d11_params; + GstVideoAlignment align; + + /* d3d11 buffer pool doesn't support generic video alignment + * because memory layout of CPU accessible staging texture is uncontrollable. + * Do D3D11 specific handling */ + gst_video_alignment_reset (&align); + + align.padding_right = GST_VIDEO_INFO_WIDTH (&priv->aligned_info) - + GST_VIDEO_INFO_WIDTH (&info); + align.padding_bottom = GST_VIDEO_INFO_HEIGHT (&priv->aligned_info) - + GST_VIDEO_INFO_HEIGHT (&info); + + d3d11_params = gst_d3d11_allocation_params_new (device, &info, + (GstD3D11AllocationFlags) 0, 0); + + gst_d3d11_allocation_params_alignment (d3d11_params, &align); + gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params); + gst_d3d11_allocation_params_free (d3d11_params); + } + + size = GST_VIDEO_INFO_SIZE (&info); + gst_buffer_pool_config_set_params (config, + caps, size, priv->surface_pool->len, 0); + + if (!gst_buffer_pool_set_config (pool, config)) { + GST_WARNING_OBJECT (self, "Failed to set pool config"); + gst_object_unref (pool); + return FALSE; + } + + /* d3d11 buffer pool will update actual CPU accessible buffer size based on + * allocated staging texture per gst_buffer_pool_set_config() call, + * need query again to get the size */ + config = gst_buffer_pool_get_config (pool); + gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr); + gst_structure_free (config); + + gst_query_add_allocation_pool (query, pool, size, priv->surface_pool->len, 0); + gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, nullptr); + gst_object_unref (pool); + + return TRUE; +} +#else +/* TODO: Add support VA/DMABuf */ +static gboolean +gst_qsv_encoder_propose_allocation (GstVideoEncoder * encoder, GstQuery * query) +{ + GstQsvEncoder *self = GST_QSV_ENCODER (encoder); + GstQsvEncoderPrivate *priv = self->priv; + GstVideoInfo info; + GstBufferPool *pool; + GstCaps *caps; + guint size; + GstStructure *config; + GstVideoAlignment align; + + gst_query_parse_allocation (query, &caps, nullptr); + if (!caps) { + GST_WARNING_OBJECT (self, "null caps in query"); + return FALSE; + } + + if (!gst_video_info_from_caps (&info, caps)) { + GST_WARNING_OBJECT (self, "Failed to convert caps into info"); + return FALSE; + } + + pool = gst_video_buffer_pool_new (); + + gst_video_alignment_reset (&align); + align.padding_right = GST_VIDEO_INFO_WIDTH (&priv->aligned_info) - + GST_VIDEO_INFO_WIDTH (&info); + align.padding_bottom = GST_VIDEO_INFO_HEIGHT (&priv->aligned_info) - + GST_VIDEO_INFO_HEIGHT (&info); + + config = gst_buffer_pool_get_config (pool); + gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META); + gst_buffer_pool_config_add_option (config, + GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT); + gst_video_info_align (&info, &align); + gst_buffer_pool_config_set_video_alignment (config, &align); + + size = GST_VIDEO_INFO_SIZE (&info); + gst_buffer_pool_config_set_params (config, + caps, size, 
priv->surface_pool->len, 0); + + if (!gst_buffer_pool_set_config (pool, config)) { + GST_WARNING_OBJECT (self, "Failed to set pool config"); + gst_object_unref (pool); + return FALSE; + } + + gst_query_add_allocation_pool (query, pool, size, priv->surface_pool->len, 0); + gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, nullptr); + gst_object_unref (pool); + + return TRUE; +} +#endif diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.h b/subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.h new file mode 100644 index 0000000..e0ed95b --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvencoder.h @@ -0,0 +1,95 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include +#include "gstqsvutils.h" + +G_BEGIN_DECLS + +#define GST_TYPE_QSV_ENCODER (gst_qsv_encoder_get_type()) +#define GST_QSV_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_QSV_ENCODER, GstQsvEncoder)) +#define GST_QSV_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_QSV_ENCODER, GstQsvEncoderClass)) +#define GST_IS_QSV_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_QSV_ENCODER)) +#define GST_IS_QSV_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_QSV_ENCODER)) +#define GST_QSV_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_QSV_ENCODER, GstQsvEncoderClass)) +#define GST_QSV_ENCODER_CAST(obj) ((GstQsvEncoder *)obj) + +typedef struct _GstQsvEncoder GstQsvEncoder; +typedef struct _GstQsvEncoderClass GstQsvEncoderClass; +typedef struct _GstQsvEncoderPrivate GstQsvEncoderPrivate; + +#define GST_TYPE_QSV_CODING_OPTION (gst_qsv_coding_option_get_type()) +GType gst_qsv_coding_option_get_type (void); + +typedef enum +{ + GST_QSV_ENCODER_RECONFIGURE_NONE, + GST_QSV_ENCODER_RECONFIGURE_BITRATE, + GST_QSV_ENCODER_RECONFIGURE_FULL, +} GstQsvEncoderReconfigure; + +struct _GstQsvEncoder +{ + GstVideoEncoder parent; + + GstQsvEncoderPrivate *priv; +}; + +struct _GstQsvEncoderClass +{ + GstVideoEncoderClass parent_class; + + mfxU32 codec_id; + mfxU32 impl_index; + + /* DXGI adapter LUID, for Windows */ + gint64 adapter_luid; + + /* VA display device path, for Linux */ + gchar display_path[64]; + + gboolean (*set_format) (GstQsvEncoder * encoder, + GstVideoCodecState * state, + mfxVideoParam * param, + GPtrArray * extra_params); + + gboolean (*set_output_state) (GstQsvEncoder * encoder, + GstVideoCodecState * state, + mfxSession session); + + gboolean (*attach_payload) (GstQsvEncoder * encoder, + GstVideoCodecFrame * frame, + GPtrArray * payload); + + GstBuffer * (*create_output_buffer) (GstQsvEncoder * encoder, + mfxBitstream * bitstream); + + GstQsvEncoderReconfigure (*check_reconfigure) (GstQsvEncoder * encoder, + mfxVideoParam * param); +}; + +GType 
gst_qsv_encoder_get_type (void); + +G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstQsvEncoder, gst_object_unref) + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.cpp b/subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.cpp new file mode 100644 index 0000000..53e1794 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.cpp @@ -0,0 +1,1846 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstqsvh264enc.h" +#include +#include +#include +#include +#include +#include + +#ifdef G_OS_WIN32 +#include +#else +#include +#endif + +GST_DEBUG_CATEGORY_EXTERN (gst_qsv_h264_enc_debug); +#define GST_CAT_DEFAULT gst_qsv_h264_enc_debug + +typedef enum +{ + GST_QSV_H264_ENC_SEI_INSERT, + GST_QSV_H264_ENC_SEI_INSERT_AND_DROP, + GST_QSV_H264_ENC_SEI_DISABLED, +} GstQsvH264EncSeiInsertMode; + +#define GST_TYPE_QSV_H264_ENC_SEI_INSERT_MODE (gst_qsv_h264_enc_sei_insert_mode_get_type ()) +static GType +gst_qsv_h264_enc_sei_insert_mode_get_type (void) +{ + static GType sei_insert_mode_type = 0; + static const GEnumValue insert_modes[] = { + {GST_QSV_H264_ENC_SEI_INSERT, "Insert SEI", "insert"}, + {GST_QSV_H264_ENC_SEI_INSERT_AND_DROP, + "Insert SEI and remove corresponding meta from output buffer", + "insert-and-drop"}, + {GST_QSV_H264_ENC_SEI_DISABLED, "Disable SEI insertion", "disabled"}, + {0, nullptr, nullptr} + }; + + if (g_once_init_enter (&sei_insert_mode_type)) { + GType type = + g_enum_register_static ("GstQsvH264EncSeiInsertMode", insert_modes); + g_once_init_leave (&sei_insert_mode_type, type); + } + + return sei_insert_mode_type; +} + +#define GST_TYPE_QSV_H264_ENC_RATE_CONTROL (gst_qsv_h264_enc_rate_control_get_type ()) +static GType +gst_qsv_h264_enc_rate_control_get_type (void) +{ + static GType rate_control_type = 0; + static const GEnumValue rate_controls[] = { + {MFX_RATECONTROL_CBR, "Constant Bitrate", "cbr"}, + {MFX_RATECONTROL_VBR, "Variable Bitrate", "vbr"}, + {MFX_RATECONTROL_CQP, "Constant Quantizer", "cqp"}, + {MFX_RATECONTROL_AVBR, "Average Bitrate", "avbr"}, + {MFX_RATECONTROL_LA, "VBR with look ahead (Non HRD compliant)", "la_vbr"}, + {MFX_RATECONTROL_ICQ, "Intelligent CQP", "icq"}, + {MFX_RATECONTROL_VCM, "Video Conferencing Mode (Non HRD compliant)", "vcm"}, + {MFX_RATECONTROL_LA_ICQ, "Intelligent CQP with LA (Non HRD compliant)", + "la_icq"}, + {MFX_RATECONTROL_LA_HRD, "HRD compliant LA", "la_hrd"}, + {MFX_RATECONTROL_QVBR, "VBR with CQP", "qvbr"}, + {0, nullptr, nullptr} + }; + + if (g_once_init_enter (&rate_control_type)) { + GType type = + g_enum_register_static ("GstQsvH264EncRateControl", rate_controls); + g_once_init_leave (&rate_control_type, type); + } + + return rate_control_type; +} + +#define 
GST_TYPE_QSV_H264_ENC_RC_LOOKAHEAD_DS (gst_qsv_h264_enc_rc_lookahead_ds_get_type ()) +static GType +gst_qsv_h264_enc_rc_lookahead_ds_get_type (void) +{ + static GType rc_lookahead_ds_type = 0; + static const GEnumValue rc_lookahead_ds[] = { + {MFX_LOOKAHEAD_DS_UNKNOWN, "Unknown", "unknown"}, + {MFX_LOOKAHEAD_DS_OFF, "Do not use down sampling", "off"}, + {MFX_LOOKAHEAD_DS_2x, + "Down sample frames two times before estimation", "2x"}, + {MFX_LOOKAHEAD_DS_4x, + "Down sample frames four times before estimation", "4x"}, + {0, nullptr, nullptr} + }; + + if (g_once_init_enter (&rc_lookahead_ds_type)) { + GType type = + g_enum_register_static ("GstQsvH264EncRCLookAheadDS", rc_lookahead_ds); + g_once_init_leave (&rc_lookahead_ds_type, type); + } + + return rc_lookahead_ds_type; +} + +enum +{ + PROP_0, + PROP_ADAPTER_LUID, + PROP_DEVICE_PATH, + PROP_CABAC, + PROP_MIN_QP_I, + PROP_MIN_QP_P, + PROP_MIN_QP_B, + PROP_MAX_QP_I, + PROP_MAX_QP_P, + PROP_MAX_QP_B, + PROP_QP_I, + PROP_QP_P, + PROP_QP_B, + PROP_GOP_SIZE, + PROP_I_FRAMES, + PROP_B_FRAMES, + PROP_REF_FRAMES, + PROP_BITRATE, + PROP_MAX_BITRATE, + PROP_RATE_CONTROL, + PROP_RC_LOOKAHEAD, + PROP_RC_LOOKAHEAD_DS, + PROP_AVBR_ACCURACY, + PROP_AVBR_CONVERGENCE, + PROP_ICQ_QUALITY, + PROP_QVBR_QUALITY, + PROP_CC_INSERT, +}; + +#define DEFAULT_CABAC MFX_CODINGOPTION_UNKNOWN +#define DEFAULT_QP 0 +#define DEFAULT_GOP_SIZE 0 +#define DEFAULT_I_FRAMES 0 +#define DEFAULT_B_FRAMES 0 +#define DEFAULT_REF_FRAMES 2 +#define DEFAULT_BITRATE 2000 +#define DEFAULT_MAX_BITRATE 0 +#define DEFAULT_RATE_CONTROL MFX_RATECONTROL_CBR +#define DEFAULT_RC_LOOKAHEAD 10 +#define DEFAULT_RC_LOOKAHEAD_DS MFX_LOOKAHEAD_DS_UNKNOWN +#define DEFAULT_AVBR_ACCURACY 0 +#define DEFAULT_AVBR_CONVERGENCE 0 +#define DEFAULT_IQC_QUALITY 0 +#define DEFAULT_QVBR_QUALITY 0 +#define DEFAULT_CC_INSERT GST_QSV_H264_ENC_SEI_INSERT + +typedef struct _GstQsvH264EncClassData +{ + GstCaps *sink_caps; + GstCaps *src_caps; + guint impl_index; + gint64 adapter_luid; + gchar *display_path; +} GstQsvH264EncClassData; + +typedef struct _GstQsvH264Enc +{ + GstQsvEncoder parent; + + mfxExtVideoSignalInfo signal_info; + mfxExtCodingOption option; + mfxExtCodingOption2 option2; + mfxExtCodingOption3 option3; + + gboolean packetized; + GstH264NalParser *parser; + + mfxU16 profile; + + GMutex prop_lock; + /* protected by prop_lock */ + gboolean bitrate_updated; + gboolean property_updated; + + /* properties */ + mfxU16 cabac; + guint min_qp_i; + guint min_qp_p; + guint min_qp_b; + guint max_qp_i; + guint max_qp_p; + guint max_qp_b; + guint qp_i; + guint qp_p; + guint qp_b; + guint gop_size; + guint iframes; + guint bframes; + guint ref_frames; + guint bitrate; + guint max_bitrate; + mfxU16 rate_control; + guint rc_lookahead; + mfxU16 rc_lookahead_ds; + guint avbr_accuracy; + guint avbr_convergence; + guint icq_quality; + guint qvbr_quality; + GstQsvH264EncSeiInsertMode cc_insert; +} GstQsvH264Enc; + +typedef struct _GstQsvH264EncClass +{ + GstQsvEncoderClass parent_class; +} GstQsvH264EncClass; + +static GstElementClass *parent_class = nullptr; + +#define GST_QSV_H264_ENC(object) ((GstQsvH264Enc *) (object)) +#define GST_QSV_H264_ENC_GET_CLASS(object) \ + (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstQsvH264EncClass)) + +static void gst_qsv_h264_enc_finalize (GObject * object); +static void gst_qsv_h264_enc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_qsv_h264_enc_get_property (GObject * object, guint prop_id, + GValue * 
value, GParamSpec * pspec); + +static gboolean gst_qsv_h264_enc_start (GstVideoEncoder * encoder); +static gboolean gst_qsv_h264_enc_transform_meta (GstVideoEncoder * encoder, + GstVideoCodecFrame * frame, GstMeta * meta); +static GstCaps *gst_qsv_h264_enc_getcaps (GstVideoEncoder * encoder, + GstCaps * filter); + +static gboolean gst_qsv_h264_enc_set_format (GstQsvEncoder * encoder, + GstVideoCodecState * state, mfxVideoParam * param, + GPtrArray * extra_params); +static gboolean gst_qsv_h264_enc_set_output_state (GstQsvEncoder * encoder, + GstVideoCodecState * state, mfxSession session); +static gboolean gst_qsv_h264_enc_attach_payload (GstQsvEncoder * encoder, + GstVideoCodecFrame * frame, GPtrArray * payload); +static GstBuffer *gst_qsv_h264_enc_create_output_buffer (GstQsvEncoder * + encoder, mfxBitstream * bitstream); +static GstQsvEncoderReconfigure +gst_qsv_h264_enc_check_reconfigure (GstQsvEncoder * encoder, + mfxVideoParam * param); + +static void +gst_qsv_h264_enc_class_init (GstQsvH264EncClass * klass, gpointer data) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + GstVideoEncoderClass *encoder_class = GST_VIDEO_ENCODER_CLASS (klass); + GstQsvEncoderClass *qsvenc_class = GST_QSV_ENCODER_CLASS (klass); + GstQsvH264EncClassData *cdata = (GstQsvH264EncClassData *) data; + + qsvenc_class->codec_id = MFX_CODEC_AVC; + qsvenc_class->impl_index = cdata->impl_index; + qsvenc_class->adapter_luid = cdata->adapter_luid; + if (cdata->display_path) + strcpy (qsvenc_class->display_path, cdata->display_path); + + object_class->finalize = gst_qsv_h264_enc_finalize; + object_class->set_property = gst_qsv_h264_enc_set_property; + object_class->get_property = gst_qsv_h264_enc_get_property; + +#ifdef G_OS_WIN32 + g_object_class_install_property (object_class, PROP_ADAPTER_LUID, + g_param_spec_int64 ("adapter-luid", "Adapter LUID", + "DXGI Adapter LUID (Locally Unique Identifier) of created device", + G_MININT64, G_MAXINT64, qsvenc_class->adapter_luid, + (GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_READABLE | + G_PARAM_STATIC_STRINGS))); +#else + g_object_class_install_property (object_class, PROP_DEVICE_PATH, + g_param_spec_string ("device-path", "Device Path", + "DRM device path", cdata->display_path, + (GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE | + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS))); +#endif + + g_object_class_install_property (object_class, PROP_CABAC, + g_param_spec_enum ("cabac", "Cabac", "Enables CABAC entropy coding", + GST_TYPE_QSV_CODING_OPTION, DEFAULT_CABAC, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_MIN_QP_I, + g_param_spec_uint ("min-qpi", "Min QP I", + "Minimum allowed QP value for I-frame types (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_MIN_QP_P, + g_param_spec_uint ("min-qpp", "Min QP P", + "Minimum allowed QP value for P-frame types (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_MIN_QP_B, + g_param_spec_uint ("min-qpb", "Min QP B", + "Minimum allowed QP value for B-frame types (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_MAX_QP_I, + g_param_spec_uint ("max-qpi", 
"Max QP I", + "Maximum allowed QP value for I-frame types (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_MAX_QP_P, + g_param_spec_uint ("max-qpp", "Max QP P", + "Maximum allowed QP value for P-frame types (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_MAX_QP_B, + g_param_spec_uint ("max-qpb", "Max QP B", + "Maximum allowed QP value for B-frame types (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_QP_I, + g_param_spec_uint ("qpi", "QP I", + "Constant quantizer for I frames (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_QP_P, + g_param_spec_uint ("qpp", "QP P", + "Constant quantizer for P frames (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_QP_B, + g_param_spec_uint ("qpb", "QP B", + "Constant quantizer for B frames (0: no limitations)", + 0, 51, DEFAULT_QP, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_GOP_SIZE, + g_param_spec_uint ("gop-size", "GOP Size", + "Number of pictures within a GOP (0: unspecified)", + 0, G_MAXINT, DEFAULT_GOP_SIZE, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_I_FRAMES, + g_param_spec_uint ("i-frames", "I Frames", + "Number of I frames between IDR frames" + "(0: every I frame is an IDR frame)", + 0, G_MAXINT, DEFAULT_I_FRAMES, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_B_FRAMES, + g_param_spec_uint ("b-frames", "B Frames", + "Number of B frames between I and P frames", + 0, G_MAXINT, DEFAULT_B_FRAMES, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_REF_FRAMES, + g_param_spec_uint ("ref-frames", "Reference Frames", + "Number of reference frames (0: unspecified)", + 0, 16, DEFAULT_REF_FRAMES, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_BITRATE, + g_param_spec_uint ("bitrate", "Bitrate", + "Target bitrate in kbit/sec, Ignored when selected rate-control mode " + "is constant QP variants (i.e., \"cqp\", \"icq\", and \"la_icq\")", + 0, G_MAXINT, DEFAULT_BITRATE, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_MAX_BITRATE, + g_param_spec_uint ("max-bitrate", "Max Bitrate", + "Maximum bitrate in kbit/sec, Ignored when selected rate-control mode " + "is constant QP variants (i.e., \"cqp\", \"icq\", and \"la_icq\")", + 0, G_MAXINT, DEFAULT_MAX_BITRATE, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_RATE_CONTROL, + g_param_spec_enum ("rate-control", "Rate Control", + "Rate Control Method", GST_TYPE_QSV_H264_ENC_RATE_CONTROL, + DEFAULT_RATE_CONTROL, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_RC_LOOKAHEAD, + g_param_spec_uint ("rc-lookahead", "Rate 
Control Look-ahead", + "Number of frames to look ahead for Rate Control, used for " + "\"la_vbr\", \"la_icq\", and \"la_hrd\" rate-control modes", + 10, 100, DEFAULT_RC_LOOKAHEAD, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_RC_LOOKAHEAD_DS, + g_param_spec_enum ("rc-lookahead-ds", + "Rate Control Look-ahead Downsampling", + "Downsampling method in look-ahead rate control", + GST_TYPE_QSV_H264_ENC_RC_LOOKAHEAD_DS, DEFAULT_RC_LOOKAHEAD_DS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_AVBR_ACCURACY, + g_param_spec_uint ("avbr-accuracy", "AVBR Accuracy", + "AVBR Accuracy in the unit of tenth of percent", + 0, G_MAXUINT16, DEFAULT_AVBR_ACCURACY, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_AVBR_CONVERGENCE, + g_param_spec_uint ("avbr-convergence", "AVBR Convergence", + "AVBR Convergence in the unit of 100 frames", + 0, G_MAXUINT16, DEFAULT_AVBR_ACCURACY, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_ICQ_QUALITY, + g_param_spec_uint ("icq-quality", "ICQ Quality", + "Intelligent Constant Quality (0: default)", + 0, 51, DEFAULT_IQC_QUALITY, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_QVBR_QUALITY, + g_param_spec_uint ("qvbr-quality", "QVBR Quality", + "Quality level used for \"qvbr\" rate-control mode (0: default)", + 0, 51, DEFAULT_QVBR_QUALITY, (GParamFlags) + (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (object_class, PROP_CC_INSERT, + g_param_spec_enum ("cc-insert", + "Closed Caption Insert", + "Closed Caption Insert mode. 
" + "Only CEA-708 RAW format is supported for now", + GST_TYPE_QSV_H264_ENC_SEI_INSERT_MODE, DEFAULT_CC_INSERT, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + parent_class = (GstElementClass *) g_type_class_peek_parent (klass); + gst_element_class_set_static_metadata (element_class, + "Intel Quick Sync Video H.264 Encoder", + "Codec/Encoder/Video/Hardware", + "Intel Quick Sync Video H.264 Encoder", + "Seungha Yang "); + + gst_element_class_add_pad_template (element_class, + gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, + cdata->sink_caps)); + gst_element_class_add_pad_template (element_class, + gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, + cdata->src_caps)); + + encoder_class->start = GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_start); + encoder_class->transform_meta = + GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_transform_meta); + encoder_class->getcaps = GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_getcaps); + + qsvenc_class->set_format = GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_set_format); + qsvenc_class->set_output_state = + GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_set_output_state); + qsvenc_class->attach_payload = + GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_attach_payload); + qsvenc_class->create_output_buffer = + GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_create_output_buffer); + qsvenc_class->check_reconfigure = + GST_DEBUG_FUNCPTR (gst_qsv_h264_enc_check_reconfigure); + + gst_caps_unref (cdata->sink_caps); + gst_caps_unref (cdata->src_caps); + g_free (cdata->display_path); + g_free (cdata); +} + +static void +gst_qsv_h264_enc_init (GstQsvH264Enc * self) +{ + self->cabac = DEFAULT_CABAC; + self->min_qp_i = DEFAULT_QP; + self->min_qp_p = DEFAULT_QP; + self->min_qp_b = DEFAULT_QP; + self->max_qp_i = DEFAULT_QP; + self->max_qp_p = DEFAULT_QP; + self->max_qp_p = DEFAULT_QP; + self->qp_i = DEFAULT_QP; + self->qp_p = DEFAULT_QP; + self->qp_b = DEFAULT_QP; + self->gop_size = DEFAULT_GOP_SIZE; + self->iframes = DEFAULT_I_FRAMES; + self->bframes = DEFAULT_B_FRAMES; + self->ref_frames = DEFAULT_REF_FRAMES; + self->bitrate = DEFAULT_BITRATE; + self->max_bitrate = DEFAULT_MAX_BITRATE; + self->rate_control = DEFAULT_RATE_CONTROL; + self->rc_lookahead = DEFAULT_RC_LOOKAHEAD; + self->rc_lookahead_ds = DEFAULT_RC_LOOKAHEAD_DS; + self->avbr_accuracy = DEFAULT_AVBR_ACCURACY; + self->avbr_convergence = DEFAULT_AVBR_CONVERGENCE; + self->icq_quality = DEFAULT_IQC_QUALITY; + self->qvbr_quality = DEFAULT_QVBR_QUALITY; + self->cc_insert = DEFAULT_CC_INSERT; + + g_mutex_init (&self->prop_lock); + + self->parser = gst_h264_nal_parser_new (); +} + +static void +gst_qsv_h264_enc_finalize (GObject * object) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (object); + + g_mutex_clear (&self->prop_lock); + gst_h264_nal_parser_free (self->parser); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static void +gst_qsv_h264_enc_check_update_uint (GstQsvH264Enc * self, guint * old_val, + guint new_val, gboolean is_bitrate_param) +{ + if (*old_val == new_val) + return; + + g_mutex_lock (&self->prop_lock); + *old_val = new_val; + if (is_bitrate_param) + self->bitrate_updated = TRUE; + else + self->property_updated = TRUE; + g_mutex_unlock (&self->prop_lock); +} + +static void +gst_qsv_h264_enc_check_update_enum (GstQsvH264Enc * self, mfxU16 * old_val, + gint new_val) +{ + if (*old_val == (mfxU16) new_val) + return; + + g_mutex_lock (&self->prop_lock); + *old_val = (mfxU16) new_val; + self->property_updated = TRUE; + g_mutex_unlock (&self->prop_lock); +} + +static void +gst_qsv_h264_enc_set_property (GObject * 
object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (object); + + switch (prop_id) { + case PROP_CABAC: + gst_qsv_h264_enc_check_update_enum (self, &self->cabac, + g_value_get_enum (value)); + break; + /* MIN/MAX QP change requires new sequence */ + case PROP_MIN_QP_I: + gst_qsv_h264_enc_check_update_uint (self, &self->min_qp_i, + g_value_get_uint (value), FALSE); + break; + case PROP_MIN_QP_P: + gst_qsv_h264_enc_check_update_uint (self, &self->min_qp_p, + g_value_get_uint (value), FALSE); + break; + case PROP_MIN_QP_B: + gst_qsv_h264_enc_check_update_uint (self, &self->min_qp_b, + g_value_get_uint (value), FALSE); + break; + case PROP_MAX_QP_I: + gst_qsv_h264_enc_check_update_uint (self, &self->max_qp_i, + g_value_get_uint (value), FALSE); + break; + case PROP_MAX_QP_P: + gst_qsv_h264_enc_check_update_uint (self, &self->max_qp_p, + g_value_get_uint (value), FALSE); + break; + case PROP_MAX_QP_B: + gst_qsv_h264_enc_check_update_uint (self, &self->max_qp_b, + g_value_get_uint (value), FALSE); + break; + case PROP_QP_I: + gst_qsv_h264_enc_check_update_uint (self, &self->qp_i, + g_value_get_uint (value), TRUE); + break; + case PROP_QP_P: + gst_qsv_h264_enc_check_update_uint (self, &self->qp_p, + g_value_get_uint (value), TRUE); + break; + case PROP_QP_B: + gst_qsv_h264_enc_check_update_uint (self, &self->qp_b, + g_value_get_uint (value), TRUE); + break; + case PROP_GOP_SIZE: + gst_qsv_h264_enc_check_update_uint (self, &self->gop_size, + g_value_get_uint (value), FALSE); + break; + case PROP_I_FRAMES: + gst_qsv_h264_enc_check_update_uint (self, &self->iframes, + g_value_get_uint (value), FALSE); + break; + case PROP_B_FRAMES: + gst_qsv_h264_enc_check_update_uint (self, &self->bframes, + g_value_get_uint (value), FALSE); + break; + case PROP_REF_FRAMES: + gst_qsv_h264_enc_check_update_uint (self, &self->ref_frames, + g_value_get_uint (value), FALSE); + break; + case PROP_BITRATE: + gst_qsv_h264_enc_check_update_uint (self, &self->bitrate, + g_value_get_uint (value), TRUE); + break; + case PROP_MAX_BITRATE: + gst_qsv_h264_enc_check_update_uint (self, &self->max_bitrate, + g_value_get_uint (value), TRUE); + break; + case PROP_RATE_CONTROL: + gst_qsv_h264_enc_check_update_enum (self, &self->rate_control, + g_value_get_enum (value)); + break; + case PROP_RC_LOOKAHEAD: + gst_qsv_h264_enc_check_update_uint (self, &self->rc_lookahead, + g_value_get_uint (value), FALSE); + break; + case PROP_RC_LOOKAHEAD_DS: + gst_qsv_h264_enc_check_update_enum (self, &self->rc_lookahead_ds, + g_value_get_enum (value)); + break; + case PROP_AVBR_ACCURACY: + gst_qsv_h264_enc_check_update_uint (self, &self->avbr_accuracy, + g_value_get_uint (value), FALSE); + break; + case PROP_AVBR_CONVERGENCE: + gst_qsv_h264_enc_check_update_uint (self, &self->avbr_convergence, + g_value_get_uint (value), FALSE); + break; + case PROP_ICQ_QUALITY: + gst_qsv_h264_enc_check_update_uint (self, &self->icq_quality, + g_value_get_uint (value), FALSE); + break; + case PROP_QVBR_QUALITY: + gst_qsv_h264_enc_check_update_uint (self, &self->qvbr_quality, + g_value_get_uint (value), FALSE); + break; + case PROP_CC_INSERT: + /* This property is unrelated to encoder-reset */ + self->cc_insert = (GstQsvH264EncSeiInsertMode) g_value_get_enum (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_qsv_h264_enc_get_property (GObject * object, guint prop_id, GValue * value, + GParamSpec * pspec) +{ + GstQsvH264Enc *self = 
GST_QSV_H264_ENC (object); + GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self); + + switch (prop_id) { + case PROP_ADAPTER_LUID: + g_value_set_int64 (value, klass->adapter_luid); + break; + case PROP_DEVICE_PATH: + g_value_set_string (value, klass->display_path); + break; + case PROP_CABAC: + g_value_set_enum (value, self->cabac); + break; + case PROP_MIN_QP_I: + g_value_set_uint (value, self->min_qp_i); + break; + case PROP_MIN_QP_P: + g_value_set_uint (value, self->min_qp_p); + break; + case PROP_MIN_QP_B: + g_value_set_uint (value, self->min_qp_b); + break; + case PROP_MAX_QP_I: + g_value_set_uint (value, self->max_qp_i); + break; + case PROP_MAX_QP_P: + g_value_set_uint (value, self->max_qp_p); + break; + case PROP_MAX_QP_B: + g_value_set_uint (value, self->max_qp_b); + break; + case PROP_QP_I: + g_value_set_uint (value, self->qp_i); + break; + case PROP_QP_P: + g_value_set_uint (value, self->qp_p); + break; + case PROP_QP_B: + g_value_set_uint (value, self->qp_b); + break; + case PROP_GOP_SIZE: + g_value_set_uint (value, self->gop_size); + break; + case PROP_I_FRAMES: + g_value_set_uint (value, self->iframes); + break; + case PROP_B_FRAMES: + g_value_set_uint (value, self->bframes); + break; + case PROP_REF_FRAMES: + g_value_set_uint (value, self->ref_frames); + break; + case PROP_BITRATE: + g_value_set_uint (value, self->bitrate); + break; + case PROP_MAX_BITRATE: + g_value_set_uint (value, self->max_bitrate); + break; + case PROP_RATE_CONTROL: + g_value_set_enum (value, self->rate_control); + break; + case PROP_RC_LOOKAHEAD: + g_value_set_uint (value, self->rc_lookahead); + break; + case PROP_RC_LOOKAHEAD_DS: + g_value_set_enum (value, self->rc_lookahead_ds); + break; + case PROP_AVBR_ACCURACY: + g_value_set_uint (value, self->avbr_accuracy); + break; + case PROP_AVBR_CONVERGENCE: + g_value_set_uint (value, self->avbr_convergence); + break; + case PROP_ICQ_QUALITY: + g_value_set_uint (value, self->icq_quality); + break; + case PROP_QVBR_QUALITY: + g_value_set_uint (value, self->qvbr_quality); + break; + case PROP_CC_INSERT: + g_value_set_enum (value, self->cc_insert); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static gboolean +gst_qsv_h264_enc_start (GstVideoEncoder * encoder) +{ + /* To avoid negative DTS when B frame is enabled */ + gst_video_encoder_set_min_pts (encoder, GST_SECOND * 60 * 60 * 1000); + + return TRUE; +} + +static gboolean +gst_qsv_h264_enc_transform_meta (GstVideoEncoder * encoder, + GstVideoCodecFrame * frame, GstMeta * meta) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (encoder); + GstVideoCaptionMeta *cc_meta; + + /* We need to handle only case CC meta should be dropped */ + if (self->cc_insert != GST_QSV_H264_ENC_SEI_INSERT_AND_DROP) + goto out; + + if (meta->info->api != GST_VIDEO_CAPTION_META_API_TYPE) + goto out; + + cc_meta = (GstVideoCaptionMeta *) meta; + if (cc_meta->caption_type != GST_VIDEO_CAPTION_TYPE_CEA708_RAW) + goto out; + + /* Don't copy this meta into output buffer */ + return FALSE; + +out: + return GST_VIDEO_ENCODER_CLASS (parent_class)->transform_meta (encoder, + frame, meta); +} + +static GstCaps * +gst_qsv_h264_enc_getcaps (GstVideoEncoder * encoder, GstCaps * filter) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (encoder); + GstCaps *allowed_caps; + GstCaps *template_caps; + GstCaps *supported_caps; + std::set < std::string > downstream_profiles; + + allowed_caps = gst_pad_get_allowed_caps (encoder->srcpad); + + /* Shouldn't be any or empty though, just return template 
caps in this case */ + if (!allowed_caps || gst_caps_is_empty (allowed_caps) || + gst_caps_is_any (allowed_caps)) { + gst_clear_caps (&allowed_caps); + + return gst_video_encoder_proxy_getcaps (encoder, nullptr, filter); + } + + /* Check if downstream specified a profile explicitly, then filter out + * incompatible interlaced field */ + for (guint i = 0; i < gst_caps_get_size (allowed_caps); i++) { + const GValue *profile_value; + const gchar *profile; + GstStructure *s; + + s = gst_caps_get_structure (allowed_caps, i); + profile_value = gst_structure_get_value (s, "profile"); + if (!profile_value) + continue; + + if (GST_VALUE_HOLDS_LIST (profile_value)) { + for (guint j = 0; j < gst_value_list_get_size (profile_value); j++) { + const GValue *p = gst_value_list_get_value (profile_value, j); + + if (!G_VALUE_HOLDS_STRING (p)) + continue; + + profile = g_value_get_string (p); + if (profile) + downstream_profiles.insert (profile); + } + + } else if (G_VALUE_HOLDS_STRING (profile_value)) { + profile = g_value_get_string (profile_value); + if (profile) + downstream_profiles.insert (profile); + } + } + + GST_DEBUG_OBJECT (self, "Downstream specified %" G_GSIZE_FORMAT " profiles", + downstream_profiles.size ()); + + /* Caps returned by gst_pad_get_allowed_caps() should hold the profile field + * already */ + if (downstream_profiles.size () == 0) { + GST_WARNING_OBJECT (self, + "Allowed caps holds no profile field %" GST_PTR_FORMAT, allowed_caps); + gst_clear_caps (&allowed_caps); + + return gst_video_encoder_proxy_getcaps (encoder, nullptr, filter); + } + gst_clear_caps (&allowed_caps); + + /* Profile allows interlaced? */ + /* *INDENT-OFF* */ + gboolean can_support_interlaced = FALSE; + for (const auto &iter: downstream_profiles) { + if (iter == "high" || iter == "main") { + can_support_interlaced = TRUE; + break; + } + } + /* *INDENT-ON* */ + + GST_DEBUG_OBJECT (self, "Downstream %s support interlaced format", + can_support_interlaced ? 
"can" : "cannot"); + + if (can_support_interlaced) { + /* No special handling is needed */ + return gst_video_encoder_proxy_getcaps (encoder, nullptr, filter); + } + + template_caps = gst_pad_get_pad_template_caps (encoder->sinkpad); + template_caps = gst_caps_make_writable (template_caps); + + gst_caps_set_simple (template_caps, "interlace-mode", G_TYPE_STRING, + "progressive", nullptr); + + supported_caps = gst_video_encoder_proxy_getcaps (encoder, + template_caps, filter); + gst_caps_unref (template_caps); + + GST_DEBUG_OBJECT (self, "Returning %" GST_PTR_FORMAT, supported_caps); + + return supported_caps; +} + +typedef struct +{ + mfxU16 profile; + const gchar *profile_str; +} H264Profile; + +static const H264Profile profile_map[] = { + {MFX_PROFILE_AVC_HIGH, "high"}, + {MFX_PROFILE_AVC_MAIN, "main"}, + {MFX_PROFILE_AVC_CONSTRAINED_BASELINE, "constrained-baseline"}, + {MFX_PROFILE_AVC_PROGRESSIVE_HIGH, "progressive-high"}, + {MFX_PROFILE_AVC_CONSTRAINED_HIGH, "constrained-high"}, + {MFX_PROFILE_AVC_BASELINE, "baseline"} +}; + +static const gchar * +gst_qsv_h264_profile_to_string (mfxU16 profile) +{ + for (guint i = 0; i < G_N_ELEMENTS (profile_map); i++) { + if (profile_map[i].profile == profile) + return profile_map[i].profile_str; + } + + return nullptr; +} + +static mfxU16 +gst_qsv_h264_profile_string_to_value (const gchar * profile_str) +{ + for (guint i = 0; i < G_N_ELEMENTS (profile_map); i++) { + if (g_strcmp0 (profile_map[i].profile_str, profile_str) == 0) + return profile_map[i].profile; + } + + return MFX_PROFILE_UNKNOWN; +} + +static void +gst_qsv_h264_enc_init_extra_params (GstQsvH264Enc * self) +{ + memset (&self->signal_info, 0, sizeof (mfxExtVideoSignalInfo)); + memset (&self->option, 0, sizeof (mfxExtCodingOption)); + memset (&self->option2, 0, sizeof (mfxExtCodingOption2)); + memset (&self->option3, 0, sizeof (mfxExtCodingOption3)); + + self->signal_info.Header.BufferId = MFX_EXTBUFF_VIDEO_SIGNAL_INFO; + self->signal_info.Header.BufferSz = sizeof (mfxExtVideoSignalInfo); + + self->option.Header.BufferId = MFX_EXTBUFF_CODING_OPTION; + self->option.Header.BufferSz = sizeof (mfxExtCodingOption); + + self->option2.Header.BufferId = MFX_EXTBUFF_CODING_OPTION2; + self->option2.Header.BufferSz = sizeof (mfxExtCodingOption2); + + self->option3.Header.BufferId = MFX_EXTBUFF_CODING_OPTION3; + self->option3.Header.BufferSz = sizeof (mfxExtCodingOption3); +} + +static gboolean +gst_qsv_h264_enc_set_format (GstQsvEncoder * encoder, + GstVideoCodecState * state, mfxVideoParam * param, GPtrArray * extra_params) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (encoder); + GstCaps *allowed_caps, *fixated_caps; + std::set < std::string > downstream_profiles; + std::set < std::string > tmp; + guint bframes = self->bframes; + mfxU16 cabac = self->cabac; + std::string profile_str; + mfxU16 mfx_profile = MFX_PROFILE_UNKNOWN; + GstVideoInfo *info = &state->info; + mfxExtVideoSignalInfo *signal_info = nullptr; + mfxExtCodingOption *option; + mfxExtCodingOption2 *option2; + mfxExtCodingOption3 *option3; + GstStructure *s; + const gchar *stream_format; + mfxFrameInfo *frame_info; + + frame_info = ¶m->mfx.FrameInfo; + + /* QSV specific alignment requirement: + * width/height should be multiple of 16, and for interlaced encoding, + * height should be multiple of 32 */ + frame_info->Width = GST_ROUND_UP_16 (info->width); + if (GST_VIDEO_INFO_IS_INTERLACED (info)) { + frame_info->Height = GST_ROUND_UP_32 (info->height); + switch (GST_VIDEO_INFO_FIELD_ORDER (info)) { + case 
GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST: + frame_info->PicStruct = MFX_PICSTRUCT_FIELD_TFF; + break; + case GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST: + frame_info->PicStruct = MFX_PICSTRUCT_FIELD_BFF; + break; + default: + frame_info->PicStruct = MFX_PICSTRUCT_UNKNOWN; + break; + } + } else { + frame_info->Height = GST_ROUND_UP_16 (info->height); + frame_info->PicStruct = MFX_PICSTRUCT_PROGRESSIVE; + } + + /* QSV wouldn't be happy with this size, increase */ + if (frame_info->Width == 16) + frame_info->Width = 32; + + if (frame_info->Height == 16) + frame_info->Height = 32; + + frame_info->CropW = info->width; + frame_info->CropH = info->height; + if (GST_VIDEO_INFO_FPS_N (info) > 0 && GST_VIDEO_INFO_FPS_D (info) > 0) { + frame_info->FrameRateExtN = GST_VIDEO_INFO_FPS_N (info); + frame_info->FrameRateExtD = GST_VIDEO_INFO_FPS_D (info); + } else { + /* HACK: Same as x264enc */ + frame_info->FrameRateExtN = 25; + frame_info->FrameRateExtD = 1; + } + + frame_info->AspectRatioW = GST_VIDEO_INFO_PAR_N (info); + frame_info->AspectRatioH = GST_VIDEO_INFO_PAR_D (info); + + /* TODO: update for non 4:2:0 formats. Currently NV12 only */ + frame_info->ChromaFormat = MFX_CHROMAFORMAT_YUV420; + switch (GST_VIDEO_INFO_FORMAT (info)) { + case GST_VIDEO_FORMAT_NV12: + frame_info->FourCC = MFX_FOURCC_NV12; + frame_info->BitDepthLuma = 8; + frame_info->BitDepthChroma = 8; + break; + default: + GST_ERROR_OBJECT (self, "Unexpected format %s", + gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info))); + return FALSE; + } + + allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder)); + if (!allowed_caps) { + GST_WARNING_OBJECT (self, "Failed to get allowed caps"); + return FALSE; + } + + gst_qsv_h264_enc_init_extra_params (self); + option = &self->option; + option2 = &self->option2; + option3 = &self->option3; + + self->packetized = FALSE; + + fixated_caps = gst_caps_fixate (gst_caps_copy (allowed_caps)); + s = gst_caps_get_structure (fixated_caps, 0); + stream_format = gst_structure_get_string (s, "stream-format"); + if (g_strcmp0 (stream_format, "avc") == 0) + self->packetized = TRUE; + gst_caps_unref (fixated_caps); + + for (guint i = 0; i < gst_caps_get_size (allowed_caps); i++) { + const GValue *profile_value; + const gchar *profile; + + s = gst_caps_get_structure (allowed_caps, i); + profile_value = gst_structure_get_value (s, "profile"); + if (!profile_value) + continue; + + if (GST_VALUE_HOLDS_LIST (profile_value)) { + for (guint j = 0; j < gst_value_list_get_size (profile_value); j++) { + const GValue *p = gst_value_list_get_value (profile_value, j); + + if (!G_VALUE_HOLDS_STRING (p)) + continue; + + profile = g_value_get_string (p); + if (profile) + downstream_profiles.insert (profile); + } + + } else if (G_VALUE_HOLDS_STRING (profile_value)) { + profile = g_value_get_string (profile_value); + if (profile) + downstream_profiles.insert (profile); + } + } + + GST_DEBUG_OBJECT (self, "Downstream supports %" G_GSIZE_FORMAT " profiles", + downstream_profiles.size ()); + + /* Prune incompatible profiles */ + if ((param->mfx.FrameInfo.PicStruct & MFX_PICSTRUCT_PROGRESSIVE) == 0) { + /* Interlaced, only main and high profiles are allowed */ + downstream_profiles.erase (gst_qsv_h264_profile_to_string + (MFX_PROFILE_AVC_CONSTRAINED_BASELINE)); + downstream_profiles.erase (gst_qsv_h264_profile_to_string + (MFX_PROFILE_AVC_PROGRESSIVE_HIGH)); + downstream_profiles.erase (gst_qsv_h264_profile_to_string + (MFX_PROFILE_AVC_CONSTRAINED_HIGH)); + downstream_profiles.erase 
(gst_qsv_h264_profile_to_string + (MFX_PROFILE_AVC_BASELINE)); + } + + if (downstream_profiles.empty ()) { + GST_WARNING_OBJECT (self, "No compatible profile was detected"); + gst_clear_caps (&allowed_caps); + return FALSE; + } + + if (bframes > 0) { + /* baseline and constrained-high don't support B-frames */ + tmp = downstream_profiles; + tmp.erase (gst_qsv_h264_profile_to_string + (MFX_PROFILE_AVC_CONSTRAINED_BASELINE)); + tmp.erase (gst_qsv_h264_profile_to_string + (MFX_PROFILE_AVC_CONSTRAINED_HIGH)); + tmp.erase (gst_qsv_h264_profile_to_string (MFX_PROFILE_AVC_BASELINE)); + + if (tmp.empty ()) { + GST_WARNING_OBJECT (self, "None of the downstream profiles supports B-frames"); + bframes = 0; + tmp = downstream_profiles; + } + + downstream_profiles = tmp; + } + + if (cabac == MFX_CODINGOPTION_ON) { + /* baseline doesn't support cabac */ + tmp = downstream_profiles; + tmp.erase (gst_qsv_h264_profile_to_string + (MFX_PROFILE_AVC_CONSTRAINED_BASELINE)); + tmp.erase (gst_qsv_h264_profile_to_string (MFX_PROFILE_AVC_BASELINE)); + + if (tmp.empty ()) { + GST_WARNING_OBJECT (self, "None of the downstream profiles supports CABAC"); + cabac = MFX_CODINGOPTION_OFF; + tmp = downstream_profiles; + } + downstream_profiles = tmp; + } + + /* profile_map is ordered by our preference; pick the first supported one */ + /* *INDENT-OFF* */ + for (guint i = 0; i < G_N_ELEMENTS (profile_map); i++) { + auto it = downstream_profiles.find (profile_map[i].profile_str); + if (it != downstream_profiles.end ()) { + profile_str = *it; + break; + } + } + /* *INDENT-ON* */ + + if (profile_str.empty ()) { + GST_WARNING_OBJECT (self, "Failed to determine profile"); + return FALSE; + } + + GST_DEBUG_OBJECT (self, "Selected profile %s", profile_str.c_str ()); + mfx_profile = gst_qsv_h264_profile_string_to_value (profile_str.c_str ()); + + gst_clear_caps (&allowed_caps); + + if (cabac == MFX_CODINGOPTION_UNKNOWN) { + switch (mfx_profile) { + case MFX_PROFILE_AVC_CONSTRAINED_BASELINE: + case MFX_PROFILE_AVC_BASELINE: + cabac = MFX_CODINGOPTION_OFF; + break; + default: + cabac = MFX_CODINGOPTION_ON; + break; + } + } + + g_mutex_lock (&self->prop_lock); + param->mfx.CodecId = MFX_CODEC_AVC; + param->mfx.CodecProfile = mfx_profile; + param->mfx.GopRefDist = bframes + 1; + param->mfx.GopPicSize = self->gop_size; + param->mfx.IdrInterval = self->iframes; + param->mfx.RateControlMethod = self->rate_control; + param->mfx.NumRefFrame = self->ref_frames; + + /* Calculate multiplier to avoid overflowing the 16-bit Kbps fields */ + guint max_val = MAX (self->bitrate, self->max_bitrate); + guint multiplier = (max_val + 0x10000) / 0x10000; + + switch (param->mfx.RateControlMethod) { + case MFX_RATECONTROL_CBR: + case MFX_RATECONTROL_VBR: + case MFX_RATECONTROL_VCM: + case MFX_RATECONTROL_QVBR: + param->mfx.TargetKbps = self->bitrate / multiplier; + param->mfx.MaxKbps = self->max_bitrate / multiplier; + param->mfx.BRCParamMultiplier = (mfxU16) multiplier; + break; + case MFX_RATECONTROL_CQP: + param->mfx.QPI = self->qp_i; + param->mfx.QPP = self->qp_p; + param->mfx.QPB = self->qp_b; + break; + case MFX_RATECONTROL_AVBR: + param->mfx.TargetKbps = self->bitrate; + param->mfx.Accuracy = self->avbr_accuracy; + param->mfx.Convergence = self->avbr_convergence; + param->mfx.BRCParamMultiplier = (mfxU16) multiplier; + break; + case MFX_RATECONTROL_LA: + param->mfx.TargetKbps = self->bitrate; + param->mfx.BRCParamMultiplier = (mfxU16) multiplier; + break; + case MFX_RATECONTROL_LA_HRD: + param->mfx.TargetKbps = self->bitrate; + param->mfx.MaxKbps = self->max_bitrate; + param->mfx.BRCParamMultiplier = (mfxU16) 
multiplier; + break; + case MFX_RATECONTROL_ICQ: + case MFX_RATECONTROL_LA_ICQ: + param->mfx.ICQQuality = self->icq_quality; + break; + default: + GST_WARNING_OBJECT (self, + "Unhandled rate-control method %d", self->rate_control); + break; + } + + /* Write signal info only when upstream caps contain valid colorimetry, + * because the derived default colorimetry in gst_video_info_from_caps() tends + * to be very wrong in various cases, and it's even worse than "unknown" */ + if (state->caps) { + GstStructure *s = gst_caps_get_structure (state->caps, 0); + GstVideoColorimetry cinfo; + const gchar *str; + + str = gst_structure_get_string (s, "colorimetry"); + if (str && gst_video_colorimetry_from_string (&cinfo, str)) { + signal_info = &self->signal_info; + + /* 0: Component, 1: PAL, 2: NTSC, 3: SECAM, 4: MAC, 5: Unspecified */ + signal_info->VideoFormat = 5; + if (cinfo.range == GST_VIDEO_COLOR_RANGE_0_255) + signal_info->VideoFullRange = 1; + else + signal_info->VideoFullRange = 0; + signal_info->ColourDescriptionPresent = 1; + signal_info->ColourPrimaries = + gst_video_color_primaries_to_iso (cinfo.primaries); + signal_info->TransferCharacteristics = + gst_video_transfer_function_to_iso (cinfo.transfer); + signal_info->MatrixCoefficients = + gst_video_color_matrix_to_iso (cinfo.matrix); + } + } + + if (cabac == MFX_CODINGOPTION_OFF) + option->CAVLC = MFX_CODINGOPTION_ON; + else + option->CAVLC = MFX_CODINGOPTION_OFF; + + /* TODO: property ? */ + option->AUDelimiter = MFX_CODINGOPTION_ON; + + /* Enable PicTiming SEI by default */ + option->PicTimingSEI = MFX_CODINGOPTION_ON; + + /* VUI is useful in various cases, so we don't want to disable it */ + option2->DisableVUI = MFX_CODINGOPTION_OFF; + + /* Do not repeat PPS */ + option2->RepeatPPS = MFX_CODINGOPTION_OFF; + + if (param->mfx.RateControlMethod == MFX_RATECONTROL_LA || + param->mfx.RateControlMethod == MFX_RATECONTROL_LA_HRD || + param->mfx.RateControlMethod == MFX_RATECONTROL_LA_ICQ) { + option2->LookAheadDS = self->rc_lookahead_ds; + option2->LookAheadDepth = self->rc_lookahead; + } + + option2->MinQPI = self->min_qp_i; + option2->MinQPP = self->min_qp_p; + option2->MinQPB = self->min_qp_b; + option2->MaxQPI = self->max_qp_i; + option2->MaxQPP = self->max_qp_p; + option2->MaxQPB = self->max_qp_b; + + /* QSV wants MFX_B_REF_PYRAMID when more than 1 b-frame is enabled */ + if (param->mfx.GopRefDist > 2) + option2->BRefType = MFX_B_REF_PYRAMID; + + /* Upstream specified a framerate, so believe it's a fixed framerate */ + if (GST_VIDEO_INFO_FPS_N (info) > 0 && GST_VIDEO_INFO_FPS_D (info) > 0) { + option2->FixedFrameRate = MFX_CODINGOPTION_ON; + option3->TimingInfoPresent = MFX_CODINGOPTION_ON; + } + + if (param->mfx.RateControlMethod == MFX_RATECONTROL_QVBR) + option3->QVBRQuality = self->qvbr_quality; + + if (signal_info) + g_ptr_array_add (extra_params, signal_info); + g_ptr_array_add (extra_params, option); + g_ptr_array_add (extra_params, option2); + g_ptr_array_add (extra_params, option3); + + param->ExtParam = (mfxExtBuffer **) extra_params->pdata; + param->NumExtParam = extra_params->len; + + self->bitrate_updated = FALSE; + self->property_updated = FALSE; + + g_mutex_unlock (&self->prop_lock); + + return TRUE; +} + +static gboolean +gst_qsv_h264_enc_set_output_state (GstQsvEncoder * encoder, + GstVideoCodecState * state, mfxSession session) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (encoder); + GstCaps *caps; + GstTagList *tags; + GstVideoCodecState *out_state; + guint bitrate, max_bitrate; + guint multiplier = 1; + mfxVideoParam 
param; + const gchar *profile_str; + mfxStatus status; + mfxExtCodingOptionSPSPPS sps_pps; + mfxExtBuffer *ext_buffers[1]; + mfxU8 sps[1024]; + mfxU8 pps[1024]; + GstBuffer *codec_data = nullptr; + + memset (&param, 0, sizeof (mfxVideoParam)); + memset (&sps_pps, 0, sizeof (mfxExtCodingOptionSPSPPS)); + if (self->packetized) { + sps_pps.Header.BufferId = MFX_EXTBUFF_CODING_OPTION_SPSPPS; + sps_pps.Header.BufferSz = sizeof (mfxExtCodingOptionSPSPPS); + + sps_pps.SPSBuffer = sps; + sps_pps.SPSBufSize = sizeof (sps); + + sps_pps.PPSBuffer = pps; + sps_pps.PPSBufSize = sizeof (pps); + + ext_buffers[0] = (mfxExtBuffer *) & sps_pps; + + param.NumExtParam = 1; + param.ExtParam = ext_buffers; + } + + status = MFXVideoENCODE_GetVideoParam (session, &param); + if (status < MFX_ERR_NONE) { + GST_ERROR_OBJECT (self, "Failed to get video param %d (%s)", + QSV_STATUS_ARGS (status)); + return FALSE; + } else if (status != MFX_ERR_NONE) { + GST_WARNING_OBJECT (self, "GetVideoParam returned warning %d (%s)", + QSV_STATUS_ARGS (status)); + } + + if (self->packetized) { + GstH264ParserResult rst; + GstH264NalUnit sps_nalu, pps_nalu; + GstMapInfo info; + guint8 *data; + guint8 profile_idc, profile_comp, level_idc; + const guint nal_length_size = 4; + const guint num_sps = 1; + const guint num_pps = 1; + + rst = gst_h264_parser_identify_nalu_unchecked (self->parser, + sps, 0, sps_pps.SPSBufSize, &sps_nalu); + if (rst != GST_H264_PARSER_OK) { + GST_ERROR_OBJECT (self, "Failed to identify SPS nal"); + return FALSE; + } + + if (sps_nalu.size < 4) { + GST_ERROR_OBJECT (self, "Too small sps nal size %d", sps_nalu.size); + return FALSE; + } + + data = sps_nalu.data + sps_nalu.offset; + profile_idc = data[0]; + profile_comp = data[1]; + level_idc = data[2]; + + rst = gst_h264_parser_identify_nalu_unchecked (self->parser, + pps, 0, sps_pps.PPSBufSize, &pps_nalu); + if (rst != GST_H264_PARSER_OK) { + GST_ERROR_OBJECT (self, "Failed to identify PPS nal"); + return FALSE; + } + + /* 5: configuration version, profile, compatibility, level, nal length + * 1: num sps + * 2: sps size bytes + * sizeof (sps) + * 1: num pps + * 2: pps size bytes + * sizeof (pps) + * + * -> 11 + sps_size + pps_size + */ + codec_data = gst_buffer_new_and_alloc (11 + sps_nalu.size + pps_nalu.size); + + gst_buffer_map (codec_data, &info, GST_MAP_WRITE); + + data = (guint8 *) info.data; + data[0] = 1; + data[1] = profile_idc; + data[2] = profile_comp; + data[3] = level_idc; + data[4] = 0xfc | (nal_length_size - 1); + data[5] = 0xe0 | num_sps; + data += 6; + GST_WRITE_UINT16_BE (data, sps_nalu.size); + data += 2; + memcpy (data, sps_nalu.data + sps_nalu.offset, sps_nalu.size); + data += sps_nalu.size; + + data[0] = num_pps; + data++; + + GST_WRITE_UINT16_BE (data, pps_nalu.size); + data += 2; + memcpy (data, pps_nalu.data + pps_nalu.offset, pps_nalu.size); + + gst_buffer_unmap (codec_data, &info); + } + + caps = gst_caps_from_string ("video/x-h264, alignment = (string) au"); + profile_str = gst_qsv_h264_profile_to_string (param.mfx.CodecProfile); + if (profile_str) + gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile_str, nullptr); + + if (self->packetized) { + gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "avc", + "codec_data", GST_TYPE_BUFFER, codec_data, nullptr); + gst_buffer_unref (codec_data); + } else { + gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "byte-stream", + nullptr); + } + + out_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (encoder), + caps, state); + gst_video_codec_state_unref 
(out_state); + + tags = gst_tag_list_new_empty (); + gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER, "qsvh264enc", + nullptr); + + if (param.mfx.BRCParamMultiplier > 0) + multiplier = param.mfx.BRCParamMultiplier; + + switch (param.mfx.RateControlMethod) { + case MFX_RATECONTROL_CQP: + case MFX_RATECONTROL_ICQ: + case MFX_RATECONTROL_LA_ICQ: + /* We don't know target/max bitrate in this case */ + break; + default: + max_bitrate = (guint) param.mfx.MaxKbps * multiplier; + bitrate = (guint) param.mfx.TargetKbps * multiplier; + if (bitrate > 0) { + gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, + GST_TAG_NOMINAL_BITRATE, bitrate * 1000, nullptr); + } + + if (max_bitrate > 0) { + gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, + GST_TAG_MAXIMUM_BITRATE, max_bitrate * 1000, nullptr); + } + break; + } + + gst_video_encoder_merge_tags (GST_VIDEO_ENCODER (encoder), + tags, GST_TAG_MERGE_REPLACE); + gst_tag_list_unref (tags); + + return TRUE; +} + +static gboolean +gst_qsv_h264_enc_foreach_caption_meta (GstBuffer * buffer, GstMeta ** meta, + GPtrArray * payload) +{ + GstVideoCaptionMeta *cc_meta; + GstByteWriter br; + guint payload_size; + guint extra_size; + mfxPayload *p; + + if ((*meta)->info->api != GST_VIDEO_CAPTION_META_API_TYPE) + return TRUE; + + cc_meta = (GstVideoCaptionMeta *) (*meta); + + if (cc_meta->caption_type != GST_VIDEO_CAPTION_TYPE_CEA708_RAW) + return TRUE; + + /* QSV requires the full sei_message() structure */ + /* 1 byte country_code + 10 bytes CEA-708 specific data + caption data */ + payload_size = 11 + cc_meta->size; + extra_size = payload_size / 255; + + /* 1 byte SEI type + 1 byte SEI payload size (+ extra) + payload data */ + gst_byte_writer_init_with_size (&br, 2 + extra_size + payload_size, FALSE); + + /* SEI type */ + gst_byte_writer_put_uint8 (&br, 4); + + /* SEI payload size */ + while (payload_size >= 0xff) { + gst_byte_writer_put_uint8 (&br, 0xff); + payload_size -= 0xff; + } + gst_byte_writer_put_uint8 (&br, payload_size); + + /* 8-bits itu_t_t35_country_code */ + gst_byte_writer_put_uint8 (&br, 181); + + /* 16-bits itu_t_t35_provider_code */ + gst_byte_writer_put_uint8 (&br, 0); + gst_byte_writer_put_uint8 (&br, 49); + + /* 32-bits ATSC_user_identifier */ + gst_byte_writer_put_uint8 (&br, 'G'); + gst_byte_writer_put_uint8 (&br, 'A'); + gst_byte_writer_put_uint8 (&br, '9'); + gst_byte_writer_put_uint8 (&br, '4'); + + /* 8-bits ATSC1_data_user_data_type_code */ + gst_byte_writer_put_uint8 (&br, 3); + + /* 8-bits: + * 1 bit process_em_data_flag (0) + * 1 bit process_cc_data_flag (1) + * 1 bit additional_data_flag (0) + * 5-bits cc_count + */ + gst_byte_writer_put_uint8 (&br, ((cc_meta->size / 3) & 0x1f) | 0x40); + + /* 8 bits em_data, unused */ + gst_byte_writer_put_uint8 (&br, 255); + + gst_byte_writer_put_data (&br, cc_meta->data, cc_meta->size); + + /* 8 marker bits */ + gst_byte_writer_put_uint8 (&br, 255); + + p = g_new0 (mfxPayload, 1); + p->BufSize = gst_byte_writer_get_pos (&br); + p->NumBit = p->BufSize * 8; + p->Type = 4; + p->Data = gst_byte_writer_reset_and_get_data (&br); + + g_ptr_array_add (payload, p); + + return TRUE; +} + +static gboolean +gst_qsv_h264_enc_attach_payload (GstQsvEncoder * encoder, + GstVideoCodecFrame * frame, GPtrArray * payload) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (encoder); + + if (self->cc_insert == GST_QSV_H264_ENC_SEI_DISABLED) + return TRUE; + + gst_buffer_foreach_meta (frame->input_buffer, + (GstBufferForeachMetaFunc) gst_qsv_h264_enc_foreach_caption_meta, + payload); + + return TRUE; +} + +static 
GstBuffer * +gst_qsv_h264_enc_create_output_buffer (GstQsvEncoder * encoder, + mfxBitstream * bitstream) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (encoder); + GstBuffer *buf; + GstH264ParserResult rst; + GstH264NalUnit nalu; + + if (!self->packetized) { + buf = gst_buffer_new_memdup (bitstream->Data + bitstream->DataOffset, + bitstream->DataLength); + } else { + buf = gst_buffer_new (); + rst = gst_h264_parser_identify_nalu (self->parser, + bitstream->Data + bitstream->DataOffset, 0, bitstream->DataLength, + &nalu); + if (rst == GST_H264_PARSER_NO_NAL_END) + rst = GST_H264_PARSER_OK; + + while (rst == GST_H264_PARSER_OK) { + GstMemory *mem; + guint8 *data; + + data = (guint8 *) g_malloc0 (nalu.size + 4); + GST_WRITE_UINT32_BE (data, nalu.size); + memcpy (data + 4, nalu.data + nalu.offset, nalu.size); + + mem = gst_memory_new_wrapped ((GstMemoryFlags) 0, data, nalu.size + 4, + 0, nalu.size + 4, data, (GDestroyNotify) g_free); + gst_buffer_append_memory (buf, mem); + + rst = gst_h264_parser_identify_nalu (self->parser, + bitstream->Data + bitstream->DataOffset, nalu.offset + nalu.size, + bitstream->DataLength, &nalu); + + if (rst == GST_H264_PARSER_NO_NAL_END) + rst = GST_H264_PARSER_OK; + } + } + + /* This buffer must be the end of a frame boundary */ + GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_MARKER); + + return buf; +} + +static GstQsvEncoderReconfigure +gst_qsv_h264_enc_check_reconfigure (GstQsvEncoder * encoder, + mfxVideoParam * param) +{ + GstQsvH264Enc *self = GST_QSV_H264_ENC (encoder); + + g_mutex_lock (&self->prop_lock); + + if (self->property_updated) { + g_mutex_unlock (&self->prop_lock); + return GST_QSV_ENCODER_RECONFIGURE_FULL; + } + + if (self->bitrate_updated) { + /* Update @param with updated bitrate values so that baseclass can + * call MFXVideoENCODE_Query() with updated values */ + param->mfx.TargetKbps = self->bitrate; + param->mfx.MaxKbps = self->max_bitrate; + param->mfx.QPI = self->qp_i; + param->mfx.QPP = self->qp_p; + param->mfx.QPB = self->qp_b; + g_mutex_unlock (&self->prop_lock); + + return GST_QSV_ENCODER_RECONFIGURE_BITRATE; + } + + g_mutex_unlock (&self->prop_lock); + + return GST_QSV_ENCODER_RECONFIGURE_NONE; +} + +typedef struct +{ + guint width; + guint height; +} Resolution; + +void +gst_qsv_h264_enc_register (GstPlugin * plugin, guint rank, guint impl_index, + GstObject * device, mfxSession session) +{ + mfxStatus status; + mfxVideoParam param; + mfxInfoMFX *mfx; + static const Resolution resolutions_to_check[] = { + {1280, 720}, {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160}, + {7680, 4320}, {8192, 4320} + }; + std::vector < mfxU16 > supported_profiles; + Resolution max_resolution; + bool supports_interlaced = false; + + memset (&param, 0, sizeof (mfxVideoParam)); + memset (&max_resolution, 0, sizeof (Resolution)); + + param.AsyncDepth = 4; + param.IOPattern = MFX_IOPATTERN_IN_VIDEO_MEMORY; + + mfx = &param.mfx; + mfx->CodecId = MFX_CODEC_AVC; + + mfx->FrameInfo.Width = GST_ROUND_UP_16 (320); + mfx->FrameInfo.Height = GST_ROUND_UP_16 (240); + mfx->FrameInfo.CropW = 320; + mfx->FrameInfo.CropH = 240; + mfx->FrameInfo.FrameRateExtN = 30; + mfx->FrameInfo.FrameRateExtD = 1; + mfx->FrameInfo.AspectRatioW = 1; + mfx->FrameInfo.AspectRatioH = 1; + mfx->FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420; + mfx->FrameInfo.FourCC = MFX_FOURCC_NV12; + mfx->FrameInfo.BitDepthLuma = 8; + mfx->FrameInfo.BitDepthChroma = 8; + mfx->FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE; + + /* Check supported profiles */ + for (guint i = 0; i < G_N_ELEMENTS (profile_map); 
i++) { + mfx->CodecProfile = profile_map[i].profile; + + if (MFXVideoENCODE_Query (session, &param, &param) != MFX_ERR_NONE) + continue; + + supported_profiles.push_back (profile_map[i].profile); + } + + if (supported_profiles.empty ()) { + GST_INFO ("Device doesn't support H.264 encoding"); + return; + } + + mfx->CodecProfile = supported_profiles[0]; + + /* Check max-resolution */ + for (guint i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) { + mfx->FrameInfo.Width = GST_ROUND_UP_16 (resolutions_to_check[i].width); + mfx->FrameInfo.Height = GST_ROUND_UP_16 (resolutions_to_check[i].height); + mfx->FrameInfo.CropW = resolutions_to_check[i].width; + mfx->FrameInfo.CropH = resolutions_to_check[i].height; + + if (MFXVideoENCODE_Query (session, &param, &param) != MFX_ERR_NONE) + break; + + max_resolution.width = resolutions_to_check[i].width; + max_resolution.height = resolutions_to_check[i].height; + } + + GST_INFO ("Maximum supported resolution: %dx%d", + max_resolution.width, max_resolution.height); + + /* TODO: check supported rate-control methods and expose only supported + * methods, since the device might not be able to support some of them */ + + /* Check interlaced encoding */ + /* *INDENT-OFF* */ + for (const auto &iter: supported_profiles) { + if (iter == MFX_PROFILE_AVC_MAIN || + iter == MFX_PROFILE_AVC_HIGH) { + /* Make sure non-lowpower mode is used, otherwise QSV will not accept + * interlaced format during Query() */ + mfx->LowPower = MFX_CODINGOPTION_UNKNOWN; + + /* Interlaced encoding is not compatible with MFX_RATECONTROL_VCM, so use + * CBR here to make sure only profile support is being checked */ + mfx->RateControlMethod = MFX_RATECONTROL_CBR; + + /* A level that is too high (MFX_LEVEL_AVC_41) or too low (MFX_LEVEL_AVC_21) + * will not be accepted for interlaced encoding */ + mfx->CodecLevel = MFX_LEVEL_UNKNOWN; + mfx->CodecProfile = iter; + + mfx->FrameInfo.Width = GST_ROUND_UP_16 (320); + mfx->FrameInfo.Height = GST_ROUND_UP_32 (240); + mfx->FrameInfo.CropW = 320; + mfx->FrameInfo.CropH = 240; + mfx->FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF; + + status = MFXVideoENCODE_Query (session, &param, &param); + + if (status == MFX_ERR_NONE) { + GST_INFO ("Interlaced encoding is supported"); + supports_interlaced = true; + break; + } + } + } + /* *INDENT-ON* */ + + /* To cover both landscape and portrait, + * select max value (width in this case) */ + guint resolution = MAX (max_resolution.width, max_resolution.height); + std::string sink_caps_str = "video/x-raw, format=(string) NV12"; + + sink_caps_str += ", width=(int) [ 16, " + std::to_string (resolution) + " ]"; + sink_caps_str += ", height=(int) [ 16, " + std::to_string (resolution) + " ]"; + if (!supports_interlaced) + sink_caps_str += ", interlace-mode = (string) progressive"; + + GstCaps *sink_caps = gst_caps_from_string (sink_caps_str.c_str ()); + + /* TODO: Add support for VA */ +#ifdef G_OS_WIN32 + GstCaps *d3d11_caps = gst_caps_copy (sink_caps); + GstCapsFeatures *caps_features = + gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr); + gst_caps_set_features_simple (d3d11_caps, caps_features); + gst_caps_append (d3d11_caps, sink_caps); + sink_caps = d3d11_caps; +#endif + + std::string src_caps_str = "video/x-h264"; + src_caps_str += ", width=(int) [ 16, " + std::to_string (resolution) + " ]"; + src_caps_str += ", height=(int) [ 16, " + std::to_string (resolution) + " ]"; + + src_caps_str += ", stream-format= (string) { avc, byte-stream }"; + src_caps_str += ", alignment=(string) au"; + /* *INDENT-OFF* */ + if (supported_profiles.size () > 1) { + src_caps_str += ", profile=(string) { 
"; + bool first = true; + for (const auto &iter: supported_profiles) { + if (!first) { + src_caps_str += ", "; + } + + src_caps_str += gst_qsv_h264_profile_to_string (iter); + first = false; + } + src_caps_str += " }"; + } else { + src_caps_str += ", profile=(string) "; + src_caps_str += gst_qsv_h264_profile_to_string (supported_profiles[0]); + } + /* *INDENT-ON* */ + + GstCaps *src_caps = gst_caps_from_string (src_caps_str.c_str ()); + + GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED); + GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED); + + GstQsvH264EncClassData *cdata = g_new0 (GstQsvH264EncClassData, 1); + cdata->sink_caps = sink_caps; + cdata->src_caps = src_caps; + cdata->impl_index = impl_index; + +#ifdef G_OS_WIN32 + gint64 device_luid; + g_object_get (device, "adapter-luid", &device_luid, nullptr); + cdata->adapter_luid = device_luid; +#else + gchar *display_path; + g_object_get (device, "path", &display_path, nullptr); + cdata->display_path = display_path; +#endif + + GType type; + gchar *type_name; + gchar *feature_name; + GTypeInfo type_info = { + sizeof (GstQsvH264EncClass), + nullptr, + nullptr, + (GClassInitFunc) gst_qsv_h264_enc_class_init, + nullptr, + cdata, + sizeof (GstQsvH264Enc), + 0, + (GInstanceInitFunc) gst_qsv_h264_enc_init, + }; + + type_name = g_strdup ("GstQsvH264Enc"); + feature_name = g_strdup ("qsvh264enc"); + + gint index = 0; + while (g_type_from_name (type_name)) { + index++; + g_free (type_name); + g_free (feature_name); + type_name = g_strdup_printf ("GstQsvH264Device%dEnc", index); + feature_name = g_strdup_printf ("qsvh264device%denc", index); + } + + type = g_type_register_static (GST_TYPE_QSV_ENCODER, type_name, &type_info, + (GTypeFlags) 0); + + if (rank > 0 && index != 0) + rank--; + + if (!gst_element_register (plugin, feature_name, rank, type)) + GST_WARNING ("Failed to register plugin '%s'", type_name); + + g_free (type_name); + g_free (feature_name); +} diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.h b/subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.h new file mode 100644 index 0000000..ebdaf49 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvh264enc.h @@ -0,0 +1,34 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include +#include "gstqsvencoder.h" + +G_BEGIN_DECLS + +void gst_qsv_h264_enc_register (GstPlugin * plugin, + guint rank, + guint impl_index, + GstObject * device, + mfxSession session); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.cpp b/subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.cpp new file mode 100644 index 0000000..d4fd62f --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.cpp @@ -0,0 +1,203 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstqsvutils.h" + +#ifdef G_OS_WIN32 +#include +#include + +/* *INDENT-OFF* */ +using namespace Microsoft::WRL; +/* *INDENT-ON* */ +#else +#include +#endif + +static mfxLoader _loader = nullptr; + +mfxLoader +gst_qsv_get_loader (void) +{ + static gsize load_once = 0; + + if (g_once_init_enter (&load_once)) { + _loader = MFXLoad (); + g_once_init_leave (&load_once, 1); + } + + return _loader; +} + +void +gst_qsv_deinit (void) +{ + g_clear_pointer (&_loader, MFXUnload); +} + +#ifdef G_OS_WIN32 +static GList * +gst_qsv_get_d3d11_devices (void) +{ + GList *rst = nullptr; + HRESULT hr; + ComPtr < IDXGIFactory1 > factory; + + hr = CreateDXGIFactory1 (IID_PPV_ARGS (&factory)); + if (FAILED (hr)) + return nullptr; + + for (guint idx = 0;; idx++) { + ComPtr < IDXGIAdapter1 > adapter; + DXGI_ADAPTER_DESC desc; + gint64 luid; + GstD3D11Device *device; + ComPtr < ID3D10Multithread > multi_thread; + ID3D11Device *device_handle; + + hr = factory->EnumAdapters1 (idx, &adapter); + if (FAILED (hr)) + return rst; + + hr = adapter->GetDesc (&desc); + if (FAILED (hr)) + continue; + + if (desc.VendorId != 0x8086) + continue; + + luid = gst_d3d11_luid_to_int64 (&desc.AdapterLuid); + device = gst_d3d11_device_new_for_adapter_luid (luid, + D3D11_CREATE_DEVICE_BGRA_SUPPORT); + + if (!device) + continue; + + device_handle = gst_d3d11_device_get_device_handle (device); + hr = device_handle->QueryInterface (IID_PPV_ARGS (&multi_thread)); + if (FAILED (hr)) { + gst_object_unref (device); + continue; + } + + /* Should enable the multithread protection layer, otherwise QSV will return + * an error code when this handle is passed to QSV via + * MFXVideoCORE_SetHandle() */ + multi_thread->SetMultithreadProtected (TRUE); + + rst = g_list_append (rst, device); + } + + return rst; +} +#else /* G_OS_WIN32 */ +static GList * +gst_qsv_get_va_displays (void) +{ + gchar path[64]; + GList *rst = nullptr; + + for (guint i = 0; i < 8; i++) { + GstVaDisplay *display; + GstVaImplementation impl; + + g_snprintf (path, sizeof (path), "/dev/dri/renderD%d", 128 + i); + if (!g_file_test (path, G_FILE_TEST_EXISTS)) + continue; + + display = gst_va_display_drm_new_from_path (path); + if (!display) + 
continue; + + impl = gst_va_display_get_implementation (display); + if (impl != GST_VA_IMPLEMENTATION_INTEL_I965 && + impl != GST_VA_IMPLEMENTATION_INTEL_IHD) { + gst_object_unref (display); + continue; + } + + rst = g_list_append (rst, display); + } + + return rst; +} +#endif + +GList * +gst_qsv_get_platform_devices (void) +{ +#ifdef G_OS_WIN32 + return gst_qsv_get_d3d11_devices (); +#else + return gst_qsv_get_va_displays (); +#endif +} + +const gchar * +gst_qsv_status_to_string (mfxStatus status) +{ +#define CASE(err) \ + case err: \ + return G_STRINGIFY (err); + + switch (status) { + CASE (MFX_ERR_NONE); + CASE (MFX_ERR_UNKNOWN); + CASE (MFX_ERR_NULL_PTR); + CASE (MFX_ERR_UNSUPPORTED); + CASE (MFX_ERR_MEMORY_ALLOC); + CASE (MFX_ERR_NOT_ENOUGH_BUFFER); + CASE (MFX_ERR_INVALID_HANDLE); + CASE (MFX_ERR_LOCK_MEMORY); + CASE (MFX_ERR_NOT_INITIALIZED); + CASE (MFX_ERR_NOT_FOUND); + CASE (MFX_ERR_MORE_DATA); + CASE (MFX_ERR_MORE_SURFACE); + CASE (MFX_ERR_ABORTED); + CASE (MFX_ERR_DEVICE_LOST); + CASE (MFX_ERR_INCOMPATIBLE_VIDEO_PARAM); + CASE (MFX_ERR_INVALID_VIDEO_PARAM); + CASE (MFX_ERR_UNDEFINED_BEHAVIOR); + CASE (MFX_ERR_DEVICE_FAILED); + CASE (MFX_ERR_MORE_BITSTREAM); + CASE (MFX_ERR_GPU_HANG); + CASE (MFX_ERR_REALLOC_SURFACE); + CASE (MFX_ERR_RESOURCE_MAPPED); + CASE (MFX_ERR_NOT_IMPLEMENTED); + CASE (MFX_WRN_IN_EXECUTION); + CASE (MFX_WRN_DEVICE_BUSY); + CASE (MFX_WRN_VIDEO_PARAM_CHANGED); + CASE (MFX_WRN_PARTIAL_ACCELERATION); + CASE (MFX_WRN_INCOMPATIBLE_VIDEO_PARAM); + CASE (MFX_WRN_VALUE_NOT_CHANGED); + CASE (MFX_WRN_OUT_OF_RANGE); + CASE (MFX_WRN_FILTER_SKIPPED); + CASE (MFX_ERR_NONE_PARTIAL_OUTPUT); + CASE (MFX_WRN_ALLOC_TIMEOUT_EXPIRED); + default: + break; + } +#undef CASE + + return "Unknown"; +} diff --git a/subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.h b/subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.h new file mode 100644 index 0000000..d1a540f --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/gstqsvutils.h @@ -0,0 +1,57 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include + +G_BEGIN_DECLS + +mfxLoader gst_qsv_get_loader (void); + +void gst_qsv_deinit (void); + +GList * gst_qsv_get_platform_devices (void); + +const gchar * gst_qsv_status_to_string (mfxStatus status); + +/* helper macro for debugging log */ +#define QSV_STATUS_ARGS(status) \ + status, gst_qsv_status_to_string (status) + +static inline GstClockTime +gst_qsv_timestamp_to_gst (mfxU64 timestamp) +{ + if (timestamp == (mfxU64) MFX_TIMESTAMP_UNKNOWN) + return GST_CLOCK_TIME_NONE; + + return gst_util_uint64_scale (timestamp, GST_SECOND, 90000); +} + +static inline mfxU64 +gst_qsv_timestamp_from_gst (GstClockTime timestamp) +{ + if (!GST_CLOCK_TIME_IS_VALID (timestamp)) + return (mfxU64) MFX_TIMESTAMP_UNKNOWN; + + return gst_util_uint64_scale (timestamp, 90000, GST_SECOND); +} + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/qsv/libmfx/meson.build b/subprojects/gst-plugins-bad/sys/qsv/libmfx/meson.build new file mode 100644 index 0000000..db78fa87 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/libmfx/meson.build @@ -0,0 +1,100 @@ +mfx_win_sources = [ + 'dispatcher/windows/main.cpp', + 'dispatcher/windows/mfx_dispatcher_log.cpp', + 'dispatcher/windows/mfx_dispatcher.cpp', + 'dispatcher/windows/mfx_dxva2_device.cpp', + 'dispatcher/windows/mfx_function_table.cpp', + 'dispatcher/windows/mfx_load_dll.cpp', +] + +mfx_win32_sources = [ + 'dispatcher/windows/mfx_critical_section.cpp', + 'dispatcher/windows/mfx_driver_store_loader.cpp', + 'dispatcher/windows/mfx_library_iterator.cpp', + 'dispatcher/windows/mfx_win_reg_key.cpp', +] + +mfx_uwp_sources = [ + 'dispatcher/windows/mfx_dispatcher_uwp.cpp', + 'dispatcher/windows/mfx_driver_store_loader.cpp', +] + +mfx_linux_sources = [ + 'dispatcher/linux/mfxloader.cpp', +] + +vpl_sources = [ + 'dispatcher/vpl/mfx_dispatcher_vpl_config.cpp', + 'dispatcher/vpl/mfx_dispatcher_vpl_loader.cpp', + 'dispatcher/vpl/mfx_dispatcher_vpl_log.cpp', + 'dispatcher/vpl/mfx_dispatcher_vpl_lowlatency.cpp', + 'dispatcher/vpl/mfx_dispatcher_vpl_msdk.cpp', + 'dispatcher/vpl/mfx_dispatcher_vpl.cpp', +] + +libmfx_extra_args = [ + '-DONEVPL_EXPERIMENTAL' +] +libmfx_extra_deps = [] + +libmfx_sources = vpl_sources +if host_system == 'windows' + libmfx_sources += mfx_win_sources + # FIXME: check UWP only + libmfx_sources += mfx_win32_sources +elif host_system == 'linux' + libmfx_sources += mfx_linux_sources + + # Unlike Windows (libmfxhw64.dll is part of driver so it's system library), + # user can build/install libmfx on Linux, so we need to define + # "MFX_MODULES_DIR" for dispatcher to be able to search libmfx from + # additional search path. 
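+  # For example (hypothetical path, for illustration only): configuring with +  # -Dmfx-modules-dir=/usr/lib/x86_64-linux-gnu makes the define below expand to +  # -DMFX_MODULES_DIR="/usr/lib/x86_64-linux-gnu", so the dispatcher can also +  # dlopen() a libmfx/oneVPL runtime from that directory in addition to the +  # default library search path.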
+ libmfx_modules_dir = get_option('mfx-modules-dir') + if libmfx_modules_dir == '' + # This "libdir" will be likely wrong but may be fine since libmfx library + # will be installed in the distro default library path as part of libmfx package + # and dispatcher will try to load library from the distro default library path first + libmfx_modules_dir = join_paths(prefix, get_option('libdir')) + endif + + libmfx_extra_args += ['-DMFX_MODULES_DIR="@0@"'.format(libmfx_modules_dir)] + libmfx_extra_deps += [ + cc.find_library('dl'), + cc.find_library('pthread'), + ] +else + error('Only Windows or Linux build is supported') +endif + +# suppress build warnings +if cc.get_id() == 'msvc' + libmfx_extra_args += cc.get_supported_arguments([ + '/wd4189', # local variable is initialized but not referenced + ]) +else + libmfx_extra_args += cc.get_supported_arguments([ + '-Wno-missing-declarations', + '-Wno-deprecated-declarations', + '-Wno-redundant-decls', + '-Wno-unused-but-set-variable', + '-Wno-unused-variable', + # clang complains + '-Wno-missing-braces', + '-Wno-format-nonliteral', + ]) +endif + +libmfx_incl = include_directories('dispatcher', 'api') + +libmfx_static = static_library('libmfx-static', + libmfx_sources, + c_args : libmfx_extra_args, + cpp_args : libmfx_extra_args, + dependencies : libmfx_extra_deps, + include_directories : libmfx_incl +) + +libmfx_internal_dep = declare_dependency( + link_with : libmfx_static, + include_directories: [libmfx_incl, include_directories('api/vpl')] +) diff --git a/subprojects/gst-plugins-bad/sys/qsv/meson.build b/subprojects/gst-plugins-bad/sys/qsv/meson.build new file mode 100644 index 0000000..ba31bd1 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/meson.build @@ -0,0 +1,92 @@ +qsv_sources = [ + 'gstqsvallocator.cpp', + 'gstqsvencoder.cpp', + 'gstqsvh264enc.cpp', + 'gstqsvutils.cpp', + 'plugin.cpp', +] + +qsv_d3d11_sources = [ + 'gstqsvallocator_d3d11.cpp', +] + +qsv_va_sources = [ + 'gstqsvallocator_va.cpp', +] + +extra_args = [ + '-DGST_USE_UNSTABLE_API', +] + +qsv_option = get_option('qsv') +if qsv_option.disabled() + subdir_done() +endif + +qsv_platform_deps = [] +if host_system == 'windows' + if not gstd3d11_dep.found() + if qsv_option.enabled() + error('The qsv was enabled explicitly, but required d3d11 was not found') + else + subdir_done() + endif + endif + + code = ''' + #include + #if !(WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP) && !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)) + #error "Not building for UWP" + #endif''' + if cc.compiles(code, name : 'building for UWP') + if qsv_option.enabled() + error('qsv plugin does not support UWP') + else + subdir_done() + endif + endif + + qsv_sources += qsv_d3d11_sources + qsv_platform_deps += [gstd3d11_dep] +elif host_system == 'linux' and host_machine.cpu_family() == 'x86_64' + if not gstva_dep.found() + if qsv_option.enabled() + error('The qsv was enabled explicitly, but required va was not found') + else + subdir_done() + endif + endif + qsv_sources += qsv_va_sources + qsv_platform_deps += [gstva_dep] +else + if qsv_option.enabled() + error('QSV plugin supports only Windows or Linux') + else + subdir_done() + endif +endif + +# suppress deprecated use of MFXInitEx. 
We don't use the method, +# but used in "mfxvideo++.h" +# and MinGW 32bits compiler seems to be complaining about redundant-decls +if cc.get_id() != 'msvc' + extra_args += cc.get_supported_arguments([ + '-Wno-redundant-decls', + '-Wno-deprecated-declarations', + ]) +endif + +subdir('libmfx') + +gstqsv = library('gstqsv', + qsv_sources, + c_args : gst_plugins_bad_args + extra_args, + cpp_args : gst_plugins_bad_args + extra_args, + include_directories : [configinc], + dependencies : [gstbase_dep, gstvideo_dep, gstcodecparsers_dep, libmfx_internal_dep] + qsv_platform_deps, + install : true, + install_dir : plugins_install_dir, +) + +pkgconfig.generate(gstqsv, install_dir : plugins_pkgconfig_install_dir) +plugins += [gstqsv] diff --git a/subprojects/gst-plugins-bad/sys/qsv/plugin.cpp b/subprojects/gst-plugins-bad/sys/qsv/plugin.cpp new file mode 100644 index 0000000..b102e28 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/qsv/plugin.cpp @@ -0,0 +1,266 @@ +/* GStreamer + * Copyright (C) 2021 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include "gstqsvutils.h" +#include "gstqsvh264enc.h" +#include + +#ifdef G_OS_WIN32 +#define WIN32_LEAN_AND_MEAN +#include +#include +#include +#else +#include +#include +#endif + +GST_DEBUG_CATEGORY (gst_qsv_debug); +GST_DEBUG_CATEGORY (gst_qsv_allocator_debug); +GST_DEBUG_CATEGORY (gst_qsv_encoder_debug); +GST_DEBUG_CATEGORY (gst_qsv_h264_enc_debug); + +#define GST_CAT_DEFAULT gst_qsv_debug + +#ifdef G_OS_WIN32 +#define MFX_ACCEL_MODE MFX_ACCEL_MODE_VIA_D3D11 +#else +#define MFX_ACCEL_MODE MFX_ACCEL_MODE_VIA_VAAPI +#endif + +#ifdef G_OS_WIN32 +static mfxSession +create_session_with_platform_device (mfxLoader loader, + mfxImplDescription * desc, guint impl_index, GstObject ** d3d11_device, + GList ** devices) +{ + mfxSession session = nullptr; + mfxStatus status; + GstD3D11Device *selected = nullptr; + GList *list = *devices; + GList *iter; + mfxU16 device_id = 0; + + *d3d11_device = nullptr; + + status = MFXCreateSession (loader, impl_index, &session); + if (status != MFX_ERR_NONE) { + GST_WARNING ("Failed to create session with index %d, %d (%s)", + impl_index, QSV_STATUS_ARGS (status)); + return nullptr; + } + + if (desc->ApiVersion.Major >= 2 || + (desc->ApiVersion.Major == 1 && desc->ApiVersion.Minor >= 19)) { + mfxPlatform platform; + + memset (&platform, 0, sizeof (mfxPlatform)); + + if (MFXVideoCORE_QueryPlatform (session, &platform) == MFX_ERR_NONE) { + device_id = platform.DeviceId; + + /* XXX: re-create session, MFXVideoCORE_QueryPlatform() may cause + * later MFXVideoCORE_SetHandle() call failed with + * MFX_ERR_UNDEFINED_BEHAVIOR error */ + g_clear_pointer (&session, MFXClose); + + status = MFXCreateSession (loader, impl_index, &session); + if (status != 
MFX_ERR_NONE) { + GST_WARNING ("Failed to re-create session with index %d, %d (%s)", + impl_index, QSV_STATUS_ARGS (status)); + return nullptr; + } + } + } + + if (device_id) { + for (iter = list; iter; iter = g_list_next (iter)) { + GstD3D11Device *dev = GST_D3D11_DEVICE (iter->data); + guint dev_id; + + g_object_get (dev, "device-id", &dev_id, nullptr); + if (dev_id == (guint) device_id) { + selected = dev; + list = g_list_delete_link (list, iter); + break; + } + } + } + + if (!selected) { + /* Unknown device id, pick the first device */ + selected = GST_D3D11_DEVICE (list->data); + list = g_list_delete_link (list, list); + } + + *devices = list; + + status = MFXVideoCORE_SetHandle (session, MFX_HANDLE_D3D11_DEVICE, + gst_d3d11_device_get_device_handle (selected)); + if (status != MFX_ERR_NONE) { + GST_WARNING ("Failed to set d3d11 device handle, %d (%s)", + QSV_STATUS_ARGS (status)); + gst_object_unref (selected); + MFXClose (session); + + return nullptr; + } + + *d3d11_device = GST_OBJECT (selected); + + return session; +} +#else +static mfxSession +create_session_with_platform_device (mfxLoader loader, + mfxImplDescription * desc, guint impl_index, GstObject ** va_display, + GList ** devices) +{ + mfxSession session = nullptr; + mfxStatus status; + GstVaDisplay *selected; + GList *list = *devices; + + *va_display = nullptr; + + status = MFXCreateSession (loader, impl_index, &session); + if (status != MFX_ERR_NONE) { + GST_WARNING ("Failed to create session with index %d, %d (%s)", + impl_index, QSV_STATUS_ARGS (status)); + return nullptr; + } + + /* XXX: what's the relation between implementation index and VA display ? + * Pick the first available device for now */ + selected = GST_VA_DISPLAY (list->data); + list = g_list_delete_link (list, list); + *devices = list; + + status = MFXVideoCORE_SetHandle (session, MFX_HANDLE_VA_DISPLAY, + gst_va_display_get_va_dpy (selected)); + if (status != MFX_ERR_NONE) { + GST_WARNING ("Failed to set display handle, %d (%s)", + QSV_STATUS_ARGS (status)); + gst_object_unref (selected); + MFXClose (session); + + return nullptr; + } + + *va_display = GST_OBJECT (selected); + + return session; +} +#endif + +static void +plugin_deinit (gpointer data) +{ + gst_qsv_deinit (); +} + +static gboolean +plugin_init (GstPlugin * plugin) +{ + mfxLoader loader; + guint i = 0; + GList *platform_devices = nullptr; + +#ifdef G_OS_WIN32 + /* D3D11 Video API is supported since Windows 8. + * Do we want to support old OS (Windows 7 for example) with D3D9 ?? 
*/ + if (!IsWindows8OrGreater ()) + return TRUE; +#endif + + GST_DEBUG_CATEGORY_INIT (gst_qsv_debug, "qsv", 0, "Intel Quick Sync Video"); + + loader = gst_qsv_get_loader (); + if (!loader) + return TRUE; + + platform_devices = gst_qsv_get_platform_devices (); + if (!platform_devices) { + gst_qsv_deinit (); + return TRUE; + } + + GST_INFO ("Found %d platform devices", g_list_length (platform_devices)); + + GST_DEBUG_CATEGORY_INIT (gst_qsv_encoder_debug, + "qsvencoder", 0, "qsvencoder"); + GST_DEBUG_CATEGORY_INIT (gst_qsv_allocator_debug, + "gstqsvallocator", 0, "gstqsvallocator"); + GST_DEBUG_CATEGORY_INIT (gst_qsv_h264_enc_debug, + "qsvh264enc", 0, "qsvh264enc"); + + do { + mfxStatus status = MFX_ERR_NONE; + mfxSession session = nullptr; + mfxImplDescription *desc = nullptr; + GstObject *device = nullptr; + + status = MFXEnumImplementations (loader, + i, MFX_IMPLCAPS_IMPLDESCSTRUCTURE, (mfxHDL *) & desc); + + if (status != MFX_ERR_NONE) + break; + + if ((desc->Impl & MFX_IMPL_TYPE_HARDWARE) == 0) + goto next; + + if ((desc->AccelerationMode & MFX_ACCEL_MODE) == 0) + goto next; + + session = create_session_with_platform_device (loader, desc, i, &device, + &platform_devices); + if (!session) + goto next; + + gst_qsv_h264_enc_register (plugin, GST_RANK_NONE, i, device, session); + + next: + MFXDispReleaseImplDescription (loader, desc); + g_clear_pointer (&session, MFXClose); + gst_clear_object (&device); + i++; + + /* What's the possible maximum number of impl/device ? */ + } while (i < 16 && platform_devices != nullptr); + + if (platform_devices) + g_list_free_full (platform_devices, (GDestroyNotify) gst_object_unref); + + g_object_set_data_full (G_OBJECT (plugin), "plugin-qsv-shutdown", + (gpointer) "shutdown-data", (GDestroyNotify) plugin_deinit); + + return TRUE; +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + qsv, + "Intel Quick Sync Video plugin", + plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) -- 2.7.4
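For reference, a minimal smoke test for the new encoder (not part of the patch, just an illustrative sketch): the element name "qsvh264enc" comes from the registration code above, while the "bitrate" property name (in kbps) and the auxiliary elements (videotestsrc, h264parse, matroskamux, filesink) are assumed to be available from a regular GStreamer installation.

#include <gst/gst.h>

int
main (int argc, char ** argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = nullptr;

  gst_init (&argc, &argv);

  /* NV12 matches the sink caps advertised by this plugin and 1280x720 is
   * within the probed resolution range */
  pipeline = gst_parse_launch ("videotestsrc num-buffers=300 ! "
      "video/x-raw,format=NV12,width=1280,height=720,framerate=30/1 ! "
      "qsvh264enc bitrate=4000 ! h264parse ! matroskamux ! "
      "filesink location=qsv-test.mkv", &error);
  if (!pipeline) {
    g_printerr ("Failed to build pipeline: %s\n",
        error ? error->message : "unknown error");
    g_clear_error (&error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until the stream finishes or an error is posted */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
  if (msg)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}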