option('aes', type : 'feature', value : 'auto', description : 'AES encryption/decryption plugin')
option('aom', type : 'feature', value : 'auto', description : 'AOM AV1 video codec plugin')
option('avtp', type : 'feature', value : 'auto', description : 'Audio/Video Transport Protocol (AVTP) plugin')
+option('amfcodec', type : 'feature', value : 'auto', description : 'AMD AMF codec plugin')
option('androidmedia', type : 'feature', value : 'auto', description : 'Video capture and codec plugins for Android')
option('applemedia', type : 'feature', value : 'auto', description : 'Video capture and codec access plugins for macOS and iOS')
option('asio', type : 'feature', value : 'auto', description : 'Steinberg Audio Streaming Input Output (ASIO) plugin')
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <components/Component.h>
+#include <core/Factory.h>
+#include "gstamfencoder.h"
+
+#include <gst/d3d11/gstd3d11.h>
+#include <wrl.h>
+#include <string.h>
+#include <timeapi.h>
+
+/* *INDENT-OFF* */
+using namespace Microsoft::WRL;
+using namespace amf;
+/* *INDENT-ON* */
+
+GST_DEBUG_CATEGORY_STATIC (gst_amf_encoder_debug);
+#define GST_CAT_DEFAULT gst_amf_encoder_debug
+
+/* GUID attached to input textures via ID3D11Texture2D::SetPrivateData() in
+ * handle_frame() to communicate the texture-array slice index; presumably
+ * read back by the AMF runtime's D3D11 path — TODO confirm against AMF SDK */
+static GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, 0x99, 0xd3,
+  0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf
+};
+
+/* AMF property name under which we stash per-frame user data on a surface */
+#define GST_AMF_BUFFER_PROP L"GstAmfFrameData"
+
+/* Keeps an input GstBuffer mapped (D3D11) for as long as a frame is queued
+ * inside the AMF component; released via gst_amf_frame_data_free() */
+typedef struct
+{
+  GstBuffer *buffer;
+  GstMapInfo info;
+} GstAmfEncoderFrameData;
+
+struct _GstAmfEncoderPrivate
+{
+  /* Set once by gst_amf_encoder_set_subclass_data() before open() */
+  gint64 adapter_luid;
+  const wchar_t *codec_id;
+
+  GstD3D11Device *device;
+  /* Raw AMF pointers; explicitly Terminate()/Release()'d in close()/reset() */
+  AMFContext *context;
+  AMFComponent *comp;
+  /* D3D11 pool used to normalize input buffers before submission */
+  GstBufferPool *internal_pool;
+
+  GstVideoCodecState *input_state;
+
+  /* High precision clock */
+  guint timer_resolution;
+};
+
+#define gst_amf_encoder_parent_class parent_class
+G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE (GstAmfEncoder, gst_amf_encoder,
+ GST_TYPE_VIDEO_ENCODER);
+
+static void gst_amf_encoder_dispose (GObject * object);
+static void gst_amf_encoder_finalize (GObject * object);
+static void gst_amf_encoder_set_context (GstElement * element,
+ GstContext * context);
+static gboolean gst_amf_encoder_open (GstVideoEncoder * encoder);
+static gboolean gst_amf_encoder_stop (GstVideoEncoder * encoder);
+static gboolean gst_amf_encoder_close (GstVideoEncoder * encoder);
+static gboolean gst_amf_encoder_set_format (GstVideoEncoder * encoder,
+ GstVideoCodecState * state);
+static GstFlowReturn gst_amf_encoder_handle_frame (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame);
+static GstFlowReturn gst_amf_encoder_finish (GstVideoEncoder * encoder);
+static gboolean gst_amf_encoder_flush (GstVideoEncoder * encoder);
+static gboolean gst_amf_encoder_sink_query (GstVideoEncoder * encoder,
+ GstQuery * query);
+static gboolean gst_amf_encoder_src_query (GstVideoEncoder * encoder,
+ GstQuery * query);
+static gboolean gst_amf_encoder_propose_allocation (GstVideoEncoder * encoder,
+ GstQuery * query);
+
+/* Wires up GObject/GstElement/GstVideoEncoder vfuncs and registers the
+ * "amfencoder" debug category */
+static void
+gst_amf_encoder_class_init (GstAmfEncoderClass * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstVideoEncoderClass *videoenc_class = GST_VIDEO_ENCODER_CLASS (klass);
+
+  object_class->dispose = gst_amf_encoder_dispose;
+  object_class->finalize = gst_amf_encoder_finalize;
+
+  element_class->set_context = GST_DEBUG_FUNCPTR (gst_amf_encoder_set_context);
+
+  videoenc_class->open = GST_DEBUG_FUNCPTR (gst_amf_encoder_open);
+  videoenc_class->stop = GST_DEBUG_FUNCPTR (gst_amf_encoder_stop);
+  videoenc_class->close = GST_DEBUG_FUNCPTR (gst_amf_encoder_close);
+  videoenc_class->set_format = GST_DEBUG_FUNCPTR (gst_amf_encoder_set_format);
+  videoenc_class->handle_frame =
+      GST_DEBUG_FUNCPTR (gst_amf_encoder_handle_frame);
+  videoenc_class->finish = GST_DEBUG_FUNCPTR (gst_amf_encoder_finish);
+  videoenc_class->flush = GST_DEBUG_FUNCPTR (gst_amf_encoder_flush);
+  videoenc_class->sink_query = GST_DEBUG_FUNCPTR (gst_amf_encoder_sink_query);
+  videoenc_class->src_query = GST_DEBUG_FUNCPTR (gst_amf_encoder_src_query);
+  videoenc_class->propose_allocation =
+      GST_DEBUG_FUNCPTR (gst_amf_encoder_propose_allocation);
+
+  GST_DEBUG_CATEGORY_INIT (gst_amf_encoder_debug,
+      "amfencoder", 0, "amfencoder");
+}
+
+static void
+gst_amf_encoder_init (GstAmfEncoder * self)
+{
+  GstAmfEncoderPrivate *priv;
+  TIMECAPS time_caps;
+
+  priv = self->priv =
+      (GstAmfEncoderPrivate *) gst_amf_encoder_get_instance_private (self);
+
+  /* Very large min-pts offset (1000 hours); presumably to keep DTS
+   * non-negative with B-frame reordering — TODO confirm */
+  gst_video_encoder_set_min_pts (GST_VIDEO_ENCODER (self),
+      GST_SECOND * 60 * 60 * 1000);
+
+  /* Request the finest available Windows multimedia timer resolution so the
+   * polling waits in try_output() don't suffer from coarse scheduling;
+   * undone in finalize() via timeEndPeriod() */
+  if (timeGetDevCaps (&time_caps, sizeof (TIMECAPS)) == TIMERR_NOERROR) {
+    guint resolution;
+    MMRESULT ret;
+
+    resolution = MIN (MAX (time_caps.wPeriodMin, 1), time_caps.wPeriodMax);
+    ret = timeBeginPeriod (resolution);
+    if (ret == TIMERR_NOERROR)
+      priv->timer_resolution = resolution;
+  }
+}
+
+/* GObject dispose: drop the (possibly externally shared) device reference */
+static void
+gst_amf_encoder_dispose (GObject * object)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (object);
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  gst_clear_object (&priv->device);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject finalize: restore the Windows timer resolution requested in init() */
+static void
+gst_amf_encoder_finalize (GObject * object)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (object);
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  if (priv->timer_resolution)
+    timeEndPeriod (priv->timer_resolution);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Accept a GstD3D11Device from the application/pipeline context if it matches
+ * our adapter LUID */
+static void
+gst_amf_encoder_set_context (GstElement * element, GstContext * context)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (element);
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  gst_d3d11_handle_set_context_for_adapter_luid (element,
+      context, priv->adapter_luid, &priv->device);
+
+  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+}
+
+/* open() vfunc: acquire a D3D11 device for our adapter, enable multithread
+ * protection on it, then create and initialize an AMF context bound to that
+ * device. On failure both device and context are torn down. */
+static gboolean
+gst_amf_encoder_open (GstVideoEncoder * encoder)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+  GstAmfEncoderPrivate *priv = self->priv;
+  ComPtr < ID3D10Multithread > multi_thread;
+  ID3D11Device *device_handle;
+  AMFFactory *factory = (AMFFactory *) gst_amf_get_factory ();
+  AMF_RESULT result;
+  HRESULT hr;
+  D3D_FEATURE_LEVEL feature_level;
+  AMF_DX_VERSION dx_ver = AMF_DX11_1;
+
+  if (!gst_d3d11_ensure_element_data_for_adapter_luid (GST_ELEMENT (self),
+          priv->adapter_luid, &priv->device)) {
+    GST_ERROR_OBJECT (self, "d3d11 device is unavailable");
+    return FALSE;
+  }
+
+  device_handle = gst_d3d11_device_get_device_handle (priv->device);
+  /* Tell AMF which DX11 flavor to init against, based on the device's
+   * feature level */
+  feature_level = device_handle->GetFeatureLevel ();
+  if (feature_level >= D3D_FEATURE_LEVEL_11_1)
+    dx_ver = AMF_DX11_1;
+  else
+    dx_ver = AMF_DX11_0;
+
+  hr = device_handle->QueryInterface (IID_PPV_ARGS (&multi_thread));
+  if (!gst_d3d11_result (hr, priv->device)) {
+    GST_ERROR_OBJECT (self, "ID3D10Multithread interface is unavailable");
+    goto error;
+  }
+
+  /* AMF and GStreamer will touch the device from different threads */
+  multi_thread->SetMultithreadProtected (TRUE);
+
+  result = factory->CreateContext (&priv->context);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to create context");
+    goto error;
+  }
+
+  result = priv->context->InitDX11 (device_handle, dx_ver);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to init context");
+    goto error;
+  }
+
+  return TRUE;
+
+error:
+  gst_clear_object (&priv->device);
+  if (priv->context)
+    priv->context->Release ();
+  priv->context = nullptr;
+
+  return FALSE;
+}
+
+/* Release the per-stream resources (internal pool and AMF component);
+ * the AMF context and device survive a reset */
+static gboolean
+gst_amf_encoder_reset (GstAmfEncoder * self)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  GST_LOG_OBJECT (self, "Reset");
+
+  if (priv->internal_pool) {
+    gst_buffer_pool_set_active (priv->internal_pool, FALSE);
+    gst_clear_object (&priv->internal_pool);
+  }
+
+  if (priv->comp) {
+    priv->comp->Terminate ();
+    priv->comp->Release ();
+    priv->comp = nullptr;
+  }
+
+  return TRUE;
+}
+
+/* Map one encoded AMFBuffer back to its source codec frame (using the
+ * system-frame-number buffer attached at submit time), let the subclass wrap
+ * the bitstream into a GstBuffer, and finish/push it downstream.
+ * Returns the flow return of the downstream push. */
+static GstFlowReturn
+gst_amf_encoder_process_output (GstAmfEncoder * self, AMFBuffer * buffer)
+{
+  GstAmfEncoderClass *klass = GST_AMF_ENCODER_GET_CLASS (self);
+  GstVideoEncoder *venc = GST_VIDEO_ENCODER_CAST (self);
+  AMF_RESULT result;
+  GstVideoCodecFrame *frame = nullptr;
+  GstBuffer *output_buffer;
+  gboolean sync_point = FALSE;
+
+  GST_TRACE_OBJECT (self, "Process output");
+
+  if (buffer->HasProperty (GST_AMF_BUFFER_PROP)) {
+    AMFInterfacePtr iface;
+    result = buffer->GetProperty (GST_AMF_BUFFER_PROP, &iface);
+    if (result != AMF_OK) {
+      GST_ERROR_OBJECT (self, "Failed to get prop buffer, result %"
+          GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    } else {
+      AMFBufferPtr prop_buffer = AMFBufferPtr (iface);
+      if (prop_buffer) {
+        guint32 system_frame_number = *((guint32 *) prop_buffer->GetNative ());
+        frame = gst_video_encoder_get_frame (venc, system_frame_number);
+      }
+    }
+  } else {
+    GST_WARNING_OBJECT (self, "AMFData does not hold user data");
+  }
+
+  if (!frame) {
+    /* Fall back to the oldest pending frame so timestamps stay sensible */
+    GST_WARNING_OBJECT (self, "Failed to find associated codec frame");
+    frame = gst_video_encoder_get_oldest_frame (venc);
+  }
+
+  output_buffer = klass->create_output_buffer (self, buffer, &sync_point);
+
+  if (!output_buffer) {
+    GST_WARNING_OBJECT (self, "Empty output buffer");
+    /* Drop the reference taken by get_frame()/get_oldest_frame() above,
+     * otherwise the codec frame is leaked on this early-out path */
+    if (frame)
+      gst_video_codec_frame_unref (frame);
+    return GST_FLOW_OK;
+  }
+
+  GST_BUFFER_FLAG_SET (output_buffer, GST_BUFFER_FLAG_MARKER);
+
+  if (frame) {
+    frame->output_buffer = output_buffer;
+
+    if (sync_point)
+      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+  } else {
+    /* No pending frame at all; push the buffer directly */
+    if (!sync_point)
+      GST_BUFFER_FLAG_SET (output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+
+    return gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (self), output_buffer);
+  }
+
+  /* Unmap/unref the cached input buffer now that encoding is done */
+  gst_video_codec_frame_set_user_data (frame, nullptr, nullptr);
+
+  return gst_video_encoder_finish_frame (venc, frame);
+}
+
+/* Pull one output data object from the AMF component and hand it out as an
+ * AMFBuffer (caller owns the returned reference). Returns AMF_REPEAT when no
+ * output is available yet, AMF_NO_INTERFACE when the data is not a buffer. */
+static AMF_RESULT
+gst_amf_encoder_query_output (GstAmfEncoder * self, AMFBuffer ** buffer)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+  AMF_RESULT result;
+  AMFDataPtr data;
+  AMFBufferPtr buf;
+
+  result = priv->comp->QueryOutput (&data);
+  if (result != AMF_OK)
+    return result;
+
+  if (!data) {
+    GST_LOG_OBJECT (self, "Empty data");
+    return AMF_REPEAT;
+  }
+
+  buf = AMFBufferPtr (data);
+  if (!buf) {
+    GST_ERROR_OBJECT (self, "Failed to convert data to buffer");
+    return AMF_NO_INTERFACE;
+  }
+
+  /* Transfer ownership to the caller */
+  *buffer = buf.Detach ();
+
+  return AMF_OK;
+}
+
+/* Drain whatever output is currently available from the component. When
+ * do_wait is TRUE, busy-poll (1 ms sleeps) until output arrives or the queue
+ * reports EOF/error. Returns NEED_DATA on AMF_EOF so drain() loops stop. */
+static GstFlowReturn
+gst_amf_encoder_try_output (GstAmfEncoder * self, gboolean do_wait)
+{
+  AMFBufferPtr buffer;
+  AMF_RESULT result;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+again:
+  result = gst_amf_encoder_query_output (self, &buffer);
+  if (buffer) {
+    ret = gst_amf_encoder_process_output (self, buffer.GetPtr ());
+    if (ret != GST_FLOW_OK) {
+      GST_INFO_OBJECT (self, "Process output returned %s",
+          gst_flow_get_name (ret));
+    }
+  } else if (result == AMF_REPEAT || result == AMF_OK) {
+    GST_TRACE_OBJECT (self, "Output is not ready, do_wait %d", do_wait);
+    if (do_wait) {
+      g_usleep (1000);
+      goto again;
+    }
+  } else if (result == AMF_EOF) {
+    GST_DEBUG_OBJECT (self, "Output queue is drained");
+    ret = GST_VIDEO_ENCODER_FLOW_NEED_DATA;
+  } else {
+    GST_ERROR_OBJECT (self, "query output returned %" GST_AMF_RESULT_FORMAT,
+        GST_AMF_RESULT_ARGS (result));
+    ret = GST_FLOW_ERROR;
+  }
+
+  return ret;
+}
+
+/* Drain (or, when flushing, just discard) all queued frames, then reset the
+ * component and internal pool. Always succeeds. */
+static gboolean
+gst_amf_encoder_drain (GstAmfEncoder * self, gboolean flushing)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+  AMF_RESULT result;
+  GstFlowReturn ret;
+
+  if (!priv->comp)
+    return TRUE;
+
+  GST_DEBUG_OBJECT (self, "%s", flushing ? "Flush" : "Drain");
+  if (flushing)
+    goto done;
+
+  result = priv->comp->Drain ();
+  if (result != AMF_OK) {
+    GST_WARNING_OBJECT (self, "Drain returned %" GST_AMF_RESULT_FORMAT,
+        GST_AMF_RESULT_ARGS (result));
+    goto done;
+  }
+
+  /* Pump outputs until AMF_EOF (NEED_DATA) or an error breaks the loop */
+  do {
+    ret = gst_amf_encoder_try_output (self, TRUE);
+  } while (ret == GST_FLOW_OK);
+
+done:
+  gst_amf_encoder_reset (self);
+
+  return TRUE;
+}
+
+/* stop() vfunc: discard in-flight frames and drop the cached input state */
+static gboolean
+gst_amf_encoder_stop (GstVideoEncoder * encoder)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  GST_DEBUG_OBJECT (self, "Stop");
+
+  gst_amf_encoder_drain (self, TRUE);
+
+  g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);
+
+  return TRUE;
+}
+
+/* close() vfunc: tear down the AMF context and release the D3D11 device */
+static gboolean
+gst_amf_encoder_close (GstVideoEncoder * encoder)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  GST_DEBUG_OBJECT (self, "Close");
+
+  if (priv->context) {
+    priv->context->Terminate ();
+    priv->context->Release ();
+    priv->context = nullptr;
+  }
+
+  gst_clear_object (&priv->device);
+
+  return TRUE;
+}
+
+/* (Re)create the internal D3D11 buffer pool used to normalize input buffers.
+ * Textures are created with D3D11_RESOURCE_MISC_SHARED so they can be opened
+ * from another device object on the same adapter (see copy_d3d11()). */
+static gboolean
+gst_amf_encoder_prepare_internal_pool (GstAmfEncoder * self)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+  GstVideoInfo *info = &priv->input_state->info;
+  GstCaps *caps = priv->input_state->caps;
+  GstStructure *config;
+  GstD3D11AllocationParams *params;
+
+  if (priv->internal_pool) {
+    gst_buffer_pool_set_active (priv->internal_pool, FALSE);
+    gst_clear_object (&priv->internal_pool);
+  }
+
+  priv->internal_pool = gst_d3d11_buffer_pool_new (priv->device);
+  config = gst_buffer_pool_get_config (priv->internal_pool);
+  gst_buffer_pool_config_set_params (config, caps,
+      GST_VIDEO_INFO_SIZE (info), 0, 0);
+
+  params = gst_d3d11_allocation_params_new (priv->device, info,
+      (GstD3D11AllocationFlags) 0, 0);
+  params->desc[0].MiscFlags = D3D11_RESOURCE_MISC_SHARED;
+
+  gst_buffer_pool_config_set_d3d11_allocation_params (config, params);
+  gst_d3d11_allocation_params_free (params);
+
+  if (!gst_buffer_pool_set_config (priv->internal_pool, config)) {
+    GST_ERROR_OBJECT (self, "Failed to set config");
+    gst_clear_object (&priv->internal_pool);
+    return FALSE;
+  }
+
+  if (!gst_buffer_pool_set_active (priv->internal_pool, TRUE)) {
+    GST_ERROR_OBJECT (self, "Failed to set active");
+    gst_clear_object (&priv->internal_pool);
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* Drain any previous component, then create a fresh one for priv->codec_id
+ * and let the subclass configure it (set_format / set_output_state).
+ * On success ownership of the component moves to priv->comp; on failure the
+ * AMFComponentPtr smart pointer releases it automatically. */
+static gboolean
+gst_amf_encoder_open_component (GstAmfEncoder * self)
+{
+  GstAmfEncoderClass *klass = GST_AMF_ENCODER_GET_CLASS (self);
+  GstAmfEncoderPrivate *priv = self->priv;
+  AMFFactory *factory = (AMFFactory *) gst_amf_get_factory ();
+  AMFComponentPtr comp;
+  AMF_RESULT result;
+
+  gst_amf_encoder_drain (self, FALSE);
+
+  if (!gst_amf_encoder_prepare_internal_pool (self))
+    return FALSE;
+
+  result = factory->CreateComponent (priv->context, priv->codec_id, &comp);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to create component, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    return FALSE;
+  }
+
+  if (!klass->set_format (self, priv->input_state, comp.GetPtr ())) {
+    GST_ERROR_OBJECT (self, "Failed to set format");
+    return FALSE;
+  }
+
+  if (!klass->set_output_state (self, priv->input_state, comp.GetPtr ())) {
+    GST_ERROR_OBJECT (self, "Failed to set output state");
+    return FALSE;
+  }
+
+  priv->comp = comp.Detach ();
+
+  return TRUE;
+}
+
+/* set_format() vfunc: remember the new input state and reconfigure the
+ * AMF component for it */
+static gboolean
+gst_amf_encoder_set_format (GstVideoEncoder * encoder,
+    GstVideoCodecState * state)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);
+  priv->input_state = gst_video_codec_state_ref (state);
+
+  return gst_amf_encoder_open_component (self);
+}
+
+/* Copy a system-memory input buffer into a texture acquired from the
+ * internal D3D11 pool, plane by plane, clamping each copy to the smaller of
+ * the source/destination plane geometry. Returns the uploaded buffer, or
+ * NULL on failure (caller owns the returned reference). */
+static GstBuffer *
+gst_amf_encoder_upload_sysmem (GstAmfEncoder * self, GstBuffer * src_buf,
+    const GstVideoInfo * info)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+  GstVideoFrame src_frame, dst_frame;
+  GstBuffer *dst_buf;
+  GstFlowReturn ret;
+
+  GST_TRACE_OBJECT (self, "Uploading sysmem buffer");
+
+  ret = gst_buffer_pool_acquire_buffer (priv->internal_pool, &dst_buf, nullptr);
+  if (ret != GST_FLOW_OK) {
+    GST_ERROR_OBJECT (self, "Failed to acquire buffer");
+    return nullptr;
+  }
+
+  if (!gst_video_frame_map (&src_frame, info, src_buf, GST_MAP_READ)) {
+    GST_WARNING_OBJECT (self, "Failed to map src frame");
+    gst_buffer_unref (dst_buf);
+    return nullptr;
+  }
+
+  if (!gst_video_frame_map (&dst_frame, info, dst_buf, GST_MAP_WRITE)) {
+    /* message previously said "src frame" (copy-paste); this is the dst map */
+    GST_WARNING_OBJECT (self, "Failed to map dst frame");
+    gst_video_frame_unmap (&src_frame);
+    gst_buffer_unref (dst_buf);
+    return nullptr;
+  }
+
+  for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES (&src_frame); i++) {
+    guint src_width_in_bytes, src_height;
+    guint dst_width_in_bytes, dst_height;
+    guint width_in_bytes, height;
+    guint src_stride, dst_stride;
+    guint8 *src_data, *dst_data;
+
+    src_width_in_bytes = GST_VIDEO_FRAME_COMP_WIDTH (&src_frame, i) *
+        GST_VIDEO_FRAME_COMP_PSTRIDE (&src_frame, i);
+    src_height = GST_VIDEO_FRAME_COMP_HEIGHT (&src_frame, i);
+    src_stride = GST_VIDEO_FRAME_COMP_STRIDE (&src_frame, i);
+
+    /* fixed: dst geometry must be computed from dst_frame, not src_frame */
+    dst_width_in_bytes = GST_VIDEO_FRAME_COMP_WIDTH (&dst_frame, i) *
+        GST_VIDEO_FRAME_COMP_PSTRIDE (&dst_frame, i);
+    dst_height = GST_VIDEO_FRAME_COMP_HEIGHT (&dst_frame, i);
+    dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&dst_frame, i);
+
+    /* never read or write past the smaller plane */
+    width_in_bytes = MIN (src_width_in_bytes, dst_width_in_bytes);
+    height = MIN (src_height, dst_height);
+
+    src_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&src_frame, i);
+    dst_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&dst_frame, i);
+
+    for (guint j = 0; j < height; j++) {
+      memcpy (dst_data, src_data, width_in_bytes);
+      dst_data += dst_stride;
+      src_data += src_stride;
+    }
+  }
+
+  gst_video_frame_unmap (&dst_frame);
+  gst_video_frame_unmap (&src_frame);
+
+  return dst_buf;
+}
+
+/* GPU-copy a D3D11 input buffer into a texture from the internal pool.
+ * When `shared` is TRUE the source lives on a different GstD3D11Device
+ * object (same adapter), so the destination texture is opened through its
+ * DXGI shared handle on the source's device and the copy is fenced with a
+ * D3D11_QUERY_EVENT before returning.
+ * NOTE(review): in the non-shared path no device lock is taken here —
+ * callers in upload_buffer() hold the priv->device lock around this call. */
+static GstBuffer *
+gst_amf_encoder_copy_d3d11 (GstAmfEncoder * self, GstBuffer * src_buffer,
+    gboolean shared)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+  D3D11_TEXTURE2D_DESC src_desc, dst_desc;
+  D3D11_BOX src_box;
+  guint subresource_idx;
+  GstMemory *src_mem, *dst_mem;
+  GstMapInfo src_info, dst_info;
+  ID3D11Texture2D *src_tex, *dst_tex;
+  ID3D11Device *device_handle;
+  ID3D11DeviceContext *device_context;
+  GstBuffer *dst_buffer;
+  GstFlowReturn ret;
+  ComPtr < IDXGIResource > dxgi_resource;
+  ComPtr < ID3D11Texture2D > shared_texture;
+  ComPtr < ID3D11Query > query;
+  D3D11_QUERY_DESC query_desc;
+  BOOL sync_done = FALSE;
+  HANDLE shared_handle;
+  GstD3D11Device *device;
+  HRESULT hr;
+
+  ret = gst_buffer_pool_acquire_buffer (priv->internal_pool,
+      &dst_buffer, nullptr);
+  if (ret != GST_FLOW_OK) {
+    GST_ERROR_OBJECT (self, "Failed to acquire buffer");
+    return nullptr;
+  }
+
+  src_mem = gst_buffer_peek_memory (src_buffer, 0);
+  dst_mem = gst_buffer_peek_memory (dst_buffer, 0);
+
+  /* The copy is executed on the *source* memory's device */
+  device = GST_D3D11_MEMORY_CAST (src_mem)->device;
+
+  device_handle = gst_d3d11_device_get_device_handle (device);
+  device_context = gst_d3d11_device_get_device_context_handle (device);
+
+  if (!gst_memory_map (src_mem, &src_info,
+          (GstMapFlags) (GST_MAP_READ | GST_MAP_D3D11))) {
+    GST_ERROR_OBJECT (self, "Failed to map src memory");
+    gst_buffer_unref (dst_buffer);
+    return nullptr;
+  }
+
+  if (!gst_memory_map (dst_mem, &dst_info,
+          (GstMapFlags) (GST_MAP_WRITE | GST_MAP_D3D11))) {
+    GST_ERROR_OBJECT (self, "Failed to map dst memory");
+    gst_memory_unmap (src_mem, &src_info);
+    gst_buffer_unref (dst_buffer);
+    return nullptr;
+  }
+
+  /* D3D11-mapped GstMemory exposes the ID3D11Texture2D as the data pointer */
+  src_tex = (ID3D11Texture2D *) src_info.data;
+  dst_tex = (ID3D11Texture2D *) dst_info.data;
+
+  gst_d3d11_memory_get_texture_desc (GST_D3D11_MEMORY_CAST (src_mem),
+      &src_desc);
+  gst_d3d11_memory_get_texture_desc (GST_D3D11_MEMORY_CAST (dst_mem),
+      &dst_desc);
+  subresource_idx =
+      gst_d3d11_memory_get_subresource_index (GST_D3D11_MEMORY_CAST (src_mem));
+
+  if (shared) {
+    /* Re-open our pool texture (created with MISC_SHARED) on the source
+     * device so the copy can happen there */
+    hr = dst_tex->QueryInterface (IID_PPV_ARGS (&dxgi_resource));
+    if (!gst_d3d11_result (hr, priv->device)) {
+      GST_ERROR_OBJECT (self,
+          "IDXGIResource interface is not available, hr: 0x%x", (guint) hr);
+      goto error;
+    }
+
+    hr = dxgi_resource->GetSharedHandle (&shared_handle);
+    if (!gst_d3d11_result (hr, priv->device)) {
+      GST_ERROR_OBJECT (self, "Failed to get shared handle, hr: 0x%x",
+          (guint) hr);
+      goto error;
+    }
+
+    hr = device_handle->OpenSharedResource (shared_handle,
+        IID_PPV_ARGS (&shared_texture));
+
+    if (!gst_d3d11_result (hr, device)) {
+      GST_ERROR_OBJECT (self, "Failed to get shared texture, hr: 0x%x",
+          (guint) hr);
+      goto error;
+    }
+
+    dst_tex = shared_texture.Get ();
+  }
+
+  /* Clamp the copied region to the smaller texture */
+  src_box.left = 0;
+  src_box.top = 0;
+  src_box.front = 0;
+  src_box.back = 1;
+  src_box.right = MIN (src_desc.Width, dst_desc.Width);
+  src_box.bottom = MIN (src_desc.Height, dst_desc.Height);
+
+  if (shared) {
+    query_desc.Query = D3D11_QUERY_EVENT;
+    query_desc.MiscFlags = 0;
+
+    hr = device_handle->CreateQuery (&query_desc, &query);
+    if (!gst_d3d11_result (hr, device)) {
+      GST_ERROR_OBJECT (self, "Couldn't Create event query, hr: 0x%x",
+          (guint) hr);
+      goto error;
+    }
+
+    gst_d3d11_device_lock (device);
+  }
+
+  device_context->CopySubresourceRegion (dst_tex, 0,
+      0, 0, 0, src_tex, subresource_idx, &src_box);
+
+  if (shared) {
+    /* Spin on the event query until the cross-device copy has completed on
+     * the GPU; the shared surface has no implicit synchronization */
+    device_context->End (query.Get ());
+    do {
+      hr = device_context->GetData (query.Get (), &sync_done, sizeof (BOOL), 0);
+    } while (!sync_done && (hr == S_OK || hr == S_FALSE));
+
+    if (!gst_d3d11_result (hr, device)) {
+      GST_ERROR_OBJECT (self, "Couldn't sync GPU operation, hr: 0x%x",
+          (guint) hr);
+      gst_d3d11_device_unlock (device);
+      goto error;
+    }
+
+    gst_d3d11_device_unlock (device);
+  }
+
+  gst_memory_unmap (dst_mem, &dst_info);
+  gst_memory_unmap (src_mem, &src_info);
+
+  return dst_buffer;
+
+error:
+  gst_memory_unmap (dst_mem, &dst_info);
+  gst_memory_unmap (src_mem, &src_info);
+  gst_buffer_unref (dst_buffer);
+
+  return nullptr;
+}
+
+/* Normalize an arbitrary input buffer into a texture AMF can consume:
+ * - non-d3d11 (or multi-memory) input: CPU copy via upload_sysmem()
+ * - d3d11 memory on another device but same adapter LUID: GPU copy via
+ *   shared handle
+ * - d3d11 memory on another adapter: CPU copy
+ * - non-DEFAULT-usage texture on our device: GPU copy
+ * - otherwise: pass through with an extra ref */
+static GstBuffer *
+gst_amf_encoder_upload_buffer (GstAmfEncoder * self, GstBuffer * buffer)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+  GstVideoInfo *info = &priv->input_state->info;
+  GstMemory *mem;
+  GstD3D11Memory *dmem;
+  D3D11_TEXTURE2D_DESC desc;
+  GstBuffer *ret;
+
+  mem = gst_buffer_peek_memory (buffer, 0);
+  if (!gst_is_d3d11_memory (mem) || gst_buffer_n_memory (buffer) > 1) {
+    /* d3d11 buffer should hold single memory object */
+    return gst_amf_encoder_upload_sysmem (self, buffer, info);
+  }
+
+  dmem = GST_D3D11_MEMORY_CAST (mem);
+  if (dmem->device != priv->device) {
+    gint64 adapter_luid;
+
+    g_object_get (dmem->device, "adapter-luid", &adapter_luid, nullptr);
+    if (adapter_luid == priv->adapter_luid) {
+      GST_LOG_OBJECT (self, "Different device but same GPU, copy d3d11");
+      gst_d3d11_device_lock (priv->device);
+      ret = gst_amf_encoder_copy_d3d11 (self, buffer, TRUE);
+      gst_d3d11_device_unlock (priv->device);
+
+      return ret;
+    } else {
+      GST_LOG_OBJECT (self, "Different device, system copy");
+      return gst_amf_encoder_upload_sysmem (self, buffer, info);
+    }
+  }
+
+  gst_d3d11_memory_get_texture_desc (dmem, &desc);
+  if (desc.Usage != D3D11_USAGE_DEFAULT) {
+    GST_TRACE_OBJECT (self, "Not a default usage texture, d3d11 copy");
+    gst_d3d11_device_lock (priv->device);
+    ret = gst_amf_encoder_copy_d3d11 (self, buffer, TRUE);
+    gst_d3d11_device_unlock (priv->device);
+
+    return ret;
+  }
+
+  return gst_buffer_ref (buffer);
+}
+
+/* GDestroyNotify for the per-frame user data: unmap and release the input
+ * buffer that was kept alive while the frame sat in the AMF queue */
+static void
+gst_amf_frame_data_free (GstAmfEncoderFrameData * data)
+{
+  if (!data)
+    return;
+
+  gst_buffer_unmap (data->buffer, &data->info);
+  gst_buffer_unref (data->buffer);
+  g_free (data);
+}
+
+/* Submit one surface to the component, draining outputs (blocking) whenever
+ * the input queue reports AMF_INPUT_FULL, until the submit is accepted or
+ * an error occurs */
+static GstFlowReturn
+gst_amf_encoder_submit_input (GstAmfEncoder * self, AMFSurface * surface)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+  AMF_RESULT result;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  do {
+    result = priv->comp->SubmitInput (surface);
+    if (result == AMF_OK || result == AMF_NEED_MORE_INPUT) {
+      GST_TRACE_OBJECT (self, "SubmitInput returned %" GST_AMF_RESULT_FORMAT,
+          GST_AMF_RESULT_ARGS (result));
+      ret = GST_FLOW_OK;
+      break;
+    }
+
+    if (result != AMF_INPUT_FULL) {
+      GST_ERROR_OBJECT (self, "SubmitInput returned %" GST_AMF_RESULT_FORMAT,
+          GST_AMF_RESULT_ARGS (result));
+      ret = GST_FLOW_ERROR;
+      break;
+    }
+
+    /* Input queue full: free a slot by waiting for encoded output */
+    ret = gst_amf_encoder_try_output (self, TRUE);
+    if (ret != GST_FLOW_OK) {
+      GST_INFO_OBJECT (self, "Try output returned %s", gst_flow_get_name (ret));
+      break;
+    }
+  } while (TRUE);
+
+  return ret;
+}
+
+/* handle_frame() vfunc: (re)open the component if needed, upload the input
+ * into an AMF-compatible D3D11 texture, wrap it as an AMFSurface tagged with
+ * the frame's system_frame_number, submit it, and opportunistically pull any
+ * ready output (non-blocking). */
+static GstFlowReturn
+gst_amf_encoder_handle_frame (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+  GstAmfEncoderClass *klass = GST_AMF_ENCODER_GET_CLASS (self);
+  GstAmfEncoderPrivate *priv = self->priv;
+  GstVideoInfo *info = &priv->input_state->info;
+  GstBuffer *buffer;
+  AMFBufferPtr user_data;
+  AMFSurfacePtr surface;
+  AMF_RESULT result;
+  guint32 *system_frame_number;
+  guint subresource_index;
+  GstAmfEncoderFrameData *frame_data;
+  ID3D11Texture2D *texture;
+  gboolean need_reconfigure;
+  GstFlowReturn ret;
+
+  if (!priv->comp && !gst_amf_encoder_open_component (self)) {
+    GST_ERROR_OBJECT (self, "Encoder object was not configured");
+    goto error;
+  }
+
+  /* Subclass may request a reconfigure (e.g. property change) */
+  need_reconfigure = klass->check_reconfigure (self);
+  if (need_reconfigure && !gst_amf_encoder_open_component (self)) {
+    GST_ERROR_OBJECT (self, "Failed to reconfigure encoder");
+    goto error;
+  }
+
+  /* Small host buffer carrying the frame number through the AMF pipeline */
+  result = priv->context->AllocBuffer (AMF_MEMORY_HOST,
+      sizeof (guint32), &user_data);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to allocate user data buffer, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  system_frame_number = (guint32 *) user_data->GetNative ();
+  *system_frame_number = frame->system_frame_number;
+
+  buffer = gst_amf_encoder_upload_buffer (self, frame->input_buffer);
+  if (!buffer)
+    goto error;
+
+  /* Keep the texture mapped until the frame leaves the AMF queue; the
+   * mapping is dropped by gst_amf_frame_data_free() */
+  frame_data = g_new0 (GstAmfEncoderFrameData, 1);
+  frame_data->buffer = buffer;
+  gst_buffer_map (frame_data->buffer, &frame_data->info,
+      (GstMapFlags) (GST_MAP_READ | GST_MAP_D3D11));
+  gst_video_codec_frame_set_user_data (frame, frame_data,
+      (GDestroyNotify) gst_amf_frame_data_free);
+
+  subresource_index = GPOINTER_TO_UINT (frame_data->info.user_data[0]);
+
+  gst_d3d11_device_lock (priv->device);
+  texture = (ID3D11Texture2D *) frame_data->info.data;
+  /* Communicate the texture-array slice to AMF via private data */
+  texture->SetPrivateData (AMFTextureArrayIndexGUID,
+      sizeof (guint), &subresource_index);
+  result = priv->context->CreateSurfaceFromDX11Native (texture,
+      &surface, nullptr);
+  gst_d3d11_device_unlock (priv->device);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to create surface, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  /* AMF timestamps are in 100 ns units */
+  surface->SetCrop (0, 0, info->width, info->height);
+  surface->SetPts (frame->pts / 100);
+  if (GST_CLOCK_TIME_IS_VALID (frame->duration))
+    surface->SetDuration (frame->duration / 100);
+
+  result = surface->SetProperty (GST_AMF_BUFFER_PROP, user_data);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set user data on AMF surface");
+    goto error;
+  }
+
+  klass->set_surface_prop (self, frame, surface.GetPtr ());
+  gst_video_codec_frame_unref (frame);
+
+  ret = gst_amf_encoder_submit_input (self, surface.GetPtr ());
+  if (ret == GST_FLOW_OK)
+    ret = gst_amf_encoder_try_output (self, FALSE);
+
+  return ret;
+
+error:
+  /* finish_frame() consumes the frame reference */
+  gst_video_encoder_finish_frame (encoder, frame);
+
+  return GST_FLOW_ERROR;
+}
+
+/* finish() vfunc (EOS): drain all pending frames through the encoder */
+static GstFlowReturn
+gst_amf_encoder_finish (GstVideoEncoder * encoder)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+
+  GST_DEBUG_OBJECT (self, "Finish");
+
+  gst_amf_encoder_drain (self, FALSE);
+
+  return GST_FLOW_OK;
+}
+
+/* flush() vfunc (seek): discard pending frames without outputting them */
+static gboolean
+gst_amf_encoder_flush (GstVideoEncoder * encoder)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+
+  GST_DEBUG_OBJECT (self, "Flush");
+
+  gst_amf_encoder_drain (self, TRUE);
+
+  return TRUE;
+}
+
+/* Answer GST_QUERY_CONTEXT with our GstD3D11Device, if any */
+static gboolean
+gst_amf_encoder_handle_context_query (GstAmfEncoder * self, GstQuery * query)
+{
+  GstAmfEncoderPrivate *priv = self->priv;
+
+  return gst_d3d11_handle_context_query (GST_ELEMENT (self), query,
+      priv->device);
+}
+
+/* sink_query() vfunc: handle d3d11 context queries, defer the rest */
+static gboolean
+gst_amf_encoder_sink_query (GstVideoEncoder * encoder, GstQuery * query)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CONTEXT:
+      if (gst_amf_encoder_handle_context_query (self, query))
+        return TRUE;
+      break;
+    default:
+      break;
+  }
+
+  return GST_VIDEO_ENCODER_CLASS (parent_class)->sink_query (encoder, query);
+}
+
+/* src_query() vfunc: handle d3d11 context queries, defer the rest */
+static gboolean
+gst_amf_encoder_src_query (GstVideoEncoder * encoder, GstQuery * query)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CONTEXT:
+      if (gst_amf_encoder_handle_context_query (self, query))
+        return TRUE;
+      break;
+    default:
+      break;
+  }
+
+  return GST_VIDEO_ENCODER_CLASS (parent_class)->src_query (encoder, query);
+}
+
+/* propose_allocation() vfunc: offer upstream a d3d11 pool when caps carry the
+ * d3d11 memory feature, otherwise a staging pool for CPU-visible buffers */
+static gboolean
+gst_amf_encoder_propose_allocation (GstVideoEncoder * encoder, GstQuery * query)
+{
+  GstAmfEncoder *self = GST_AMF_ENCODER (encoder);
+  GstAmfEncoderPrivate *priv = self->priv;
+  GstD3D11Device *device = GST_D3D11_DEVICE (priv->device);
+  GstVideoInfo info;
+  GstBufferPool *pool;
+  GstCaps *caps;
+  guint size;
+  GstStructure *config;
+  GstCapsFeatures *features;
+  gboolean is_d3d11 = FALSE;
+
+  gst_query_parse_allocation (query, &caps, nullptr);
+  if (!caps) {
+    GST_WARNING_OBJECT (self, "null caps in query");
+    return FALSE;
+  }
+
+  if (!gst_video_info_from_caps (&info, caps)) {
+    GST_WARNING_OBJECT (self, "Failed to convert caps into info");
+    return FALSE;
+  }
+
+  features = gst_caps_get_features (caps, 0);
+  if (features && gst_caps_features_contains (features,
+          GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY)) {
+    GST_DEBUG_OBJECT (self, "upstream support d3d11 memory");
+    pool = gst_d3d11_buffer_pool_new (device);
+    is_d3d11 = TRUE;
+  } else {
+    pool = gst_d3d11_staging_buffer_pool_new (device);
+  }
+
+  config = gst_buffer_pool_get_config (pool);
+  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+  size = GST_VIDEO_INFO_SIZE (&info);
+
+  /* XXX: AMF API does not provide information about internal queue size,
+   * use hardcoded value 16 */
+  gst_buffer_pool_config_set_params (config, caps, size, 16, 0);
+
+  if (!gst_buffer_pool_set_config (pool, config)) {
+    GST_WARNING_OBJECT (self, "Failed to set pool config");
+    gst_object_unref (pool);
+    return FALSE;
+  }
+
+  /* d3d11 buffer pool will update actual CPU accessible buffer size based on
+   * allocated staging texture per gst_buffer_pool_set_config() call,
+   * need query again to get the size */
+  config = gst_buffer_pool_get_config (pool);
+  gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
+  gst_structure_free (config);
+
+  gst_query_add_allocation_pool (query, pool, size, 16, 0);
+  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, nullptr);
+  gst_object_unref (pool);
+
+  return TRUE;
+}
+
+/* Called by subclasses (before open()) to supply the adapter LUID and the
+ * AMF codec id this instance should target. codec_id is not copied; the
+ * caller must keep it alive for the encoder's lifetime. */
+void
+gst_amf_encoder_set_subclass_data (GstAmfEncoder * encoder, gint64 adapter_luid,
+    const wchar_t *codec_id)
+{
+  GstAmfEncoderPrivate *priv;
+
+  g_return_if_fail (GST_IS_AMF_ENCODER (encoder));
+
+  priv = encoder->priv;
+  priv->adapter_luid = adapter_luid;
+  priv->codec_id = codec_id;
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include "gstamfutils.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AMF_ENCODER (gst_amf_encoder_get_type())
+#define GST_AMF_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AMF_ENCODER, GstAmfEncoder))
+#define GST_AMF_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AMF_ENCODER, GstAmfEncoderClass))
+#define GST_IS_AMF_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AMF_ENCODER))
+#define GST_IS_AMF_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AMF_ENCODER))
+#define GST_AMF_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_AMF_ENCODER, GstAmfEncoderClass))
+#define GST_AMF_ENCODER_CAST(obj) ((GstAmfEncoder *)obj)
+
+typedef struct _GstAmfEncoder GstAmfEncoder;
+typedef struct _GstAmfEncoderClass GstAmfEncoderClass;
+typedef struct _GstAmfEncoderPrivate GstAmfEncoderPrivate;
+
+/* Base-class instance struct for AMF encoders; all state lives in priv */
+struct _GstAmfEncoder
+{
+  GstVideoEncoder parent;
+
+  /*< private >*/
+  GstAmfEncoderPrivate *priv;
+};
+
+/* Base class for AMF encoders. Subclasses implement the codec-specific
+ * vfuncs below. The component/surface/data arguments are opaque AMF object
+ * pointers passed as gpointer so this header stays free of AMF types. */
+struct _GstAmfEncoderClass
+{
+  GstVideoEncoderClass parent_class;
+
+  /* Configure the AMF component for a new input state */
+  gboolean (*set_format) (GstAmfEncoder * encoder,
+      GstVideoCodecState * state,
+      gpointer component);
+
+  /* Negotiate and set the downstream (source pad) caps/state */
+  gboolean (*set_output_state) (GstAmfEncoder * encoder,
+      GstVideoCodecState * state,
+      gpointer component);
+
+  /* Set per-frame properties on the AMF surface before submission */
+  gboolean (*set_surface_prop) (GstAmfEncoder * encoder,
+      GstVideoCodecFrame * frame,
+      gpointer surface);
+
+  /* Wrap encoded AMF output data into a GstBuffer; sets *sync_point */
+  GstBuffer * (*create_output_buffer) (GstAmfEncoder * encoder,
+      gpointer data,
+      gboolean * sync_point);
+
+  /* Return TRUE if properties changed and the encoder must be rebuilt */
+  gboolean (*check_reconfigure) (GstAmfEncoder * encoder);
+};
+
+GType gst_amf_encoder_get_type (void);
+
+void gst_amf_encoder_set_subclass_data (GstAmfEncoder * encoder,
+ gint64 adapter_luid,
+ const wchar_t * codec_id);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstAmfEncoder, gst_object_unref)
+
+G_END_DECLS
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstamfh264enc.h"
+#include <components/Component.h>
+#include <components/VideoEncoderVCE.h>
+#include <core/Factory.h>
+#include <gst/codecparsers/gsth264parser.h>
+#include <gst/pbutils/codec-utils.h>
+#include <string>
+#include <set>
+#include <string.h>
+
+using namespace amf;
+
+GST_DEBUG_CATEGORY_STATIC (gst_amf_h264_enc_debug);
+#define GST_CAT_DEFAULT gst_amf_h264_enc_debug
+
+static GTypeClass *parent_class = nullptr;
+
+/* Snapshot of per-device H.264 encoder capabilities. Populated outside this
+ * chunk — presumably from AMF capability queries at plugin registration;
+ * verify against the registration code. */
+typedef struct
+{
+  amf_int64 max_bitrate;
+  amf_int64 num_of_streams;
+  amf_int64 max_profile;
+  amf_int64 max_level;
+  amf_int64 bframes;
+  amf_int64 min_ref_frames;
+  amf_int64 max_ref_frames;
+  amf_int64 max_temporal_layers;
+  amf_int64 fixed_slice_mode;
+  amf_int64 num_of_hw_instances;
+  amf_int64 color_conversion;
+  amf_int64 pre_analysis;
+  amf_int64 roi_map;
+  amf_int64 max_throughput;
+  amf_int64 query_timeout_support;
+  /* defaults used to seed the qp-i / qp-p properties */
+  amf_int64 default_qp_i;
+  amf_int64 default_qp_p;
+  amf_int64 default_qp_b;
+  gboolean interlace_supported;
+  guint valign;
+} GstAmfH264EncDeviceCaps;
+
+#define GST_TYPE_AMF_H264_ENC_USAGE (gst_amf_h264_enc_usage_get_type ())
+static GType
+gst_amf_h264_enc_usage_get_type (void)
+{
+  /* Lazily registered enum type; g_once_init makes this thread-safe */
+  static GType type = 0;
+
+  if (g_once_init_enter (&type)) {
+    static const GEnumValue usages[] = {
+      {AMF_VIDEO_ENCODER_USAGE_TRANSCODING, "Transcoding", "transcoding"},
+      {AMF_VIDEO_ENCODER_USAGE_ULTRA_LOW_LATENCY, "Ultra Low Latency",
+          "ultra-low-latency"},
+      {AMF_VIDEO_ENCODER_USAGE_LOW_LATENCY, "Low Latency", "low-latency"},
+      {AMF_VIDEO_ENCODER_USAGE_WEBCAM, "Webcam", "webcam"},
+      {0, nullptr, nullptr}
+    };
+
+    g_once_init_leave (&type,
+        g_enum_register_static ("GstAmfH264EncUsage", usages));
+  }
+
+  return type;
+}
+
+#define GST_TYPE_AMF_H264_ENC_RATE_CONTROL (gst_amf_h264_enc_rate_control_get_type ())
+static GType
+gst_amf_h264_enc_rate_control_get_type (void)
+{
+  /* Lazily registered enum type; g_once_init makes this thread-safe */
+  static GType type = 0;
+
+  if (g_once_init_enter (&type)) {
+    static const GEnumValue rate_controls[] = {
+      {AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_UNKNOWN,
+          "Default, depends on Usage", "default"},
+      {AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_CONSTANT_QP, "Constant QP",
+          "cqp"},
+      {AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_CBR, "Constant Bitrate", "cbr"},
+      {AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR,
+          "Peak Constrained VBR", "vbr"},
+      {AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_LATENCY_CONSTRAINED_VBR,
+          "Latency Constrained VBR", "lcvbr"},
+      {0, nullptr, nullptr}
+    };
+
+    g_once_init_leave (&type,
+        g_enum_register_static ("GstAmfH264EncRateControl", rate_controls));
+  }
+
+  return type;
+}
+
+#define AMF_VIDEO_ENCODER_QUALITY_PRESET_UNKNOWN -1
+
+#define GST_TYPE_AMF_H264_ENC_PRESET (gst_amf_h264_enc_preset_get_type ())
+static GType
+gst_amf_h264_enc_preset_get_type (void)
+{
+  /* Lazily registered enum type; g_once_init makes this thread-safe */
+  static GType type = 0;
+
+  if (g_once_init_enter (&type)) {
+    static const GEnumValue presets[] = {
+      {AMF_VIDEO_ENCODER_QUALITY_PRESET_UNKNOWN, "Default, depends on USAGE",
+          "default"},
+      {AMF_VIDEO_ENCODER_QUALITY_PRESET_BALANCED, "Balanced", "balanced"},
+      {AMF_VIDEO_ENCODER_QUALITY_PRESET_SPEED, "Speed", "speed"},
+      {AMF_VIDEO_ENCODER_QUALITY_PRESET_QUALITY, "Quality", "quality"},
+      {0, nullptr, nullptr}
+    };
+
+    g_once_init_leave (&type,
+        g_enum_register_static ("GstAmfH264EncPreset", presets));
+  }
+
+  return type;
+}
+
+/* Per-device data handed to class_init when registering an element subclass.
+ * Ownership transfers to class_init: the caps are unreffed and the struct
+ * freed there. */
+typedef struct
+{
+  GstCaps *sink_caps;
+  GstCaps *src_caps;
+
+  gint64 adapter_luid;
+
+  GstAmfH264EncDeviceCaps dev_caps;
+} GstAmfH264EncClassData;
+
+/* GObject property IDs */
+enum
+{
+  PROP_0,
+  PROP_ADAPTER_LUID,
+  PROP_USAGE,
+  PROP_RATE_CONTROL,
+  PROP_PRESET,
+  PROP_BITRATE,
+  PROP_MAX_BITRATE,
+  PROP_GOP_SIZE,
+  PROP_MIN_QP,
+  PROP_MAX_QP,
+  PROP_QP_I,
+  PROP_QP_P,
+  PROP_REF_FRAMES,
+  PROP_AUD,
+  PROP_CABAC,
+};
+
+#define DEFAULT_USAGE AMF_VIDEO_ENCODER_USAGE_TRANSCODING
+#define DEFAULT_RATE_CONTROL AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_UNKNOWN
+#define DEFAULT_PRESET AMF_VIDEO_ENCODER_QUALITY_PRESET_UNKNOWN
+#define DEFAULT_BITRATE 0
+#define DEFAULT_MAX_BITRATE 0
+#define DEFAULT_GOP_SIZE -1
+#define DEFAULT_MIN_MAX_QP -1
+#define DEFAULT_AUD TRUE
+#define DEFAULT_CABAC TRUE
+
+typedef struct _GstAmfH264Enc
+{
+  GstAmfEncoder parent;
+
+  /* TRUE when downstream negotiated stream-format=avc */
+  gboolean packetized;
+  GstH264NalParser *parser;
+
+  /* Protects the property fields below. Any change flips property_updated,
+   * which the reconfigure check consults (definition outside this chunk). */
+  GMutex prop_lock;
+  gboolean property_updated;
+
+  gint usage;
+  gint rate_control;
+  gint preset;
+  guint bitrate;
+  guint max_bitrate;
+  gint gop_size;
+  gint min_qp;
+  gint max_qp;
+  guint qp_i;
+  guint qp_p;
+  guint ref_frames;
+
+  /* Per-frame properties; changing them does not require reconfiguration */
+  gboolean aud;
+  gboolean cabac;
+} GstAmfH264Enc;
+
+/* Per-device subclass: capability snapshot and adapter LUID are copied
+ * from the class data during class_init */
+typedef struct _GstAmfH264EncClass
+{
+  GstAmfEncoderClass parent_class;
+  GstAmfH264EncDeviceCaps dev_caps;
+
+  gint64 adapter_luid;
+} GstAmfH264EncClass;
+
+#define GST_AMF_H264_ENC(object) ((GstAmfH264Enc *) (object))
+#define GST_AMF_H264_ENC_GET_CLASS(object) \
+ (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstAmfH264EncClass))
+
+static void gst_amf_h264_enc_finalize (GObject * object);
+static void gst_amf_h264_enc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_amf_h264_enc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static GstCaps *gst_amf_h264_enc_getcaps (GstVideoEncoder * encoder,
+ GstCaps * filter);
+static gboolean gst_amf_h264_enc_set_format (GstAmfEncoder * encoder,
+ GstVideoCodecState * state, gpointer component);
+static gboolean gst_amf_h264_enc_set_output_state (GstAmfEncoder * encoder,
+ GstVideoCodecState * state, gpointer component);
+static gboolean gst_amf_h264_enc_set_surfrace_prop (GstAmfEncoder * encoder,
+ GstVideoCodecFrame * frame, gpointer surface);
+static GstBuffer *gst_amf_h264_enc_create_output_buffer (GstAmfEncoder *
+ encoder, gpointer data, gboolean * sync_point);
+static gboolean gst_amf_h264_enc_check_reconfigure (GstAmfEncoder * encoder);
+
+/* Class init. @data is a GstAmfH264EncClassData built per device (outside
+ * this chunk); ownership transfers here — caps are unreffed and the struct
+ * freed at the end. Property defaults for QP and ref-frames come from the
+ * device capability snapshot. */
+static void
+gst_amf_h264_enc_class_init (GstAmfH264EncClass * klass, gpointer data)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstVideoEncoderClass *videoenc_class = GST_VIDEO_ENCODER_CLASS (klass);
+  GstAmfEncoderClass *amf_class = GST_AMF_ENCODER_CLASS (klass);
+  GstAmfH264EncClassData *cdata = (GstAmfH264EncClassData *) data;
+  GstAmfH264EncDeviceCaps *dev_caps = &cdata->dev_caps;
+  GParamFlags param_flags = (GParamFlags) (G_PARAM_READWRITE |
+      GST_PARAM_MUTABLE_PLAYING | G_PARAM_STATIC_STRINGS);
+
+  parent_class = (GTypeClass *) g_type_class_peek_parent (klass);
+
+  object_class->finalize = gst_amf_h264_enc_finalize;
+  object_class->set_property = gst_amf_h264_enc_set_property;
+  object_class->get_property = gst_amf_h264_enc_get_property;
+
+  g_object_class_install_property (object_class, PROP_ADAPTER_LUID,
+      g_param_spec_int64 ("adapter-luid", "Adapter LUID",
+          "DXGI Adapter LUID (Locally Unique Identifier) of associated GPU",
+          G_MININT64, G_MAXINT64, cdata->adapter_luid, param_flags));
+  g_object_class_install_property (object_class, PROP_USAGE,
+      g_param_spec_enum ("usage", "Usage",
+          "Target usage", GST_TYPE_AMF_H264_ENC_USAGE,
+          DEFAULT_USAGE, param_flags));
+  g_object_class_install_property (object_class, PROP_RATE_CONTROL,
+      g_param_spec_enum ("rate-control", "Rate Control",
+          "Rate Control Method", GST_TYPE_AMF_H264_ENC_RATE_CONTROL,
+          DEFAULT_RATE_CONTROL, param_flags));
+  g_object_class_install_property (object_class, PROP_PRESET,
+      g_param_spec_enum ("preset", "Preset",
+          "Preset", GST_TYPE_AMF_H264_ENC_PRESET, DEFAULT_PRESET, param_flags));
+  g_object_class_install_property (object_class, PROP_BITRATE,
+      g_param_spec_uint ("bitrate", "Bitrate",
+          "Target bitrate in kbit/sec (0: USAGE default)",
+          0, G_MAXINT / 1000, DEFAULT_BITRATE, param_flags));
+  g_object_class_install_property (object_class, PROP_MAX_BITRATE,
+      g_param_spec_uint ("max-bitrate", "Max Bitrate",
+          "Maximum bitrate in kbit/sec (0: USAGE default)",
+          0, G_MAXINT / 1000, DEFAULT_MAX_BITRATE, param_flags));
+  g_object_class_install_property (object_class, PROP_GOP_SIZE,
+      g_param_spec_int ("gop-size", "GOP Size",
+          "Number of pictures within a GOP (-1: USAGE default)",
+          -1, G_MAXINT, DEFAULT_GOP_SIZE, param_flags));
+  g_object_class_install_property (object_class, PROP_MIN_QP,
+      g_param_spec_int ("min-qp", "Min QP",
+          "Minimum allowed QP value (-1: USAGE default)",
+          -1, 51, DEFAULT_MIN_MAX_QP, param_flags));
+  g_object_class_install_property (object_class, PROP_MAX_QP,
+      g_param_spec_int ("max-qp", "Max QP",
+          "Maximum allowed QP value (-1: USAGE default)",
+          -1, 51, DEFAULT_MIN_MAX_QP, param_flags));
+  g_object_class_install_property (object_class, PROP_QP_I,
+      g_param_spec_uint ("qp-i", "QP I",
+          "Constant QP for I frames", 0, 51,
+          (guint) dev_caps->default_qp_i, param_flags));
+  g_object_class_install_property (object_class, PROP_QP_P,
+      g_param_spec_uint ("qp-p", "QP P",
+          "Constant QP for P frames", 0, 51,
+          (guint) dev_caps->default_qp_p, param_flags));
+  g_object_class_install_property (object_class, PROP_REF_FRAMES,
+      g_param_spec_uint ("ref-frames", "Reference Frames",
+          "Number of reference frames", (guint) dev_caps->min_ref_frames,
+          (guint) dev_caps->max_ref_frames,
+          (guint) dev_caps->min_ref_frames, param_flags));
+  g_object_class_install_property (object_class, PROP_AUD,
+      g_param_spec_boolean ("aud", "AUD",
+          "Use AU (Access Unit) delimiter", DEFAULT_AUD, param_flags));
+  /* Use DEFAULT_CABAC for consistency with the other properties
+   * (was a hard-coded TRUE, which happens to equal DEFAULT_CABAC) */
+  g_object_class_install_property (object_class, PROP_CABAC,
+      g_param_spec_boolean ("cabac", "CABAC",
+          "Enable CABAC entropy coding", DEFAULT_CABAC, param_flags));
+
+  gst_element_class_set_metadata (element_class,
+      "AMD AMF H.264 Video Encoder",
+      "Codec/Encoder/Video/Hardware",
+      "Encode H.264 video streams using AMF API",
+      "Seungha Yang <seungha@centricular.com>");
+
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+          cdata->sink_caps));
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+          cdata->src_caps));
+
+  videoenc_class->getcaps = GST_DEBUG_FUNCPTR (gst_amf_h264_enc_getcaps);
+
+  amf_class->set_format = GST_DEBUG_FUNCPTR (gst_amf_h264_enc_set_format);
+  amf_class->set_output_state =
+      GST_DEBUG_FUNCPTR (gst_amf_h264_enc_set_output_state);
+  amf_class->set_surface_prop =
+      GST_DEBUG_FUNCPTR (gst_amf_h264_enc_set_surfrace_prop);
+  amf_class->create_output_buffer =
+      GST_DEBUG_FUNCPTR (gst_amf_h264_enc_create_output_buffer);
+  amf_class->check_reconfigure =
+      GST_DEBUG_FUNCPTR (gst_amf_h264_enc_check_reconfigure);
+
+  klass->dev_caps = cdata->dev_caps;
+  klass->adapter_luid = cdata->adapter_luid;
+
+  /* Class data is consumed here */
+  gst_caps_unref (cdata->sink_caps);
+  gst_caps_unref (cdata->src_caps);
+  g_free (cdata);
+}
+
+/* Instance init: registers the AVC codec id and adapter LUID with the base
+ * class, then seeds all properties with their defaults (QP and ref-frame
+ * defaults come from the per-device capability snapshot) */
+static void
+gst_amf_h264_enc_init (GstAmfH264Enc * self)
+{
+  GstAmfH264EncClass *klass = GST_AMF_H264_ENC_GET_CLASS (self);
+  GstAmfH264EncDeviceCaps *dev_caps = &klass->dev_caps;
+
+  gst_amf_encoder_set_subclass_data (GST_AMF_ENCODER (self),
+      klass->adapter_luid, AMFVideoEncoderVCE_AVC);
+
+  self->parser = gst_h264_nal_parser_new ();
+
+  g_mutex_init (&self->prop_lock);
+
+  self->usage = DEFAULT_USAGE;
+  self->rate_control = DEFAULT_RATE_CONTROL;
+  self->preset = DEFAULT_PRESET;
+  self->bitrate = DEFAULT_BITRATE;
+  self->max_bitrate = DEFAULT_MAX_BITRATE;
+  self->gop_size = DEFAULT_GOP_SIZE;
+  self->min_qp = DEFAULT_MIN_MAX_QP;
+  self->max_qp = DEFAULT_MIN_MAX_QP;
+  self->qp_i = (guint) dev_caps->default_qp_i;
+  self->qp_p = (guint) dev_caps->default_qp_p;
+  self->ref_frames = (guint) dev_caps->min_ref_frames;
+  self->aud = DEFAULT_AUD;
+  self->cabac = DEFAULT_CABAC;
+}
+
+static void
+gst_amf_h264_enc_finalize (GObject * object)
+{
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (object);
+
+  /* Release per-instance resources, then chain up to the parent class */
+  g_mutex_clear (&self->prop_lock);
+  gst_h264_nal_parser_free (self->parser);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Store a new int property value; flag reconfiguration only on change */
+static void
+update_int (GstAmfH264Enc * self, gint * old_val, const GValue * new_val)
+{
+  gint incoming = g_value_get_int (new_val);
+
+  if (*old_val != incoming) {
+    *old_val = incoming;
+    self->property_updated = TRUE;
+  }
+}
+
+/* Store a new uint property value; flag reconfiguration only on change */
+static void
+update_uint (GstAmfH264Enc * self, guint * old_val, const GValue * new_val)
+{
+  guint incoming = g_value_get_uint (new_val);
+
+  if (*old_val != incoming) {
+    *old_val = incoming;
+    self->property_updated = TRUE;
+  }
+}
+
+/* Store a new enum property value; flag reconfiguration only on change */
+static void
+update_enum (GstAmfH264Enc * self, gint * old_val, const GValue * new_val)
+{
+  gint incoming = g_value_get_enum (new_val);
+
+  if (*old_val != incoming) {
+    *old_val = incoming;
+    self->property_updated = TRUE;
+  }
+}
+
+/* Store a new boolean property value; flag reconfiguration only on change */
+static void
+update_bool (GstAmfH264Enc * self, gboolean * old_val, const GValue * new_val)
+{
+  gboolean incoming = g_value_get_boolean (new_val);
+
+  if (*old_val != incoming) {
+    *old_val = incoming;
+    self->property_updated = TRUE;
+  }
+}
+
+/* GObject::set_property. Properties are GST_PARAM_MUTABLE_PLAYING, so this
+ * can run while encoding; prop_lock serializes updates (presumably against
+ * the streaming thread reading them in set_format — confirm). The update_*
+ * helpers mark property_updated so the encoder can be reconfigured. */
+static void
+gst_amf_h264_enc_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (object);
+
+  g_mutex_lock (&self->prop_lock);
+  switch (prop_id) {
+    case PROP_USAGE:
+      update_enum (self, &self->usage, value);
+      break;
+    case PROP_RATE_CONTROL:
+      update_enum (self, &self->rate_control, value);
+      break;
+    case PROP_PRESET:
+      update_enum (self, &self->preset, value);
+      break;
+    case PROP_BITRATE:
+      update_uint (self, &self->bitrate, value);
+      break;
+    case PROP_MAX_BITRATE:
+      update_uint (self, &self->max_bitrate, value);
+      break;
+    case PROP_GOP_SIZE:
+      update_int (self, &self->gop_size, value);
+      break;
+    case PROP_MIN_QP:
+      update_int (self, &self->min_qp, value);
+      break;
+    case PROP_MAX_QP:
+      update_int (self, &self->max_qp, value);
+      break;
+    case PROP_QP_I:
+      update_uint (self, &self->qp_i, value);
+      break;
+    case PROP_QP_P:
+      update_uint (self, &self->qp_p, value);
+      break;
+    case PROP_REF_FRAMES:
+      update_uint (self, &self->ref_frames, value);
+      break;
+    case PROP_AUD:
+      /* This is per frame property, don't need to reset encoder */
+      self->aud = g_value_get_boolean (value);
+      break;
+    case PROP_CABAC:
+      update_bool (self, &self->cabac, value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  g_mutex_unlock (&self->prop_lock);
+}
+
+/* GObject::get_property. adapter-luid is class data (fixed per subclass);
+ * everything else is instance state.
+ * NOTE(review): reads are done without taking prop_lock — the fields are
+ * word-sized so torn reads are unlikely, but confirm this is intentional. */
+static void
+gst_amf_h264_enc_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAmfH264EncClass *klass = GST_AMF_H264_ENC_GET_CLASS (object);
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (object);
+
+  switch (prop_id) {
+    case PROP_ADAPTER_LUID:
+      g_value_set_int64 (value, klass->adapter_luid);
+      break;
+    case PROP_USAGE:
+      g_value_set_enum (value, self->usage);
+      break;
+    case PROP_RATE_CONTROL:
+      g_value_set_enum (value, self->rate_control);
+      break;
+    case PROP_PRESET:
+      g_value_set_enum (value, self->preset);
+      break;
+    case PROP_BITRATE:
+      g_value_set_uint (value, self->bitrate);
+      break;
+    case PROP_MAX_BITRATE:
+      g_value_set_uint (value, self->max_bitrate);
+      break;
+    case PROP_GOP_SIZE:
+      g_value_set_int (value, self->gop_size);
+      break;
+    case PROP_MIN_QP:
+      g_value_set_int (value, self->min_qp);
+      break;
+    case PROP_MAX_QP:
+      g_value_set_int (value, self->max_qp);
+      break;
+    case PROP_QP_I:
+      g_value_set_uint (value, self->qp_i);
+      break;
+    case PROP_QP_P:
+      g_value_set_uint (value, self->qp_p);
+      break;
+    case PROP_REF_FRAMES:
+      g_value_set_uint (value, self->ref_frames);
+      break;
+    case PROP_AUD:
+      g_value_set_boolean (value, self->aud);
+      break;
+    case PROP_CABAC:
+      g_value_set_boolean (value, self->cabac);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Collect every H.264 profile string downstream can accept into
+ * @downstream_profiles (left empty when caps are unavailable/ANY), and,
+ * when @packetized is non-null, report whether the fixated downstream
+ * stream-format is "avc". */
+static void
+gst_amf_h264_enc_get_downstream_profiles_and_format (GstAmfH264Enc * self,
+    std::set < std::string > &downstream_profiles, gboolean * packetized)
+{
+  GstCaps *allowed_caps;
+  GstStructure *s;
+  const gchar *stream_format;
+
+  /* Define the out-param up front so the early-return path below does not
+   * leave it unset for callers */
+  if (packetized)
+    *packetized = FALSE;
+
+  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (self));
+
+  if (!allowed_caps || gst_caps_is_empty (allowed_caps) ||
+      gst_caps_is_any (allowed_caps)) {
+    gst_clear_caps (&allowed_caps);
+
+    return;
+  }
+
+  for (guint i = 0; i < gst_caps_get_size (allowed_caps); i++) {
+    const GValue *profile_value;
+    const gchar *profile;
+
+    s = gst_caps_get_structure (allowed_caps, i);
+    profile_value = gst_structure_get_value (s, "profile");
+    if (!profile_value)
+      continue;
+
+    if (GST_VALUE_HOLDS_LIST (profile_value)) {
+      for (guint j = 0; j < gst_value_list_get_size (profile_value); j++) {
+        const GValue *p = gst_value_list_get_value (profile_value, j);
+
+        if (!G_VALUE_HOLDS_STRING (p))
+          continue;
+
+        profile = g_value_get_string (p);
+        if (profile)
+          downstream_profiles.insert (profile);
+      }
+
+    } else if (G_VALUE_HOLDS_STRING (profile_value)) {
+      profile = g_value_get_string (profile_value);
+      if (profile)
+        downstream_profiles.insert (profile);
+    }
+  }
+
+  if (packetized) {
+    /* Fixate to decide the preferred stream-format */
+    allowed_caps = gst_caps_fixate (allowed_caps);
+    s = gst_caps_get_structure (allowed_caps, 0);
+    stream_format = gst_structure_get_string (s, "stream-format");
+    if (g_strcmp0 (stream_format, "avc") == 0)
+      *packetized = TRUE;
+  }
+
+  gst_caps_unref (allowed_caps);
+}
+
+/* GstVideoEncoder::getcaps implementation.
+ * If the device supports interlaced encoding but downstream only accepts
+ * profiles without interlace support (baseline family), restrict the
+ * advertised sink caps to progressive. When the device cannot encode
+ * interlaced at all, the template caps presumably already exclude it
+ * (they are built at registration, outside this file), so plain proxying
+ * suffices. Uses nullptr/.empty() for consistency with the rest of the
+ * file (was NULL / size () == 0). */
+static GstCaps *
+gst_amf_h264_enc_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
+{
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (encoder);
+  GstAmfH264EncClass *klass = GST_AMF_H264_ENC_GET_CLASS (self);
+  GstCaps *template_caps;
+  GstCaps *supported_caps;
+  std::set < std::string > downstream_profiles;
+
+  if (!klass->dev_caps.interlace_supported)
+    return gst_video_encoder_proxy_getcaps (encoder, nullptr, filter);
+
+  gst_amf_h264_enc_get_downstream_profiles_and_format (self,
+      downstream_profiles, nullptr);
+
+  GST_DEBUG_OBJECT (self, "Downstream specified %" G_GSIZE_FORMAT " profiles",
+      downstream_profiles.size ());
+
+  if (downstream_profiles.empty ())
+    return gst_video_encoder_proxy_getcaps (encoder, nullptr, filter);
+
+  /* Profile allows interlaced? */
+  /* *INDENT-OFF* */
+  gboolean can_support_interlaced = FALSE;
+  for (const auto &iter: downstream_profiles) {
+    if (iter == "high" || iter == "main" || iter == "constrained-high") {
+      can_support_interlaced = TRUE;
+      break;
+    }
+  }
+  /* *INDENT-ON* */
+
+  GST_DEBUG_OBJECT (self, "Downstream %s support interlaced format",
+      can_support_interlaced ? "can" : "cannot");
+
+  if (can_support_interlaced) {
+    /* No special handling is needed */
+    return gst_video_encoder_proxy_getcaps (encoder, nullptr, filter);
+  }
+
+  template_caps = gst_pad_get_pad_template_caps (encoder->sinkpad);
+  template_caps = gst_caps_make_writable (template_caps);
+
+  gst_caps_set_simple (template_caps, "interlace-mode", G_TYPE_STRING,
+      "progressive", nullptr);
+
+  supported_caps = gst_video_encoder_proxy_getcaps (encoder,
+      template_caps, filter);
+  gst_caps_unref (template_caps);
+
+  GST_DEBUG_OBJECT (self, "Returning %" GST_PTR_FORMAT, supported_caps);
+
+  return supported_caps;
+}
+
+/* GstAmfEncoderClass::set_format implementation.
+ * Negotiates an H.264 profile against downstream caps, then programs the
+ * AMF component. The property sequence is order-sensitive: static
+ * properties (size, usage, profile, ref-frames, aspect, color range, RC
+ * method) must be set before Init(); rate-control dependent properties are
+ * set afterwards, because the effective RC mode is only known post-Init
+ * when the property was left at its USAGE default. */
+static gboolean
+gst_amf_h264_enc_set_format (GstAmfEncoder * encoder,
+    GstVideoCodecState * state, gpointer component)
+{
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (encoder);
+  GstAmfH264EncClass *klass = GST_AMF_H264_ENC_GET_CLASS (self);
+  GstAmfH264EncDeviceCaps *dev_caps = &klass->dev_caps;
+  AMFComponent *comp = (AMFComponent *) component;
+  GstVideoInfo *info = &state->info;
+  std::set < std::string > downstream_profiles;
+  AMF_VIDEO_ENCODER_PROFILE_ENUM profile = AMF_VIDEO_ENCODER_PROFILE_UNKNOWN;
+  AMF_RESULT result;
+  AMFRate framerate;
+  AMFRatio aspect_ratio;
+  amf_int64 int64_val;
+  amf_bool boolean_val;
+  AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_ENUM rc_mode;
+  AMF_VIDEO_ENCODER_CODING_ENUM cabac = AMF_VIDEO_ENCODER_UNDEFINED;
+
+  self->packetized = FALSE;
+
+  gst_amf_h264_enc_get_downstream_profiles_and_format (self,
+      downstream_profiles, &self->packetized);
+
+  if (downstream_profiles.empty ()) {
+    GST_ERROR_OBJECT (self, "Unable to get downstream profile");
+    return FALSE;
+  }
+
+  /* Baseline-family profiles cannot carry interlaced content; drop them
+   * from the candidate set when input is interlaced */
+  if (GST_VIDEO_INFO_IS_INTERLACED (info)) {
+    downstream_profiles.erase ("constrained-high");
+    downstream_profiles.erase ("constrained-baseline");
+    downstream_profiles.erase ("baseline");
+
+    if (downstream_profiles.empty ()) {
+      GST_ERROR_OBJECT (self,
+          "None of downstream profile supports interlaced encoding");
+      return FALSE;
+    }
+  }
+
+  /* Pick a profile in preference order: main > high > constrained-high >
+   * constrained-baseline > baseline, falling back to the closest profile
+   * the device supports when the constrained variant is unavailable */
+  if (downstream_profiles.find ("main") != downstream_profiles.end ()) {
+    profile = AMF_VIDEO_ENCODER_PROFILE_MAIN;
+  } else if (downstream_profiles.find ("high") != downstream_profiles.end ()) {
+    profile = AMF_VIDEO_ENCODER_PROFILE_HIGH;
+  } else if (downstream_profiles.find ("constrained-high") !=
+      downstream_profiles.end ()) {
+    if (dev_caps->max_profile >=
+        (gint64) AMF_VIDEO_ENCODER_PROFILE_CONSTRAINED_HIGH) {
+      profile = AMF_VIDEO_ENCODER_PROFILE_CONSTRAINED_HIGH;
+    } else {
+      profile = AMF_VIDEO_ENCODER_PROFILE_HIGH;
+    }
+  } else if (downstream_profiles.find ("constrained-baseline") !=
+      downstream_profiles.end ()) {
+    if (dev_caps->max_profile >=
+        (gint64) AMF_VIDEO_ENCODER_PROFILE_CONSTRAINED_BASELINE) {
+      profile = AMF_VIDEO_ENCODER_PROFILE_CONSTRAINED_BASELINE;
+    } else {
+      profile = AMF_VIDEO_ENCODER_PROFILE_BASELINE;
+    }
+  } else if (downstream_profiles.find ("baseline") !=
+      downstream_profiles.end ()) {
+    profile = AMF_VIDEO_ENCODER_PROFILE_BASELINE;
+  } else {
+    GST_ERROR_OBJECT (self, "Failed to determine profile");
+    return FALSE;
+  }
+
+  g_mutex_lock (&self->prop_lock);
+  /* Configure static properties first before Init() */
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_FRAMESIZE,
+      AMFConstructSize (info->width, info->height));
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set frame size, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_USAGE, (amf_int64) self->usage);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set usage, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_PROFILE, (amf_int64) profile);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set profile, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_MAX_NUM_REFRAMES,
+      (amf_int64) self->ref_frames);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set ref-frames, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  aspect_ratio = AMFConstructRatio (info->par_n, info->par_d);
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_ASPECT_RATIO, aspect_ratio);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set aspect ratio, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  if (info->colorimetry.range == GST_VIDEO_COLOR_RANGE_0_255)
+    boolean_val = true;
+  else
+    boolean_val = false;
+
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_FULL_RANGE_COLOR, boolean_val);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set full-range-color, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  /* Only override RC when the user chose an explicit mode; otherwise let
+   * USAGE pick it and query the effective mode back after Init() */
+  if (self->rate_control != AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_UNKNOWN) {
+    result = comp->SetProperty (AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD,
+        (amf_int64) self->rate_control);
+    if (result != AMF_OK) {
+      GST_ERROR_OBJECT (self, "Failed to set rate-control, result %"
+          GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+      goto error;
+    }
+  }
+
+  result = comp->Init (AMF_SURFACE_NV12, info->width, info->height);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to init component, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  /* dynamic properties */
+  result = comp->GetProperty (AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD,
+      &int64_val);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to get rate-control method, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  rc_mode = (AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_ENUM) int64_val;
+  if (self->min_qp >= 0)
+    comp->SetProperty (AMF_VIDEO_ENCODER_MIN_QP, (amf_int64) self->min_qp);
+  if (self->max_qp >= 0)
+    comp->SetProperty (AMF_VIDEO_ENCODER_MAX_QP, (amf_int64) self->max_qp);
+
+  comp->SetProperty (AMF_VIDEO_ENCODER_QP_I, (amf_int64) self->qp_i);
+  comp->SetProperty (AMF_VIDEO_ENCODER_QP_P, (amf_int64) self->qp_p);
+
+  /* Bitrate properties only apply to the bitrate-driven RC modes;
+   * other modes (e.g. CQP) fall through to default */
+  switch (rc_mode) {
+    case AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_CBR:
+      if (self->bitrate > 0) {
+        comp->SetProperty (AMF_VIDEO_ENCODER_TARGET_BITRATE,
+            (amf_int64) self->bitrate * 1000);
+        comp->SetProperty (AMF_VIDEO_ENCODER_PEAK_BITRATE,
+            (amf_int64) self->bitrate * 1000);
+      }
+      break;
+    case AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR:
+    case AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_LATENCY_CONSTRAINED_VBR:
+      if (self->bitrate > 0) {
+        comp->SetProperty (AMF_VIDEO_ENCODER_TARGET_BITRATE,
+            (amf_int64) self->bitrate * 1000);
+      }
+      if (self->max_bitrate > 0) {
+        comp->SetProperty (AMF_VIDEO_ENCODER_PEAK_BITRATE,
+            (amf_int64) self->max_bitrate * 1000);
+      }
+      break;
+    default:
+      break;
+  }
+
+  /* Disable frame skip for now, need investigation the behavior */
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_RATE_CONTROL_SKIP_FRAME_ENABLE,
+      (amf_bool) false);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to disable skip frame, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  /* Fall back to 25/1 when upstream provides no framerate */
+  if (info->fps_n > 0 && info->fps_d) {
+    framerate = AMFConstructRate (info->fps_n, info->fps_d);
+  } else {
+    framerate = AMFConstructRate (25, 1);
+  }
+
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_FRAMERATE, framerate);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set frame rate, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  if (self->gop_size >= 0) {
+    result = comp->SetProperty (AMF_VIDEO_ENCODER_IDR_PERIOD,
+        (amf_int64) self->gop_size);
+    if (result != AMF_OK) {
+      GST_ERROR_OBJECT (self, "Failed to set IDR period, result %"
+          GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+      goto error;
+    }
+  }
+
+  /* CABAC is not available in the baseline family; leave UNDEFINED there */
+  if (profile != AMF_VIDEO_ENCODER_PROFILE_BASELINE &&
+      profile != AMF_VIDEO_ENCODER_PROFILE_CONSTRAINED_BASELINE) {
+    if (self->cabac)
+      cabac = AMF_VIDEO_ENCODER_CABAC;
+    else
+      cabac = AMF_VIDEO_ENCODER_CALV;
+  }
+
+  result = comp->SetProperty (AMF_VIDEO_ENCODER_CABAC_ENABLE,
+      (amf_int64) cabac);
+  if (result != AMF_OK) {
+    GST_ERROR_OBJECT (self, "Failed to set cabac, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    goto error;
+  }
+
+  /* All pending property changes are now applied */
+  self->property_updated = FALSE;
+  g_mutex_unlock (&self->prop_lock);
+
+  return TRUE;
+
+error:
+  g_mutex_unlock (&self->prop_lock);
+
+  return FALSE;
+}
+
+static gboolean
+gst_amf_h264_enc_set_output_state (GstAmfEncoder * encoder,
+ GstVideoCodecState * state, gpointer component)
+{
+ GstAmfH264Enc *self = GST_AMF_H264_ENC (encoder);
+ AMFComponent *comp = (AMFComponent *) component;
+ GstVideoCodecState *output_state;
+ GstCaps *caps;
+ const gchar *profile_from_sps;
+ std::set < std::string > downstream_profiles;
+ std::string caps_str;
+ GstTagList *tags;
+ GstBuffer *codec_data = nullptr;
+ GstH264NalUnit sps_nalu, pps_nalu;
+ GstH264ParserResult rst;
+ AMF_RESULT result;
+ AMFInterfacePtr iface;
+ AMFBufferPtr spspps_buf;
+ guint8 *spspps;
+ amf_size spspps_size;
+
+ result = comp->GetProperty (AMF_VIDEO_ENCODER_EXTRADATA, &iface);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to get extra data, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ return FALSE;
+ }
+
+ spspps_buf = AMFBufferPtr (iface);
+ if (!spspps_buf) {
+ GST_ERROR_OBJECT (self, "Failed to set get AMFBuffer interface, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ return FALSE;
+ }
+
+ spspps_size = spspps_buf->GetSize ();
+ if (spspps_size < 4) {
+ GST_ERROR_OBJECT (self, "Too small spspps size %d", (guint) spspps_size);
+ return FALSE;
+ }
+
+ spspps = (guint8 *) spspps_buf->GetNative ();
+ if (!spspps) {
+ GST_ERROR_OBJECT (self, "Null SPS/PPS");
+ return FALSE;
+ }
+
+ caps_str = "video/x-h264, alignment = (string) au";
+ gst_amf_h264_enc_get_downstream_profiles_and_format (self,
+ downstream_profiles, nullptr);
+
+ rst = gst_h264_parser_identify_nalu (self->parser,
+ spspps, 0, spspps_size, &sps_nalu);
+ if (rst != GST_H264_PARSER_OK) {
+ GST_ERROR_OBJECT (self, "Failed to identify SPS nal");
+ return FALSE;
+ }
+
+ if (sps_nalu.size < 4) {
+ GST_ERROR_OBJECT (self, "Too small sps nal size %d", sps_nalu.size);
+ return FALSE;
+ }
+
+ rst = gst_h264_parser_identify_nalu_unchecked (self->parser,
+ spspps, sps_nalu.offset + sps_nalu.size, spspps_size, &pps_nalu);
+ if (rst != GST_H264_PARSER_OK && self->packetized) {
+ GST_ERROR_OBJECT (self, "Failed to identify PPS nal, %d", rst);
+ return FALSE;
+ }
+
+ if (self->packetized) {
+ GstMapInfo info;
+ guint8 *data;
+ guint8 profile_idc, profile_comp, level_idc;
+ const guint nal_length_size = 4;
+ const guint num_sps = 1;
+ const guint num_pps = 1;
+
+ data = sps_nalu.data + sps_nalu.offset + sps_nalu.header_bytes;
+ profile_idc = data[0];
+ profile_comp = data[1];
+ level_idc = data[2];
+
+ /* 5: configuration version, profile, compatibility, level, nal length
+ * 1: num sps
+ * 2: sps size bytes
+ * sizeof (sps)
+ * 1: num pps
+ * 2: pps size bytes
+ * sizeof (pps)
+ *
+ * -> 11 + sps_size + pps_size
+ */
+ codec_data = gst_buffer_new_and_alloc (11 + sps_nalu.size + pps_nalu.size);
+
+ gst_buffer_map (codec_data, &info, GST_MAP_WRITE);
+
+ data = (guint8 *) info.data;
+ data[0] = 1;
+ data[1] = profile_idc;
+ data[2] = profile_comp;
+ data[3] = level_idc;
+ data[4] = 0xfc | (nal_length_size - 1);
+ data[5] = 0xe0 | num_sps;
+ data += 6;
+ GST_WRITE_UINT16_BE (data, sps_nalu.size);
+ data += 2;
+ memcpy (data, sps_nalu.data + sps_nalu.offset, sps_nalu.size);
+ data += sps_nalu.size;
+
+ data[0] = num_pps;
+ data++;
+
+ GST_WRITE_UINT16_BE (data, pps_nalu.size);
+ data += 2;
+ memcpy (data, pps_nalu.data + pps_nalu.offset, pps_nalu.size);
+
+ gst_buffer_unmap (codec_data, &info);
+ }
+
+ profile_from_sps =
+ gst_codec_utils_h264_get_profile (sps_nalu.data + sps_nalu.offset +
+ sps_nalu.header_bytes, 3);
+
+ if (!profile_from_sps) {
+ GST_WARNING_OBJECT (self, "Failed to parse profile from SPS");
+ } else if (!downstream_profiles.empty ()) {
+ if (downstream_profiles.find (profile_from_sps) !=
+ downstream_profiles.end ()) {
+ caps_str += ", profile = (string) " + std::string (profile_from_sps);
+ } else if (downstream_profiles.find ("baseline") !=
+ downstream_profiles.end () &&
+ strcmp (profile_from_sps, "constrained-baseline") == 0) {
+ caps_str += ", profile = (string) baseline";
+ } else if (downstream_profiles.find ("constrained-baseline") !=
+ downstream_profiles.end () &&
+ strcmp (profile_from_sps, "constrained-baseline") == 0) {
+ caps_str += ", profile = (string) constrained-baseline";
+ }
+ } else {
+ caps_str += ", profile = (string) " + std::string (profile_from_sps);
+ }
+
+ if (self->packetized) {
+ caps_str += ", stream-format = (string) avc";
+ } else {
+ caps_str += ", stream-format = (string) byte-stream";
+ }
+
+ caps = gst_caps_from_string (caps_str.c_str ());
+
+ if (self->packetized) {
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data,
+ nullptr);
+ gst_buffer_unref (codec_data);
+ }
+
+ output_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (self),
+ caps, state);
+
+ GST_INFO_OBJECT (self, "Output caps: %" GST_PTR_FORMAT, output_state->caps);
+ gst_video_codec_state_unref (output_state);
+
+ tags = gst_tag_list_new_empty ();
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER,
+ "amfh264enc", nullptr);
+
+ gst_video_encoder_merge_tags (GST_VIDEO_ENCODER (encoder),
+ tags, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+
+ return TRUE;
+}
+
+/* Per-frame surface property setup: requests an IDR picture when a
+ * keyframe was forced upstream, and configures AUD insertion from the
+ * "aud" property. Always returns TRUE; property failures are only warned
+ * about since encoding can proceed without them. */
+static gboolean
+gst_amf_h264_enc_set_surfrace_prop (GstAmfEncoder * encoder,
+    GstVideoCodecFrame * frame, gpointer surface)
+{
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (encoder);
+  AMFSurface *surf = (AMFSurface *) surface;
+  AMF_RESULT result;
+  amf_bool insert_aud = self->aud ? true : false;
+
+  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
+    amf_int64 type = (amf_int64) AMF_VIDEO_ENCODER_PICTURE_TYPE_IDR;
+    result = surf->SetProperty (AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, type);
+    if (result != AMF_OK) {
+      GST_WARNING_OBJECT (encoder, "Failed to set force idr, result %"
+          GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    }
+  }
+
+  /* Pass the value, not its address: SetProperty() builds an AMFVariant
+   * from its argument, so passing &insert_aud would store a pointer
+   * rather than a boolean */
+  result = surf->SetProperty (AMF_VIDEO_ENCODER_INSERT_AUD, insert_aud);
+  if (result != AMF_OK) {
+    GST_WARNING_OBJECT (encoder, "Failed to set AUD, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+  }
+
+  return TRUE;
+}
+
+/* Wraps AMF output bitstream data into a GstBuffer. In byte-stream mode
+ * the data is copied verbatim; in packetized (avc) mode each Annex-B NAL
+ * is re-prefixed with a 4-byte big-endian length field. *sync_point is
+ * set when the encoder reports an IDR output picture. Returns nullptr on
+ * empty input. */
+static GstBuffer *
+gst_amf_h264_enc_create_output_buffer (GstAmfEncoder * encoder,
+    gpointer data, gboolean * sync_point)
+{
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (encoder);
+  AMFBuffer *amf_buf = (AMFBuffer *) data;
+  GstBuffer *buf;
+  GstH264ParserResult rst;
+  GstH264NalUnit nalu;
+  guint8 *data_ptr;
+  gsize data_size;
+  amf_int64 output_type = 0;
+  AMF_RESULT result;
+
+  data_ptr = (guint8 *) amf_buf->GetNative ();
+  data_size = amf_buf->GetSize ();
+
+  if (!data_ptr || data_size == 0) {
+    GST_WARNING_OBJECT (self, "Empty buffer");
+    return nullptr;
+  }
+
+  if (!self->packetized) {
+    buf = gst_buffer_new_memdup (data_ptr, data_size);
+  } else {
+    buf = gst_buffer_new ();
+    rst = gst_h264_parser_identify_nalu (self->parser,
+        data_ptr, 0, data_size, &nalu);
+    /* The final NAL has no following start code; treat NO_NAL_END as a
+     * complete NAL */
+    if (rst == GST_H264_PARSER_NO_NAL_END)
+      rst = GST_H264_PARSER_OK;
+
+    while (rst == GST_H264_PARSER_OK) {
+      GstMemory *mem;
+      guint8 *data;
+
+      /* 4-byte length prefix followed by the raw NAL payload */
+      data = (guint8 *) g_malloc0 (nalu.size + 4);
+      GST_WRITE_UINT32_BE (data, nalu.size);
+      memcpy (data + 4, nalu.data + nalu.offset, nalu.size);
+
+      mem = gst_memory_new_wrapped ((GstMemoryFlags) 0, data, nalu.size + 4,
+          0, nalu.size + 4, data, (GDestroyNotify) g_free);
+      gst_buffer_append_memory (buf, mem);
+
+      rst = gst_h264_parser_identify_nalu (self->parser,
+          data_ptr, nalu.offset + nalu.size, data_size, &nalu);
+
+      if (rst == GST_H264_PARSER_NO_NAL_END)
+        rst = GST_H264_PARSER_OK;
+    }
+  }
+
+  result = amf_buf->GetProperty (AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE,
+      &output_type);
+  if (result == AMF_OK &&
+      output_type == (amf_int64) AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
+    *sync_point = TRUE;
+  }
+
+  return buf;
+}
+
+/* Reports whether a property changed since the last (re)configuration;
+ * read under prop_lock since properties may change from other threads */
+static gboolean
+gst_amf_h264_enc_check_reconfigure (GstAmfEncoder * encoder)
+{
+  GstAmfH264Enc *self = GST_AMF_H264_ENC (encoder);
+  gboolean need_reconfig;
+
+  g_mutex_lock (&self->prop_lock);
+  need_reconfig = self->property_updated;
+  g_mutex_unlock (&self->prop_lock);
+
+  return need_reconfig;
+}
+
+/* Queries the AVC encoder component's capabilities and builds the
+ * per-device class data (sink/src caps, device caps, adapter LUID).
+ * Returns nullptr when a mandatory capability (NV12 input format or
+ * D3D11 memory support) is missing. The returned caps are flagged as
+ * may-be-leaked since they live for the process lifetime in class data. */
+static GstAmfH264EncClassData *
+gst_amf_h264_enc_create_class_data (GstD3D11Device * device,
+    AMFComponent * comp)
+{
+  AMF_RESULT result;
+  GstAmfH264EncDeviceCaps dev_caps = { 0, };
+  std::string sink_caps_str;
+  std::string src_caps_str;
+  std::set < std::string > profiles;
+  std::string profile_str;
+  std::string resolution_str;
+  GstAmfH264EncClassData *cdata;
+  AMFCapsPtr amf_caps;
+  AMFIOCapsPtr in_iocaps;
+  AMFIOCapsPtr out_iocaps;
+  amf_int32 in_min_width = 0, in_max_width = 0;
+  amf_int32 in_min_height = 0, in_max_height = 0;
+  amf_int32 out_min_width = 0, out_max_width = 0;
+  amf_int32 out_min_height = 0, out_max_height = 0;
+  amf_bool interlace_supported;
+  amf_int32 num_val;
+  gboolean have_nv12 = FALSE;
+  gboolean d3d11_supported = FALSE;
+  gint min_width, max_width, min_height, max_height;
+  GstCaps *sink_caps;
+  GstCaps *system_caps;
+
+  result = comp->GetCaps (&amf_caps);
+  if (result != AMF_OK) {
+    GST_WARNING_OBJECT (device, "Unable to get caps");
+    return nullptr;
+  }
+
+  result = amf_caps->GetInputCaps (&in_iocaps);
+  if (result != AMF_OK) {
+    GST_WARNING_OBJECT (device, "Unable to get input io caps");
+    return nullptr;
+  }
+
+  in_iocaps->GetWidthRange (&in_min_width, &in_max_width);
+  in_iocaps->GetHeightRange (&in_min_height, &in_max_height);
+  dev_caps.valign = in_iocaps->GetVertAlign ();
+  interlace_supported = in_iocaps->IsInterlacedSupported ();
+
+  GST_INFO_OBJECT (device, "Input width: [%d, %d], height: [%d, %d], "
+      "valign: %d, interlace supported: %d",
+      in_min_width, in_max_width, in_min_height, in_max_height, dev_caps.valign,
+      interlace_supported);
+
+  if (interlace_supported)
+    dev_caps.interlace_supported = TRUE;
+
+  num_val = in_iocaps->GetNumOfFormats ();
+  GST_LOG_OBJECT (device, "Input format count: %d", num_val);
+  /* amf_int32 index: num_val is signed, avoid signed/unsigned comparison */
+  for (amf_int32 i = 0; i < num_val; i++) {
+    AMF_SURFACE_FORMAT format;
+    amf_bool native;
+
+    result = in_iocaps->GetFormatAt (i, &format, &native);
+    if (result != AMF_OK)
+      continue;
+
+    GST_INFO_OBJECT (device, "Format %d supported, native %d", format, native);
+    if (format == AMF_SURFACE_NV12)
+      have_nv12 = TRUE;
+  }
+
+  if (!have_nv12) {
+    GST_WARNING_OBJECT (device, "NV12 is not supported");
+    return nullptr;
+  }
+
+  num_val = in_iocaps->GetNumOfMemoryTypes ();
+  GST_LOG_OBJECT (device, "Input memory type count: %d", num_val);
+  for (amf_int32 i = 0; i < num_val; i++) {
+    AMF_MEMORY_TYPE type;
+    amf_bool native;
+
+    result = in_iocaps->GetMemoryTypeAt (i, &type, &native);
+    if (result != AMF_OK)
+      continue;
+
+    GST_INFO_OBJECT (device,
+        "MemoryType %d supported, native %d", type, native);
+    if (type == AMF_MEMORY_DX11)
+      d3d11_supported = TRUE;
+  }
+
+  if (!d3d11_supported) {
+    GST_WARNING_OBJECT (device, "D3D11 is not supported");
+    return nullptr;
+  }
+
+  result = amf_caps->GetOutputCaps (&out_iocaps);
+  if (result != AMF_OK) {
+    /* was "input io caps": this branch handles the output caps query */
+    GST_WARNING_OBJECT (device, "Unable to get output io caps");
+    return nullptr;
+  }
+
+  out_iocaps->GetWidthRange (&out_min_width, &out_max_width);
+  out_iocaps->GetHeightRange (&out_min_height, &out_max_height);
+
+  /* Log the output ranges just queried (previously the input ranges were
+   * printed here by mistake) */
+  GST_INFO_OBJECT (device, "Output width: [%d, %d], height: [%d, %d]",
+      out_min_width, out_max_width, out_min_height, out_max_height);
+
+#define QUERY_CAPS_PROP(prop,val) G_STMT_START { \
+  amf_int64 _val; \
+  result = amf_caps->GetProperty (prop, &_val); \
+  if (result == AMF_OK) { \
+    GST_INFO_OBJECT (device, G_STRINGIFY (val) ": %" G_GINT64_FORMAT, _val); \
+    dev_caps.val = _val; \
+  } \
+} G_STMT_END
+
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_MAX_BITRATE, max_bitrate);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_NUM_OF_STREAMS, num_of_streams);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_MAX_PROFILE, max_profile);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_MAX_LEVEL, max_level);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_BFRAMES, bframes);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_MIN_REFERENCE_FRAMES, min_ref_frames);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_MAX_REFERENCE_FRAMES, max_ref_frames);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_MAX_TEMPORAL_LAYERS,
+      max_temporal_layers);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_FIXED_SLICE_MODE, fixed_slice_mode);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_NUM_OF_HW_INSTANCES,
+      num_of_hw_instances);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_COLOR_CONVERSION, color_conversion);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_PRE_ANALYSIS, pre_analysis);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_ROI, roi_map);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAP_MAX_THROUGHPUT, max_throughput);
+  QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAPS_QUERY_TIMEOUT_SUPPORT,
+      query_timeout_support);
+#undef QUERY_CAPS_PROP
+
+#define QUERY_DEFAULT_PROP(prop,val,default_val) G_STMT_START { \
+  const AMFPropertyInfo *pinfo = nullptr; \
+  result = comp->GetPropertyInfo (prop, &pinfo); \
+  if (result == AMF_OK && pinfo) { \
+    dev_caps.val = AMFVariantGetInt64 (&pinfo->defaultValue); \
+    GST_INFO_OBJECT (device, G_STRINGIFY (val) ": %" G_GINT64_FORMAT, \
+        dev_caps.val); \
+  } else { \
+    dev_caps.val = default_val; \
+  } \
+} G_STMT_END
+
+  QUERY_DEFAULT_PROP (AMF_VIDEO_ENCODER_QP_I, default_qp_i, 22);
+  /* Query the P and B frame QP defaults from their own properties
+   * (previously all three defaults were queried from QP_I) */
+  QUERY_DEFAULT_PROP (AMF_VIDEO_ENCODER_QP_P, default_qp_p, 22);
+  QUERY_DEFAULT_PROP (AMF_VIDEO_ENCODER_QP_B, default_qp_b, 22);
+#undef QUERY_DEFAULT_PROP
+
+  min_width = MAX (in_min_width, 1);
+  max_width = in_max_width;
+  if (max_width == 0) {
+    GST_WARNING_OBJECT (device, "Unknown max width, assuming 4096");
+    max_width = 4096;
+  }
+
+  min_height = MAX (in_min_height, 1);
+  max_height = in_max_height;
+  if (max_height == 0) {
+    GST_WARNING_OBJECT (device, "Unknown max height, assuming 4096");
+    max_height = 4096;
+  }
+
+  if (dev_caps.max_profile >= (gint64) AMF_VIDEO_ENCODER_PROFILE_BASELINE) {
+    profiles.insert ("baseline");
+    profiles.insert ("constrained-baseline");
+  }
+
+  if (dev_caps.max_profile >= (gint64) AMF_VIDEO_ENCODER_PROFILE_MAIN)
+    profiles.insert ("main");
+
+  if (dev_caps.max_profile >= (gint64) AMF_VIDEO_ENCODER_PROFILE_HIGH) {
+    profiles.insert ("high");
+  }
+
+  if (dev_caps.max_profile >=
+      (gint64) AMF_VIDEO_ENCODER_PROFILE_CONSTRAINED_HIGH) {
+    profiles.insert ("constrained-high");
+  }
+
+  if (profiles.empty ()) {
+    GST_WARNING_OBJECT (device, "Failed to determine profile support");
+    return nullptr;
+  }
+#define APPEND_STRING(dst,set,str) G_STMT_START { \
+  if (set.find(str) != set.end()) { \
+    if (!first) \
+      dst += ", "; \
+    dst += str; \
+    first = FALSE; \
+  } \
+} G_STMT_END
+
+  if (profiles.size () == 1) {
+    profile_str = "profile = (string) " + *(profiles.begin ());
+  } else {
+    gboolean first = TRUE;
+
+    profile_str = "profile = (string) { ";
+    APPEND_STRING (profile_str, profiles, "main");
+    APPEND_STRING (profile_str, profiles, "high");
+    APPEND_STRING (profile_str, profiles, "constrained-high");
+    APPEND_STRING (profile_str, profiles, "constrained-baseline");
+    APPEND_STRING (profile_str, profiles, "baseline");
+    profile_str += " } ";
+  }
+#undef APPEND_STRING
+
+  resolution_str = "width = (int) [ " + std::to_string (min_width)
+      + ", " + std::to_string (max_width) + " ]";
+  resolution_str += ", height = (int) [ " + std::to_string (min_height)
+      + ", " + std::to_string (max_height) + " ]";
+
+  sink_caps_str = "video/x-raw, format = (string) NV12, " + resolution_str;
+  if (dev_caps.interlace_supported > 0) {
+    sink_caps_str += ", interlace-mode = (string) { interleaved, mixed }";
+  } else {
+    sink_caps_str += ", interlace-mode = (string) progressive";
+  }
+
+  src_caps_str = "video/x-h264, " + resolution_str + ", " + profile_str +
+      ", stream-format = (string) { avc, byte-stream }, alignment = (string) au";
+
+  /* Offer D3D11 memory caps first, then the system-memory equivalent */
+  system_caps = gst_caps_from_string (sink_caps_str.c_str ());
+  sink_caps = gst_caps_copy (system_caps);
+  gst_caps_set_features (sink_caps, 0,
+      gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr));
+  gst_caps_append (sink_caps, system_caps);
+
+  cdata = g_new0 (GstAmfH264EncClassData, 1);
+  cdata->sink_caps = sink_caps;
+  cdata->src_caps = gst_caps_from_string (src_caps_str.c_str ());
+  cdata->dev_caps = dev_caps;
+  g_object_get (device, "adapter-luid", &cdata->adapter_luid, nullptr);
+
+  GST_MINI_OBJECT_FLAG_SET (cdata->sink_caps,
+      GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+  GST_MINI_OBJECT_FLAG_SET (cdata->src_caps,
+      GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+
+  GST_DEBUG_OBJECT (device, "Sink caps %" GST_PTR_FORMAT, cdata->sink_caps);
+  GST_DEBUG_OBJECT (device, "Src caps %" GST_PTR_FORMAT, cdata->src_caps);
+
+  return cdata;
+}
+
+/* Registers the amfh264enc element for the given D3D11 device / AMF
+ * context pair. Each physical device gets its own GType; the first
+ * device keeps the base name, later ones get a "device%d" suffix and a
+ * decremented rank so the first device is preferred by auto-plugging. */
+void
+gst_amf_h264_enc_register_d3d11 (GstPlugin * plugin, GstD3D11Device * device,
+    gpointer context, guint rank)
+{
+  GstAmfH264EncClassData *cdata;
+  AMFContext *amf_context = (AMFContext *) context;
+  AMFFactory *factory = (AMFFactory *) gst_amf_get_factory ();
+  AMFComponentPtr comp;
+  AMF_RESULT result;
+
+  GST_DEBUG_CATEGORY_INIT (gst_amf_h264_enc_debug, "amfh264enc", 0,
+      "amfh264enc");
+
+  result = factory->CreateComponent (amf_context, AMFVideoEncoderVCE_AVC,
+      &comp);
+  if (result != AMF_OK) {
+    GST_WARNING_OBJECT (device, "Failed to create component, result %"
+        GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+    return;
+  }
+
+  cdata = gst_amf_h264_enc_create_class_data (device, comp.GetPtr ());
+  if (!cdata)
+    return;
+
+  GType type;
+  gchar *type_name;
+  gchar *feature_name;
+  /* cdata is handed to class_init via class_data and freed there */
+  GTypeInfo type_info = {
+    sizeof (GstAmfH264EncClass),
+    nullptr,
+    nullptr,
+    (GClassInitFunc) gst_amf_h264_enc_class_init,
+    nullptr,
+    cdata,
+    sizeof (GstAmfH264Enc),
+    0,
+    (GInstanceInitFunc) gst_amf_h264_enc_init,
+  };
+
+  type_name = g_strdup ("GstAmfH264Enc");
+  feature_name = g_strdup ("amfh264enc");
+
+  /* Find a unique type/feature name for each additional device */
+  gint index = 0;
+  while (g_type_from_name (type_name)) {
+    index++;
+    g_free (type_name);
+    g_free (feature_name);
+    type_name = g_strdup_printf ("GstAmfH264Device%dEnc", index);
+    feature_name = g_strdup_printf ("amfh264device%denc", index);
+  }
+
+  type = g_type_register_static (GST_TYPE_AMF_ENCODER, type_name,
+      &type_info, (GTypeFlags) 0);
+
+  /* Secondary devices get a lower rank than the first one */
+  if (rank > 0 && index != 0)
+    rank--;
+
+  if (!gst_element_register (plugin, feature_name, rank, type))
+    GST_WARNING ("Failed to register plugin '%s'", type_name);
+
+  g_free (type_name);
+  g_free (feature_name);
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#include "gstamfencoder.h"
+#include <gst/d3d11/gstd3d11.h>
+
+G_BEGIN_DECLS
+
+void gst_amf_h264_enc_register_d3d11 (GstPlugin * plugin,
+ GstD3D11Device * device,
+ gpointer context,
+ guint rank);
+
+G_END_DECLS
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstamfh265enc.h"
+#include <components/Component.h>
+#include <components/VideoEncoderHEVC.h>
+#include <core/Factory.h>
+#include <string>
+#include <vector>
+#include <string.h>
+
+using namespace amf;
+
+GST_DEBUG_CATEGORY_STATIC (gst_amf_h265_enc_debug);
+#define GST_CAT_DEFAULT gst_amf_h265_enc_debug
+
+static GTypeClass *parent_class = nullptr;
+
+/* HEVC encoder capabilities queried from the AMF component at plugin
+ * registration time; values are stored per-GType in the class data */
+typedef struct
+{
+  amf_int64 max_bitrate;
+  amf_int64 num_of_streams;
+  amf_int64 max_profile;
+  amf_int64 max_tier;
+  amf_int64 max_level;
+  amf_int64 min_ref_frames;
+  amf_int64 max_ref_frames;
+  amf_int64 num_of_hw_instances;
+  amf_int64 color_conversion;
+  amf_int64 pre_analysis;
+  amf_int64 roi_map;
+  amf_int64 max_throughput;
+  amf_int64 query_timeout_support;
+  amf_int64 default_qp_i;
+  amf_int64 default_qp_p;
+  amf_int64 min_gop_size;
+  amf_int64 max_gop_size;
+  amf_int64 default_gop_size;
+  guint valign;                 /* vertical alignment for input surfaces */
+} GstAmfH265EncDeviceCaps;
+
+#define GST_TYPE_AMF_H265_ENC_USAGE (gst_amf_h265_enc_usage_get_type ())
+/* GEnum type for the "usage" property, mapping the AMF HEVC usage enum;
+ * registered once, thread-safe via g_once_init_enter/leave */
+static GType
+gst_amf_h265_enc_usage_get_type (void)
+{
+  static GType usage_type = 0;
+  static const GEnumValue usages[] = {
+    {AMF_VIDEO_ENCODER_HEVC_USAGE_TRANSCODING, "Transcoding", "transcoding"},
+    {AMF_VIDEO_ENCODER_HEVC_USAGE_ULTRA_LOW_LATENCY, "Ultra Low Latency",
+        "ultra-low-latency"},
+    {AMF_VIDEO_ENCODER_HEVC_USAGE_LOW_LATENCY, "Low Latency", "low-latency"},
+    {AMF_VIDEO_ENCODER_HEVC_USAGE_WEBCAM, "Webcam", "webcam"},
+    {0, nullptr, nullptr}
+  };
+
+  if (g_once_init_enter (&usage_type)) {
+    GType type = g_enum_register_static ("GstAmfH265EncUsage", usages);
+    g_once_init_leave (&usage_type, type);
+  }
+
+  return usage_type;
+}
+
+#define GST_TYPE_AMF_H265_ENC_RATE_CONTROL (gst_amf_h265_enc_rate_control_get_type ())
+/* GEnum type for the "rate-control" property, mapping the AMF HEVC rate
+ * control method enum; UNKNOWN means "use the USAGE default" */
+static GType
+gst_amf_h265_enc_rate_control_get_type (void)
+{
+  static GType rate_control_type = 0;
+  static const GEnumValue rate_controls[] = {
+    {AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_UNKNOWN,
+          "Default, depends on Usage",
+        "default"},
+    {AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_CONSTANT_QP, "Constant QP",
+        "cqp"},
+    {AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_LATENCY_CONSTRAINED_VBR,
+        "Latency Constrained VBR", "lcvbr"},
+    {AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR,
+        "Peak Constrained VBR", "vbr"},
+    {AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_CBR, "Constant Bitrate", "cbr"},
+    {0, nullptr, nullptr}
+  };
+
+  if (g_once_init_enter (&rate_control_type)) {
+    GType type =
+        g_enum_register_static ("GstAmfH265EncRateControl", rate_controls);
+    g_once_init_leave (&rate_control_type, type);
+  }
+
+  return rate_control_type;
+}
+
+/* Sentinel for "no explicit preset"; the AMF HEVC API has no such value */
+#define AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_UNKNOWN -1
+
+#define GST_TYPE_AMF_H265_ENC_PRESET (gst_amf_h265_enc_preset_get_type ())
+/* GEnum type for the "preset" property, mapping the AMF HEVC quality
+ * preset enum plus the local UNKNOWN sentinel */
+static GType
+gst_amf_h265_enc_preset_get_type (void)
+{
+  static GType preset_type = 0;
+  static const GEnumValue presets[] = {
+    {AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_UNKNOWN, "Default, depends on USAGE",
+        "default"},
+    {AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_QUALITY, "Quality", "quality"},
+    {AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_BALANCED, "Balanced", "balanced"},
+    {AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_SPEED, "Speed", "speed"},
+    {0, nullptr, nullptr}
+  };
+
+  if (g_once_init_enter (&preset_type)) {
+    GType type = g_enum_register_static ("GstAmfH265EncPreset", presets);
+    g_once_init_leave (&preset_type, type);
+  }
+
+  return preset_type;
+}
+
+/* Per-device data handed to class_init via GTypeInfo.class_data */
+typedef struct
+{
+  GstCaps *sink_caps;
+  GstCaps *src_caps;
+
+  gint64 adapter_luid;
+
+  GstAmfH265EncDeviceCaps dev_caps;
+} GstAmfH265EncClassData;
+
+/* GObject property IDs */
+enum
+{
+  PROP_0,
+  PROP_ADAPTER_LUID,
+  PROP_USAGE,
+  PROP_RATE_CONTROL,
+  PROP_PRESET,
+  PROP_BITRATE,
+  PROP_MAX_BITRATE,
+  PROP_GOP_SIZE,
+  PROP_MIN_QP_I,
+  PROP_MAX_QP_I,
+  PROP_MIN_QP_P,
+  PROP_MAX_QP_P,
+  PROP_QP_I,
+  PROP_QP_P,
+  PROP_REF_FRAMES,
+  PROP_AUD,
+};
+
+/* Property defaults; 0 / -1 generally mean "use the USAGE default" */
+#define DEFAULT_USAGE AMF_VIDEO_ENCODER_HEVC_USAGE_TRANSCODING
+#define DEFAULT_RATE_CONTROL AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_UNKNOWN
+#define DEFAULT_PRESET AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_UNKNOWN
+#define DEFAULT_BITRATE 0
+#define DEFAULT_MAX_BITRATE 0
+#define DEFAULT_MIN_MAX_QP -1
+#define DEFAULT_AUD TRUE
+
+/* Instance structure: property values are guarded by prop_lock;
+ * property_updated triggers encoder reconfiguration */
+typedef struct _GstAmfH265Enc
+{
+  GstAmfEncoder parent;
+
+  GMutex prop_lock;
+  gboolean property_updated;
+
+  gint usage;
+  gint rate_control;
+  gint preset;
+  guint bitrate;
+  guint max_bitrate;
+  guint gop_size;
+  gint min_qp_i;
+  gint max_qp_i;
+  gint min_qp_p;
+  gint max_qp_p;
+  guint qp_i;
+  guint qp_p;
+  guint ref_frames;
+
+  gboolean aud;
+} GstAmfH265Enc;
+
+/* Class structure: carries the per-device caps and adapter LUID copied
+ * from the class data at class_init time */
+typedef struct _GstAmfH265EncClass
+{
+  GstAmfEncoderClass parent_class;
+  GstAmfH265EncDeviceCaps dev_caps;
+
+  gint64 adapter_luid;
+} GstAmfH265EncClass;
+
+/* Plain cast accessors; the type is dynamically registered so the usual
+ * G_DECLARE_* macros are not used here */
+#define GST_AMF_H265_ENC(object) ((GstAmfH265Enc *) (object))
+#define GST_AMF_H265_ENC_GET_CLASS(object) \
+    (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstAmfH265EncClass))
+
+static void gst_amf_h265_enc_finalize (GObject * object);
+static void gst_amf_h265_enc_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_amf_h265_enc_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+static gboolean gst_amf_h265_enc_set_format (GstAmfEncoder * encoder,
+    GstVideoCodecState * state, gpointer component);
+static gboolean gst_amf_h265_enc_set_output_state (GstAmfEncoder * encoder,
+    GstVideoCodecState * state, gpointer component);
+static gboolean gst_amf_h265_enc_set_surfrace_prop (GstAmfEncoder * encoder,
+    GstVideoCodecFrame * frame, gpointer surface);
+static GstBuffer *gst_amf_h265_enc_create_output_buffer (GstAmfEncoder *
+    encoder, gpointer data, gboolean * sync_point);
+static gboolean gst_amf_h265_enc_check_reconfigure (GstAmfEncoder * encoder);
+
+/* Class init for the dynamically registered type: installs properties
+ * (ranges come from the device caps in cdata), sets element metadata and
+ * pad templates, and wires up the GstAmfEncoder vfuncs. cdata is owned
+ * here and freed at the end. */
+static void
+gst_amf_h265_enc_class_init (GstAmfH265EncClass * klass, gpointer data)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstAmfEncoderClass *amf_class = GST_AMF_ENCODER_CLASS (klass);
+  GstAmfH265EncClassData *cdata = (GstAmfH265EncClassData *) data;
+  GstAmfH265EncDeviceCaps *dev_caps = &cdata->dev_caps;
+  GParamFlags param_flags = (GParamFlags) (G_PARAM_READWRITE |
+      GST_PARAM_MUTABLE_PLAYING | G_PARAM_STATIC_STRINGS);
+
+  parent_class = (GTypeClass *) g_type_class_peek_parent (klass);
+
+  object_class->finalize = gst_amf_h265_enc_finalize;
+  object_class->set_property = gst_amf_h265_enc_set_property;
+  object_class->get_property = gst_amf_h265_enc_get_property;
+
+  g_object_class_install_property (object_class, PROP_ADAPTER_LUID,
+      g_param_spec_int64 ("adapter-luid", "Adapter LUID",
+          "DXGI Adapter LUID (Locally Unique Identifier) of associated GPU",
+          G_MININT64, G_MAXINT64, cdata->adapter_luid, param_flags));
+  g_object_class_install_property (object_class, PROP_USAGE,
+      g_param_spec_enum ("usage", "Usage",
+          "Target usage", GST_TYPE_AMF_H265_ENC_USAGE,
+          DEFAULT_USAGE, param_flags));
+  g_object_class_install_property (object_class, PROP_RATE_CONTROL,
+      g_param_spec_enum ("rate-control", "Rate Control",
+          "Rate Control Method", GST_TYPE_AMF_H265_ENC_RATE_CONTROL,
+          DEFAULT_RATE_CONTROL, param_flags));
+  g_object_class_install_property (object_class, PROP_PRESET,
+      g_param_spec_enum ("preset", "Preset",
+          "Preset", GST_TYPE_AMF_H265_ENC_PRESET, DEFAULT_PRESET, param_flags));
+  g_object_class_install_property (object_class, PROP_BITRATE,
+      g_param_spec_uint ("bitrate", "Bitrate",
+          "Target bitrate in kbit/sec (0: USAGE default)",
+          0, G_MAXINT / 1000, DEFAULT_BITRATE, param_flags));
+  g_object_class_install_property (object_class, PROP_MAX_BITRATE,
+      g_param_spec_uint ("max-bitrate", "Max Bitrate",
+          "Maximum bitrate in kbit/sec (0: USAGE default)",
+          0, G_MAXINT / 1000, DEFAULT_MAX_BITRATE, param_flags));
+  g_object_class_install_property (object_class, PROP_GOP_SIZE,
+      g_param_spec_uint ("gop-size", "GOP Size",
+          "Number of pictures within a GOP",
+          (guint) dev_caps->min_gop_size, (guint) dev_caps->max_gop_size,
+          (guint) dev_caps->default_gop_size, param_flags));
+  g_object_class_install_property (object_class, PROP_MIN_QP_I,
+      g_param_spec_int ("min-qp-i", "Min QP I",
+          "Minimum allowed QP value for I frames (-1: USAGE default)",
+          -1, 51, DEFAULT_MIN_MAX_QP, param_flags));
+  g_object_class_install_property (object_class, PROP_MAX_QP_I,
+      g_param_spec_int ("max-qp-i", "Max QP I",
+          "Maximum allowed QP value for I frames (-1: USAGE default)",
+          -1, 51, DEFAULT_MIN_MAX_QP, param_flags));
+  g_object_class_install_property (object_class, PROP_MIN_QP_P,
+      g_param_spec_int ("min-qp-p", "Min QP P",
+          "Minimum allowed QP value for P frames (-1: USAGE default)",
+          -1, 51, DEFAULT_MIN_MAX_QP, param_flags));
+  g_object_class_install_property (object_class, PROP_MAX_QP_P,
+      g_param_spec_int ("max-qp-p", "Max QP P",
+          "Maximum allowed QP value for P frames (-1: USAGE default)",
+          -1, 51, DEFAULT_MIN_MAX_QP, param_flags));
+  g_object_class_install_property (object_class, PROP_QP_I,
+      g_param_spec_uint ("qp-i", "QP I",
+          "Constant QP for I frames", 0, 51,
+          (guint) dev_caps->default_qp_i, param_flags));
+  g_object_class_install_property (object_class, PROP_QP_P,
+      g_param_spec_uint ("qp-p", "QP P",
+          "Constant QP for P frames", 0, 51,
+          (guint) dev_caps->default_qp_p, param_flags));
+  g_object_class_install_property (object_class, PROP_REF_FRAMES,
+      g_param_spec_uint ("ref-frames", "Reference Frames",
+          "Number of reference frames", (guint) dev_caps->min_ref_frames,
+          (guint) dev_caps->max_ref_frames,
+          (guint) dev_caps->min_ref_frames, param_flags));
+  g_object_class_install_property (object_class, PROP_AUD,
+      g_param_spec_boolean ("aud", "AUD",
+          "Use AU (Access Unit) delimiter", DEFAULT_AUD, param_flags));
+
+  gst_element_class_set_metadata (element_class,
+      "AMD AMF H.265 Video Encoder",
+      "Codec/Encoder/Video/Hardware",
+      "Encode H.265 video streams using AMF API",
+      "Seungha Yang <seungha@centricular.com>");
+
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+          cdata->sink_caps));
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+          cdata->src_caps));
+
+  amf_class->set_format = GST_DEBUG_FUNCPTR (gst_amf_h265_enc_set_format);
+  amf_class->set_output_state =
+      GST_DEBUG_FUNCPTR (gst_amf_h265_enc_set_output_state);
+  amf_class->set_surface_prop =
+      GST_DEBUG_FUNCPTR (gst_amf_h265_enc_set_surfrace_prop);
+  amf_class->create_output_buffer =
+      GST_DEBUG_FUNCPTR (gst_amf_h265_enc_create_output_buffer);
+  amf_class->check_reconfigure =
+      GST_DEBUG_FUNCPTR (gst_amf_h265_enc_check_reconfigure);
+
+  klass->dev_caps = cdata->dev_caps;
+  klass->adapter_luid = cdata->adapter_luid;
+
+  /* cdata and its caps are consumed here */
+  gst_caps_unref (cdata->sink_caps);
+  gst_caps_unref (cdata->src_caps);
+  g_free (cdata);
+}
+
+/* Instance init: registers the adapter LUID / codec ID with the base
+ * class and seeds all properties with their defaults (device-derived
+ * defaults come from the class's dev_caps) */
+static void
+gst_amf_h265_enc_init (GstAmfH265Enc * self)
+{
+  GstAmfH265EncClass *klass = GST_AMF_H265_ENC_GET_CLASS (self);
+  GstAmfH265EncDeviceCaps *dev_caps = &klass->dev_caps;
+
+  gst_amf_encoder_set_subclass_data (GST_AMF_ENCODER (self),
+      klass->adapter_luid, AMFVideoEncoder_HEVC);
+
+  g_mutex_init (&self->prop_lock);
+
+  self->usage = DEFAULT_USAGE;
+  self->rate_control = DEFAULT_RATE_CONTROL;
+  self->preset = DEFAULT_PRESET;
+  self->bitrate = DEFAULT_BITRATE;
+  self->max_bitrate = DEFAULT_MAX_BITRATE;
+  self->gop_size = (guint) dev_caps->default_gop_size;
+  self->min_qp_i = DEFAULT_MIN_MAX_QP;
+  self->max_qp_i = DEFAULT_MIN_MAX_QP;
+  self->min_qp_p = DEFAULT_MIN_MAX_QP;
+  self->max_qp_p = DEFAULT_MIN_MAX_QP;
+  self->qp_i = (guint) dev_caps->default_qp_i;
+  self->qp_p = (guint) dev_caps->default_qp_p;
+  self->ref_frames = (guint) dev_caps->min_ref_frames;
+  self->aud = DEFAULT_AUD;
+}
+
+/* Releases instance resources (the property mutex) and chains up */
+static void
+gst_amf_h265_enc_finalize (GObject * object)
+{
+  GstAmfH265Enc *self = GST_AMF_H265_ENC (object);
+
+  g_mutex_clear (&self->prop_lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Store an int property value and flag reconfiguration on change;
+ * caller holds prop_lock */
+static void
+update_int (GstAmfH265Enc * self, gint * old_val, const GValue * new_val)
+{
+  gint requested = g_value_get_int (new_val);
+
+  if (*old_val != requested) {
+    *old_val = requested;
+    self->property_updated = TRUE;
+  }
+}
+
+/* Store a uint property value and flag reconfiguration on change;
+ * caller holds prop_lock */
+static void
+update_uint (GstAmfH265Enc * self, guint * old_val, const GValue * new_val)
+{
+  guint requested = g_value_get_uint (new_val);
+
+  if (*old_val != requested) {
+    *old_val = requested;
+    self->property_updated = TRUE;
+  }
+}
+
+/* Store an enum property value and flag reconfiguration on change;
+ * caller holds prop_lock */
+static void
+update_enum (GstAmfH265Enc * self, gint * old_val, const GValue * new_val)
+{
+  gint requested = g_value_get_enum (new_val);
+
+  if (*old_val != requested) {
+    *old_val = requested;
+    self->property_updated = TRUE;
+  }
+}
+
+/* Store a boolean property value and flag reconfiguration on change;
+ * caller holds prop_lock */
+static void
+update_bool (GstAmfH265Enc * self, gboolean * old_val, const GValue * new_val)
+{
+  gboolean requested = g_value_get_boolean (new_val);
+
+  if (*old_val != requested) {
+    *old_val = requested;
+    self->property_updated = TRUE;
+  }
+}
+
+/* GObject property setter: all writes happen under prop_lock; the
+ * update_* helpers raise property_updated so the encoder reconfigures */
+static void
+gst_amf_h265_enc_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAmfH265Enc *self = GST_AMF_H265_ENC (object);
+
+  g_mutex_lock (&self->prop_lock);
+  switch (prop_id) {
+    case PROP_USAGE:
+      update_enum (self, &self->usage, value);
+      break;
+    case PROP_RATE_CONTROL:
+      update_enum (self, &self->rate_control, value);
+      break;
+    case PROP_PRESET:
+      update_enum (self, &self->preset, value);
+      break;
+    case PROP_BITRATE:
+      update_uint (self, &self->bitrate, value);
+      break;
+    case PROP_MAX_BITRATE:
+      update_uint (self, &self->max_bitrate, value);
+      break;
+    case PROP_GOP_SIZE:
+      update_uint (self, &self->gop_size, value);
+      break;
+    case PROP_MIN_QP_I:
+      update_int (self, &self->min_qp_i, value);
+      break;
+    case PROP_MAX_QP_I:
+      update_int (self, &self->max_qp_i, value);
+      break;
+    case PROP_MIN_QP_P:
+      update_int (self, &self->min_qp_p, value);
+      break;
+    case PROP_MAX_QP_P:
+      update_int (self, &self->max_qp_p, value);
+      break;
+    case PROP_QP_I:
+      update_uint (self, &self->qp_i, value);
+      break;
+    case PROP_QP_P:
+      update_uint (self, &self->qp_p, value);
+      break;
+    case PROP_REF_FRAMES:
+      update_uint (self, &self->ref_frames, value);
+      break;
+    case PROP_AUD:
+      /* This is per frame property, don't need to reset encoder */
+      self->aud = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  g_mutex_unlock (&self->prop_lock);
+}
+
+/* GObject property getter. adapter-luid is a class-level (per device)
+ * value, everything else is read from the instance.
+ * NOTE(review): reads are done without taking prop_lock, unlike the
+ * setter -- presumably acceptable for int-sized fields, confirm against
+ * the other AMF/GStreamer encoders. */
+static void
+gst_amf_h265_enc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAmfH265EncClass *klass = GST_AMF_H265_ENC_GET_CLASS (object);
+ GstAmfH265Enc *self = GST_AMF_H265_ENC (object);
+
+ switch (prop_id) {
+ case PROP_ADAPTER_LUID:
+ g_value_set_int64 (value, klass->adapter_luid);
+ break;
+ case PROP_USAGE:
+ g_value_set_enum (value, self->usage);
+ break;
+ case PROP_RATE_CONTROL:
+ g_value_set_enum (value, self->rate_control);
+ break;
+ case PROP_PRESET:
+ g_value_set_enum (value, self->preset);
+ break;
+ case PROP_BITRATE:
+ g_value_set_uint (value, self->bitrate);
+ break;
+ case PROP_MAX_BITRATE:
+ g_value_set_uint (value, self->max_bitrate);
+ break;
+ case PROP_GOP_SIZE:
+ g_value_set_uint (value, self->gop_size);
+ break;
+ case PROP_MIN_QP_I:
+ g_value_set_int (value, self->min_qp_i);
+ break;
+ case PROP_MAX_QP_I:
+ g_value_set_int (value, self->max_qp_i);
+ break;
+ case PROP_MIN_QP_P:
+ g_value_set_int (value, self->min_qp_p);
+ break;
+ case PROP_MAX_QP_P:
+ g_value_set_int (value, self->max_qp_p);
+ break;
+ case PROP_QP_I:
+ g_value_set_uint (value, self->qp_i);
+ break;
+ case PROP_QP_P:
+ g_value_set_uint (value, self->qp_p);
+ break;
+ case PROP_REF_FRAMES:
+ g_value_set_uint (value, self->ref_frames);
+ break;
+ case PROP_AUD:
+ g_value_set_boolean (value, self->aud);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Configures the AMF HEVC component for the negotiated stream.
+ *
+ * Static stream properties (frame size, usage, profile, ref frames,
+ * aspect ratio, nominal range) are applied before Init(); rate-control
+ * related properties are applied afterwards. prop_lock is held for the
+ * entire setup so concurrent property changes cannot be half-applied,
+ * and property_updated is cleared on success.
+ *
+ * NOTE(review): the "preset" property is not applied here -- confirm it
+ * is applied elsewhere (e.g. AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET).
+ *
+ * Returns FALSE when any mandatory property cannot be set. */
+static gboolean
+gst_amf_h265_enc_set_format (GstAmfEncoder * encoder,
+ GstVideoCodecState * state, gpointer component)
+{
+ GstAmfH265Enc *self = GST_AMF_H265_ENC (encoder);
+ AMFComponent *comp = (AMFComponent *) component;
+ GstVideoInfo *info = &state->info;
+ AMF_RESULT result;
+ AMFRate framerate;
+ AMFRatio aspect_ratio;
+ amf_int64 int64_val;
+ AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_ENUM rc_mode;
+
+ g_mutex_lock (&self->prop_lock);
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_FRAMESIZE,
+ AMFConstructSize (info->width, info->height));
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set frame size, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_USAGE,
+ (amf_int64) self->usage);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set usage, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ /* Only main profile is advertised in the src caps, see
+ * gst_amf_h265_enc_set_output_state() */
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_PROFILE,
+ (amf_int64) AMF_VIDEO_ENCODER_HEVC_PROFILE_MAIN);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set profile, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_MAX_NUM_REFRAMES,
+ (amf_int64) self->ref_frames);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set ref-frames, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+
+ aspect_ratio = AMFConstructRatio (info->par_n, info->par_d);
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_ASPECT_RATIO,
+ aspect_ratio);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set aspect ratio, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ if (info->colorimetry.range == GST_VIDEO_COLOR_RANGE_0_255)
+ int64_val = AMF_VIDEO_ENCODER_HEVC_NOMINAL_RANGE_FULL;
+ else
+ int64_val = AMF_VIDEO_ENCODER_HEVC_NOMINAL_RANGE_STUDIO;
+
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_NOMINAL_RANGE, int64_val);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set full-range-color, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ result = comp->Init (AMF_SURFACE_NV12, info->width, info->height);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to init component, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ /* UNKNOWN means "default": leave the driver's choice untouched */
+ if (self->rate_control != AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_UNKNOWN) {
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD,
+ (amf_int64) self->rate_control);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set rate-control, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+ }
+
+ /* Read back the effective rate-control mode so the bitrate setup
+ * below matches what the driver will actually use */
+ result = comp->GetProperty (AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD,
+ &int64_val);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to get rate-control method, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ rc_mode = (AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_ENUM) int64_val;
+ /* Negative min/max QP values mean "keep the driver default" */
+ if (self->min_qp_i >= 0) {
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_MIN_QP_I,
+ (amf_int64) self->min_qp_i);
+ }
+ if (self->max_qp_i >= 0) {
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_MAX_QP_I,
+ (amf_int64) self->max_qp_i);
+ }
+ if (self->min_qp_p >= 0) {
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_MIN_QP_P,
+ (amf_int64) self->min_qp_p);
+ }
+ if (self->max_qp_p >= 0) {
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_MAX_QP_P,
+ (amf_int64) self->max_qp_p);
+ }
+
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_QP_I, (amf_int64) self->qp_i);
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_QP_P, (amf_int64) self->qp_p);
+
+ /* Properties are in kbit/s, AMF expects bit/s */
+ switch (rc_mode) {
+ case AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_CBR:
+ if (self->bitrate > 0) {
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_TARGET_BITRATE,
+ (amf_int64) self->bitrate * 1000);
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_PEAK_BITRATE,
+ (amf_int64) self->bitrate * 1000);
+ }
+ break;
+ case AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR:
+ case AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_LATENCY_CONSTRAINED_VBR:
+ if (self->bitrate > 0) {
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_TARGET_BITRATE,
+ (amf_int64) self->bitrate * 1000);
+ }
+ if (self->max_bitrate > 0) {
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_PEAK_BITRATE,
+ (amf_int64) self->max_bitrate * 1000);
+ }
+ break;
+ default:
+ break;
+ }
+
+ /* Disable frame skip for now, need investigation the behavior */
+ result =
+ comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_SKIP_FRAME_ENABLE,
+ (amf_bool) false);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to disable skip frame, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ /* Fall back to 25/1 when upstream did not provide a framerate */
+ if (info->fps_n > 0 && info->fps_d) {
+ framerate = AMFConstructRate (info->fps_n, info->fps_d);
+ } else {
+ framerate = AMFConstructRate (25, 1);
+ }
+
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_FRAMERATE, framerate);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set frame rate, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ result = comp->SetProperty (AMF_VIDEO_ENCODER_HEVC_GOP_SIZE,
+ (amf_int64) self->gop_size);
+ if (result != AMF_OK) {
+ GST_ERROR_OBJECT (self, "Failed to set gop-size, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ goto error;
+ }
+
+ /* All pending property changes have been applied */
+ self->property_updated = FALSE;
+ g_mutex_unlock (&self->prop_lock);
+
+ return TRUE;
+
+error:
+ g_mutex_unlock (&self->prop_lock);
+
+ return FALSE;
+}
+
+/* Sets the downstream output state: byte-stream HEVC, AU aligned, main
+ * profile -- matching the profile forced in set_format(). Also attaches
+ * an encoder-name tag. @component is unused here. Always returns TRUE. */
+static gboolean
+gst_amf_h265_enc_set_output_state (GstAmfEncoder * encoder,
+ GstVideoCodecState * state, gpointer component)
+{
+ GstAmfH265Enc *self = GST_AMF_H265_ENC (encoder);
+ GstVideoCodecState *output_state;
+ GstCaps *caps;
+ GstTagList *tags;
+
+ caps = gst_caps_from_string ("video/x-h265, alignment = (string) au"
+ ", stream-format = (string) byte-stream, profile = (string) main");
+ output_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (self),
+ caps, state);
+
+ GST_INFO_OBJECT (self, "Output caps: %" GST_PTR_FORMAT, output_state->caps);
+ gst_video_codec_state_unref (output_state);
+
+ tags = gst_tag_list_new_empty ();
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER,
+ "amfh265enc", nullptr);
+
+ gst_video_encoder_merge_tags (GST_VIDEO_ENCODER (encoder),
+ tags, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+
+ return TRUE;
+}
+
+/* Per-frame surface configuration, called before each input surface is
+ * submitted to the AMF component.
+ *
+ * Requests an IDR picture when upstream forced a keyframe, and toggles
+ * AUD NAL insertion according to the "aud" property. Failures are only
+ * logged; the frame is still encoded. Always returns TRUE. */
+static gboolean
+gst_amf_h265_enc_set_surfrace_prop (GstAmfEncoder * encoder,
+ GstVideoCodecFrame * frame, gpointer surface)
+{
+ GstAmfH265Enc *self = GST_AMF_H265_ENC (encoder);
+ AMFSurface *surf = (AMFSurface *) surface;
+ AMF_RESULT result;
+ amf_bool insert_aud = self->aud ? true : false;
+
+ if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
+ amf_int64 type = (amf_int64) AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_IDR;
+ result = surf->SetProperty (AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE,
+ type);
+ if (result != AMF_OK) {
+ GST_WARNING_OBJECT (encoder, "Failed to set force idr, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ }
+ }
+
+ /* Pass the flag by value: passing &insert_aud would store the pointer
+ * in the AMF variant instead of the boolean itself */
+ result = surf->SetProperty (AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, insert_aud);
+ if (result != AMF_OK) {
+ GST_WARNING_OBJECT (encoder, "Failed to set AUD, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ }
+
+ return TRUE;
+}
+
+/* Copies the encoded bitstream held by the AMF output buffer into a
+ * newly allocated GstBuffer.
+ *
+ * *sync_point is set to TRUE for IDR output; it is only ever set, never
+ * cleared here (NOTE(review): assumes the caller pre-initializes it to
+ * FALSE -- confirm in gstamfencoder.cpp). Returns nullptr when the
+ * component produced no data. */
+static GstBuffer *
+gst_amf_h265_enc_create_output_buffer (GstAmfEncoder * encoder,
+ gpointer data, gboolean * sync_point)
+{
+ GstAmfH265Enc *self = GST_AMF_H265_ENC (encoder);
+ AMFBuffer *amf_buf = (AMFBuffer *) data;
+ GstBuffer *buf;
+ guint8 *data_ptr;
+ gsize data_size;
+ amf_int64 output_type = 0;
+ AMF_RESULT result;
+
+ data_ptr = (guint8 *) amf_buf->GetNative ();
+ data_size = amf_buf->GetSize ();
+
+ if (!data_ptr || data_size == 0) {
+ GST_WARNING_OBJECT (self, "Empty buffer");
+ return nullptr;
+ }
+
+ /* AMF owns the output memory; copy so the GstBuffer outlives it */
+ buf = gst_buffer_new_memdup (data_ptr, data_size);
+
+ result = amf_buf->GetProperty (AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE,
+ &output_type);
+ if (result == AMF_OK &&
+ output_type == (amf_int64) AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
+ *sync_point = TRUE;
+ }
+
+ return buf;
+}
+
+/* Reports whether any encoder property changed since the last
+ * set_format(); TRUE tells the base class to re-init the component. */
+static gboolean
+gst_amf_h265_enc_check_reconfigure (GstAmfEncoder * encoder)
+{
+ GstAmfH265Enc *self = GST_AMF_H265_ENC (encoder);
+ gboolean ret;
+
+ g_mutex_lock (&self->prop_lock);
+ ret = self->property_updated;
+ g_mutex_unlock (&self->prop_lock);
+
+ return ret;
+}
+
+/* Queries the AMF HEVC component for its capabilities and builds the
+ * class data (sink/src caps, device caps, adapter LUID) used to
+ * register a per-device element subclass.
+ *
+ * Returns nullptr when a mandatory feature (NV12 input, D3D11 memory)
+ * is not supported by this device. The returned caps are flagged
+ * MAY_BE_LEAKED because the class data lives for the process lifetime. */
+static GstAmfH265EncClassData *
+gst_amf_h265_enc_create_class_data (GstD3D11Device * device,
+ AMFComponent * comp)
+{
+ AMF_RESULT result;
+ GstAmfH265EncDeviceCaps dev_caps = { 0, };
+ std::string sink_caps_str;
+ std::string src_caps_str;
+ std::string resolution_str;
+ GstAmfH265EncClassData *cdata;
+ AMFCapsPtr amf_caps;
+ AMFIOCapsPtr in_iocaps;
+ AMFIOCapsPtr out_iocaps;
+ amf_int32 in_min_width = 0, in_max_width = 0;
+ amf_int32 in_min_height = 0, in_max_height = 0;
+ amf_int32 out_min_width = 0, out_max_width = 0;
+ amf_int32 out_min_height = 0, out_max_height = 0;
+ amf_int32 num_val;
+ gboolean have_nv12 = FALSE;
+ gboolean d3d11_supported = FALSE;
+ gint min_width, max_width, min_height, max_height;
+ GstCaps *sink_caps;
+ GstCaps *system_caps;
+
+ result = comp->GetCaps (&amf_caps);
+ if (result != AMF_OK) {
+ GST_WARNING_OBJECT (device, "Unable to get caps");
+ return nullptr;
+ }
+
+ result = amf_caps->GetInputCaps (&in_iocaps);
+ if (result != AMF_OK) {
+ GST_WARNING_OBJECT (device, "Unable to get input io caps");
+ return nullptr;
+ }
+
+ in_iocaps->GetWidthRange (&in_min_width, &in_max_width);
+ in_iocaps->GetHeightRange (&in_min_height, &in_max_height);
+ dev_caps.valign = in_iocaps->GetVertAlign ();
+
+ GST_INFO_OBJECT (device, "Input width: [%d, %d], height: [%d, %d], "
+ "valign: %d", in_min_width, in_max_width, in_min_height, in_max_height,
+ dev_caps.valign);
+
+ num_val = in_iocaps->GetNumOfFormats ();
+ GST_LOG_OBJECT (device, "Input format count: %d", num_val);
+ /* amf_int32 index to match GetNumOfFormats() and avoid a
+ * signed/unsigned comparison */
+ for (amf_int32 i = 0; i < num_val; i++) {
+ AMF_SURFACE_FORMAT format;
+ amf_bool native;
+
+ result = in_iocaps->GetFormatAt (i, &format, &native);
+ if (result != AMF_OK)
+ continue;
+
+ GST_INFO_OBJECT (device, "Format %d supported, native %d", format, native);
+ if (format == AMF_SURFACE_NV12)
+ have_nv12 = TRUE;
+ }
+
+ if (!have_nv12) {
+ GST_WARNING_OBJECT (device, "NV12 is not supported");
+ return nullptr;
+ }
+
+ num_val = in_iocaps->GetNumOfMemoryTypes ();
+ GST_LOG_OBJECT (device, "Input memory type count: %d", num_val);
+ for (amf_int32 i = 0; i < num_val; i++) {
+ AMF_MEMORY_TYPE type;
+ amf_bool native;
+
+ result = in_iocaps->GetMemoryTypeAt (i, &type, &native);
+ if (result != AMF_OK)
+ continue;
+
+ GST_INFO_OBJECT (device,
+ "MemoryType %d supported, native %d", type, native);
+ if (type == AMF_MEMORY_DX11)
+ d3d11_supported = TRUE;
+ }
+
+ if (!d3d11_supported) {
+ GST_WARNING_OBJECT (device, "D3D11 is not supported");
+ return nullptr;
+ }
+
+ result = amf_caps->GetOutputCaps (&out_iocaps);
+ if (result != AMF_OK) {
+ GST_WARNING_OBJECT (device, "Unable to get output io caps");
+ return nullptr;
+ }
+
+ out_iocaps->GetWidthRange (&out_min_width, &out_max_width);
+ out_iocaps->GetHeightRange (&out_min_height, &out_max_height);
+
+ GST_INFO_OBJECT (device, "Output width: [%d, %d], height: [%d, %d]",
+ out_min_width, out_max_width, out_min_height, out_max_height);
+
+/* Copies a capability property into dev_caps when the driver reports it */
+#define QUERY_CAPS_PROP(prop,val) G_STMT_START { \
+ amf_int64 _val; \
+ result = amf_caps->GetProperty (prop, &_val); \
+ if (result == AMF_OK) { \
+ GST_INFO_OBJECT (device, G_STRINGIFY (val) ": %" G_GINT64_FORMAT, _val); \
+ dev_caps.val = _val; \
+ } \
+} G_STMT_END
+
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_MAX_BITRATE, max_bitrate);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_NUM_OF_STREAMS, num_of_streams);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_MAX_PROFILE, max_profile);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_MAX_TIER, max_tier);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_MAX_LEVEL, max_level);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_MIN_REFERENCE_FRAMES,
+ min_ref_frames);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_MAX_REFERENCE_FRAMES,
+ max_ref_frames);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_NUM_OF_HW_INSTANCES,
+ num_of_hw_instances);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_COLOR_CONVERSION,
+ color_conversion);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_PRE_ANALYSIS, pre_analysis);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_ROI, roi_map);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_HEVC_CAP_MAX_THROUGHPUT, max_throughput);
+ QUERY_CAPS_PROP (AMF_VIDEO_ENCODER_CAPS_HEVC_QUERY_TIMEOUT_SUPPORT,
+ query_timeout_support);
+#undef QUERY_CAPS_PROP
+
+/* Reads the driver default of a component property, falling back to
+ * default_val when the property info is unavailable */
+#define QUERY_DEFAULT_PROP(prop,val,default_val) G_STMT_START { \
+ const AMFPropertyInfo *pinfo = nullptr; \
+ result = comp->GetPropertyInfo (prop, &pinfo); \
+ if (result == AMF_OK && pinfo) { \
+ dev_caps.val = AMFVariantGetInt64 (&pinfo->defaultValue); \
+ GST_INFO_OBJECT (device, G_STRINGIFY (val) ": %" G_GINT64_FORMAT, \
+ dev_caps.val); \
+ } else { \
+ dev_caps.val = default_val; \
+ } \
+} G_STMT_END
+
+ QUERY_DEFAULT_PROP (AMF_VIDEO_ENCODER_HEVC_QP_I, default_qp_i, 26);
+ QUERY_DEFAULT_PROP (AMF_VIDEO_ENCODER_HEVC_QP_P, default_qp_p, 26);
+#undef QUERY_DEFAULT_PROP
+
+ {
+ const AMFPropertyInfo *pinfo = nullptr;
+ result = comp->GetPropertyInfo (AMF_VIDEO_ENCODER_HEVC_GOP_SIZE, &pinfo);
+ if (result == AMF_OK && pinfo) {
+ dev_caps.default_gop_size = AMFVariantGetInt64 (&pinfo->defaultValue);
+ dev_caps.min_gop_size = AMFVariantGetInt64 (&pinfo->minValue);
+ dev_caps.max_gop_size = AMFVariantGetInt64 (&pinfo->maxValue);
+ GST_INFO_OBJECT (device, "gop-size: default %d, min %d, max %d",
+ (guint) dev_caps.default_gop_size,
+ (guint) dev_caps.min_gop_size, (guint) dev_caps.max_gop_size);
+ } else {
+ dev_caps.default_gop_size = 30;
+ dev_caps.min_gop_size = 0;
+ dev_caps.max_gop_size = G_MAXINT;
+ }
+ }
+
+ min_width = MAX (in_min_width, 1);
+ max_width = in_max_width;
+ if (max_width == 0) {
+ GST_WARNING_OBJECT (device, "Unknown max width, assuming 4096");
+ max_width = 4096;
+ }
+
+ min_height = MAX (in_min_height, 1);
+ max_height = in_max_height;
+ if (max_height == 0) {
+ GST_WARNING_OBJECT (device, "Unknown max height, assuming 4096");
+ max_height = 4096;
+ }
+
+ resolution_str = "width = (int) [ " + std::to_string (min_width)
+ + ", " + std::to_string (max_width) + " ]";
+ resolution_str += ", height = (int) [ " + std::to_string (min_height)
+ + ", " + std::to_string (max_height) + " ]";
+
+ sink_caps_str = "video/x-raw, format = (string) NV12, " + resolution_str +
+ ", interlace-mode = (string) progressive";
+ src_caps_str = "video/x-h265, " + resolution_str + ", profile = (string) main"
+ ", stream-format = (string) byte-stream, alignment = (string) au";
+
+ /* D3D11 memory caps first, plain system memory as fallback */
+ system_caps = gst_caps_from_string (sink_caps_str.c_str ());
+ sink_caps = gst_caps_copy (system_caps);
+ gst_caps_set_features (sink_caps, 0,
+ gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr));
+ gst_caps_append (sink_caps, system_caps);
+
+ cdata = g_new0 (GstAmfH265EncClassData, 1);
+ cdata->sink_caps = sink_caps;
+ cdata->src_caps = gst_caps_from_string (src_caps_str.c_str ());
+ cdata->dev_caps = dev_caps;
+ g_object_get (device, "adapter-luid", &cdata->adapter_luid, nullptr);
+
+ GST_MINI_OBJECT_FLAG_SET (cdata->sink_caps,
+ GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+ GST_MINI_OBJECT_FLAG_SET (cdata->src_caps,
+ GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+
+ GST_DEBUG_OBJECT (device, "Sink caps %" GST_PTR_FORMAT, cdata->sink_caps);
+ GST_DEBUG_OBJECT (device, "Src caps %" GST_PTR_FORMAT, cdata->src_caps);
+
+ return cdata;
+}
+
+/* Registers an amfh265enc element for @device. A uniquely named GType
+ * subclass is created per adapter: the first adapter keeps the plain
+ * "amfh265enc" name and the requested rank, additional adapters get an
+ * indexed name and a decremented rank so the primary GPU is preferred. */
+void
+gst_amf_h265_enc_register_d3d11 (GstPlugin * plugin, GstD3D11Device * device,
+ gpointer context, guint rank)
+{
+ GstAmfH265EncClassData *cdata;
+ AMFContext *amf_context = (AMFContext *) context;
+ AMFFactory *factory = (AMFFactory *) gst_amf_get_factory ();
+ AMFComponentPtr comp;
+ AMF_RESULT result;
+
+ GST_DEBUG_CATEGORY_INIT (gst_amf_h265_enc_debug, "amfh265enc", 0,
+ "amfh265enc");
+
+ /* Temporary component, used only for capability probing; released
+ * automatically by AMFComponentPtr */
+ result = factory->CreateComponent (amf_context, AMFVideoEncoder_HEVC, &comp);
+ if (result != AMF_OK) {
+ GST_WARNING_OBJECT (device, "Failed to create component, result %"
+ GST_AMF_RESULT_FORMAT, GST_AMF_RESULT_ARGS (result));
+ return;
+ }
+
+ cdata = gst_amf_h265_enc_create_class_data (device, comp.GetPtr ());
+ if (!cdata)
+ return;
+
+ GType type;
+ gchar *type_name;
+ gchar *feature_name;
+ GTypeInfo type_info = {
+ sizeof (GstAmfH265EncClass),
+ nullptr,
+ nullptr,
+ (GClassInitFunc) gst_amf_h265_enc_class_init,
+ nullptr,
+ cdata,
+ sizeof (GstAmfH265Enc),
+ 0,
+ (GInstanceInitFunc) gst_amf_h265_enc_init,
+ };
+
+ type_name = g_strdup ("GstAmfH265Enc");
+ feature_name = g_strdup ("amfh265enc");
+
+ /* Find a type/feature name not taken by a previously seen adapter */
+ gint index = 0;
+ while (g_type_from_name (type_name)) {
+ index++;
+ g_free (type_name);
+ g_free (feature_name);
+ type_name = g_strdup_printf ("GstAmfH265Device%dEnc", index);
+ feature_name = g_strdup_printf ("amfh265device%denc", index);
+ }
+
+ type = g_type_register_static (GST_TYPE_AMF_ENCODER, type_name,
+ &type_info, (GTypeFlags) 0);
+
+ /* Secondary device gets a lower rank than the primary one */
+ if (rank > 0 && index != 0)
+ rank--;
+
+ if (!gst_element_register (plugin, feature_name, rank, type))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+
+ g_free (type_name);
+ g_free (feature_name);
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#include "gstamfencoder.h"
+#include <gst/d3d11/gstd3d11.h>
+
+G_BEGIN_DECLS
+
+void gst_amf_h265_enc_register_d3d11 (GstPlugin * plugin,
+ GstD3D11Device * device,
+ gpointer context,
+ guint rank);
+
+G_END_DECLS
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <core/Factory.h>
+#include "gstamfutils.h"
+#include <gmodule.h>
+
+using namespace amf;
+
+/* Cached AMF factory, resolved once by gst_amf_load_library() */
+AMFFactory *_factory = nullptr;
+/* Result of the one-shot load, returned by gst_amf_init_once() */
+static gboolean loaded = FALSE;
+
+/* Opens the AMF runtime library and resolves its init entry point to
+ * populate _factory.
+ *
+ * On success the GModule handle is intentionally never closed: the
+ * runtime must stay mapped for the lifetime of the process so that
+ * _factory remains valid. Returns FALSE when the runtime is absent or
+ * incompatible. */
+static gboolean
+gst_amf_load_library (void)
+{
+ AMF_RESULT result;
+ GModule *amf_module = nullptr;
+ AMFInit_Fn init_func = nullptr;
+
+ amf_module = g_module_open (AMF_DLL_NAMEA, G_MODULE_BIND_LAZY);
+ if (!amf_module)
+ return FALSE;
+
+ if (!g_module_symbol (amf_module, AMF_INIT_FUNCTION_NAME, (gpointer *)
+ & init_func)) {
+ g_module_close (amf_module);
+ amf_module = nullptr;
+
+ return FALSE;
+ }
+
+ /* Fails when the installed runtime is older than the SDK headers */
+ result = init_func (AMF_FULL_VERSION, &_factory);
+ if (result != AMF_OK) {
+ g_module_close (amf_module);
+ amf_module = nullptr;
+ _factory = nullptr;
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/* Thread-safe one-shot initialization of the AMF runtime. Safe to call
+ * repeatedly; returns whether the runtime was successfully loaded. */
+gboolean
+gst_amf_init_once (void)
+{
+ static gsize init_once = 0;
+
+ if (g_once_init_enter (&init_once)) {
+ loaded = gst_amf_load_library ();
+ g_once_init_leave (&init_once, 1);
+ }
+
+ return loaded;
+}
+
+/* Returns the cached AMFFactory as an opaque pointer, or nullptr when
+ * gst_amf_init_once() has not succeeded. */
+gpointer
+gst_amf_get_factory (void)
+{
+ return (gpointer) _factory;
+}
+
+/* Maps an AMF_RESULT error code to its symbolic name for logging.
+ * Returns a static string, "Unknown" for unrecognized codes; never
+ * returns nullptr. */
+const gchar *
+gst_amf_result_to_string (AMF_RESULT result)
+{
+#define CASE(err) \
+ case err: \
+ return G_STRINGIFY (err);
+
+ switch (result) {
+ CASE (AMF_OK);
+ CASE (AMF_FAIL);
+ CASE (AMF_UNEXPECTED);
+ CASE (AMF_ACCESS_DENIED);
+ CASE (AMF_INVALID_ARG);
+ CASE (AMF_OUT_OF_RANGE);
+ CASE (AMF_OUT_OF_MEMORY);
+ CASE (AMF_INVALID_POINTER);
+ CASE (AMF_NO_INTERFACE);
+ CASE (AMF_NOT_IMPLEMENTED);
+ CASE (AMF_NOT_SUPPORTED);
+ CASE (AMF_NOT_FOUND);
+ CASE (AMF_ALREADY_INITIALIZED);
+ CASE (AMF_NOT_INITIALIZED);
+ CASE (AMF_INVALID_FORMAT);
+ CASE (AMF_WRONG_STATE);
+ CASE (AMF_FILE_NOT_OPEN);
+ CASE (AMF_NO_DEVICE);
+ CASE (AMF_DIRECTX_FAILED);
+ CASE (AMF_OPENCL_FAILED);
+ CASE (AMF_GLX_FAILED);
+ CASE (AMF_XV_FAILED);
+ CASE (AMF_ALSA_FAILED);
+ CASE (AMF_EOF);
+ CASE (AMF_REPEAT);
+ CASE (AMF_INPUT_FULL);
+ CASE (AMF_RESOLUTION_CHANGED);
+ CASE (AMF_RESOLUTION_UPDATED);
+ CASE (AMF_INVALID_DATA_TYPE);
+ CASE (AMF_INVALID_RESOLUTION);
+ CASE (AMF_CODEC_NOT_SUPPORTED);
+ CASE (AMF_SURFACE_FORMAT_NOT_SUPPORTED);
+ CASE (AMF_SURFACE_MUST_BE_SHARED);
+ CASE (AMF_DECODER_NOT_PRESENT);
+ CASE (AMF_DECODER_SURFACE_ALLOCATION_FAILED);
+ CASE (AMF_DECODER_NO_FREE_SURFACES);
+ CASE (AMF_ENCODER_NOT_PRESENT);
+ CASE (AMF_DEM_ERROR);
+ CASE (AMF_DEM_PROPERTY_READONLY);
+ CASE (AMF_DEM_REMOTE_DISPLAY_CREATE_FAILED);
+ CASE (AMF_DEM_START_ENCODING_FAILED);
+ CASE (AMF_DEM_QUERY_OUTPUT_FAILED);
+ CASE (AMF_TAN_CLIPPING_WAS_REQUIRED);
+ CASE (AMF_TAN_UNSUPPORTED_VERSION);
+ CASE (AMF_NEED_MORE_INPUT);
+ default:
+ break;
+ }
+#undef CASE
+ return "Unknown";
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#include <gst/gst.h>
+#include <core/Result.h>
+
+G_BEGIN_DECLS
+
+gboolean gst_amf_init_once (void);
+
+gpointer gst_amf_get_factory (void);
+
+const gchar * gst_amf_result_to_string (AMF_RESULT result);
+#define GST_AMF_RESULT_FORMAT "s (%d)"
+#define GST_AMF_RESULT_ARGS(r) gst_amf_result_to_string (r), r
+
+G_END_DECLS
--- /dev/null
+amf_sources = [
+ 'gstamfencoder.cpp',
+ 'gstamfh264enc.cpp',
+ 'gstamfh265enc.cpp',
+ 'gstamfutils.cpp',
+ 'plugin.cpp',
+]
+
+amf_option = get_option('amfcodec')
+if amf_option.disabled()
+ subdir_done()
+endif
+
+platform_deps = []
+extra_args = ['-DGST_USE_UNSTABLE_API']
+if host_system == 'windows'
+ if not gstd3d11_dep.found()
+ if amf_option.enabled()
+ error('The amf was enabled explicitly, but required d3d11 was not found')
+ else
+ subdir_done()
+ endif
+ endif
+
+ # Detect UWP builds; the AMF runtime is desktop-only
+ code = '''
+ #include <windows.h>
+ #if !(WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP) && !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP))
+ #error "Not building for UWP"
+ #endif'''
+ if cc.compiles(code, name : 'building for UWP')
+ if amf_option.enabled()
+ error('amf plugin does not support UWP')
+ else
+ subdir_done()
+ endif
+ endif
+
+ # Encoder needs to do sleep() by API design
+ winmm_lib = cc.find_library('winmm', required: amf_option)
+ if not winmm_lib.found() or not cc.has_header('timeapi.h')
+ subdir_done()
+ endif
+
+ platform_deps += [gstd3d11_dep, winmm_lib]
+else
+ if amf_option.enabled()
+ error('amf plugin supports only Windows')
+ else
+ subdir_done()
+ endif
+endif
+
+# MinGW (notably the 32-bit compiler) warns about redundant declarations
+# in the AMF headers, so silence -Wredundant-decls for non-MSVC builds
+if cc.get_id() != 'msvc'
+ extra_args += cc.get_supported_arguments([
+ '-Wno-redundant-decls',
+ ])
+endif
+
+gstamfcodec = library('gstamfcodec',
+ amf_sources,
+ c_args : gst_plugins_bad_args + extra_args,
+ cpp_args : gst_plugins_bad_args + extra_args,
+ include_directories : [configinc, include_directories('include')],
+ dependencies : [gstbase_dep, gstvideo_dep, gstpbutils_dep, gstcodecparsers_dep, gmodule_dep] + platform_deps,
+ install : true,
+ install_dir : plugins_install_dir,
+)
+
+pkgconfig.generate(gstamfcodec, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstamfcodec]
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/d3d11/gstd3d11.h>
+#include <wrl.h>
+#include <core/Factory.h>
+#include <versionhelpers.h>
+#include "gstamfutils.h"
+#include "gstamfh264enc.h"
+#include "gstamfh265enc.h"
+
+/* *INDENT-OFF* */
+using namespace Microsoft::WRL;
+using namespace amf;
+/* *INDENT-ON* */
+
+/* Scans DXGI adapters for AMD GPUs and registers AMF H.264/H.265
+ * encoder elements for each usable device.
+ *
+ * Always returns TRUE: a system without the AMF runtime or without AMD
+ * hardware simply registers no features, which must not fail plugin
+ * loading. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ AMFFactory *amf_factory;
+ ComPtr < IDXGIFactory1 > factory;
+ HRESULT hr;
+
+ /* AMF requires Windows 8 or newer */
+ if (!IsWindows8OrGreater ())
+ return TRUE;
+
+ if (!gst_amf_init_once ())
+ return TRUE;
+
+ amf_factory = (AMFFactory *) gst_amf_get_factory ();
+ if (!amf_factory)
+ return TRUE;
+
+ hr = CreateDXGIFactory1 (IID_PPV_ARGS (&factory));
+ if (FAILED (hr))
+ return TRUE;
+
+ /* Enumerate AMD GPUs */
+ for (guint idx = 0;; idx++) {
+ ComPtr < IDXGIAdapter1 > adapter;
+ AMFContextPtr context;
+ DXGI_ADAPTER_DESC desc;
+ gint64 luid;
+ GstD3D11Device *device;
+ ID3D11Device *device_handle;
+ AMF_RESULT result;
+ D3D_FEATURE_LEVEL feature_level;
+ AMF_DX_VERSION dx_ver = AMF_DX11_1;
+
+ hr = factory->EnumAdapters1 (idx, &adapter);
+ if (FAILED (hr))
+ break;
+
+ hr = adapter->GetDesc (&desc);
+ if (FAILED (hr))
+ continue;
+
+ /* PCI vendor IDs: 0x1002 (ATI/AMD), 0x1022 (AMD) */
+ if (desc.VendorId != 0x1002 && desc.VendorId != 0x1022)
+ continue;
+
+ luid = gst_d3d11_luid_to_int64 (&desc.AdapterLuid);
+ device = gst_d3d11_device_new_for_adapter_luid (luid,
+ D3D11_CREATE_DEVICE_BGRA_SUPPORT);
+
+ if (!device)
+ continue;
+
+ /* Tell AMF which D3D11 feature level the device was created with */
+ device_handle = gst_d3d11_device_get_device_handle (device);
+ feature_level = device_handle->GetFeatureLevel ();
+ if (feature_level >= D3D_FEATURE_LEVEL_11_1)
+ dx_ver = AMF_DX11_1;
+ else
+ dx_ver = AMF_DX11_0;
+
+ result = amf_factory->CreateContext (&context);
+ if (result == AMF_OK)
+ result = context->InitDX11 (device_handle, dx_ver);
+
+ if (result == AMF_OK) {
+ gst_amf_h264_enc_register_d3d11 (plugin, device,
+ (gpointer) context.GetPtr (), GST_RANK_NONE);
+ gst_amf_h265_enc_register_d3d11 (plugin, device,
+ (gpointer) context.GetPtr (), GST_RANK_NONE);
+ }
+
+ gst_clear_object (&device);
+ }
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ amfcodec,
+ "AMD AMF Codec plugin",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
+subdir('amfcodec')
subdir('androidmedia')
subdir('applemedia')
subdir('asio')