GstVideoInfo info;
GstVideoFrame frame;
GstQsvMemoryType mem_type;
-
- /* For direct GPU access */
- GstMapInfo map_info;
+ GstMapFlags map_flags;
};
GST_DEFINE_MINI_OBJECT_TYPE (GstQsvFrame, gst_qsv_frame);
return frame->buffer;
}
+/**
+ * gst_qsv_frame_set_buffer:
+ * @frame: a #GstQsvFrame
+ * @buffer: a #GstBuffer, or %NULL to clear the stored buffer
+ *
+ * Replaces the #GstBuffer held by @frame. The call is rejected while the
+ * frame is mapped (map_count > 0) because the underlying memory may still
+ * be in use by the QSV runtime.
+ *
+ * NOTE(review): the old buffer is dropped via gst_clear_buffer() and
+ * @buffer is stored without taking an additional ref, which looks like
+ * (transfer full) semantics — confirm against callers.
+ *
+ * Returns: %TRUE if @buffer is now held by @frame
+ */
+gboolean
+gst_qsv_frame_set_buffer (GstQsvFrame * frame, GstBuffer * buffer)
+{
+ g_return_val_if_fail (GST_IS_QSV_FRAME (frame), FALSE);
+
+ g_mutex_lock (&frame->lock);
+ /* Same buffer already set: nothing to do */
+ if (frame->buffer == buffer) {
+ g_mutex_unlock (&frame->lock);
+ return TRUE;
+ }
+
+ /* Refuse to swap the backing memory while the frame is mapped */
+ if (frame->map_count > 0) {
+ GST_ERROR ("frame is locked");
+ g_mutex_unlock (&frame->lock);
+
+ return FALSE;
+ }
+
+ gst_clear_buffer (&frame->buffer);
+ frame->buffer = buffer;
+ g_mutex_unlock (&frame->lock);
+
+ return TRUE;
+}
+
struct _GstQsvAllocatorPrivate
{
GstAtomicQueue *queue;
mfxFrameAllocator allocator;
+ mfxFrameAllocResponse response;
+ guint16 extra_alloc_size;
+ gboolean dummy_alloc;
};
#define gst_qsv_allocator_parent_class parent_class
mfxHDL * handle);
static mfxStatus gst_qsv_allocator_free (mfxHDL pthis,
mfxFrameAllocResponse * response);
+static GstBuffer *gst_qsv_allocator_download_default (GstQsvAllocator * self,
+ const GstVideoInfo * info, gboolean force_copy, GstQsvFrame * frame,
+ GstBufferPool * pool);
static void
gst_qsv_allocator_class_init (GstQsvAllocatorClass * klass)
GObjectClass *object_class = G_OBJECT_CLASS (klass);
object_class->finalize = gst_qsv_allocator_finalize;
+
+ klass->download = GST_DEBUG_FUNCPTR (gst_qsv_allocator_download_default);
}
static void
gst_qsv_frame_unref (frame);
gst_atomic_queue_unref (priv->queue);
+ gst_qsv_allocator_free ((mfxHDL) self, &priv->response);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static mfxStatus
-gst_qsv_allocator_alloc_default (GstQsvAllocator * self,
+gst_qsv_allocator_alloc_default (GstQsvAllocator * self, gboolean dummy_alloc,
mfxFrameAllocRequest * request, mfxFrameAllocResponse * response)
{
GstQsvFrame **mids = nullptr;
GstVideoInfo info;
+ GstVideoAlignment align;
GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
-
- GST_TRACE_OBJECT (self, "Alloc");
+ GstBufferPool *pool;
+ GstCaps *caps;
+ GstStructure *config;
/* Something unexpected and went wrong */
if ((request->Type & MFX_MEMTYPE_SYSTEM_MEMORY) == 0) {
response->NumFrameActual = request->NumFrameSuggested;
gst_video_info_set_format (&info,
- format, request->Info.Width, request->Info.Height);
+ format, request->Info.CropW, request->Info.CropH);
+
+ if (dummy_alloc) {
+ for (guint i = 0; i < request->NumFrameSuggested; i++) {
+ mids[i] = gst_qsv_allocator_acquire_frame (self,
+ GST_QSV_SYSTEM_MEMORY, &info, nullptr, nullptr);
+ }
+
+ response->mids = (mfxMemId *) mids;
+
+ return MFX_ERR_NONE;
+ }
+
+ caps = gst_video_info_to_caps (&info);
+ if (!caps) {
+ GST_ERROR_OBJECT (self, "Failed to convert video-info to caps");
+ return MFX_ERR_UNSUPPORTED;
+ }
+
+ gst_video_alignment_reset (&align);
+ align.padding_right = request->Info.Width - request->Info.CropW;
+ align.padding_bottom = request->Info.Height - request->Info.CropH;
+
+ pool = gst_video_buffer_pool_new ();
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
+ gst_buffer_pool_config_set_video_alignment (config, &align);
+ gst_buffer_pool_config_set_params (config, caps, GST_VIDEO_INFO_SIZE (&info),
+ 0, 0);
+ gst_caps_unref (caps);
+ gst_buffer_pool_set_config (pool, config);
+ gst_buffer_pool_set_active (pool, TRUE);
+
for (guint i = 0; i < request->NumFrameSuggested; i++) {
GstBuffer *buffer;
- buffer = gst_buffer_new_and_alloc (info.size);
+ if (gst_buffer_pool_acquire_buffer (pool, &buffer, nullptr) != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (self, "Failed to allocate texture buffer");
+ gst_buffer_pool_set_active (pool, FALSE);
+ gst_object_unref (pool);
+ goto error;
+ }
+
mids[i] = gst_qsv_allocator_acquire_frame (self,
GST_QSV_SYSTEM_MEMORY, &info, buffer, nullptr);
- gst_buffer_unref (buffer);
}
+ gst_buffer_pool_set_active (pool, FALSE);
+ gst_object_unref (pool);
+
response->mids = (mfxMemId *) mids;
return MFX_ERR_NONE;
+
+error:
+ if (mids) {
+ for (guint i = 0; i < response->NumFrameActual; i++)
+ gst_clear_qsv_frame (&mids[i]);
+
+ g_free (mids);
+ }
+
+ response->NumFrameActual = 0;
+
+ return MFX_ERR_MEMORY_ALLOC;
+}
+
+/* Copies @src into @dst, taking a new reference on every GstQsvFrame mid.
+ * Used to keep a GST-side copy of the last (dummy) allocation response so
+ * frames stay alive independently of the QSV runtime's own response.
+ * Returns FALSE when @src holds no frames. */
+static gboolean
+gst_qsv_allocator_copy_cached_response (GstQsvAllocator * self,
+ mfxFrameAllocResponse * dst, mfxFrameAllocResponse * src)
+{
+ GstQsvFrame **mids;
+
+ if (src->NumFrameActual == 0)
+ return FALSE;
+
+ mids = g_new0 (GstQsvFrame *, src->NumFrameActual);
+
+ for (guint i = 0; i < src->NumFrameActual; i++) {
+ GstQsvFrame *frame = (GstQsvFrame *) src->mids[i];
+
+ mids[i] = gst_qsv_frame_ref (frame);
+ }
+
+ dst->NumFrameActual = src->NumFrameActual;
+ dst->mids = (mfxMemId *) mids;
+
+ return TRUE;
}
static mfxStatus
mfxFrameAllocRequest * request, mfxFrameAllocResponse * response)
{
GstQsvAllocator *self = GST_QSV_ALLOCATOR (pthis);
+ GstQsvAllocatorPrivate *priv = self->priv;
GstQsvAllocatorClass *klass;
+ mfxStatus status;
+ mfxFrameAllocRequest req = *request;
+ gboolean dummy_alloc = priv->dummy_alloc;
+
+ GST_INFO_OBJECT (self, "Alloc, Request Type: 0x%x, %dx%d (%dx%d)",
+ req.Type, req.Info.Width, req.Info.Height,
+ req.Info.CropW, req.Info.CropH);
+
+ /* Apply extra_alloc_size only for GST internal use case */
+ if ((request->Type & MFX_MEMTYPE_EXTERNAL_FRAME) != 0)
+ req.NumFrameSuggested += priv->extra_alloc_size;
- if ((request->Type & MFX_MEMTYPE_SYSTEM_MEMORY) != 0)
- return gst_qsv_allocator_alloc_default (self, request, response);
+ if (req.Info.CropW == 0 || req.Info.CropH == 0) {
+ req.Info.CropW = req.Info.Width;
+ req.Info.CropH = req.Info.Height;
+ }
+
+ if (request->Info.FourCC == MFX_FOURCC_P8 ||
+ (request->Type & MFX_MEMTYPE_EXTERNAL_FRAME) == 0) {
+ dummy_alloc = FALSE;
+ }
- klass = GST_QSV_ALLOCATOR_GET_CLASS (self);
+ GST_INFO_OBJECT (self, "Dummy alloc %d", dummy_alloc);
+
+ if ((request->Type & MFX_MEMTYPE_SYSTEM_MEMORY) != 0) {
+ status = gst_qsv_allocator_alloc_default (self,
+ dummy_alloc, &req, response);
+ } else {
+ klass = GST_QSV_ALLOCATOR_GET_CLASS (self);
+ g_assert (klass->alloc);
- g_assert (klass->alloc);
+ status = klass->alloc (self, dummy_alloc, &req, response);
+ }
+
+ if (status != MFX_ERR_NONE)
+ return status;
+
+ /* Cache this response so that it can be accessible from the GST side */
+ if (dummy_alloc) {
+ gst_qsv_allocator_free ((mfxHDL) self, &priv->response);
+ gst_qsv_allocator_copy_cached_response (self, &priv->response, response);
+ }
- return klass->alloc (self, request, response);
+ return MFX_ERR_NONE;
}
static mfxStatus
GST_TRACE_OBJECT (self, "Lock mfxMemId %p", mid);
g_mutex_lock (&frame->lock);
+ if (!frame->buffer) {
+ GST_ERROR_OBJECT (self, "MemId %p doesn't hold buffer", mid);
+ g_mutex_unlock (&frame->lock);
+ return MFX_ERR_LOCK_MEMORY;
+ }
+
if (frame->map_count == 0) {
gst_video_frame_map (&frame->frame, &frame->info, frame->buffer,
- GST_MAP_READ);
+ (GstMapFlags) GST_MAP_READWRITE);
}
frame->map_count++;
{
GstQsvAllocator *self = GST_QSV_ALLOCATOR (pthis);
GstQsvFrame *frame = GST_QSV_FRAME_CAST (mid);
+ GstMapInfo map_info;
- if (frame->mem_type != GST_QSV_VIDEO_MEMORY) {
+ if (!GST_QSV_MEM_TYPE_IS_VIDEO (frame->mem_type)) {
GST_ERROR_OBJECT (self, "Unexpected call");
+ return MFX_ERR_UNSUPPORTED;
+ }
+
+ g_mutex_lock (&frame->lock);
+ if (!frame->buffer) {
+ GST_ERROR_OBJECT (self, "MemId %p doesn't hold buffer", mid);
+ g_mutex_unlock (&frame->lock);
return MFX_ERR_UNSUPPORTED;
}
- if (!frame->map_info.data) {
- GST_ERROR_OBJECT (self, "No mapped data");
+ g_assert ((frame->map_flags & GST_MAP_QSV) != 0);
+ if (!gst_buffer_map (frame->buffer, &map_info, frame->map_flags)) {
+ GST_ERROR_OBJECT (self, "Failed to map buffer");
+ g_mutex_unlock (&frame->lock);
+
return MFX_ERR_UNSUPPORTED;
}
#ifdef G_OS_WIN32
mfxHDLPair *pair = (mfxHDLPair *) handle;
- pair->first = (mfxHDL) frame->map_info.data;
+ pair->first = (mfxHDL) map_info.data;
/* GstD3D11 will fill user_data[0] with subresource index */
- pair->second = (mfxHDL) frame->map_info.user_data[0];
+ pair->second = (mfxHDL) map_info.user_data[0];
#else
- *handle = (mfxHDL) frame->map_info.data;
+ *handle = (mfxHDL) map_info.data;
#endif
+ /* XXX: Ideally we should unmap only when this surface is unlocked... */
+ gst_buffer_unmap (frame->buffer, &map_info);
+ g_mutex_unlock (&frame->lock);
+
return MFX_ERR_NONE;
}
gst_clear_qsv_frame (&frames[i]);
g_clear_pointer (&response->mids, g_free);
+ response->NumFrameActual = 0;
return MFX_ERR_NONE;
}
gst_video_frame_unmap (&frame->frame);
}
frame->map_count = 0;
+ gst_clear_buffer (&frame->buffer);
g_mutex_unlock (&frame->lock);
- if (frame->mem_type == GST_QSV_VIDEO_MEMORY && frame->map_info.data)
- gst_buffer_unmap (frame->buffer, &frame->map_info);
-
- memset (&frame->map_info, 0, sizeof (GstMapInfo));
-
- gst_clear_buffer (&frame->buffer);
GST_MINI_OBJECT_CAST (frame)->dispose = nullptr;
frame->allocator = nullptr;
* @allocator: a #GstQsvAllocator
* @mem_type: a memory type
* @info: a #GstVideoInfo
- * @buffer: (transfer none): a #GstBuffer
+ * @buffer: (nullable) (transfer full): a #GstBuffer
* @pool: (nullable): a #GstBufferPool
*
* Uploads @buffer to video memory if required, and wraps GstBuffer using
{
GstQsvAllocatorPrivate *priv;
GstQsvFrame *frame;
+ guint32 map_flags = 0;
g_return_val_if_fail (GST_IS_QSV_ALLOCATOR (allocator), nullptr);
+ if (GST_QSV_MEM_TYPE_IS_SYSTEM (mem_type) &&
+ GST_QSV_MEM_TYPE_IS_VIDEO (mem_type)) {
+ GST_ERROR_OBJECT (allocator, "Invalid memory type");
+ return nullptr;
+ }
+
+ if (GST_QSV_MEM_TYPE_IS_VIDEO (mem_type)) {
+ map_flags = GST_MAP_QSV;
+
+ if ((mem_type & GST_QSV_ENCODER_IN_MEMORY) != 0) {
+ map_flags |= GST_MAP_READ;
+ } else if ((mem_type & GST_QSV_DECODER_OUT_MEMORY) != 0) {
+ map_flags |= GST_MAP_WRITE;
+ } else {
+ GST_ERROR_OBJECT (allocator,
+ "Unknown read/write access for video memory");
+ return nullptr;
+ }
+ } else {
+ map_flags = GST_MAP_READWRITE;
+ }
+
priv = allocator->priv;
frame = (GstQsvFrame *) gst_atomic_queue_pop (priv->queue);
frame = gst_qsv_frame_new ();
frame->mem_type = mem_type;
- frame->allocator = (GstQsvAllocator *) gst_object_ref (allocator);
- GST_MINI_OBJECT_CAST (frame)->dispose =
- (GstMiniObjectDisposeFunction) gst_qsv_frame_dispose;
+ frame->map_flags = (GstMapFlags) map_flags;
+ frame->info = *info;
if (!pool) {
- frame->buffer = gst_buffer_ref (buffer);
- frame->info = *info;
- } else {
+ frame->buffer = buffer;
+ } else if (buffer) {
GstBuffer *upload_buf;
- if (mem_type == GST_QSV_SYSTEM_MEMORY) {
+ frame->allocator = (GstQsvAllocator *) gst_object_ref (allocator);
+ GST_MINI_OBJECT_CAST (frame)->dispose =
+ (GstMiniObjectDisposeFunction) gst_qsv_frame_dispose;
+
+ if (GST_QSV_MEM_TYPE_IS_SYSTEM (mem_type)) {
upload_buf = gst_qsv_allocator_upload_default (allocator, info, buffer,
pool);
} else {
upload_buf = klass->upload (allocator, info, buffer, pool);
}
+ gst_buffer_unref (buffer);
+
if (!upload_buf) {
GST_WARNING_OBJECT (allocator, "Failed to upload buffer");
gst_qsv_frame_unref (frame);
}
frame->buffer = upload_buf;
- frame->info = *info;
}
- if (mem_type == GST_QSV_VIDEO_MEMORY) {
- /* TODO: we need to know context whether this memory is for
- * output (e.g., decoder or vpp), but we have only encoder
- * implementation at the moment, so GST_MAP_READ should be fine */
- if (!gst_buffer_map (frame->buffer, &frame->map_info,
- (GstMapFlags) (GST_MAP_READ | GST_MAP_QSV))) {
- GST_ERROR_OBJECT (allocator, "Failed to map video buffer");
- gst_qsv_frame_unref (frame);
+ return frame;
+}
- return nullptr;
- }
+/* Default download vfunc implementation: CPU copy of the frame content into
+ * a buffer acquired from @pool. Used directly for system-memory frames and
+ * as the fallback path for video-memory frames when zero-copy or device
+ * copy is not possible. Returns a new buffer, or a new ref of the frame's
+ * own buffer when @force_copy is FALSE, or NULL on failure. */
+static GstBuffer *
+gst_qsv_allocator_download_default (GstQsvAllocator * self,
+ const GstVideoInfo * info, gboolean force_copy, GstQsvFrame * frame,
+ GstBufferPool * pool)
+{
+ GstBuffer *buffer = nullptr;
+ GstFlowReturn ret;
+ GstVideoFrame dst_frame;
+ mfxStatus status;
+ mfxFrameData dummy;
+ gboolean copy_ret;
+
+ GST_TRACE_OBJECT (self, "Download");
+
+ /* Zero-copy path: hand out another reference to the wrapped buffer */
+ if (!force_copy)
+ return gst_buffer_ref (frame->buffer);
+
+ ret = gst_buffer_pool_acquire_buffer (pool, &buffer, nullptr);
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (self, "Failed to acquire buffer");
+ return nullptr;
}
- return frame;
+ /* Use gst_qsv_allocator_lock() instead of gst_video_frame_map() to avoid
+ * a redundant map when the frame is already locked by the driver (although
+ * being already locked by the driver here would be an unsafe situation) */
+ status = gst_qsv_allocator_lock ((mfxHDL) self, (mfxMemId) frame, &dummy);
+ if (status != MFX_ERR_NONE) {
+ gst_buffer_unref (buffer);
+ GST_ERROR_OBJECT (self, "Failed to lock frame");
+ return nullptr;
+ }
+
+ if (!gst_video_frame_map (&dst_frame, &frame->info, buffer, GST_MAP_WRITE)) {
+ gst_qsv_allocator_unlock ((mfxHDL) self, (mfxMemId) frame, &dummy);
+ gst_buffer_unref (buffer);
+ GST_ERROR_OBJECT (self, "Failed to map output buffer");
+ return nullptr;
+ }
+
+ /* frame->frame was mapped by the lock call above; copy then release both */
+ copy_ret = gst_video_frame_copy (&dst_frame, &frame->frame);
+ gst_qsv_allocator_unlock ((mfxHDL) self, (mfxMemId) frame, &dummy);
+ gst_video_frame_unmap (&dst_frame);
+
+ if (!copy_ret) {
+ GST_ERROR_OBJECT (self, "Failed to copy frame");
+ gst_buffer_unref (buffer);
+ return nullptr;
+ }
+
+ return buffer;
+}
+
+/**
+ * gst_qsv_allocator_download_frame:
+ * @allocator: a #GstQsvAllocator
+ * @force_copy: whether the returned buffer must be a copy
+ * @frame: a #GstQsvFrame to download
+ * @pool: a #GstBufferPool used to acquire the output buffer
+ *
+ * Downloads @frame into a buffer from @pool. System-memory frames use the
+ * default (CPU copy) path; video-memory frames are dispatched to the
+ * subclass download vfunc (e.g. D3D11).
+ *
+ * Returns: (transfer full) (nullable): a #GstBuffer, or %NULL on failure
+ */
+GstBuffer *
+gst_qsv_allocator_download_frame (GstQsvAllocator * allocator,
+ gboolean force_copy, GstQsvFrame * frame, GstBufferPool * pool)
+{
+ GstQsvAllocatorClass *klass;
+
+ g_return_val_if_fail (GST_IS_QSV_ALLOCATOR (allocator), nullptr);
+ g_return_val_if_fail (GST_IS_QSV_FRAME (frame), nullptr);
+ g_return_val_if_fail (GST_IS_BUFFER_POOL (pool), nullptr);
+
+ if (GST_QSV_MEM_TYPE_IS_SYSTEM (frame->mem_type)) {
+ return gst_qsv_allocator_download_default (allocator, &frame->info,
+ force_copy, frame, pool);
+ }
+
+ klass = GST_QSV_ALLOCATOR_GET_CLASS (allocator);
+ g_assert (klass->download);
+
+ return klass->download (allocator, &frame->info, force_copy, frame, pool);
}
mfxFrameAllocator *
return &allocator->priv->allocator;
}
+
+/**
+ * gst_qsv_allocator_get_cached_response:
+ * @allocator: a #GstQsvAllocator
+ * @response: (out): a #mfxFrameAllocResponse filled with new frame refs
+ *
+ * Copies the response cached by the last dummy allocation into @response,
+ * so the frames are accessible from the GST side. Each mid in @response
+ * holds a new reference.
+ *
+ * Returns: %TRUE if a cached response with frames was available
+ */
+gboolean
+gst_qsv_allocator_get_cached_response (GstQsvAllocator * allocator,
+ mfxFrameAllocResponse * response)
+{
+ g_return_val_if_fail (GST_IS_QSV_ALLOCATOR (allocator), FALSE);
+
+ return gst_qsv_allocator_copy_cached_response (allocator,
+ response, &allocator->priv->response);
+}
+
+/**
+ * gst_qsv_allocator_set_options:
+ * @allocator: a #GstQsvAllocator
+ * @extra_alloc_size: number of additional frames added to
+ *   NumFrameSuggested for GST-internal (external-frame) allocations
+ * @dummy_alloc: whether to allocate frame wrappers without backing
+ *   buffers (buffers are attached later from the GST side)
+ *
+ * Configures the behavior of the mfxFrameAllocator::Alloc callback.
+ */
+void
+gst_qsv_allocator_set_options (GstQsvAllocator * allocator,
+ guint16 extra_alloc_size, gboolean dummy_alloc)
+{
+ g_return_if_fail (GST_IS_QSV_ALLOCATOR (allocator));
+
+ allocator->priv->extra_alloc_size = extra_alloc_size;
+ allocator->priv->dummy_alloc = dummy_alloc;
+}
GstBuffer * gst_qsv_frame_peek_buffer (GstQsvFrame * frame);
+gboolean gst_qsv_frame_set_buffer (GstQsvFrame * frame,
+ GstBuffer * buffer);
+
static inline GstQsvFrame *
gst_qsv_frame_ref (GstQsvFrame * frame)
{
+/* Memory type flags. SYSTEM and VIDEO are mutually exclusive location
+ * flags; ENCODER_IN / DECODER_OUT qualify a VIDEO type with the access
+ * direction (read by encoder vs. written by decoder), which determines
+ * the GstMapFlags used when mapping the underlying buffer. */
typedef enum
{
- GST_QSV_SYSTEM_MEMORY,
- GST_QSV_VIDEO_MEMORY,
+ GST_QSV_SYSTEM_MEMORY = (1 << 0),
+ GST_QSV_VIDEO_MEMORY = (1 << 1),
+ GST_QSV_ENCODER_IN_MEMORY = (1 << 2),
+ GST_QSV_DECODER_OUT_MEMORY = (1 << 3),
} GstQsvMemoryType;
+/* Convenience tests for the location bits */
+#define GST_QSV_MEM_TYPE_IS_SYSTEM(type) ((type & GST_QSV_SYSTEM_MEMORY) != 0)
+#define GST_QSV_MEM_TYPE_IS_VIDEO(type) ((type & GST_QSV_VIDEO_MEMORY) != 0)
+
struct _GstQsvAllocator
{
GstObject parent;
GstObjectClass parent_class;
mfxStatus (*alloc) (GstQsvAllocator * allocator,
+ gboolean dummy_alloc,
mfxFrameAllocRequest * request,
mfxFrameAllocResponse * response);
const GstVideoInfo * info,
GstBuffer * buffer,
GstBufferPool * pool);
+
+ GstBuffer * (*download) (GstQsvAllocator * allocator,
+ const GstVideoInfo * info,
+ gboolean force_copy,
+ GstQsvFrame * frame,
+ GstBufferPool * pool);
};
GType gst_qsv_allocator_get_type (void);
GstBuffer * buffer,
GstBufferPool * pool);
+GstBuffer * gst_qsv_allocator_download_frame (GstQsvAllocator * allocator,
+ gboolean force_copy,
+ GstQsvFrame * frame,
+ GstBufferPool * pool);
+
mfxFrameAllocator * gst_qsv_allocator_get_allocator_handle (GstQsvAllocator * allocator);
+gboolean gst_qsv_allocator_get_cached_response (GstQsvAllocator * allocator,
+ mfxFrameAllocResponse * response);
+
+void gst_qsv_allocator_set_options (GstQsvAllocator * allocator,
+ guint16 extra_alloc_size,
+ gboolean dummy_alloc);
+
G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstQsvAllocator, gst_object_unref)
G_END_DECLS
+
+#ifdef __cplusplus
+/* C++ bitwise-OR helpers so GstQsvMemoryType flags can be combined
+ * without explicit casts (enum | enum is ill-formed for scoped use
+ * in C++ without these overloads). */
+inline GstQsvMemoryType
+operator | (const GstQsvMemoryType & lhs, const GstQsvMemoryType & rhs)
+{
+ return static_cast<GstQsvMemoryType> (static_cast<guint>(lhs) |
+ static_cast<guint> (rhs));
+}
+
+inline GstQsvMemoryType &
+operator |= (GstQsvMemoryType & lhs, const GstQsvMemoryType & rhs)
+{
+ return lhs = lhs | rhs;
+}
+#endif
static void gst_qsv_d3d11_allocator_dispose (GObject * object);
static mfxStatus gst_qsv_d3d11_allocator_alloc (GstQsvAllocator * allocator,
- mfxFrameAllocRequest * request, mfxFrameAllocResponse * response);
+ gboolean dummy_alloc, mfxFrameAllocRequest * request,
+ mfxFrameAllocResponse * response);
static GstBuffer *gst_qsv_d3d11_allocator_upload (GstQsvAllocator * allocator,
const GstVideoInfo * info, GstBuffer * buffer, GstBufferPool * pool);
+static GstBuffer *gst_qsv_d3d11_allocator_download (GstQsvAllocator * allocator,
+ const GstVideoInfo * info, gboolean force_copy, GstQsvFrame * frame,
+ GstBufferPool * pool);
static void
gst_qsv_d3d11_allocator_class_init (GstQsvD3D11AllocatorClass * klass)
alloc_class->alloc = GST_DEBUG_FUNCPTR (gst_qsv_d3d11_allocator_alloc);
alloc_class->upload = GST_DEBUG_FUNCPTR (gst_qsv_d3d11_allocator_upload);
+ alloc_class->download = GST_DEBUG_FUNCPTR (gst_qsv_d3d11_allocator_download);
}
static void
static mfxStatus
gst_qsv_d3d11_allocator_alloc (GstQsvAllocator * allocator,
- mfxFrameAllocRequest * request, mfxFrameAllocResponse * response)
+ gboolean dummy_alloc, mfxFrameAllocRequest * request,
+ mfxFrameAllocResponse * response)
{
GstQsvD3D11Allocator *self = GST_QSV_D3D11_ALLOCATOR (allocator);
DXGI_FORMAT dxgi_format = DXGI_FORMAT_UNKNOWN;
GstQsvFrame **mids = nullptr;
- GST_TRACE_OBJECT (self, "Alloc");
-
/* Something unexpected and went wrong */
if ((request->Type & MFX_MEMTYPE_SYSTEM_MEMORY) != 0) {
GST_ERROR_OBJECT (self,
if (!mem) {
GST_ERROR_OBJECT (self, "Failed to allocate buffer");
-
return MFX_ERR_MEMORY_ALLOC;
}
mids = g_new0 (GstQsvFrame *, 1);
response->NumFrameActual = 1;
mids[0] = gst_qsv_allocator_acquire_frame (allocator,
- GST_QSV_VIDEO_MEMORY, &info, buffer, nullptr);
- gst_buffer_unref (buffer);
+ GST_QSV_VIDEO_MEMORY | GST_QSV_ENCODER_IN_MEMORY, &info, buffer,
+ nullptr);
} else {
GstBufferPool *pool;
GstVideoFormat format;
GstStructure *config;
GstD3D11AllocationParams *params;
guint bind_flags = 0;
+ GstVideoAlignment align;
+ GstQsvMemoryType mem_type = GST_QSV_VIDEO_MEMORY;
- if ((request->Type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) != 0)
+ if ((request->Type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) != 0) {
bind_flags |= D3D11_BIND_VIDEO_ENCODER;
+ mem_type |= GST_QSV_ENCODER_IN_MEMORY;
+ }
+
+ if ((request->Type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET) != 0) {
+ bind_flags |= D3D11_BIND_DECODER;
+ mem_type |= GST_QSV_DECODER_OUT_MEMORY;
+ }
+
+ if (mem_type == GST_QSV_VIDEO_MEMORY) {
+ GST_ERROR_OBJECT (self, "Unknown read/write access");
+ return MFX_ERR_UNSUPPORTED;
+ }
+
+ mids = g_new0 (GstQsvFrame *, request->NumFrameSuggested);
+ response->NumFrameActual = request->NumFrameSuggested;
format = gst_d3d11_dxgi_format_to_gst (dxgi_format);
gst_video_info_set_format (&info,
- format, request->Info.Width, request->Info.Height);
+ format, request->Info.CropW, request->Info.CropH);
+
+ if (dummy_alloc) {
+ for (guint i = 0; i < request->NumFrameSuggested; i++) {
+ mids[i] = gst_qsv_allocator_acquire_frame (allocator,
+ mem_type, &info, nullptr, nullptr);
+ }
+
+ response->mids = (mfxMemId *) mids;
+
+ return MFX_ERR_NONE;
+ }
+
caps = gst_video_info_to_caps (&info);
+ gst_video_alignment_reset (&align);
+ align.padding_right = request->Info.Width - request->Info.CropW;
+ align.padding_bottom = request->Info.Height - request->Info.CropH;
pool = gst_d3d11_buffer_pool_new (self->device);
params = gst_d3d11_allocation_params_new (self->device, &info,
(GstD3D11AllocationFlags) 0, bind_flags);
+ gst_d3d11_allocation_params_alignment (params, &align);
+
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_set_d3d11_allocation_params (config, params);
gst_d3d11_allocation_params_free (params);
gst_buffer_pool_set_config (pool, config);
gst_buffer_pool_set_active (pool, TRUE);
- mids = g_new0 (GstQsvFrame *, request->NumFrameSuggested);
- response->NumFrameActual = request->NumFrameSuggested;
for (guint i = 0; i < request->NumFrameSuggested; i++) {
GstBuffer *buffer;
}
mids[i] = gst_qsv_allocator_acquire_frame (allocator,
- GST_QSV_VIDEO_MEMORY, &info, buffer, nullptr);
- gst_buffer_unref (buffer);
+ mem_type, &info, buffer, nullptr);
}
+
gst_buffer_pool_set_active (pool, FALSE);
gst_object_unref (pool);
}
return gst_qsv_frame_copy_d3d11 (info, buffer, dst_buf);
}
+/* D3D11 download vfunc: device-to-device copy when the output pool is a
+ * (staging) D3D11 pool on the same device; otherwise falls back to the
+ * parent's CPU-copy path. Returns (transfer full) a buffer, or NULL. */
+static GstBuffer *
+gst_qsv_d3d11_allocator_download (GstQsvAllocator * allocator,
+ const GstVideoInfo * info, gboolean force_copy, GstQsvFrame * frame,
+ GstBufferPool * pool)
+{
+ GstBuffer *src_buf, *dst_buf;
+ GstMemory *mem;
+ GstD3D11Memory *dmem;
+ GstFlowReturn ret;
+
+ GST_TRACE_OBJECT (allocator, "Download");
+
+ src_buf = gst_qsv_frame_peek_buffer (frame);
+
+ /* Zero-copy: hand out another ref of the frame's own buffer */
+ if (!force_copy)
+ return gst_buffer_ref (src_buf);
+
+ /* The frame must wrap exactly one D3D11 memory */
+ mem = gst_buffer_peek_memory (src_buf, 0);
+ if (!gst_is_d3d11_memory (mem) || gst_buffer_n_memory (src_buf) != 1) {
+ GST_ERROR_OBJECT (allocator, "frame holds invalid d3d11 memory");
+ return nullptr;
+ }
+
+ if (!GST_IS_D3D11_BUFFER_POOL (pool) &&
+ !GST_IS_D3D11_STAGING_BUFFER_POOL (pool)) {
+ GST_TRACE_OBJECT (allocator, "Output is not d3d11 memory");
+ goto fallback;
+ }
+
+ dmem = GST_D3D11_MEMORY_CAST (mem);
+
+ /* both pool and qsvframe should hold the same d3d11 device already */
+ if (GST_IS_D3D11_BUFFER_POOL (pool)) {
+ GstD3D11BufferPool *d3d11_pool = GST_D3D11_BUFFER_POOL (pool);
+
+ if (d3d11_pool->device != dmem->device) {
+ GST_WARNING_OBJECT (allocator, "Pool holds different device");
+ goto fallback;
+ }
+ } else {
+ GstD3D11StagingBufferPool *d3d11_pool =
+ GST_D3D11_STAGING_BUFFER_POOL (pool);
+
+ if (d3d11_pool->device != dmem->device) {
+ GST_WARNING_OBJECT (allocator, "Staging pool holds different device");
+ goto fallback;
+ }
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (pool, &dst_buf, nullptr);
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (allocator, "Failed to allocate output buffer");
+ return nullptr;
+ }
+
+ return gst_qsv_frame_copy_d3d11 (info, src_buf, dst_buf);
+
+fallback:
+ /* Make sure the CPU-copy fallback sees up-to-date GPU content */
+ GST_MINI_OBJECT_FLAG_SET (mem, GST_D3D11_MEMORY_TRANSFER_NEED_DOWNLOAD);
+
+ return GST_QSV_ALLOCATOR_CLASS (parent_class)->download (allocator,
+ info, TRUE, frame, pool);
+}
+
GstQsvAllocator *
gst_qsv_d3d11_allocator_new (GstD3D11Device * device)
{
static void gst_qsv_va_allocator_dispose (GObject * object);
static mfxStatus gst_qsv_va_allocator_alloc (GstQsvAllocator * allocator,
- mfxFrameAllocRequest * request, mfxFrameAllocResponse * response);
+ gboolean dummy_alloc, mfxFrameAllocRequest * request,
+ mfxFrameAllocResponse * response);
static GstBuffer *gst_qsv_va_allocator_upload (GstQsvAllocator * allocator,
const GstVideoInfo * info, GstBuffer * buffer, GstBufferPool * pool);
+static GstBuffer *gst_qsv_va_allocator_download (GstQsvAllocator * allocator,
+ const GstVideoInfo * info, gboolean force_copy, GstQsvFrame * frame,
+ GstBufferPool * pool);
static void
gst_qsv_va_allocator_class_init (GstQsvVaAllocatorClass * klass)
alloc_class->alloc = GST_DEBUG_FUNCPTR (gst_qsv_va_allocator_alloc);
alloc_class->upload = GST_DEBUG_FUNCPTR (gst_qsv_va_allocator_upload);
+ alloc_class->download = GST_DEBUG_FUNCPTR (gst_qsv_va_allocator_download);
}
static void
}
static mfxStatus
-gst_qsv_va_allocator_alloc (GstQsvAllocator * allocator,
+gst_qsv_va_allocator_alloc (GstQsvAllocator * allocator, gboolean dummy_alloc,
mfxFrameAllocRequest * request, mfxFrameAllocResponse * response)
{
GST_ERROR_OBJECT (allocator, "Not implemented");
return nullptr;
}
+/* VA download vfunc: not implemented yet, always fails.
+ * TODO: add a VA-API surface download path (cf. the D3D11 variant). */
+static GstBuffer *
+gst_qsv_va_allocator_download (GstQsvAllocator * allocator,
+ const GstVideoInfo * info, gboolean force_copy, GstQsvFrame * frame,
+ GstBufferPool * pool)
+{
+ GST_ERROR_OBJECT (allocator, "Not implemented");
+
+ return nullptr;
+}
+
GstQsvAllocator *
gst_qsv_va_allocator_new (GstVaDisplay * display)
{
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstqsvdecoder.h"
+#include <mfxvideo++.h>
+#include <string.h>
+
+#ifdef G_OS_WIN32
+#include <gst/d3d11/gstd3d11.h>
+#include "gstqsvallocator_d3d11.h"
+
+#include <wrl.h>
+
+/* *INDENT-OFF* */
+using namespace Microsoft::WRL;
+/* *INDENT-ON* */
+#else
+#include <gst/va/gstvadisplay_drm.h>
+#include "gstqsvallocator_va.h"
+#endif /* G_OS_WIN32 */
+
+GST_DEBUG_CATEGORY_EXTERN (gst_qsv_decoder_debug);
+#define GST_CAT_DEFAULT gst_qsv_decoder_debug
+
+#define GST_QSV_DECODER_FLOW_NEW_SEQUENCE GST_FLOW_CUSTOM_SUCCESS_1
+
+/* Read-only, conditionally-installed properties: ADAPTER_LUID is exposed
+ * on Windows (D3D11), DEVICE_PATH on other platforms (VA/DRM). */
+enum
+{
+ PROP_0,
+ PROP_ADAPTER_LUID,
+ PROP_DEVICE_PATH,
+};
+
+/* Pairing of an mfxFrameSurface1 with the GstQsvFrame backing it */
+typedef struct _GstQsvDecoderSurface
+{
+ mfxFrameSurface1 surface;
+
+ /* mfxFrameSurface1:Data:MemId */
+ GstQsvFrame *frame;
+ /* presumably set while the surface holds a decoded picture that has not
+  * been pushed downstream yet — TODO confirm against the users of this
+  * flag elsewhere in the file */
+ gboolean need_output;
+} GstQsvDecoderSurface;
+
+/* One in-flight decode operation: a sync point plus the surface it will
+ * resolve into */
+typedef struct _GstQsvDecoderTask
+{
+ mfxSyncPoint sync_point;
+
+ /* without ownership */
+ GstQsvDecoderSurface *surface;
+} GstQsvDecoderTask;
+
+/* Instance-private state of GstQsvDecoder */
+struct _GstQsvDecoderPrivate
+{
+ /* Platform device: a GstD3D11Device on Windows (see set_context);
+  * presumably a VA display object elsewhere — confirm in the non-Win32
+  * open path */
+ GstObject *device;
+
+ GstVideoCodecState *input_state;
+ GstVideoCodecState *output_state;
+ GstQsvAllocator *allocator;
+
+ GstBufferPool *internal_pool;
+
+ /* Negotiated video info and the driver-aligned (padded) variant */
+ GstVideoInfo info;
+ GstVideoInfo aligned_info;
+
+ mfxSession session;
+ mfxVideoParam video_param;
+
+ /* holding allocated GstQsvFrame, should be cleared via
+ * mfxFrameAllocator::Free() */
+ mfxFrameAllocResponse response;
+
+ MFXVideoDECODE *decoder;
+ GstQsvMemoryType mem_type;
+ gboolean can_direct_render;
+
+ gboolean is_live;
+
+ /* Array of GstQsvDecoderSurface */
+ GArray *surface_pool;
+
+ /* Array of GstQsvDecoderTask */
+ GArray *task_pool;
+ guint next_task_index;
+};
+
+#define gst_qsv_decoder_parent_class parent_class
+G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE (GstQsvDecoder, gst_qsv_decoder,
+ GST_TYPE_VIDEO_DECODER);
+
+static void gst_qsv_decoder_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_qsv_decoder_dispose (GObject * object);
+static void gst_qsv_decoder_finalize (GObject * object);
+
+static void gst_qsv_decoder_set_context (GstElement * element,
+ GstContext * context);
+
+static gboolean gst_qsv_decoder_open (GstVideoDecoder * decoder);
+static gboolean gst_qsv_decoder_stop (GstVideoDecoder * decoder);
+static gboolean gst_qsv_decoder_close (GstVideoDecoder * decoder);
+static gboolean gst_qsv_decoder_set_format (GstVideoDecoder * decoder,
+ GstVideoCodecState * state);
+static gboolean gst_qsv_decoder_negotiate (GstVideoDecoder * decoder);
+static gboolean gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder,
+ GstQuery * query);
+static gboolean gst_qsv_decoder_sink_query (GstVideoDecoder * decoder,
+ GstQuery * query);
+static gboolean gst_qsv_decoder_src_query (GstVideoDecoder * decoder,
+ GstQuery * query);
+static GstFlowReturn gst_qsv_decoder_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame);
+static gboolean gst_qsv_decoder_flush (GstVideoDecoder * decoder);
+static GstFlowReturn gst_qsv_decoder_finish (GstVideoDecoder * decoder);
+static GstFlowReturn gst_qsv_decoder_drain (GstVideoDecoder * decoder);
+
+static void gst_qsv_decoder_surface_clear (GstQsvDecoderSurface * surface);
+static void gst_qsv_decoder_task_clear (GstQsvDecoderTask * task);
+
+/* GObject class init: installs the platform-dependent read-only property
+ * (adapter-luid on Windows, device-path otherwise) and wires up all
+ * GstVideoDecoder vfuncs. */
+static void
+gst_qsv_decoder_class_init (GstQsvDecoderClass * klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstVideoDecoderClass *videodec_class = GST_VIDEO_DECODER_CLASS (klass);
+
+ object_class->get_property = gst_qsv_decoder_get_property;
+ object_class->dispose = gst_qsv_decoder_dispose;
+ object_class->finalize = gst_qsv_decoder_finalize;
+
+#ifdef G_OS_WIN32
+ g_object_class_install_property (object_class, PROP_ADAPTER_LUID,
+ g_param_spec_int64 ("adapter-luid", "Adapter LUID",
+ "DXGI Adapter LUID (Locally Unique Identifier) of created device",
+ G_MININT64, G_MAXINT64, 0,
+ (GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_READABLE |
+ G_PARAM_STATIC_STRINGS)));
+#else
+ g_object_class_install_property (object_class, PROP_DEVICE_PATH,
+ g_param_spec_string ("device-path", "Device Path",
+ "DRM device path", nullptr,
+ (GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE |
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+#endif
+
+ element_class->set_context = GST_DEBUG_FUNCPTR (gst_qsv_decoder_set_context);
+
+ videodec_class->open = GST_DEBUG_FUNCPTR (gst_qsv_decoder_open);
+ videodec_class->stop = GST_DEBUG_FUNCPTR (gst_qsv_decoder_stop);
+ videodec_class->close = GST_DEBUG_FUNCPTR (gst_qsv_decoder_close);
+ videodec_class->negotiate = GST_DEBUG_FUNCPTR (gst_qsv_decoder_negotiate);
+ videodec_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_qsv_decoder_decide_allocation);
+ videodec_class->sink_query = GST_DEBUG_FUNCPTR (gst_qsv_decoder_sink_query);
+ videodec_class->src_query = GST_DEBUG_FUNCPTR (gst_qsv_decoder_src_query);
+ videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_qsv_decoder_set_format);
+ videodec_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_qsv_decoder_handle_frame);
+ videodec_class->drain = GST_DEBUG_FUNCPTR (gst_qsv_decoder_drain);
+ videodec_class->finish = GST_DEBUG_FUNCPTR (gst_qsv_decoder_finish);
+ videodec_class->flush = GST_DEBUG_FUNCPTR (gst_qsv_decoder_flush);
+}
+
+/* Instance init: creates the surface/task pools (with element clear
+ * functions so growing/clearing the arrays releases held resources) and
+ * marks the decoder as packetized (upstream delivers whole frames). */
+static void
+gst_qsv_decoder_init (GstQsvDecoder * self)
+{
+ GstQsvDecoderPrivate *priv;
+
+ priv = self->priv =
+ (GstQsvDecoderPrivate *) gst_qsv_decoder_get_instance_private (self);
+
+ priv->surface_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvDecoderSurface));
+ g_array_set_clear_func (priv->surface_pool,
+ (GDestroyNotify) gst_qsv_decoder_surface_clear);
+
+ priv->task_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvDecoderTask));
+ g_array_set_clear_func (priv->task_pool,
+ (GDestroyNotify) gst_qsv_decoder_task_clear);
+
+ gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
+}
+
+/* Property getter: values come from the class (per-subclass constants),
+ * not from the instance. Each property is only installed on its own
+ * platform (see class_init), so only one branch is reachable per build. */
+static void
+gst_qsv_decoder_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (object);
+
+ switch (prop_id) {
+ case PROP_ADAPTER_LUID:
+ g_value_set_int64 (value, klass->adapter_luid);
+ break;
+ case PROP_DEVICE_PATH:
+ g_value_set_string (value, klass->display_path);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject dispose: drops the platform device reference. May run more than
+ * once, which gst_clear_object() handles safely. */
+static void
+gst_qsv_decoder_dispose (GObject * object)
+{
+ GstQsvDecoder *self = GST_QSV_DECODER (object);
+ GstQsvDecoderPrivate *priv = self->priv;
+
+ gst_clear_object (&priv->device);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject finalize: frees the surface/task arrays; their clear functions
+ * (set in init) release any per-element resources. */
+static void
+gst_qsv_decoder_finalize (GObject * object)
+{
+ GstQsvDecoder *self = GST_QSV_DECODER (object);
+ GstQsvDecoderPrivate *priv = self->priv;
+
+ g_array_unref (priv->task_pool);
+ g_array_unref (priv->surface_pool);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GstElement set_context vfunc: accepts an application/pipeline-provided
+ * D3D11 device matching our adapter LUID (Windows only), then chains up */
+static void
+gst_qsv_decoder_set_context (GstElement * element, GstContext * context)
+{
+#ifdef G_OS_WIN32
+ GstQsvDecoder *self = GST_QSV_DECODER (element);
+ GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (element);
+ GstQsvDecoderPrivate *priv = self->priv;
+
+ gst_d3d11_handle_set_context_for_adapter_luid (element,
+ context, klass->adapter_luid, (GstD3D11Device **) & priv->device);
+#endif
+
+ GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+}
+
+#ifdef G_OS_WIN32
+/* Windows: acquires a D3D11 device for the subclass adapter LUID and binds
+ * it, plus a D3D11-backed QSV allocator, to the MFX session.
+ * Returns FALSE as soon as any step fails; caller cleans up */
+static gboolean
+gst_qsv_decoder_open_platform_device (GstQsvDecoder * self)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
+ ComPtr < ID3D10Multithread > multi_thread;
+ HRESULT hr;
+ ID3D11Device *device_handle;
+ mfxStatus status;
+ GstD3D11Device *device;
+
+ if (!gst_d3d11_ensure_element_data_for_adapter_luid (GST_ELEMENT (self),
+ klass->adapter_luid, (GstD3D11Device **) & priv->device)) {
+ GST_ERROR_OBJECT (self, "d3d11 device is unavailable");
+ return FALSE;
+ }
+
+ device = GST_D3D11_DEVICE_CAST (priv->device);
+ priv->allocator = gst_qsv_d3d11_allocator_new (device);
+
+ /* For D3D11 device handle to be used by QSV, multithread protection layer
+ * must be enabled before the MFXVideoCORE_SetHandle() call.
+ *
+ * TODO: Need to check performance impact by this mutithread protection layer,
+ * since it may have a negative impact on overall pipeline performance.
+ * If so, we should create decoding session dedicated d3d11 device and
+ * make use of shared resource */
+ device_handle = gst_d3d11_device_get_device_handle (device);
+ hr = device_handle->QueryInterface (IID_PPV_ARGS (&multi_thread));
+ if (!gst_d3d11_result (hr, device)) {
+ GST_ERROR_OBJECT (self, "ID3D10Multithread interface is unavailable");
+ return FALSE;
+ }
+
+ multi_thread->SetMultithreadProtected (TRUE);
+ status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_D3D11_DEVICE,
+ device_handle);
+ if (status != MFX_ERR_NONE) {
+ GST_ERROR_OBJECT (self, "Failed to set d3d11 device handle");
+ return FALSE;
+ }
+
+ /* Similar to the QSV encoder, we don't use this allocator for actual
+ * D3D11 texture allocation. But still required because of QSV API design.
+ */
+ status = MFXVideoCORE_SetFrameAllocator (priv->session,
+ gst_qsv_allocator_get_allocator_handle (priv->allocator));
+ if (status != MFX_ERR_NONE) {
+ GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status);
+ return FALSE;
+ }
+
+ return TRUE;
+}
+#else
+/* Linux: opens a VA display from the subclass DRM render-node path (unless
+ * one was already provided) and binds it, plus a VA-backed QSV allocator,
+ * to the MFX session */
+static gboolean
+gst_qsv_decoder_open_platform_device (GstQsvDecoder * self)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
+ mfxStatus status;
+ GstVaDisplay *display;
+
+ /* TODO: use GstVADisplay context sharing */
+ if (!priv->device) {
+ display = gst_va_display_drm_new_from_path (klass->display_path);
+ if (!display) {
+ GST_ERROR_OBJECT (self, "VA display is unavailable");
+ return FALSE;
+ }
+
+ priv->device = GST_OBJECT (display);
+ } else {
+ display = GST_VA_DISPLAY (priv->device);
+ }
+
+ priv->allocator = gst_qsv_va_allocator_new (display);
+
+ status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_VA_DISPLAY,
+ gst_va_display_get_va_dpy (display));
+ if (status != MFX_ERR_NONE) {
+ GST_ERROR_OBJECT (self, "Failed to set VA display handle");
+ return FALSE;
+ }
+
+ status = MFXVideoCORE_SetFrameAllocator (priv->session,
+ gst_qsv_allocator_get_allocator_handle (priv->allocator));
+ if (status != MFX_ERR_NONE) {
+ GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status);
+ return FALSE;
+ }
+
+ return TRUE;
+}
+#endif
+
+/* GstVideoDecoder open vfunc: creates the MFX session for this subclass'
+ * implementation index and attaches the platform device/allocator.
+ * Any partial state is torn down on failure */
+static gboolean
+gst_qsv_decoder_open (GstVideoDecoder * decoder)
+{
+ GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
+ mfxStatus status;
+
+ status = MFXCreateSession (gst_qsv_get_loader (), klass->impl_index,
+ &priv->session);
+ if (status != MFX_ERR_NONE) {
+ GST_ERROR_OBJECT (self, "Failed to create session");
+ return FALSE;
+ }
+
+ if (!gst_qsv_decoder_open_platform_device (self)) {
+ /* Undo everything acquired so far so close() is a no-op */
+ g_clear_pointer (&priv->session, MFXClose);
+ gst_clear_object (&priv->allocator);
+ gst_clear_object (&priv->device);
+
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/* Tears down per-stream decoding state: the MFXVideoDECODE handle, the
+ * internal output pool, the allocator response (mids) and the surface/task
+ * pools. The session, device and allocator objects are kept for reuse */
+static gboolean
+gst_qsv_decoder_reset (GstQsvDecoder * self)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+
+ GST_DEBUG_OBJECT (self, "Reset");
+
+ if (priv->decoder) {
+ delete priv->decoder;
+ priv->decoder = nullptr;
+ }
+
+ if (priv->internal_pool) {
+ gst_buffer_pool_set_active (priv->internal_pool, FALSE);
+ gst_clear_object (&priv->internal_pool);
+ }
+
+ /* Return the frame mids allocated via mfxFrameAllocator::Alloc() */
+ if (priv->allocator) {
+ mfxFrameAllocator *alloc =
+ gst_qsv_allocator_get_allocator_handle (priv->allocator);
+ alloc->Free ((mfxHDL) priv->allocator, &priv->response);
+ }
+ memset (&priv->response, 0, sizeof (mfxFrameAllocResponse));
+
+ g_array_set_size (priv->surface_pool, 0);
+ g_array_set_size (priv->task_pool, 0);
+
+ return TRUE;
+}
+
+static gboolean
+gst_qsv_decoder_stop (GstVideoDecoder * decoder)
+{
+  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+  GstQsvDecoderPrivate *priv = self->priv;
+
+  /* Drop both codec states, then tear down the per-stream decode state */
+  g_clear_pointer (&priv->output_state, gst_video_codec_state_unref);
+  g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);
+
+  return gst_qsv_decoder_reset (self);
+}
+
+static gboolean
+gst_qsv_decoder_close (GstVideoDecoder * decoder)
+{
+  GstQsvDecoderPrivate *priv = GST_QSV_DECODER (decoder)->priv;
+
+  /* Release in reverse order of acquisition: session, allocator, device */
+  g_clear_pointer (&priv->session, MFXClose);
+  gst_clear_object (&priv->allocator);
+  gst_clear_object (&priv->device);
+
+  return TRUE;
+}
+
+static void
+gst_qsv_decoder_surface_clear (GstQsvDecoderSurface * surface)
+{
+  /* NULL-tolerant: installed as a GArray element clear func */
+  if (surface)
+    memset (surface, 0, sizeof (*surface));
+}
+
+/* Clears one in-flight decoding task. The associated surface loses its
+ * pending-output mark and its GstBuffer is returned to the pool, but only
+ * when libmfx no longer locks the surface */
+static void
+gst_qsv_decoder_task_clear (GstQsvDecoderTask * task)
+{
+ if (!task)
+ return;
+
+ if (task->surface) {
+ task->surface->need_output = FALSE;
+ /* Data.Locked != 0 means the driver still uses this surface; keep the
+ * buffer attached until it unlocks (released later in get_next_surface) */
+ if (task->surface->frame && task->surface->surface.Data.Locked == 0)
+ gst_qsv_frame_set_buffer (task->surface->frame, nullptr);
+ }
+
+ task->surface = nullptr;
+ task->sync_point = nullptr;
+}
+
+/* Picks a surface that is neither locked by libmfx nor pending output and
+ * attaches a fresh GstBuffer from the internal pool to it. As a side effect,
+ * buffers of every idle surface are detached so the pool can recycle them.
+ * Returns NULL when all surfaces are busy or buffer acquisition fails */
+static GstQsvDecoderSurface *
+gst_qsv_decoder_get_next_surface (GstQsvDecoder * self)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstQsvDecoderSurface *surface = nullptr;
+ GstBuffer *buffer;
+ GstFlowReturn ret;
+
+ /* Clear unlocked frames as well */
+ for (guint i = 0; i < priv->surface_pool->len; i++) {
+ GstQsvDecoderSurface *iter =
+ &g_array_index (priv->surface_pool, GstQsvDecoderSurface, i);
+
+ if (iter->surface.Data.Locked > 0 || iter->need_output)
+ continue;
+
+ gst_qsv_frame_set_buffer (iter->frame, nullptr);
+
+ /* Keep the first idle surface but keep releasing the others' buffers */
+ if (!surface)
+ surface = iter;
+ }
+
+ if (!surface) {
+ GST_ERROR_OBJECT (self, "Failed to find unlocked surface");
+ return nullptr;
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (priv->internal_pool, &buffer, nullptr);
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (self, "Failed to allocate buffer");
+ return nullptr;
+ }
+
+ gst_qsv_frame_set_buffer (surface->frame, buffer);
+
+ return surface;
+}
+
+static GstQsvDecoderTask *
+gst_qsv_decoder_get_next_task (GstQsvDecoder * self)
+{
+  GstQsvDecoderPrivate *priv = self->priv;
+  guint idx = priv->next_task_index;
+
+  /* Round-robin over the fixed-size task ring */
+  priv->next_task_index = (idx + 1) % priv->task_pool->len;
+
+  return &g_array_index (priv->task_pool, GstQsvDecoderTask, idx);
+}
+
+/* Maps a decoded picture timestamp back to a pending GstVideoCodecFrame.
+ * Prefers an exact PTS match, then the closest PTS; frames older than the
+ * chosen one are released (can happen with single-field H264 input).
+ * Falls back to the oldest pending frame when nothing matches.
+ * Returns: (transfer full) a codec frame, or NULL if none is pending */
+static GstVideoCodecFrame *
+gst_qsv_decoder_find_output_frame (GstQsvDecoder * self, GstClockTime pts)
+{
+ GList *frames, *iter;
+ GstVideoCodecFrame *ret = nullptr;
+ GstVideoCodecFrame *closest = nullptr;
+ guint64 min_pts_abs_diff = 0;
+
+ /* give up, just returns the oldest frame */
+ if (!GST_CLOCK_TIME_IS_VALID (pts))
+ return gst_video_decoder_get_oldest_frame (GST_VIDEO_DECODER (self));
+
+ frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self));
+
+ for (iter = frames; iter; iter = g_list_next (iter)) {
+ GstVideoCodecFrame *frame = (GstVideoCodecFrame *) iter->data;
+ guint64 abs_diff;
+
+ if (!GST_CLOCK_TIME_IS_VALID (frame->pts))
+ continue;
+
+ if (pts == frame->pts) {
+ ret = frame;
+ break;
+ }
+
+ if (pts >= frame->pts)
+ abs_diff = pts - frame->pts;
+ else
+ abs_diff = frame->pts - pts;
+
+ /* "!closest" guards the first comparison against min_pts_abs_diff == 0 */
+ if (!closest || abs_diff < min_pts_abs_diff) {
+ closest = frame;
+ min_pts_abs_diff = abs_diff;
+ }
+ }
+
+ if (!ret && closest)
+ ret = closest;
+
+ if (ret) {
+ gst_video_codec_frame_ref (ret);
+
+ /* Release older frames, it can happen if input buffer holds only single
+ * field in case of H264 */
+ for (iter = frames; iter; iter = g_list_next (iter)) {
+ GstVideoCodecFrame *frame = (GstVideoCodecFrame *) iter->data;
+
+ if (frame == ret)
+ continue;
+
+ if (!GST_CLOCK_TIME_IS_VALID (frame->pts))
+ continue;
+
+ if (frame->pts < ret->pts) {
+ gst_video_decoder_release_frame (GST_VIDEO_DECODER (self),
+ gst_video_codec_frame_ref (frame));
+ }
+ }
+ } else {
+ ret = gst_video_decoder_get_oldest_frame (GST_VIDEO_DECODER (self));
+ }
+
+ if (frames)
+ g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);
+
+ return ret;
+}
+
+/* Completes one queued decode task: waits for its sync point, downloads or
+ * wraps the decoded surface into an output buffer, tags interlace flags and
+ * pushes it downstream via the matching codec frame.
+ * @flushing: when TRUE the decoded data is discarded after synchronization.
+ * The task is always cleared before returning */
+static GstFlowReturn
+gst_qsv_decoder_finish_frame (GstQsvDecoder * self, GstQsvDecoderTask * task,
+ gboolean flushing)
+{
+ GstVideoDecoder *vdec = GST_VIDEO_DECODER (self);
+ GstQsvDecoderPrivate *priv = self->priv;
+ mfxStatus status;
+ GstVideoCodecFrame *frame;
+ GstClockTime pts = GST_CLOCK_TIME_NONE;
+ GstBuffer *buffer;
+ guint retry_count = 0;
+ /* magic number */
+ const guint retry_threshold = 100;
+ GstQsvDecoderSurface *surface = task->surface;
+ GstBufferPool *pool;
+ gboolean force_copy = FALSE;
+
+ g_assert (surface != nullptr);
+ g_assert (task->sync_point != nullptr);
+
+ status = MFX_ERR_NONE;
+ do {
+ /* magic number 100 ms */
+ status = MFXVideoCORE_SyncOperation (priv->session, task->sync_point, 100);
+
+ /* Retry up to 10 sec (100 ms x 100 times), that should be enough time for
+ * decoding a frame using hardware */
+ if (status == MFX_WRN_IN_EXECUTION && retry_count < retry_threshold) {
+ GST_DEBUG_OBJECT (self,
+ "Operation is still in execution, retry count (%d/%d)",
+ retry_count, retry_threshold);
+ retry_count++;
+ continue;
+ }
+
+ break;
+ } while (TRUE);
+
+ if (flushing) {
+ gst_qsv_decoder_task_clear (task);
+ return GST_FLOW_OK;
+ }
+
+ if (status != MFX_ERR_NONE) {
+ gst_qsv_decoder_task_clear (task);
+
+ if (status == MFX_ERR_ABORTED) {
+ GST_INFO_OBJECT (self, "Operation was aborted");
+ return GST_FLOW_FLUSHING;
+ }
+
+ GST_WARNING_OBJECT (self, "SyncOperation returned %d (%s)",
+ QSV_STATUS_ARGS (status));
+
+ return GST_FLOW_ERROR;
+ }
+
+ pts = gst_qsv_timestamp_to_gst (surface->surface.Data.TimeStamp);
+ pool = gst_video_decoder_get_buffer_pool (vdec);
+ if (!pool) {
+ GST_ERROR_OBJECT (self, "Decoder doesn't hold buffer pool");
+ gst_qsv_decoder_task_clear (task);
+ return GST_FLOW_ERROR;
+ }
+
+ /* Copy decoded frame in case of reverse playback, too many bound frame to
+ * decoder may cause driver unhappy */
+ if (!priv->can_direct_render || vdec->input_segment.rate < 0.0)
+ force_copy = TRUE;
+
+ /* TODO: Handle non-zero crop-{x,y} position via crop meta or similar */
+ buffer = gst_qsv_allocator_download_frame (priv->allocator, force_copy,
+ surface->frame, pool);
+ gst_object_unref (pool);
+ gst_qsv_decoder_task_clear (task);
+
+ if (!buffer) {
+ GST_ERROR_OBJECT (self, "No output buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ /* Per-picture field order for streams negotiated as mixed interlacing */
+ if (priv->aligned_info.interlace_mode == GST_VIDEO_INTERLACE_MODE_MIXED) {
+ if ((surface->surface.Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF) != 0) {
+ GST_BUFFER_FLAG_SET (buffer,
+ GST_VIDEO_BUFFER_FLAG_TFF | GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ } else if ((surface->surface.Info.PicStruct & MFX_PICSTRUCT_FIELD_BFF) != 0) {
+ GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ GST_BUFFER_FLAG_UNSET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
+ }
+ }
+
+ frame = gst_qsv_decoder_find_output_frame (self, pts);
+ if (frame) {
+ frame->pts = pts;
+ frame->output_buffer = buffer;
+
+ return gst_video_decoder_finish_frame (vdec, frame);
+ }
+
+ /* Empty available frame, something went wrong but we can just push this
+ * buffer */
+ GST_WARNING_OBJECT (self, "Failed to find corresponding frame");
+ GST_BUFFER_PTS (buffer) = pts;
+
+ return gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), buffer);
+}
+
+/* Core decode loop: submits @bitstream (or drains when NULL) via
+ * DecodeFrameAsync, recycling task ring slots and picking fresh surfaces as
+ * needed. Loops until the bitstream is consumed, more input is required, a
+ * new sequence is signalled, or an error occurs.
+ * Returns GST_FLOW_OK, GST_VIDEO_DECODER_FLOW_NEED_DATA,
+ * GST_QSV_DECODER_FLOW_NEW_SEQUENCE or an error flow */
+static GstFlowReturn
+gst_qsv_decoder_decode_frame (GstQsvDecoder * self, mfxBitstream * bitstream,
+ gboolean flushing)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+ mfxStatus status;
+ guint retry_count = 0;
+ /* magic number */
+ const guint retry_threshold = 1000;
+ GstQsvDecoderSurface *surface = nullptr;
+ GstFlowReturn ret;
+
+ do {
+ mfxFrameSurface1 *out_surface = nullptr;
+ /* Reusing a ring slot implies finishing whatever task occupied it */
+ GstQsvDecoderTask *task = gst_qsv_decoder_get_next_task (self);
+ if (task->sync_point) {
+ ret = gst_qsv_decoder_finish_frame (self, task, flushing);
+
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ if (!surface)
+ surface = gst_qsv_decoder_get_next_surface (self);
+
+ if (!surface) {
+ GST_ERROR_OBJECT (self, "No available surface");
+ return GST_FLOW_ERROR;
+ }
+
+ status = priv->decoder->DecodeFrameAsync (bitstream, &surface->surface,
+ &out_surface, &task->sync_point);
+
+ if (status != MFX_ERR_NONE) {
+ GST_LOG_OBJECT (self, "DecodeFrameAsync returned %d (%s)",
+ QSV_STATUS_ARGS (status));
+ }
+
+ /* A produced output surface may differ from the work surface we passed;
+ * map it back to our pool entry via its MemId */
+ if (out_surface) {
+ g_assert (task->sync_point != nullptr);
+
+ for (guint i = 0; i < priv->surface_pool->len; i++) {
+ GstQsvDecoderSurface *iter =
+ &g_array_index (priv->surface_pool, GstQsvDecoderSurface, i);
+
+ if (iter->surface.Data.MemId == out_surface->Data.MemId) {
+ task->surface = iter;
+ break;
+ }
+ }
+
+ if (!task->surface) {
+ GST_ERROR_OBJECT (self, "Failed to find surface");
+ gst_qsv_decoder_task_clear (task);
+ return GST_FLOW_ERROR;
+ }
+
+ /* Make need-output to hold underlying GstBuffer until output happens */
+ task->surface->need_output = TRUE;
+ }
+
+ switch (status) {
+ case MFX_ERR_NONE:
+ case MFX_WRN_VIDEO_PARAM_CHANGED:{
+ /* Work surface got consumed; force picking a new one next iteration */
+ if (surface->surface.Data.Locked > 0)
+ surface = nullptr;
+
+ if (bitstream && bitstream->DataLength == 0)
+ return GST_FLOW_OK;
+
+ break;
+ }
+ case MFX_ERR_MORE_SURFACE:
+ return GST_FLOW_OK;
+ case MFX_ERR_INCOMPATIBLE_VIDEO_PARAM:
+ GST_DEBUG_OBJECT (self, "Found new sequence");
+ return GST_QSV_DECODER_FLOW_NEW_SEQUENCE;
+ case MFX_ERR_MORE_DATA:
+ return GST_VIDEO_DECODER_FLOW_NEED_DATA;
+ case MFX_WRN_DEVICE_BUSY:
+ GST_LOG_OBJECT (self, "GPU is busy, retry count (%d/%d)",
+ retry_count, retry_threshold);
+
+ if (retry_count > retry_threshold) {
+ GST_ERROR_OBJECT (self, "Give up");
+ return GST_FLOW_ERROR;
+ }
+
+ retry_count++;
+
+ /* Magic number 1ms */
+ g_usleep (1000);
+ break;
+ default:
+ if (status < MFX_ERR_NONE) {
+ GST_ERROR_OBJECT (self, "Got error %d (%s)",
+ QSV_STATUS_ARGS (status));
+ return GST_FLOW_ERROR;
+ }
+ break;
+ }
+ } while (TRUE);
+
+ return GST_FLOW_ERROR;
+}
+
+/* Drains the decoder: feeds NULL bitstream until libmfx reports it needs
+ * data, then completes every task still holding a sync point.
+ * @flushing: when TRUE decoded pictures are discarded instead of pushed */
+static GstFlowReturn
+gst_qsv_decoder_drain_internal (GstQsvDecoder * self, gboolean flushing)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* Nothing to drain before the session/decoder were created */
+ if (!priv->session || !priv->decoder)
+ return GST_FLOW_OK;
+
+ do {
+ ret = gst_qsv_decoder_decode_frame (self, nullptr, flushing);
+ } while (ret != GST_VIDEO_DECODER_FLOW_NEED_DATA && ret >= GST_FLOW_OK);
+
+ /* Flush out the remaining in-flight tasks in ring order */
+ for (guint i = 0; i < priv->task_pool->len; i++) {
+ GstQsvDecoderTask *task = gst_qsv_decoder_get_next_task (self);
+
+ if (!task->sync_point)
+ continue;
+
+ ret = gst_qsv_decoder_finish_frame (self, task, flushing);
+ }
+
+ switch (ret) {
+ case GST_VIDEO_DECODER_FLOW_NEED_DATA:
+ case GST_QSV_DECODER_FLOW_NEW_SEQUENCE:
+ return GST_FLOW_OK;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/* GstVideoDecoder set_format vfunc: drains the previous stream, stores the
+ * new input state and codec id, and probes upstream liveness (a live source
+ * gets AsyncDepth 1 for low latency). The actual decoder session is opened
+ * lazily once a sequence header has been parsed in handle_frame() */
+static gboolean
+gst_qsv_decoder_set_format (GstVideoDecoder * decoder,
+ GstVideoCodecState * state)
+{
+ GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
+ GstQuery *query;
+
+ GST_DEBUG_OBJECT (self, "Set format with %" GST_PTR_FORMAT, state->caps);
+
+ gst_qsv_decoder_drain_internal (self, FALSE);
+
+ g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);
+
+ priv->input_state = gst_video_codec_state_ref (state);
+
+ memset (&priv->video_param, 0, sizeof (mfxVideoParam));
+ priv->video_param.mfx.CodecId = klass->codec_id;
+
+ /* If upstream is live, we will use single async-depth for low-latency
+ * decoding */
+ query = gst_query_new_latency ();
+ if (gst_pad_peer_query (GST_VIDEO_DECODER_SINK_PAD (self), query))
+ gst_query_parse_latency (query, &priv->is_live, nullptr, nullptr);
+ gst_query_unref (query);
+
+ /* We will open decoder later once sequence header is parsed */
+
+ return TRUE;
+}
+
+#ifdef G_OS_WIN32
+/* Creates and activates the internal D3D11 buffer pool used to back decoder
+ * surfaces. Textures get decoder + shader-resource bind flags so downstream
+ * pixel-shader based conversion can consume them directly */
+static gboolean
+gst_qsv_decoder_prepare_d3d11_pool (GstQsvDecoder * self,
+ GstCaps * caps, GstVideoInfo * info, GstVideoAlignment * align)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstStructure *config;
+ GstD3D11AllocationParams *params;
+ GstD3D11Device *device = GST_D3D11_DEVICE_CAST (priv->device);
+
+ GST_DEBUG_OBJECT (self, "Use d3d11 memory pool");
+
+ priv->internal_pool = gst_d3d11_buffer_pool_new (device);
+ config = gst_buffer_pool_get_config (priv->internal_pool);
+ /* Bind to shader resource as well for this texture can be used
+ * in generic pixel shader */
+ params = gst_d3d11_allocation_params_new (device, info,
+ (GstD3D11AllocationFlags) 0,
+ D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE);
+ gst_d3d11_allocation_params_alignment (params, align);
+ gst_buffer_pool_config_set_d3d11_allocation_params (config, params);
+ gst_d3d11_allocation_params_free (params);
+ gst_buffer_pool_config_set_params (config, caps,
+ GST_VIDEO_INFO_SIZE (info), 0, 0);
+ gst_buffer_pool_set_config (priv->internal_pool, config);
+ gst_buffer_pool_set_active (priv->internal_pool, TRUE);
+
+ return TRUE;
+}
+#endif
+
+/* Creates and activates a system-memory internal pool with video meta and
+ * the codec alignment applied, used when no GPU pool is available */
+static gboolean
+gst_qsv_decoder_prepare_system_pool (GstQsvDecoder * self,
+ GstCaps * caps, GstVideoInfo * info, GstVideoAlignment * align)
+{
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstStructure *config;
+
+ GST_DEBUG_OBJECT (self, "Use system memory pool");
+
+ priv->internal_pool = gst_video_buffer_pool_new ();
+ config = gst_buffer_pool_get_config (priv->internal_pool);
+ gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
+ gst_buffer_pool_config_set_video_alignment (config, align);
+ gst_buffer_pool_config_set_params (config,
+ caps, GST_VIDEO_INFO_SIZE (info), 0, 0);
+
+ gst_buffer_pool_set_config (priv->internal_pool, config);
+ gst_buffer_pool_set_active (priv->internal_pool, TRUE);
+
+ return TRUE;
+}
+
+/* (Re)creates the internal output pool backing decoder surfaces and selects
+ * the matching mfx IO pattern.
+ *
+ * @io_pattern: (out) MFX_IOPATTERN_OUT_VIDEO_MEMORY when a d3d11 pool could
+ * be created, MFX_IOPATTERN_OUT_SYSTEM_MEMORY otherwise.
+ *
+ * Returns: TRUE on success */
+static gboolean
+gst_qsv_decoder_prepare_pool (GstQsvDecoder * self, mfxU16 * io_pattern)
+{
+  GstQsvDecoderPrivate *priv = self->priv;
+  gboolean ret = FALSE;
+  GstCaps *caps;
+  GstVideoAlignment align;
+
+  /* Drop any previous pool before configuring a new one */
+  if (priv->internal_pool) {
+    gst_buffer_pool_set_active (priv->internal_pool, FALSE);
+    gst_clear_object (&priv->internal_pool);
+  }
+
+  caps = gst_video_info_to_caps (&priv->info);
+  if (!caps) {
+    GST_ERROR_OBJECT (self, "Failed to convert video-info to caps");
+    return FALSE;
+  }
+
+  /* Codec-alignment padding goes to the right/bottom; the crop rectangle
+   * starts at (0, 0) (see the crop TODO in finish_frame).
+   * Fixed: was padding_left, which would misplace the valid region */
+  gst_video_alignment_reset (&align);
+  align.padding_right = priv->aligned_info.width - priv->info.width;
+  align.padding_bottom = priv->aligned_info.height - priv->info.height;
+
+  /* TODO: Add Linux video memory (VA/DMABuf) support */
+#ifdef G_OS_WIN32
+  priv->mem_type = GST_QSV_VIDEO_MEMORY | GST_QSV_DECODER_OUT_MEMORY;
+  *io_pattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
+
+  ret = gst_qsv_decoder_prepare_d3d11_pool (self, caps, &priv->info, &align);
+#endif
+
+  /* Fall back to system memory when no GPU pool is available */
+  if (!ret) {
+    priv->mem_type = GST_QSV_SYSTEM_MEMORY | GST_QSV_DECODER_OUT_MEMORY;
+    *io_pattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
+
+    ret = gst_qsv_decoder_prepare_system_pool (self, caps, &priv->info, &align);
+  }
+  gst_caps_unref (caps);
+
+  return ret;
+}
+
+/* Opens the MFXVideoDECODE session from the parsed video parameters:
+ * prepares the internal pool, queries/inits the decoder, pulls the
+ * pre-allocated frame mids and sizes the surface/task pools.
+ * Fixed: restored "&param->" where the source had been corrupted to the
+ * pilcrow mojibake "¶m->" (would not compile).
+ *
+ * Returns: TRUE on success; on failure everything is reset */
+static gboolean
+gst_qsv_decoder_init_session (GstQsvDecoder * self)
+{
+  GstQsvDecoderPrivate *priv = self->priv;
+  mfxVideoParam *param = &priv->video_param;
+  mfxFrameInfo *frame_info = &param->mfx.FrameInfo;
+  MFXVideoDECODE *decoder_handle = nullptr;
+  mfxFrameAllocRequest request;
+  mfxStatus status;
+
+  GST_DEBUG_OBJECT (self, "Init session");
+
+  memset (&request, 0, sizeof (mfxFrameAllocRequest));
+
+  gst_qsv_decoder_reset (self);
+
+  if (!gst_qsv_decoder_prepare_pool (self, &param->IOPattern)) {
+    GST_ERROR_OBJECT (self, "Failed to prepare pool");
+    goto error;
+  }
+
+  /* Single-frame depth for live pipelines, to minimize latency */
+  param->AsyncDepth = priv->is_live ? 1 : 4;
+
+  decoder_handle = new MFXVideoDECODE (priv->session);
+
+  /* Additional 4 frames for margin. Actually large pool size would be fine
+   * because we don't pre-allocate underlying output memory objects */
+  gst_qsv_allocator_set_options (priv->allocator, param->AsyncDepth + 4, TRUE);
+
+  status = decoder_handle->Query (param, param);
+  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::Query);
+
+  status = decoder_handle->QueryIOSurf (param, &request);
+  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::QueryIOSurf);
+
+  status = decoder_handle->Init (param);
+  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::Init);
+
+  status = decoder_handle->GetVideoParam (param);
+  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::GetVideoParam);
+
+  /* In case that we use video memory, MFXVideoDECODE::Init() will invoke
+   * mfxFrameAllocator::Alloc(). Pull the pre-allocated dummy GstQsvFrame
+   * objects here and fill with GstBuffer later when needed */
+  if (GST_QSV_MEM_TYPE_IS_SYSTEM (priv->mem_type)) {
+    mfxFrameAllocator *alloc_handle =
+        gst_qsv_allocator_get_allocator_handle (priv->allocator);
+
+    request.Type |= MFX_MEMTYPE_EXTERNAL_FRAME;
+    status = alloc_handle->Alloc ((mfxHDL) priv->allocator, &request,
+        &priv->response);
+    if (status != MFX_ERR_NONE) {
+      GST_ERROR_OBJECT (self, "Failed to allocate system memory frames");
+      goto error;
+    }
+  } else if (!gst_qsv_allocator_get_cached_response (priv->allocator,
+          &priv->response)) {
+    GST_ERROR_OBJECT (self, "Failed to get cached response");
+    goto error;
+  }
+
+  /* One GstQsvDecoderSurface per allocated mid; the MemId doubles as the
+   * GstQsvFrame pointer */
+  g_array_set_size (priv->surface_pool, priv->response.NumFrameActual);
+  for (guint i = 0; i < priv->surface_pool->len; i++) {
+    GstQsvDecoderSurface *surface = &g_array_index (priv->surface_pool,
+        GstQsvDecoderSurface, i);
+    GstBuffer *buf;
+
+    gst_qsv_decoder_surface_clear (surface);
+    surface->surface.Info = *frame_info;
+    surface->surface.Data.MemId = priv->response.mids[i];
+
+    /* holds casted object without ref, to make code cleaner */
+    surface->frame = (GstQsvFrame *) surface->surface.Data.MemId;
+
+    /* This frame must not hold buffer at this moment */
+    buf = gst_qsv_frame_peek_buffer (surface->frame);
+    g_assert (buf == nullptr);
+  }
+
+  g_array_set_size (priv->task_pool, param->AsyncDepth);
+  for (guint i = 0; i < priv->task_pool->len; i++) {
+    GstQsvDecoderTask *task = &g_array_index (priv->task_pool,
+        GstQsvDecoderTask, i);
+
+    gst_qsv_decoder_task_clear (task);
+  }
+
+  priv->decoder = decoder_handle;
+
+  return TRUE;
+
+error:
+  if (decoder_handle)
+    delete decoder_handle;
+
+  gst_qsv_decoder_reset (self);
+
+  return FALSE;
+}
+
+/* GstVideoDecoder negotiate vfunc: derives output format, display size and
+ * codec alignment from the parsed mfx sequence header, sets the output
+ * state, and (Windows) advertises D3D11 memory caps when downstream
+ * supports zero-copy. Fixed: restored "&param->" where the source had been
+ * corrupted to the pilcrow mojibake "¶m->" (would not compile) */
+static gboolean
+gst_qsv_decoder_negotiate (GstVideoDecoder * decoder)
+{
+  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+  GstQsvDecoderPrivate *priv = self->priv;
+  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
+  guint width, height;
+  guint coded_width, coded_height;
+  guint aligned_width, aligned_height;
+  mfxVideoParam *param = &priv->video_param;
+  mfxFrameInfo *frame_info = &param->mfx.FrameInfo;
+  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
+  GstVideoInterlaceMode interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+
+  width = coded_width = frame_info->Width;
+  height = coded_height = frame_info->Height;
+
+  /* Display size is the crop rectangle when present */
+  if (frame_info->CropW > 0 && frame_info->CropH > 0) {
+    width = frame_info->CropW;
+    height = frame_info->CropH;
+  }
+
+  switch (frame_info->FourCC) {
+    case MFX_FOURCC_NV12:
+      format = GST_VIDEO_FORMAT_NV12;
+      break;
+    default:
+      break;
+  }
+
+  if (format == GST_VIDEO_FORMAT_UNKNOWN) {
+    GST_ERROR_OBJECT (self, "Unknown video format");
+    return FALSE;
+  }
+
+  /* Surface dimensions must follow codec alignment requirements */
+  aligned_width = GST_ROUND_UP_16 (coded_width);
+  if (klass->codec_id == MFX_CODEC_AVC) {
+    if (frame_info->PicStruct == MFX_PICSTRUCT_PROGRESSIVE) {
+      aligned_height = GST_ROUND_UP_16 (coded_height);
+    } else {
+      aligned_height = GST_ROUND_UP_32 (coded_height);
+      /* In theory, tff/bff can be altered in a sequence */
+      interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
+    }
+  } else {
+    aligned_height = GST_ROUND_UP_16 (coded_height);
+  }
+
+  frame_info->Width = aligned_width;
+  frame_info->Height = aligned_height;
+
+  gst_video_info_set_interlaced_format (&priv->info, format,
+      interlace_mode, width, height);
+  gst_video_info_set_interlaced_format (&priv->aligned_info, format,
+      interlace_mode, aligned_width, aligned_height);
+
+  g_clear_pointer (&priv->output_state, gst_video_codec_state_unref);
+  priv->output_state =
+      gst_video_decoder_set_interlaced_output_state (GST_VIDEO_DECODER (self),
+      format, interlace_mode, width, height, priv->input_state);
+
+  priv->output_state->caps = gst_video_info_to_caps (&priv->output_state->info);
+  priv->can_direct_render = FALSE;
+
+#ifdef G_OS_WIN32
+  /* Scan downstream caps for D3D11 memory support to decide whether decoded
+   * textures can be pushed without a copy */
+  GstCaps *peer_caps =
+      gst_pad_get_allowed_caps (GST_VIDEO_DECODER_SRC_PAD (self));
+  GST_DEBUG_OBJECT (self, "Allowed caps %" GST_PTR_FORMAT, peer_caps);
+
+  if (!peer_caps || gst_caps_is_any (peer_caps)) {
+    GST_DEBUG_OBJECT (self,
+        "cannot determine output format, use system memory");
+  } else {
+    GstCapsFeatures *features;
+    guint size = gst_caps_get_size (peer_caps);
+
+    for (guint i = 0; i < size; i++) {
+      features = gst_caps_get_features (peer_caps, i);
+
+      if (!features)
+        continue;
+
+      if (gst_caps_features_contains (features,
+              GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY)) {
+        priv->can_direct_render = TRUE;
+        break;
+      }
+    }
+  }
+  gst_clear_caps (&peer_caps);
+
+  if (priv->can_direct_render) {
+    GST_DEBUG_OBJECT (self, "Downstream supports D3D11 memory");
+    gst_caps_set_features (priv->output_state->caps, 0,
+        gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr));
+  }
+#endif
+
+  GST_DEBUG_OBJECT (self,
+      "Negotiating with %" GST_PTR_FORMAT, priv->output_state->caps);
+
+  return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
+}
+
+#ifdef G_OS_WIN32
+/* GstVideoDecoder decide_allocation vfunc (Windows): configures the
+ * downstream pool. Output normally comes from the internal pool; this pool
+ * is only hit when copies are needed (e.g. reverse playback) */
+static gboolean
+gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
+{
+ GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+ GstQsvDecoderPrivate *priv = self->priv;
+ GstCaps *outcaps;
+ GstBufferPool *pool = nullptr;
+ guint n, size, min = 0, max = 0;
+ GstVideoInfo vinfo;
+ GstStructure *config;
+ GstD3D11AllocationParams *d3d11_params;
+ gboolean use_d3d11_pool;
+ gboolean has_videometa;
+ GstD3D11Device *device = GST_D3D11_DEVICE (priv->device);
+
+ gst_query_parse_allocation (query, &outcaps, nullptr);
+
+ if (!outcaps) {
+ GST_DEBUG_OBJECT (decoder, "No output caps");
+ return FALSE;
+ }
+
+ has_videometa = gst_query_find_allocation_meta (query,
+ GST_VIDEO_META_API_TYPE, nullptr);
+ use_d3d11_pool = priv->can_direct_render;
+
+ gst_video_info_from_caps (&vinfo, outcaps);
+ n = gst_query_get_n_allocation_pools (query);
+ if (n > 0)
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
+
+ /* Reject a proposed pool that is the wrong kind or on a different device */
+ if (pool) {
+ if (use_d3d11_pool) {
+ if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
+ GST_DEBUG_OBJECT (self,
+ "Downstream pool is not d3d11, will create new one");
+ gst_clear_object (&pool);
+ } else {
+ GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
+ if (dpool->device != device) {
+ GST_DEBUG_OBJECT (self, "Different device, will create new one");
+ gst_clear_object (&pool);
+ }
+ }
+ } else if (has_videometa) {
+ /* We will use d3d11 staging buffer pool */
+ gst_clear_object (&pool);
+ }
+ }
+
+ if (!pool) {
+ if (use_d3d11_pool)
+ pool = gst_d3d11_buffer_pool_new (device);
+ else if (has_videometa)
+ pool = gst_d3d11_staging_buffer_pool_new (device);
+ else
+ pool = gst_video_buffer_pool_new ();
+
+ size = (guint) vinfo.size;
+ }
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
+ gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+ /* Decoder will use internal pool to output but this pool is required for
+ * copying in case of reverse playback */
+ if (use_d3d11_pool) {
+ d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
+ if (!d3d11_params)
+ d3d11_params = gst_d3d11_allocation_params_new (device, &vinfo,
+ (GstD3D11AllocationFlags) 0, 0);
+ /* Use both render target (for videoprocessor) and shader resource
+ * for (pixel shader) bind flags for downstream to be able to use consistent
+ * conversion path even when we copy textures */
+ d3d11_params->desc[0].BindFlags |=
+ (D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE);
+
+ gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
+ gst_d3d11_allocation_params_free (d3d11_params);
+ }
+
+ gst_buffer_pool_set_config (pool, config);
+ /* d3d11 buffer pool will update buffer size based on allocated texture,
+ * get size from config again */
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
+ gst_structure_free (config);
+
+ if (n > 0)
+ gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
+ else
+ gst_query_add_allocation_pool (query, pool, size, min, max);
+ gst_object_unref (pool);
+
+ return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
+ query);
+}
+#else
+/* Non-Windows stub: nothing decoder-specific yet, defer to the base class */
+static gboolean
+gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
+{
+ /* TODO: add VA support */
+ return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
+ query);
+}
+#endif /* G_OS_WIN32 */
+
+/* Replies to a GST_QUERY_CONTEXT with our D3D11 device (Windows builds);
+ * always FALSE elsewhere.
+ * NOTE(review): on Windows the trailing "return FALSE" is unreachable since
+ * the #ifdef branch returns unconditionally — harmless, but may warn */
+static gboolean
+gst_qsv_decoder_handle_context_query (GstQsvDecoder * self, GstQuery * query)
+{
+#ifdef G_OS_WIN32
+ GstQsvDecoderPrivate *priv = self->priv;
+
+ return gst_d3d11_handle_context_query (GST_ELEMENT (self), query,
+ (GstD3D11Device *) priv->device);
+#endif
+
+ return FALSE;
+}
+
+static gboolean
+gst_qsv_decoder_sink_query (GstVideoDecoder * decoder, GstQuery * query)
+{
+  /* Only GST_QUERY_CONTEXT is special-cased (GPU device sharing); anything
+   * else — or an unhandled context query — goes to the base class */
+  if (GST_QUERY_TYPE (query) == GST_QUERY_CONTEXT &&
+      gst_qsv_decoder_handle_context_query (GST_QSV_DECODER (decoder), query))
+    return TRUE;
+
+  return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
+}
+
+static gboolean
+gst_qsv_decoder_src_query (GstVideoDecoder * decoder, GstQuery * query)
+{
+  /* Mirror of sink_query: answer context queries ourselves, delegate the
+   * rest to the base class */
+  if (GST_QUERY_TYPE (query) == GST_QUERY_CONTEXT &&
+      gst_qsv_decoder_handle_context_query (GST_QSV_DECODER (decoder), query))
+    return TRUE;
+
+  return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
+}
+
+/* GstVideoDecoder handle_frame vfunc: feeds one packetized input buffer to
+ * libmfx, lazily opening the decode session on the first frame (or after a
+ * mid-stream sequence change, at most once per input buffer).
+ * Takes ownership of @frame (finished, released or unreffed here).
+ * Fixed: the init_session failure path previously returned directly,
+ * leaking the mapped input buffer and the codec frame */
+static GstFlowReturn
+gst_qsv_decoder_handle_frame (GstVideoDecoder * decoder,
+    GstVideoCodecFrame * frame)
+{
+  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+  GstQsvDecoderPrivate *priv = self->priv;
+  mfxBitstream bs;
+  GstMapInfo info;
+  mfxStatus status;
+  GstFlowReturn ret = GST_FLOW_ERROR;
+  gboolean was_reconfigured = FALSE;
+
+  if (!gst_buffer_map (frame->input_buffer, &info, GST_MAP_READ)) {
+    GST_ERROR_OBJECT (self, "Failed to map input buffer");
+    gst_video_decoder_release_frame (decoder, frame);
+    return GST_FLOW_ERROR;
+  }
+
+  memset (&bs, 0, sizeof (mfxBitstream));
+
+  bs.Data = (mfxU8 *) info.data;
+  bs.DataLength = bs.MaxLength = (mfxU32) info.size;
+  bs.TimeStamp = gst_qsv_timestamp_from_gst (frame->pts);
+
+new_sequence:
+  if (!priv->decoder) {
+    /* The session can only be opened once a sequence header is parsed */
+    status = MFXVideoDECODE_DecodeHeader (priv->session,
+        &bs, &priv->video_param);
+
+    if (status != MFX_ERR_NONE) {
+      if (status == MFX_ERR_MORE_DATA) {
+        GST_WARNING_OBJECT (self, "Need more data to parse header");
+        ret = GST_FLOW_OK;
+      } else {
+        GST_ERROR_OBJECT (self, "Failed to parse header %d (%s)",
+            QSV_STATUS_ARGS (status));
+      }
+
+      goto unmap_and_error;
+    }
+
+    if (!gst_video_decoder_negotiate (decoder)) {
+      GST_ERROR_OBJECT (self, "Failed to negotiate");
+      ret = GST_FLOW_NOT_NEGOTIATED;
+      goto unmap_and_error;
+    }
+
+    if (!gst_qsv_decoder_init_session (self)) {
+      GST_ERROR_OBJECT (self, "Failed to init session");
+      /* Must unmap the input and release the frame like the other error
+       * paths; an early return here leaked both */
+      ret = GST_FLOW_ERROR;
+      goto unmap_and_error;
+    }
+  }
+
+  if (!priv->decoder) {
+    GST_ERROR_OBJECT (self, "Decoder object was not configured");
+    ret = GST_FLOW_NOT_NEGOTIATED;
+    goto unmap_and_error;
+  }
+
+  ret = gst_qsv_decoder_decode_frame (self, &bs, FALSE);
+
+  switch (ret) {
+    case GST_QSV_DECODER_FLOW_NEW_SEQUENCE:
+      /* Drain and reopen the session once; error out if it repeats */
+      if (!was_reconfigured) {
+        gst_qsv_decoder_drain_internal (self, FALSE);
+        gst_qsv_decoder_reset (self);
+        was_reconfigured = TRUE;
+
+        goto new_sequence;
+      }
+
+      ret = GST_FLOW_ERROR;
+      break;
+    case GST_VIDEO_DECODER_FLOW_NEED_DATA:
+      /* Not an error: decoder wants more bitstream before output appears */
+      ret = GST_FLOW_OK;
+      break;
+    default:
+      break;
+  }
+
+  gst_buffer_unmap (frame->input_buffer, &info);
+  gst_video_codec_frame_unref (frame);
+
+  return ret;
+
+unmap_and_error:
+  gst_buffer_unmap (frame->input_buffer, &info);
+  gst_video_decoder_release_frame (decoder, frame);
+
+  return ret;
+}
+
+/* GstVideoDecoder::flush vfunc. Calls drain_internal with TRUE —
+ * presumably discarding pending decoded output rather than pushing it;
+ * NOTE(review): confirm against gst_qsv_decoder_drain_internal. */
+static gboolean
+gst_qsv_decoder_flush (GstVideoDecoder * decoder)
+{
+  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+
+  GST_DEBUG_OBJECT (self, "Flush");
+
+  gst_qsv_decoder_drain_internal (self, TRUE);
+
+  return TRUE;
+}
+
+/* GstVideoDecoder::finish vfunc (EOS): drain and output all frames still
+ * held by the MFX decoder. */
+static GstFlowReturn
+gst_qsv_decoder_finish (GstVideoDecoder * decoder)
+{
+  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+
+  GST_DEBUG_OBJECT (self, "Finish");
+
+  return gst_qsv_decoder_drain_internal (self, FALSE);
+}
+
+/* GstVideoDecoder::drain vfunc: identical to finish — output everything
+ * pending without discarding. */
+static GstFlowReturn
+gst_qsv_decoder_drain (GstVideoDecoder * decoder)
+{
+  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
+
+  GST_DEBUG_OBJECT (self, "Drain");
+
+  return gst_qsv_decoder_drain_internal (self, FALSE);
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <mfx.h>
+#include "gstqsvutils.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_QSV_DECODER (gst_qsv_decoder_get_type())
+#define GST_QSV_DECODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_QSV_DECODER, GstQsvDecoder))
+#define GST_QSV_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_QSV_DECODER, GstQsvDecoderClass))
+#define GST_IS_QSV_DECODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_QSV_DECODER))
+#define GST_IS_QSV_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_QSV_DECODER))
+#define GST_QSV_DECODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_QSV_DECODER, GstQsvDecoderClass))
+#define GST_QSV_DECODER_CAST(obj) ((GstQsvDecoder *)obj)
+
+typedef struct _GstQsvDecoder GstQsvDecoder;
+typedef struct _GstQsvDecoderClass GstQsvDecoderClass;
+typedef struct _GstQsvDecoderPrivate GstQsvDecoderPrivate;
+
+/* Per-codec, per-device data handed to a subclass class_init; the subclass
+ * takes ownership (caps are unreffed and the struct freed there) */
+typedef struct _GstQsvDecoderClassData
+{
+  /* QSV implementation index this element is bound to */
+  guint impl_index;
+  /* DXGI adapter LUID, for Windows */
+  gint64 adapter_luid;
+  /* VA display device path, for Linux */
+  gchar *display_path;
+
+  GstCaps *sink_caps;
+  GstCaps *src_caps;
+} GstQsvDecoderClassData;
+
+struct _GstQsvDecoder
+{
+  GstVideoDecoder parent;
+
+  GstQsvDecoderPrivate *priv;
+};
+
+/* Abstract base class: codec subclasses fill in the fields below from
+ * their GstQsvDecoderClassData in class_init */
+struct _GstQsvDecoderClass
+{
+  GstVideoDecoderClass parent_class;
+
+  /* MFX codec id, e.g. MFX_CODEC_AVC */
+  mfxU32 codec_id;
+  mfxU32 impl_index;
+
+  /* DXGI adapter LUID, for Windows */
+  gint64 adapter_luid;
+
+  /* VA display device path, for Linux */
+  gchar display_path[64];
+};
+
+GType gst_qsv_decoder_get_type (void);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstQsvDecoder, gst_object_unref)
+
+G_END_DECLS
/* TODO: Add Linux video memory (VA/DMABuf) support */
#ifdef G_OS_WIN32
- priv->mem_type = GST_QSV_VIDEO_MEMORY;
+ priv->mem_type = GST_QSV_VIDEO_MEMORY | GST_QSV_ENCODER_IN_MEMORY;
*io_pattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
ret = gst_qsv_encoder_prepare_d3d11_pool (self, aligned_caps, aligned_info);
#endif
if (!ret) {
- priv->mem_type = GST_QSV_SYSTEM_MEMORY;
+ priv->mem_type = GST_QSV_SYSTEM_MEMORY | GST_QSV_ENCODER_IN_MEMORY;
*io_pattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
ret = gst_qsv_encoder_prepare_system_pool (self,
goto error;
}
-#define CHECK_STATUS(s,func) G_STMT_START { \
- if (s < MFX_ERR_NONE) { \
- GST_ERROR_OBJECT (self, G_STRINGIFY (func) " failed %d (%s)", \
- QSV_STATUS_ARGS (s)); \
- goto error; \
- } else if (status != MFX_ERR_NONE) { \
- GST_WARNING_OBJECT (self, G_STRINGIFY (func) " returned warning %d (%s)", \
- QSV_STATUS_ARGS (s)); \
- } \
-} G_STMT_END
-
status = encoder_handle->Query (¶m, ¶m);
/* If device is unhappy with LowPower = OFF, try again with unknown */
if (status < MFX_ERR_NONE) {
}
status = encoder_handle->Query (¶m, ¶m);
- CHECK_STATUS (status, MFXVideoENCODE::Query);
+ QSV_CHECK_STATUS (self, status, MFXVideoENCODE::Query);
status = encoder_handle->QueryIOSurf (¶m, &alloc_request);
- CHECK_STATUS (status, MFXVideoENCODE::QueryIOSurf);
+ QSV_CHECK_STATUS (self, status, MFXVideoENCODE::QueryIOSurf);
status = encoder_handle->Init (¶m);
- CHECK_STATUS (status, MFXVideoENCODE::Init);
+ QSV_CHECK_STATUS (self, status, MFXVideoENCODE::Init);
status = encoder_handle->GetVideoParam (¶m);
- CHECK_STATUS (status, MFXVideoENCODE::GetVideoParam);
-
-#undef CHECK_STATUS
+ QSV_CHECK_STATUS (self, status, MFXVideoENCODE::GetVideoParam);
GST_DEBUG_OBJECT (self, "NumFrameSuggested: %d, AsyncDepth %d",
alloc_request.NumFrameSuggested, param.AsyncDepth);
surface->qsv_frame =
gst_qsv_allocator_acquire_frame (priv->allocator, priv->mem_type,
- &priv->input_state->info, frame->input_buffer, priv->internal_pool);
+ &priv->input_state->info, gst_buffer_ref (frame->input_buffer),
+ priv->internal_pool);
+
if (!surface->qsv_frame) {
GST_ERROR_OBJECT (self, "Failed to wrap buffer with qsv frame");
gst_qsv_encoder_task_reset (self, task);
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstqsvh264dec.h"
+#include <string>
+#include <string.h>
+
+#ifdef G_OS_WIN32
+#include <gst/d3d11/gstd3d11.h>
+#else
+#include <gst/va/gstvadisplay_drm.h>
+#endif
+
+GST_DEBUG_CATEGORY_EXTERN (gst_qsv_h264_dec_debug);
+#define GST_CAT_DEFAULT gst_qsv_h264_dec_debug
+
+/* Trivial subclass of GstQsvDecoder: all H.264 specifics are supplied via
+ * GstQsvDecoderClassData in class_init */
+typedef struct _GstQsvH264Dec
+{
+  GstQsvDecoder parent;
+} GstQsvH264Dec;
+
+typedef struct _GstQsvH264DecClass
+{
+  GstQsvDecoderClass parent_class;
+} GstQsvH264DecClass;
+
+/* Class init for a dynamically registered per-device H.264 decoder type.
+ * Takes ownership of the GstQsvDecoderClassData passed as @data: caps are
+ * consumed by the pad templates and the struct is freed here. */
+static void
+gst_qsv_h264_dec_class_init (GstQsvH264DecClass * klass, gpointer data)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstQsvDecoderClass *qsvdec_class = GST_QSV_DECODER_CLASS (klass);
+  GstQsvDecoderClassData *cdata = (GstQsvDecoderClassData *) data;
+
+  qsvdec_class->codec_id = MFX_CODEC_AVC;
+  qsvdec_class->impl_index = cdata->impl_index;
+  qsvdec_class->adapter_luid = cdata->adapter_luid;
+  if (cdata->display_path) {
+    /* g_strlcpy() always NUL-terminates, unlike strncpy() which can leave
+     * the destination unterminated when the source fills the buffer */
+    g_strlcpy (qsvdec_class->display_path, cdata->display_path,
+        sizeof (qsvdec_class->display_path));
+  }
+
+  gst_element_class_set_static_metadata (element_class,
+      "Intel Quick Sync Video H.264 Decoder",
+      "Codec/Decoder/Video/Hardware",
+      "Intel Quick Sync Video H.264 Decoder",
+      "Seungha Yang <seungha@centricular.com>");
+
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+          cdata->sink_caps));
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+          cdata->src_caps));
+
+  gst_caps_unref (cdata->sink_caps);
+  gst_caps_unref (cdata->src_caps);
+  g_free (cdata->display_path);
+  g_free (cdata);
+}
+
+static void
+gst_qsv_h264_dec_init (GstQsvH264Dec * self)
+{
+  /* Nothing to do: per-codec configuration happens in class_init */
+}
+
+/* Width/height pair used when probing the device's maximum supported
+ * decode resolution */
+typedef struct
+{
+  guint width;
+  guint height;
+} Resolution;
+
+/* Probes @session for H.264 decode capability, builds matching caps and
+ * registers a per-device GstQsvH264Dec element type with @plugin. */
+void
+gst_qsv_h264_dec_register (GstPlugin * plugin, guint rank, guint impl_index,
+    GstObject * device, mfxSession session)
+{
+  mfxVideoParam param;
+  mfxInfoMFX *mfx;
+  static const Resolution resolutions_to_check[] = {
+    {1280, 720}, {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160},
+    {7680, 4320}, {8192, 4320}
+  };
+  Resolution max_resolution;
+
+  memset (&param, 0, sizeof (mfxVideoParam));
+  memset (&max_resolution, 0, sizeof (Resolution));
+
+  param.AsyncDepth = 4;
+  param.IOPattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
+
+  mfx = &param.mfx;
+  mfx->CodecId = MFX_CODEC_AVC;
+
+  /* Baseline probing configuration: progressive 8-bit 4:2:0 NV12,
+   * main profile */
+  mfx->FrameInfo.FrameRateExtN = 30;
+  mfx->FrameInfo.FrameRateExtD = 1;
+  mfx->FrameInfo.AspectRatioW = 1;
+  mfx->FrameInfo.AspectRatioH = 1;
+  mfx->FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
+  mfx->FrameInfo.FourCC = MFX_FOURCC_NV12;
+  mfx->FrameInfo.BitDepthLuma = 8;
+  mfx->FrameInfo.BitDepthChroma = 8;
+  mfx->FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
+  mfx->CodecProfile = MFX_PROFILE_AVC_MAIN;
+
+  /* Check max-resolution: query increasingly large frame sizes until the
+   * runtime rejects one */
+  for (guint i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
+    mfx->FrameInfo.Width = GST_ROUND_UP_16 (resolutions_to_check[i].width);
+    mfx->FrameInfo.Height = GST_ROUND_UP_16 (resolutions_to_check[i].height);
+    mfx->FrameInfo.CropW = resolutions_to_check[i].width;
+    mfx->FrameInfo.CropH = resolutions_to_check[i].height;
+
+    if (MFXVideoDECODE_Query (session, &param, &param) != MFX_ERR_NONE)
+      break;
+
+    max_resolution.width = resolutions_to_check[i].width;
+    max_resolution.height = resolutions_to_check[i].height;
+  }
+
+  GST_INFO ("Maximum supported resolution: %dx%d",
+      max_resolution.width, max_resolution.height);
+
+  /* To cover both landscape and portrait,
+   * select max value (width in this case) */
+  guint resolution = MAX (max_resolution.width, max_resolution.height);
+  std::string src_caps_str = "video/x-raw, format=(string) NV12";
+
+  src_caps_str += ", width=(int) [ 16, " + std::to_string (resolution) + " ]";
+  src_caps_str += ", height=(int) [ 16, " + std::to_string (resolution) + " ]";
+
+  GstCaps *src_caps = gst_caps_from_string (src_caps_str.c_str ());
+
+  /* TODO: Add support for VA */
+#ifdef G_OS_WIN32
+  /* Put the D3D11-memory caps structure first so it is preferred during
+   * negotiation over system memory */
+  GstCaps *d3d11_caps = gst_caps_copy (src_caps);
+  GstCapsFeatures *caps_features =
+      gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr);
+  gst_caps_set_features_simple (d3d11_caps, caps_features);
+  gst_caps_append (d3d11_caps, src_caps);
+  src_caps = d3d11_caps;
+#endif
+
+  std::string sink_caps_str = "video/x-h264";
+  sink_caps_str += ", width=(int) [ 16, " + std::to_string (resolution) + " ]";
+  sink_caps_str += ", height=(int) [ 16, " + std::to_string (resolution) + " ]";
+
+  sink_caps_str += ", stream-format=(string) byte-stream";
+  sink_caps_str += ", alignment=(string) au";
+  sink_caps_str += ", profile=(string) { high, progressive-high, "
+      "constrained-high, main, constrained-baseline, baseline } ";
+
+  GstCaps *sink_caps = gst_caps_from_string (sink_caps_str.c_str ());
+
+  /* Caps live in the element class for the process lifetime; silence the
+   * leak tracer */
+  GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+  GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+
+  GstQsvDecoderClassData *cdata = g_new0 (GstQsvDecoderClassData, 1);
+  cdata->sink_caps = sink_caps;
+  cdata->src_caps = src_caps;
+  cdata->impl_index = impl_index;
+
+#ifdef G_OS_WIN32
+  gint64 device_luid;
+  g_object_get (device, "adapter-luid", &device_luid, nullptr);
+  cdata->adapter_luid = device_luid;
+#else
+  gchar *display_path;
+  g_object_get (device, "path", &display_path, nullptr);
+  cdata->display_path = display_path;
+#endif
+
+  GType type;
+  gchar *type_name;
+  gchar *feature_name;
+  GTypeInfo type_info = {
+    sizeof (GstQsvH264DecClass),
+    nullptr,
+    nullptr,
+    (GClassInitFunc) gst_qsv_h264_dec_class_init,
+    nullptr,
+    cdata,
+    sizeof (GstQsvH264Dec),
+    0,
+    (GInstanceInitFunc) gst_qsv_h264_dec_init,
+  };
+
+  type_name = g_strdup ("GstQsvH264Dec");
+  feature_name = g_strdup ("qsvh264dec");
+
+  /* Multiple QSV devices need distinct GType and feature names */
+  gint index = 0;
+  while (g_type_from_name (type_name)) {
+    index++;
+    g_free (type_name);
+    g_free (feature_name);
+    type_name = g_strdup_printf ("GstQsvH264Device%dDec", index);
+    feature_name = g_strdup_printf ("qsvh264device%ddec", index);
+  }
+
+  type = g_type_register_static (GST_TYPE_QSV_DECODER, type_name, &type_info,
+      (GTypeFlags) 0);
+
+  /* Secondary devices get a slightly lower rank than the first one */
+  if (rank > 0 && index != 0)
+    rank--;
+
+  if (!gst_element_register (plugin, feature_name, rank, type))
+    GST_WARNING ("Failed to register plugin '%s'", type_name);
+
+  g_free (type_name);
+  g_free (feature_name);
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include "gstqsvdecoder.h"
+
+G_BEGIN_DECLS
+
+void gst_qsv_h264_dec_register (GstPlugin * plugin,
+ guint rank,
+ guint impl_index,
+ GstObject * device,
+ mfxSession session);
+
+G_END_DECLS
const gchar * gst_qsv_status_to_string (mfxStatus status);
-/* helper macro for debugging log */
#define QSV_STATUS_ARGS(status) \
status, gst_qsv_status_to_string (status)
+#define QSV_CHECK_STATUS(e,s,f) G_STMT_START { \
+ if (s < MFX_ERR_NONE) { \
+ GST_ERROR_OBJECT (e, G_STRINGIFY (f) " failed %d (%s)", \
+ QSV_STATUS_ARGS (s)); \
+ goto error; \
+ } else if (status != MFX_ERR_NONE) { \
+ GST_WARNING_OBJECT (e, G_STRINGIFY (f) " returned warning %d (%s)", \
+ QSV_STATUS_ARGS (s)); \
+ } \
+} G_STMT_END
+
static inline GstClockTime
gst_qsv_timestamp_to_gst (mfxU64 timestamp)
{
qsv_sources = [
'gstqsvallocator.cpp',
+ 'gstqsvdecoder.cpp',
'gstqsvencoder.cpp',
+ 'gstqsvh264dec.cpp',
'gstqsvh264enc.cpp',
'gstqsvh265enc.cpp',
'gstqsvutils.cpp',
#include <gst/gst.h>
#include <mfx.h>
#include "gstqsvutils.h"
+#include "gstqsvh264dec.h"
#include "gstqsvh264enc.h"
#include "gstqsvh265enc.h"
#include "gstqsvvp9enc.h"
GST_DEBUG_CATEGORY (gst_qsv_debug);
GST_DEBUG_CATEGORY (gst_qsv_allocator_debug);
+GST_DEBUG_CATEGORY (gst_qsv_decoder_debug);
GST_DEBUG_CATEGORY (gst_qsv_encoder_debug);
+GST_DEBUG_CATEGORY (gst_qsv_h264_dec_debug);
GST_DEBUG_CATEGORY (gst_qsv_h264_enc_debug);
GST_DEBUG_CATEGORY (gst_qsv_h265_enc_debug);
GST_DEBUG_CATEGORY (gst_qsv_vp9_enc_debug);
GST_INFO ("Found %d platform devices", g_list_length (platform_devices));
- GST_DEBUG_CATEGORY_INIT (gst_qsv_encoder_debug,
- "qsvencoder", 0, "qsvencoder");
GST_DEBUG_CATEGORY_INIT (gst_qsv_allocator_debug,
"qsvallocator", 0, "qsvallocator");
+ GST_DEBUG_CATEGORY_INIT (gst_qsv_decoder_debug,
+ "qsvdecoder", 0, "qsvdecoder");
+ GST_DEBUG_CATEGORY_INIT (gst_qsv_encoder_debug,
+ "qsvencoder", 0, "qsvencoder");
+ GST_DEBUG_CATEGORY_INIT (gst_qsv_h264_dec_debug,
+ "qsvh264dec", 0, "qsvh264dec");
GST_DEBUG_CATEGORY_INIT (gst_qsv_h264_enc_debug,
"qsvh264enc", 0, "qsvh264enc");
GST_DEBUG_CATEGORY_INIT (gst_qsv_h265_enc_debug,
if (!session)
goto next;
+ gst_qsv_h264_dec_register (plugin, GST_RANK_MARGINAL, i, device, session);
+
gst_qsv_h264_enc_register (plugin, GST_RANK_NONE, i, device, session);
gst_qsv_h265_enc_register (plugin, GST_RANK_NONE, i, device, session);
gst_qsv_vp9_enc_register (plugin, GST_RANK_NONE, i, device, session);