static GstAllocator *_vulkan_buffer_memory_allocator;
-static gboolean
-_find_memory_type_index_with_type_properties (GstVulkanDevice * device,
- guint32 typeBits, VkFlags properties, guint32 * typeIndex)
-{
- guint32 i;
-
- /* Search memtypes to find first index with those properties */
- for (i = 0; i < 32; i++) {
- if ((typeBits & 1) == 1) {
- /* Type is available, does it match user properties? */
- if ((device->memory_properties.memoryTypes[i].
- propertyFlags & properties) == properties) {
- *typeIndex = i;
- return TRUE;
- }
- }
- typeBits >>= 1;
- }
-
- return FALSE;
-}
-
#define GST_VK_BUFFER_CREATE_INFO_INIT GST_VK_STRUCT_8
#define GST_VK_BUFFER_CREATE_INFO(info, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ) \
G_STMT_START { \
static void
_vk_buffer_mem_init (GstVulkanBufferMemory * mem, GstAllocator * allocator,
- GstMemory * parent, GstVulkanDevice * device, GstAllocationParams * params,
- gsize size, gpointer user_data, GDestroyNotify notify)
+ GstMemory * parent, GstVulkanDevice * device, VkBufferUsageFlags usage,
+ GstAllocationParams * params, gsize size, gpointer user_data,
+ GDestroyNotify notify)
{
gsize align = gst_memory_alignment, offset = 0, maxsize = size;
GstMemoryFlags flags = 0;
GstVulkanBufferMemory *mem = NULL;
GstAllocationParams params = { 0, };
VkBufferCreateInfo buffer_info;
- guint32 memory_type_index;
GError *error = NULL;
VkBuffer buffer;
VkResult err;
vkGetBufferMemoryRequirements (device->device, buffer, &mem->requirements);
params.align = mem->requirements.alignment;
- _vk_buffer_mem_init (mem, allocator, parent, device, &params,
+ _vk_buffer_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->buffer = buffer;
- if (!_find_memory_type_index_with_type_properties (device,
- mem->requirements.memoryTypeBits, mem_prop_flags,
- &memory_type_index)) {
- GST_CAT_ERROR (GST_CAT_VULKAN_BUFFER_MEMORY,
- "Could not find suitable memory type");
- goto error;
- }
-
- mem->vk_mem = (GstVulkanMemory *)
- gst_vulkan_memory_alloc (device, memory_type_index, &params,
- mem->requirements.size, mem_prop_flags);
- if (!mem->vk_mem) {
- GST_CAT_ERROR (GST_CAT_VULKAN_BUFFER_MEMORY,
- "Failed to allocate device memory");
- goto error;
- }
-
- err =
- vkBindBufferMemory (device->device, mem->buffer, mem->vk_mem->mem_ptr,
- 0 /* offset */ );
- if (gst_vulkan_error_to_g_error (err, &error, "vkBindBufferMemory") < 0)
- goto vk_error;
-
if (usage & (VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
/* no device memory so no mapping */
params.flags = GST_MEMORY_FLAG_NOT_MAPPABLE | GST_MEMORY_FLAG_READONLY;
- _vk_buffer_mem_init (mem, allocator, parent, device, &params,
+ _vk_buffer_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->wrapped = TRUE;
GstMapInfo *vk_map_info;
/* FIXME: possible barrier needed */
+ g_mutex_lock (&mem->lock);
- if (!mem->vk_mem)
+ if (!mem->vk_mem) {
+ g_mutex_unlock (&mem->lock);
return NULL;
+ }
vk_map_info = g_new0 (GstMapInfo, 1);
info->user_data[0] = vk_map_info;
if (!gst_memory_map ((GstMemory *) mem->vk_mem, vk_map_info, info->flags)) {
g_free (vk_map_info);
+ g_mutex_unlock (&mem->lock);
return NULL;
}
+ g_mutex_unlock (&mem->lock);
return vk_map_info->data;
}
static void
_vk_buffer_mem_unmap_full (GstVulkanBufferMemory * mem, GstMapInfo * info)
{
+ /* take the lock so a concurrent gst_vulkan_buffer_memory_bind() cannot
+ * replace mem->vk_mem while we are unmapping it */
+ g_mutex_lock (&mem->lock);
gst_memory_unmap ((GstMemory *) mem->vk_mem, info->user_data[0]);
+ g_mutex_unlock (&mem->lock);
g_free (info->user_data[0]);
}
}
GstMemory *
+gst_vulkan_buffer_memory_alloc_bind (GstVulkanDevice * device, VkFormat format,
+ gsize size, VkBufferUsageFlags usage, VkMemoryPropertyFlags mem_prop_flags)
+{
+ GstAllocationParams params = { 0, };
+ GstVulkanBufferMemory *mem;
+ GstVulkanMemory *dev_mem;
+ guint32 type_idx;
+
+ /* create the VkBuffer wrapper; no device memory is bound to it yet */
+ mem =
+ (GstVulkanBufferMemory *) gst_vulkan_buffer_memory_alloc (device, format,
+ size, usage, mem_prop_flags);
+ if (!mem)
+ return NULL;
+
+ /* pick a memory type compatible with both the buffer and the requested
+ * property flags */
+ if (!gst_vulkan_memory_find_memory_type_index_with_type_properties (device,
+ mem->requirements.memoryTypeBits, mem_prop_flags, &type_idx)) {
+ gst_memory_unref (GST_MEMORY_CAST (mem));
+ return NULL;
+ }
+
+ /* XXX: assumes alignment is a power of 2 (GstAllocationParams.align is a
+ * bitmask, hence the - 1) */
+ params.align = mem->requirements.alignment - 1;
+ dev_mem = (GstVulkanMemory *) gst_vulkan_memory_alloc (device, type_idx,
+ &params, mem->requirements.size, mem_prop_flags);
+ if (!dev_mem) {
+ gst_memory_unref (GST_MEMORY_CAST (mem));
+ return NULL;
+ }
+
+ if (!gst_vulkan_buffer_memory_bind (mem, dev_mem)) {
+ gst_memory_unref (GST_MEMORY_CAST (dev_mem));
+ gst_memory_unref (GST_MEMORY_CAST (mem));
+ return NULL;
+ }
+ /* the bind took its own reference on dev_mem */
+ gst_memory_unref (GST_MEMORY_CAST (dev_mem));
+
+ return (GstMemory *) mem;
+}
+
+GstMemory *
gst_vulkan_buffer_memory_wrapped (GstVulkanDevice * device, VkBuffer buffer,
VkFormat format, VkBufferUsageFlags usage, gpointer user_data,
GDestroyNotify notify)
return (GstMemory *) mem;
}
+/* Binds @memory (at its vk_offset) as the backing store for @buf_mem's
+ * VkBuffer, replacing and unreffing any previously bound memory.  Fails if
+ * the previous memory is still mapped or if @memory is too small.  Takes a
+ * reference on @memory.  Returns TRUE on success. */
+gboolean
+gst_vulkan_buffer_memory_bind (GstVulkanBufferMemory * buf_mem,
+ GstVulkanMemory * memory)
+{
+ gsize maxsize;
+
+ g_return_val_if_fail (gst_is_vulkan_buffer_memory (GST_MEMORY_CAST (buf_mem)),
+ FALSE);
+ g_return_val_if_fail (gst_is_vulkan_memory (GST_MEMORY_CAST (memory)), FALSE);
+
+ /* will we overrun the allocated data? */
+ gst_memory_get_sizes (GST_MEMORY_CAST (memory), NULL, &maxsize);
+ g_return_val_if_fail (memory->vk_offset + buf_mem->requirements.size <=
+ maxsize, FALSE);
+
+ g_mutex_lock (&buf_mem->lock);
+
+ if (buf_mem->vk_mem) {
+ guint vk_mem_map_count = buf_mem->vk_mem->map_count;
+ if (vk_mem_map_count > 0) {
+ /* cannot rebind while the previous memory is mapped; warn and bail */
+ g_mutex_unlock (&buf_mem->lock);
+ g_return_val_if_fail (vk_mem_map_count == 0, FALSE);
+ }
+ gst_memory_unref (GST_MEMORY_CAST (buf_mem->vk_mem));
+ }
+
+ /* FIXME: check the VkResult and propagate failure */
+ vkBindBufferMemory (buf_mem->device->device, buf_mem->buffer, memory->mem_ptr,
+ memory->vk_offset);
+
+ buf_mem->vk_mem =
+ (GstVulkanMemory *) gst_memory_ref (GST_MEMORY_CAST (memory));
+ g_mutex_unlock (&buf_mem->lock);
+
+ return TRUE;
+}
+
G_DEFINE_TYPE (GstVulkanBufferMemoryAllocator,
gst_vulkan_buffer_memory_allocator, GST_TYPE_ALLOCATOR);
GstVulkanMemory *vk_mem;
VkMemoryRequirements requirements;
+ VkBufferUsageFlags usage;
GMutex lock;
gboolean wrapped;
VkBufferUsageFlags usage,
VkMemoryPropertyFlags mem_prop_flags);
+GstMemory * gst_vulkan_buffer_memory_alloc_bind (GstVulkanDevice * device,
+ VkFormat format,
+ gsize size,
+ VkBufferUsageFlags usage,
+ VkMemoryPropertyFlags mem_prop_flags);
+
GstMemory * gst_vulkan_buffer_memory_wrapped (GstVulkanDevice * device,
VkBuffer buffer,
VkFormat format,
gpointer user_data,
GDestroyNotify notify);
+gboolean gst_vulkan_buffer_memory_bind (GstVulkanBufferMemory * buf_mem,
+ GstVulkanMemory * memory);
+
G_END_DECLS
#endif /* _VK_BUFFER_MEMORY_H_ */
}
static gboolean
-_find_memory_type_index_with_type_properties (GstVulkanDevice * device,
- guint32 typeBits, VkFlags properties, guint32 * typeIndex)
-{
- guint32 i;
-
- /* Search memtypes to find first index with those properties */
- for (i = 0; i < 32; i++) {
- if ((typeBits & 1) == 1) {
- /* Type is available, does it match user properties? */
- if ((device->memory_properties.
- memoryTypes[i].propertyFlags & properties) == properties) {
- *typeIndex = i;
- return TRUE;
- }
- }
- typeBits >>= 1;
- }
-
- return FALSE;
-}
-
-static gboolean
_create_info_from_args (VkImageCreateInfo * info, VkFormat format, gsize width,
gsize height, VkImageTiling tiling, VkImageUsageFlags usage)
{
static void
_vk_image_mem_init (GstVulkanImageMemory * mem, GstAllocator * allocator,
- GstMemory * parent, GstVulkanDevice * device, GstAllocationParams * params,
- gsize size, gpointer user_data, GDestroyNotify notify)
+ GstMemory * parent, GstVulkanDevice * device, VkImageUsageFlags usage,
+ GstAllocationParams * params, gsize size, gpointer user_data,
+ GDestroyNotify notify)
{
gsize align = gst_memory_alignment, offset = 0, maxsize = size;
GstMemoryFlags flags = 0;
mem->device = gst_object_ref (device);
mem->image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
+ mem->usage = usage;
mem->wrapped = FALSE;
mem->notify = notify;
mem->user_data = user_data;
GstAllocationParams params = { 0, };
VkImageViewCreateInfo view_info;
VkImageCreateInfo image_info;
- guint32 memory_type_index;
VkPhysicalDevice gpu;
GError *error = NULL;
VkImage image;
vkGetImageMemoryRequirements (device->device, image, &mem->requirements);
params.align = mem->requirements.alignment;
- _vk_image_mem_init (mem, allocator, parent, device, &params,
+ _vk_image_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->create_info = image_info;
mem->image = image;
vkGetPhysicalDeviceImageFormatProperties (gpu, format, VK_IMAGE_TYPE_2D,
tiling, usage, 0, &mem->format_properties);
- if (!_find_memory_type_index_with_type_properties (device,
- mem->requirements.memoryTypeBits, mem_prop_flags,
- &memory_type_index)) {
- GST_CAT_ERROR (GST_CAT_VULKAN_IMAGE_MEMORY,
- "Could not find suitable memory type");
- goto error;
- }
-
- mem->vk_mem = (GstVulkanMemory *)
- gst_vulkan_memory_alloc (device, memory_type_index, &params,
- mem->requirements.size, mem_prop_flags);
- if (!mem->vk_mem) {
- GST_CAT_ERROR (GST_CAT_VULKAN_IMAGE_MEMORY,
- "Failed to allocate device memory");
- goto error;
- }
-
- err =
- vkBindImageMemory (device->device, mem->image, mem->vk_mem->mem_ptr,
- 0 /* offset */ );
- if (gst_vulkan_error_to_g_error (err, &error, "vkBindImageMemory") < 0)
- goto vk_error;
-
if (usage & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) {
_view_create_info (mem->image, format, &view_info);
vkGetImageMemoryRequirements (device->device, mem->image, &mem->requirements);
params.flags = GST_MEMORY_FLAG_NOT_MAPPABLE | GST_MEMORY_FLAG_READONLY;
- _vk_image_mem_init (mem, allocator, parent, device, &params,
+ _vk_image_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->wrapped = TRUE;
GstMapInfo *vk_map_info;
/* FIXME: possible layout transformation needed */
+ g_mutex_lock (&mem->lock);
- if (!mem->vk_mem)
+ if (!mem->vk_mem) {
+ g_mutex_unlock (&mem->lock);
return NULL;
+ }
vk_map_info = g_new0 (GstMapInfo, 1);
info->user_data[0] = vk_map_info;
if (!gst_memory_map ((GstMemory *) mem->vk_mem, vk_map_info, info->flags)) {
g_free (vk_map_info);
+ g_mutex_unlock (&mem->lock);
return NULL;
}
+ g_mutex_unlock (&mem->lock);
return vk_map_info->data;
}
static void
_vk_image_mem_unmap_full (GstVulkanImageMemory * mem, GstMapInfo * info)
{
+ /* take the lock so a concurrent gst_vulkan_image_memory_bind() cannot
+ * replace mem->vk_mem while we are unmapping it */
+ g_mutex_lock (&mem->lock);
gst_memory_unmap ((GstMemory *) mem->vk_mem, info->user_data[0]);
+ g_mutex_unlock (&mem->lock);
g_free (info->user_data[0]);
}
/**
* gst_vulkan_image_memory_alloc:
- * @device:a #GstVulkanDevice
- * @memory_type_index: the Vulkan memory type index
- * @params: a #GstAllocationParams
- * @size: the size to allocate
*
* Allocated a new #GstVulkanImageMemory.
*
}
GstMemory *
+gst_vulkan_image_memory_alloc_bind (GstVulkanDevice * device, VkFormat format,
+ gsize width, gsize height, VkImageTiling tiling, VkImageUsageFlags usage,
+ VkMemoryPropertyFlags mem_prop_flags)
+{
+ GstAllocationParams params = { 0, };
+ GstVulkanImageMemory *mem;
+ GstVulkanMemory *dev_mem;
+ guint32 type_idx;
+
+ /* create the VkImage wrapper; no device memory is bound to it yet */
+ mem =
+ (GstVulkanImageMemory *) gst_vulkan_image_memory_alloc (device, format,
+ width, height, tiling, usage, mem_prop_flags);
+ if (!mem)
+ return NULL;
+
+ /* pick a memory type compatible with both the image and the requested
+ * property flags */
+ if (!gst_vulkan_memory_find_memory_type_index_with_type_properties (device,
+ mem->requirements.memoryTypeBits, mem_prop_flags, &type_idx)) {
+ gst_memory_unref (GST_MEMORY_CAST (mem));
+ return NULL;
+ }
+
+ /* XXX: assumes alignment is a power of 2 (GstAllocationParams.align is a
+ * bitmask, hence the - 1) */
+ params.align = mem->requirements.alignment - 1;
+ dev_mem = (GstVulkanMemory *) gst_vulkan_memory_alloc (device, type_idx,
+ &params, mem->requirements.size, mem_prop_flags);
+ if (!dev_mem) {
+ gst_memory_unref (GST_MEMORY_CAST (mem));
+ return NULL;
+ }
+
+ if (!gst_vulkan_image_memory_bind (mem, dev_mem)) {
+ gst_memory_unref (GST_MEMORY_CAST (dev_mem));
+ gst_memory_unref (GST_MEMORY_CAST (mem));
+ return NULL;
+ }
+ /* the bind took its own reference on dev_mem */
+ gst_memory_unref (GST_MEMORY_CAST (dev_mem));
+
+ return (GstMemory *) mem;
+}
+
+GstMemory *
gst_vulkan_image_memory_wrapped (GstVulkanDevice * device, VkImage image,
VkFormat format, gsize width, gsize height, VkImageTiling tiling,
VkImageUsageFlags usage, gpointer user_data, GDestroyNotify notify)
return image->create_info.extent.height;
}
+/* Binds @memory (at its vk_offset) as the backing store for @img_mem's
+ * VkImage, replacing and unreffing any previously bound memory.  Fails if
+ * the previous memory is still mapped or if @memory is too small.  Takes a
+ * reference on @memory.  Returns TRUE on success. */
+gboolean
+gst_vulkan_image_memory_bind (GstVulkanImageMemory * img_mem,
+ GstVulkanMemory * memory)
+{
+ gsize maxsize;
+
+ g_return_val_if_fail (gst_is_vulkan_image_memory (GST_MEMORY_CAST (img_mem)),
+ FALSE);
+ g_return_val_if_fail (gst_is_vulkan_memory (GST_MEMORY_CAST (memory)), FALSE);
+
+ /* will we overrun the allocated data? */
+ gst_memory_get_sizes (GST_MEMORY_CAST (memory), NULL, &maxsize);
+ g_return_val_if_fail (memory->vk_offset + img_mem->requirements.size <=
+ maxsize, FALSE);
+
+ g_mutex_lock (&img_mem->lock);
+
+ if (img_mem->vk_mem) {
+ guint vk_mem_map_count = img_mem->vk_mem->map_count;
+ if (vk_mem_map_count > 0) {
+ /* cannot rebind while the previous memory is mapped; warn and bail */
+ g_mutex_unlock (&img_mem->lock);
+ g_return_val_if_fail (vk_mem_map_count == 0, FALSE);
+ }
+ gst_memory_unref (GST_MEMORY_CAST (img_mem->vk_mem));
+ }
+
+ /* FIXME: check the VkResult and propagate failure */
+ vkBindImageMemory (img_mem->device->device, img_mem->image, memory->mem_ptr,
+ memory->vk_offset);
+ img_mem->vk_mem =
+ (GstVulkanMemory *) gst_memory_ref (GST_MEMORY_CAST (memory));
+ g_mutex_unlock (&img_mem->lock);
+
+ return TRUE;
+}
+
G_DEFINE_TYPE (GstVulkanImageMemoryAllocator, gst_vulkan_image_memory_allocator,
GST_TYPE_ALLOCATOR);
VkImageCreateInfo create_info;
VkMemoryRequirements requirements;
VkImageFormatProperties format_properties;
+ VkImageUsageFlags usage;
GMutex lock;
gboolean wrapped;
VkImageUsageFlags usage,
VkMemoryPropertyFlags mem_prop_flags);
+GstMemory * gst_vulkan_image_memory_alloc_bind (GstVulkanDevice * device,
+ VkFormat format,
+ gsize width,
+ gsize height,
+ VkImageTiling tiling,
+ VkImageUsageFlags usage,
+ VkMemoryPropertyFlags mem_prop_flags);
+
GstMemory * gst_vulkan_image_memory_wrapped (GstVulkanDevice * device,
VkImage image,
VkFormat format,
VkImageUsageFlags usage,
gpointer user_data,
GDestroyNotify notify);
+
+gboolean gst_vulkan_image_memory_bind (GstVulkanImageMemory * img_mem,
+ GstVulkanMemory * memory);
+
gboolean gst_vulkan_image_memory_set_layout (GstVulkanImageMemory * vk_mem,
VkImageLayout,
VkImageMemoryBarrier * barrier);
{ a, b, c, d, e, f }
#define GST_VK_STRUCT_7(a, b, c, d, e, f, g) \
{ a, b, c, d, e, f, g }
+#define GST_VK_STRUCT_8(a, b, c, d, e, f, g, h) \
+ { a, b, c, d, e, f, g, h }
#define GST_VK_BUFFER_IMAGE_COPY_INIT GST_VK_STRUCT_6
#define GST_VK_BUFFER_IMAGE_COPY(info,bufferOffset_,bufferRowLength_,bufferImageHeight_,imageSubresourceLayers_,imageOffset_,imageExtent_) \
* Vulkan device memory.
*/
+/* WARNING: while suballocation is allowed, nothing prevents aliasing which
+ * requires external synchronisation */
+
#define GST_CAT_DEFUALT GST_CAT_VULKAN_MEMORY
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFUALT);
mem->properties = mem_prop_flags;
mem->notify = notify;
mem->user_data = user_data;
+ mem->vk_offset = 0;
g_mutex_init (&mem->lock);
VkMemoryPropertyFlags mem_props_flags, gpointer user_data,
GDestroyNotify notify)
{
- GstVulkanMemory *mem = g_slice_new0 (GstVulkanMemory);
+ GstVulkanMemory *mem = g_new0 (GstVulkanMemory, 1);
GError *error = NULL;
VkResult err;
return NULL;
}
- err = vkMapMemory (mem->device->device, mem->mem_ptr, 0, size, 0, &data);
+ err = vkMapMemory (mem->device->device, mem->mem_ptr, mem->vk_offset,
+ size, 0, &data);
if (gst_vulkan_error_to_g_error (err, &error, "vkMapMemory") < 0) {
GST_CAT_ERROR (GST_CAT_VULKAN_MEMORY, "Failed to map device memory %s",
error->message);
}
static GstMemory *
-_vk_mem_share (GstVulkanMemory * mem, gssize offset, gssize size)
+_vk_mem_share (GstVulkanMemory * mem, gssize offset, gsize size)
{
- return NULL;
+ GstVulkanMemory *shared;
+ GstVulkanMemory *parent = mem;
+ GstAllocationParams params = { 0, };
+
+ if (size == (gsize) - 1)
+ size = mem->mem.size - offset;
+
+ g_return_val_if_fail (size > 0, NULL);
+
+ /* walk up to the root memory: it owns the VkDeviceMemory we alias.
+ * NOTE: stop *at* the root, not past it, or parent would end up NULL */
+ while (GST_MEMORY_CAST (parent)->parent)
+ parent = (GstVulkanMemory *) GST_MEMORY_CAST (parent)->parent;
+
+ params.flags = GST_MEMORY_FLAGS (mem);
+ params.align = GST_MEMORY_CAST (parent)->align;
+
+ /* allocate only after the size check so nothing leaks on failure */
+ shared = g_new0 (GstVulkanMemory, 1);
+ _vk_mem_init (shared, _vulkan_memory_allocator, GST_MEMORY_CAST (mem),
+ parent->device, parent->alloc_info.memoryTypeIndex, &params, size,
+ parent->properties, NULL, NULL);
+ /* the shared memory aliases the root's VkDeviceMemory at a byte offset;
+ * wrapped = TRUE prevents _vk_mem_free from vkFreeMemory'ing it twice */
+ shared->mem_ptr = parent->mem_ptr;
+ shared->wrapped = TRUE;
+ shared->vk_offset = offset + mem->vk_offset;
+
+ return GST_MEMORY_CAST (shared);
}
static gboolean
if (mem->notify)
mem->notify (mem->user_data);
- vkFreeMemory (mem->device->device, mem->mem_ptr, NULL);
+ if (mem->mem_ptr && !mem->wrapped)
+ vkFreeMemory (mem->device->device, mem->mem_ptr, NULL);
gst_object_unref (mem->device);
}
+/* Finds the first memory type index selected by the @typeBits mask (as
+ * reported in VkMemoryRequirements.memoryTypeBits) whose propertyFlags
+ * contain all of @properties.  On success stores the index in @typeIndex
+ * and returns TRUE; returns FALSE if no suitable type exists. */
+gboolean
+gst_vulkan_memory_find_memory_type_index_with_type_properties (GstVulkanDevice *
+ device, guint32 typeBits, VkMemoryPropertyFlags properties,
+ guint32 * typeIndex)
+{
+ guint32 i;
+
+ /* Search memtypes to find first index with those properties */
+ for (i = 0; i < 32; i++) {
+ /* 32 == VK_MAX_MEMORY_TYPES; one bit per memory type */
+ if ((typeBits & 1) == 1) {
+ /* Type is available, does it match user properties? */
+ if ((device->memory_properties.memoryTypes[i].
+ propertyFlags & properties) == properties) {
+ *typeIndex = i;
+ return TRUE;
+ }
+ }
+ typeBits >>= 1;
+ }
+
+ return FALSE;
+}
+
/**
* gst_vulkan_memory_alloc:
* @device:a #GstVulkanDevice
/* <protected> */
GMutex lock;
+ guint map_count;
/* <private> */
GDestroyNotify notify;
VkMemoryAllocateInfo alloc_info;
VkMemoryPropertyFlags properties;
+
+ /* we need our own offset because GstMemory's is used to offset into the
+ * mapped pointer which when suballocating, we need to avoid. This in
+ * relation to the root memory */
+ guint64 vk_offset;
+ gboolean wrapped;
};
/**
gsize size,
VkMemoryPropertyFlags mem_prop_flags);
+gboolean gst_vulkan_memory_find_memory_type_index_with_type_properties (GstVulkanDevice * device,
+ guint32 typeBits,
+ VkMemoryPropertyFlags properties,
+ guint32 * typeIndex);
+
G_END_DECLS
#endif /* _GST_VULKAN_BASE_BUFFER_H_ */
supports_present =
gst_vulkan_window_get_presentation_support (swapper->window,
swapper->device, i);
- if ((swapper->device->
- queue_family_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
+ if ((swapper->device->queue_family_props[i].
+ queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
if (supports_present) {
/* found one that supports both */
graphics_queue = present_queue = i;
n_images_wanted = swapper->surf_props.maxImageCount;
}
- if (swapper->surf_props.
- supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
+ if (swapper->
+ surf_props.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
} else {
preTransform = swapper->surf_props.currentTransform;
"Incorrect usage flags available for the swap images");
return FALSE;
}
- if ((swapper->
- surf_props.supportedUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
+ if ((swapper->surf_props.
+ supportedUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
!= 0) {
usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
} else {
return FALSE;
}
- buf_mem =
- (GstVulkanBufferMemory *) gst_vulkan_buffer_memory_alloc (swapper->device,
- swap_mem->create_info.format, GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0) *
- GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0),
+ size =
+ GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,
+ 0) * GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0);
+ buf_mem = (GstVulkanBufferMemory *)
+ gst_vulkan_buffer_memory_alloc_bind (swapper->device,
+ swap_mem->create_info.format, size,
VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
return FALSE;
}
- size =
- GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,
- 0) * GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0);
g_assert (buf_map_info.size >= size);
memcpy (buf_map_info.data, vframe.data[0], size);
gst_memory_unmap ((GstMemory *) buf_mem, &buf_map_info);