2 * GStreamer Plugins Vulkan
3 * Copyright (C) 2019 Matthew Waters <matthew@centricular.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
25 #include "vkfullscreenquad.h"
26 #include "vkelementutils.h"
/* Debug category used by GST_DEBUG/GST_ERROR etc. in this file; registered
 * in G_DEFINE_TYPE_WITH_CODE below. */
28 #define GST_CAT_DEFAULT gst_vulkan_full_screen_quad_debug
29 GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
/* NOTE(review): instance typedef; the full struct layout presumably lives in
 * vkfullscreenquad.h — confirm against the header. */
31 typedef struct _GstVulkanFullScreenQuad GstVulkanFullScreenQuad;
/* Private per-instance state, accessed through GET_PRIV().
 * NOTE(review): this view appears to elide several members — priv->vertices,
 * priv->indices, priv->n_indices, priv->uniforms, priv->uniform_size,
 * priv->inbuf and priv->outbuf are referenced elsewhere in this file but are
 * not visible here; confirm against the full source. */
33 struct _GstVulkanFullScreenQuadPrivate
/* raw push-constant data and its size in bytes */
44 GstMemory *push_constants;
45 gsize push_constants_size;
/* vertex and fragment shader modules installed via
 * gst_vulkan_full_screen_quad_set_shaders() */
47 GstVulkanHandle *vert;
48 GstVulkanHandle *frag;
/* GObject boilerplate: registers the GstVulkanFullScreenQuad type (derived
 * from GstObject), initializes the debug category, and attaches the private
 * struct. */
51 G_DEFINE_TYPE_WITH_CODE (GstVulkanFullScreenQuad, gst_vulkan_full_screen_quad,
52 GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_vulkan_full_screen_quad_debug,
53 "vulkanfullscreenquad", 0, "vulkan fullscreen quad render");
54 G_ADD_PRIVATE (GstVulkanFullScreenQuad));
/* Shorthand accessor for the private struct above. */
56 #define GET_PRIV(self) gst_vulkan_full_screen_quad_get_instance_private (self)
/* The four corners of a full-screen quad. Each Vertex is {x, y, z, s, t}:
 * clip-space position plus texture coordinate (see the two vertex attribute
 * descriptions in create_pipeline(), which read offsets of .x and .s).
 * NOTE(review): the closing "};" and the companion 'indices' array used by
 * ensure_vertex_data() are elided in this view. */
58 struct Vertex vertices[] = {
59 {-1.0f, -1.0f, 0.0f, 0.0f, 0.0f},
60 {1.0f, -1.0f, 0.0f, 1.0f, 0.0f},
61 {1.0f, 1.0f, 0.0f, 1.0f, 1.0f},
62 {-1.0f, 1.0f, 0.0f, 0.0f, 1.0f},
/* create_sampler:
 * Creates a VkSampler with linear min/mag filtering and clamp-to-edge
 * addressing, then stores it in self->sampler wrapped in a GstVulkanHandle
 * so destruction can be fence-deferred. Sets @error and (presumably) returns
 * FALSE if vkCreateSampler fails — the return statements are elided in this
 * view; confirm against the full source. */
70 create_sampler (GstVulkanFullScreenQuad * self, GError ** error)
73 VkSamplerCreateInfo samplerInfo = {
74 .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
75 .magFilter = VK_FILTER_LINEAR,
76 .minFilter = VK_FILTER_LINEAR,
77 .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
78 .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
79 .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
80 .anisotropyEnable = VK_FALSE,
82 .borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
83 .unnormalizedCoordinates = VK_FALSE,
84 .compareEnable = VK_FALSE,
85 .compareOp = VK_COMPARE_OP_ALWAYS,
86 .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
96 vkCreateSampler (self->queue->device->device, &samplerInfo, NULL,
98 if (gst_vulkan_error_to_g_error (err, error, "vkCreateSampler") < 0) {
/* wrap the raw handle so it can be trash-listed and freed safely later */
102 self->sampler = gst_vulkan_handle_new_wrapped (self->queue->device,
103 GST_VULKAN_HANDLE_TYPE_SAMPLER, (GstVulkanHandleTypedef) sampler,
104 gst_vulkan_handle_free_sampler, NULL);
/* get_and_update_descriptor_set:
 * Acquires a descriptor set from self->descriptor_cache and fills it:
 * binding for the uniform buffer (when priv->uniforms is set) plus one
 * combined image sampler per input video plane, sampling from @views.
 * Returns the set, or NULL with @error set.
 * NOTE(review): several lines are elided in this view (braces, dstSet/
 * dstBinding fields, early returns) — confirm against the full source. */
109 static GstVulkanDescriptorSet *
110 get_and_update_descriptor_set (GstVulkanFullScreenQuad * self,
111 GstVulkanImageView ** views, GError ** error)
113 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
114 GstVulkanDescriptorSet *set;
/* the image-sampler writes below require the sampler to exist */
117 if (!create_sampler (self, error))
121 gst_vulkan_descriptor_cache_acquire (self->descriptor_cache, error)))
125 VkWriteDescriptorSet writes[GST_VIDEO_MAX_PLANES + 1];
126 VkDescriptorImageInfo image_info[GST_VIDEO_MAX_PLANES];
127 VkDescriptorBufferInfo buffer_info;
/* optional uniform buffer is written first */
132 if (priv->uniforms) {
133 buffer_info = (VkDescriptorBufferInfo) {
134 .buffer = ((GstVulkanBufferMemory *) priv->uniforms)->buffer,
136 .range = priv->uniform_size
139 writes[write_n++] = (VkWriteDescriptorSet) {
140 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
144 .dstArrayElement = 0,
145 .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
146 .descriptorCount = 1,
147 .pBufferInfo = &buffer_info
/* one combined image sampler per input plane */
151 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
152 image_info[i] = (VkDescriptorImageInfo) {
153 .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
154 .imageView = views[i]->view,
155 .sampler = (VkSampler) self->sampler->handle
158 writes[write_n++] = (VkWriteDescriptorSet) {
159 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
163 .dstArrayElement = 0,
164 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
165 .descriptorCount = 1,
166 .pImageInfo = &image_info[i]
170 vkUpdateDescriptorSets (self->queue->device->device, write_n, writes, 0,
/* create_descriptor_set_layout:
 * Builds the VkDescriptorSetLayout matching get_and_update_descriptor_set():
 * a uniform buffer visible to both shader stages, followed by one combined
 * image sampler (fragment stage) per input plane. The result is wrapped in a
 * GstVulkanHandle in self->descriptor_set_layout.
 * NOTE(review): braces, .binding fields and return statements are elided in
 * this view — confirm against the full source. */
178 create_descriptor_set_layout (GstVulkanFullScreenQuad * self, GError ** error)
180 VkDescriptorSetLayoutBinding bindings[GST_VIDEO_MAX_PLANES + 1] = { {0,} };
181 VkDescriptorSetLayoutCreateInfo layout_info;
182 VkDescriptorSetLayout descriptor_set_layout;
183 int descriptor_n = 0;
/* uniform buffer binding, readable by vertex and fragment shaders */
188 bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
190 .descriptorCount = 1,
191 .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
192 .pImmutableSamplers = NULL,
193 .stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT
/* one sampled-image binding per input video plane */
195 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
196 bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
198 .descriptorCount = 1,
199 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
200 .pImmutableSamplers = NULL,
201 .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
205 layout_info = (VkDescriptorSetLayoutCreateInfo) {
206 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
208 .bindingCount = descriptor_n,
209 .pBindings = bindings
214 vkCreateDescriptorSetLayout (self->queue->device->device, &layout_info,
215 NULL, &descriptor_set_layout);
216 if (gst_vulkan_error_to_g_error (err, error,
217 "vkCreateDescriptorSetLayout") < 0) {
221 self->descriptor_set_layout =
222 gst_vulkan_handle_new_wrapped (self->queue->device,
223 GST_VULKAN_HANDLE_TYPE_DESCRIPTOR_SET_LAYOUT,
224 (GstVulkanHandleTypedef) descriptor_set_layout,
225 gst_vulkan_handle_free_descriptor_set_layout, NULL);
/* create_pipeline_layout:
 * Creates the VkPipelineLayout from the (lazily created) descriptor set
 * layout; no push-constant ranges are declared here. The result is wrapped
 * in a GstVulkanHandle in self->pipeline_layout.
 * NOTE(review): braces and return statements are elided in this view. */
231 create_pipeline_layout (GstVulkanFullScreenQuad * self, GError ** error)
233 VkPipelineLayoutCreateInfo pipeline_layout_info;
234 VkPipelineLayout pipeline_layout;
/* lazily create the descriptor set layout on first use */
237 if (!self->descriptor_set_layout)
238 if (!create_descriptor_set_layout (self, error))
242 pipeline_layout_info = (VkPipelineLayoutCreateInfo) {
243 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
246 .pSetLayouts = (VkDescriptorSetLayout *) &self->descriptor_set_layout->handle,
247 .pushConstantRangeCount = 0,
248 .pPushConstantRanges = NULL,
253 vkCreatePipelineLayout (self->queue->device->device,
254 &pipeline_layout_info, NULL, &pipeline_layout);
255 if (gst_vulkan_error_to_g_error (err, error, "vkCreatePipelineLayout") < 0) {
259 self->pipeline_layout = gst_vulkan_handle_new_wrapped (self->queue->device,
260 GST_VULKAN_HANDLE_TYPE_PIPELINE_LAYOUT,
261 (GstVulkanHandleTypedef) pipeline_layout,
262 gst_vulkan_handle_free_pipeline_layout, NULL);
/* create_render_pass:
 * Creates a single-subpass VkRenderPass with one color attachment per output
 * video plane (cleared on load, stored on write, transitioned from UNDEFINED
 * to COLOR_ATTACHMENT_OPTIMAL). Stored wrapped in self->render_pass.
 * NOTE(review): braces, .attachment indices and return statements are elided
 * in this view — confirm against the full source. */
268 create_render_pass (GstVulkanFullScreenQuad * self, GError ** error)
270 VkAttachmentDescription color_attachments[GST_VIDEO_MAX_PLANES];
271 VkAttachmentReference color_attachment_refs[GST_VIDEO_MAX_PLANES];
272 VkRenderPassCreateInfo render_pass_info;
273 VkSubpassDescription subpass;
274 VkRenderPass render_pass;
278 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
280 color_attachments[i] = (VkAttachmentDescription) {
281 .format = gst_vulkan_format_from_video_info (&self->out_info, i),
282 .samples = VK_SAMPLE_COUNT_1_BIT,
283 .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
284 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
285 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
286 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
287 /* FIXME: share this between elements to avoid pipeline barriers */
288 .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
289 .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
292 color_attachment_refs[i] = (VkAttachmentReference) {
294 .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
300 subpass = (VkSubpassDescription) {
301 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
302 .colorAttachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
303 .pColorAttachments = color_attachment_refs
306 render_pass_info = (VkRenderPassCreateInfo) {
307 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
309 .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
310 .pAttachments = color_attachments,
312 .pSubpasses = &subpass
317 vkCreateRenderPass (self->queue->device->device, &render_pass_info, NULL,
319 if (gst_vulkan_error_to_g_error (err, error, "vkCreateRenderPass") < 0) {
323 self->render_pass = gst_vulkan_handle_new_wrapped (self->queue->device,
324 GST_VULKAN_HANDLE_TYPE_RENDER_PASS,
325 (GstVulkanHandleTypedef) render_pass,
326 gst_vulkan_handle_free_render_pass, NULL);
/* create_pipeline:
 * Builds the full VkGraphicsPipeline for drawing the quad: the vertex and
 * fragment shader stages (priv->vert/priv->frag must be set beforehand via
 * set_shaders()), the Vertex layout (vec3 position + vec2 texcoord),
 * triangle-strip assembly, a viewport/scissor sized to the output video
 * info, no culling, no multisampling and blending disabled on every
 * attachment. Lazily creates the pipeline layout and render pass first.
 * The result is wrapped in self->graphics_pipeline.
 * NOTE(review): braces, several struct fields and return statements are
 * elided in this view — confirm against the full source. */
332 create_pipeline (GstVulkanFullScreenQuad * self, GError ** error)
334 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
335 VkVertexInputBindingDescription vertex_binding;
336 VkVertexInputAttributeDescription attribute_descriptions[2];
337 VkPipelineShaderStageCreateInfo shader_create_info[2];
338 VkPipelineVertexInputStateCreateInfo vertex_input_info;
339 VkPipelineInputAssemblyStateCreateInfo input_assembly;
340 VkPipelineViewportStateCreateInfo viewport_state;
341 VkPipelineRasterizationStateCreateInfo rasterizer;
342 VkPipelineMultisampleStateCreateInfo multisampling;
343 VkPipelineColorBlendAttachmentState
344 color_blend_attachments[GST_VIDEO_MAX_PLANES];
345 VkPipelineColorBlendStateCreateInfo color_blending;
346 VkGraphicsPipelineCreateInfo pipeline_create_info;
/* shaders are a hard requirement — callers must use set_shaders() first */
350 if (!priv->vert || !priv->frag) {
351 g_set_error_literal (error, GST_VULKAN_ERROR,
352 VK_ERROR_INITIALIZATION_FAILED, "Missing shader information");
356 if (!self->pipeline_layout)
357 if (!create_pipeline_layout (self, error))
360 if (!self->render_pass)
361 if (!create_render_pass (self, error))
365 shader_create_info[0] = (VkPipelineShaderStageCreateInfo) {
366 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
368 .stage = VK_SHADER_STAGE_VERTEX_BIT,
369 .module = (VkShaderModule) priv->vert->handle,
373 shader_create_info[1] = (VkPipelineShaderStageCreateInfo) {
374 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
376 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
377 .module = (VkShaderModule) priv->frag->handle,
/* vertex layout matches the file-scope 'vertices' array */
382 vertex_binding = (VkVertexInputBindingDescription) {
384 .stride = sizeof (struct Vertex),
385 .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
388 attribute_descriptions[0] = (VkVertexInputAttributeDescription) {
391 .format = VK_FORMAT_R32G32B32_SFLOAT,
392 .offset = G_STRUCT_OFFSET (struct Vertex, x)
394 attribute_descriptions[1] = (VkVertexInputAttributeDescription) {
397 .format = VK_FORMAT_R32G32_SFLOAT,
398 .offset = G_STRUCT_OFFSET (struct Vertex, s)
401 vertex_input_info = (VkPipelineVertexInputStateCreateInfo) {
402 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
404 .vertexBindingDescriptionCount = 1,
405 .pVertexBindingDescriptions = &vertex_binding,
406 .vertexAttributeDescriptionCount = 2,
407 .pVertexAttributeDescriptions = attribute_descriptions,
410 input_assembly = (VkPipelineInputAssemblyStateCreateInfo) {
411 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
413 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
414 .primitiveRestartEnable = VK_FALSE
/* viewport and scissor cover the whole output frame */
417 viewport_state = (VkPipelineViewportStateCreateInfo) {
418 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
421 .pViewports = &(VkViewport) {
424 .width = (float) GST_VIDEO_INFO_WIDTH (&self->out_info),
425 .height = (float) GST_VIDEO_INFO_HEIGHT (&self->out_info),
430 .pScissors = &(VkRect2D) {
433 GST_VIDEO_INFO_WIDTH (&self->out_info),
434 GST_VIDEO_INFO_HEIGHT (&self->out_info)
439 rasterizer = (VkPipelineRasterizationStateCreateInfo) {
440 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
442 .depthClampEnable = VK_FALSE,
443 .rasterizerDiscardEnable = VK_FALSE,
444 .polygonMode = VK_POLYGON_MODE_FILL,
446 .cullMode = VK_CULL_MODE_NONE,
447 .frontFace = VK_FRONT_FACE_CLOCKWISE,
448 .depthBiasEnable = VK_FALSE
451 multisampling = (VkPipelineMultisampleStateCreateInfo) {
452 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
454 .sampleShadingEnable = VK_FALSE,
455 .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT
/* blending disabled on every possible output plane (up to
 * GST_VIDEO_MAX_PLANES); only N_PLANES entries are actually used below */
458 color_blend_attachments[0] = (VkPipelineColorBlendAttachmentState) {
459 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
460 .blendEnable = VK_FALSE
462 color_blend_attachments[1] = (VkPipelineColorBlendAttachmentState) {
463 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
464 .blendEnable = VK_FALSE
466 color_blend_attachments[2] = (VkPipelineColorBlendAttachmentState) {
467 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
468 .blendEnable = VK_FALSE
470 color_blend_attachments[3] = (VkPipelineColorBlendAttachmentState) {
471 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
472 .blendEnable = VK_FALSE
475 color_blending = (VkPipelineColorBlendStateCreateInfo) {
476 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
478 .logicOpEnable = VK_FALSE,
479 .logicOp = VK_LOGIC_OP_COPY,
480 .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
481 .pAttachments = color_blend_attachments,
482 .blendConstants = { 0.0f, 0.0f, 0.0f, 0.0f }
485 pipeline_create_info = (VkGraphicsPipelineCreateInfo) {
486 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
489 .pStages = shader_create_info,
490 .pVertexInputState = &vertex_input_info,
491 .pInputAssemblyState = &input_assembly,
492 .pViewportState = &viewport_state,
493 .pRasterizationState = &rasterizer,
494 .pMultisampleState = &multisampling,
495 .pColorBlendState = &color_blending,
496 .layout = (VkPipelineLayout) self->pipeline_layout->handle,
497 .renderPass = (VkRenderPass) self->render_pass->handle,
499 .basePipelineHandle = VK_NULL_HANDLE
504 vkCreateGraphicsPipelines (self->queue->device->device, VK_NULL_HANDLE, 1,
505 &pipeline_create_info, NULL, &pipeline);
506 if (gst_vulkan_error_to_g_error (err, error, "vkCreateGraphicsPipelines") < 0) {
510 self->graphics_pipeline = gst_vulkan_handle_new_wrapped (self->queue->device,
511 GST_VULKAN_HANDLE_TYPE_PIPELINE, (GstVulkanHandleTypedef) pipeline,
512 gst_vulkan_handle_free_pipeline, NULL);
/* create_descriptor_pool:
 * Creates a VkDescriptorPool sized for up to 'max_sets' descriptor sets
 * (combined image samplers per input plane, plus uniform buffers when
 * priv->uniforms is set), wraps it in a GstVulkanDescriptorPool and builds
 * self->descriptor_cache from it.
 * NOTE(review): braces, n_pools bookkeeping and return statements are elided
 * in this view — confirm against the full source. */
518 create_descriptor_pool (GstVulkanFullScreenQuad * self, GError ** error)
520 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
521 VkDescriptorPoolCreateInfo pool_info;
522 gsize max_sets = 32; /* FIXME: don't hardcode this! */
524 VkDescriptorPoolSize pool_sizes[2];
525 VkDescriptorPool pool;
526 GstVulkanDescriptorPool *ret;
530 pool_sizes[0] = (VkDescriptorPoolSize) {
531 .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
532 .descriptorCount = max_sets * GST_VIDEO_INFO_N_PLANES (&self->in_info),
/* second pool size only needed when a uniform buffer is in use */
535 if (priv->uniforms) {
536 pool_sizes[1] = (VkDescriptorPoolSize) {
537 .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
538 .descriptorCount = max_sets
543 pool_info = (VkDescriptorPoolCreateInfo) {
544 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
546 .flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
547 .poolSizeCount = n_pools,
548 .pPoolSizes = pool_sizes,
554 vkCreateDescriptorPool (self->queue->device->device, &pool_info, NULL,
556 if (gst_vulkan_error_to_g_error (err, error, "vkCreateDescriptorPool") < 0) {
561 gst_vulkan_descriptor_pool_new_wrapped (self->queue->device, pool,
/* the cache owns the pool after this point */
563 self->descriptor_cache =
564 gst_vulkan_descriptor_cache_new (ret, 1, &self->descriptor_set_layout);
565 gst_object_unref (ret);
/* create_framebuffer:
 * Creates a VkFramebuffer for self->render_pass, attaching the image view of
 * each output plane from @views and sized to the output video info. Stored
 * wrapped in self->framebuffer.
 * NOTE(review): braces, the .layers field and return statements are elided
 * in this view — confirm against the full source. */
571 create_framebuffer (GstVulkanFullScreenQuad * self, GstVulkanImageView ** views,
574 VkImageView attachments[GST_VIDEO_MAX_PLANES] = { 0, };
575 VkFramebufferCreateInfo framebuffer_info;
576 VkFramebuffer framebuffer;
580 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
581 attachments[i] = views[i]->view;
585 framebuffer_info = (VkFramebufferCreateInfo) {
586 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
588 .renderPass = (VkRenderPass) self->render_pass->handle,
589 .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
590 .pAttachments = attachments,
591 .width = GST_VIDEO_INFO_WIDTH (&self->out_info),
592 .height = GST_VIDEO_INFO_HEIGHT (&self->out_info),
598 vkCreateFramebuffer (self->queue->device->device, &framebuffer_info, NULL,
600 if (gst_vulkan_error_to_g_error (err, error, "vkCreateFramebuffer") < 0) {
604 self->framebuffer = gst_vulkan_handle_new_wrapped (self->queue->device,
605 GST_VULKAN_HANDLE_TYPE_FRAMEBUFFER, (GstVulkanHandleTypedef) framebuffer,
606 gst_vulkan_handle_free_framebuffer, NULL);
/* Evaluates to a new reference to the fence guarding the last submitted
 * command buffer, or to a fresh always-signalled fence when nothing has been
 * submitted yet. The caller owns the returned reference and must unref it. */
611 #define LAST_FENCE_OR_ALWAYS_SIGNALLED(self,device) \
612 self->last_fence ? gst_vulkan_fence_ref (self->last_fence) : gst_vulkan_fence_new_always_signalled (device)
615 gst_vulkan_full_screen_quad_get_last_fence (GstVulkanFullScreenQuad * self)
617 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), NULL);
619 return LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
623 clear_descriptor_set (GstVulkanFullScreenQuad * self)
625 GstVulkanFence *last_fence =
626 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
628 if (self->descriptor_set)
629 gst_vulkan_trash_list_add (self->trash_list,
630 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
631 gst_vulkan_trash_mini_object_unref,
632 (GstMiniObject *) self->descriptor_set));
633 self->descriptor_set = NULL;
635 gst_vulkan_fence_unref (last_fence);
639 clear_framebuffer (GstVulkanFullScreenQuad * self)
641 GstVulkanFence *last_fence =
642 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
644 if (self->framebuffer)
645 gst_vulkan_trash_list_add (self->trash_list,
646 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
647 gst_vulkan_trash_mini_object_unref,
648 (GstMiniObject *) self->framebuffer));
649 self->framebuffer = NULL;
651 gst_vulkan_fence_unref (last_fence);
655 clear_command_pool (GstVulkanFullScreenQuad * self)
657 GstVulkanFence *last_fence =
658 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
661 gst_vulkan_trash_list_add (self->trash_list,
662 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
663 gst_vulkan_trash_object_unref, (GstObject *) self->cmd_pool));
664 self->cmd_pool = NULL;
666 gst_vulkan_fence_unref (last_fence);
670 clear_sampler (GstVulkanFullScreenQuad * self)
672 GstVulkanFence *last_fence =
673 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
676 gst_vulkan_trash_list_add (self->trash_list,
677 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
678 gst_vulkan_trash_mini_object_unref,
679 (GstMiniObject *) self->sampler));
680 self->sampler = NULL;
682 gst_vulkan_fence_unref (last_fence);
686 clear_descriptor_cache (GstVulkanFullScreenQuad * self)
688 GstVulkanFence *last_fence =
689 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
691 if (self->descriptor_cache)
692 gst_vulkan_trash_list_add (self->trash_list,
693 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
694 gst_vulkan_trash_object_unref,
695 (GstObject *) self->descriptor_cache));
696 self->descriptor_cache = NULL;
698 gst_vulkan_fence_unref (last_fence);
702 clear_shaders (GstVulkanFullScreenQuad * self)
704 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
705 GstVulkanFence *last_fence =
706 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
709 gst_vulkan_trash_list_add (self->trash_list,
710 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
711 gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->vert));
715 gst_vulkan_trash_list_add (self->trash_list,
716 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
717 gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->frag));
720 gst_vulkan_fence_unref (last_fence);
724 clear_uniform_data (GstVulkanFullScreenQuad * self)
726 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
727 GstVulkanFence *last_fence =
728 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
731 gst_vulkan_trash_list_add (self->trash_list,
732 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
733 gst_vulkan_trash_mini_object_unref,
734 (GstMiniObject *) priv->uniforms));
735 priv->uniforms = NULL;
736 priv->uniform_size = 0;
738 gst_vulkan_fence_unref (last_fence);
742 destroy_pipeline (GstVulkanFullScreenQuad * self)
744 GstVulkanFence *last_fence =
745 LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
747 if (self->render_pass)
748 gst_vulkan_trash_list_add (self->trash_list,
749 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
750 gst_vulkan_trash_mini_object_unref,
751 (GstMiniObject *) self->render_pass));
752 self->render_pass = NULL;
753 if (self->pipeline_layout)
754 gst_vulkan_trash_list_add (self->trash_list,
755 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
756 gst_vulkan_trash_mini_object_unref,
757 (GstMiniObject *) self->pipeline_layout));
758 self->pipeline_layout = NULL;
759 if (self->graphics_pipeline)
760 gst_vulkan_trash_list_add (self->trash_list,
761 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
762 gst_vulkan_trash_mini_object_unref,
763 (GstMiniObject *) self->graphics_pipeline));
764 self->graphics_pipeline = NULL;
765 if (self->descriptor_set_layout)
766 gst_vulkan_trash_list_add (self->trash_list,
767 gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
768 gst_vulkan_trash_mini_object_unref,
769 (GstMiniObject *) self->descriptor_set_layout));
770 self->descriptor_set_layout = NULL;
772 gst_vulkan_fence_unref (last_fence);
774 gst_vulkan_trash_list_gc (self->trash_list);
778 gst_vulkan_full_screen_quad_init (GstVulkanFullScreenQuad * self)
780 self->trash_list = gst_vulkan_trash_fence_list_new ();
783 GstVulkanFullScreenQuad *
784 gst_vulkan_full_screen_quad_new (GstVulkanQueue * queue)
786 GstVulkanFullScreenQuad *self;
788 g_return_val_if_fail (GST_IS_VULKAN_QUEUE (queue), NULL);
790 self = g_object_new (GST_TYPE_VULKAN_FULL_SCREEN_QUAD, NULL);
791 self->queue = gst_object_ref (queue);
797 gst_vulkan_full_screen_quad_finalize (GObject * object)
799 GstVulkanFullScreenQuad *self = GST_VULKAN_FULL_SCREEN_QUAD (object);
800 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
802 destroy_pipeline (self);
803 clear_command_pool (self);
804 clear_sampler (self);
805 clear_framebuffer (self);
806 clear_descriptor_set (self);
807 clear_descriptor_cache (self);
808 clear_shaders (self);
809 clear_uniform_data (self);
811 gst_vulkan_trash_list_wait (self->trash_list, -1);
812 gst_vulkan_trash_list_gc (self->trash_list);
813 gst_clear_object (&self->trash_list);
815 gst_clear_mini_object (((GstMiniObject **) & priv->vertices));
816 gst_clear_mini_object (((GstMiniObject **) & priv->indices));
818 gst_clear_mini_object (((GstMiniObject **) & self->last_fence));
820 gst_clear_object (&self->queue);
822 gst_clear_buffer (&priv->inbuf);
823 gst_clear_buffer (&priv->outbuf);
825 G_OBJECT_CLASS (gst_vulkan_full_screen_quad_parent_class)->finalize (object);
829 gst_vulkan_full_screen_quad_class_init (GstVulkanFullScreenQuadClass * klass)
831 GObjectClass *obj_class = G_OBJECT_CLASS (klass);
833 obj_class->finalize = gst_vulkan_full_screen_quad_finalize;
837 gst_vulkan_full_screen_quad_set_info (GstVulkanFullScreenQuad * self,
838 GstVideoInfo * in_info, GstVideoInfo * out_info)
840 self->out_info = *out_info;
841 self->in_info = *in_info;
843 destroy_pipeline (self);
844 clear_framebuffer (self);
845 clear_descriptor_set (self);
846 clear_descriptor_cache (self);
847 clear_uniform_data (self);
853 gst_vulkan_full_screen_quad_set_input_buffer (GstVulkanFullScreenQuad * self,
854 GstBuffer * buffer, GError ** error)
856 GstVulkanFullScreenQuadPrivate *priv;
858 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
860 priv = GET_PRIV (self);
862 gst_buffer_replace (&priv->inbuf, buffer);
863 clear_descriptor_set (self);
868 gst_vulkan_full_screen_quad_set_output_buffer (GstVulkanFullScreenQuad * self,
869 GstBuffer * buffer, GError ** error)
871 GstVulkanFullScreenQuadPrivate *priv;
873 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
875 priv = GET_PRIV (self);
877 gst_buffer_replace (&priv->outbuf, buffer);
878 clear_framebuffer (self);
883 gst_vulkan_full_screen_quad_set_shaders (GstVulkanFullScreenQuad * self,
884 GstVulkanHandle * vert, GstVulkanHandle * frag)
886 GstVulkanFullScreenQuadPrivate *priv;
888 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
889 g_return_val_if_fail (vert != NULL, FALSE);
890 g_return_val_if_fail (vert->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);
891 g_return_val_if_fail (frag != NULL, FALSE);
892 g_return_val_if_fail (frag->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);
894 priv = GET_PRIV (self);
896 clear_shaders (self);
897 destroy_pipeline (self);
899 priv->vert = gst_vulkan_handle_ref (vert);
900 priv->frag = gst_vulkan_handle_ref (frag);
906 gst_vulkan_full_screen_quad_set_uniform_buffer (GstVulkanFullScreenQuad * self,
907 GstMemory * uniforms, GError ** error)
909 GstVulkanFullScreenQuadPrivate *priv;
911 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
912 g_return_val_if_fail (uniforms == NULL
913 || gst_is_vulkan_buffer_memory (uniforms), FALSE);
915 priv = GET_PRIV (self);
917 clear_uniform_data (self);
919 priv->uniforms = gst_memory_ref (uniforms);
920 priv->uniform_size = gst_memory_get_sizes (uniforms, NULL, NULL);
926 static GstVulkanImageMemory *
927 peek_image_from_buffer (GstBuffer * buffer, guint i)
929 GstMemory *mem = gst_buffer_peek_memory (buffer, i);
930 g_return_val_if_fail (gst_is_vulkan_image_memory (mem), NULL);
931 return (GstVulkanImageMemory *) mem;
/* ensure_vertex_data:
 * Lazily allocates host-visible/coherent vertex and index buffers on the
 * device and uploads the file-scope 'vertices' and 'indices' arrays into
 * them, recording the index count in priv->n_indices. On any failure the
 * partially created buffers are released (see the unref/NULL pairs at the
 * end, which appear to belong to an elided error path).
 * NOTE(review): braces, the GstMapInfo declaration, goto/error labels and
 * return statements are elided in this view — confirm against the full
 * source. */
935 ensure_vertex_data (GstVulkanFullScreenQuad * self, GError ** error)
937 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
940 if (!priv->vertices) {
941 priv->vertices = gst_vulkan_buffer_memory_alloc (self->queue->device,
942 sizeof (vertices), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
943 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
944 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
947 if (!gst_memory_map (priv->vertices, &map_info, GST_MAP_WRITE)) {
948 g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
949 "Failed to map memory");
953 memcpy (map_info.data, vertices, map_info.size);
954 gst_memory_unmap (priv->vertices, &map_info);
956 if (!priv->indices) {
957 priv->indices = gst_vulkan_buffer_memory_alloc (self->queue->device,
958 sizeof (indices), VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
959 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
960 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
963 if (!gst_memory_map (priv->indices, &map_info, GST_MAP_WRITE)) {
964 g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
965 "Failed to map memory");
969 memcpy (map_info.data, indices, map_info.size);
970 gst_memory_unmap (priv->indices, &map_info);
972 priv->n_indices = G_N_ELEMENTS (indices);
/* error cleanup: drop any partially-created buffers */
978 gst_memory_unref (priv->vertices);
979 priv->vertices = NULL;
981 gst_memory_unref (priv->indices);
982 priv->indices = NULL;
/* gst_vulkan_full_screen_quad_draw:
 * Convenience entry point: creates a fence, prepares all lazily-created
 * objects (prepare_draw), records the draw into a fresh command buffer
 * (fill_command_buffer) and submits it (submit). On success the fence is
 * unreffed here because submit() tracks it as self->last_fence.
 * NOTE(review): braces, 'gboolean ret', goto error labels and the final
 * return are elided in this view — confirm against the full source. */
988 gst_vulkan_full_screen_quad_draw (GstVulkanFullScreenQuad * self,
991 GstVulkanCommandBuffer *cmd = NULL;
992 GstVulkanFence *fence = NULL;
995 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
997 fence = gst_vulkan_device_create_fence (self->queue->device, error);
1001 if (!gst_vulkan_full_screen_quad_prepare_draw (self, fence, error))
1004 if (!(cmd = gst_vulkan_command_pool_create (self->cmd_pool, error)))
1008 VkCommandBufferBeginInfo cmd_buf_info = { 0, };
1011 cmd_buf_info = (VkCommandBufferBeginInfo) {
1012 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
1014 .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
1015 .pInheritanceInfo = NULL
/* the command buffer lock must be held across begin/fill/end */
1019 gst_vulkan_command_buffer_lock (cmd);
1020 err = vkBeginCommandBuffer (cmd->cmd, &cmd_buf_info);
1021 if (gst_vulkan_error_to_g_error (err, error, "vkBeginCommandBuffer") < 0)
1025 if (!gst_vulkan_full_screen_quad_fill_command_buffer (self, cmd, fence,
1029 err = vkEndCommandBuffer (cmd->cmd);
1030 gst_vulkan_command_buffer_unlock (cmd);
1031 if (gst_vulkan_error_to_g_error (err, error, "vkEndCommandBuffer") < 0)
1034 if (!gst_vulkan_full_screen_quad_submit (self, cmd, fence, error))
1037 gst_vulkan_fence_unref (fence);
/* error path: unlock (if locked) and release both objects */
1042 gst_vulkan_command_buffer_unlock (cmd);
1045 gst_clear_mini_object ((GstMiniObject **) & cmd);
1046 gst_clear_mini_object ((GstMiniObject **) & fence);
/* gst_vulkan_full_screen_quad_prepare_draw:
 * Ensures every object needed by fill_command_buffer() exists: graphics
 * pipeline, vertex/index data, descriptor pool/cache, a descriptor set bound
 * to the input buffer's image views, a framebuffer bound to the output
 * buffer's image views, and the command pool. Image views acquired here are
 * parked on the trash list keyed to @fence so they outlive the GPU work.
 * Returns FALSE with @error set on any failure.
 * NOTE(review): braces, the get_or_create_image_view() helper definition,
 * goto labels and return statements are elided in this view — confirm
 * against the full source. */
1051 gst_vulkan_full_screen_quad_prepare_draw (GstVulkanFullScreenQuad * self,
1052 GstVulkanFence * fence, GError ** error)
1054 GstVulkanFullScreenQuadPrivate *priv;
1055 GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
1056 GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
1059 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1060 g_return_val_if_fail (fence != NULL, FALSE);
1062 priv = GET_PRIV (self);
1064 if (!self->graphics_pipeline)
1065 if (!create_pipeline (self, error))
1068 if (!ensure_vertex_data (self, error))
1071 if (!self->descriptor_cache)
1072 if (!create_descriptor_pool (self, error))
/* (re)create the descriptor set from the current input buffer's views */
1075 if (!self->descriptor_set) {
1076 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
1077 GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
1078 if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
1079 g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
1080 "Input memory must be a GstVulkanImageMemory");
1083 in_views[i] = get_or_create_image_view (img_mem);
1084 gst_vulkan_trash_list_add (self->trash_list,
1085 gst_vulkan_trash_list_acquire (self->trash_list, fence,
1086 gst_vulkan_trash_mini_object_unref,
1087 (GstMiniObject *) in_views[i]));
1089 if (!(self->descriptor_set =
1090 get_and_update_descriptor_set (self, in_views, error)))
/* (re)create the framebuffer from the current output buffer's views */
1094 if (!self->framebuffer) {
1095 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
1096 GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
1097 if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
1098 g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
1099 "Output memory must be a GstVulkanImageMemory");
1102 out_views[i] = get_or_create_image_view (img_mem);
1103 gst_vulkan_trash_list_add (self->trash_list,
1104 gst_vulkan_trash_list_acquire (self->trash_list, fence,
1105 gst_vulkan_trash_mini_object_unref,
1106 (GstMiniObject *) out_views[i]));
1108 if (!create_framebuffer (self, out_views, error))
1112 if (!self->cmd_pool)
1113 if (!(self->cmd_pool =
1114 gst_vulkan_queue_create_command_pool (self->queue, error)))
/* error cleanup: drop local view references (trash list keeps its own) */
1120 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
1121 gst_clear_mini_object ((GstMiniObject **) & in_views[i]);
1122 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
1123 gst_clear_mini_object ((GstMiniObject **) & out_views[i]);
1128 * gst_vulkan_full_screen_quad_fill_command_buffer:
1129 * @self: a #GstVulkanFullScreenQuad
1130 * @cmd: the #GstVulkanCommandBuffer to fill with commands
1131 * @error: a #GError to fill on error
1133 * Returns: whether @cmd could be filled with the necessary commands
1136 gst_vulkan_full_screen_quad_fill_command_buffer (GstVulkanFullScreenQuad * self,
1137 GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
1139 GstVulkanFullScreenQuadPrivate *priv;
  /* One view per video plane; entries beyond the plane count stay NULL. */
1140 GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
1141 GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
1144 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1145 g_return_val_if_fail (cmd != NULL, FALSE);
1146 g_return_val_if_fail (fence != NULL, FALSE);
1148 priv = GET_PRIV (self);
  /* Acquire (or lazily create) an image view for every plane of the input
   * buffer and park each view on the trash list keyed on @fence, so the view
   * is only unreffed once the GPU has finished using it. */
1150 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
1151 GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
1152 if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
1153 g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
1154 "Input memory must be a GstVulkanImageMemory");
1157 in_views[i] = get_or_create_image_view (img_mem);
1158 gst_vulkan_trash_list_add (self->trash_list,
1159 gst_vulkan_trash_list_acquire (self->trash_list, fence,
1160 gst_vulkan_trash_mini_object_unref, (GstMiniObject *) in_views[i]));
  /* Same dance for the output buffer's planes. */
1162 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
1163 GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
1164 if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
1165 g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
1166 "Output memory must be a GstVulkanImageMemory");
1169 out_views[i] = get_or_create_image_view (img_mem);
1170 gst_vulkan_trash_list_add (self->trash_list,
1171 gst_vulkan_trash_list_acquire (self->trash_list, fence,
1172 gst_vulkan_trash_mini_object_unref,
1173 (GstMiniObject *) out_views[i]));
  /* Record a layout transition for each input plane: whatever layout/access
   * the image was last left in -> SHADER_READ_ONLY_OPTIMAL, readable from
   * the fragment shader.  The previous state is taken from the barrier
   * bookkeeping cached on the image memory itself. */
1176 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
1178 VkImageMemoryBarrier in_image_memory_barrier = {
1179 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
1181 .srcAccessMask = in_views[i]->image->barrier.parent.access_flags,
1182 .dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
1183 .oldLayout = in_views[i]->image->barrier.image_layout,
1184 .newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
1185 /* FIXME: implement exclusive transfers */
  /* NOTE(review): 0 here is a concrete queue family index, not
   * VK_QUEUE_FAMILY_IGNORED; per the Vulkan spec, unequal-but-concrete
   * indices request an ownership transfer.  Confirm this is intended
   * alongside the FIXME above. */
1186 .srcQueueFamilyIndex = 0,
1187 .dstQueueFamilyIndex = 0,
1188 .image = in_views[i]->image->image,
1189 .subresourceRange = in_views[i]->image->barrier.subresource_range
1193 vkCmdPipelineBarrier (cmd->cmd,
1194 in_views[i]->image->barrier.parent.pipeline_stages,
1195 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, NULL, 0, NULL, 1,
1196 &in_image_memory_barrier);
  /* Update the cached barrier state so the next user of this image knows
   * its current stage/access/layout. */
1198 in_views[i]->image->barrier.parent.pipeline_stages =
1199 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
1200 in_views[i]->image->barrier.parent.access_flags =
1201 in_image_memory_barrier.dstAccessMask;
1202 in_views[i]->image->barrier.image_layout =
1203 in_image_memory_barrier.newLayout;
  /* Transition each output plane to COLOR_ATTACHMENT_OPTIMAL so it can be
   * written by the color-attachment-output stage of the render pass. */
1206 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
1208 VkImageMemoryBarrier out_image_memory_barrier = {
1209 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
1211 .srcAccessMask = out_views[i]->image->barrier.parent.access_flags,
1212 .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1213 .oldLayout = out_views[i]->image->barrier.image_layout,
1214 .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
1215 /* FIXME: implement exclusive transfers */
1216 .srcQueueFamilyIndex = 0,
1217 .dstQueueFamilyIndex = 0,
1218 .image = out_views[i]->image->image,
1219 .subresourceRange = out_views[i]->image->barrier.subresource_range
1223 vkCmdPipelineBarrier (cmd->cmd,
1224 out_views[i]->image->barrier.parent.pipeline_stages,
1225 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, NULL, 0, NULL, 1,
1226 &out_image_memory_barrier);
  /* Cache the new state for subsequent barriers on this image. */
1228 out_views[i]->image->barrier.parent.pipeline_stages =
1229 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
1230 out_views[i]->image->barrier.parent.access_flags =
1231 out_image_memory_barrier.dstAccessMask;
1232 out_views[i]->image->barrier.image_layout =
1233 out_image_memory_barrier.newLayout;
  /* Record the actual draw: begin the render pass (clearing every output
   * attachment to opaque black), bind pipeline + descriptors + the quad's
   * vertex/index buffers, and draw the indexed full-screen quad. */
1238 VkClearValue clearColor = {{{ 0.0f, 0.0f, 0.0f, 1.0f }}};
  /* One clear value per possible plane; clearValueCount below limits how
   * many are actually consumed. */
1239 VkClearValue clearColors[GST_VIDEO_MAX_PLANES] = {
1240 clearColor, clearColor, clearColor, clearColor,
1242 VkRenderPassBeginInfo render_pass_info = {
1243 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
1244 .renderPass = (VkRenderPass) self->render_pass->handle,
1245 .framebuffer = (VkFramebuffer) self->framebuffer->handle,
1246 .renderArea.offset = { 0, 0 },
1247 .renderArea.extent = {
1248 GST_VIDEO_INFO_WIDTH (&self->out_info),
1249 GST_VIDEO_INFO_HEIGHT (&self->out_info)
1251 .clearValueCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
1252 .pClearValues = clearColors,
1255 VkDeviceSize offsets[] = { 0 };
1257 vkCmdBindDescriptorSets (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
1258 (VkPipelineLayout) self->pipeline_layout->handle, 0, 1,
1259 &self->descriptor_set->set, 0, NULL);
1261 vkCmdBeginRenderPass (cmd->cmd, &render_pass_info,
1262 VK_SUBPASS_CONTENTS_INLINE);
1263 vkCmdBindPipeline (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
1264 (VkPipeline) self->graphics_pipeline->handle);
1265 vkCmdBindVertexBuffers (cmd->cmd, 0, 1,
1266 &((GstVulkanBufferMemory *) priv->vertices)->buffer, offsets);
1267 vkCmdBindIndexBuffer (cmd->cmd,
1268 ((GstVulkanBufferMemory *) priv->indices)->buffer, 0,
1269 VK_INDEX_TYPE_UINT16);
1270 vkCmdDrawIndexed (cmd->cmd, priv->n_indices, 1, 0, 0, 0);
1271 vkCmdEndRenderPass (cmd->cmd);
1281 * gst_vulkan_full_screen_quad_submit:
1282 * @self: a #GstVulkanFullScreenQuad
1283 * @cmd: (transfer full): a #GstVulkanCommandBuffer to submit
1284 * @fence: a #GstVulkanFence to signal on completion
1285 * @error: a #GError to fill on error
1287 * Returns: whether @cmd could be submitted to the queue
1290 gst_vulkan_full_screen_quad_submit (GstVulkanFullScreenQuad * self,
1291 GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
1295 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1296 g_return_val_if_fail (cmd != NULL, FALSE);
1297 g_return_val_if_fail (fence != NULL, FALSE);
1301 VkSubmitInfo submit_info = {
1302 .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
1304 .waitSemaphoreCount = 0,
1305 .pWaitSemaphores = NULL,
1306 .pWaitDstStageMask = NULL,
1307 .commandBufferCount = 1,
1308 .pCommandBuffers = &cmd->cmd,
1309 .signalSemaphoreCount = 0,
1310 .pSignalSemaphores = NULL,
1314 gst_vulkan_queue_submit_lock (self->queue);
1316 vkQueueSubmit (self->queue->queue, 1, &submit_info,
1317 GST_VULKAN_FENCE_FENCE (fence));
1318 gst_vulkan_queue_submit_unlock (self->queue);
1319 if (gst_vulkan_error_to_g_error (err, error, "vkQueueSubmit") < 0)
1323 gst_vulkan_trash_list_add (self->trash_list,
1324 gst_vulkan_trash_list_acquire (self->trash_list, fence,
1325 gst_vulkan_trash_mini_object_unref, GST_MINI_OBJECT_CAST (cmd)));
1327 gst_vulkan_trash_list_gc (self->trash_list);
1329 if (self->last_fence)
1330 gst_vulkan_fence_unref (self->last_fence);
1331 self->last_fence = gst_vulkan_fence_ref (fence);