2 * GStreamer Plugins Vulkan
3 * Copyright (C) 2019 Matthew Waters <matthew@centricular.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
25 #include "gstvkfullscreenquad.h"
27 #define GST_CAT_DEFAULT gst_vulkan_full_screen_quad_debug
28 GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
30 /* XXX: privatise this on moving to lib */
/* Private per-instance state.  Only the shader handles are visible in this
 * chunk; other members are declared on lines not shown here. */
struct _GstVulkanFullScreenQuadPrivate
  /* vertex and fragment SPIR-V shader modules
   * (GST_VULKAN_HANDLE_TYPE_SHADER), set via
   * gst_vulkan_full_screen_quad_set_shaders() and released by
   * clear_shaders() */
  GstVulkanHandle *vert;
  GstVulkanHandle *frag;
/* Register the GstVulkanFullScreenQuad GObject type (deriving GstObject),
 * initialise the debug category and attach the private struct. */
G_DEFINE_TYPE_WITH_CODE (GstVulkanFullScreenQuad, gst_vulkan_full_screen_quad,
    GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_vulkan_full_screen_quad_debug,
        "vulkanfullscreenquad", 0, "vulkan fullscreen quad render");
    G_ADD_PRIVATE (GstVulkanFullScreenQuad));

/* convenience accessor for the instance-private struct */
#define GET_PRIV(self) gst_vulkan_full_screen_quad_get_instance_private (self)
59 struct Vertex vertices[] = {
60 {-1.0f, -1.0f, 0.0f, 0.0f, 0.0f},
61 {1.0f, -1.0f, 0.0f, 1.0f, 0.0f},
62 {1.0f, 1.0f, 0.0f, 1.0f, 1.0f},
63 {-1.0f, 1.0f, 0.0f, 0.0f, 1.0f},
/* Create the VkSampler shared by all input image views (bilinear filtering,
 * clamp-to-edge, no anisotropy/mipmaps/compare) and store it wrapped in
 * self->sampler.  Returns FALSE with @error set when vkCreateSampler fails. */
create_sampler (GstVulkanFullScreenQuad * self, GError ** error)
  VkSamplerCreateInfo samplerInfo = {
    .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
    .magFilter = VK_FILTER_LINEAR,
    .minFilter = VK_FILTER_LINEAR,
    .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
    .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
    .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
    .anisotropyEnable = VK_FALSE,
    .borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
    .unnormalizedCoordinates = VK_FALSE,
    .compareEnable = VK_FALSE,
    .compareOp = VK_COMPARE_OP_ALWAYS,
    .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
  vkCreateSampler (self->queue->device->device, &samplerInfo, NULL,
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateSampler") < 0) {
  /* wrap the raw handle so destruction goes through the handle machinery */
  self->sampler = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_SAMPLER, (GstVulkanHandleTypedef) sampler,
      gst_vulkan_handle_free_sampler, NULL);
/* Acquire a descriptor set from the cache and write its bindings: one
 * optional uniform buffer (when priv->uniforms is set) followed by one
 * combined image sampler per input plane taken from @views.  Returns the
 * updated set, or NULL with @error set. */
static GstVulkanDescriptorSet *
get_and_update_descriptor_set (GstVulkanFullScreenQuad * self,
    GstVulkanImageView ** views, GError ** error)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanDescriptorSet *set;

  /* the sampler is shared by every image binding, so make sure it exists */
  if (!create_sampler (self, error))
      gst_vulkan_descriptor_cache_acquire (self->descriptor_cache, error)))
    /* +1 leaves room for the optional uniform-buffer write */
    VkWriteDescriptorSet writes[GST_VIDEO_MAX_PLANES + 1];
    VkDescriptorImageInfo image_info[GST_VIDEO_MAX_PLANES];
    VkDescriptorBufferInfo buffer_info;

    if (priv->uniforms) {
      buffer_info = (VkDescriptorBufferInfo) {
        .buffer = ((GstVulkanBufferMemory *) priv->uniforms)->buffer,
        .range = priv->uniform_size
      writes[write_n++] = (VkWriteDescriptorSet) {
        .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        .dstArrayElement = 0,
        .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        .descriptorCount = 1,
        .pBufferInfo = &buffer_info
    /* one sampled-image binding per plane of the input video format */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      image_info[i] = (VkDescriptorImageInfo) {
        .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
        .imageView = views[i]->view,
        .sampler = (VkSampler) self->sampler->handle
      writes[write_n++] = (VkWriteDescriptorSet) {
        .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        .dstArrayElement = 0,
        .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
        .descriptorCount = 1,
        .pImageInfo = &image_info[i]
    vkUpdateDescriptorSets (self->queue->device->device, write_n, writes, 0,
/* Build the descriptor set layout matching get_and_update_descriptor_set():
 * a uniform buffer (vertex+fragment stages) plus one combined image sampler
 * per input plane (fragment stage), wrapped into
 * self->descriptor_set_layout. */
create_descriptor_set_layout (GstVulkanFullScreenQuad * self, GError ** error)
  VkDescriptorSetLayoutBinding bindings[GST_VIDEO_MAX_PLANES + 1] = { {0,} };
  VkDescriptorSetLayoutCreateInfo layout_info;
  VkDescriptorSetLayout descriptor_set_layout;
  int descriptor_n = 0;

  bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
    .descriptorCount = 1,
    .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
    .pImmutableSamplers = NULL,
    .stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
    bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
      .descriptorCount = 1,
      .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
      .pImmutableSamplers = NULL,
      .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
  layout_info = (VkDescriptorSetLayoutCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
    .bindingCount = descriptor_n,
    .pBindings = bindings
  vkCreateDescriptorSetLayout (self->queue->device->device, &layout_info,
      NULL, &descriptor_set_layout);
  if (gst_vulkan_error_to_g_error (err, error,
          "vkCreateDescriptorSetLayout") < 0) {
  self->descriptor_set_layout =
      gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_DESCRIPTOR_SET_LAYOUT,
      (GstVulkanHandleTypedef) descriptor_set_layout,
      gst_vulkan_handle_free_descriptor_set_layout, NULL);
/* Create the pipeline layout referencing the (lazily-created) descriptor set
 * layout; no push constants.  Result is wrapped into self->pipeline_layout. */
create_pipeline_layout (GstVulkanFullScreenQuad * self, GError ** error)
  VkPipelineLayoutCreateInfo pipeline_layout_info;
  VkPipelineLayout pipeline_layout;

  /* lazily create dependency first */
  if (!self->descriptor_set_layout)
    if (!create_descriptor_set_layout (self, error))

  pipeline_layout_info = (VkPipelineLayoutCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
    .pSetLayouts = (VkDescriptorSetLayout *) &self->descriptor_set_layout->handle,
    .pushConstantRangeCount = 0,
    .pPushConstantRanges = NULL,
  vkCreatePipelineLayout (self->queue->device->device,
      &pipeline_layout_info, NULL, &pipeline_layout);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreatePipelineLayout") < 0) {
  self->pipeline_layout = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_PIPELINE_LAYOUT,
      (GstVulkanHandleTypedef) pipeline_layout,
      gst_vulkan_handle_free_pipeline_layout, NULL);
/* Create a single-subpass render pass with one cleared color attachment per
 * output plane, ending in COLOR_ATTACHMENT_OPTIMAL layout.  Result is wrapped
 * into self->render_pass. */
create_render_pass (GstVulkanFullScreenQuad * self, GError ** error)
  VkAttachmentDescription color_attachments[GST_VIDEO_MAX_PLANES];
  VkAttachmentReference color_attachment_refs[GST_VIDEO_MAX_PLANES];
  VkRenderPassCreateInfo render_pass_info;
  VkSubpassDescription subpass;
  VkRenderPass render_pass;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    color_attachments[i] = (VkAttachmentDescription) {
      .format = gst_vulkan_format_from_video_info (&self->out_info, i),
      .samples = VK_SAMPLE_COUNT_1_BIT,
      .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
      .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
      .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
      .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
      /* FIXME: share this between elements to avoid pipeline barriers */
      .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
      .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
    color_attachment_refs[i] = (VkAttachmentReference) {
      .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
  subpass = (VkSubpassDescription) {
    .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
    .colorAttachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
    .pColorAttachments = color_attachment_refs
  render_pass_info = (VkRenderPassCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
    .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
    .pAttachments = color_attachments,
    .pSubpasses = &subpass
  vkCreateRenderPass (self->queue->device->device, &render_pass_info, NULL,
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateRenderPass") < 0) {
  self->render_pass = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_RENDER_PASS,
      (GstVulkanHandleTypedef) render_pass,
      gst_vulkan_handle_free_render_pass, NULL);
/* Build the graphics pipeline for rendering the fullscreen quad: vertex +
 * fragment shader stages, the Vertex layout (vec3 position + vec2 texcoord),
 * triangle-strip topology, a fixed viewport/scissor sized to the output video
 * info, no culling/blending/multisampling.  Lazily creates the pipeline
 * layout and render pass first.  Result is wrapped into
 * self->graphics_pipeline. */
create_pipeline (GstVulkanFullScreenQuad * self, GError ** error)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  VkVertexInputBindingDescription vertex_binding;
  VkVertexInputAttributeDescription attribute_descriptions[2];
  VkPipelineShaderStageCreateInfo shader_create_info[2];
  VkPipelineVertexInputStateCreateInfo vertex_input_info;
  VkPipelineInputAssemblyStateCreateInfo input_assembly;
  VkPipelineViewportStateCreateInfo viewport_state;
  VkPipelineRasterizationStateCreateInfo rasterizer;
  VkPipelineMultisampleStateCreateInfo multisampling;
  VkPipelineColorBlendAttachmentState
      color_blend_attachments[GST_VIDEO_MAX_PLANES];
  VkPipelineColorBlendStateCreateInfo color_blending;
  VkGraphicsPipelineCreateInfo pipeline_create_info;

  /* shaders must have been provided via set_shaders() beforehand */
  if (!priv->vert || !priv->frag) {
    g_set_error_literal (error, GST_VULKAN_ERROR,
        VK_ERROR_INITIALIZATION_FAILED, "Missing shader information");

  if (!self->pipeline_layout)
    if (!create_pipeline_layout (self, error))

  if (!self->render_pass)
    if (!create_render_pass (self, error))

  shader_create_info[0] = (VkPipelineShaderStageCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
    .stage = VK_SHADER_STAGE_VERTEX_BIT,
    .module = (VkShaderModule) priv->vert->handle,
  shader_create_info[1] = (VkPipelineShaderStageCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
    .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
    .module = (VkShaderModule) priv->frag->handle,
  vertex_binding = (VkVertexInputBindingDescription) {
    .stride = sizeof (struct Vertex),
    .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
  /* location 0: vec3 position; location 1: vec2 texture coordinate */
  attribute_descriptions[0] = (VkVertexInputAttributeDescription) {
    .format = VK_FORMAT_R32G32B32_SFLOAT,
    .offset = G_STRUCT_OFFSET (struct Vertex, x)
  attribute_descriptions[1] = (VkVertexInputAttributeDescription) {
    .format = VK_FORMAT_R32G32_SFLOAT,
    .offset = G_STRUCT_OFFSET (struct Vertex, s)
  vertex_input_info = (VkPipelineVertexInputStateCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
    .vertexBindingDescriptionCount = 1,
    .pVertexBindingDescriptions = &vertex_binding,
    .vertexAttributeDescriptionCount = 2,
    .pVertexAttributeDescriptions = attribute_descriptions,
  input_assembly = (VkPipelineInputAssemblyStateCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
    .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
    .primitiveRestartEnable = VK_FALSE
  /* static viewport/scissor sized to the output frame; the pipeline must be
   * recreated when the output size changes (see set_info()) */
  viewport_state = (VkPipelineViewportStateCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
    .pViewports = &(VkViewport) {
      .width = (float) GST_VIDEO_INFO_WIDTH (&self->out_info),
      .height = (float) GST_VIDEO_INFO_HEIGHT (&self->out_info),
    .pScissors = &(VkRect2D) {
        GST_VIDEO_INFO_WIDTH (&self->out_info),
        GST_VIDEO_INFO_HEIGHT (&self->out_info)
  rasterizer = (VkPipelineRasterizationStateCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
    .depthClampEnable = VK_FALSE,
    .rasterizerDiscardEnable = VK_FALSE,
    .polygonMode = VK_POLYGON_MODE_FILL,
    .cullMode = VK_CULL_MODE_NONE,
    .frontFace = VK_FRONT_FACE_CLOCKWISE,
    .depthBiasEnable = VK_FALSE
  multisampling = (VkPipelineMultisampleStateCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
    .sampleShadingEnable = VK_FALSE,
    .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT
  /* one pass-through (no blend) attachment per possible output plane */
  color_blend_attachments[0] = (VkPipelineColorBlendAttachmentState) {
    .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
    .blendEnable = VK_FALSE
  color_blend_attachments[1] = (VkPipelineColorBlendAttachmentState) {
    .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
    .blendEnable = VK_FALSE
  color_blend_attachments[2] = (VkPipelineColorBlendAttachmentState) {
    .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
    .blendEnable = VK_FALSE
  color_blend_attachments[3] = (VkPipelineColorBlendAttachmentState) {
    .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
    .blendEnable = VK_FALSE
  color_blending = (VkPipelineColorBlendStateCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
    .logicOpEnable = VK_FALSE,
    .logicOp = VK_LOGIC_OP_COPY,
    .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
    .pAttachments = color_blend_attachments,
    .blendConstants = { 0.0f, 0.0f, 0.0f, 0.0f }
  pipeline_create_info = (VkGraphicsPipelineCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
    .pStages = shader_create_info,
    .pVertexInputState = &vertex_input_info,
    .pInputAssemblyState = &input_assembly,
    .pViewportState = &viewport_state,
    .pRasterizationState = &rasterizer,
    .pMultisampleState = &multisampling,
    .pColorBlendState = &color_blending,
    .layout = (VkPipelineLayout) self->pipeline_layout->handle,
    .renderPass = (VkRenderPass) self->render_pass->handle,
    .basePipelineHandle = VK_NULL_HANDLE
  vkCreateGraphicsPipelines (self->queue->device->device, VK_NULL_HANDLE, 1,
      &pipeline_create_info, NULL, &pipeline);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateGraphicsPipelines") < 0) {
  self->graphics_pipeline = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_PIPELINE, (GstVulkanHandleTypedef) pipeline,
      gst_vulkan_handle_free_pipeline, NULL);
/* Create a descriptor pool sized for up to max_sets sets (image samplers per
 * input plane, plus uniform buffers when uniforms are used) and wrap it in a
 * descriptor cache stored in self->descriptor_cache. */
create_descriptor_pool (GstVulkanFullScreenQuad * self, GError ** error)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  VkDescriptorPoolCreateInfo pool_info;
  gsize max_sets = 32;          /* FIXME: don't hardcode this! */
  VkDescriptorPoolSize pool_sizes[2];
  VkDescriptorPool pool;
  GstVulkanDescriptorPool *ret;

  pool_sizes[0] = (VkDescriptorPoolSize) {
    .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
    .descriptorCount = max_sets * GST_VIDEO_INFO_N_PLANES (&self->in_info),
  /* only reserve uniform-buffer descriptors when uniforms are in use */
  if (priv->uniforms) {
    pool_sizes[1] = (VkDescriptorPoolSize) {
      .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
      .descriptorCount = max_sets
  pool_info = (VkDescriptorPoolCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
    .flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
    .poolSizeCount = n_pools,
    .pPoolSizes = pool_sizes,
  vkCreateDescriptorPool (self->queue->device->device, &pool_info, NULL,
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateDescriptorPool") < 0) {
      gst_vulkan_descriptor_pool_new_wrapped (self->queue->device, pool,
  /* the cache takes its own ref on the pool */
  self->descriptor_cache =
      gst_vulkan_descriptor_cache_new (ret, 1, &self->descriptor_set_layout);
  gst_object_unref (ret);
/* Create the framebuffer binding the per-plane output image @views as color
 * attachments of self->render_pass, sized to the output video info.  Result
 * is wrapped into self->framebuffer. */
create_framebuffer (GstVulkanFullScreenQuad * self, GstVulkanImageView ** views,
  VkImageView attachments[GST_VIDEO_MAX_PLANES] = { 0, };
  VkFramebufferCreateInfo framebuffer_info;
  VkFramebuffer framebuffer;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    attachments[i] = views[i]->view;
  framebuffer_info = (VkFramebufferCreateInfo) {
    .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
    .renderPass = (VkRenderPass) self->render_pass->handle,
    .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
    .pAttachments = attachments,
    .width = GST_VIDEO_INFO_WIDTH (&self->out_info),
    .height = GST_VIDEO_INFO_HEIGHT (&self->out_info),
  vkCreateFramebuffer (self->queue->device->device, &framebuffer_info, NULL,
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateFramebuffer") < 0) {
  self->framebuffer = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_FRAMEBUFFER, (GstVulkanHandleTypedef) framebuffer,
      gst_vulkan_handle_free_framebuffer, NULL);
/* Evaluate to a new reference: a ref of @self's last submitted fence, or an
 * always-signalled fence when nothing has been submitted yet.  The caller
 * owns (and must unref) the returned reference.  Arguments and the whole
 * expansion are parenthesised so the macro composes safely in any expression
 * context (CERT PRE01-C/PRE02-C). */
#define LAST_FENCE_OR_ALWAYS_SIGNALLED(self,device) \
    ((self)->last_fence ? gst_vulkan_fence_ref ((self)->last_fence) : \
        gst_vulkan_fence_new_always_signalled (device))
/* Return (transfer full) the fence covering the most recently submitted
 * work, or an always-signalled fence when nothing has been submitted. */
gst_vulkan_full_screen_quad_get_last_fence (GstVulkanFullScreenQuad * self)
  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), NULL);

  return LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
/* Defer destruction of the current descriptor set until the last submitted
 * work completes, then forget it.  Idempotent. */
clear_descriptor_set (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->descriptor_set)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->descriptor_set));
  /* reset unconditionally; safe when it was already NULL */
  self->descriptor_set = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer destruction of the framebuffer until the GPU is done with it. */
clear_framebuffer (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->framebuffer)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->framebuffer));
  self->framebuffer = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer release of the command pool until the GPU is done with it. */
clear_command_pool (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
          gst_vulkan_trash_object_unref, (GstObject *) self->cmd_pool));
  self->cmd_pool = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer destruction of the shared sampler until the GPU is done with it. */
clear_sampler (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
          gst_vulkan_trash_mini_object_unref,
          (GstMiniObject *) self->sampler));
  self->sampler = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer release of the descriptor cache until the GPU is done with it. */
clear_descriptor_cache (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->descriptor_cache)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_object_unref,
            (GstObject *) self->descriptor_cache));
  self->descriptor_cache = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer destruction of the vertex and fragment shader modules until the GPU
 * is done with them. */
clear_shaders (GstVulkanFullScreenQuad * self)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
          gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->vert));
  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
          gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->frag));

  gst_vulkan_fence_unref (last_fence);
/* Defer release of the uniform buffer memory and reset its recorded size. */
clear_uniform_data (GstVulkanFullScreenQuad * self)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
          gst_vulkan_trash_mini_object_unref,
          (GstMiniObject *) priv->uniforms));
  priv->uniforms = NULL;
  priv->uniform_size = 0;

  gst_vulkan_fence_unref (last_fence);
/* Defer release of the index buffer memory and reset the index count. */
clear_index_data (GstVulkanFullScreenQuad * self)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
          gst_vulkan_trash_mini_object_unref,
          (GstMiniObject *) priv->indices));
  priv->indices = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer release of the vertex buffer memory. */
clear_vertex_data (GstVulkanFullScreenQuad * self)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
          gst_vulkan_trash_mini_object_unref,
          (GstMiniObject *) priv->vertices));
  priv->vertices = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer destruction of the render pass until the GPU is done with it. */
clear_render_pass (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->render_pass)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->render_pass));
  self->render_pass = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer destruction of the pipeline layout until the GPU is done with it. */
clear_pipeline_layout (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->pipeline_layout)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->pipeline_layout));
  self->pipeline_layout = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer destruction of the graphics pipeline until the GPU is done with it. */
clear_graphics_pipeline (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->graphics_pipeline)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->graphics_pipeline));
  self->graphics_pipeline = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Defer destruction of the descriptor set layout until the GPU is done with
 * it. */
clear_descriptor_set_layout (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->descriptor_set_layout)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->descriptor_set_layout));
  self->descriptor_set_layout = NULL;

  gst_vulkan_fence_unref (last_fence);
/* Tear down every pipeline-related object (render pass, layouts, pipeline)
 * and garbage-collect anything whose fence has already signalled. */
destroy_pipeline (GstVulkanFullScreenQuad * self)
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  clear_render_pass (self);
  clear_pipeline_layout (self);
  clear_graphics_pipeline (self);
  clear_descriptor_set_layout (self);

  gst_vulkan_fence_unref (last_fence);

  gst_vulkan_trash_list_gc (self->trash_list);
gst_vulkan_full_screen_quad_init (GstVulkanFullScreenQuad * self)
  /* fence-backed trash list used for deferred destruction of every Vulkan
   * resource this object creates */
  self->trash_list = gst_vulkan_trash_fence_list_new ();
/**
 * gst_vulkan_full_screen_quad_new:
 * @queue: the #GstVulkanQueue to submit rendering on
 *
 * Returns: (transfer full): a new fullscreen quad renderer bound to @queue
 */
GstVulkanFullScreenQuad *
gst_vulkan_full_screen_quad_new (GstVulkanQueue * queue)
  GstVulkanFullScreenQuad *self;

  g_return_val_if_fail (GST_IS_VULKAN_QUEUE (queue), NULL);

  self = g_object_new (GST_TYPE_VULKAN_FULL_SCREEN_QUAD, NULL);
  self->queue = gst_object_ref (queue);
/* GObject finalize: push every owned Vulkan resource onto the trash list,
 * wait for outstanding GPU work, then drop remaining references. */
gst_vulkan_full_screen_quad_finalize (GObject * object)
  GstVulkanFullScreenQuad *self = GST_VULKAN_FULL_SCREEN_QUAD (object);
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);

  destroy_pipeline (self);
  clear_command_pool (self);
  clear_sampler (self);
  clear_framebuffer (self);
  clear_descriptor_set (self);
  clear_descriptor_cache (self);
  clear_shaders (self);
  clear_uniform_data (self);
  clear_index_data (self);
  clear_vertex_data (self);

  /* block until all deferred destructions can actually run */
  gst_vulkan_trash_list_wait (self->trash_list, -1);
  gst_vulkan_trash_list_gc (self->trash_list);
  gst_clear_object (&self->trash_list);

  gst_clear_mini_object (((GstMiniObject **) & self->last_fence));

  gst_clear_object (&self->queue);

  gst_clear_buffer (&priv->inbuf);
  gst_clear_buffer (&priv->outbuf);

  G_OBJECT_CLASS (gst_vulkan_full_screen_quad_parent_class)->finalize (object);
gst_vulkan_full_screen_quad_class_init (GstVulkanFullScreenQuadClass * klass)
  GObjectClass *obj_class = G_OBJECT_CLASS (klass);

  /* only finalize is overridden; no properties or signals */
  obj_class->finalize = gst_vulkan_full_screen_quad_finalize;
/* Set the input and output video formats.  Everything derived from the
 * formats (pipeline, framebuffer, descriptors, uniforms) is invalidated and
 * will be recreated lazily on the next draw. */
gst_vulkan_full_screen_quad_set_info (GstVulkanFullScreenQuad * self,
    GstVideoInfo * in_info, GstVideoInfo * out_info)
  self->out_info = *out_info;
  self->in_info = *in_info;

  destroy_pipeline (self);
  clear_framebuffer (self);
  clear_descriptor_set (self);
  clear_descriptor_cache (self);
  clear_uniform_data (self);
/* Set the input video buffer.  The descriptor set references the previous
 * buffer's image views, so it is invalidated here and rebuilt on the next
 * draw. */
gst_vulkan_full_screen_quad_set_input_buffer (GstVulkanFullScreenQuad * self,
    GstBuffer * buffer, GError ** error)
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  priv = GET_PRIV (self);

  gst_buffer_replace (&priv->inbuf, buffer);
  clear_descriptor_set (self);
/* Set the output video buffer.  The framebuffer references the previous
 * buffer's image views, so it is invalidated here and rebuilt on the next
 * draw. */
gst_vulkan_full_screen_quad_set_output_buffer (GstVulkanFullScreenQuad * self,
    GstBuffer * buffer, GError ** error)
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  priv = GET_PRIV (self);

  gst_buffer_replace (&priv->outbuf, buffer);
  clear_framebuffer (self);
/* Set the vertex and fragment shader modules (both required, both must be
 * GST_VULKAN_HANDLE_TYPE_SHADER).  The existing pipeline is torn down so the
 * new shaders take effect on the next draw. */
gst_vulkan_full_screen_quad_set_shaders (GstVulkanFullScreenQuad * self,
    GstVulkanHandle * vert, GstVulkanHandle * frag)
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (vert != NULL, FALSE);
  g_return_val_if_fail (vert->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);
  g_return_val_if_fail (frag != NULL, FALSE);
  g_return_val_if_fail (frag->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);

  priv = GET_PRIV (self);

  clear_shaders (self);
  destroy_pipeline (self);

  priv->vert = gst_vulkan_handle_ref (vert);
  priv->frag = gst_vulkan_handle_ref (frag);
985 gst_vulkan_full_screen_quad_set_uniform_buffer (GstVulkanFullScreenQuad * self,
986 GstMemory * uniforms, GError ** error)
988 GstVulkanFullScreenQuadPrivate *priv;
990 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
991 g_return_val_if_fail (uniforms == NULL
992 || gst_is_vulkan_buffer_memory (uniforms), FALSE);
994 priv = GET_PRIV (self);
996 clear_uniform_data (self);
998 priv->uniforms = gst_memory_ref (uniforms);
999 priv->uniform_size = gst_memory_get_sizes (uniforms, NULL, NULL);
1006 gst_vulkan_full_screen_quad_set_index_buffer (GstVulkanFullScreenQuad * self,
1007 GstMemory * indices, gsize n_indices, GError ** error)
1009 GstVulkanFullScreenQuadPrivate *priv;
1011 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1012 g_return_val_if_fail (indices == NULL
1013 || gst_is_vulkan_buffer_memory (indices), FALSE);
1015 priv = GET_PRIV (self);
1017 clear_index_data (self);
1019 priv->indices = gst_memory_ref (indices);
1020 priv->n_indices = n_indices;
1027 gst_vulkan_full_screen_quad_set_vertex_buffer (GstVulkanFullScreenQuad * self,
1028 GstMemory * vertices, GError ** error)
1030 GstVulkanFullScreenQuadPrivate *priv;
1032 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1033 g_return_val_if_fail (vertices == NULL
1034 || gst_is_vulkan_buffer_memory (vertices), FALSE);
1036 priv = GET_PRIV (self);
1038 clear_vertex_data (self);
1040 priv->vertices = gst_memory_ref (vertices);
/* Return the @i-th memory of @buffer as a GstVulkanImageMemory, or NULL
 * (with a g_return warning) when it is not Vulkan image memory.  No
 * reference is taken; @buffer keeps ownership. */
static GstVulkanImageMemory *
peek_image_from_buffer (GstBuffer * buffer, guint i)
  GstMemory *mem = gst_buffer_peek_memory (buffer, i);
  g_return_val_if_fail (gst_is_vulkan_image_memory (mem), NULL);
  return (GstVulkanImageMemory *) mem;
/* Lazily allocate and fill the default vertex and index buffers from the
 * file-scope `vertices`/`indices` tables (skipped when the caller already
 * supplied data via the set_*_buffer() API).  On failure the partially
 * created buffers are released in the error path at the bottom. */
ensure_vertex_data (GstVulkanFullScreenQuad * self, GError ** error)
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstMapInfo map_info;

  if (!priv->vertices) {
    priv->vertices = gst_vulkan_buffer_memory_alloc (self->queue->device,
        sizeof (vertices), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

    if (!gst_memory_map (priv->vertices, &map_info, GST_MAP_WRITE)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
          "Failed to map memory");

    /* host-coherent memory: plain memcpy is enough, no explicit flush */
    memcpy (map_info.data, vertices, map_info.size);
    gst_memory_unmap (priv->vertices, &map_info);

  if (!priv->indices) {
    priv->indices = gst_vulkan_buffer_memory_alloc (self->queue->device,
        sizeof (indices), VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

    if (!gst_memory_map (priv->indices, &map_info, GST_MAP_WRITE)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
          "Failed to map memory");

    memcpy (map_info.data, indices, map_info.size);
    gst_memory_unmap (priv->indices, &map_info);

    priv->n_indices = G_N_ELEMENTS (indices);

  /* error path: drop whatever was created so a later retry starts clean */
  gst_memory_unref (priv->vertices);
  priv->vertices = NULL;
  gst_memory_unref (priv->indices);
  priv->indices = NULL;
  priv->n_indices = 0;
/* Convenience entry point: prepare all lazy state, record a one-time-submit
 * command buffer via fill_command_buffer(), submit it and keep the fence.
 * On error the command buffer and fence references are dropped. */
gst_vulkan_full_screen_quad_draw (GstVulkanFullScreenQuad * self,
  GstVulkanCommandBuffer *cmd = NULL;
  GstVulkanFence *fence = NULL;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  fence = gst_vulkan_device_create_fence (self->queue->device, error);

  if (!gst_vulkan_full_screen_quad_prepare_draw (self, fence, error))

  if (!(cmd = gst_vulkan_command_pool_create (self->cmd_pool, error)))

    VkCommandBufferBeginInfo cmd_buf_info = { 0, };

    cmd_buf_info = (VkCommandBufferBeginInfo) {
      .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
      .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
      .pInheritanceInfo = NULL

    /* the command buffer must stay locked while recording */
    gst_vulkan_command_buffer_lock (cmd);
    err = vkBeginCommandBuffer (cmd->cmd, &cmd_buf_info);
    if (gst_vulkan_error_to_g_error (err, error, "vkBeginCommandBuffer") < 0)

  if (!gst_vulkan_full_screen_quad_fill_command_buffer (self, cmd, fence,

  err = vkEndCommandBuffer (cmd->cmd);
  gst_vulkan_command_buffer_unlock (cmd);
  if (gst_vulkan_error_to_g_error (err, error, "vkEndCommandBuffer") < 0)

  if (!gst_vulkan_full_screen_quad_submit (self, cmd, fence, error))

  gst_vulkan_fence_unref (fence);

  /* error path: release the lock taken above before dropping references */
  gst_vulkan_command_buffer_unlock (cmd);

  gst_clear_mini_object ((GstMiniObject **) & cmd);
  gst_clear_mini_object ((GstMiniObject **) & fence);
/* Create (lazily) everything a draw needs: graphics pipeline, vertex/index
 * data, descriptor pool/set from the input buffer's image views, framebuffer
 * from the output buffer's image views, and the command pool.  Views are
 * kept alive via the trash list keyed on @fence and unreffed locally at the
 * end. */
gst_vulkan_full_screen_quad_prepare_draw (GstVulkanFullScreenQuad * self,
    GstVulkanFence * fence, GError ** error)
  GstVulkanFullScreenQuadPrivate *priv;
  GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  priv = GET_PRIV (self);

  if (!self->graphics_pipeline)
    if (!create_pipeline (self, error))

  if (!ensure_vertex_data (self, error))

  if (!self->descriptor_cache)
    if (!create_descriptor_pool (self, error))

  if (!self->descriptor_set) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Input memory must be a GstVulkanImageMemory");
      in_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
      /* keep the view alive until @fence signals */
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) in_views[i]));
    if (!(self->descriptor_set =
            get_and_update_descriptor_set (self, in_views, error)))

  if (!self->framebuffer) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Output memory must be a GstVulkanImageMemory");
      out_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) out_views[i]));
    if (!create_framebuffer (self, out_views, error))

  if (!self->cmd_pool)
    if (!(self->cmd_pool =
            gst_vulkan_queue_create_command_pool (self->queue, error)))

  /* drop the local refs; the trash list holds the lifetime-extending ones */
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & in_views[i]);
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & out_views[i]);
1248 * gst_vulkan_full_screen_quad_fill_command_buffer:
1249 * @self: a #GstVulkanFullScreenQuad
1250 * @cmd: the #GstVulkanCommandBuffer to fill with commands
 * @fence: the #GstVulkanFence that the resources used by @cmd will be
 *     tied to for their lifetime
1251 * @error: a #GError to fill on error
1253 * Returns: whether @cmd could be filled with the necessary commands
/* Records the complete draw into @cmd: acquires an image view for every
 * plane of the input and output buffers, transitions input images to
 * SHADER_READ_ONLY_OPTIMAL and output images to COLOR_ATTACHMENT_OPTIMAL
 * with pipeline barriers, then records the render pass (bind descriptor
 * set, pipeline, vertex and index buffers, indexed draw).  Every acquired
 * view is tied to @fence via the trash list so it outlives GPU execution.
 *
 * NOTE(review): this chunk is a sampled excerpt — some original lines
 * (closing braces, `goto` error paths, the final return) are not visible
 * here; the comments below describe only the visible code. */
1256 gst_vulkan_full_screen_quad_fill_command_buffer (GstVulkanFullScreenQuad * self,
1257     GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
1259   GstVulkanFullScreenQuadPrivate *priv;
1260   GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
1261   GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
1264   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1265   g_return_val_if_fail (cmd != NULL, FALSE);
1266   g_return_val_if_fail (fence != NULL, FALSE);
1268   priv = GET_PRIV (self);
/* One image view per input plane; deferred-unref through the trash list. */
1270   for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
1271     GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
1272     if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
1273       g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
1274           "Input memory must be a GstVulkanImageMemory");
1277     in_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
1278     gst_vulkan_trash_list_add (self->trash_list,
1279         gst_vulkan_trash_list_acquire (self->trash_list, fence,
1280             gst_vulkan_trash_mini_object_unref, (GstMiniObject *) in_views[i]));
/* Same for the output planes. */
1282   for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
1283     GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
1284     if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
1285       g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
1286           "Output memory must be a GstVulkanImageMemory");
1289     out_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
1290     gst_vulkan_trash_list_add (self->trash_list,
1291         gst_vulkan_trash_list_acquire (self->trash_list, fence,
1292             gst_vulkan_trash_mini_object_unref,
1293             (GstMiniObject *) out_views[i]));
/* Transition each input image so the fragment shader can sample it.  The
 * source stage/access/layout come from the image's cached barrier state. */
1296   for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
1298     VkImageMemoryBarrier in_image_memory_barrier = {
1299       .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
1301       .srcAccessMask = in_views[i]->image->barrier.parent.access_flags,
1302       .dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
1303       .oldLayout = in_views[i]->image->barrier.image_layout,
1304       .newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
1305       /* FIXME: implement exclusive transfers */
1306       .srcQueueFamilyIndex = 0,
1307       .dstQueueFamilyIndex = 0,
1308       .image = in_views[i]->image->image,
1309       .subresourceRange = in_views[i]->image->barrier.subresource_range
1313     vkCmdPipelineBarrier (cmd->cmd,
1314         in_views[i]->image->barrier.parent.pipeline_stages,
1315         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, NULL, 0, NULL, 1,
1316         &in_image_memory_barrier);
/* Remember the image's new stage/access/layout for the next barrier. */
1318     in_views[i]->image->barrier.parent.pipeline_stages =
1319         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
1320     in_views[i]->image->barrier.parent.access_flags =
1321         in_image_memory_barrier.dstAccessMask;
1322     in_views[i]->image->barrier.image_layout =
1323         in_image_memory_barrier.newLayout;
/* Transition each output image for color-attachment writes. */
1326   for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
1328     VkImageMemoryBarrier out_image_memory_barrier = {
1329       .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
1331       .srcAccessMask = out_views[i]->image->barrier.parent.access_flags,
1332       .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1333       .oldLayout = out_views[i]->image->barrier.image_layout,
1334       .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
1335       /* FIXME: implement exclusive transfers */
1336       .srcQueueFamilyIndex = 0,
1337       .dstQueueFamilyIndex = 0,
1338       .image = out_views[i]->image->image,
1339       .subresourceRange = out_views[i]->image->barrier.subresource_range
1343     vkCmdPipelineBarrier (cmd->cmd,
1344         out_views[i]->image->barrier.parent.pipeline_stages,
1345         VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, NULL, 0, NULL, 1,
1346         &out_image_memory_barrier);
1348     out_views[i]->image->barrier.parent.pipeline_stages =
1349         VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
1350     out_views[i]->image->barrier.parent.access_flags =
1351         out_image_memory_barrier.dstAccessMask;
1352     out_views[i]->image->barrier.image_layout =
1353         out_image_memory_barrier.newLayout;
/* Record the render pass over the whole output extent, clearing every
 * output plane to opaque black (one clear value per plane). */
1358     VkClearValue clearColor = {{{ 0.0f, 0.0f, 0.0f, 1.0f }}};
1359     VkClearValue clearColors[GST_VIDEO_MAX_PLANES] = {
1360       clearColor, clearColor, clearColor, clearColor,
1362     VkRenderPassBeginInfo render_pass_info = {
1363       .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
1364       .renderPass = (VkRenderPass) self->render_pass->handle,
1365       .framebuffer = (VkFramebuffer) self->framebuffer->handle,
1366       .renderArea.offset = { 0, 0 },
1367       .renderArea.extent = {
1368         GST_VIDEO_INFO_WIDTH (&self->out_info),
1369         GST_VIDEO_INFO_HEIGHT (&self->out_info)
1371       .clearValueCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
1372       .pClearValues = clearColors,
1375     VkDeviceSize offsets[] = { 0 };
/* Descriptors must match the layout the pipeline was created with. */
1377     vkCmdBindDescriptorSets (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
1378         (VkPipelineLayout) self->pipeline_layout->handle, 0, 1,
1379         &self->descriptor_set->set, 0, NULL);
1381     vkCmdBeginRenderPass (cmd->cmd, &render_pass_info,
1382         VK_SUBPASS_CONTENTS_INLINE);
1383     vkCmdBindPipeline (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
1384         (VkPipeline) self->graphics_pipeline->handle);
1385     vkCmdBindVertexBuffers (cmd->cmd, 0, 1,
1386         &((GstVulkanBufferMemory *) priv->vertices)->buffer, offsets);
1387     vkCmdBindIndexBuffer (cmd->cmd,
1388         ((GstVulkanBufferMemory *) priv->indices)->buffer, 0,
1389         VK_INDEX_TYPE_UINT16);
/* Single indexed draw of the two-triangle full-screen quad. */
1390     vkCmdDrawIndexed (cmd->cmd, priv->n_indices, 1, 0, 0, 0);
1391     vkCmdEndRenderPass (cmd->cmd);
1401 * gst_vulkan_full_screen_quad_submit:
1402 * @self: a #GstVulkanFullScreenQuad
1403 * @cmd: (transfer full): a #GstVulkanCommandBuffer to submit
1404 * @fence: a #GstVulkanFence to signal on completion
1405 * @error: a #GError to fill on error
1407 * Returns: whether @cmd could be submitted to the queue
1410 gst_vulkan_full_screen_quad_submit (GstVulkanFullScreenQuad * self,
1411 GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
1415 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1416 g_return_val_if_fail (cmd != NULL, FALSE);
1417 g_return_val_if_fail (fence != NULL, FALSE);
1421 VkSubmitInfo submit_info = {
1422 .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
1424 .waitSemaphoreCount = 0,
1425 .pWaitSemaphores = NULL,
1426 .pWaitDstStageMask = NULL,
1427 .commandBufferCount = 1,
1428 .pCommandBuffers = &cmd->cmd,
1429 .signalSemaphoreCount = 0,
1430 .pSignalSemaphores = NULL,
1434 gst_vulkan_queue_submit_lock (self->queue);
1436 vkQueueSubmit (self->queue->queue, 1, &submit_info,
1437 GST_VULKAN_FENCE_FENCE (fence));
1438 gst_vulkan_queue_submit_unlock (self->queue);
1439 if (gst_vulkan_error_to_g_error (err, error, "vkQueueSubmit") < 0)
1443 gst_vulkan_trash_list_add (self->trash_list,
1444 gst_vulkan_trash_list_acquire (self->trash_list, fence,
1445 gst_vulkan_trash_mini_object_unref, GST_MINI_OBJECT_CAST (cmd)));
1447 gst_vulkan_trash_list_gc (self->trash_list);
1449 if (self->last_fence)
1450 gst_vulkan_fence_unref (self->last_fence);
1451 self->last_fence = gst_vulkan_fence_ref (fence);