vulkan: move fullscreenquad object to library
[platform/upstream/gstreamer.git] / gst-libs / gst / vulkan / gstvkfullscreenquad.c
1 /*
2  * GStreamer Plugins Vulkan
3  * Copyright (C) 2019 Matthew Waters <matthew@centricular.com>
4  *
5  * This library is free software; you can redistribute it and/or
6  * modify it under the terms of the GNU Library General Public
7  * License as published by the Free Software Foundation; either
8  * version 2 of the License, or (at your option) any later version.
9  *
10  * This library is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13  * Library General Public License for more details.
14  *
15  * You should have received a copy of the GNU Library General Public
16  * License along with this library; if not, write to the
17  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18  * Boston, MA 02110-1301, USA.
19  */
20
21 #ifdef HAVE_CONFIG_H
22 #include "config.h"
23 #endif
24
25 #include "gstvkfullscreenquad.h"
26
27 #define GST_CAT_DEFAULT gst_vulkan_full_screen_quad_debug
28 GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
29
/* A single vertex of the full-screen quad: a 3D clip-space position
 * (x, y, z) followed by a 2D texture coordinate (s, t).  The field order
 * and packing must match the vertex input attribute descriptions set up
 * in create_pipeline() (G_STRUCT_OFFSET is used on x and s there).
 * XXX: privatise this on moving to lib */
struct Vertex
{
  float x, y, z;                /* clip-space position */
  float s, t;                   /* normalized texture coordinate */
};
36
/* Instance-private state for GstVulkanFullScreenQuad */
struct _GstVulkanFullScreenQuadPrivate
{
  GstBuffer *inbuf;             /* current input video buffer */
  GstBuffer *outbuf;            /* current output video buffer */

  GstMemory *vertices;          /* GPU buffer holding the quad vertices */
  GstMemory *indices;           /* GPU buffer holding the draw indices */
  gsize n_indices;              /* number of indices stored in @indices */
  GstMemory *uniforms;          /* optional uniform buffer (binding 0) */
  gsize uniform_size;           /* size in bytes of the @uniforms data */

  GstVulkanHandle *vert;        /* vertex shader module */
  GstVulkanHandle *frag;        /* fragment shader module */
};
51
/* Register the GstVulkanFullScreenQuad GObject type, initialising the
 * debug category and attaching the private instance data. */
G_DEFINE_TYPE_WITH_CODE (GstVulkanFullScreenQuad, gst_vulkan_full_screen_quad,
    GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_vulkan_full_screen_quad_debug,
        "vulkanfullscreenquad", 0, "vulkan fullscreen quad render");
    G_ADD_PRIVATE (GstVulkanFullScreenQuad));

/* Convenience accessor for the private instance data */
#define GET_PRIV(self) gst_vulkan_full_screen_quad_get_instance_private (self)
58
59 struct Vertex vertices[] = {
60   {-1.0f, -1.0f, 0.0f, 0.0f, 0.0f},
61   {1.0f, -1.0f, 0.0f, 1.0f, 0.0f},
62   {1.0f, 1.0f, 0.0f, 1.0f, 1.0f},
63   {-1.0f, 1.0f, 0.0f, 0.0f, 1.0f},
64 };
65
66 gushort indices[] = {
67   0, 1, 2, 0, 2, 3,
68 };
69
70 static gboolean
71 create_sampler (GstVulkanFullScreenQuad * self, GError ** error)
72 {
73   /* *INDENT-OFF* */
74   VkSamplerCreateInfo samplerInfo = {
75       .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
76       .magFilter = VK_FILTER_LINEAR,
77       .minFilter = VK_FILTER_LINEAR,
78       .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
79       .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
80       .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
81       .anisotropyEnable = VK_FALSE,
82       .maxAnisotropy = 1,
83       .borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
84       .unnormalizedCoordinates = VK_FALSE,
85       .compareEnable = VK_FALSE,
86       .compareOp = VK_COMPARE_OP_ALWAYS,
87       .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
88       .mipLodBias = 0.0f,
89       .minLod = 0.0f,
90       .maxLod = 0.0f
91   };
92   /* *INDENT-ON* */
93   VkSampler sampler;
94   VkResult err;
95
96   err =
97       vkCreateSampler (self->queue->device->device, &samplerInfo, NULL,
98       &sampler);
99   if (gst_vulkan_error_to_g_error (err, error, "vkCreateSampler") < 0) {
100     return FALSE;
101   }
102
103   self->sampler = gst_vulkan_handle_new_wrapped (self->queue->device,
104       GST_VULKAN_HANDLE_TYPE_SAMPLER, (GstVulkanHandleTypedef) sampler,
105       gst_vulkan_handle_free_sampler, NULL);
106
107   return TRUE;
108 }
109
/* Acquire a descriptor set from the cache and update its bindings for the
 * current frame: binding 0 points at the (optional) uniform buffer and
 * bindings 1..N each hold a combined image/sampler for one input plane
 * from @views.  Creates the shared sampler on first use.
 *
 * Returns the set (caller owns the reference) or NULL with @error set. */
static GstVulkanDescriptorSet *
get_and_update_descriptor_set (GstVulkanFullScreenQuad * self,
    GstVulkanImageView ** views, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanDescriptorSet *set;

  if (!self->sampler)
    if (!create_sampler (self, error))
      return NULL;

  if (!(set =
          gst_vulkan_descriptor_cache_acquire (self->descriptor_cache, error)))
    return NULL;

  {
    /* one write per input plane plus one for the uniform buffer */
    VkWriteDescriptorSet writes[GST_VIDEO_MAX_PLANES + 1];
    VkDescriptorImageInfo image_info[GST_VIDEO_MAX_PLANES];
    VkDescriptorBufferInfo buffer_info;
    int write_n = 0;
    int i;

    /* *INDENT-OFF* */
    if (priv->uniforms) {
      /* NOTE(review): the set layout always declares binding 0 as a
       * uniform buffer; when priv->uniforms is NULL that binding is left
       * unwritten here — confirm this is valid for all users. */
      buffer_info = (VkDescriptorBufferInfo) {
          .buffer = ((GstVulkanBufferMemory *) priv->uniforms)->buffer,
          .offset = 0,
          .range = priv->uniform_size
      };

      writes[write_n++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .pNext = NULL,
          .dstSet = set->set,
          .dstBinding = 0,
          .dstArrayElement = 0,
          .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
          .descriptorCount = 1,
          .pBufferInfo = &buffer_info
      };
    }

    /* one combined image/sampler per input plane, bindings start at 1 */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      image_info[i] = (VkDescriptorImageInfo) {
          .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
          .imageView = views[i]->view,
          .sampler = (VkSampler) self->sampler->handle
      };

      writes[write_n++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .pNext = NULL,
          .dstSet = set->set,
          .dstBinding = i + 1,
          .dstArrayElement = 0,
          .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
          .descriptorCount = 1,
          .pImageInfo = &image_info[i]
      };
    }
    /* *INDENT-ON* */
    vkUpdateDescriptorSets (self->queue->device->device, write_n, writes, 0,
        NULL);
  }

  return set;
}
177
178 static gboolean
179 create_descriptor_set_layout (GstVulkanFullScreenQuad * self, GError ** error)
180 {
181   VkDescriptorSetLayoutBinding bindings[GST_VIDEO_MAX_PLANES + 1] = { {0,} };
182   VkDescriptorSetLayoutCreateInfo layout_info;
183   VkDescriptorSetLayout descriptor_set_layout;
184   int descriptor_n = 0;
185   VkResult err;
186   int i;
187
188   /* *INDENT-OFF* */
189   bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
190       .binding = 0,
191       .descriptorCount = 1,
192       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
193       .pImmutableSamplers = NULL,
194       .stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT
195   };
196   for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
197     bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
198       .binding = i+1,
199       .descriptorCount = 1,
200       .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
201       .pImmutableSamplers = NULL,
202       .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
203     };
204   };
205
206   layout_info = (VkDescriptorSetLayoutCreateInfo) {
207       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
208       .pNext = NULL,
209       .bindingCount = descriptor_n,
210       .pBindings = bindings
211   };
212   /* *INDENT-ON* */
213
214   err =
215       vkCreateDescriptorSetLayout (self->queue->device->device, &layout_info,
216       NULL, &descriptor_set_layout);
217   if (gst_vulkan_error_to_g_error (err, error,
218           "vkCreateDescriptorSetLayout") < 0) {
219     return FALSE;
220   }
221
222   self->descriptor_set_layout =
223       gst_vulkan_handle_new_wrapped (self->queue->device,
224       GST_VULKAN_HANDLE_TYPE_DESCRIPTOR_SET_LAYOUT,
225       (GstVulkanHandleTypedef) descriptor_set_layout,
226       gst_vulkan_handle_free_descriptor_set_layout, NULL);
227
228   return TRUE;
229 }
230
231 static gboolean
232 create_pipeline_layout (GstVulkanFullScreenQuad * self, GError ** error)
233 {
234   VkPipelineLayoutCreateInfo pipeline_layout_info;
235   VkPipelineLayout pipeline_layout;
236   VkResult err;
237
238   if (!self->descriptor_set_layout)
239     if (!create_descriptor_set_layout (self, error))
240       return FALSE;
241
242   /* *INDENT-OFF* */
243   pipeline_layout_info = (VkPipelineLayoutCreateInfo) {
244       .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
245       .pNext = NULL,
246       .setLayoutCount = 1,
247       .pSetLayouts = (VkDescriptorSetLayout *) &self->descriptor_set_layout->handle,
248       .pushConstantRangeCount = 0,
249       .pPushConstantRanges = NULL,
250   };
251   /* *INDENT-ON* */
252
253   err =
254       vkCreatePipelineLayout (self->queue->device->device,
255       &pipeline_layout_info, NULL, &pipeline_layout);
256   if (gst_vulkan_error_to_g_error (err, error, "vkCreatePipelineLayout") < 0) {
257     return FALSE;
258   }
259
260   self->pipeline_layout = gst_vulkan_handle_new_wrapped (self->queue->device,
261       GST_VULKAN_HANDLE_TYPE_PIPELINE_LAYOUT,
262       (GstVulkanHandleTypedef) pipeline_layout,
263       gst_vulkan_handle_free_pipeline_layout, NULL);
264
265   return TRUE;
266 }
267
/* Create a render pass with one colour attachment per plane of the output
 * video format: cleared on load, stored on completion, and left in
 * COLOR_ATTACHMENT_OPTIMAL layout.  The wrapped handle is stored in
 * self->render_pass.  Returns FALSE and sets @error on failure. */
static gboolean
create_render_pass (GstVulkanFullScreenQuad * self, GError ** error)
{
  VkAttachmentDescription color_attachments[GST_VIDEO_MAX_PLANES];
  VkAttachmentReference color_attachment_refs[GST_VIDEO_MAX_PLANES];
  VkRenderPassCreateInfo render_pass_info;
  VkSubpassDescription subpass;
  VkRenderPass render_pass;
  VkResult err;
  int i;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    /* *INDENT-OFF* */
    color_attachments[i] = (VkAttachmentDescription) {
        .format = gst_vulkan_format_from_video_info (&self->out_info, i),
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
        .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
        .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
        .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
        /* FIXME: share this between elements to avoid pipeline barriers */
        .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
    };

    color_attachment_refs[i] = (VkAttachmentReference) {
      .attachment = i,
      .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
    };
    /* *INDENT-ON* */
  }

  /* single graphics subpass writing all colour attachments */
  /* *INDENT-OFF* */
  subpass = (VkSubpassDescription) {
      .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
      .colorAttachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pColorAttachments = color_attachment_refs
  };

  render_pass_info = (VkRenderPassCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
      .pNext = NULL,
      .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pAttachments = color_attachments,
      .subpassCount = 1,
      .pSubpasses = &subpass
  };
  /* *INDENT-ON* */

  err =
      vkCreateRenderPass (self->queue->device->device, &render_pass_info, NULL,
      &render_pass);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateRenderPass") < 0) {
    return FALSE;
  }

  self->render_pass = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_RENDER_PASS,
      (GstVulkanHandleTypedef) render_pass,
      gst_vulkan_handle_free_render_pass, NULL);

  return TRUE;
}
331
/* Create the graphics pipeline for rendering the full-screen quad:
 * vertex + fragment shader stages, the Vertex layout as sole vertex
 * input, a fixed viewport/scissor covering the whole output frame,
 * no culling, no multisampling and no blending.  Lazily creates the
 * pipeline layout and render pass as needed.  The wrapped handle is
 * stored in self->graphics_pipeline. */
static gboolean
create_pipeline (GstVulkanFullScreenQuad * self, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  VkVertexInputBindingDescription vertex_binding;
  VkVertexInputAttributeDescription attribute_descriptions[2];
  VkPipelineShaderStageCreateInfo shader_create_info[2];
  VkPipelineVertexInputStateCreateInfo vertex_input_info;
  VkPipelineInputAssemblyStateCreateInfo input_assembly;
  VkPipelineViewportStateCreateInfo viewport_state;
  VkPipelineRasterizationStateCreateInfo rasterizer;
  VkPipelineMultisampleStateCreateInfo multisampling;
  VkPipelineColorBlendAttachmentState
      color_blend_attachments[GST_VIDEO_MAX_PLANES];
  VkPipelineColorBlendStateCreateInfo color_blending;
  VkGraphicsPipelineCreateInfo pipeline_create_info;
  VkPipeline pipeline;
  VkResult err;

  /* shaders must have been supplied via set_shaders() first */
  if (!priv->vert || !priv->frag) {
    g_set_error_literal (error, GST_VULKAN_ERROR,
        VK_ERROR_INITIALIZATION_FAILED, "Missing shader information");
    return FALSE;
  }

  if (!self->pipeline_layout)
    if (!create_pipeline_layout (self, error))
      return FALSE;

  if (!self->render_pass)
    if (!create_render_pass (self, error))
      return FALSE;

  /* *INDENT-OFF* */
  shader_create_info[0] = (VkPipelineShaderStageCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
      .pNext = NULL,
      .stage = VK_SHADER_STAGE_VERTEX_BIT,
      .module = (VkShaderModule) priv->vert->handle,
      .pName = "main"
  };

  shader_create_info[1] = (VkPipelineShaderStageCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
      .pNext = NULL,
      .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
      .module = (VkShaderModule) priv->frag->handle,
      .pName = "main"
  };

  /* vertex layout: must match struct Vertex above */
  vertex_binding = (VkVertexInputBindingDescription) {
      .binding = 0,
      .stride = sizeof (struct Vertex),
      .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
  };

  attribute_descriptions[0] = (VkVertexInputAttributeDescription) {
      .binding = 0,
      .location = 0,
      .format = VK_FORMAT_R32G32B32_SFLOAT,
      .offset = G_STRUCT_OFFSET (struct Vertex, x)
  };
  attribute_descriptions[1] = (VkVertexInputAttributeDescription) {
      .binding = 0,
      .location = 1,
      .format = VK_FORMAT_R32G32_SFLOAT,
      .offset = G_STRUCT_OFFSET (struct Vertex, s)
  };

  vertex_input_info = (VkPipelineVertexInputStateCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
      .pNext = NULL,
      .vertexBindingDescriptionCount = 1,
      .pVertexBindingDescriptions = &vertex_binding,
      .vertexAttributeDescriptionCount = 2,
      .pVertexAttributeDescriptions = attribute_descriptions,
  };

  /* NOTE(review): topology is TRIANGLE_STRIP but the index array
   * (0,1,2,0,2,3) is shaped like a triangle list; the quad still renders
   * fully because both covering triangles appear in the strip and culling
   * is disabled, but the extra strip triangles are degenerate — confirm
   * whether TRIANGLE_LIST was intended. */
  input_assembly = (VkPipelineInputAssemblyStateCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
      .pNext = NULL,
      .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
      .primitiveRestartEnable = VK_FALSE
  };

  /* static viewport/scissor covering the whole output frame */
  viewport_state = (VkPipelineViewportStateCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
      .pNext = NULL,
      .viewportCount = 1,
      .pViewports = &(VkViewport) {
          .x = 0.0f,
          .y = 0.0f,
          .width = (float) GST_VIDEO_INFO_WIDTH (&self->out_info),
          .height = (float) GST_VIDEO_INFO_HEIGHT (&self->out_info),
          .minDepth = 0.0f,
          .maxDepth = 1.0f
      },
      .scissorCount = 1,
      .pScissors = &(VkRect2D) {
          .offset = { 0, 0 },
          .extent = {
              GST_VIDEO_INFO_WIDTH (&self->out_info),
              GST_VIDEO_INFO_HEIGHT (&self->out_info)
          }
      }
  };

  rasterizer = (VkPipelineRasterizationStateCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
      .pNext = NULL,
      .depthClampEnable = VK_FALSE,
      .rasterizerDiscardEnable = VK_FALSE,
      .polygonMode = VK_POLYGON_MODE_FILL,
      .lineWidth = 1.0f,
      .cullMode = VK_CULL_MODE_NONE,
      .frontFace = VK_FRONT_FACE_CLOCKWISE,
      .depthBiasEnable = VK_FALSE
  };

  multisampling = (VkPipelineMultisampleStateCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
      .pNext = NULL,
      .sampleShadingEnable = VK_FALSE,
      .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT
  };

  /* one no-blend attachment state per possible output plane */
  color_blend_attachments[0] = (VkPipelineColorBlendAttachmentState) {
      .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
      .blendEnable = VK_FALSE
  };
  color_blend_attachments[1] = (VkPipelineColorBlendAttachmentState) {
      .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
      .blendEnable = VK_FALSE
  };
  color_blend_attachments[2] = (VkPipelineColorBlendAttachmentState) {
      .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
      .blendEnable = VK_FALSE
  };
  color_blend_attachments[3] = (VkPipelineColorBlendAttachmentState) {
      .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
      .blendEnable = VK_FALSE
  };

  color_blending = (VkPipelineColorBlendStateCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
      .pNext = NULL,
      .logicOpEnable = VK_FALSE,
      .logicOp = VK_LOGIC_OP_COPY,
      .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pAttachments = color_blend_attachments,
      .blendConstants = { 0.0f, 0.0f, 0.0f, 0.0f }
  };

  pipeline_create_info = (VkGraphicsPipelineCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
      .pNext = NULL,
      .stageCount = 2,
      .pStages = shader_create_info,
      .pVertexInputState = &vertex_input_info,
      .pInputAssemblyState = &input_assembly,
      .pViewportState = &viewport_state,
      .pRasterizationState = &rasterizer,
      .pMultisampleState = &multisampling,
      .pColorBlendState = &color_blending,
      .layout = (VkPipelineLayout) self->pipeline_layout->handle,
      .renderPass = (VkRenderPass) self->render_pass->handle,
      .subpass = 0,
      .basePipelineHandle = VK_NULL_HANDLE
  };
  /* *INDENT-ON* */

  err =
      vkCreateGraphicsPipelines (self->queue->device->device, VK_NULL_HANDLE, 1,
      &pipeline_create_info, NULL, &pipeline);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateGraphicsPipelines") < 0) {
    return FALSE;
  }

  self->graphics_pipeline = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_PIPELINE, (GstVulkanHandleTypedef) pipeline,
      gst_vulkan_handle_free_pipeline, NULL);

  return TRUE;
}
517
/* Create a descriptor pool sized for up to 32 sets and wrap it in a
 * GstVulkanDescriptorCache stored in self->descriptor_cache.  Each set
 * holds one combined image/sampler per input plane, plus a uniform
 * buffer slot when uniform data has been supplied.
 *
 * NOTE(review): if uniform data is set only *after* the pool has been
 * created, the pool has no uniform-buffer capacity while the layout
 * declares binding 0 — confirm callers always set uniforms first. */
static gboolean
create_descriptor_pool (GstVulkanFullScreenQuad * self, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  VkDescriptorPoolCreateInfo pool_info;
  gsize max_sets = 32;          /* FIXME: don't hardcode this! */
  guint n_pools = 1;            /* number of valid entries in pool_sizes */
  VkDescriptorPoolSize pool_sizes[2];
  VkDescriptorPool pool;
  GstVulkanDescriptorPool *ret;
  VkResult err;

  /* *INDENT-OFF* */
  pool_sizes[0] = (VkDescriptorPoolSize) {
      .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
      .descriptorCount = max_sets * GST_VIDEO_INFO_N_PLANES (&self->in_info),
  };

  if (priv->uniforms) {
    pool_sizes[1] = (VkDescriptorPoolSize) {
        .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        .descriptorCount = max_sets
    };
    n_pools++;
  }

  pool_info = (VkDescriptorPoolCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
      .pNext = NULL,
      .flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
      .poolSizeCount = n_pools,
      .pPoolSizes = pool_sizes,
      .maxSets = max_sets
  };
  /* *INDENT-ON* */

  err =
      vkCreateDescriptorPool (self->queue->device->device, &pool_info, NULL,
      &pool);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateDescriptorPool") < 0) {
    return FALSE;
  }

  /* the cache takes its own reference on the wrapped pool */
  ret =
      gst_vulkan_descriptor_pool_new_wrapped (self->queue->device, pool,
      max_sets);
  self->descriptor_cache =
      gst_vulkan_descriptor_cache_new (ret, 1, &self->descriptor_set_layout);
  gst_object_unref (ret);

  return TRUE;
}
570
/* Create a framebuffer for the current render pass using one image view
 * per output plane from @views, sized to the output video frame.  The
 * wrapped handle is stored in self->framebuffer.  Requires
 * self->render_pass to exist.  Returns FALSE and sets @error on failure. */
static gboolean
create_framebuffer (GstVulkanFullScreenQuad * self, GstVulkanImageView ** views,
    GError ** error)
{
  VkImageView attachments[GST_VIDEO_MAX_PLANES] = { 0, };
  VkFramebufferCreateInfo framebuffer_info;
  VkFramebuffer framebuffer;
  VkResult err;
  int i;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    attachments[i] = views[i]->view;
  }

  /* *INDENT-OFF* */
  framebuffer_info = (VkFramebufferCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
      .pNext = NULL,
      .renderPass = (VkRenderPass) self->render_pass->handle,
      .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pAttachments = attachments,
      .width = GST_VIDEO_INFO_WIDTH (&self->out_info),
      .height = GST_VIDEO_INFO_HEIGHT (&self->out_info),
      .layers = 1
  };
  /* *INDENT-ON* */

  err =
      vkCreateFramebuffer (self->queue->device->device, &framebuffer_info, NULL,
      &framebuffer);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateFramebuffer") < 0) {
    return FALSE;
  }

  self->framebuffer = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_FRAMEBUFFER, (GstVulkanHandleTypedef) framebuffer,
      gst_vulkan_handle_free_framebuffer, NULL);

  return TRUE;
}
611
/* Evaluates to a new reference to the last submitted fence, or to a fresh
 * always-signalled fence if nothing has been submitted yet.  The caller
 * owns the returned reference and must gst_vulkan_fence_unref() it. */
#define LAST_FENCE_OR_ALWAYS_SIGNALLED(self,device) \
    self->last_fence ? gst_vulkan_fence_ref (self->last_fence) : gst_vulkan_fence_new_always_signalled (device)
614
615 GstVulkanFence *
616 gst_vulkan_full_screen_quad_get_last_fence (GstVulkanFullScreenQuad * self)
617 {
618   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), NULL);
619
620   return LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
621 }
622
623 static void
624 clear_descriptor_set (GstVulkanFullScreenQuad * self)
625 {
626   GstVulkanFence *last_fence =
627       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
628
629   if (self->descriptor_set)
630     gst_vulkan_trash_list_add (self->trash_list,
631         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
632             gst_vulkan_trash_mini_object_unref,
633             (GstMiniObject *) self->descriptor_set));
634   self->descriptor_set = NULL;
635
636   gst_vulkan_fence_unref (last_fence);
637 }
638
639 static void
640 clear_framebuffer (GstVulkanFullScreenQuad * self)
641 {
642   GstVulkanFence *last_fence =
643       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
644
645   if (self->framebuffer)
646     gst_vulkan_trash_list_add (self->trash_list,
647         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
648             gst_vulkan_trash_mini_object_unref,
649             (GstMiniObject *) self->framebuffer));
650   self->framebuffer = NULL;
651
652   gst_vulkan_fence_unref (last_fence);
653 }
654
655 static void
656 clear_command_pool (GstVulkanFullScreenQuad * self)
657 {
658   GstVulkanFence *last_fence =
659       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
660
661   if (self->cmd_pool)
662     gst_vulkan_trash_list_add (self->trash_list,
663         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
664             gst_vulkan_trash_object_unref, (GstObject *) self->cmd_pool));
665   self->cmd_pool = NULL;
666
667   gst_vulkan_fence_unref (last_fence);
668 }
669
670 static void
671 clear_sampler (GstVulkanFullScreenQuad * self)
672 {
673   GstVulkanFence *last_fence =
674       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
675
676   if (self->sampler)
677     gst_vulkan_trash_list_add (self->trash_list,
678         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
679             gst_vulkan_trash_mini_object_unref,
680             (GstMiniObject *) self->sampler));
681   self->sampler = NULL;
682
683   gst_vulkan_fence_unref (last_fence);
684 }
685
686 static void
687 clear_descriptor_cache (GstVulkanFullScreenQuad * self)
688 {
689   GstVulkanFence *last_fence =
690       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
691
692   if (self->descriptor_cache)
693     gst_vulkan_trash_list_add (self->trash_list,
694         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
695             gst_vulkan_trash_object_unref,
696             (GstObject *) self->descriptor_cache));
697   self->descriptor_cache = NULL;
698
699   gst_vulkan_fence_unref (last_fence);
700 }
701
702 static void
703 clear_shaders (GstVulkanFullScreenQuad * self)
704 {
705   GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
706   GstVulkanFence *last_fence =
707       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
708
709   if (priv->vert)
710     gst_vulkan_trash_list_add (self->trash_list,
711         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
712             gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->vert));
713   priv->vert = NULL;
714
715   if (priv->frag)
716     gst_vulkan_trash_list_add (self->trash_list,
717         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
718             gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->frag));
719   priv->frag = NULL;
720
721   gst_vulkan_fence_unref (last_fence);
722 }
723
724 static void
725 clear_uniform_data (GstVulkanFullScreenQuad * self)
726 {
727   GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
728   GstVulkanFence *last_fence =
729       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
730
731   if (priv->uniforms)
732     gst_vulkan_trash_list_add (self->trash_list,
733         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
734             gst_vulkan_trash_mini_object_unref,
735             (GstMiniObject *) priv->uniforms));
736   priv->uniforms = NULL;
737   priv->uniform_size = 0;
738
739   gst_vulkan_fence_unref (last_fence);
740 }
741
742 static void
743 clear_index_data (GstVulkanFullScreenQuad * self)
744 {
745   GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
746   GstVulkanFence *last_fence =
747       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
748
749   if (priv->indices)
750     gst_vulkan_trash_list_add (self->trash_list,
751         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
752             gst_vulkan_trash_mini_object_unref,
753             (GstMiniObject *) priv->indices));
754   priv->indices = NULL;
755   priv->n_indices = 0;
756
757   gst_vulkan_fence_unref (last_fence);
758 }
759
760 static void
761 clear_vertex_data (GstVulkanFullScreenQuad * self)
762 {
763   GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
764   GstVulkanFence *last_fence =
765       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
766
767   if (priv->vertices)
768     gst_vulkan_trash_list_add (self->trash_list,
769         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
770             gst_vulkan_trash_mini_object_unref,
771             (GstMiniObject *) priv->vertices));
772   priv->vertices = NULL;
773
774   gst_vulkan_fence_unref (last_fence);
775 }
776
777 static void
778 clear_render_pass (GstVulkanFullScreenQuad * self)
779 {
780   GstVulkanFence *last_fence =
781       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
782
783   if (self->render_pass)
784     gst_vulkan_trash_list_add (self->trash_list,
785         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
786             gst_vulkan_trash_mini_object_unref,
787             (GstMiniObject *) self->render_pass));
788   self->render_pass = NULL;
789
790   gst_vulkan_fence_unref (last_fence);
791 }
792
793 static void
794 clear_pipeline_layout (GstVulkanFullScreenQuad * self)
795 {
796   GstVulkanFence *last_fence =
797       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
798
799   if (self->pipeline_layout)
800     gst_vulkan_trash_list_add (self->trash_list,
801         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
802             gst_vulkan_trash_mini_object_unref,
803             (GstMiniObject *) self->pipeline_layout));
804   self->pipeline_layout = NULL;
805
806   gst_vulkan_fence_unref (last_fence);
807 }
808
809 static void
810 clear_graphics_pipeline (GstVulkanFullScreenQuad * self)
811 {
812   GstVulkanFence *last_fence =
813       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
814
815   if (self->graphics_pipeline)
816     gst_vulkan_trash_list_add (self->trash_list,
817         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
818             gst_vulkan_trash_mini_object_unref,
819             (GstMiniObject *) self->graphics_pipeline));
820   self->graphics_pipeline = NULL;
821
822   gst_vulkan_fence_unref (last_fence);
823 }
824
825 static void
826 clear_descriptor_set_layout (GstVulkanFullScreenQuad * self)
827 {
828   GstVulkanFence *last_fence =
829       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
830
831   if (self->descriptor_set_layout)
832     gst_vulkan_trash_list_add (self->trash_list,
833         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
834             gst_vulkan_trash_mini_object_unref,
835             (GstMiniObject *) self->descriptor_set_layout));
836   self->descriptor_set_layout = NULL;
837
838   gst_vulkan_fence_unref (last_fence);
839 }
840
841 static void
842 destroy_pipeline (GstVulkanFullScreenQuad * self)
843 {
844   GstVulkanFence *last_fence =
845       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
846
847   clear_render_pass (self);
848   clear_pipeline_layout (self);
849   clear_graphics_pipeline (self);
850   clear_descriptor_set_layout (self);
851
852   gst_vulkan_fence_unref (last_fence);
853
854   gst_vulkan_trash_list_gc (self->trash_list);
855 }
856
857 void
858 gst_vulkan_full_screen_quad_init (GstVulkanFullScreenQuad * self)
859 {
860   self->trash_list = gst_vulkan_trash_fence_list_new ();
861 }
862
863 GstVulkanFullScreenQuad *
864 gst_vulkan_full_screen_quad_new (GstVulkanQueue * queue)
865 {
866   GstVulkanFullScreenQuad *self;
867
868   g_return_val_if_fail (GST_IS_VULKAN_QUEUE (queue), NULL);
869
870   self = g_object_new (GST_TYPE_VULKAN_FULL_SCREEN_QUAD, NULL);
871   self->queue = gst_object_ref (queue);
872
873   return self;
874 }
875
/* GObject finalize: queue every GPU resource for deferred destruction,
 * block until all outstanding GPU work referencing them has completed,
 * then release the remaining references. */
static void
gst_vulkan_full_screen_quad_finalize (GObject * object)
{
  GstVulkanFullScreenQuad *self = GST_VULKAN_FULL_SCREEN_QUAD (object);
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);

  /* push every live resource onto the trash list ... */
  destroy_pipeline (self);
  clear_command_pool (self);
  clear_sampler (self);
  clear_framebuffer (self);
  clear_descriptor_set (self);
  clear_descriptor_cache (self);
  clear_shaders (self);
  clear_uniform_data (self);
  clear_index_data (self);
  clear_vertex_data (self);

  /* ... then wait for the GPU and free everything that was queued */
  gst_vulkan_trash_list_wait (self->trash_list, -1);
  gst_vulkan_trash_list_gc (self->trash_list);
  gst_clear_object (&self->trash_list);

  gst_clear_mini_object (((GstMiniObject **) & self->last_fence));

  gst_clear_object (&self->queue);

  gst_clear_buffer (&priv->inbuf);
  gst_clear_buffer (&priv->outbuf);

  G_OBJECT_CLASS (gst_vulkan_full_screen_quad_parent_class)->finalize (object);
}
906
907 static void
908 gst_vulkan_full_screen_quad_class_init (GstVulkanFullScreenQuadClass * klass)
909 {
910   GObjectClass *obj_class = G_OBJECT_CLASS (klass);
911
912   obj_class->finalize = gst_vulkan_full_screen_quad_finalize;
913 }
914
915 gboolean
916 gst_vulkan_full_screen_quad_set_info (GstVulkanFullScreenQuad * self,
917     GstVideoInfo * in_info, GstVideoInfo * out_info)
918 {
919   self->out_info = *out_info;
920   self->in_info = *in_info;
921
922   destroy_pipeline (self);
923   clear_framebuffer (self);
924   clear_descriptor_set (self);
925   clear_descriptor_cache (self);
926   clear_uniform_data (self);
927
928   return TRUE;
929 }
930
931 gboolean
932 gst_vulkan_full_screen_quad_set_input_buffer (GstVulkanFullScreenQuad * self,
933     GstBuffer * buffer, GError ** error)
934 {
935   GstVulkanFullScreenQuadPrivate *priv;
936
937   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
938
939   priv = GET_PRIV (self);
940
941   gst_buffer_replace (&priv->inbuf, buffer);
942   clear_descriptor_set (self);
943   return TRUE;
944 }
945
946 gboolean
947 gst_vulkan_full_screen_quad_set_output_buffer (GstVulkanFullScreenQuad * self,
948     GstBuffer * buffer, GError ** error)
949 {
950   GstVulkanFullScreenQuadPrivate *priv;
951
952   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
953
954   priv = GET_PRIV (self);
955
956   gst_buffer_replace (&priv->outbuf, buffer);
957   clear_framebuffer (self);
958   return TRUE;
959 }
960
/**
 * gst_vulkan_full_screen_quad_set_shaders:
 * @self: a #GstVulkanFullScreenQuad
 * @vert: a vertex-shader #GstVulkanHandle
 * @frag: a fragment-shader #GstVulkanHandle
 *
 * Sets the vertex and fragment shaders to render with.  The existing
 * pipeline is destroyed (deferred via the trash list) since it was built
 * against the previous shader modules.
 *
 * Returns: whether the shaders could be set
 */
gboolean
gst_vulkan_full_screen_quad_set_shaders (GstVulkanFullScreenQuad * self,
    GstVulkanHandle * vert, GstVulkanHandle * frag)
{
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (vert != NULL, FALSE);
  g_return_val_if_fail (vert->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);
  g_return_val_if_fail (frag != NULL, FALSE);
  g_return_val_if_fail (frag->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);

  priv = GET_PRIV (self);

  /* drop the old shader refs and anything compiled from them */
  clear_shaders (self);
  destroy_pipeline (self);

  priv->vert = gst_vulkan_handle_ref (vert);
  priv->frag = gst_vulkan_handle_ref (frag);

  return TRUE;
}
983
984 gboolean
985 gst_vulkan_full_screen_quad_set_uniform_buffer (GstVulkanFullScreenQuad * self,
986     GstMemory * uniforms, GError ** error)
987 {
988   GstVulkanFullScreenQuadPrivate *priv;
989
990   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
991   g_return_val_if_fail (uniforms == NULL
992       || gst_is_vulkan_buffer_memory (uniforms), FALSE);
993
994   priv = GET_PRIV (self);
995
996   clear_uniform_data (self);
997   if (uniforms) {
998     priv->uniforms = gst_memory_ref (uniforms);
999     priv->uniform_size = gst_memory_get_sizes (uniforms, NULL, NULL);
1000   }
1001
1002   return TRUE;
1003 }
1004
1005 gboolean
1006 gst_vulkan_full_screen_quad_set_index_buffer (GstVulkanFullScreenQuad * self,
1007     GstMemory * indices, gsize n_indices, GError ** error)
1008 {
1009   GstVulkanFullScreenQuadPrivate *priv;
1010
1011   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1012   g_return_val_if_fail (indices == NULL
1013       || gst_is_vulkan_buffer_memory (indices), FALSE);
1014
1015   priv = GET_PRIV (self);
1016
1017   clear_index_data (self);
1018   if (indices) {
1019     priv->indices = gst_memory_ref (indices);
1020     priv->n_indices = n_indices;
1021   }
1022
1023   return TRUE;
1024 }
1025
1026 gboolean
1027 gst_vulkan_full_screen_quad_set_vertex_buffer (GstVulkanFullScreenQuad * self,
1028     GstMemory * vertices, GError ** error)
1029 {
1030   GstVulkanFullScreenQuadPrivate *priv;
1031
1032   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1033   g_return_val_if_fail (vertices == NULL
1034       || gst_is_vulkan_buffer_memory (vertices), FALSE);
1035
1036   priv = GET_PRIV (self);
1037
1038   clear_vertex_data (self);
1039   if (vertices) {
1040     priv->vertices = gst_memory_ref (vertices);
1041   }
1042
1043   return TRUE;
1044 }
1045
1046 static GstVulkanImageMemory *
1047 peek_image_from_buffer (GstBuffer * buffer, guint i)
1048 {
1049   GstMemory *mem = gst_buffer_peek_memory (buffer, i);
1050   g_return_val_if_fail (gst_is_vulkan_image_memory (mem), NULL);
1051   return (GstVulkanImageMemory *) mem;
1052 }
1053
1054 static gboolean
1055 ensure_vertex_data (GstVulkanFullScreenQuad * self, GError ** error)
1056 {
1057   GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
1058   GstMapInfo map_info;
1059
1060   if (!priv->vertices) {
1061     priv->vertices = gst_vulkan_buffer_memory_alloc (self->queue->device,
1062         sizeof (vertices), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
1063         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1064         VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1065
1066     if (!gst_memory_map (priv->vertices, &map_info, GST_MAP_WRITE)) {
1067       g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
1068           "Failed to map memory");
1069       goto failure;
1070     }
1071
1072     memcpy (map_info.data, vertices, map_info.size);
1073     gst_memory_unmap (priv->vertices, &map_info);
1074   }
1075
1076   if (!priv->indices) {
1077     priv->indices = gst_vulkan_buffer_memory_alloc (self->queue->device,
1078         sizeof (indices), VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
1079         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1080         VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1081
1082     if (!gst_memory_map (priv->indices, &map_info, GST_MAP_WRITE)) {
1083       g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
1084           "Failed to map memory");
1085       goto failure;
1086     }
1087
1088     memcpy (map_info.data, indices, map_info.size);
1089     gst_memory_unmap (priv->indices, &map_info);
1090
1091     priv->n_indices = G_N_ELEMENTS (indices);
1092   }
1093
1094   return TRUE;
1095
1096 failure:
1097   if (priv->vertices)
1098     gst_memory_unref (priv->vertices);
1099   priv->vertices = NULL;
1100   if (priv->indices)
1101     gst_memory_unref (priv->indices);
1102   priv->indices = NULL;
1103   priv->n_indices = 0;
1104   return FALSE;
1105 }
1106
/**
 * gst_vulkan_full_screen_quad_draw:
 * @self: a #GstVulkanFullScreenQuad
 * @error: a #GError to fill on error
 *
 * Convenience one-shot draw: creates a fence and a command buffer, prepares
 * all lazily-created state, records the draw commands and submits them to
 * the queue.  On success the command buffer's ownership passes to the
 * trash list via _submit().
 *
 * Returns: whether the draw could be performed
 */
gboolean
gst_vulkan_full_screen_quad_draw (GstVulkanFullScreenQuad * self,
    GError ** error)
{
  GstVulkanCommandBuffer *cmd = NULL;
  GstVulkanFence *fence = NULL;
  VkResult err;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  fence = gst_vulkan_device_create_fence (self->queue->device, error);
  if (!fence)
    goto error;

  /* creates pipeline, descriptor set, framebuffer, command pool as needed */
  if (!gst_vulkan_full_screen_quad_prepare_draw (self, fence, error))
    goto error;

  if (!(cmd = gst_vulkan_command_pool_create (self->cmd_pool, error)))
    goto error;

  {
    VkCommandBufferBeginInfo cmd_buf_info = { 0, };

    /* *INDENT-OFF* */
    cmd_buf_info = (VkCommandBufferBeginInfo) {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        .pInheritanceInfo = NULL
    };
    /* *INDENT-ON* */

    /* the command buffer stays locked while being recorded; unlock_error
     * below is only reachable while the lock is held */
    gst_vulkan_command_buffer_lock (cmd);
    err = vkBeginCommandBuffer (cmd->cmd, &cmd_buf_info);
    if (gst_vulkan_error_to_g_error (err, error, "vkBeginCommandBuffer") < 0)
      goto unlock_error;
  }

  if (!gst_vulkan_full_screen_quad_fill_command_buffer (self, cmd, fence,
          error))
    goto unlock_error;

  err = vkEndCommandBuffer (cmd->cmd);
  gst_vulkan_command_buffer_unlock (cmd);
  if (gst_vulkan_error_to_g_error (err, error, "vkEndCommandBuffer") < 0)
    goto error;

  /* on success, submit takes ownership of cmd (deferred unref on fence) */
  if (!gst_vulkan_full_screen_quad_submit (self, cmd, fence, error))
    goto error;

  gst_vulkan_fence_unref (fence);

  return TRUE;

unlock_error:
  gst_vulkan_command_buffer_unlock (cmd);

error:
  gst_clear_mini_object ((GstMiniObject **) & cmd);
  gst_clear_mini_object ((GstMiniObject **) & fence);
  return FALSE;
}
1169
/**
 * gst_vulkan_full_screen_quad_prepare_draw:
 * @self: a #GstVulkanFullScreenQuad
 * @fence: a #GstVulkanFence used to key deferred destruction of the
 *     resources created here
 * @error: a #GError to fill on error
 *
 * Lazily creates everything needed for a draw: graphics pipeline, vertex
 * data, descriptor pool/set (from the input buffer's image views),
 * framebuffer (from the output buffer's image views) and the command pool.
 *
 * Returns: whether the draw could be prepared
 */
gboolean
gst_vulkan_full_screen_quad_prepare_draw (GstVulkanFullScreenQuad * self,
    GstVulkanFence * fence, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;
  GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  int i;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  priv = GET_PRIV (self);

  if (!self->graphics_pipeline)
    if (!create_pipeline (self, error))
      return FALSE;

  if (!ensure_vertex_data (self, error))
    goto error;

  if (!self->descriptor_cache)
    if (!create_descriptor_pool (self, error))
      goto error;

  if (!self->descriptor_set) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Input memory must be a GstVulkanImageMemory");
        goto error;
      }
      in_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
      /* the view reference is handed to the trash list to be released
       * once @fence signals */
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) in_views[i]));
    }
    if (!(self->descriptor_set =
            get_and_update_descriptor_set (self, in_views, error)))
      goto error;
  }

  if (!self->framebuffer) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Output memory must be a GstVulkanImageMemory");
        goto error;
      }
      out_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) out_views[i]));
    }
    if (!create_framebuffer (self, out_views, error))
      goto error;
  }

  if (!self->cmd_pool)
    if (!(self->cmd_pool =
            gst_vulkan_queue_create_command_pool (self->queue, error)))
      goto error;

  return TRUE;

error:
  /* NOTE(review): views already handed to the trash list above appear to be
   * unreffed a second time here — verify trash_list_acquire's ownership
   * semantics (fill_command_buffer's error path does NOT clear its views) */
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & in_views[i]);
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & out_views[i]);
  return FALSE;
}
1246
1247 /**
1248  * gst_vulkan_full_screen_quad_fill_command_buffer:
1249  * @self: a #GstVulkanFullScreenQuad
1250  * @cmd: the #GstVulkanCommandBuffer to fill with commands
1251  * @error: a #GError to fill on error
1252  *
1253  * Returns: whether @cmd could be filled with the necessary commands
1254  */
gboolean
gst_vulkan_full_screen_quad_fill_command_buffer (GstVulkanFullScreenQuad * self,
    GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;
  GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  int i;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (cmd != NULL, FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  priv = GET_PRIV (self);

  /* collect the per-plane image views of the input/output buffers; each
   * view reference is handed to the trash list to be released once @fence
   * signals */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
    GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
    if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
          "Input memory must be a GstVulkanImageMemory");
      goto error;
    }
    in_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, fence,
            gst_vulkan_trash_mini_object_unref, (GstMiniObject *) in_views[i]));
  }
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
    if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
          "Output memory must be a GstVulkanImageMemory");
      goto error;
    }
    out_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) out_views[i]));
  }

  /* transition each input image to SHADER_READ_ONLY_OPTIMAL for sampling,
   * then record the new layout/access/stage into the image's barrier state
   * so the next user continues from the correct state */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier in_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = in_views[i]->image->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
        .oldLayout = in_views[i]->image->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = in_views[i]->image->image,
        .subresourceRange = in_views[i]->image->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd->cmd,
        in_views[i]->image->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, NULL, 0, NULL, 1,
        &in_image_memory_barrier);

    in_views[i]->image->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    in_views[i]->image->barrier.parent.access_flags =
        in_image_memory_barrier.dstAccessMask;
    in_views[i]->image->barrier.image_layout =
        in_image_memory_barrier.newLayout;
  }

  /* transition each output image to COLOR_ATTACHMENT_OPTIMAL for rendering
   * and update its tracked barrier state likewise */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier out_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = out_views[i]->image->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .oldLayout = out_views[i]->image->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = out_views[i]->image->image,
        .subresourceRange = out_views[i]->image->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd->cmd,
        out_views[i]->image->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, NULL, 0, NULL, 1,
        &out_image_memory_barrier);

    out_views[i]->image->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    out_views[i]->image->barrier.parent.access_flags =
        out_image_memory_barrier.dstAccessMask;
    out_views[i]->image->barrier.image_layout =
        out_image_memory_barrier.newLayout;
  }

  /* record the actual indexed quad draw inside a render pass; one clear
   * value per output plane */
  {
    /* *INDENT-OFF* */
    VkClearValue clearColor = {{{ 0.0f, 0.0f, 0.0f, 1.0f }}};
    VkClearValue clearColors[GST_VIDEO_MAX_PLANES] = {
      clearColor, clearColor, clearColor, clearColor,
    };
    VkRenderPassBeginInfo render_pass_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .renderPass = (VkRenderPass) self->render_pass->handle,
        .framebuffer = (VkFramebuffer) self->framebuffer->handle,
        .renderArea.offset = { 0, 0 },
        .renderArea.extent = {
            GST_VIDEO_INFO_WIDTH (&self->out_info),
            GST_VIDEO_INFO_HEIGHT (&self->out_info)
        },
        .clearValueCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
        .pClearValues = clearColors,
    };
    /* *INDENT-ON* */
    VkDeviceSize offsets[] = { 0 };

    vkCmdBindDescriptorSets (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
        (VkPipelineLayout) self->pipeline_layout->handle, 0, 1,
        &self->descriptor_set->set, 0, NULL);

    vkCmdBeginRenderPass (cmd->cmd, &render_pass_info,
        VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
        (VkPipeline) self->graphics_pipeline->handle);
    vkCmdBindVertexBuffers (cmd->cmd, 0, 1,
        &((GstVulkanBufferMemory *) priv->vertices)->buffer, offsets);
    vkCmdBindIndexBuffer (cmd->cmd,
        ((GstVulkanBufferMemory *) priv->indices)->buffer, 0,
        VK_INDEX_TYPE_UINT16);
    vkCmdDrawIndexed (cmd->cmd, priv->n_indices, 1, 0, 0, 0);
    vkCmdEndRenderPass (cmd->cmd);
  }

  return TRUE;

error:
  /* view references taken above are owned by the trash list; nothing to
   * release here */
  return FALSE;
}
1399
1400 /**
1401  * gst_vulkan_full_screen_quad_submit:
1402  * @self: a #GstVulkanFullScreenQuad
1403  * @cmd: (transfer full): a #GstVulkanCommandBuffer to submit
1404  * @fence: a #GstVulkanFence to signal on completion
1405  * @error: a #GError to fill on error
1406  *
1407  * Returns: whether @cmd could be submitted to the queue
1408  */
gboolean
gst_vulkan_full_screen_quad_submit (GstVulkanFullScreenQuad * self,
    GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
{
  VkResult err;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (cmd != NULL, FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  {
    /* *INDENT-OFF* */
    VkSubmitInfo submit_info = {
        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext = NULL,
        .waitSemaphoreCount = 0,
        .pWaitSemaphores = NULL,
        .pWaitDstStageMask = NULL,
        .commandBufferCount = 1,
        .pCommandBuffers = &cmd->cmd,
        .signalSemaphoreCount = 0,
        .pSignalSemaphores = NULL,
    };
    /* *INDENT-ON* */

    /* the queue lock serializes vkQueueSubmit calls from multiple users of
     * this queue */
    gst_vulkan_queue_submit_lock (self->queue);
    err =
        vkQueueSubmit (self->queue->queue, 1, &submit_info,
        GST_VULKAN_FENCE_FENCE (fence));
    gst_vulkan_queue_submit_unlock (self->queue);
    if (gst_vulkan_error_to_g_error (err, error, "vkQueueSubmit") < 0)
      goto error;
  }

  /* hand ownership of @cmd to the trash list; it is unreffed once @fence
   * signals.  On the error path above @cmd is deliberately NOT consumed. */
  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, fence,
          gst_vulkan_trash_mini_object_unref, GST_MINI_OBJECT_CAST (cmd)));

  gst_vulkan_trash_list_gc (self->trash_list);

  /* remember the fence of the most recent submission; destroy_pipeline()
   * et al. use it to defer destruction of still-in-use objects */
  if (self->last_fence)
    gst_vulkan_fence_unref (self->last_fence);
  self->last_fence = gst_vulkan_fence_ref (fence);

  return TRUE;

error:
  return FALSE;
}