4f35c6159d6ebab30f619938d924dd9d3877d575
[platform/upstream/gstreamer.git] / ext / vulkan / vkfullscreenquad.c
1 /*
2  * GStreamer Plugins Vulkan
3  * Copyright (C) 2019 Matthew Waters <matthew@centricular.com>
4  *
5  * This library is free software; you can redistribute it and/or
6  * modify it under the terms of the GNU Library General Public
7  * License as published by the Free Software Foundation; either
8  * version 2 of the License, or (at your option) any later version.
9  *
10  * This library is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13  * Library General Public License for more details.
14  *
15  * You should have received a copy of the GNU Library General Public
16  * License along with this library; if not, write to the
17  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18  * Boston, MA 02110-1301, USA.
19  */
20
21 #ifdef HAVE_CONFIG_H
22 #include "config.h"
23 #endif
24
25 #include "vkfullscreenquad.h"
26 #include "vkelementutils.h"
27
#define GST_CAT_DEFAULT gst_vulkan_full_screen_quad_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);

/* NOTE(review): the public header presumably also typedefs this; confirm the
 * duplicate typedef is intentional */
typedef struct _GstVulkanFullScreenQuad GstVulkanFullScreenQuad;

/* Per-instance private state.  GPU resources referenced here are released
 * through the instance's trash list so they outlive in-flight command
 * buffers (see the clear_*() helpers below). */
struct _GstVulkanFullScreenQuadPrivate
{
  GstBuffer *inbuf;             /* current input buffer (ref held) */
  GstBuffer *outbuf;            /* current output buffer (ref held) */

  GstMemory *vertices;          /* GPU vertex buffer for the quad */
  GstMemory *indices;           /* GPU index buffer for the quad */
  gsize n_indices;
  GstMemory *uniforms;          /* optional uniform buffer (ref held) */
  gsize uniform_size;           /* size in bytes of @uniforms */

  GstMemory *push_constants;
  gsize push_constants_size;

  GstVulkanHandle *vert;        /* vertex shader module (ref held) */
  GstVulkanHandle *frag;        /* fragment shader module (ref held) */
};

G_DEFINE_TYPE_WITH_CODE (GstVulkanFullScreenQuad, gst_vulkan_full_screen_quad,
    GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_vulkan_full_screen_quad_debug,
        "vulkanfullscreenquad", 0, "vulkan fullscreen quad render");
    G_ADD_PRIVATE (GstVulkanFullScreenQuad));

#define GET_PRIV(self) gst_vulkan_full_screen_quad_get_instance_private (self)
57
58 struct Vertex vertices[] = {
59   {-1.0f, -1.0f, 0.0f, 0.0f, 0.0f},
60   {1.0f, -1.0f, 0.0f, 1.0f, 0.0f},
61   {1.0f, 1.0f, 0.0f, 1.0f, 1.0f},
62   {-1.0f, 1.0f, 0.0f, 0.0f, 1.0f},
63 };
64
65 gushort indices[] = {
66   0, 1, 2, 0, 2, 3,
67 };
68
69 static gboolean
70 create_sampler (GstVulkanFullScreenQuad * self, GError ** error)
71 {
72   /* *INDENT-OFF* */
73   VkSamplerCreateInfo samplerInfo = {
74       .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
75       .magFilter = VK_FILTER_LINEAR,
76       .minFilter = VK_FILTER_LINEAR,
77       .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
78       .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
79       .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
80       .anisotropyEnable = VK_FALSE,
81       .maxAnisotropy = 1,
82       .borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
83       .unnormalizedCoordinates = VK_FALSE,
84       .compareEnable = VK_FALSE,
85       .compareOp = VK_COMPARE_OP_ALWAYS,
86       .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
87       .mipLodBias = 0.0f,
88       .minLod = 0.0f,
89       .maxLod = 0.0f
90   };
91   /* *INDENT-ON* */
92   VkSampler sampler;
93   VkResult err;
94
95   err =
96       vkCreateSampler (self->queue->device->device, &samplerInfo, NULL,
97       &sampler);
98   if (gst_vulkan_error_to_g_error (err, error, "vkCreateSampler") < 0) {
99     return FALSE;
100   }
101
102   self->sampler = gst_vulkan_handle_new_wrapped (self->queue->device,
103       GST_VULKAN_HANDLE_TYPE_SAMPLER, (GstVulkanHandleTypedef) sampler,
104       gst_vulkan_handle_free_sampler, NULL);
105
106   return TRUE;
107 }
108
/* Acquires a descriptor set from the cache and writes the current uniform
 * buffer (binding 0, if any) and one combined image sampler per input plane
 * (bindings 1..N) into it.
 *
 * @views: one GstVulkanImageView per plane of the input video info
 *
 * Returns: (transfer full): the updated set, or NULL with @error set. */
static GstVulkanDescriptorSet *
get_and_update_descriptor_set (GstVulkanFullScreenQuad * self,
    GstVulkanImageView ** views, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanDescriptorSet *set;

  /* lazily create the shared sampler on first use */
  if (!self->sampler)
    if (!create_sampler (self, error))
      return NULL;

  if (!(set =
          gst_vulkan_descriptor_cache_acquire (self->descriptor_cache, error)))
    return NULL;

  {
    /* at most one uniform-buffer write plus one image write per plane */
    VkWriteDescriptorSet writes[GST_VIDEO_MAX_PLANES + 1];
    VkDescriptorImageInfo image_info[GST_VIDEO_MAX_PLANES];
    VkDescriptorBufferInfo buffer_info;
    int write_n = 0;
    int i;

    /* *INDENT-OFF* */
    if (priv->uniforms) {
      buffer_info = (VkDescriptorBufferInfo) {
          .buffer = ((GstVulkanBufferMemory *) priv->uniforms)->buffer,
          .offset = 0,
          .range = priv->uniform_size
      };

      writes[write_n++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .pNext = NULL,
          .dstSet = set->set,
          .dstBinding = 0,
          .dstArrayElement = 0,
          .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
          .descriptorCount = 1,
          .pBufferInfo = &buffer_info
      };
    }

    /* bindings are offset by 1: binding 0 is reserved for the uniform
     * buffer (see create_descriptor_set_layout()) */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      image_info[i] = (VkDescriptorImageInfo) {
          .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
          .imageView = views[i]->view,
          .sampler = (VkSampler) self->sampler->handle
      };

      writes[write_n++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .pNext = NULL,
          .dstSet = set->set,
          .dstBinding = i + 1,
          .dstArrayElement = 0,
          .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
          .descriptorCount = 1,
          .pImageInfo = &image_info[i]
      };
    }
    /* *INDENT-ON* */
    vkUpdateDescriptorSets (self->queue->device->device, write_n, writes, 0,
        NULL);
  }

  return set;
}
176
177 static gboolean
178 create_descriptor_set_layout (GstVulkanFullScreenQuad * self, GError ** error)
179 {
180   VkDescriptorSetLayoutBinding bindings[GST_VIDEO_MAX_PLANES + 1] = { {0,} };
181   VkDescriptorSetLayoutCreateInfo layout_info;
182   VkDescriptorSetLayout descriptor_set_layout;
183   int descriptor_n = 0;
184   VkResult err;
185   int i;
186
187   /* *INDENT-OFF* */
188   bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
189       .binding = 0,
190       .descriptorCount = 1,
191       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
192       .pImmutableSamplers = NULL,
193       .stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT
194   };
195   for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
196     bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
197       .binding = i+1,
198       .descriptorCount = 1,
199       .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
200       .pImmutableSamplers = NULL,
201       .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
202     };
203   };
204
205   layout_info = (VkDescriptorSetLayoutCreateInfo) {
206       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
207       .pNext = NULL,
208       .bindingCount = descriptor_n,
209       .pBindings = bindings
210   };
211   /* *INDENT-ON* */
212
213   err =
214       vkCreateDescriptorSetLayout (self->queue->device->device, &layout_info,
215       NULL, &descriptor_set_layout);
216   if (gst_vulkan_error_to_g_error (err, error,
217           "vkCreateDescriptorSetLayout") < 0) {
218     return FALSE;
219   }
220
221   self->descriptor_set_layout =
222       gst_vulkan_handle_new_wrapped (self->queue->device,
223       GST_VULKAN_HANDLE_TYPE_DESCRIPTOR_SET_LAYOUT,
224       (GstVulkanHandleTypedef) descriptor_set_layout,
225       gst_vulkan_handle_free_descriptor_set_layout, NULL);
226
227   return TRUE;
228 }
229
/* Creates the pipeline layout from the (lazily created) descriptor set
 * layout.  No push-constant ranges are declared here.  Stores the wrapped
 * layout in self->pipeline_layout.
 *
 * Returns: TRUE on success, FALSE with @error set on failure. */
static gboolean
create_pipeline_layout (GstVulkanFullScreenQuad * self, GError ** error)
{
  VkPipelineLayoutCreateInfo pipeline_layout_info;
  VkPipelineLayout pipeline_layout;
  VkResult err;

  if (!self->descriptor_set_layout)
    if (!create_descriptor_set_layout (self, error))
      return FALSE;

  /* *INDENT-OFF* */
  pipeline_layout_info = (VkPipelineLayoutCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
      .pNext = NULL,
      .setLayoutCount = 1,
      /* NOTE(review): treats &handle as an array of one VkDescriptorSetLayout;
       * relies on GstVulkanHandle storing the raw Vulkan handle in `handle` */
      .pSetLayouts = (VkDescriptorSetLayout *) &self->descriptor_set_layout->handle,
      .pushConstantRangeCount = 0,
      .pPushConstantRanges = NULL,
  };
  /* *INDENT-ON* */

  err =
      vkCreatePipelineLayout (self->queue->device->device,
      &pipeline_layout_info, NULL, &pipeline_layout);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreatePipelineLayout") < 0) {
    return FALSE;
  }

  self->pipeline_layout = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_PIPELINE_LAYOUT,
      (GstVulkanHandleTypedef) pipeline_layout,
      gst_vulkan_handle_free_pipeline_layout, NULL);

  return TRUE;
}
266
/* Creates a single-subpass render pass with one cleared color attachment per
 * output plane, transitioning UNDEFINED -> COLOR_ATTACHMENT_OPTIMAL.  Stores
 * the wrapped render pass in self->render_pass.
 *
 * Returns: TRUE on success, FALSE with @error set on failure. */
static gboolean
create_render_pass (GstVulkanFullScreenQuad * self, GError ** error)
{
  VkAttachmentDescription color_attachments[GST_VIDEO_MAX_PLANES];
  VkAttachmentReference color_attachment_refs[GST_VIDEO_MAX_PLANES];
  VkRenderPassCreateInfo render_pass_info;
  VkSubpassDescription subpass;
  VkRenderPass render_pass;
  VkResult err;
  int i;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    /* *INDENT-OFF* */
    color_attachments[i] = (VkAttachmentDescription) {
        .format = gst_vulkan_format_from_video_info (&self->out_info, i),
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
        .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
        .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
        .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
        /* FIXME: share this between elements to avoid pipeline barriers */
        .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
    };

    color_attachment_refs[i] = (VkAttachmentReference) {
      .attachment = i,
      .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
    };
    /* *INDENT-ON* */
  }

  /* *INDENT-OFF* */
  subpass = (VkSubpassDescription) {
      .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
      .colorAttachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pColorAttachments = color_attachment_refs
  };

  render_pass_info = (VkRenderPassCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
      .pNext = NULL,
      .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pAttachments = color_attachments,
      .subpassCount = 1,
      .pSubpasses = &subpass
  };
  /* *INDENT-ON* */

  err =
      vkCreateRenderPass (self->queue->device->device, &render_pass_info, NULL,
      &render_pass);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateRenderPass") < 0) {
    return FALSE;
  }

  self->render_pass = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_RENDER_PASS,
      (GstVulkanHandleTypedef) render_pass,
      gst_vulkan_handle_free_render_pass, NULL);

  return TRUE;
}
330
331 static gboolean
332 create_pipeline (GstVulkanFullScreenQuad * self, GError ** error)
333 {
334   GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
335   VkVertexInputBindingDescription vertex_binding;
336   VkVertexInputAttributeDescription attribute_descriptions[2];
337   VkPipelineShaderStageCreateInfo shader_create_info[2];
338   VkPipelineVertexInputStateCreateInfo vertex_input_info;
339   VkPipelineInputAssemblyStateCreateInfo input_assembly;
340   VkPipelineViewportStateCreateInfo viewport_state;
341   VkPipelineRasterizationStateCreateInfo rasterizer;
342   VkPipelineMultisampleStateCreateInfo multisampling;
343   VkPipelineColorBlendAttachmentState
344       color_blend_attachments[GST_VIDEO_MAX_PLANES];
345   VkPipelineColorBlendStateCreateInfo color_blending;
346   VkGraphicsPipelineCreateInfo pipeline_create_info;
347   VkPipeline pipeline;
348   VkResult err;
349
350   if (!priv->vert || !priv->frag) {
351     g_set_error_literal (error, GST_VULKAN_ERROR,
352         VK_ERROR_INITIALIZATION_FAILED, "Missing shader information");
353     return FALSE;
354   }
355
356   if (!self->pipeline_layout)
357     if (!create_pipeline_layout (self, error))
358       return FALSE;
359
360   if (!self->render_pass)
361     if (!create_render_pass (self, error))
362       return FALSE;
363
364   /* *INDENT-OFF* */
365   shader_create_info[0] = (VkPipelineShaderStageCreateInfo) {
366       .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
367       .pNext = NULL,
368       .stage = VK_SHADER_STAGE_VERTEX_BIT,
369       .module = (VkShaderModule) priv->vert->handle,
370       .pName = "main"
371   };
372
373   shader_create_info[1] = (VkPipelineShaderStageCreateInfo) {
374       .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
375       .pNext = NULL,
376       .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
377       .module = (VkShaderModule) priv->frag->handle,
378       .pName = "main"
379   };
380
381   /* *INDENT-OFF* */
382   vertex_binding = (VkVertexInputBindingDescription) {
383       .binding = 0,
384       .stride = sizeof (struct Vertex),
385       .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
386   };
387
388   attribute_descriptions[0] = (VkVertexInputAttributeDescription) {
389       .binding = 0,
390       .location = 0,
391       .format = VK_FORMAT_R32G32B32_SFLOAT,
392       .offset = G_STRUCT_OFFSET (struct Vertex, x)
393   };
394   attribute_descriptions[1] = (VkVertexInputAttributeDescription) {
395       .binding = 0,
396       .location = 1,
397       .format = VK_FORMAT_R32G32_SFLOAT,
398       .offset = G_STRUCT_OFFSET (struct Vertex, s)
399   };
400
401   vertex_input_info = (VkPipelineVertexInputStateCreateInfo) {
402       .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
403       .pNext = NULL,
404       .vertexBindingDescriptionCount = 1,
405       .pVertexBindingDescriptions = &vertex_binding,
406       .vertexAttributeDescriptionCount = 2,
407       .pVertexAttributeDescriptions = attribute_descriptions,
408   };
409
410   input_assembly = (VkPipelineInputAssemblyStateCreateInfo) {
411       .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
412       .pNext = NULL,
413       .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
414       .primitiveRestartEnable = VK_FALSE
415   };
416
417   viewport_state = (VkPipelineViewportStateCreateInfo) {
418       .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
419       .pNext = NULL,
420       .viewportCount = 1,
421       .pViewports = &(VkViewport) {
422           .x = 0.0f,
423           .y = 0.0f,
424           .width = (float) GST_VIDEO_INFO_WIDTH (&self->out_info),
425           .height = (float) GST_VIDEO_INFO_HEIGHT (&self->out_info),
426           .minDepth = 0.0f,
427           .maxDepth = 1.0f
428       },
429       .scissorCount = 1,
430       .pScissors = &(VkRect2D) {
431           .offset = { 0, 0 },
432           .extent = {
433               GST_VIDEO_INFO_WIDTH (&self->out_info),
434               GST_VIDEO_INFO_HEIGHT (&self->out_info)
435           }
436       }
437   };
438
439   rasterizer = (VkPipelineRasterizationStateCreateInfo) {
440       .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
441       .pNext = NULL,
442       .depthClampEnable = VK_FALSE,
443       .rasterizerDiscardEnable = VK_FALSE,
444       .polygonMode = VK_POLYGON_MODE_FILL,
445       .lineWidth = 1.0f,
446       .cullMode = VK_CULL_MODE_NONE,
447       .frontFace = VK_FRONT_FACE_CLOCKWISE,
448       .depthBiasEnable = VK_FALSE
449   };
450
451   multisampling = (VkPipelineMultisampleStateCreateInfo) {
452       .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
453       .pNext = NULL,
454       .sampleShadingEnable = VK_FALSE,
455       .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT
456   };
457
458   color_blend_attachments[0] = (VkPipelineColorBlendAttachmentState) {
459       .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
460       .blendEnable = VK_FALSE
461   };
462   color_blend_attachments[1] = (VkPipelineColorBlendAttachmentState) {
463       .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
464       .blendEnable = VK_FALSE
465   };
466   color_blend_attachments[2] = (VkPipelineColorBlendAttachmentState) {
467       .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
468       .blendEnable = VK_FALSE
469   };
470   color_blend_attachments[3] = (VkPipelineColorBlendAttachmentState) {
471       .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
472       .blendEnable = VK_FALSE
473   };
474
475   color_blending = (VkPipelineColorBlendStateCreateInfo) {
476       .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
477       .pNext = NULL,
478       .logicOpEnable = VK_FALSE,
479       .logicOp = VK_LOGIC_OP_COPY,
480       .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),   
481       .pAttachments = color_blend_attachments,
482       .blendConstants = { 0.0f, 0.0f, 0.0f, 0.0f }
483   };
484
485   pipeline_create_info = (VkGraphicsPipelineCreateInfo) {
486       .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
487       .pNext = NULL,
488       .stageCount = 2,
489       .pStages = shader_create_info,
490       .pVertexInputState = &vertex_input_info,
491       .pInputAssemblyState = &input_assembly,
492       .pViewportState = &viewport_state,
493       .pRasterizationState = &rasterizer,
494       .pMultisampleState = &multisampling,
495       .pColorBlendState = &color_blending,
496       .layout = (VkPipelineLayout) self->pipeline_layout->handle,
497       .renderPass = (VkRenderPass) self->render_pass->handle,
498       .subpass = 0,
499       .basePipelineHandle = VK_NULL_HANDLE
500   };
501   /* *INDENT-ON* */
502
503   err =
504       vkCreateGraphicsPipelines (self->queue->device->device, VK_NULL_HANDLE, 1,
505       &pipeline_create_info, NULL, &pipeline);
506   if (gst_vulkan_error_to_g_error (err, error, "vkCreateGraphicsPipelines") < 0) {
507     return FALSE;
508   }
509
510   self->graphics_pipeline = gst_vulkan_handle_new_wrapped (self->queue->device,
511       GST_VULKAN_HANDLE_TYPE_PIPELINE, (GstVulkanHandleTypedef) pipeline,
512       gst_vulkan_handle_free_pipeline, NULL);
513
514   return TRUE;
515 }
516
/* Creates the descriptor pool and wraps it in a descriptor cache
 * (self->descriptor_cache) keyed on the current descriptor set layout.
 *
 * NOTE(review): the uniform-buffer pool size is only declared when
 * priv->uniforms is already set, yet the set layout always includes a
 * uniform-buffer binding — verify callers set the uniform buffer before the
 * pool is (re)created, otherwise set allocation may exceed the pool.
 *
 * Returns: TRUE on success, FALSE with @error set on failure. */
static gboolean
create_descriptor_pool (GstVulkanFullScreenQuad * self, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  VkDescriptorPoolCreateInfo pool_info;
  gsize max_sets = 32;          /* FIXME: don't hardcode this! */
  guint n_pools = 1;            /* number of entries used in pool_sizes */
  VkDescriptorPoolSize pool_sizes[2];
  VkDescriptorPool pool;
  GstVulkanDescriptorPool *ret;
  VkResult err;

  /* *INDENT-OFF* */
  pool_sizes[0] = (VkDescriptorPoolSize) {
      .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
      .descriptorCount = max_sets * GST_VIDEO_INFO_N_PLANES (&self->in_info),
  };

  if (priv->uniforms) {
    pool_sizes[1] = (VkDescriptorPoolSize) {
        .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        .descriptorCount = max_sets
    };
    n_pools++;
  }

  pool_info = (VkDescriptorPoolCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
      .pNext = NULL,
      .flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
      .poolSizeCount = n_pools,
      .pPoolSizes = pool_sizes,
      .maxSets = max_sets
  };
  /* *INDENT-ON* */

  err =
      vkCreateDescriptorPool (self->queue->device->device, &pool_info, NULL,
      &pool);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateDescriptorPool") < 0) {
    return FALSE;
  }

  /* the cache takes its own ref on the pool */
  ret =
      gst_vulkan_descriptor_pool_new_wrapped (self->queue->device, pool,
      max_sets);
  self->descriptor_cache =
      gst_vulkan_descriptor_cache_new (ret, 1, &self->descriptor_set_layout);
  gst_object_unref (ret);

  return TRUE;
}
569
570 static gboolean
571 create_framebuffer (GstVulkanFullScreenQuad * self, GstVulkanImageView ** views,
572     GError ** error)
573 {
574   VkImageView attachments[GST_VIDEO_MAX_PLANES] = { 0, };
575   VkFramebufferCreateInfo framebuffer_info;
576   VkFramebuffer framebuffer;
577   VkResult err;
578   int i;
579
580   for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
581     attachments[i] = views[i]->view;
582   }
583
584   /* *INDENT-OFF* */
585   framebuffer_info = (VkFramebufferCreateInfo) {
586       .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
587       .pNext = NULL,
588       .renderPass = (VkRenderPass) self->render_pass->handle,
589       .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
590       .pAttachments = attachments,
591       .width = GST_VIDEO_INFO_WIDTH (&self->out_info),
592       .height = GST_VIDEO_INFO_HEIGHT (&self->out_info),
593       .layers = 1
594   };
595   /* *INDENT-ON* */
596
597   err =
598       vkCreateFramebuffer (self->queue->device->device, &framebuffer_info, NULL,
599       &framebuffer);
600   if (gst_vulkan_error_to_g_error (err, error, "vkCreateFramebuffer") < 0) {
601     return FALSE;
602   }
603
604   self->framebuffer = gst_vulkan_handle_new_wrapped (self->queue->device,
605       GST_VULKAN_HANDLE_TYPE_FRAMEBUFFER, (GstVulkanHandleTypedef) framebuffer,
606       gst_vulkan_handle_free_framebuffer, NULL);
607
608   return TRUE;
609 }
610
/* Returns a new fence reference: a ref of the last submitted fence if there
 * is one, otherwise a fresh always-signalled fence.  Caller must unref.
 * Argument and expansion are fully parenthesized for macro hygiene. */
#define LAST_FENCE_OR_ALWAYS_SIGNALLED(self,device) \
    ((self)->last_fence ? gst_vulkan_fence_ref ((self)->last_fence) : \
        gst_vulkan_fence_new_always_signalled (device))
613
/* Returns: (transfer full): the fence for the last submitted command buffer,
 * or an always-signalled fence if nothing was submitted yet. */
GstVulkanFence *
gst_vulkan_full_screen_quad_get_last_fence (GstVulkanFullScreenQuad * self)
{
  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), NULL);

  return LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
}
621
622 static void
623 clear_descriptor_set (GstVulkanFullScreenQuad * self)
624 {
625   GstVulkanFence *last_fence =
626       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
627
628   if (self->descriptor_set)
629     gst_vulkan_trash_list_add (self->trash_list,
630         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
631             gst_vulkan_trash_mini_object_unref,
632             (GstMiniObject *) self->descriptor_set));
633   self->descriptor_set = NULL;
634
635   gst_vulkan_fence_unref (last_fence);
636 }
637
638 static void
639 clear_framebuffer (GstVulkanFullScreenQuad * self)
640 {
641   GstVulkanFence *last_fence =
642       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
643
644   if (self->framebuffer)
645     gst_vulkan_trash_list_add (self->trash_list,
646         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
647             gst_vulkan_trash_mini_object_unref,
648             (GstMiniObject *) self->framebuffer));
649   self->framebuffer = NULL;
650
651   gst_vulkan_fence_unref (last_fence);
652 }
653
654 static void
655 clear_command_pool (GstVulkanFullScreenQuad * self)
656 {
657   GstVulkanFence *last_fence =
658       LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
659
660   if (self->cmd_pool)
661     gst_vulkan_trash_list_add (self->trash_list,
662         gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
663             gst_vulkan_trash_object_unref, (GstObject *) self->cmd_pool));
664   self->cmd_pool = NULL;
665
666   gst_vulkan_fence_unref (last_fence);
667 }
668
/* Drops the shared sampler, deferring its release via the trash list until
 * the last submitted work has completed. */
static void
clear_sampler (GstVulkanFullScreenQuad * self)
{
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->sampler)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->sampler));
  self->sampler = NULL;

  gst_vulkan_fence_unref (last_fence);
}
684
/* Drops the descriptor cache (and its pool), deferring release via the
 * trash list until the last submitted work has completed. */
static void
clear_descriptor_cache (GstVulkanFullScreenQuad * self)
{
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->descriptor_cache)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_object_unref,
            (GstObject *) self->descriptor_cache));
  self->descriptor_cache = NULL;

  gst_vulkan_fence_unref (last_fence);
}
700
/* Drops both shader module refs, deferring release via the trash list until
 * the last submitted work has completed. */
static void
clear_shaders (GstVulkanFullScreenQuad * self)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (priv->vert)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->vert));
  priv->vert = NULL;

  if (priv->frag)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->frag));
  priv->frag = NULL;

  gst_vulkan_fence_unref (last_fence);
}
722
/* Drops the uniform buffer ref (and resets its recorded size), deferring
 * release via the trash list until the last submitted work has completed. */
static void
clear_uniform_data (GstVulkanFullScreenQuad * self)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (priv->uniforms)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) priv->uniforms));
  priv->uniforms = NULL;
  priv->uniform_size = 0;

  gst_vulkan_fence_unref (last_fence);
}
740
/* Tears down every pipeline-related object (render pass, pipeline layout,
 * graphics pipeline, descriptor set layout) via the trash list, then runs a
 * garbage-collection pass to free anything whose fence already signalled. */
static void
destroy_pipeline (GstVulkanFullScreenQuad * self)
{
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->render_pass)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->render_pass));
  self->render_pass = NULL;
  if (self->pipeline_layout)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->pipeline_layout));
  self->pipeline_layout = NULL;
  if (self->graphics_pipeline)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->graphics_pipeline));
  self->graphics_pipeline = NULL;
  if (self->descriptor_set_layout)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->descriptor_set_layout));
  self->descriptor_set_layout = NULL;

  gst_vulkan_fence_unref (last_fence);

  gst_vulkan_trash_list_gc (self->trash_list);
}
776
/* GObject instance init: only sets up the fence-backed trash list; all GPU
 * resources are created lazily. */
void
gst_vulkan_full_screen_quad_init (GstVulkanFullScreenQuad * self)
{
  self->trash_list = gst_vulkan_trash_fence_list_new ();
}
782
/* Creates a new fullscreen quad renderer bound to @queue (a ref is taken).
 *
 * Returns: (transfer full): the new instance, or NULL if @queue is invalid. */
GstVulkanFullScreenQuad *
gst_vulkan_full_screen_quad_new (GstVulkanQueue * queue)
{
  GstVulkanFullScreenQuad *self;

  g_return_val_if_fail (GST_IS_VULKAN_QUEUE (queue), NULL);

  self = g_object_new (GST_TYPE_VULKAN_FULL_SCREEN_QUAD, NULL);
  self->queue = gst_object_ref (queue);

  return self;
}
795
/* GObject finalize: pushes all GPU resources onto the trash list, then
 * blocks until every pending fence signals before freeing the list and the
 * remaining refs.  Order matters: the trash list must outlive everything
 * queued on it, and the queue ref must outlive the trash-list wait. */
static void
gst_vulkan_full_screen_quad_finalize (GObject * object)
{
  GstVulkanFullScreenQuad *self = GST_VULKAN_FULL_SCREEN_QUAD (object);
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);

  destroy_pipeline (self);
  clear_command_pool (self);
  clear_sampler (self);
  clear_framebuffer (self);
  clear_descriptor_set (self);
  clear_descriptor_cache (self);
  clear_shaders (self);
  clear_uniform_data (self);

  /* wait forever (-1) for all deferred destruction to become safe */
  gst_vulkan_trash_list_wait (self->trash_list, -1);
  gst_vulkan_trash_list_gc (self->trash_list);
  gst_clear_object (&self->trash_list);

  gst_clear_mini_object (((GstMiniObject **) & priv->vertices));
  gst_clear_mini_object (((GstMiniObject **) & priv->indices));

  gst_clear_mini_object (((GstMiniObject **) & self->last_fence));

  gst_clear_object (&self->queue);

  gst_clear_buffer (&priv->inbuf);
  gst_clear_buffer (&priv->outbuf);

  G_OBJECT_CLASS (gst_vulkan_full_screen_quad_parent_class)->finalize (object);
}
827
/* GObject class init: only the finalize vfunc is overridden. */
static void
gst_vulkan_full_screen_quad_class_init (GstVulkanFullScreenQuadClass * klass)
{
  GObjectClass *obj_class = G_OBJECT_CLASS (klass);

  obj_class->finalize = gst_vulkan_full_screen_quad_finalize;
}
835
836 gboolean
837 gst_vulkan_full_screen_quad_set_info (GstVulkanFullScreenQuad * self,
838     GstVideoInfo * in_info, GstVideoInfo * out_info)
839 {
840   self->out_info = *out_info;
841   self->in_info = *in_info;
842
843   destroy_pipeline (self);
844   clear_framebuffer (self);
845   clear_descriptor_set (self);
846   clear_descriptor_cache (self);
847   clear_uniform_data (self);
848
849   return TRUE;
850 }
851
/* Replaces the input buffer (ref taken; NULL clears it) and invalidates the
 * descriptor set that referenced the previous input's image views.
 *
 * Returns: TRUE on success. */
gboolean
gst_vulkan_full_screen_quad_set_input_buffer (GstVulkanFullScreenQuad * self,
    GstBuffer * buffer, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  priv = GET_PRIV (self);

  gst_buffer_replace (&priv->inbuf, buffer);
  clear_descriptor_set (self);
  return TRUE;
}
866
/* Replaces the output buffer (ref taken; NULL clears it) and invalidates
 * the framebuffer that wrapped the previous output's image views.
 *
 * Returns: TRUE on success. */
gboolean
gst_vulkan_full_screen_quad_set_output_buffer (GstVulkanFullScreenQuad * self,
    GstBuffer * buffer, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  priv = GET_PRIV (self);

  gst_buffer_replace (&priv->outbuf, buffer);
  clear_framebuffer (self);
  return TRUE;
}
881
/* Sets the vertex and fragment shader modules (refs taken) and destroys the
 * pipeline built from the previous shaders.  The old modules are released
 * safely via the trash list before the new refs are stored.
 *
 * Returns: TRUE on success. */
gboolean
gst_vulkan_full_screen_quad_set_shaders (GstVulkanFullScreenQuad * self,
    GstVulkanHandle * vert, GstVulkanHandle * frag)
{
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (vert != NULL, FALSE);
  g_return_val_if_fail (vert->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);
  g_return_val_if_fail (frag != NULL, FALSE);
  g_return_val_if_fail (frag->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);

  priv = GET_PRIV (self);

  clear_shaders (self);
  destroy_pipeline (self);

  priv->vert = gst_vulkan_handle_ref (vert);
  priv->frag = gst_vulkan_handle_ref (frag);

  return TRUE;
}
904
905 gboolean
906 gst_vulkan_full_screen_quad_set_uniform_buffer (GstVulkanFullScreenQuad * self,
907     GstMemory * uniforms, GError ** error)
908 {
909   GstVulkanFullScreenQuadPrivate *priv;
910
911   g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
912   g_return_val_if_fail (uniforms == NULL
913       || gst_is_vulkan_buffer_memory (uniforms), FALSE);
914
915   priv = GET_PRIV (self);
916
917   clear_uniform_data (self);
918   if (uniforms) {
919     priv->uniforms = gst_memory_ref (uniforms);
920     priv->uniform_size = gst_memory_get_sizes (uniforms, NULL, NULL);
921   }
922
923   return TRUE;
924 }
925
926 static GstVulkanImageMemory *
927 peek_image_from_buffer (GstBuffer * buffer, guint i)
928 {
929   GstMemory *mem = gst_buffer_peek_memory (buffer, i);
930   g_return_val_if_fail (gst_is_vulkan_image_memory (mem), NULL);
931   return (GstVulkanImageMemory *) mem;
932 }
933
934 static gboolean
935 ensure_vertex_data (GstVulkanFullScreenQuad * self, GError ** error)
936 {
937   GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
938   GstMapInfo map_info;
939
940   if (!priv->vertices) {
941     priv->vertices = gst_vulkan_buffer_memory_alloc (self->queue->device,
942         sizeof (vertices), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
943         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
944         VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
945   }
946
947   if (!gst_memory_map (priv->vertices, &map_info, GST_MAP_WRITE)) {
948     g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
949         "Failed to map memory");
950     goto failure;
951   }
952
953   memcpy (map_info.data, vertices, map_info.size);
954   gst_memory_unmap (priv->vertices, &map_info);
955
956   if (!priv->indices) {
957     priv->indices = gst_vulkan_buffer_memory_alloc (self->queue->device,
958         sizeof (indices), VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
959         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
960         VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
961   }
962
963   if (!gst_memory_map (priv->indices, &map_info, GST_MAP_WRITE)) {
964     g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
965         "Failed to map memory");
966     goto failure;
967   }
968
969   memcpy (map_info.data, indices, map_info.size);
970   gst_memory_unmap (priv->indices, &map_info);
971
972   priv->n_indices = G_N_ELEMENTS (indices);
973
974   return TRUE;
975
976 failure:
977   if (priv->vertices)
978     gst_memory_unref (priv->vertices);
979   priv->vertices = NULL;
980   if (priv->indices)
981     gst_memory_unref (priv->indices);
982   priv->indices = NULL;
983   priv->n_indices = 0;
984   return FALSE;
985 }
986
/**
 * gst_vulkan_full_screen_quad_draw:
 * @self: a #GstVulkanFullScreenQuad
 * @error: a #GError filled on error
 *
 * Convenience wrapper performing a complete draw in one call: creates a
 * fence, prepares all GPU state, records the draw into a fresh command
 * buffer and submits it to the queue.  Ownership of the command buffer
 * passes to the internal trash list on successful submission.
 *
 * Returns: whether the draw was successfully submitted
 */
gboolean
gst_vulkan_full_screen_quad_draw (GstVulkanFullScreenQuad * self,
    GError ** error)
{
  GstVulkanCommandBuffer *cmd = NULL;
  GstVulkanFence *fence = NULL;
  VkResult err;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  fence = gst_vulkan_device_create_fence (self->queue->device, error);
  if (!fence)
    goto error;

  /* ensures pipeline, descriptor set, framebuffer and command pool exist */
  if (!gst_vulkan_full_screen_quad_prepare_draw (self, fence, error))
    goto error;

  if (!(cmd = gst_vulkan_command_pool_create (self->cmd_pool, error)))
    goto error;

  {
    VkCommandBufferBeginInfo cmd_buf_info = { 0, };

    /* *INDENT-OFF* */
    cmd_buf_info = (VkCommandBufferBeginInfo) {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        .pInheritanceInfo = NULL
    };
    /* *INDENT-ON* */

    /* the command buffer stays locked while we record into it */
    gst_vulkan_command_buffer_lock (cmd);
    err = vkBeginCommandBuffer (cmd->cmd, &cmd_buf_info);
    if (gst_vulkan_error_to_g_error (err, error, "vkBeginCommandBuffer") < 0)
      goto unlock_error;
  }

  if (!gst_vulkan_full_screen_quad_fill_command_buffer (self, cmd, fence,
          error))
    goto unlock_error;

  err = vkEndCommandBuffer (cmd->cmd);
  gst_vulkan_command_buffer_unlock (cmd);
  if (gst_vulkan_error_to_g_error (err, error, "vkEndCommandBuffer") < 0)
    goto error;

  /* submit() takes ownership of @cmd (transfer full) */
  if (!gst_vulkan_full_screen_quad_submit (self, cmd, fence, error))
    goto error;

  gst_vulkan_fence_unref (fence);

  return TRUE;

unlock_error:
  gst_vulkan_command_buffer_unlock (cmd);

error:
  gst_clear_mini_object ((GstMiniObject **) & cmd);
  gst_clear_mini_object ((GstMiniObject **) & fence);
  return FALSE;
}
1049
/**
 * gst_vulkan_full_screen_quad_prepare_draw:
 * @self: a #GstVulkanFullScreenQuad
 * @fence: a #GstVulkanFence used to keep acquired image views alive until
 *         the GPU has finished with them
 * @error: a #GError filled on error
 *
 * Lazily creates every resource needed for a draw that does not already
 * exist: the graphics pipeline, vertex/index data, descriptor pool and set
 * (from the current input buffer), framebuffer (from the current output
 * buffer) and the command pool.
 *
 * Returns: whether the draw could be prepared
 */
gboolean
gst_vulkan_full_screen_quad_prepare_draw (GstVulkanFullScreenQuad * self,
    GstVulkanFence * fence, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;
  GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  int i;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  priv = GET_PRIV (self);

  if (!self->graphics_pipeline)
    if (!create_pipeline (self, error))
      return FALSE;

  if (!ensure_vertex_data (self, error))
    goto error;

  if (!self->descriptor_cache)
    if (!create_descriptor_pool (self, error))
      goto error;

  /* (re)build the descriptor set from one image view per input plane */
  if (!self->descriptor_set) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Input memory must be a GstVulkanImageMemory");
        goto error;
      }
      in_views[i] = get_or_create_image_view (img_mem);
      /* the trash list keeps the view alive until @fence signals */
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) in_views[i]));
    }
    if (!(self->descriptor_set =
            get_and_update_descriptor_set (self, in_views, error)))
      goto error;
  }

  /* (re)build the framebuffer from one image view per output plane */
  if (!self->framebuffer) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Output memory must be a GstVulkanImageMemory");
        goto error;
      }
      out_views[i] = get_or_create_image_view (img_mem);
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) out_views[i]));
    }
    if (!create_framebuffer (self, out_views, error))
      goto error;
  }

  if (!self->cmd_pool)
    if (!(self->cmd_pool =
            gst_vulkan_queue_create_command_pool (self->queue, error)))
      goto error;

  return TRUE;

error:
  /* NOTE(review): views acquired above were already handed to the trash
   * list, which appears to own their reference via
   * gst_vulkan_trash_mini_object_unref; clearing them here as well looks
   * like it could drop an extra reference — verify ownership semantics of
   * gst_vulkan_trash_list_acquire() */
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & in_views[i]);
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & out_views[i]);
  return FALSE;
}
1126
/**
 * gst_vulkan_full_screen_quad_fill_command_buffer:
 * @self: a #GstVulkanFullScreenQuad
 * @cmd: the #GstVulkanCommandBuffer to fill with commands
 * @fence: the #GstVulkanFence that will be signalled when the commands have
 *         completed; used to keep the input/output image views alive
 * @error: a #GError to fill on error
 *
 * Returns: whether @cmd could be filled with the necessary commands
 */
gboolean
gst_vulkan_full_screen_quad_fill_command_buffer (GstVulkanFullScreenQuad * self,
    GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;
  GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  int i;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (cmd != NULL, FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  priv = GET_PRIV (self);

  /* acquire an image view per input plane; the trash list holds the view's
   * reference until @fence signals */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
    GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
    if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
          "Input memory must be a GstVulkanImageMemory");
      goto error;
    }
    in_views[i] = get_or_create_image_view (img_mem);
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, fence,
            gst_vulkan_trash_mini_object_unref, (GstMiniObject *) in_views[i]));
  }
  /* likewise for the output planes */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
    if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
          "Output memory must be a GstVulkanImageMemory");
      goto error;
    }
    out_views[i] = get_or_create_image_view (img_mem);
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) out_views[i]));
  }

  /* transition each input image from its currently-tracked layout/access to
   * shader-readable for sampling in the fragment shader */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier in_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = in_views[i]->image->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
        .oldLayout = in_views[i]->image->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = in_views[i]->image->image,
        .subresourceRange = in_views[i]->image->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd->cmd,
        in_views[i]->image->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, NULL, 0, NULL, 1,
        &in_image_memory_barrier);

    /* record the new state on the image so the next barrier chains from it */
    in_views[i]->image->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    in_views[i]->image->barrier.parent.access_flags =
        in_image_memory_barrier.dstAccessMask;
    in_views[i]->image->barrier.image_layout =
        in_image_memory_barrier.newLayout;
  }

  /* transition each output image to color-attachment layout for rendering */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier out_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = out_views[i]->image->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .oldLayout = out_views[i]->image->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = out_views[i]->image->image,
        .subresourceRange = out_views[i]->image->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd->cmd,
        out_views[i]->image->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, NULL, 0, NULL, 1,
        &out_image_memory_barrier);

    out_views[i]->image->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    out_views[i]->image->barrier.parent.access_flags =
        out_image_memory_barrier.dstAccessMask;
    out_views[i]->image->barrier.image_layout =
        out_image_memory_barrier.newLayout;
  }

  /* record the render pass drawing the indexed fullscreen quad */
  {
    /* *INDENT-OFF* */
    VkClearValue clearColor = {{{ 0.0f, 0.0f, 0.0f, 1.0f }}};
    VkClearValue clearColors[GST_VIDEO_MAX_PLANES] = {
      clearColor, clearColor, clearColor, clearColor,
    };
    VkRenderPassBeginInfo render_pass_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .renderPass = (VkRenderPass) self->render_pass->handle,
        .framebuffer = (VkFramebuffer) self->framebuffer->handle,
        .renderArea.offset = { 0, 0 },
        .renderArea.extent = {
            GST_VIDEO_INFO_WIDTH (&self->out_info),
            GST_VIDEO_INFO_HEIGHT (&self->out_info)
        },
        .clearValueCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
        .pClearValues = clearColors,
    };
    /* *INDENT-ON* */
    VkDeviceSize offsets[] = { 0 };

    vkCmdBindDescriptorSets (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
        (VkPipelineLayout) self->pipeline_layout->handle, 0, 1,
        &self->descriptor_set->set, 0, NULL);

    vkCmdBeginRenderPass (cmd->cmd, &render_pass_info,
        VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
        (VkPipeline) self->graphics_pipeline->handle);
    vkCmdBindVertexBuffers (cmd->cmd, 0, 1,
        &((GstVulkanBufferMemory *) priv->vertices)->buffer, offsets);
    vkCmdBindIndexBuffer (cmd->cmd,
        ((GstVulkanBufferMemory *) priv->indices)->buffer, 0,
        VK_INDEX_TYPE_UINT16);
    vkCmdDrawIndexed (cmd->cmd, priv->n_indices, 1, 0, 0, 0);
    vkCmdEndRenderPass (cmd->cmd);
  }

  return TRUE;

error:
  /* acquired views are owned by the trash list and released via @fence */
  return FALSE;
}
1279
1280 /**
1281  * gst_vulkan_full_screen_quad_submit:
1282  * @self: a #GstVulkanFullScreenQuad
1283  * @cmd: (transfer full): a #GstVulkanCommandBuffer to submit
1284  * @fence: a #GstVulkanFence to signal on completion
1285  * @error: a #GError to fill on error
1286  *
1287  * Returns: whether @cmd could be submitted to the queue
1288  */
gboolean
gst_vulkan_full_screen_quad_submit (GstVulkanFullScreenQuad * self,
    GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
{
  VkResult err;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (cmd != NULL, FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  {
    /* *INDENT-OFF* */
    VkSubmitInfo submit_info = {
        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext = NULL,
        .waitSemaphoreCount = 0,
        .pWaitSemaphores = NULL,
        .pWaitDstStageMask = NULL,
        .commandBufferCount = 1,
        .pCommandBuffers = &cmd->cmd,
        .signalSemaphoreCount = 0,
        .pSignalSemaphores = NULL,
    };
    /* *INDENT-ON* */

    /* submissions to the shared queue must be serialized */
    gst_vulkan_queue_submit_lock (self->queue);
    err =
        vkQueueSubmit (self->queue->queue, 1, &submit_info,
        GST_VULKAN_FENCE_FENCE (fence));
    gst_vulkan_queue_submit_unlock (self->queue);
    if (gst_vulkan_error_to_g_error (err, error, "vkQueueSubmit") < 0)
      goto error;
  }

  /* hand the (transfer full) @cmd reference to the trash list; it is
   * released once @fence signals */
  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, fence,
          gst_vulkan_trash_mini_object_unref, GST_MINI_OBJECT_CAST (cmd)));

  /* opportunistically free resources whose fences have already signalled */
  gst_vulkan_trash_list_gc (self->trash_list);

  /* remember the fence of the most recent submission */
  if (self->last_fence)
    gst_vulkan_fence_unref (self->last_fence);
  self->last_fence = gst_vulkan_fence_ref (fence);

  return TRUE;

error:
  return FALSE;
}