2 * Copyright (c) 2017-2019, 2021 Arm Limited.
4 * SPDX-License-Identifier: MIT
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
25 #define VK_USE_PLATFORM_WAYLAND_KHR 1
27 #include "swapchain.hpp"
28 #include "swapchain_wl_helpers.hpp"
37 #include <drm_fourcc.h>
39 #include "util/drm/drm_utils.hpp"
#if VULKAN_WSI_DEBUG > 0
/* Debug builds: print the source location, then the caller-supplied printf-style
 * message. The original passed __FILE__ as the fprintf format string, which
 * discarded __LINE__/__func__/the message entirely and is undefined behavior if
 * the file path ever contains a '%'. */
#define WSI_PRINT_ERROR(...)                                         \
   do                                                                \
   {                                                                 \
      fprintf(stderr, "%s:%d (%s): ", __FILE__, __LINE__, __func__); \
      fprintf(stderr, __VA_ARGS__);                                  \
   } while (0)
#else
/* Release builds: error reporting is compiled out. */
#define WSI_PRINT_ERROR(...) (void)0
#endif
52 struct swapchain::wayland_image_data
59 VkDeviceMemory memory;
62 swapchain::swapchain(layer::device_private_data &dev_data, const VkAllocationCallbacks *pAllocator)
63 : swapchain_base(dev_data, pAllocator)
66 , m_dmabuf_interface(nullptr)
67 , m_surface_queue(nullptr)
68 , m_buffer_queue(nullptr)
69 , m_present_pending(false)
73 swapchain::~swapchain()
77 if (m_dmabuf_interface != nullptr)
79 zwp_linux_dmabuf_v1_destroy(m_dmabuf_interface);
82 res = wsialloc_delete(&m_wsi_allocator);
85 WSI_PRINT_ERROR("error deleting the allocator: %d\n", res);
87 if (m_surface_queue != nullptr)
89 wl_event_queue_destroy(m_surface_queue);
91 if (m_buffer_queue != nullptr)
93 wl_event_queue_destroy(m_buffer_queue);
97 static void roundtrip_cb_done(void *data, wl_callback *cb, uint32_t cb_data)
101 bool *cb_recvd = reinterpret_cast<bool *>(data);
107 int swapchain::roundtrip()
110 const wl_callback_listener listener = { roundtrip_cb_done };
111 bool cb_recvd = false;
113 wl_callback *cb = wl_display_sync(m_display);
116 WSI_PRINT_ERROR("failed to create wl_display::sync callback\n");
121 wl_proxy_set_queue((wl_proxy *)cb, m_surface_queue);
123 res = wl_callback_add_listener(cb, &listener, &cb_recvd);
126 WSI_PRINT_ERROR("error setting wl_display::sync callback listener\n");
130 res = wl_display_flush(m_display);
133 WSI_PRINT_ERROR("error performing a flush on the display\n");
138 res = dispatch_queue(m_display, m_surface_queue, 1000);
139 } while (res > 0 && !cb_recvd);
143 WSI_PRINT_ERROR("error dispatching on the surface queue\n");
148 WSI_PRINT_ERROR("timeout waiting for roundtrip callback\n");
155 wl_callback_destroy(cb);
163 wl_event_queue *queue;
166 VkResult swapchain::init_platform(VkDevice device, const VkSwapchainCreateInfoKHR *pSwapchainCreateInfo)
168 VkIcdSurfaceWayland *vk_surf = reinterpret_cast<VkIcdSurfaceWayland *>(pSwapchainCreateInfo->surface);
170 m_display = vk_surf->display;
171 m_surface = vk_surf->surface;
173 m_surface_queue = wl_display_create_queue(m_display);
174 if (m_surface_queue == nullptr)
176 WSI_PRINT_ERROR("Failed to create wl surface display_queue.\n");
177 return VK_ERROR_INITIALIZATION_FAILED;
180 m_buffer_queue = wl_display_create_queue(m_display);
181 if (m_buffer_queue == nullptr)
183 WSI_PRINT_ERROR("Failed to create wl buffer display_queue.\n");
184 return VK_ERROR_INITIALIZATION_FAILED;
187 wl_registry *registry = wl_display_get_registry(m_display);
188 if (registry == nullptr)
190 WSI_PRINT_ERROR("Failed to get wl display registry.\n");
191 return VK_ERROR_INITIALIZATION_FAILED;
194 wl_proxy_set_queue((struct wl_proxy *)registry, m_surface_queue);
196 const wl_registry_listener registry_listener = { registry_handler };
197 int res = wl_registry_add_listener(registry, ®istry_listener, &m_dmabuf_interface);
200 WSI_PRINT_ERROR("Failed to add registry listener.\n");
201 return VK_ERROR_INITIALIZATION_FAILED;
207 WSI_PRINT_ERROR("Roundtrip failed.\n");
208 return VK_ERROR_INITIALIZATION_FAILED;
211 /* we should have the dma_buf interface by now */
212 assert(m_dmabuf_interface);
214 wl_registry_destroy(registry);
216 res = wsialloc_new(-1, &m_wsi_allocator);
219 WSI_PRINT_ERROR("Failed to create wsi allocator.\n");
220 return VK_ERROR_INITIALIZATION_FAILED;
226 static void create_succeeded(void *data, struct zwp_linux_buffer_params_v1 *params, struct wl_buffer *buffer)
228 struct wl_buffer **wayland_buffer = (struct wl_buffer **)data;
229 *wayland_buffer = buffer;
232 static const struct zwp_linux_buffer_params_v1_listener params_listener = { create_succeeded, NULL };
234 static void buffer_release(void *data, struct wl_buffer *wayl_buffer)
236 swapchain *sc = (swapchain *)data;
237 sc->release_buffer(wayl_buffer);
240 void swapchain::release_buffer(struct wl_buffer *wayl_buffer)
243 for (i = 0; i < m_swapchain_images.size(); i++)
245 wayland_image_data *data;
246 data = (wayland_image_data *)m_swapchain_images[i].data;
247 if (data->buffer == wayl_buffer)
254 /* check we found a buffer to unpresent */
255 assert(i < m_swapchain_images.size());
258 static struct wl_buffer_listener buffer_listener = { buffer_release };
260 VkResult swapchain::allocate_image(const VkImageCreateInfo &image_create_info, wayland_image_data *image_data,
263 VkResult result = VK_SUCCESS;
265 image_data->buffer = nullptr;
266 image_data->buffer_fd = -1;
267 image_data->memory = VK_NULL_HANDLE;
269 VkExternalImageFormatPropertiesKHR external_props = {};
270 external_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR;
272 VkImageFormatProperties2KHR format_props = {};
273 format_props.sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR;
274 format_props.pNext = &external_props;
276 VkPhysicalDeviceExternalImageFormatInfoKHR external_info = {};
277 external_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR;
278 external_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
280 VkPhysicalDeviceImageDrmFormatModifierInfoEXT drm_mod_info = {};
281 drm_mod_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
282 drm_mod_info.pNext = &external_info;
283 drm_mod_info.drmFormatModifier = DRM_FORMAT_MOD_LINEAR;
284 drm_mod_info.sharingMode = image_create_info.sharingMode;
285 drm_mod_info.queueFamilyIndexCount = image_create_info.queueFamilyIndexCount;
286 drm_mod_info.pQueueFamilyIndices = image_create_info.pQueueFamilyIndices;
288 VkPhysicalDeviceImageFormatInfo2KHR info = {};
289 info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR;
290 info.pNext = &drm_mod_info;
291 info.format = image_create_info.format;
292 info.type = image_create_info.imageType;
293 info.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
294 info.usage = image_create_info.usage;
295 info.flags = image_create_info.flags;
297 result = m_device_data.instance_data.disp.GetPhysicalDeviceImageFormatProperties2KHR(m_device_data.physical_device,
298 &info, &format_props);
300 if (result != VK_SUCCESS)
302 WSI_PRINT_ERROR("Failed to get physical device format support.\n");
305 if (format_props.imageFormatProperties.maxExtent.width < image_create_info.extent.width ||
306 format_props.imageFormatProperties.maxExtent.height < image_create_info.extent.height ||
307 format_props.imageFormatProperties.maxExtent.depth < image_create_info.extent.depth)
309 WSI_PRINT_ERROR("Physical device does not support required extent.\n");
310 return VK_ERROR_INITIALIZATION_FAILED;
312 if (format_props.imageFormatProperties.maxMipLevels < image_create_info.mipLevels ||
313 format_props.imageFormatProperties.maxArrayLayers < image_create_info.arrayLayers)
315 WSI_PRINT_ERROR("Physical device does not support required array layers or mip levels.\n");
316 return VK_ERROR_INITIALIZATION_FAILED;
318 if ((format_props.imageFormatProperties.sampleCounts & image_create_info.samples) != image_create_info.samples)
320 WSI_PRINT_ERROR("Physical device does not support required sample count.\n");
321 return VK_ERROR_INITIALIZATION_FAILED;
324 if (external_props.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR)
326 /* TODO: Handle exportable images which use ICD allocated memory in preference to an external allocator. */
328 if (!(external_props.externalMemoryProperties.externalMemoryFeatures &
329 VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR))
331 WSI_PRINT_ERROR("Export/Import not supported.\n");
332 return VK_ERROR_INITIALIZATION_FAILED;
336 /* TODO: Handle Dedicated allocation bit. */
337 uint32_t fourcc = util::drm::vk_to_drm_format(image_create_info.format);
340 wsialloc_alloc(&m_wsi_allocator, fourcc, image_create_info.extent.width, image_create_info.extent.height,
341 &image_data->stride, &image_data->buffer_fd, &image_data->offset, nullptr);
344 WSI_PRINT_ERROR("Failed allocation of DMA Buffer.\n");
345 return VK_ERROR_OUT_OF_HOST_MEMORY;
349 assert(image_data->stride >= 0);
350 VkSubresourceLayout image_layout = {};
351 image_layout.offset = image_data->offset;
352 image_layout.rowPitch = static_cast<uint32_t>(image_data->stride);
353 VkImageDrmFormatModifierExplicitCreateInfoEXT drm_mod_info = {};
354 drm_mod_info.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
355 drm_mod_info.pNext = image_create_info.pNext;
356 drm_mod_info.drmFormatModifier = DRM_FORMAT_MOD_LINEAR;
357 drm_mod_info.drmFormatModifierPlaneCount = 1;
358 drm_mod_info.pPlaneLayouts = &image_layout;
360 VkExternalMemoryImageCreateInfoKHR external_info = {};
361 external_info.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR;
362 external_info.pNext = &drm_mod_info;
363 external_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
365 VkImageCreateInfo image_info = image_create_info;
366 image_info.pNext = &external_info;
367 image_info.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
368 result = m_device_data.disp.CreateImage(m_device, &image_info, get_allocation_callbacks(), image);
370 if (result != VK_SUCCESS)
372 WSI_PRINT_ERROR("Image creation failed.\n");
376 VkMemoryFdPropertiesKHR mem_props = {};
377 mem_props.sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR;
379 result = m_device_data.disp.GetMemoryFdPropertiesKHR(m_device, VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
380 image_data->buffer_fd, &mem_props);
381 if (result != VK_SUCCESS)
383 WSI_PRINT_ERROR("Error querying Fd properties.\n");
388 for (mem_idx = 0; mem_idx < VK_MAX_MEMORY_TYPES; mem_idx++)
390 if (mem_props.memoryTypeBits & (1 << mem_idx))
395 off_t dma_buf_size = lseek(image_data->buffer_fd, 0, SEEK_END);
396 if (dma_buf_size < 0)
398 WSI_PRINT_ERROR("Failed to get DMA Buf size.\n");
399 return VK_ERROR_OUT_OF_HOST_MEMORY;
402 VkImportMemoryFdInfoKHR import_mem_info = {};
403 import_mem_info.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR;
404 import_mem_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
405 import_mem_info.fd = image_data->buffer_fd;
407 VkMemoryAllocateInfo alloc_info = {};
408 alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
409 alloc_info.pNext = &import_mem_info;
410 alloc_info.allocationSize = static_cast<uint64_t>(dma_buf_size);
411 alloc_info.memoryTypeIndex = mem_idx;
413 result = m_device_data.disp.AllocateMemory(m_device, &alloc_info, get_allocation_callbacks(), &image_data->memory);
415 if (result != VK_SUCCESS)
417 WSI_PRINT_ERROR("Failed to import memory.\n");
420 result = m_device_data.disp.BindImageMemory(m_device, *image, image_data->memory, 0);
426 VkResult swapchain::create_image(const VkImageCreateInfo &image_create_info, swapchain_image &image)
428 uint32_t fourcc = util::drm::vk_to_drm_format(image_create_info.format);
431 VkResult result = VK_SUCCESS;
433 wayland_image_data *image_data = nullptr;
434 VkFenceCreateInfo fenceInfo = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0 };
436 /* Create image_data */
437 if (get_allocation_callbacks() != nullptr)
439 image_data = static_cast<wayland_image_data *>(
440 get_allocation_callbacks()->pfnAllocation(get_allocation_callbacks()->pUserData, sizeof(wayland_image_data),
441 alignof(wayland_image_data), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
445 image_data = static_cast<wayland_image_data *>(malloc(sizeof(wayland_image_data)));
447 if (image_data == nullptr)
449 result = VK_ERROR_OUT_OF_HOST_MEMORY;
453 image.data = reinterpret_cast<void *>(image_data);
454 image.status = swapchain_image::FREE;
455 result = allocate_image(image_create_info, image_data, &image.image);
456 if (result != VK_SUCCESS)
458 WSI_PRINT_ERROR("Failed to allocate image.\n");
462 /* create a wl_buffer using the dma_buf protocol */
463 struct zwp_linux_buffer_params_v1 *params;
464 params = zwp_linux_dmabuf_v1_create_params(m_dmabuf_interface);
465 zwp_linux_buffer_params_v1_add(params, image_data->buffer_fd, 0, image_data->offset, image_data->stride, 0, 0);
466 wl_proxy_set_queue((struct wl_proxy *)params, m_surface_queue);
467 res = zwp_linux_buffer_params_v1_add_listener(params, ¶ms_listener, &image_data->buffer);
470 result = VK_ERROR_INITIALIZATION_FAILED;
473 zwp_linux_buffer_params_v1_create(params, image_create_info.extent.width, image_create_info.extent.height, fourcc,
476 /* TODO: don't roundtrip - we should be able to send the create request now,
477 * and only wait for it on first present. only do this once, not for all buffers created */
481 result = VK_ERROR_INITIALIZATION_FAILED;
485 /* should now have a wl_buffer */
486 assert(image_data->buffer);
487 zwp_linux_buffer_params_v1_destroy(params);
488 wl_proxy_set_queue((struct wl_proxy *)image_data->buffer, m_buffer_queue);
489 res = wl_buffer_add_listener(image_data->buffer, &buffer_listener, this);
492 result = VK_ERROR_INITIALIZATION_FAILED;
496 /* Initialize presentation fence. */
497 result = m_device_data.disp.CreateFence(m_device, &fenceInfo, get_allocation_callbacks(), &image.present_fence);
500 if (result != VK_SUCCESS)
502 destroy_image(image);
508 static void frame_done(void *data, wl_callback *cb, uint32_t cb_data)
512 bool *present_pending = reinterpret_cast<bool *>(data);
513 assert(present_pending);
515 *present_pending = false;
517 wl_callback_destroy(cb);
520 void swapchain::present_image(uint32_t pendingIndex)
523 wayland_image_data *image_data = reinterpret_cast<wayland_image_data *>(m_swapchain_images[pendingIndex].data);
524 /* if a frame is already pending, wait for a hint to present again */
525 if (m_present_pending)
527 assert(m_present_mode == VK_PRESENT_MODE_FIFO_KHR);
530 /* block waiting for the compositor to return the wl_surface::frame
531 * callback. We may want to change this to timeout after a period of
532 * time if the compositor isn't responding (perhaps because the
535 res = dispatch_queue(m_display, m_surface_queue, -1);
536 } while (res > 0 && m_present_pending);
540 WSI_PRINT_ERROR("error waiting for Wayland compositor frame hint\n");
542 /* try to present anyway */
546 wl_surface_attach(m_surface, image_data->buffer, 0, 0);
547 /* TODO: work out damage */
548 wl_surface_damage(m_surface, 0, 0, INT32_MAX, INT32_MAX);
550 if (m_present_mode == VK_PRESENT_MODE_FIFO_KHR)
552 /* request a hint when we can present the _next_ frame */
553 wl_callback *cb = wl_surface_frame(m_surface);
556 wl_proxy_set_queue((wl_proxy *)cb, m_surface_queue);
557 static const wl_callback_listener frame_listener = { frame_done };
558 m_present_pending = true;
559 wl_callback_add_listener(cb, &frame_listener, &m_present_pending);
564 assert(m_present_mode == VK_PRESENT_MODE_MAILBOX_KHR);
565 /* weston only _queues_ wl_buffer::release events. This means when the
566 * compositor flushes the client it only sends the events if some other events
569 * As such we have to request a sync callback - we discard it straight away
570 * as we don't actually need the callback, but it means the
571 * wl_buffer::release event is actually sent.
573 wl_callback *cb = wl_display_sync(m_display);
577 wl_callback_destroy(cb);
581 wl_surface_commit(m_surface);
582 res = wl_display_flush(m_display);
585 WSI_PRINT_ERROR("error flushing the display\n");
586 /* Setting the swapchain as invalid */
591 void swapchain::destroy_image(swapchain_image &image)
593 if (image.status != swapchain_image::INVALID)
595 if (image.present_fence != VK_NULL_HANDLE)
597 m_device_data.disp.DestroyFence(m_device, image.present_fence, get_allocation_callbacks());
598 image.present_fence = VK_NULL_HANDLE;
601 if (image.image != VK_NULL_HANDLE)
603 m_device_data.disp.DestroyImage(m_device, image.image, get_allocation_callbacks());
604 image.image = VK_NULL_HANDLE;
607 if (image.data != nullptr)
609 auto image_data = reinterpret_cast<wayland_image_data *>(image.data);
610 if (image_data->buffer != nullptr)
612 wl_buffer_destroy(image_data->buffer);
614 if (image_data->memory != VK_NULL_HANDLE)
616 m_device_data.disp.FreeMemory(m_device, image_data->memory, get_allocation_callbacks());
618 else if (image_data->buffer_fd >= 0)
620 close(image_data->buffer_fd);
623 if (get_allocation_callbacks() != nullptr)
625 get_allocation_callbacks()->pfnFree(get_allocation_callbacks()->pUserData, image_data);
631 image.data = nullptr;
634 image.status = swapchain_image::INVALID;
637 bool swapchain::free_image_found()
639 for (auto &img : m_swapchain_images)
641 if (img.status == swapchain_image::FREE)
649 VkResult swapchain::get_free_buffer(uint64_t *timeout)
653 if (*timeout >= INT_MAX * 1000llu * 1000llu)
655 ms_timeout = INT_MAX;
659 ms_timeout = *timeout / 1000llu / 1000llu;
662 /* The current dispatch_queue implementation will return if any
663 * events are returned, even if no events are dispatched to the buffer
664 * queue. Therefore dispatch repeatedly until a buffer has been freed.
668 res = dispatch_queue(m_display, m_buffer_queue, ms_timeout);
669 } while (!free_image_found() && res > 0);
689 return VK_ERROR_DEVICE_LOST;
693 } // namespace wayland