/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <wayland-client.h>

#include <assert.h>
#include <errno.h>
#include <poll.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>
#include <time.h>
#include <unistd.h>

#include "drm-uapi/drm_fourcc.h"

#include "vk_util.h"
#include "wsi_common_private.h"
#include "wsi_common_wayland.h"
#include "wayland-drm-client-protocol.h"
#include "linux-dmabuf-unstable-v1-client-protocol.h"

#include <util/compiler.h>
#include <util/hash_table.h>
#include <util/timespec.h>
#include <util/u_vector.h>
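
/* Wayland WSI backend: wraps a wl_display connection in a private event
 * queue, discovers the wl_drm and zwp_linux_dmabuf_v1 globals, collects the
 * DRM formats (and, for dmabuf, the format modifiers) the compositor
 * accepts, and presents swapchain images as wl_buffers.
 */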

struct wsi_wl_display_drm {
   struct wl_drm *wl_drm;
   struct u_vector formats;
   uint32_t capabilities;
};
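
/* zwp_linux_dmabuf_v1 state.  Modifier lists are only tracked for the two
 * DRM formats the swapchain path queries, ARGB8888 and XRGB8888; modifier
 * events for other formats are dropped in dmabuf_handle_modifier().
 */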
struct wsi_wl_display_dmabuf {
   struct zwp_linux_dmabuf_v1 *wl_dmabuf;
   struct u_vector formats;
   struct {
      struct u_vector argb8888;
      struct u_vector xrgb8888;
   } modifiers;
};

struct wsi_wl_display {
   /* The real wl_display */
   struct wl_display *wl_display;
   /* Actually a proxy wrapper around the event queue */
   struct wl_display *wl_display_wrapper;
   struct wl_event_queue *queue;

   struct wsi_wl_display_drm drm;
   struct wsi_wl_display_dmabuf dmabuf;

   struct wsi_wayland *wsi_wl;

   /* Points to formats in wsi_wl_display_drm or wsi_wl_display_dmabuf */
   struct u_vector *formats;

   /* Only used for displays created by wsi_wl_display_create */
   int refcount;
};

struct wsi_wayland {
   struct wsi_interface base;

   struct wsi_device *wsi;

   const VkAllocationCallbacks *alloc;
   VkPhysicalDevice physical_device;
};

static void
wsi_wl_display_add_vk_format(struct wsi_wl_display *display,
                             struct u_vector *formats, VkFormat format)
{
   /* Don't add a format that's already in the list */
   VkFormat *f;
   u_vector_foreach(f, formats)
      if (*f == format)
         return;

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;
   display->wsi_wl->wsi->GetPhysicalDeviceFormatProperties(display->wsi_wl->physical_device,
                                                           format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return;

   f = u_vector_add(formats);
   if (f)
      *f = format;
}

static void
wsi_wl_display_add_wl_format(struct wsi_wl_display *display,
                             struct u_vector *formats, uint32_t wl_format)
{
   switch (wl_format) {
   /* TODO: These are only available when VK_EXT_4444_formats is enabled, so
    * we probably need to make their use conditional on this extension. */
   case WL_DRM_FORMAT_ARGB4444:
   case WL_DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT);
      break;
   case WL_DRM_FORMAT_ABGR4444:
   case WL_DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT);
      break;

   /* Vulkan _PACKN formats have the same component order as DRM formats
    * on little endian systems; on big endian there exists no analog. */
#if MESA_LITTLE_ENDIAN
   case WL_DRM_FORMAT_RGBA4444:
   case WL_DRM_FORMAT_RGBX4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R4G4B4A4_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_BGRA4444:
   case WL_DRM_FORMAT_BGRX4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B4G4R4A4_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R5G6B5_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B5G6R5_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_ARGB1555:
   case WL_DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A1R5G5B5_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_RGBA5551:
   case WL_DRM_FORMAT_RGBX5551:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R5G5B5A1_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_BGRA5551:
   case WL_DRM_FORMAT_BGRX5551:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B5G5R5A1_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_ARGB2101010:
   case WL_DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A2R10G10B10_UNORM_PACK32);
      break;
   case WL_DRM_FORMAT_ABGR2101010:
   case WL_DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A2B10G10R10_UNORM_PACK32);
      break;
#endif

   /* Non-packed 8-bit formats have an inverted channel order compared to the
    * little endian DRM formats, because the DRM channel ordering is high->low
    * but the Vulkan channel ordering is in memory byte order. */
   case WL_DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8A8_UNORM);
      break;
   case WL_DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_UNORM);
      break;
   }
}
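
/* Example of the mapping above on a little-endian machine: DRM ARGB8888 is a
 * packed 32-bit word with alpha in the high byte, which in memory byte order
 * is B, G, R, A -- i.e. VK_FORMAT_B8G8R8A8_UNORM/_SRGB.  The X-variants
 * (XRGB8888, XBGR8888) reuse the same Vulkan formats; whether the alpha or
 * the X DRM format is chosen at swapchain creation depends on the requested
 * composite alpha (see wl_drm_format_for_vk_format below).
 */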

static void
drm_handle_device(void *data, struct wl_drm *drm, const char *name)
{
}

static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
   case VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT:
      return alpha ? WL_DRM_FORMAT_ARGB4444 : WL_DRM_FORMAT_XRGB4444;
   case VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT:
      return alpha ? WL_DRM_FORMAT_ABGR4444 : WL_DRM_FORMAT_XBGR4444;
#if MESA_LITTLE_ENDIAN
   case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_RGBA4444 : WL_DRM_FORMAT_RGBX4444;
   case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_BGRA4444 : WL_DRM_FORMAT_BGRX4444;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return WL_DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G6R5_UNORM_PACK16:
      return WL_DRM_FORMAT_BGR565;
   case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_ARGB1555 : WL_DRM_FORMAT_XRGB1555;
   case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_RGBA5551 : WL_DRM_FORMAT_RGBX5551;
   case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_BGRA5551 : WL_DRM_FORMAT_BGRX5551;
   case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
      return alpha ? WL_DRM_FORMAT_ARGB2101010 : WL_DRM_FORMAT_XRGB2101010;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return alpha ? WL_DRM_FORMAT_ABGR2101010 : WL_DRM_FORMAT_XBGR2101010;
#endif
   case VK_FORMAT_R8G8B8_UNORM:
      return WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR8888 : WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return WL_DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_DRM_FORMAT_ARGB8888 : WL_DRM_FORMAT_XRGB8888;
   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

static void
drm_handle_format(void *data, struct wl_drm *drm, uint32_t wl_format)
{
   struct wsi_wl_display *display = data;
   if (display->drm.formats.element_size == 0)
      return;

   wsi_wl_display_add_wl_format(display, &display->drm.formats, wl_format);
}

static void
drm_handle_authenticated(void *data, struct wl_drm *drm)
{
}

static void
drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t capabilities)
{
   struct wsi_wl_display *display = data;

   display->drm.capabilities = capabilities;
}

static const struct wl_drm_listener drm_listener = {
   drm_handle_device,
   drm_handle_format,
   drm_handle_authenticated,
   drm_handle_capabilities,
};

static void
dmabuf_handle_format(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
                     uint32_t format)
{
   /* Formats are implicitly advertised by the modifier event, so we ignore
    * them here. */
}

static void
dmabuf_handle_modifier(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
                       uint32_t format, uint32_t modifier_hi,
                       uint32_t modifier_lo)
{
   struct wsi_wl_display *display = data;
   struct u_vector *modifiers;
   uint64_t *mod = NULL;

   /* If we're not fetching formats, don't fetch modifiers either. */
   if (display->dmabuf.formats.element_size == 0)
      return;

   if (modifier_hi == (DRM_FORMAT_MOD_INVALID >> 32) &&
       modifier_lo == (DRM_FORMAT_MOD_INVALID & 0xffffffff))
      return;

   switch (format) {
   case WL_DRM_FORMAT_ARGB8888:
      modifiers = &display->dmabuf.modifiers.argb8888;
      break;
   case WL_DRM_FORMAT_XRGB8888:
      modifiers = &display->dmabuf.modifiers.xrgb8888;
      break;
   default:
      return; /* Unsupported format */
   }

   wsi_wl_display_add_wl_format(display, &display->dmabuf.formats, format);

   mod = u_vector_add(modifiers);
   if (!mod)
      return;

   *mod = (uint64_t) modifier_hi << 32;
   *mod |= (uint64_t) (modifier_lo & 0xffffffff);
}
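
/* DRM format modifiers are 64-bit values; the protocol delivers them as two
 * 32-bit halves, which are reassembled above.  DRM_FORMAT_MOD_INVALID means
 * the compositor only supports implicit modifiers for the format, so no
 * modifier is recorded and buffer creation falls back to the wl_drm path.
 */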

static const struct zwp_linux_dmabuf_v1_listener dmabuf_listener = {
   dmabuf_handle_format,
   dmabuf_handle_modifier,
};

static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (strcmp(interface, "wl_drm") == 0) {
      assert(display->drm.wl_drm == NULL);

      assert(version >= 2);
      display->drm.wl_drm =
         wl_registry_bind(registry, name, &wl_drm_interface, 2);
      wl_drm_add_listener(display->drm.wl_drm, &drm_listener, display);
   } else if (strcmp(interface, "zwp_linux_dmabuf_v1") == 0 && version >= 3 &&
              display->wsi_wl->wsi->supports_modifiers) {
      display->dmabuf.wl_dmabuf =
         wl_registry_bind(registry, name, &zwp_linux_dmabuf_v1_interface, 3);
      zwp_linux_dmabuf_v1_add_listener(display->dmabuf.wl_dmabuf,
                                       &dmabuf_listener, display);
   }
}

static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{
}

static const struct wl_registry_listener registry_listener = {
   registry_handle_global,
   registry_handle_global_remove
};

static void
wsi_wl_display_finish(struct wsi_wl_display *display)
{
   assert(display->refcount == 0);

   u_vector_finish(&display->drm.formats);
   u_vector_finish(&display->dmabuf.formats);
   u_vector_finish(&display->dmabuf.modifiers.argb8888);
   u_vector_finish(&display->dmabuf.modifiers.xrgb8888);
   if (display->drm.wl_drm)
      wl_drm_destroy(display->drm.wl_drm);
   if (display->dmabuf.wl_dmabuf)
      zwp_linux_dmabuf_v1_destroy(display->dmabuf.wl_dmabuf);
   if (display->wl_display_wrapper)
      wl_proxy_wrapper_destroy(display->wl_display_wrapper);
   if (display->queue)
      wl_event_queue_destroy(display->queue);
}

static VkResult
wsi_wl_display_init(struct wsi_wayland *wsi_wl,
                    struct wsi_wl_display *display,
                    struct wl_display *wl_display,
                    bool get_format_list)
{
   VkResult result = VK_SUCCESS;
   memset(display, 0, sizeof(*display));

   display->wsi_wl = wsi_wl;
   display->wl_display = wl_display;

   if (get_format_list) {
      if (!u_vector_init(&display->drm.formats, sizeof(VkFormat), 8) ||
          !u_vector_init(&display->dmabuf.formats, sizeof(VkFormat), 8) ||
          !u_vector_init(&display->dmabuf.modifiers.argb8888,
                         sizeof(uint64_t), 32) ||
          !u_vector_init(&display->dmabuf.modifiers.xrgb8888,
                         sizeof(uint64_t), 32)) {
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }
   }

   display->queue = wl_display_create_queue(wl_display);
   if (!display->queue) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   display->wl_display_wrapper = wl_proxy_create_wrapper(wl_display);
   if (!display->wl_display_wrapper) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_proxy_set_queue((struct wl_proxy *) display->wl_display_wrapper,
                      display->queue);

   struct wl_registry *registry =
      wl_display_get_registry(display->wl_display_wrapper);
   if (!registry) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_registry_add_listener(registry, &registry_listener, display);

   /* Round-trip to get the wl_drm and zwp_linux_dmabuf_v1 globals */
   wl_display_roundtrip_queue(display->wl_display, display->queue);

   /* Round-trip again to get formats, modifiers and capabilities */
   if (display->drm.wl_drm || display->dmabuf.wl_dmabuf)
      wl_display_roundtrip_queue(display->wl_display, display->queue);

   /* We need prime support for wl_drm */
   if (display->drm.wl_drm &&
       (display->drm.capabilities & WL_DRM_CAPABILITY_PRIME)) {
      display->formats = &display->drm.formats;
   } else if (display->dmabuf.wl_dmabuf) {
      display->formats = &display->dmabuf.formats;
   }

   if (!display->formats) {
      result = VK_ERROR_SURFACE_LOST_KHR;
      goto fail_registry;
   }

   if (wsi_wl->wsi->force_bgra8_unorm_first) {
      /* Find BGRA8_UNORM in the list and swap it to the first position if we
       * can find it.  Some apps get confused if SRGB is first in the list.
       */
      VkFormat *first_fmt = u_vector_head(display->formats);
      VkFormat *iter_fmt;

      u_vector_foreach(iter_fmt, display->formats) {
         if (*iter_fmt == VK_FORMAT_B8G8R8A8_UNORM) {
            *iter_fmt = *first_fmt;
            *first_fmt = VK_FORMAT_B8G8R8A8_UNORM;
            break;
         }
      }
   }

   /* We don't need this anymore */
   wl_registry_destroy(registry);

   display->refcount = 0;

   return VK_SUCCESS;

fail_registry:
   wl_registry_destroy(registry);

fail:
   wsi_wl_display_finish(display);
   return result;
}

static VkResult
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display,
                      struct wsi_wl_display **display_out)
{
   struct wsi_wl_display *display =
      vk_alloc(wsi->alloc, sizeof(*display), 8,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!display)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   VkResult result = wsi_wl_display_init(wsi, display, wl_display, true);
   if (result != VK_SUCCESS) {
      vk_free(wsi->alloc, display);
      return result;
   }

   display->refcount++;
   *display_out = display;

   return VK_SUCCESS;
}

static struct wsi_wl_display *
wsi_wl_display_ref(struct wsi_wl_display *display)
{
   display->refcount++;
   return display;
}

static void
wsi_wl_display_unref(struct wsi_wl_display *display)
{
   if (display->refcount-- > 1)
      return;

   struct wsi_wayland *wsi = display->wsi_wl;
   wsi_wl_display_finish(display);
   vk_free(wsi->alloc, display);
}

bool
wsi_wl_get_presentation_support(struct wsi_device *wsi_device,
                                struct wl_display *wl_display)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   VkResult ret = wsi_wl_display_init(wsi, &display, wl_display, false);
   if (ret == VK_SUCCESS)
      wsi_wl_display_finish(&display);

   return ret == VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
                           struct wsi_device *wsi_device,
                           uint32_t queueFamilyIndex,
                           VkBool32* pSupported)
{
   *pSupported = true;
   return VK_SUCCESS;
}

static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};

static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                struct wsi_device *wsi_device,
                                VkSurfaceCapabilitiesKHR* caps)
{
   /* For true mailbox mode, we need at least 4 images:
    *  1) One to scan out from
    *  2) One to have queued for scan-out
    *  3) One to be currently held by the Wayland compositor
    *  4) One to render to
    */
   caps->minImageCount = 4;
   /* There is no real maximum */
   caps->maxImageCount = 0;

   caps->currentExtent = (VkExtent2D) { UINT32_MAX, UINT32_MAX };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   caps->maxImageExtent = (VkExtent2D) {
      wsi_device->maxImageDimension2D,
      wsi_device->maxImageDimension2D,
   };

   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_STORAGE_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                 struct wsi_device *wsi_device,
                                 const void *info_next,
                                 VkSurfaceCapabilities2KHR* caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   VkResult result =
      wsi_wl_surface_get_capabilities(surface, wsi_device,
                                      &caps->surfaceCapabilities);

   vk_foreach_struct(ext, caps->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
         VkSurfaceProtectedCapabilitiesKHR *protected = (void *)ext;
         protected->supportsProtected = VK_FALSE;
         break;
      }
      default:
         break;
      }
   }

   return result;
}

static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct wsi_device *wsi_device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->format = *disp_fmt;
         out_fmt->colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
                            struct wsi_device *wsi_device,
                            const void *info_next,
                            uint32_t* pSurfaceFormatCount,
                            VkSurfaceFormat2KHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->surfaceFormat.format = *disp_fmt;
         out_fmt->surfaceFormat.colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 uint32_t* pPresentModeCount,
                                 VkPresentModeKHR* pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);

   if (*pPresentModeCount < ARRAY_SIZE(present_modes))
      return VK_INCOMPLETE;
   else
      return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_present_rectangles(VkIcdSurfaceBase *surface,
                                      struct wsi_device *wsi_device,
                                      uint32_t* pRectCount,
                                      VkRect2D* pRects)
{
   VK_OUTARRAY_MAKE(out, pRects, pRectCount);

   vk_outarray_append(&out, rect) {
      /* We don't know a size so just return the usual "I don't know." */
      *rect = (VkRect2D) {
         .offset = { 0, 0 },
         .extent = { UINT32_MAX, UINT32_MAX },
      };
   }

   return vk_outarray_status(&out);
}

VkResult wsi_create_wl_surface(const VkAllocationCallbacks *pAllocator,
                               const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                               VkSurfaceKHR *pSurface)
{
   VkIcdSurfaceWayland *surface;

   surface = vk_alloc(pAllocator, sizeof *surface, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}

struct wsi_wl_image {
   struct wsi_image base;
   struct wl_buffer *buffer;
   bool busy;
};

struct wsi_wl_swapchain {
   struct wsi_swapchain base;

   struct wsi_wl_display *display;

   struct wl_surface *surface;

   /* non-NULL when wl_drm should be used for wl_buffer creation; otherwise,
    * zwp_linux_dmabuf_v1 should be used.
    */
   struct wl_drm *drm_wrapper;

   struct wl_callback *frame;

   VkExtent2D extent;
   VkFormat vk_format;
   uint32_t drm_format;

   uint32_t num_drm_modifiers;
   const uint64_t *drm_modifiers;

   VkPresentModeKHR present_mode;
   bool fifo_ready;

   struct wsi_wl_image images[0];
};
VK_DEFINE_NONDISP_HANDLE_CASTS(wsi_wl_swapchain, base.base, VkSwapchainKHR,
                               VK_OBJECT_TYPE_SWAPCHAIN_KHR)

static struct wsi_image *
wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   return &chain->images[image_index].base;
}
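
/* Image acquisition strategy: dispatch any pending events on the private
 * queue, look for an image whose buffer the compositor has released, and
 * otherwise block on the Wayland fd with ppoll() until the deadline derived
 * from the caller's timeout.  The prepare_read/read_events protocol below is
 * what allows several threads to share one wl_display connection safely.
 */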
static VkResult
wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
                                    const VkAcquireNextImageInfoKHR *info,
                                    uint32_t *image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   struct timespec start_time, end_time;
   struct timespec rel_timeout;
   int wl_fd = wl_display_get_fd(chain->display->wl_display);

   timespec_from_nsec(&rel_timeout, info->timeout);
   clock_gettime(CLOCK_MONOTONIC, &start_time);
   timespec_add(&end_time, &rel_timeout, &start_time);

   while (1) {
      /* Try to dispatch potential events. */
      int ret = wl_display_dispatch_queue_pending(chain->display->wl_display,
                                                  chain->display->queue);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;

      /* Try to find a free image. */
      for (uint32_t i = 0; i < chain->base.image_count; i++) {
         if (!chain->images[i].busy) {
            /* We found a non-busy image */
            *image_index = i;
            chain->images[i].busy = true;
            return VK_SUCCESS;
         }
      }

      /* Check for timeout. */
      struct timespec current_time;
      clock_gettime(CLOCK_MONOTONIC, &current_time);
      if (timespec_after(&current_time, &end_time))
         return info->timeout ? VK_TIMEOUT : VK_NOT_READY;

      /* Try to read events from the server. */
      ret = wl_display_prepare_read_queue(chain->display->wl_display,
                                          chain->display->queue);
      if (ret < 0) {
         /* Another thread might have read events for our queue already. Go
          * back to dispatch them.
          */
         if (errno == EAGAIN)
            continue;
         return VK_ERROR_OUT_OF_DATE_KHR;
      }

      struct pollfd pollfd = {
         .fd = wl_fd,
         .events = POLLIN
      };
      timespec_sub(&rel_timeout, &end_time, &current_time);
      ret = ppoll(&pollfd, 1, &rel_timeout, NULL);
      if (ret <= 0) {
         int lerrno = errno;
         wl_display_cancel_read(chain->display->wl_display);
         if (ret < 0) {
            /* If ppoll() was interrupted, try again. */
            if (lerrno == EINTR || lerrno == EAGAIN)
               continue;
            return VK_ERROR_OUT_OF_DATE_KHR;
         }
         assert(ret == 0);
         continue;
      }

      ret = wl_display_read_events(chain->display->wl_display);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;
   }
}

static void
frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
{
   struct wsi_wl_swapchain *chain = data;

   chain->frame = NULL;
   chain->fifo_ready = true;

   wl_callback_destroy(callback);
}

static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};
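
/* FIFO present mode is emulated with wl_surface.frame callbacks: a frame
 * callback is requested with every FIFO present, and the next present blocks
 * in wl_display_dispatch_queue() until frame_handle_done() has marked the
 * swapchain ready again.  MAILBOX mode skips this throttling entirely.
 */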
static VkResult
wsi_wl_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index,
                               const VkPresentRegionKHR *damage)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->wl_display,
                                             chain->display->queue);
         if (ret < 0)
            return VK_ERROR_OUT_OF_DATE_KHR;
      }
   }

   assert(image_index < chain->base.image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);

   if (wl_surface_get_version(chain->surface) >= 4 && damage &&
       damage->pRectangles && damage->rectangleCount > 0) {
      for (unsigned i = 0; i < damage->rectangleCount; i++) {
         const VkRectLayerKHR *rect = &damage->pRectangles[i];
         assert(rect->layer == 0);
         wl_surface_damage_buffer(chain->surface,
                                  rect->offset.x, rect->offset.y,
                                  rect->extent.width, rect->extent.height);
      }
   } else {
      wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);
   }

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      chain->frame = wl_surface_frame(chain->surface);
      wl_callback_add_listener(chain->frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->wl_display);

   return VK_SUCCESS;
}

static void
buffer_handle_release(void *data, struct wl_buffer *buffer)
{
   struct wsi_wl_image *image = data;

   assert(image->buffer == buffer);

   image->busy = false;
}

static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};
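
/* Two wl_buffer creation paths follow: when explicit DRM format modifiers
 * are in use, buffers are created through zwp_linux_dmabuf_v1, which can
 * describe multi-plane images and carries the modifier; otherwise a
 * single-plane prime fd is wrapped with wl_drm.create_prime_buffer and the
 * modifier stays implicit.
 */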
static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain *chain,
                  struct wsi_wl_image *image,
                  const VkSwapchainCreateInfoKHR *pCreateInfo,
                  const VkAllocationCallbacks* pAllocator)
{
   struct wsi_wl_display *display = chain->display;
   VkResult result;

   result = wsi_create_native_image(&chain->base, pCreateInfo,
                                    chain->num_drm_modifiers > 0 ? 1 : 0,
                                    &chain->num_drm_modifiers,
                                    &chain->drm_modifiers, &image->base);
   if (result != VK_SUCCESS)
      return result;

   if (!chain->drm_wrapper) {
      /* Only request modifiers if we have dmabuf, else it must be implicit. */
      assert(display->dmabuf.wl_dmabuf);
      assert(image->base.drm_modifier != DRM_FORMAT_MOD_INVALID);

      struct zwp_linux_buffer_params_v1 *params =
         zwp_linux_dmabuf_v1_create_params(display->dmabuf.wl_dmabuf);
      wl_proxy_set_queue((struct wl_proxy *) params, chain->display->queue);

      for (int i = 0; i < image->base.num_planes; i++) {
         zwp_linux_buffer_params_v1_add(params,
                                        image->base.fds[i],
                                        i,
                                        image->base.offsets[i],
                                        image->base.row_pitches[i],
                                        image->base.drm_modifier >> 32,
                                        image->base.drm_modifier & 0xffffffff);
         close(image->base.fds[i]);
      }

      image->buffer =
         zwp_linux_buffer_params_v1_create_immed(params,
                                                 chain->extent.width,
                                                 chain->extent.height,
                                                 chain->drm_format,
                                                 0);
      zwp_linux_buffer_params_v1_destroy(params);
   } else {
      /* Without passing modifiers, we can't have multi-plane RGB images. */
      assert(image->base.num_planes == 1);
      assert(image->base.drm_modifier == DRM_FORMAT_MOD_INVALID);

      image->buffer =
         wl_drm_create_prime_buffer(chain->drm_wrapper,
                                    image->base.fds[0], /* name */
                                    chain->extent.width,
                                    chain->extent.height,
                                    chain->drm_format,
                                    image->base.offsets[0],
                                    image->base.row_pitches[0],
                                    0, 0, 0, 0 /* unused */);
      close(image->base.fds[0]);
   }

   if (!image->buffer)
      goto fail_image;

   wl_buffer_add_listener(image->buffer, &buffer_listener, image);

   return VK_SUCCESS;

fail_image:
   wsi_destroy_image(&chain->base, &image->base);

   return VK_ERROR_OUT_OF_HOST_MEMORY;
}

static VkResult
wsi_wl_swapchain_destroy(struct wsi_swapchain *wsi_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      if (chain->images[i].buffer) {
         wl_buffer_destroy(chain->images[i].buffer);
         wsi_destroy_image(&chain->base, &chain->images[i].base);
      }
   }

   if (chain->frame)
      wl_callback_destroy(chain->frame);
   if (chain->surface)
      wl_proxy_wrapper_destroy(chain->surface);
   if (chain->drm_wrapper)
      wl_proxy_wrapper_destroy(chain->drm_wrapper);

   if (chain->display)
      wsi_wl_display_unref(chain->display);

   wsi_swapchain_finish(&chain->base);
   vk_free(pAllocator, chain);

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
                                VkDevice device,
                                struct wsi_device *wsi_device,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                struct wsi_swapchain **swapchain_out)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   struct wsi_wl_swapchain *chain;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   int num_images = pCreateInfo->minImageCount;

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = vk_alloc(pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   result = wsi_swapchain_init(wsi_device, &chain->base, device,
                               pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(pAllocator, chain);
      return result;
   }

   /* Mark a bunch of stuff as NULL.  This way we can just call
    * destroy_swapchain for cleanup.
    */
   for (uint32_t i = 0; i < num_images; i++)
      chain->images[i].buffer = NULL;
   chain->surface = NULL;
   chain->drm_wrapper = NULL;
   chain->frame = NULL;

   bool alpha = pCreateInfo->compositeAlpha ==
                VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   chain->base.destroy = wsi_wl_swapchain_destroy;
   chain->base.get_wsi_image = wsi_wl_swapchain_get_wsi_image;
   chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
   chain->base.queue_present = wsi_wl_swapchain_queue_present;
   chain->base.present_mode = wsi_swapchain_get_present_mode(wsi_device, pCreateInfo);
   chain->base.image_count = num_images;
   chain->extent = pCreateInfo->imageExtent;
   chain->vk_format = pCreateInfo->imageFormat;
   chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, alpha);

   if (pCreateInfo->oldSwapchain) {
      /* If we have an oldSwapchain parameter, copy the display struct over
       * from the old one so we don't have to fully re-initialize it.
       */
      VK_FROM_HANDLE(wsi_wl_swapchain, old_chain, pCreateInfo->oldSwapchain);
      chain->display = wsi_wl_display_ref(old_chain->display);
   } else {
      chain->display = NULL;
      result = wsi_wl_display_create(wsi, surface->display, &chain->display);
      if (result != VK_SUCCESS)
         goto fail;
   }

   chain->surface = wl_proxy_create_wrapper(surface->surface);
   if (!chain->surface) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }
   wl_proxy_set_queue((struct wl_proxy *) chain->surface,
                      chain->display->queue);

   chain->num_drm_modifiers = 0;
   chain->drm_modifiers = 0;

   /* Use explicit DRM format modifiers when both the server and the driver
    * support them.
    */
   if (chain->display->dmabuf.wl_dmabuf &&
       chain->base.wsi->supports_modifiers) {
      struct u_vector *modifiers;
      switch (chain->drm_format) {
      case WL_DRM_FORMAT_ARGB8888:
         modifiers = &chain->display->dmabuf.modifiers.argb8888;
         break;
      case WL_DRM_FORMAT_XRGB8888:
         modifiers = &chain->display->dmabuf.modifiers.xrgb8888;
         break;
      default:
         modifiers = NULL;
         break;
      }

      if (modifiers) {
         chain->drm_modifiers = u_vector_tail(modifiers);
         chain->num_drm_modifiers = u_vector_length(modifiers);
      }
   }

   /* When there are explicit DRM format modifiers, we must use
    * zwp_linux_dmabuf_v1 for wl_buffer creation.  Otherwise, we must use
    * wl_drm.
    */
   if (!chain->num_drm_modifiers) {
      assert(chain->display->drm.wl_drm);

      chain->drm_wrapper =
         wl_proxy_create_wrapper(chain->display->drm.wl_drm);
      if (!chain->drm_wrapper) {
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }
      wl_proxy_set_queue((struct wl_proxy *) chain->drm_wrapper,
                         chain->display->queue);
   }

   chain->fifo_ready = true;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      result = wsi_wl_image_init(chain, &chain->images[i],
                                 pCreateInfo, pAllocator);
      if (result != VK_SUCCESS)
         goto fail;
      chain->images[i].busy = false;
   }

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   wsi_wl_swapchain_destroy(&chain->base, pAllocator);

   return result;
}

VkResult
wsi_wl_init_wsi(struct wsi_device *wsi_device,
                const VkAllocationCallbacks *alloc,
                VkPhysicalDevice physical_device)
{
   struct wsi_wayland *wsi;
   VkResult result;

   wsi = vk_alloc(alloc, sizeof(*wsi), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!wsi) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wsi->physical_device = physical_device;
   wsi->alloc = alloc;
   wsi->wsi = wsi_device;

   wsi->base.get_support = wsi_wl_surface_get_support;
   wsi->base.get_capabilities2 = wsi_wl_surface_get_capabilities2;
   wsi->base.get_formats = wsi_wl_surface_get_formats;
   wsi->base.get_formats2 = wsi_wl_surface_get_formats2;
   wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
   wsi->base.get_present_rectangles = wsi_wl_surface_get_present_rectangles;
   wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;

   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;

   return VK_SUCCESS;

fail:
   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;

   return result;
}

void
wsi_wl_finish_wsi(struct wsi_device *wsi_device,
                  const VkAllocationCallbacks *alloc)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   if (!wsi)
      return;

   vk_free(alloc, wsi);
}