libtpl_egl_la_SOURCES += tpl_wayland_egl.c \
tpl_wl_egl_thread.c \
tpl_wayland_egl_thread.c \
- tpl_wayland_vk_wsi.c \
tpl_wl_vk_thread.c \
wayland-vulkan/wayland-vulkan-protocol.c
endif
+++ /dev/null
-#define inline __inline__
-
-#include <wayland-client.h>
-
-#ifndef USE_TBM_QUEUE
-#define USE_TBM_QUEUE
-#endif
-
-#include <gbm.h>
-#include <gbm/gbm_tbm.h>
-#include <gbm/gbm_tbmint.h>
-
-#undef inline
-
-#include "tpl_internal.h"
-
-#include <string.h>
-#include <fcntl.h>
-#include <unistd.h>
-
-#include <tbm_bufmgr.h>
-#include <tbm_surface.h>
-#include <tbm_surface_internal.h>
-#include <tbm_surface_queue.h>
-#include <wayland-tbm-client.h>
-#include <wayland-tbm-server.h>
-
-typedef struct _tpl_gbm_display tpl_gbm_display_t;
-typedef struct _tpl_gbm_surface tpl_gbm_surface_t;
-typedef struct _tpl_gbm_buffer tpl_gbm_buffer_t;
-
-struct _tpl_gbm_display {
- tbm_bufmgr bufmgr;
-};
-
-struct _tpl_gbm_surface {
- tbm_surface_queue_h tbm_queue;
- tbm_surface_h current_buffer;
- tpl_bool_t reset; /* TRUE if the queue was reset externally */
-};
-
-struct _tpl_gbm_buffer {
- tpl_display_t *display;
- tpl_gbm_buffer_t **tpl_gbm_surface;
- tbm_bo bo;
- struct gbm_bo *gbm_bo;
- struct wl_listener destroy_listener;
-};
-
-static int tpl_gbm_buffer_key;
-#define KEY_TPL_GBM_BUFFER (unsigned long)(&tpl_gbm_buffer_key)
-
-static void __tpl_gbm_buffer_free(tpl_gbm_buffer_t *gbm_buffer);
-static inline tpl_gbm_buffer_t *
-__tpl_gbm_get_gbm_buffer_from_tbm_surface(tbm_surface_h surface)
-{
- tpl_gbm_buffer_t *buf = NULL;
-
- if (!tbm_surface_internal_is_valid(surface))
- return NULL;
-
- tbm_surface_internal_get_user_data(surface, KEY_TPL_GBM_BUFFER, (void **)&buf);
-
- return buf;
-}
-
-static inline void
-__tpl_gbm_set_gbm_buffer_to_tbm_surface(tbm_surface_h surface,
- tpl_gbm_buffer_t *buf)
-{
- tbm_surface_internal_add_user_data(surface, KEY_TPL_GBM_BUFFER,
- (tbm_data_free)__tpl_gbm_buffer_free);
- tbm_surface_internal_set_user_data(surface, KEY_TPL_GBM_BUFFER, buf);
-}
-
-static TPL_INLINE tpl_bool_t
-__tpl_gbm_display_is_gbm_device(tpl_handle_t native_dpy)
-{
- TPL_ASSERT(native_dpy);
-
- /* MAGIC CHECK: A native display handle is a gbm_device if the dereferenced first value
-    is the address of gbm_create_device(). */
- if (*(void **)native_dpy == gbm_create_device)
- return TPL_TRUE;
-
- return TPL_FALSE;
-}
-
-static tpl_result_t
-__tpl_gbm_display_init(tpl_display_t *display)
-{
- tpl_gbm_display_t *gbm_display = NULL;
-
- TPL_ASSERT(display);
-
- /* Do not allow default display in gbm. */
- if (!display->native_handle) {
- TPL_ERR("native_handle is NULL. Can not allow default display in gbm.");
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- gbm_display = (tpl_gbm_display_t *) calloc(1, sizeof(tpl_gbm_display_t));
- if (!gbm_display) return TPL_ERROR_OUT_OF_MEMORY;
-
- display->bufmgr_fd = dup(gbm_device_get_fd(display->native_handle));
- gbm_display->bufmgr = tbm_bufmgr_init(display->bufmgr_fd);
- display->backend.data = gbm_display;
-
- TPL_LOG_B("GBM", "[INIT] tpl_gbm_display_t(%p) bufmgr_fd(%d) bufmgr(%p)",
- gbm_display, display->bufmgr_fd, gbm_display->bufmgr);
- return TPL_ERROR_NONE;
-}
-
-static void
-__tpl_gbm_display_fini(tpl_display_t *display)
-{
- tpl_gbm_display_t *gbm_display;
-
- TPL_ASSERT(display);
-
- gbm_display = (tpl_gbm_display_t *)display->backend.data;
-
- if (gbm_display) {
- TPL_LOG_B("GBM", "[FINI] tpl_gbm_display_t(%p) bufmgr(%p)",
- gbm_display, gbm_display->bufmgr);
- tbm_bufmgr_deinit(gbm_display->bufmgr);
- free(gbm_display);
- } else {
- TPL_ERR("Failed to finalize gbm_display.");
- }
-
- close(display->bufmgr_fd);
- display->backend.data = NULL;
-}
-
-static tpl_result_t
-__tpl_gbm_display_query_config(tpl_display_t *display,
- tpl_surface_type_t surface_type, int red_size,
- int green_size, int blue_size, int alpha_size,
- int color_depth, int *native_visual_id,
- tpl_bool_t *is_slow)
-{
- TPL_ASSERT(display);
-
- if ((surface_type == TPL_SURFACE_TYPE_WINDOW) && (red_size == 8)
- && (green_size == 8) && (blue_size == 8)
- && ((color_depth == 32) || (color_depth == 24))) {
- if (alpha_size == 8) {
- if (gbm_device_is_format_supported(
- (struct gbm_device *)display->native_handle,
- GBM_FORMAT_ARGB8888, GBM_BO_USE_RENDERING) == 1) {
- if (native_visual_id)
- *native_visual_id = GBM_FORMAT_ARGB8888;
- } else return TPL_ERROR_INVALID_PARAMETER;
-
- if (is_slow != NULL) *is_slow = TPL_FALSE;
-
- return TPL_ERROR_NONE;
- }
- if (alpha_size == 0) {
- if (gbm_device_is_format_supported(
- (struct gbm_device *)display->native_handle,
- GBM_FORMAT_XRGB8888,
- GBM_BO_USE_RENDERING) == 1) {
- if (native_visual_id)
- *native_visual_id = GBM_FORMAT_XRGB8888;
- } else return TPL_ERROR_INVALID_PARAMETER;
-
- if (is_slow != NULL) *is_slow = TPL_FALSE;
-
- return TPL_ERROR_NONE;
- }
- }
-
- return TPL_ERROR_INVALID_PARAMETER;
-}
-
-static tpl_result_t
-__tpl_gbm_display_filter_config(tpl_display_t *display, int *visual_id,
- int alpha_size)
-{
- TPL_IGNORE(display);
-
- if (visual_id != NULL && *visual_id == GBM_FORMAT_ARGB8888
- && alpha_size == 0) {
- *visual_id = GBM_FORMAT_XRGB8888;
- return TPL_ERROR_NONE;
- }
-
- return TPL_ERROR_INVALID_PARAMETER;
-}
-
-static tpl_result_t
-__tpl_gbm_display_get_window_info(tpl_display_t *display, tpl_handle_t window,
- int *width, int *height, tbm_format *format,
- int depth, int a_size)
-{
- TPL_ASSERT(display);
- TPL_ASSERT(window);
-
- struct gbm_surface *gbm_surface = (struct gbm_surface *)window;
- tbm_surface_queue_h surf_queue = (tbm_surface_queue_h)gbm_tbm_get_surface_queue(
- gbm_surface);
- if (!surf_queue) {
- TPL_ERR("Failed to get tbm_surface_queue from gbm_surface.");
- return TPL_ERROR_INVALID_OPERATION;
- }
-
- if (width) *width = tbm_surface_queue_get_width(surf_queue);
- if (height) *height = tbm_surface_queue_get_height(surf_queue);
- if (format) *format = tbm_surface_queue_get_format(surf_queue);
-
- return TPL_ERROR_NONE;
-}
-
-static tpl_result_t
-__tpl_gbm_display_get_pixmap_info(tpl_display_t *display, tpl_handle_t pixmap,
- int *width, int *height, tbm_format *format)
-{
- tbm_surface_h tbm_surface = NULL;
-
- tbm_surface = wayland_tbm_server_get_surface(NULL,
- (struct wl_resource *)pixmap);
- if (!tbm_surface) {
- TPL_ERR("Failed to get tbm_surface_h from native pixmap.");
- return TPL_ERROR_INVALID_OPERATION;
- }
-
- if (width) *width = tbm_surface_get_width(tbm_surface);
- if (height) *height = tbm_surface_get_height(tbm_surface);
- if (format) *format = tbm_surface_get_format(tbm_surface);
-
- return TPL_ERROR_NONE;
-}
-
-static tbm_surface_h
-__tpl_gbm_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
-{
- tbm_surface_h tbm_surface = NULL;
-
- TPL_ASSERT(pixmap);
-
- tbm_surface = wayland_tbm_server_get_surface(NULL,
- (struct wl_resource *)pixmap);
- if (!tbm_surface) {
- TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
- return NULL;
- }
-
- TPL_LOG_B("GBM", "[PIXMAP] wl_resource(%p) tbm_surface(%p) bo(%d)", pixmap,
- tbm_surface,
- tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
-
- return tbm_surface;
-}
-
-static void
-__cb_tbm_surface_queue_reset_callback(tbm_surface_queue_h surface_queue,
- void *data)
-{
- tpl_surface_t *surface = NULL;
- tpl_gbm_surface_t *tpl_gbm_surface = NULL;
-
- surface = (tpl_surface_t *)data;
- TPL_CHECK_ON_NULL_RETURN(surface);
-
- tpl_gbm_surface = (tpl_gbm_surface_t *)surface->backend.data;
- TPL_CHECK_ON_NULL_RETURN(tpl_gbm_surface);
-
- TPL_LOG_B("GBM",
- "[QUEUE_RESET_CB] tpl_gbm_surface_t(%p) surface_queue(%p)",
- data, surface_queue);
-
- if (surface->reset_cb)
- surface->reset_cb(surface->reset_data);
-}
-
-static tpl_result_t
-__tpl_gbm_surface_init(tpl_surface_t *surface)
-{
- tpl_gbm_surface_t *tpl_gbm_surface = NULL;
- TPL_ASSERT(surface);
-
- tpl_gbm_surface = (tpl_gbm_surface_t *) calloc(1, sizeof(tpl_gbm_surface_t));
- if (!tpl_gbm_surface) {
- TPL_ERR("Failed to allocate new gbm backend surface.");
- return TPL_ERROR_OUT_OF_MEMORY;
- }
-
- surface->backend.data = (void *)tpl_gbm_surface;
- tpl_gbm_surface->tbm_queue = NULL;
- tpl_gbm_surface->current_buffer = NULL;
-
- if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
- struct gbm_surface *gbm_surface = (struct gbm_surface *)surface->native_handle;
- tpl_gbm_surface->tbm_queue =
- (tbm_surface_queue_h)gbm_tbm_get_surface_queue(gbm_surface);
- if (!tpl_gbm_surface->tbm_queue) {
- TPL_ERR("Failed to get tbm_surface_queue from gbm_surface.");
- goto error;
- }
-
- /* Set reset_callback to tbm_queue */
- if (tbm_surface_queue_add_reset_cb(tpl_gbm_surface->tbm_queue,
- __cb_tbm_surface_queue_reset_callback,
- (void *)surface)) {
- TPL_ERR("TBM surface queue add reset cb failed!");
- goto error;
- }
-
- if (__tpl_gbm_display_get_window_info(surface->display,
- surface->native_handle, &surface->width,
- &surface->height, NULL, 0, 0) != TPL_ERROR_NONE) {
- TPL_ERR("Failed to get native window info.");
- goto error;
- }
-
- TPL_LOG_B("GBM", "[INIT] WINDOW|tpl_gbm_surface_t(%p) tbm_queue(%p) (%dx%d)",
- tpl_gbm_surface, tpl_gbm_surface->tbm_queue,
- surface->width, surface->height);
-
- return TPL_ERROR_NONE;
- } else if (surface->type == TPL_SURFACE_TYPE_PIXMAP) {
- if (__tpl_gbm_display_get_pixmap_info(surface->display,
- surface->native_handle, &surface->width,
- &surface->height, NULL) != TPL_ERROR_NONE) {
- TPL_ERR("Failed to get native pixmap info.");
- goto error;
- }
-
- TPL_LOG_B("GBM", "[INIT] PIXMAP|tpl_gbm_surface_t(%p) (%dx%d)",
- tpl_gbm_surface, surface->width, surface->height);
-
- return TPL_ERROR_NONE;
- }
-
-error:
- free(tpl_gbm_surface);
- surface->backend.data = NULL;
-
- return TPL_ERROR_INVALID_OPERATION;
-}
-
-static void
-__tpl_gbm_surface_fini(tpl_surface_t *surface)
-{
- tpl_gbm_surface_t *gbm_surface = NULL;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->display);
-
- gbm_surface = (tpl_gbm_surface_t *) surface->backend.data;
- if (!gbm_surface) return;
-
- if (gbm_surface->current_buffer)
- tbm_surface_internal_unref(gbm_surface->current_buffer);
-
- TPL_LOG_B("GBM", "[FINI] tpl_surface_t(%p) tpl_gbm_surface_t(%p)",
- surface, gbm_surface);
-
- free(gbm_surface);
- surface->backend.data = NULL;
-}
-
-static tpl_result_t
-__tpl_gbm_surface_enqueue_buffer(tpl_surface_t *surface,
- tbm_surface_h tbm_surface, int num_rects,
- const int *rects, tbm_fd sync_fence)
-{
- tpl_gbm_buffer_t *gbm_buffer = NULL;
- tpl_gbm_surface_t *gbm_surface = NULL;
- int ret = 0;
- int union_x, union_y;
- int union_w, union_h;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->display);
- TPL_ASSERT(surface->display->native_handle);
- TPL_ASSERT(tbm_surface);
-
- gbm_surface = (tpl_gbm_surface_t *)surface->backend.data;
- if (!gbm_surface) {
- TPL_ERR("tpl_gbm_surface_t is invalid. tpl_surface_t(%p)",
- surface);
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- if (!tbm_surface_internal_is_valid(tbm_surface)) {
- TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.", tbm_surface);
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- gbm_buffer = __tpl_gbm_get_gbm_buffer_from_tbm_surface(tbm_surface);
- if (!gbm_buffer) {
- TPL_ERR("Filed to get gbm_buffer from tbm_surface(%p).", tbm_surface);
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- TRACE_ASYNC_END((int)gbm_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
- tbm_bo_export(gbm_buffer->bo));
-
- TPL_IMAGE_DUMP(tbm_surface, surface->width, surface->height);
-
- tbm_surface_internal_unref(tbm_surface);
-
- if (!gbm_surface->tbm_queue) {
- TPL_ERR("tbm_surface_queue is invalid. tpl_gbm_surface_t(%p)",
- gbm_surface);
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- if (sync_fence != -1) {
- tbm_sync_fence_wait(sync_fence, -1);
- close(sync_fence);
- }
-
- /* If damage rects were given for the tbm_surface, */
- if (num_rects != 0 && rects != NULL) {
- int i;
- int left = surface->width;
- int bottom = surface->height;
- int right = 0, top = 0;
-
- /* Calculate the union region of the given damage rectangles */
- for (i = 0; i < num_rects; i++) {
- int rect_i = i * 4;
- int x = rects[rect_i];
- int y = rects[rect_i + 1];
- int w = rects[rect_i + 2];
- int h = rects[rect_i + 3];
-
- left = (x < left) ? x : left;
- bottom = (y < bottom) ? y : bottom;
- right = ((x + w) > right) ? (x + w) : right;
- top = ((y + h) > top) ? (y + h) : top;
- }
-
- /* Clamp the values so they do not exceed the surface bounds. */
- left = (left < 0) ? 0 : left;
- bottom = (bottom < 0) ? 0 : bottom;
- right = (right > surface->width) ? surface->width : right;
- top = (top > surface->height) ? surface->height : top;
-
- /* Set the union rect on the tbm_surface as its damage region. */
- union_w = right - left;
- union_h = top - bottom;
- union_x = left;
- union_y = bottom;
- } else {
- /* If no damage rects were given,
-  * use the full surface size as the damage region. */
- union_w = surface->width;
- union_h = surface->height;
- union_x = 0;
- union_y = 0;
- }
-
- if (!(ret = tbm_surface_internal_set_damage(tbm_surface, union_x, union_y,
- union_w, union_h)))
- TPL_WARN("Failed to set damage rect to tbm_surface(%p)", tbm_surface);
-
- if (tbm_surface_queue_enqueue(gbm_surface->tbm_queue, tbm_surface)
- != TBM_SURFACE_QUEUE_ERROR_NONE) {
- TPL_ERR("tbm_surface_queue_enqueue failed. tbm_surface_queue(%p) tbm_surface(%p)",
- gbm_surface->tbm_queue, tbm_surface);
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- TPL_LOG_B("GBM", "[ENQ] tpl_gbm_surface_t(%p) tbm_surface(%p) bo(%d)",
- gbm_surface, tbm_surface, tbm_bo_export(gbm_buffer->bo));
-
- TRACE_MARK("[ENQ] BO_NAME:%d", tbm_bo_export(gbm_buffer->bo));
- return TPL_ERROR_NONE;
-}
-
-static tpl_bool_t
-__tpl_gbm_surface_validate(tpl_surface_t *surface)
-{
- TPL_IGNORE(surface);
-
- return TPL_TRUE;
-}
-
-static tbm_surface_h
-__tpl_gbm_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
- tbm_fd *sync_fence)
-{
- tbm_bo bo;
- tbm_surface_h tbm_surface = NULL;
- tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_QUEUE;
- tpl_gbm_buffer_t *gbm_buffer = NULL;
-
- tpl_gbm_surface_t *gbm_surface = NULL;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->native_handle);
- TPL_ASSERT(surface->display);
- TPL_ASSERT(surface->display->native_handle);
-
- if (sync_fence)
- *sync_fence = -1;
-
- gbm_surface = (tpl_gbm_surface_t *)surface->backend.data;
-
- TRACE_BEGIN("WAITING FOR DEQUEUEABLE");
- if (tbm_surface_queue_can_dequeue(gbm_surface->tbm_queue, 1) == 1)
- tsq_err = tbm_surface_queue_dequeue(gbm_surface->tbm_queue, &tbm_surface);
-
- if (!tbm_surface) {
- TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
- tsq_err);
- TRACE_END();
- return NULL;
- }
- TRACE_END();
-
- /* Increase the reference count of the tbm_surface. */
- /* It will be decreased right before tbm_surface_queue_enqueue() is called. */
- tbm_surface_internal_ref(tbm_surface);
-
- gbm_buffer = __tpl_gbm_get_gbm_buffer_from_tbm_surface(tbm_surface);
- if (gbm_buffer) {
- TRACE_MARK("[DEQ][REUSED]BO_NAME:%d", tbm_bo_export(gbm_buffer->bo));
- TRACE_ASYNC_BEGIN((int)gbm_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
- tbm_bo_export(gbm_buffer->bo));
- TPL_LOG_B("GBM", "[DEQ][R] tpl_gbm_surface_t(%p) tbm_surface(%p) bo(%d)",
- gbm_surface, tbm_surface, tbm_bo_export(gbm_buffer->bo));
- return tbm_surface;
- }
-
- if (!(bo = tbm_surface_internal_get_bo(tbm_surface, 0))) {
- TPL_ERR("Failed to get tbm_bo from tbm_surface");
- tbm_surface_internal_unref(tbm_surface);
- return NULL;
- }
-
- gbm_buffer = (tpl_gbm_buffer_t *) calloc(1, sizeof(tpl_gbm_buffer_t));
- if (!gbm_buffer) {
- TPL_ERR("Mem alloc for gbm_buffer failed!");
- tbm_surface_internal_unref(tbm_surface);
- return NULL;
- }
-
- gbm_buffer->display = surface->display;
- gbm_buffer->bo = bo;
-
- gbm_surface->current_buffer = tbm_surface;
-
- __tpl_gbm_set_gbm_buffer_to_tbm_surface(tbm_surface, gbm_buffer);
-
- TRACE_MARK("[DEQ][NEW]BO_NAME:%d", tbm_bo_export(gbm_buffer->bo));
- TRACE_ASYNC_BEGIN((int)gbm_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
- tbm_bo_export(gbm_buffer->bo));
- TPL_LOG_B("GBM", "[DEQ][N] tpl_gbm_surface_t(%p) tbm_surface(%p) bo(%d)",
- gbm_surface, tbm_surface, tbm_bo_export(bo));
- return tbm_surface;
-}
-
-static void
-__tpl_gbm_buffer_free(tpl_gbm_buffer_t *gbm_buffer)
-{
- TPL_ASSERT(gbm_buffer);
- free(gbm_buffer);
-}
-
-tpl_bool_t
-__tpl_display_choose_backend_gbm(tpl_handle_t native_dpy)
-{
- if (native_dpy == NULL)
- return TPL_FALSE;
-
- if (__tpl_gbm_display_is_gbm_device(native_dpy))
- return TPL_TRUE;
-
- return TPL_FALSE;
-}
-
-void
-__tpl_display_init_backend_gbm(tpl_display_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_GBM;
- backend->data = NULL;
-
- backend->init = __tpl_gbm_display_init;
- backend->fini = __tpl_gbm_display_fini;
- backend->query_config = __tpl_gbm_display_query_config;
- backend->filter_config = __tpl_gbm_display_filter_config;
- backend->get_window_info = __tpl_gbm_display_get_window_info;
- backend->get_pixmap_info = __tpl_gbm_display_get_pixmap_info;
- backend->get_buffer_from_native_pixmap =
- __tpl_gbm_display_get_buffer_from_native_pixmap;
-}
-
-void
-__tpl_surface_init_backend_gbm(tpl_surface_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_GBM;
- backend->data = NULL;
-
- backend->init = __tpl_gbm_surface_init;
- backend->fini = __tpl_gbm_surface_fini;
- backend->validate = __tpl_gbm_surface_validate;
- backend->dequeue_buffer = __tpl_gbm_surface_dequeue_buffer;
- backend->enqueue_buffer = __tpl_gbm_surface_enqueue_buffer;
-}
+++ /dev/null
-#define inline __inline__
-#include <wayland-client.h>
-#undef inline
-
-#include "tpl_internal.h"
-
-#include <tbm_surface.h>
-#include <tbm_surface_internal.h>
-#include <tbm_surface_queue.h>
-#include <wayland-tbm-client.h>
-
-#include <tbm_sync.h>
-
-#include "wayland-vulkan/wayland-vulkan-client-protocol.h"
-
-#define CLIENT_QUEUE_SIZE 3
-
-typedef struct _tpl_wayland_vk_wsi_display tpl_wayland_vk_wsi_display_t;
-typedef struct _tpl_wayland_vk_wsi_surface tpl_wayland_vk_wsi_surface_t;
-typedef struct _tpl_wayland_vk_wsi_buffer tpl_wayland_vk_wsi_buffer_t;
-
-struct _tpl_wayland_vk_wsi_display {
- struct wayland_tbm_client *wl_tbm_client;
- struct {
- int min_buffer;
- int max_buffer;
- int present_modes;
- } surface_capabilities;
- struct wayland_vulkan *wl_vk_client;
-};
-
-struct _tpl_wayland_vk_wsi_surface {
- tbm_surface_queue_h tbm_queue;
- int buffer_count;
- int present_mode;
-};
-
-struct _tpl_wayland_vk_wsi_buffer {
- tpl_display_t *display;
- struct wl_proxy *wl_proxy;
- tbm_fd sync_timeline;
- unsigned int sync_timestamp;
-};
-
-static const struct wl_registry_listener registry_listener;
-static const struct wl_callback_listener sync_listener;
-static const struct wl_callback_listener frame_listener;
-static const struct wl_buffer_listener buffer_release_listener;
-
-#define TPL_BUFFER_CACHE_MAX_ENTRIES 40
-
-static int tpl_wayland_vk_wsi_buffer_key;
-#define KEY_tpl_wayland_vk_wsi_buffer (unsigned long)(&tpl_wayland_vk_wsi_buffer_key)
-
-static void __tpl_wayland_vk_wsi_buffer_free(tpl_wayland_vk_wsi_buffer_t
- *wayland_vk_wsi_buffer);
-static tpl_result_t __tpl_wayland_vk_wsi_surface_destroy_swapchain(
- tpl_surface_t *surface);
-
-static TPL_INLINE tpl_wayland_vk_wsi_buffer_t *
-__tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface_h surface)
-{
- tpl_wayland_vk_wsi_buffer_t *buf = NULL;
-
- if (!tbm_surface_internal_is_valid(surface))
- return NULL;
-
- tbm_surface_internal_get_user_data(surface, KEY_tpl_wayland_vk_wsi_buffer,
- (void **)&buf);
- return buf;
-}
-
-static TPL_INLINE void
-__tpl_wayland_vk_wsi_set_wayland_buffer_to_tbm_surface(tbm_surface_h surface,
- tpl_wayland_vk_wsi_buffer_t *buf)
-{
- tbm_surface_internal_add_user_data(surface,
- KEY_tpl_wayland_vk_wsi_buffer,
- (tbm_data_free)__tpl_wayland_vk_wsi_buffer_free);
- tbm_surface_internal_set_user_data(surface,
- KEY_tpl_wayland_vk_wsi_buffer, buf);
-}
-
-static TPL_INLINE tpl_bool_t
-__tpl_wayland_vk_wsi_display_is_wl_display(tpl_handle_t native_dpy)
-{
- TPL_ASSERT(native_dpy);
-
- struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
-
- /* MAGIC CHECK: A native display handle is a wl_display if the dereferenced first value
-    is the address of the wl_display_interface structure. */
- if (wl_egl_native_dpy == &wl_display_interface) {
- return TPL_TRUE;
- }
-
- if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
- strlen(wl_display_interface.name)) == 0) {
- return TPL_TRUE;
- }
-
- return TPL_FALSE;
-}
-
-static int
-__tpl_wayland_vk_wsi_display_roundtrip(tpl_display_t *display)
-{
- struct wl_display *wl_dpy;
- struct wl_callback *callback;
- int done = 0, ret = 0;
-
- TPL_ASSERT(display);
- TPL_ASSERT(display->native_handle);
- TPL_ASSERT(display->backend.data);
-
- wl_dpy = (struct wl_display *) display->native_handle;
-
- callback = wl_display_sync(wl_dpy);
- wl_callback_add_listener(callback, &sync_listener, &done);
-
- while (ret != -1 && !done) {
- ret = wl_display_dispatch(wl_dpy);
- }
-
- return ret;
-}
-
-static void
-__tpl_wayland_vk_wsi_support_present_mode_listener(void *data,
- struct wayland_vulkan *wayland_vulkan,
- uint32_t mode)
-{
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = data;
-
- switch (mode) {
- case WAYLAND_VULKAN_PRESENT_MODE_TYPE_IMMEDIATE:
- wayland_vk_wsi_display->surface_capabilities.present_modes
- |= TPL_DISPLAY_PRESENT_MODE_IMMEDIATE;
- break;
- case WAYLAND_VULKAN_PRESENT_MODE_TYPE_MAILBOX:
- wayland_vk_wsi_display->surface_capabilities.present_modes
- |= TPL_DISPLAY_PRESENT_MODE_MAILBOX;
- break;
- case WAYLAND_VULKAN_PRESENT_MODE_TYPE_FIFO:
- wayland_vk_wsi_display->surface_capabilities.present_modes
- |= TPL_DISPLAY_PRESENT_MODE_FIFO;
- break;
- case WAYLAND_VULKAN_PRESENT_MODE_TYPE_FIFO_RELAXED:
- wayland_vk_wsi_display->surface_capabilities.present_modes
- |= TPL_DISPLAY_PRESENT_MODE_FIFO_RELAXED;
- break;
- default:
- TPL_WARN("server sent unknown present type: %d", mode);
- }
-}
-
-static struct wayland_vulkan_listener wl_vk_listener = {
- __tpl_wayland_vk_wsi_support_present_mode_listener,
-};
-
-static void
-__tpl_wayland_vk_wsi_registry_handle_global(void *data, struct wl_registry *registry,
- uint32_t name, const char *interface, uint32_t version)
-{
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = data;
-
- if (!strcmp(interface, "wayland_vulkan")) {
- wayland_vk_wsi_display->wl_vk_client =
- wl_registry_bind(registry, name, &wayland_vulkan_interface, version);
- }
-}
-
-static const struct wl_registry_listener registry_listener = {
- __tpl_wayland_vk_wsi_registry_handle_global,
- NULL
-};
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_display_init(tpl_display_t *display)
-{
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
-
- TPL_ASSERT(display);
-
- /* Do not allow default display in wayland. */
- if (!display->native_handle) {
- TPL_ERR("Invalid native handle for display.");
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *) calloc(1,
- sizeof(tpl_wayland_vk_wsi_display_t));
- if (!wayland_vk_wsi_display) {
- TPL_ERR("Failed to allocate memory for new tpl_wayland_vk_wsi_display_t.");
- return TPL_ERROR_OUT_OF_MEMORY;
- }
-
- wayland_vk_wsi_display->surface_capabilities.min_buffer = 2;
- wayland_vk_wsi_display->surface_capabilities.max_buffer = CLIENT_QUEUE_SIZE;
- wayland_vk_wsi_display->surface_capabilities.present_modes =
- TPL_DISPLAY_PRESENT_MODE_MAILBOX;
-
- display->backend.data = wayland_vk_wsi_display;
-
- if (__tpl_wayland_vk_wsi_display_is_wl_display(display->native_handle)) {
- struct wl_display *wl_dpy =
- (struct wl_display *)display->native_handle;
- struct wl_registry *wl_registry;
-
- wayland_vk_wsi_display->wl_tbm_client =
- wayland_tbm_client_init((struct wl_display *) wl_dpy);
-
- if (!wayland_vk_wsi_display->wl_tbm_client) {
- TPL_ERR("Wayland TBM initialization failed!");
- goto free_wl_display;
- }
-
- wl_registry = wl_display_get_registry(wl_dpy);
- /* check wl_registry */
- wl_registry_add_listener(wl_registry, &registry_listener, wayland_vk_wsi_display);
- wl_display_roundtrip(wl_dpy);
-
- if (wayland_vk_wsi_display->wl_vk_client)
- wayland_vulkan_add_listener(wayland_vk_wsi_display->wl_vk_client,
- &wl_vk_listener, wayland_vk_wsi_display);
-
- wl_display_roundtrip(wl_dpy);
- wl_registry_destroy(wl_registry);
- } else {
- goto free_wl_display;
- }
-
- return TPL_ERROR_NONE;
-
-free_wl_display:
- if (wayland_vk_wsi_display) {
- free(wayland_vk_wsi_display);
- display->backend.data = NULL;
- }
- return TPL_ERROR_INVALID_OPERATION;
-}
-
-static void
-__tpl_wayland_vk_wsi_display_fini(tpl_display_t *display)
-{
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display;
-
- TPL_ASSERT(display);
-
- wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)display->backend.data;
- if (wayland_vk_wsi_display) {
- wayland_tbm_client_deinit(wayland_vk_wsi_display->wl_tbm_client);
- if (wayland_vk_wsi_display->wl_vk_client)
- wayland_vulkan_destroy(wayland_vk_wsi_display->wl_vk_client);
- free(wayland_vk_wsi_display);
- }
- display->backend.data = NULL;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_display_query_config(tpl_display_t *display,
- tpl_surface_type_t surface_type,
- int red_size, int green_size,
- int blue_size, int alpha_size,
- int color_depth, int *native_visual_id,
- tpl_bool_t *is_slow)
-{
- TPL_ASSERT(display);
-
- if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
- green_size == 8 && blue_size == 8 &&
- (color_depth == 32 || color_depth == 24)) {
-
- if (alpha_size == 8) {
- if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
- if (is_slow) *is_slow = TPL_FALSE;
- return TPL_ERROR_NONE;
- }
- if (alpha_size == 0) {
- if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
- if (is_slow) *is_slow = TPL_FALSE;
- return TPL_ERROR_NONE;
- }
- }
-
- return TPL_ERROR_INVALID_PARAMETER;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_display_filter_config(tpl_display_t *display,
- int *visual_id,
- int alpha_size)
-{
- TPL_IGNORE(display);
- TPL_IGNORE(visual_id);
- TPL_IGNORE(alpha_size);
- return TPL_ERROR_NONE;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_display_query_window_supported_buffer_count(
- tpl_display_t *display,
- tpl_handle_t window, int *min, int *max)
-{
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
-
- TPL_ASSERT(display);
- TPL_ASSERT(window);
-
- wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)display->backend.data;
-
- if (!wayland_vk_wsi_display) return TPL_ERROR_INVALID_OPERATION;
-
- if (min) *min = wayland_vk_wsi_display->surface_capabilities.min_buffer;
- if (max) *max = wayland_vk_wsi_display->surface_capabilities.max_buffer;
-
- return TPL_ERROR_NONE;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_display_query_window_supported_present_modes(
- tpl_display_t *display,
- tpl_handle_t window, int *modes)
-{
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
-
- TPL_ASSERT(display);
- TPL_ASSERT(window);
-
- wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)display->backend.data;
-
- if (!wayland_vk_wsi_display) return TPL_ERROR_INVALID_OPERATION;
-
- if (modes) {
- *modes = TPL_DISPLAY_PRESENT_MODE_MAILBOX | TPL_DISPLAY_PRESENT_MODE_IMMEDIATE |
- wayland_vk_wsi_display->surface_capabilities.present_modes;
- }
-
- return TPL_ERROR_NONE;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_surface_init(tpl_surface_t *surface)
-{
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
- TPL_ASSERT(surface->native_handle);
-
- wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) calloc(1,
- sizeof(tpl_wayland_vk_wsi_surface_t));
- if (!wayland_vk_wsi_surface) {
- TPL_ERR("Failed to allocate memory for new tpl_wayland_vk_wsi_surface_t.");
- return TPL_ERROR_OUT_OF_MEMORY;
- }
-
- surface->backend.data = (void *)wayland_vk_wsi_surface;
- wayland_vk_wsi_surface->tbm_queue = NULL;
-
- return TPL_ERROR_NONE;
-}
-
-static void
-__tpl_wayland_vk_wsi_surface_fini(tpl_surface_t *surface)
-{
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->display);
-
- wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
- if (wayland_vk_wsi_surface == NULL) return;
-
- wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)
- surface->display->backend.data;
- if (wayland_vk_wsi_display == NULL) return;
-
- if (wayland_vk_wsi_surface->tbm_queue)
- __tpl_wayland_vk_wsi_surface_destroy_swapchain(surface);
-
- free(wayland_vk_wsi_surface);
- surface->backend.data = NULL;
-}
-
-static void
-__tpl_wayland_vk_wsi_surface_commit_buffer(tpl_surface_t *surface,
- tbm_surface_h tbm_surface)
-{
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->display);
- TPL_ASSERT(surface->display->native_handle);
- TPL_ASSERT(tbm_surface);
- TPL_ASSERT(tbm_surface_internal_is_valid(tbm_surface));
-
- struct wl_surface *wl_sfc = NULL;
- struct wl_callback *frame_callback = NULL;
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface =
- (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
- tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer =
- __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface);
- TPL_ASSERT(wayland_vk_wsi_buffer);
-
-
- wl_sfc = (struct wl_surface *)surface->native_handle;
-
- tbm_surface_internal_ref(tbm_surface);
- wl_surface_attach(wl_sfc, (void *)wayland_vk_wsi_buffer->wl_proxy, 0, 0);
-
- /* TODO: add num_rects and rects to tpl_wayland_vk_wsi_buffer_t */
- wl_surface_damage(wl_sfc, 0, 0, surface->width, surface->height);
-
- frame_callback = wl_surface_frame(wl_sfc);
- wl_callback_add_listener(frame_callback, &frame_listener, tbm_surface);
-
- wl_surface_commit(wl_sfc);
-
- wl_display_flush(surface->display->native_handle);
- wayland_vk_wsi_buffer->sync_timestamp++;
-
- tbm_surface_queue_release(wayland_vk_wsi_surface->tbm_queue, tbm_surface);
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_surface_enqueue_buffer(tpl_surface_t *surface,
- tbm_surface_h tbm_surface,
- int num_rects, const int *rects,
- tbm_fd sync_fence)
-{
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->display);
- TPL_ASSERT(surface->display->native_handle);
- TPL_ASSERT(tbm_surface);
-
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface =
- (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
- tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer = NULL;
- tbm_surface_queue_error_e tsq_err;
-
- if (!tbm_surface_internal_is_valid(tbm_surface)) {
- TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.", tbm_surface);
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- wayland_vk_wsi_buffer =
- __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface);
- TPL_ASSERT(wayland_vk_wsi_buffer);
-
- TPL_IMAGE_DUMP(tbm_surface, surface->width, surface->height);
-
- tbm_surface_internal_unref(tbm_surface);
-
- tsq_err = tbm_surface_queue_enqueue(wayland_vk_wsi_surface->tbm_queue,
- tbm_surface);
- if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
- TPL_ERR("Failed to enqeueue tbm_surface. | tsq_err = %d", tsq_err);
- return TPL_ERROR_INVALID_OPERATION;
- }
-
- if (sync_fence != -1) {
- /* non worker thread mode */
- /* TODO: set max wait time */
- if (tbm_sync_fence_wait(sync_fence, -1) != 1) {
- char buf[1024];
- strerror_r(errno, buf, sizeof(buf));
- TPL_ERR("Failed to wait sync. | error: %d(%s)", errno, buf);
- }
- close(sync_fence);
- }
-
- tsq_err = tbm_surface_queue_acquire(wayland_vk_wsi_surface->tbm_queue,
- &tbm_surface);
- if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
- TPL_ERR("Failed to acquire tbm_surface. | tsq_err = %d", tsq_err);
- return TPL_ERROR_INVALID_OPERATION;
- }
-
- __tpl_wayland_vk_wsi_surface_commit_buffer(surface, tbm_surface);
-
-
- /*
- * The tbm_surface is returned to the free queue here.
- * In single-threaded use, tbm_surface_queue_can_dequeue always returns true and
- * __tpl_wayland_vk_wsi_surface_dequeue_buffer never calls wl_display_dispatch,
- * so the wayland event queue fills up and eventually causes a broken pipe.
- * Therefore wl_display_dispatch must be called here.
- * Where to call it (in dequeue or in a worker thread?) still needs discussion.
- */
- wl_display_dispatch(surface->display->native_handle);
-
- return TPL_ERROR_NONE;
-}
-
-static tpl_bool_t
-__tpl_wayland_vk_wsi_surface_validate(tpl_surface_t *surface)
-{
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
-
- return TPL_TRUE;
-}
-
-static tbm_surface_h
-__tpl_wayland_vk_wsi_surface_dequeue_buffer(tpl_surface_t *surface,
- uint64_t timeout_ns,
- tbm_fd *sync_fence)
-{
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
- TPL_ASSERT(surface->display);
-
- tbm_surface_h tbm_surface = NULL;
- tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer = NULL;
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface =
- (tpl_wayland_vk_wsi_surface_t *)surface->backend.data;
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display =
- (tpl_wayland_vk_wsi_display_t *)surface->display->backend.data;
- struct wl_proxy *wl_proxy = NULL;
- tbm_surface_queue_error_e tsq_err = 0;
-
- if (sync_fence)
- *sync_fence = -1;
-
- TPL_OBJECT_UNLOCK(surface);
- while (tbm_surface_queue_can_dequeue(
- wayland_vk_wsi_surface->tbm_queue, 0) == 0) {
- /* Application sent all buffers to the server. Wait for server response. */
-
- if (wl_display_dispatch(surface->display->native_handle) == -1) {
- TPL_OBJECT_LOCK(surface);
- return NULL;
- }
- }
- TPL_OBJECT_LOCK(surface);
-
- tsq_err = tbm_surface_queue_dequeue(wayland_vk_wsi_surface->tbm_queue,
- &tbm_surface);
-
- if (!tbm_surface) {
- TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
- tsq_err);
- return NULL;
- }
-
- tbm_surface_internal_ref(tbm_surface);
-
- if ((wayland_vk_wsi_buffer =
- __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(
- tbm_surface)) != NULL) {
- if (sync_fence) {
- if (wayland_vk_wsi_buffer->sync_timestamp) {
- /* first return -1 */
- char name[32];
- snprintf(name, 32, "%d",
- tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
- *sync_fence = tbm_sync_fence_create(wayland_vk_wsi_buffer->sync_timeline,
- name,
- wayland_vk_wsi_buffer->sync_timestamp);
- if (*sync_fence == -1) {
- char buf[1024];
- strerror_r(errno, buf, sizeof(buf));
- TPL_ERR("Failed to create TBM sync fence: %d(%s)", errno, buf);
- }
- } else {
- *sync_fence = -1;
- }
- }
- return tbm_surface;
- }
-
- wayland_vk_wsi_buffer = (tpl_wayland_vk_wsi_buffer_t *) calloc(1,
- sizeof(tpl_wayland_vk_wsi_buffer_t));
- if (!wayland_vk_wsi_buffer) {
- TPL_ERR("Mem alloc for wayland_vk_wsi_buffer failed!");
- tbm_surface_internal_unref(tbm_surface);
- return NULL;
- }
-
- wl_proxy = (struct wl_proxy *)wayland_tbm_client_create_buffer(
- wayland_vk_wsi_display->wl_tbm_client, tbm_surface);
- if (!wl_proxy) {
- TPL_ERR("Failed to create TBM client buffer!");
- tbm_surface_internal_unref(tbm_surface);
- free(wayland_vk_wsi_buffer);
- return NULL;
- }
-
- /* can change signaled sync */
- if (sync_fence)
- *sync_fence = -1;
- wayland_vk_wsi_buffer->sync_timeline = tbm_sync_timeline_create();
- if (wayland_vk_wsi_buffer->sync_timeline == -1) {
- char buf[1024];
- strerror_r(errno, buf, sizeof(buf));
- TPL_ERR("Failed to create TBM sync timeline: %d(%s)", errno, buf);
- wl_proxy_destroy(wl_proxy);
- tbm_surface_internal_unref(tbm_surface);
- free(wayland_vk_wsi_buffer);
- return NULL;
- }
- wayland_vk_wsi_buffer->sync_timestamp = 0;
- wayland_tbm_client_set_sync_timeline(wayland_vk_wsi_display->wl_tbm_client,
- (void *)wl_proxy,
- wayland_vk_wsi_buffer->sync_timeline);
-
- wl_buffer_add_listener((void *)wl_proxy, &buffer_release_listener,
- tbm_surface);
-
- wl_display_flush((struct wl_display *)surface->display->native_handle);
-
- wayland_vk_wsi_buffer->display = surface->display;
- wayland_vk_wsi_buffer->wl_proxy = wl_proxy;
-
- __tpl_wayland_vk_wsi_set_wayland_buffer_to_tbm_surface(tbm_surface,
- wayland_vk_wsi_buffer);
-
- return tbm_surface;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_surface_get_swapchain_buffers(tpl_surface_t *surface,
- tbm_surface_h **buffers,
- int *buffer_count)
-{
- tbm_surface_h buffer = NULL;
- tbm_surface_h *swapchain_buffers = NULL;
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
- tbm_surface_queue_error_e tsq_err;
- int i, dequeue_count;
- tpl_result_t ret = TPL_ERROR_NONE;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
- TPL_ASSERT(surface->display);
- TPL_ASSERT(buffers);
- TPL_ASSERT(buffer_count);
-
- wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *)surface->backend.data;
- swapchain_buffers = (tbm_surface_h *)calloc(
- wayland_vk_wsi_surface->buffer_count, sizeof(tbm_surface_h));
- if (!swapchain_buffers) {
- TPL_ERR("Failed to allocate memory for buffers.");
- return TPL_ERROR_OUT_OF_MEMORY;
- }
-
- for (i = 0 ; i < wayland_vk_wsi_surface->buffer_count ; i++) {
- tsq_err = tbm_surface_queue_dequeue(wayland_vk_wsi_surface->tbm_queue, &buffer);
- if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
- TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
- tsq_err);
- dequeue_count = i;
- ret = TPL_ERROR_OUT_OF_MEMORY;
- goto get_buffer_fail;
- }
- swapchain_buffers[i] = buffer;
- }
-
- for (i = 0 ; i < wayland_vk_wsi_surface->buffer_count ; i++) {
- tsq_err = tbm_surface_queue_release(wayland_vk_wsi_surface->tbm_queue,
- swapchain_buffers[i]);
- if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
- TPL_ERR("Failed to release tbm_surface. | tsq_err = %d", tsq_err);
- ret = TPL_ERROR_INVALID_OPERATION;
- goto release_buffer_fail;
- }
- }
-
- *buffers = swapchain_buffers;
- *buffer_count = wayland_vk_wsi_surface->buffer_count;
- return TPL_ERROR_NONE;
-
-get_buffer_fail:
- for (i = 0 ; i < dequeue_count ; i++) {
- tsq_err = tbm_surface_queue_release(wayland_vk_wsi_surface->tbm_queue,
- swapchain_buffers[i]);
- if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
- TPL_ERR("Failed to release tbm_surface. | tsq_err = %d", tsq_err);
- goto release_buffer_fail;
- }
- }
-
-release_buffer_fail:
- free(swapchain_buffers);
- return ret;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_surface_create_swapchain(tpl_surface_t *surface,
- tbm_format format, int width,
- int height, int buffer_count, int present_mode)
-{
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
- TPL_ASSERT(surface->display);
-
- wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
- TPL_ASSERT(wayland_vk_wsi_surface);
-
- wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)
- surface->display->backend.data;
- TPL_ASSERT(wayland_vk_wsi_display);
-
- if ((buffer_count < wayland_vk_wsi_display->surface_capabilities.min_buffer)
- || (buffer_count > wayland_vk_wsi_display->surface_capabilities.max_buffer)) {
- TPL_ERR("Invalid buffer_count!");
- return TPL_ERROR_INVALID_PARAMETER;
- }
-
- /* FIXME: vblank has a performance problem, so all present modes are replaced with MAILBOX. */
- present_mode = TPL_DISPLAY_PRESENT_MODE_MAILBOX;
-
- if ((present_mode & wayland_vk_wsi_display->surface_capabilities.present_modes) == 0) {
- /* The server does not support the requested mode; check whether the client supports it. */
- switch (present_mode) {
- case TPL_DISPLAY_PRESENT_MODE_MAILBOX:
- case TPL_DISPLAY_PRESENT_MODE_IMMEDIATE:
- break;
- default:
- TPL_ERR("Unsupported present mode: %d", present_mode);
- return TPL_ERROR_INVALID_PARAMETER;
- }
- }
-
- wayland_vk_wsi_surface->present_mode = present_mode;
-
- wayland_vk_wsi_surface->tbm_queue = tbm_surface_queue_create(buffer_count,
- width,
- height,
- TBM_FORMAT_ARGB8888,
- 0);
-
- if (!wayland_vk_wsi_surface->tbm_queue) {
- TPL_ERR("TBM surface queue creation failed!");
- return TPL_ERROR_OUT_OF_MEMORY;
- }
-
- wayland_vk_wsi_surface->buffer_count = buffer_count;
-
- surface->width = width;
- surface->height = height;
-
- return TPL_ERROR_NONE;
-}
-
-static tpl_result_t
-__tpl_wayland_vk_wsi_surface_destroy_swapchain(tpl_surface_t *surface)
-{
- tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
- TPL_ASSERT(surface->display);
-
- wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
- TPL_ASSERT(wayland_vk_wsi_surface);
-
- if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
-
- wl_display_flush(surface->display->native_handle);
- __tpl_wayland_vk_wsi_display_roundtrip(surface->display);
-
- tbm_surface_queue_destroy(wayland_vk_wsi_surface->tbm_queue);
- wayland_vk_wsi_surface->tbm_queue = NULL;
- }
-
- return TPL_ERROR_NONE;
-}
-
-static void
-__tpl_wayland_vk_wsi_buffer_free(tpl_wayland_vk_wsi_buffer_t
- *wayland_vk_wsi_buffer)
-{
- TPL_ASSERT(wayland_vk_wsi_buffer);
- TPL_ASSERT(wayland_vk_wsi_buffer->display);
-
- tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display =
- (tpl_wayland_vk_wsi_display_t *)wayland_vk_wsi_buffer->display->backend.data;
-
- wl_display_flush((struct wl_display *)
- wayland_vk_wsi_buffer->display->native_handle);
-
- if (wayland_vk_wsi_buffer->wl_proxy)
- wayland_tbm_client_destroy_buffer(wayland_vk_wsi_display->wl_tbm_client,
- (void *)wayland_vk_wsi_buffer->wl_proxy);
-
- if (wayland_vk_wsi_buffer->sync_timeline != -1)
- close(wayland_vk_wsi_buffer->sync_timeline);
-
- free(wayland_vk_wsi_buffer);
-}
-
-tpl_bool_t
-__tpl_display_choose_backend_wayland_vk_wsi(tpl_handle_t native_dpy)
-{
- if (!native_dpy) return TPL_FALSE;
-
- if (__tpl_wayland_vk_wsi_display_is_wl_display(native_dpy))
- return TPL_TRUE;
-
- return TPL_FALSE;
-}
-
-void
-__tpl_display_init_backend_wayland_vk_wsi(tpl_display_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_WAYLAND_VULKAN_WSI;
- backend->data = NULL;
-
- backend->init = __tpl_wayland_vk_wsi_display_init;
- backend->fini = __tpl_wayland_vk_wsi_display_fini;
- backend->query_config = __tpl_wayland_vk_wsi_display_query_config;
- backend->filter_config = __tpl_wayland_vk_wsi_display_filter_config;
- backend->query_window_supported_buffer_count =
- __tpl_wayland_vk_wsi_display_query_window_supported_buffer_count;
- backend->query_window_supported_present_modes =
- __tpl_wayland_vk_wsi_display_query_window_supported_present_modes;
-}
-
-void
-__tpl_surface_init_backend_wayland_vk_wsi(tpl_surface_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_WAYLAND_VULKAN_WSI;
- backend->data = NULL;
-
- backend->init = __tpl_wayland_vk_wsi_surface_init;
- backend->fini = __tpl_wayland_vk_wsi_surface_fini;
- backend->validate = __tpl_wayland_vk_wsi_surface_validate;
- backend->dequeue_buffer = __tpl_wayland_vk_wsi_surface_dequeue_buffer;
- backend->enqueue_buffer = __tpl_wayland_vk_wsi_surface_enqueue_buffer;
- backend->get_swapchain_buffers =
- __tpl_wayland_vk_wsi_surface_get_swapchain_buffers;
- backend->create_swapchain = __tpl_wayland_vk_wsi_surface_create_swapchain;
- backend->destroy_swapchain = __tpl_wayland_vk_wsi_surface_destroy_swapchain;
-}
-
-static void
-__cb_client_sync_callback(void *data, struct wl_callback *callback,
- uint32_t serial)
-{
- int *done;
-
- TPL_ASSERT(data);
-
- done = data;
- *done = 1;
-
- wl_callback_destroy(callback);
-}
-
-static const struct wl_callback_listener sync_listener = {
- __cb_client_sync_callback
-};
-
-static void
-__cb_client_frame_callback(void *data, struct wl_callback *callback,
- uint32_t time)
-{
- /* We moved the buffer reclaim logic to buffer_release_callback().
-    buffer_release_callback() is a more suitable point to delete or reuse a buffer than frame_callback().
-    We keep this callback because buffer_release_callback() only works when frame_callback() is activated. */
- TPL_IGNORE(data);
- TPL_IGNORE(time);
-
- wl_callback_destroy(callback);
-}
-
-static const struct wl_callback_listener frame_listener = {
- __cb_client_frame_callback
-};
-
-static void
-__cb_client_buffer_release_callback(void *data, struct wl_proxy *proxy)
-{
- tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer = NULL;
- tbm_surface_h tbm_surface = NULL;
-
- TPL_ASSERT(data);
-
- tbm_surface = (tbm_surface_h) data;
-
- wayland_vk_wsi_buffer =
- __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface);
-
- if (wayland_vk_wsi_buffer)
- tbm_surface_internal_unref(tbm_surface);
-}
-
-static const struct wl_buffer_listener buffer_release_listener = {
- (void *)__cb_client_buffer_release_callback,
-};
+++ /dev/null
-#include <stdlib.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <pthread.h>
-
-#include <X11/Xlib.h>
-#include <X11/Xutil.h>
-#include <X11/Xproto.h>
-
-#include <dri2/dri2.h>
-#include <tbm_bufmgr.h>
-
-#include "tpl_internal.h"
-
-#include "tpl_x11_internal.h"
-
-static pthread_mutex_t global_mutex = PTHREAD_MUTEX_INITIALIZER;
-
-pthread_mutex_t
-__tpl_x11_get_global_mutex()
-{
- return global_mutex;
-}
-
-void
-__tpl_x11_swap_str_to_swap_type(char *str, tpl_x11_swap_type_t *type)
-{
- int swap_type;
-
- TPL_ASSERT(type);
-
- if (str == NULL)
- return;
-
- swap_type = strtol(str, NULL, 0);
-
- switch (swap_type) {
- case TPL_X11_SWAP_TYPE_SYNC:
- case TPL_X11_SWAP_TYPE_ASYNC:
- case TPL_X11_SWAP_TYPE_LAZY:
- *type = swap_type;
- break;
- default:
- break;
- }
-}
-
-tpl_buffer_t *
-__tpl_x11_surface_buffer_cache_find(tpl_list_t *buffer_cache,
- unsigned int name)
-{
- tpl_list_node_t *node;
-
- TPL_ASSERT(buffer_cache);
-
- node = __tpl_list_get_front_node(buffer_cache);
-
- while (node) {
- tpl_buffer_t *buffer = (tpl_buffer_t *) __tpl_list_node_get_data(node);
-
- TPL_ASSERT(buffer);
-
- if (buffer->key == name)
- return buffer;
-
- node = __tpl_list_node_next(node);
- }
-
- return NULL;
-}
-
-void
-__tpl_x11_surface_buffer_cache_remove(tpl_list_t *buffer_cache,
- unsigned int name)
-{
- tpl_list_node_t *node;
-
- TPL_ASSERT(buffer_cache);
-
- node = __tpl_list_get_front_node(buffer_cache);
-
- while (node) {
- tpl_buffer_t *buffer = (tpl_buffer_t *) __tpl_list_node_get_data(node);
-
- TPL_ASSERT(buffer);
-
- if (buffer->key == name) {
- tpl_object_unreference(&buffer->base);
- __tpl_list_remove(node, NULL);
- return;
- }
-
- node = __tpl_list_node_next(node);
- }
-}
-
-tpl_bool_t
-__tpl_x11_surface_buffer_cache_add(tpl_list_t *buffer_cache,
- tpl_buffer_t *buffer)
-{
- TPL_ASSERT(buffer_cache);
- TPL_ASSERT(buffer);
-
- if (__tpl_list_get_count(buffer_cache) >= TPL_BUFFER_CACHE_MAX_ENTRIES) {
- tpl_buffer_t *evict = __tpl_list_pop_front(buffer_cache, NULL);
-
- TPL_ASSERT(evict);
-
- tpl_object_unreference(&evict->base);
- }
-
- if (-1 == tpl_object_reference(&buffer->base))
- return TPL_FALSE;
-
- return __tpl_list_push_back(buffer_cache, (void *)buffer);
-}
-
-void
-__tpl_x11_surface_buffer_cache_clear(tpl_list_t *buffer_cache)
-{
- TPL_ASSERT(buffer_cache);
-
- __tpl_list_fini(buffer_cache, (tpl_free_func_t)tpl_object_unreference);
-}
-
-
-tpl_bool_t
-__tpl_x11_display_query_config(tpl_display_t *display,
- tpl_surface_type_t surface_type, int red_size,
- int green_size, int blue_size, int alpha_size,
- int color_depth, int *native_visual_id, tpl_bool_t *is_slow)
-{
- Display *native_display;
-
- TPL_IGNORE(alpha_size);
-
- TPL_ASSERT(display);
- TPL_ASSERT(display->native_handle);
-
- native_display = (Display *)display->native_handle;
-
- if (red_size != TPL_DONT_CARE || green_size != TPL_DONT_CARE ||
- blue_size != TPL_DONT_CARE || color_depth != TPL_DONT_CARE) {
- if (surface_type == TPL_SURFACE_TYPE_WINDOW) {
- XVisualInfo *visual_formats;
- int num_visual_formats;
- int i;
-
- visual_formats = XGetVisualInfo(native_display, 0, NULL,
- &num_visual_formats);
- TPL_ASSERT(visual_formats);
- for (i = 0; i < num_visual_formats; i++) {
- int clz[3];
- int col_size[3];
-
- clz[0] = __tpl_util_clz(visual_formats[i].red_mask);
- clz[1] = __tpl_util_clz(visual_formats[i].green_mask);
- clz[2] = __tpl_util_clz(visual_formats[i].blue_mask);
-
- col_size[0] = clz[1] - clz[0];
- col_size[1] = clz[2] - clz[1];
- col_size[2] = 32 - clz[2];
-
- if ((red_size == TPL_DONT_CARE || col_size[0] == red_size) &&
- (green_size == TPL_DONT_CARE || col_size[1] == green_size) &&
- (blue_size == TPL_DONT_CARE || col_size[2] == blue_size)) {
- if (native_visual_id != NULL)
- *native_visual_id = visual_formats[i].visualid;
-
- if (is_slow != NULL)
- *is_slow = TPL_FALSE;
-
- return TPL_TRUE;
- }
- }
- XFree(visual_formats);
- visual_formats = NULL;
- }
-
- if (surface_type == TPL_SURFACE_TYPE_PIXMAP) {
- XPixmapFormatValues *pixmap_formats;
- int num_pixmap_formats;
- int i;
-
- pixmap_formats = XListPixmapFormats(native_display, &num_pixmap_formats);
- TPL_ASSERT(pixmap_formats);
- for (i = 0; i < num_pixmap_formats; i++) {
- if (color_depth == TPL_DONT_CARE ||
- pixmap_formats[i].depth == color_depth) {
- if (is_slow != NULL)
- *is_slow = TPL_FALSE;
-
- return TPL_TRUE;
- }
- }
- XFree(pixmap_formats);
- pixmap_formats = NULL;
- }
-
- return TPL_FALSE;
-
- }
-
- return TPL_TRUE;
-}
-
-#if 0
-static void tpl_handle_and_free_error( Display *dpy, xcb_generic_error_t *error,
- const char *request_string )
-{
- char error_txt[256];
-
- if ( error ) {
- int len = sizeof(error_txt) / sizeof(error_txt[0]);
-
- XGetErrorText( dpy, error->error_code, error_txt, len );
- error_txt[ len - 1] = '\0';
- TPL_WARN("%s failed \"[%d]:%s\"", request_string, error->error_code,
- error_txt );
- free(error);
- } else {
- TPL_WARN("%s failed \"Unknown error\"", request_string );
- }
-}
-
-static tpl_bool_t tpl_check_reply_for_error(Display *dpy,
- xcb_generic_reply_t *reply, xcb_generic_error_t *error,
- const char *request_string)
-{
- tpl_bool_t retval = TPL_FALSE;
-
- if (error || reply == NULL) {
- tpl_handle_and_free_error( dpy, error, request_string );
- } else {
- retval = TPL_TRUE;
- }
-
- return retval;
-}
-static XVisualInfo *tpl_find_visual( Display *dpy, xcb_visualid_t visual_id )
-{
- XVisualInfo *visual_info;
- XVisualInfo visual_info_template;
- int matching_count;
-
- visual_info_template.visualid = visual_id;
-
- visual_info = XGetVisualInfo(dpy, VisualIDMask, &visual_info_template,
- &matching_count);
-
-
- return visual_info;
-}
-static int tpl_get_alpha_offset( int offset_r, int offset_g, int offset_b,
- int bpp )
-{
- int ret = -1;
-
- TPL_CHECK_ON_FALSE_ASSERT_FAIL( bpp == 32,
- "alpha only supported for 32bits pixel formats");
-
- if ( offset_r != 0 && offset_g != 0 && offset_b != 0 ) {
- ret = 0;
- } else if ( offset_r != 24 && offset_g != 24 && offset_b != 24 ) {
- ret = 24;
- } else {
- TPL_CHECK_ON_FALSE_ASSERT_FAIL(TPL_FALSE,
- "Alpha component has to be at either the offset 0 or 24");
- }
-
- return ret;
-}
-static int tpl_get_offset( unsigned long mask, int depth )
-{
- int res = -1;
- int count;
-
- for (count = 0; count < depth; count++) {
- if (mask & 1) {
- res = count;
- break;
- }
- mask = mask >> 1;
- }
-
- return res;
-}
-/* Convert the given combination of offsets and bpp into a color buffer format */
-static tpl_format_t tpl_offsets_to_color_buffer_format( int offset_r,
- int offset_g, int offset_b, int offset_a, int bpp )
-{
- tpl_format_t retval = TPL_FORMAT_INVALID;
-
- if ( offset_b == 11 && offset_g == 5 && offset_r == 0 && offset_a == -1 &&
- bpp == 16) {
- retval = TPL_FORMAT_BGR565;
- } else if ( offset_r == 11 && offset_g == 5 && offset_b == 0 &&
- offset_a == -1 && bpp == 16) {
- retval = TPL_FORMAT_RGB565;
- }
-
- else if ( offset_a == 24 && offset_b == 16 && offset_g == 8 &&
- offset_r == 0 && bpp == 32) {
- retval = TPL_FORMAT_ABGR8888;
- } else if ( offset_a == 24 && offset_r == 16 && offset_g == 8 &&
- offset_b == 0 && bpp == 32) {
- retval = TPL_FORMAT_ARGB8888;
- } else if ( offset_b == 24 && offset_g == 16 && offset_r == 8 &&
- offset_a == 0 && bpp == 32) {
- retval = TPL_FORMAT_BGRA8888;
- } else if ( offset_r == 24 && offset_g == 16 && offset_b == 8 &&
- offset_a == 0 && bpp == 32) {
- retval = TPL_FORMAT_RGBA8888;
- }
-
- else if ( offset_b == 16 && offset_g == 8 && offset_r == 0 &&
- offset_a == -1 && bpp == 32) {
- retval = TPL_FORMAT_XBGR8888;
- } else if ( offset_r == 16 && offset_g == 8 && offset_b == 0 &&
- offset_a == -1 && bpp == 32) {
- retval = TPL_FORMAT_XRGB8888;
- } else if ( offset_b == 24 && offset_g == 16 && offset_r == 8 &&
- offset_a == -1 && bpp == 32) {
- retval = TPL_FORMAT_BGRX8888;
- } else if ( offset_r == 24 && offset_g == 16 && offset_b == 8 &&
- offset_a == -1 && bpp == 32) {
- retval = TPL_FORMAT_RGBX8888;
- }
-
- else if ( offset_b == 16 && offset_g == 8 && offset_r == 0 &&
- offset_a == -1 && bpp == 24) {
- retval = TPL_FORMAT_BGR888;
- } else if ( offset_r == 16 && offset_g == 8 && offset_b == 0 &&
- offset_a == -1 && bpp == 24) {
- retval = TPL_FORMAT_RGB888;
- }
-
- else if ( offset_a == 12 && offset_b == 8 && offset_g == 4 &&
- offset_r == 0 && bpp == 16) {
- retval = TPL_FORMAT_ABGR4444;
- } else if ( offset_a == 12 && offset_r == 8 && offset_g == 4 &&
- offset_b == 0 && bpp == 16) {
- retval = TPL_FORMAT_ARGB4444;
- } else if ( offset_b == 12 && offset_g == 8 && offset_r == 4 &&
- offset_a == 0 && bpp == 16) {
- retval = TPL_FORMAT_BGRA4444;
- } else if ( offset_r == 12 && offset_g == 8 && offset_b == 4 &&
- offset_a == 0 && bpp == 16) {
- retval = TPL_FORMAT_RGBA4444;
- }
-
- else if ( offset_a == 15 && offset_b == 10 && offset_g == 5 &&
- offset_r == 0 && bpp == 16) {
- retval = TPL_FORMAT_ABGR1555;
- } else if ( offset_a == 15 && offset_r == 10 && offset_g == 5 &&
- offset_b == 0 && bpp == 16) {
- retval = TPL_FORMAT_ARGB1555;
- } else if ( offset_b == 11 && offset_g == 6 && offset_r == 1 &&
- offset_a == 0 && bpp == 16) {
- retval = TPL_FORMAT_BGRA5551;
- } else if ( offset_r == 11 && offset_g == 6 && offset_b == 1 &&
- offset_a == 0 && bpp == 16) {
- retval = TPL_FORMAT_RGBA5551;
- }
-
- else {
- TPL_WARN("Format not supported: offset_r=%d, offset_g=%d, offset_b=%d, offset_a=%d, bpp=%d",
- offset_r, offset_g, offset_b, offset_a, bpp);
- }
-
- return retval;
-}
-#endif
-
-tpl_bool_t
-__tpl_x11_display_get_window_info(tpl_display_t *display, tpl_handle_t window,
- int *width, int *height, tpl_format_t *format, int depth, int a_size)
-{
- Status x_res;
- XWindowAttributes att;
-
- TPL_IGNORE(depth);
- TPL_IGNORE(a_size);
-
- TPL_ASSERT(display);
- TPL_ASSERT(display->native_handle);
-
- x_res = XGetWindowAttributes((Display *)display->native_handle, (Window)window,
- &att);
-
- if (x_res != BadWindow) {
- if (format != NULL) {
- switch (att.depth) {
- case 32:
- *format = TPL_FORMAT_ARGB8888;
- break;
- case 24:
- *format = TPL_FORMAT_XRGB8888;
- break;
- case 16:
- *format = TPL_FORMAT_RGB565;
- break;
- default:
- *format = TPL_FORMAT_INVALID;
- break;
- }
- }
- if (width != NULL) *width = att.width;
- if (height != NULL) *height = att.height;
- return TPL_TRUE;
- }
-
- return TPL_FALSE;
-}
-
-tpl_bool_t
-__tpl_x11_display_get_pixmap_info(tpl_display_t *display, tpl_handle_t pixmap,
- int *width, int *height, tpl_format_t *format)
-{
- Status x_res;
- Window root = None;
- int x, y;
- unsigned int w, h, bw, d;
-
- TPL_ASSERT(display);
- TPL_ASSERT(display->native_handle);
-
- x_res = XGetGeometry((Display *)display->native_handle, (Pixmap)pixmap, &root,
- &x, &y, &w, &h, &bw, &d);
-
- if (x_res != BadDrawable) {
- if (format != NULL) {
- switch (d) {
- case 32:
- *format = TPL_FORMAT_ARGB8888;
- break;
- case 24:
- *format = TPL_FORMAT_XRGB8888;
- break;
- case 16:
- *format = TPL_FORMAT_RGB565;
- break;
- default:
- *format = TPL_FORMAT_INVALID;
- break;
- }
- }
- if (width != NULL) *width = w;
- if (height != NULL) *height = h;
- if (format != NULL)
- *format = TPL_FORMAT_ARGB8888;/*TODO: temp for argb8888*/
- return TPL_TRUE;
- }
-
- return TPL_FALSE;
-}
-
-void
-__tpl_x11_display_flush(tpl_display_t *display)
-{
- Display *native_display;
-
- TPL_ASSERT(display);
- TPL_ASSERT(display->native_handle);
-
- native_display = (Display *) display->native_handle;
- XFlush(native_display);
- XSync(native_display, False);
-}
-
-tpl_bool_t
-__tpl_x11_buffer_init(tpl_buffer_t *buffer)
-{
- TPL_IGNORE(buffer);
-
- return TPL_TRUE;
-}
-
-void
-__tpl_x11_buffer_fini(tpl_buffer_t *buffer)
-{
- TPL_ASSERT(buffer);
-
- if (buffer->backend.data) {
- tbm_bo_map((tbm_bo)buffer->backend.data, TBM_DEVICE_3D, TBM_OPTION_READ);
- tbm_bo_unmap((tbm_bo)buffer->backend.data);
- tbm_bo_unref((tbm_bo)buffer->backend.data);
- buffer->backend.data = NULL;
- }
-}
-
-void *
-__tpl_x11_buffer_map(tpl_buffer_t *buffer, int size)
-{
- tbm_bo bo;
- tbm_bo_handle handle;
-
- TPL_ASSERT(buffer);
- TPL_ASSERT(buffer->backend.data);
-
- bo = (tbm_bo) buffer->backend.data;
- handle = tbm_bo_get_handle(bo, TBM_DEVICE_CPU);
-
- return handle.ptr;
-}
-
-void
-__tpl_x11_buffer_unmap(tpl_buffer_t *buffer, void *ptr, int size)
-{
- TPL_IGNORE(buffer);
- TPL_IGNORE(ptr);
- TPL_IGNORE(size);
-
- /* Do nothing. */
-}
-
-tpl_bool_t
-__tpl_x11_buffer_lock(tpl_buffer_t *buffer, tpl_lock_usage_t usage)
-{
- tbm_bo bo;
- tbm_bo_handle handle;
-
- TPL_ASSERT(buffer);
- TPL_ASSERT(buffer->backend.data);
-
- bo = (tbm_bo) buffer->backend.data;
-
- TPL_OBJECT_UNLOCK(buffer);
-
- switch (usage) {
- case TPL_LOCK_USAGE_GPU_READ:
- handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_READ);
- break;
- case TPL_LOCK_USAGE_GPU_WRITE:
- handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_WRITE);
- break;
- case TPL_LOCK_USAGE_CPU_READ:
- handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_READ);
- break;
- case TPL_LOCK_USAGE_CPU_WRITE:
- handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_WRITE);
- break;
- default:
- TPL_ASSERT(TPL_FALSE);
- return TPL_FALSE;
- }
-
- TPL_OBJECT_LOCK(buffer);
-
-	if (handle.u32 == 0 && handle.ptr == NULL)
-		return TPL_FALSE;
-
- return TPL_TRUE;
-}
-
-void
-__tpl_x11_buffer_unlock(tpl_buffer_t *buffer)
-{
- tbm_bo bo;
-
- TPL_ASSERT(buffer);
- TPL_ASSERT(buffer->backend.data);
-
- bo = (tbm_bo) buffer->backend.data;
-
- TPL_OBJECT_UNLOCK(buffer);
- tbm_bo_unmap(bo);
- TPL_OBJECT_LOCK(buffer);
-}
-
-tpl_bool_t __tpl_x11_buffer_get_reused_flag(tpl_buffer_t *buffer)
-{
- TPL_ASSERT(buffer);
-
- if (DRI2_BUFFER_IS_REUSED(buffer->backend.flags))
- return TPL_TRUE;
- else
- return TPL_FALSE;
-}
-
-void __tpl_x11_display_wait_native(tpl_display_t *display)
-{
- Display *xlib_display;
-
- TPL_ASSERT(display);
-
- xlib_display = (Display *) display->native_handle;
- if (xlib_display != NULL) {
-		/* Leave events in the queue since we only care that they have arrived. */
- XSync(xlib_display, 0);
- }
-}
+++ /dev/null
-#include <stdlib.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <pthread.h>
-
-#include <X11/Xlib.h>
-#include <X11/Xutil.h>
-#include <X11/Xproto.h>
-#include <X11/Xlib-xcb.h>
-
-#include <libdrm/drm.h>
-#include <xf86drm.h>
-
-#include <dri2/dri2.h>
-#include <tbm_bufmgr.h>
-
-#include "tpl_internal.h"
-
-#include "tpl_x11_internal.h"
-
-
-typedef struct _tpl_x11_dri2_surface tpl_x11_dri2_surface_t;
-
-
-struct _tpl_x11_dri2_surface {
- int latest_post_interval;
- XserverRegion damage;
- tpl_list_t buffer_cache;
- tpl_buffer_t *latest_render_target;
-};
-
-
-
-static tpl_x11_global_t global = {
- 0,
- NULL,
- -1,
- NULL,
- TPL_X11_SWAP_TYPE_ASYNC,
- TPL_X11_SWAP_TYPE_SYNC
-};
-
-static Display *
-__tpl_x11_dri2_get_worker_display(void)
-{
- Display *display;
- pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
-
- pthread_mutex_lock(&mutex);
- TPL_ASSERT(global.display_count > 0);
-
- /* Use dummy display for worker thread. :-) */
- display = global.worker_display;
-
- pthread_mutex_unlock(&mutex);
-
- return display;
-}
-
-static void
-__tpl_x11_dri2_surface_post_internal(tpl_surface_t *surface, tpl_frame_t *frame,
- tpl_bool_t is_worker)
-{
- Display *display;
- Drawable drawable;
- CARD64 swap_count;
- tpl_x11_dri2_surface_t *x11_surface;
- XRectangle *xrects;
- XRectangle xrects_stack[TPL_STACK_XRECTANGLE_SIZE];
- int interval = frame->interval;
-
- TPL_ASSERT(frame);
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
-
- x11_surface = (tpl_x11_dri2_surface_t *)surface->backend.data;
-
- if (is_worker)
- display = __tpl_x11_dri2_get_worker_display();
- else
- display = surface->display->native_handle;
-
- drawable = (Drawable)surface->native_handle;
-
- if (interval < 1)
- interval = 1;
-
- if (interval != x11_surface->latest_post_interval) {
- DRI2SwapInterval(display, drawable, interval);
- x11_surface->latest_post_interval = interval;
- }
-
- if (__tpl_region_is_empty(&frame->damage)) {
- DRI2SwapBuffers(display, drawable, 0, 0, 0, &swap_count);
- } else {
- int i;
-
- if (frame->damage.num_rects > TPL_STACK_XRECTANGLE_SIZE) {
- xrects = (XRectangle *) malloc(sizeof(XRectangle) *
- frame->damage.num_rects);
- } else {
- xrects = &xrects_stack[0];
- }
-
- for (i = 0; i < frame->damage.num_rects; i++) {
- const int *rects = &frame->damage.rects[i * 4];
-
- xrects[i].x = rects[0];
- xrects[i].y = frame->buffer->height - rects[1] - rects[3];
- xrects[i].width = rects[2];
- xrects[i].height = rects[3];
- }
-
- if (x11_surface->damage == None) {
- x11_surface->damage =
- XFixesCreateRegion(display, xrects, frame->damage.num_rects);
- } else {
- XFixesSetRegion(display, x11_surface->damage,
- xrects, frame->damage.num_rects);
- }
-
- DRI2SwapBuffersWithRegion(display, drawable, x11_surface->damage, &swap_count);
- }
-
- frame->state = TPL_FRAME_STATE_POSTED;
-}
-
-static tpl_bool_t
-__tpl_x11_dri2_display_init(tpl_display_t *display)
-{
- pthread_mutex_t mutex;
-
- TPL_ASSERT(display);
-
- mutex = __tpl_x11_get_global_mutex();
-
- if (display->native_handle == NULL) {
- display->native_handle = XOpenDisplay(NULL);
- if (NULL == display->native_handle) {
- TPL_ERR("XOpenDisplay failed!");
- return TPL_FALSE;
- }
- }
-
- display->xcb_connection = XGetXCBConnection( (Display *)
- display->native_handle );
- if ( NULL == display->xcb_connection ) {
- TPL_WARN("XGetXCBConnection failed");
- }
-
- pthread_mutex_lock(&mutex);
-
- if (global.display_count == 0) {
- Bool xres = False;
- char *drv = NULL;
- char *dev = NULL;
- int major = -1;
- int minor = -1;
- int event_base = -1;
- int error_base = -1;
- Window root = 0;
- drm_magic_t magic;
-
- /* Open a dummy display connection. */
- global.worker_display = XOpenDisplay(NULL);
- if (NULL == global.worker_display) {
- TPL_ERR("XOpenDisplay failed!");
- return TPL_FALSE;
- }
-
- /* Get default root window. */
- root = DefaultRootWindow(global.worker_display);
-
- /* Initialize DRI2. */
- xres = DRI2QueryExtension(global.worker_display, &event_base, &error_base);
- if (True != xres) {
- TPL_ERR("DRI2QueryExtension failed!");
- return TPL_FALSE;
- }
-
- xres = DRI2QueryVersion(global.worker_display, &major, &minor);
- if (True != xres) {
- TPL_ERR("DRI2QueryVersion failed!");
- return TPL_FALSE;
- }
-
- xres = DRI2Connect(global.worker_display, root, &drv, &dev);
- if (True != xres) {
- TPL_ERR("DRI2Connect failed!");
- return TPL_FALSE;
- }
-
- /* Initialize buffer manager. */
- global.bufmgr_fd = open(dev, O_RDWR);
- drmGetMagic(global.bufmgr_fd, &magic);
-
- /* DRI2 authentication. */
- xres = DRI2Authenticate(global.worker_display, root, magic);
- if (True != xres) {
- TPL_ERR("DRI2Authenciate failed!");
- return TPL_FALSE;
- }
-
- global.bufmgr = tbm_bufmgr_init(global.bufmgr_fd);
-
- /* Initialize swap type configuration. */
- __tpl_x11_swap_str_to_swap_type(tpl_getenv(EGL_X11_WINDOW_SWAP_TYPE_ENV_NAME),
- &global.win_swap_type);
-
- __tpl_x11_swap_str_to_swap_type(tpl_getenv(EGL_X11_FB_SWAP_TYPE_ENV_NAME),
- &global.fb_swap_type);
- }
-
- global.display_count++;
- display->bufmgr_fd = global.bufmgr_fd;
-
- pthread_mutex_unlock(&mutex);
- return TPL_TRUE;
-}
-
-static void
-__tpl_x11_dri2_display_fini(tpl_display_t *display)
-{
-
- pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
-
- TPL_IGNORE(display);
-
- pthread_mutex_lock(&mutex);
-
- if (--global.display_count == 0) {
- tbm_bufmgr_deinit(global.bufmgr);
- close(global.bufmgr_fd);
- XCloseDisplay(global.worker_display);
-
- global.worker_display = NULL;
- global.bufmgr_fd = -1;
- global.bufmgr = NULL;
- }
-
- pthread_mutex_unlock(&mutex);
-
-}
-
-static tpl_bool_t
-__tpl_x11_dri2_surface_init(tpl_surface_t *surface)
-{
- Display *display;
- Drawable drawable;
- tpl_x11_dri2_surface_t *x11_surface;
- tpl_format_t format = TPL_FORMAT_INVALID;
-
- TPL_ASSERT(surface);
-
- if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
- if (!__tpl_x11_display_get_window_info(surface->display, surface->native_handle,
- &surface->width, &surface->height, NULL, 0, 0))
- return TPL_FALSE;
- } else {
- if (!__tpl_x11_display_get_pixmap_info(surface->display, surface->native_handle,
- &surface->width, &surface->height, &format))
- return TPL_FALSE;
- }
-
- x11_surface = (tpl_x11_dri2_surface_t *) calloc(1,
- sizeof(tpl_x11_dri2_surface_t));
-
- if (x11_surface == NULL) {
- TPL_ERR("Failed to allocate memory for X11 surface!");
- return TPL_FALSE;
- }
-
- x11_surface->latest_post_interval = -1;
- __tpl_list_init(&x11_surface->buffer_cache);
-
- display = (Display *)surface->display->native_handle;
- drawable = (Drawable)surface->native_handle;
- DRI2CreateDrawable(display, drawable);
-
- surface->backend.data = (void *)x11_surface;
-
- return TPL_TRUE;
-}
-
-static void
-__tpl_x11_dri2_surface_fini(tpl_surface_t *surface)
-{
- Display *display;
- Drawable drawable;
- tpl_x11_dri2_surface_t *x11_surface;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->display);
- TPL_ASSERT(surface->display->native_handle);
-
- display = (Display *)surface->display->native_handle;
- drawable = (Drawable)surface->native_handle;
- x11_surface = (tpl_x11_dri2_surface_t *)surface->backend.data;
-
- if (x11_surface) {
- __tpl_x11_surface_buffer_cache_clear(&x11_surface->buffer_cache);
-
- if (x11_surface->damage)
- XFixesDestroyRegion(display, x11_surface->damage);
-
- free(x11_surface);
- }
-
- DRI2DestroyDrawable(display, drawable);
- surface->backend.data = NULL;
-}
-
-
-static void
-__tpl_x11_dri2_surface_post(tpl_surface_t *surface, tpl_frame_t *frame)
-{
- TPL_ASSERT(frame);
- TPL_ASSERT(surface);
-
- __tpl_x11_dri2_surface_post_internal(surface, frame, TPL_TRUE);
-}
-
-static void
-__tpl_x11_surface_begin_frame(tpl_surface_t *surface)
-{
- tpl_frame_t *prev_frame;
-
- TPL_ASSERT(surface);
-
- if (surface->type != TPL_SURFACE_TYPE_WINDOW) {
- TPL_ERR("Surface type is not of window type!");
- return;
- }
-
- prev_frame = __tpl_surface_get_latest_frame(surface);
-
- if (prev_frame && prev_frame->state != TPL_FRAME_STATE_POSTED) {
- if ((DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
- global.fb_swap_type == TPL_X11_SWAP_TYPE_SYNC) ||
- (!DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
- global.win_swap_type == TPL_X11_SWAP_TYPE_SYNC)) {
- __tpl_surface_wait_all_frames(surface);
- }
- }
-}
-
-static tpl_bool_t
-__tpl_x11_surface_validate_frame(tpl_surface_t *surface)
-{
- tpl_x11_dri2_surface_t *x11_surface;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
-
- x11_surface = (tpl_x11_dri2_surface_t *) surface->backend.data;
-
- if (surface->type != TPL_SURFACE_TYPE_WINDOW)
- return TPL_TRUE;
-
- if (NULL == surface->frame)
- return TPL_TRUE;
-
- if ((DRI2_BUFFER_IS_FB(surface->frame->buffer->backend.flags) &&
- global.fb_swap_type == TPL_X11_SWAP_TYPE_LAZY) ||
- (!DRI2_BUFFER_IS_FB(surface->frame->buffer->backend.flags) &&
- global.win_swap_type == TPL_X11_SWAP_TYPE_LAZY)) {
- if (x11_surface->latest_render_target == surface->frame->buffer) {
- __tpl_surface_wait_all_frames(surface);
- return TPL_FALSE;
- }
- }
-
- return TPL_TRUE;
-}
-
-static void
-__tpl_x11_surface_end_frame(tpl_surface_t *surface)
-{
- tpl_frame_t *frame;
- tpl_x11_dri2_surface_t *x11_surface;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
-
- frame = __tpl_surface_get_latest_frame(surface);
- x11_surface = (tpl_x11_dri2_surface_t *) surface->backend.data;
-
- if (frame) {
- x11_surface->latest_render_target = frame->buffer;
-
- if ((DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
- global.fb_swap_type == TPL_X11_SWAP_TYPE_ASYNC) ||
- (!DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
- global.win_swap_type == TPL_X11_SWAP_TYPE_ASYNC)) {
- __tpl_x11_dri2_surface_post_internal(surface, frame, TPL_FALSE);
- }
- }
-}
-
-static tpl_buffer_t *
-__tpl_x11_dri2_surface_get_buffer(tpl_surface_t *surface,
- tpl_bool_t *reset_buffers)
-{
- tpl_buffer_t *buffer = NULL;
- Display *display;
- Drawable drawable;
- DRI2Buffer *dri2_buffers;
- uint32_t attachments[1] = { DRI2BufferBackLeft };
- tbm_bo bo;
- tbm_bo_handle bo_handle;
- int width, height, num_buffers;
- tpl_x11_dri2_surface_t *x11_surface;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
-
- x11_surface = (tpl_x11_dri2_surface_t *)surface->backend.data;
-
- if (surface->type == TPL_SURFACE_TYPE_PIXMAP)
- attachments[0] = DRI2BufferFrontLeft;
-
- display = (Display *)surface->display->native_handle;
- drawable = (Drawable)surface->native_handle;
-
- /* Get the current buffer via DRI2. */
- dri2_buffers = DRI2GetBuffers(display, drawable,
- &width, &height, attachments, 1, &num_buffers);
- if (dri2_buffers == NULL) {
- TPL_ERR("DRI2GetBuffers failed!");
- goto err_buffer;
- }
-
- if (DRI2_BUFFER_IS_REUSED(dri2_buffers[0].flags)) {
-		/* The buffer is flagged as reused, so it should already be in the buffer cache.
-		 * However, the server occasionally sets the reused flag on a buffer we have never
-		 * seen before; in that case simply fall through and import it as a new buffer. */
- buffer = __tpl_x11_surface_buffer_cache_find(&x11_surface->buffer_cache,
- dri2_buffers[0].name);
-
- if (buffer) {
- /* Need to update buffer flag */
- buffer->backend.flags = dri2_buffers[0].flags;
- /* just update the buffer age. */
-#if (TIZEN_FEATURES_ENABLE)
- buffer->age = DRI2_BUFFER_GET_AGE(dri2_buffers[0].flags);
-#endif
- goto done;
- }
- } else {
-		/* The server's buffer configuration has changed, so all previously
-		 * received buffers must be invalidated. */
- __tpl_x11_surface_buffer_cache_clear(&x11_surface->buffer_cache);
- }
-
- /* Create a TBM buffer object for the buffer name. */
- bo = tbm_bo_import(global.bufmgr, dri2_buffers[0].name);
-
- if (bo == NULL) {
- TPL_ERR("TBM bo import failed!");
- goto done;
- }
-
- bo_handle = tbm_bo_get_handle(bo, TBM_DEVICE_3D);
-
- /* Create tpl buffer. */
- buffer = __tpl_buffer_alloc(surface, (size_t) dri2_buffers[0].name,
- (int) bo_handle.u32,
- width, height, dri2_buffers[0].cpp * 8, dri2_buffers[0].pitch);
- if (NULL == buffer) {
- TPL_ERR("TPL buffer alloc failed!");
- goto err_buffer;
- }
-
-#if (TIZEN_FEATURES_ENABLE)
- buffer->age = DRI2_BUFFER_GET_AGE(dri2_buffers[0].flags);
-#endif
- buffer->backend.data = (void *)bo;
- buffer->backend.flags = dri2_buffers[0].flags;
-
- /* Add the buffer to the buffer cache. The cache will hold a reference to the buffer. */
- __tpl_x11_surface_buffer_cache_add(&x11_surface->buffer_cache, buffer);
- tpl_object_unreference(&buffer->base);
-
-done:
- if (reset_buffers) {
- /* Users use this output value to check if they have to reset previous buffers. */
- *reset_buffers = !DRI2_BUFFER_IS_REUSED(dri2_buffers[0].flags) ||
- width != surface->width || height != surface->height;
- }
-
- XFree(dri2_buffers);
-err_buffer:
- return buffer;
-}
-
-tpl_bool_t
-__tpl_display_choose_backend_x11_dri2(tpl_handle_t native_dpy)
-{
-	TPL_IGNORE(native_dpy);
-
-	/* An X11 display accepts any type of native handle, so other backends must be chosen before this one. */
-	return TPL_TRUE;
-}
-
-void
-__tpl_display_init_backend_x11_dri2(tpl_display_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_X11_DRI2;
- backend->data = NULL;
-
- backend->init = __tpl_x11_dri2_display_init;
- backend->fini = __tpl_x11_dri2_display_fini;
- backend->query_config = __tpl_x11_display_query_config;
- backend->get_window_info = __tpl_x11_display_get_window_info;
- backend->get_pixmap_info = __tpl_x11_display_get_pixmap_info;
- backend->flush = __tpl_x11_display_flush;
- backend->wait_native = __tpl_x11_display_wait_native;
-}
-
-void
-__tpl_surface_init_backend_x11_dri2(tpl_surface_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_X11_DRI2;
- backend->data = NULL;
-
- backend->init = __tpl_x11_dri2_surface_init;
- backend->fini = __tpl_x11_dri2_surface_fini;
- backend->begin_frame = __tpl_x11_surface_begin_frame;
- backend->end_frame = __tpl_x11_surface_end_frame;
- backend->validate_frame = __tpl_x11_surface_validate_frame;
- backend->get_buffer = __tpl_x11_dri2_surface_get_buffer;
- backend->post = __tpl_x11_dri2_surface_post;
-}
-
-void
-__tpl_buffer_init_backend_x11_dri2(tpl_buffer_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_X11_DRI2;
- backend->data = NULL;
-
- backend->init = __tpl_x11_buffer_init;
- backend->fini = __tpl_x11_buffer_fini;
- backend->map = __tpl_x11_buffer_map;
- backend->unmap = __tpl_x11_buffer_unmap;
- backend->lock = __tpl_x11_buffer_lock;
- backend->unlock = __tpl_x11_buffer_unlock;
-}
+++ /dev/null
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <pthread.h>
-#include <dlfcn.h>
-
-#include <X11/Xlib-xcb.h>
-#include <X11/Xlib.h>
-#include <X11/Xutil.h>
-#include <X11/Xproto.h>
-#include <X11/extensions/Xfixes.h>
-
-
-#include <libdrm/drm.h>
-#include <xf86drm.h>
-
-#include <xcb/xcb.h>
-#include <xcb/dri3.h>
-#include <xcb/xcbext.h>
-#include <xcb/present.h>
-#include <xcb/sync.h>
-
-#include <tbm_bufmgr.h>
-
-#include "tpl_internal.h"
-
-#include "tpl_x11_internal.h"
-
-static int dri3_max_back = 0; /* max number of back buffers */
-#define DRI3_NUM_BUFFERS 20
-#define DRI3_BUFFER_REUSED 0x08
-
-/* 2015-04-15 joonbum.ko@samsung.com */
-/* Add macro function for pitch align calculation.*/
-#define SIZE_ALIGN( value, base ) (((value) + ((base) - 1)) & ~((base) - 1))
-#define ALIGNMENT_PITCH_ARGB 64
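-/* For example, a 1080-pixel-wide ARGB8888 buffer (cpp == 32) needs
- * (1080 * 32) >> 3 == 4320 bytes per row, which SIZE_ALIGN rounds up to
- * 4352, the next multiple of ALIGNMENT_PITCH_ARGB (64). */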
-
-
-#define USE_FENCE 0
-
-typedef struct _tpl_x11_dri3_surface tpl_x11_dri3_surface_t;
-
-struct _tpl_x11_dri3_surface {
- int latest_post_interval;
- XserverRegion damage;
- tpl_list_t buffer_cache;
- tpl_buffer_t *latest_render_target;
-
- void *drawable;
-};
-
-enum dri3_buffer_type {
- dri3_buffer_back = 0,
- dri3_buffer_front = 1
-};
-
-enum dri3_buffer_status {
- dri3_buffer_idle = 0,
- dri3_buffer_busy = 1,
- dri3_buffer_posted = 2
-};
-
-typedef struct _dri3_buffer {
- tbm_bo tbo;
- uint32_t pixmap;
- enum dri3_buffer_status status; /* Set on swap, cleared on IdleNotify */
- void *driverPrivate;
-
- /*param of buffer */
- uint32_t size;
- uint32_t pitch;
- uint32_t cpp;
- uint32_t flags;
- int32_t width, height;
- uint64_t last_swap;
- int32_t own_pixmap; /* We allocated the pixmap ID,
- free on destroy */
- uint32_t dma_buf_fd; /* fd of dma buffer */
- /* [BEGIN: 20141125-xuelian.bai] Add old dma fd to save old fd
- * before use new fd */
- /* 2015-04-08 joonbum.ko@samsung.com */
- /* Change old buffer name to old_bo_name from old_dma_fd */
- /* uint32_t old_dma_fd; */
- uint32_t old_bo_name;
- /* [END: 20141125-xuelian.bai] */
- enum dri3_buffer_type buffer_type; /* back=0,front=1 */
-
- /* [BEGIN: 20140119-leiba.sun] Add support for buffer age */
- uint32_t buffer_age;
- /* [END:20150119-leiba.sun] */
-} dri3_buffer;
-
-typedef struct _dri3_drawable {
- Display *dpy;
- XID xDrawable;
-
- tbm_bufmgr bufmgr; /* tbm bufmgr */
-
- int32_t width, height, depth;
- int32_t swap_interval;
- uint8_t have_back;
- uint8_t have_fake_front;
- tpl_bool_t is_pixmap; /*whether the drawable is pixmap*/
- uint8_t flipping; /*whether the drawable can use pageFlip*/
-
- uint32_t present_capabilities; /* Present extension capabilities*/
- uint64_t send_sbc; /* swap buffer counter */
- uint64_t recv_sbc;
- uint64_t ust, msc; /* Last received UST/MSC values */
- uint32_t send_msc_serial; /* Serial numbers for tracking
- wait_for_msc events */
- uint32_t recv_msc_serial;
-
- dri3_buffer *buffers[DRI3_NUM_BUFFERS]; /*buffer array of all buffers*/
- int cur_back;
-
- uint32_t stamp;
- xcb_present_event_t eid;
- xcb_special_event_t *special_event;
-} dri3_drawable;
-
-typedef struct _dri3_drawable_node {
- XID xDrawable;
- dri3_drawable *drawable;
-} dri3_drawable_node;
-
-static tpl_x11_global_t global = {
- 0,
- NULL,
- -1,
- NULL,
- TPL_X11_SWAP_TYPE_LAZY,
- TPL_X11_SWAP_TYPE_LAZY
-};
-
-static tpl_list_t dri3_drawable_list;
-static void
-dri3_free_render_buffer(dri3_drawable *pdraw, dri3_buffer *buffer);
-static void dri3_flush_present_events(dri3_drawable *priv);
-/* Wrapper around xcb_dri3_open*/
-static int
-dri3_open(Display *dpy, Window root, CARD32 provider)
-{
- xcb_dri3_open_cookie_t cookie;
- xcb_dri3_open_reply_t *reply;
- xcb_connection_t *c;
- int fd;
-
- TPL_ASSERT(dpy);
-
- c = XGetXCBConnection(dpy);
-
- cookie = xcb_dri3_open(c,
- root,
- provider);
-
- reply = xcb_dri3_open_reply(c, cookie, NULL);
- if (!reply) {
- TPL_ERR("XCB DRI3 open failed!");
- return -1;
- }
-
- if (reply->nfd != 1) {
- TPL_ERR("XCB DRI3 open reply failed!");
- free(reply);
- return -1;
- }
-
- fd = xcb_dri3_open_reply_fds(c, reply)[0];
- fcntl(fd, F_SETFD, FD_CLOEXEC);
-
- free(reply);
- return fd;
-}
-
-static tpl_bool_t
-dri3_display_init(Display *dpy)
-{
- /* Initialize DRI3 & DRM */
- xcb_connection_t *c;
- xcb_dri3_query_version_cookie_t dri3_cookie;
- xcb_dri3_query_version_reply_t *dri3_reply;
- xcb_present_query_version_cookie_t present_cookie;
- xcb_present_query_version_reply_t *present_reply;
- xcb_generic_error_t *error;
- const xcb_query_extension_reply_t *extension;
- xcb_extension_t xcb_dri3_id = { "DRI3", 0 };
- xcb_extension_t xcb_present_id = { "Present", 0 };
-
- TPL_ASSERT(dpy);
-
- c = XGetXCBConnection(dpy);
-
- xcb_prefetch_extension_data(c, &xcb_dri3_id);
- xcb_prefetch_extension_data(c, &xcb_present_id);
-
- extension = xcb_get_extension_data(c, &xcb_dri3_id);
- if (!(extension && extension->present)) {
- TPL_ERR("XCB get extension failed!");
- return TPL_FALSE;
- }
-
- extension = xcb_get_extension_data(c, &xcb_present_id);
- if (!(extension && extension->present)) {
- TPL_ERR("XCB get extension failed!");
- return TPL_FALSE;
- }
-
- dri3_cookie = xcb_dri3_query_version(c,
- XCB_DRI3_MAJOR_VERSION,
- XCB_DRI3_MINOR_VERSION);
- dri3_reply = xcb_dri3_query_version_reply(c, dri3_cookie, &error);
- if (!dri3_reply) {
- TPL_ERR("XCB version query failed!");
- free(error);
- return TPL_FALSE;
- }
- free(dri3_reply);
-
- present_cookie = xcb_present_query_version(c,
- XCB_PRESENT_MAJOR_VERSION,
- XCB_PRESENT_MINOR_VERSION);
- present_reply = xcb_present_query_version_reply(c, present_cookie, &error);
- if (!present_reply) {
- TPL_ERR("Present version query failed!");
- free(error);
- return TPL_FALSE;
- }
- free(present_reply);
- return TPL_TRUE;
-}
-
-static void *
-dri3_create_drawable(Display *dpy, XID xDrawable)
-{
- dri3_drawable *pdraw = NULL;
- xcb_connection_t *c;
- xcb_get_geometry_cookie_t geom_cookie;
- xcb_get_geometry_reply_t *geom_reply;
- int i;
- tpl_list_node_t *node;
- dri3_drawable_node *drawable_node;
-
- TPL_ASSERT(dpy);
-
- c = XGetXCBConnection(dpy);
-
-	/* Check the drawable list to see whether it has already been created. */
- node = __tpl_list_get_front_node(&dri3_drawable_list);
- while (node) {
- dri3_drawable_node *drawable = (dri3_drawable_node *) __tpl_list_node_get_data(
- node);
-
- if (drawable->xDrawable == xDrawable) {
- pdraw = drawable->drawable;
- return (void *)pdraw;/* Reuse old drawable */
- }
- node = __tpl_list_node_next(node);
- }
- pdraw = calloc(1, sizeof(*pdraw));
- if (NULL == pdraw) {
- TPL_ERR("Failed to allocate memory!");
- return NULL;
- }
-
- geom_cookie = xcb_get_geometry(c, xDrawable);
- geom_reply = xcb_get_geometry_reply(c, geom_cookie, NULL);
- if (NULL == geom_reply) {
- TPL_ERR("XCB get geometry failed!");
- free(pdraw);
- return NULL;
- }
-
- pdraw->bufmgr = global.bufmgr;
- pdraw->width = geom_reply->width;
- pdraw->height = geom_reply->height;
- pdraw->depth = geom_reply->depth;
- pdraw->is_pixmap = TPL_FALSE;
-
- free(geom_reply);
- pdraw->dpy = global.worker_display;
- pdraw->xDrawable = xDrawable;
-
- for (i = 0; i < dri3_max_back + 1; i++)
- pdraw->buffers[i] = NULL;
-
- /* Add new allocated drawable to drawable list */
- drawable_node = calloc(1, sizeof(dri3_drawable_node));
- if (NULL == drawable_node) {
- TPL_ERR("Failed to allocate memory for drawable node!");
- free(pdraw);
- return NULL;
- }
-
- drawable_node->drawable = pdraw;
- drawable_node->xDrawable = xDrawable;
- if (TPL_TRUE != __tpl_list_push_back(&dri3_drawable_list,
- (void *)drawable_node)) {
- TPL_ERR("List operation failed!");
- free(pdraw);
- free(drawable_node);
- return NULL;
- }
-
- return (void *)pdraw;
-}
-
-static void
-dri3_destroy_drawable(Display *dpy, XID xDrawable)
-{
- dri3_drawable *pdraw;
- xcb_connection_t *c;
- int i;
- tpl_list_node_t *node;
- dri3_drawable_node *drawable;
-
- TPL_ASSERT(dpy);
-
- c = XGetXCBConnection(dpy);
-
- /* Remove drawable from list */
- node = __tpl_list_get_front_node(&dri3_drawable_list);
- while (node) {
- drawable = (dri3_drawable_node *) __tpl_list_node_get_data(node);
-
- if (drawable->xDrawable == xDrawable) {
- pdraw = drawable->drawable;
-
- if (!pdraw)
- return;
-
- for (i = 0; i < dri3_max_back + 1; i++) {
- if (pdraw->buffers[i])
- dri3_free_render_buffer(pdraw, pdraw->buffers[i]);
- }
-
- if (pdraw->special_event)
- xcb_unregister_for_special_event(c, pdraw->special_event);
- free(pdraw);
- pdraw = NULL;
- __tpl_list_remove(node, free);
- return;
- }
-
- node = __tpl_list_node_next(node);
- }
-
-	/* If the drawable was not found, it has already been freed. */
- return;
-}
-
-/** dri3_update_drawable
- *
- * Called the first time we use the drawable and then
- * after we receive present configure notify events to
- * track the geometry of the drawable
- */
-static int
-dri3_update_drawable(void *loaderPrivate)
-{
- dri3_drawable *priv = loaderPrivate;
- xcb_connection_t *c;
- xcb_extension_t xcb_present_id = { "Present", 0 };
-
- TPL_ASSERT(priv);
- TPL_ASSERT(priv->dpy);
-
- c = XGetXCBConnection(priv->dpy);
-
- /* First time through, go get the current drawable geometry
- */ /*TODO*/
- if (priv->special_event == NULL) {
- xcb_get_geometry_cookie_t geom_cookie;
- xcb_get_geometry_reply_t *geom_reply;
- xcb_void_cookie_t cookie;
- xcb_generic_error_t *error;
- xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
- xcb_present_query_capabilities_reply_t *present_capabilities_reply;
-
-
- /* Try to select for input on the window.
- *
- * If the drawable is a window, this will get our events
- * delivered.
- *
- * Otherwise, we'll get a BadWindow error back from this
- * request which will let us know that the drawable is a
- * pixmap instead.
- */
- cookie = xcb_present_select_input_checked(c,
- (priv->eid = xcb_generate_id(c)),
- priv->xDrawable,
- XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
- XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
- XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
-
- present_capabilities_cookie = xcb_present_query_capabilities(c,
- priv->xDrawable);
-
- /* Create an XCB event queue to hold present events outside of the usual
- * application event queue
- */
- priv->special_event = xcb_register_for_special_xge(c,
- &xcb_present_id,
- priv->eid,
- &priv->stamp);
-
- geom_cookie = xcb_get_geometry(c, priv->xDrawable);
-
- geom_reply = xcb_get_geometry_reply(c, geom_cookie, NULL);
- if (NULL == geom_reply) {
- TPL_ERR("Failed to get geometry reply!");
- return TPL_FALSE;
- }
-
- priv->width = geom_reply->width;
- priv->height = geom_reply->height;
- priv->depth = geom_reply->depth;
- priv->is_pixmap = TPL_FALSE;
-
- free(geom_reply);
-
- /* Check to see if our select input call failed. If it failed
- * with a BadWindow error, then assume the drawable is a pixmap.
- * Destroy the special event queue created above and mark the
- * drawable as a pixmap
- */
-
- error = xcb_request_check(c, cookie);
-
- present_capabilities_reply = xcb_present_query_capabilities_reply(c,
- present_capabilities_cookie,
- NULL);
-
- if (present_capabilities_reply) {
- priv->present_capabilities = present_capabilities_reply->capabilities;
- free(present_capabilities_reply);
- } else
- priv->present_capabilities = 0;
-
- if (error) {
- if (error->error_code != BadWindow) {
- free(error);
- return TPL_FALSE;
- }
- priv->is_pixmap = TPL_TRUE;
- xcb_unregister_for_special_event(c, priv->special_event);
- priv->special_event = NULL;
- }
- }
- dri3_flush_present_events(priv);
- return TPL_TRUE;
-}
-
-/******************************************
- * dri3_handle_present_event
- * Process Present event from xserver
- *****************************************/
-static void
-dri3_handle_present_event(dri3_drawable *priv, xcb_present_generic_event_t *ge)
-{
- TPL_ASSERT(priv);
- TPL_ASSERT(ge);
-
- switch (ge->evtype) {
- case XCB_PRESENT_CONFIGURE_NOTIFY: {
- TRACE_BEGIN("DRI3:PRESENT_CONFIGURE_NOTIFY");
- xcb_present_configure_notify_event_t *ce = (void *) ge;
- priv->width = ce->width;
- priv->height = ce->height;
- TRACE_END();
- break;
- }
-
- case XCB_PRESENT_COMPLETE_NOTIFY: {
- TRACE_BEGIN("DRI3:PRESENT_COMPLETE_NOTIFY");
- xcb_present_complete_notify_event_t *ce = (void *) ge;
- /* Compute the processed SBC number from the received
- * 32-bit serial number merged with the upper 32-bits
- * of the sent 64-bit serial number while checking for
- * wrap
- */
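-		/* For example, with send_sbc == 0x100000002 and ce->serial == 0xffffffff
-		 * the merge yields 0x1ffffffff, which exceeds send_sbc, so 0x100000000 is
-		 * subtracted and the completion is attributed to SBC 0xffffffff. */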
- if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
- priv->recv_sbc =
- (priv->send_sbc & 0xffffffff00000000LL) |
- ce->serial;
- if (priv->recv_sbc > priv->send_sbc)
- priv->recv_sbc -= 0x100000000;
- switch (ce->mode) {
- case XCB_PRESENT_COMPLETE_MODE_FLIP:
- priv->flipping = 1;
- break;
- case XCB_PRESENT_COMPLETE_MODE_COPY:
- priv->flipping = 0;
- break;
- }
- } else {
- priv->recv_msc_serial = ce->serial;
- }
-
- priv->ust = ce->ust;
- priv->msc = ce->msc;
- TRACE_END();
- break;
- }
-
- case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
- xcb_present_idle_notify_event_t *ie = (void *) ge;
- uint32_t b;
-
- for (b = 0; b < sizeof (priv->buffers) / sizeof (priv->buffers[0]); b++) {
- dri3_buffer *buf = priv->buffers[b];
-
- if (buf && buf->pixmap == ie->pixmap) {
- TRACE_MARK("IDLE:%d", tbm_bo_export(priv->buffers[b]->tbo));
- buf->status = dri3_buffer_idle;
- break;
- }
- }
- break;
- }
- }
- free(ge);
-}
-
-/******************************************************
-* dri3_flush_present_events
-*
-* Process any Present events that have been received from the X server.
-* Called when getting a buffer or swapping buffers.
-******************************************************/
-static void
-dri3_flush_present_events(dri3_drawable *priv)
-{
- xcb_connection_t *c;
-
- TPL_ASSERT(priv);
- TPL_ASSERT(priv->dpy);
-
- c = XGetXCBConnection(priv->dpy);
-
- TRACE_BEGIN("DRI3:FLUSH_PRESENT_EVENTS");
- /* Check to see if any configuration changes have occurred
- * since we were last invoked
- */
- if (priv->special_event) {
- xcb_generic_event_t *ev;
-
- while ((ev = xcb_poll_for_special_event(c, priv->special_event)) != NULL) {
- xcb_present_generic_event_t *ge = (void *) ev;
- dri3_handle_present_event(priv, ge);
- }
- }
- TRACE_END();
-}
-
-static tpl_bool_t
-dri3_wait_for_notify(xcb_connection_t *c, dri3_drawable *priv)
-{
- xcb_generic_event_t *ev;
- xcb_present_generic_event_t *ge;
-
- TPL_ASSERT(c);
- TPL_ASSERT(priv);
-
- TRACE_BEGIN("TPL:DRI3:WAIT_FOR_NOTIFY");
-
- if (((uint32_t)priv->send_sbc) == 0) {
- TRACE_END();
- return TPL_TRUE;
- }
- for (;;) {
- if ( (uint32_t)priv->send_sbc <= (uint32_t)priv->recv_sbc ) {
- TRACE_END();
- return TPL_TRUE;
- }
-
- xcb_flush(c);
- ev = xcb_wait_for_special_event(c, priv->special_event);
- if (!ev) {
- TRACE_END();
- return TPL_FALSE;
- }
- ge = (void *) ev;
- dri3_handle_present_event(priv, ge);
- }
-}
-
-/** dri3_find_back
- *
- * Find an idle back buffer. If there isn't one, then
- * wait for a present idle notify event from the X server
- */
-static int
-dri3_find_back(xcb_connection_t *c, dri3_drawable *priv)
-{
- int b;
- xcb_generic_event_t *ev;
- xcb_present_generic_event_t *ge;
-
- TPL_ASSERT(c);
- TPL_ASSERT(priv);
-
- for (;;) {
- for (b = 0; b < dri3_max_back; b++) {
- int id = (b + priv->cur_back + 1) % dri3_max_back;
- int pre_id = (id + dri3_max_back - 2) % dri3_max_back;
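-			/* For example, with dri3_max_back == 3 and cur_back == 1 the loop probes
-			 * slots 2, 0, 1 in that order; pre_id refers to the slot two positions
-			 * behind id in the ring, and is demoted to idle below unless still posted. */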
-
- dri3_buffer *buffer = priv->buffers[id];
- dri3_buffer *pre_buffer = priv->buffers[pre_id];
-
- if (pre_buffer && pre_buffer->status != dri3_buffer_posted)
- pre_buffer->status = dri3_buffer_idle;
-
- if (!buffer || buffer->status == dri3_buffer_idle) {
- priv->cur_back = id;
- return id;
- }
- }
-
- xcb_flush(c);
- TRACE_BEGIN("DDK:DRI3:XCBWAIT");
- ev = xcb_wait_for_special_event(c, priv->special_event);
- TRACE_END();
-
- if (!ev) {
- return -1;
- }
-
- ge = (void *) ev;
- dri3_handle_present_event(priv, ge);
- }
-}
-
-/** dri3_alloc_render_buffer
- *
- * allocate a render buffer and create an X pixmap from that
- *
- */
-static dri3_buffer *
-dri3_alloc_render_buffer(dri3_drawable *priv,
- int width, int height, int depth, int cpp)
-{
- Display *dpy;
- Drawable draw;
- dri3_buffer *buffer = NULL;
- xcb_connection_t *c;
- xcb_pixmap_t pixmap = 0;
- int buffer_fd;
- int size;
- tbm_bo_handle handle;
- xcb_void_cookie_t cookie;
- xcb_generic_error_t *error;
-
- TPL_ASSERT(priv);
- TPL_ASSERT(priv->dpy);
-
- dpy = priv->dpy;
- draw = priv->xDrawable;
-
- c = XGetXCBConnection(dpy);
-
- /* Allocate the image from the driver
- */
- buffer = calloc(1, sizeof (dri3_buffer));
- if (!buffer) {
- TPL_ERR("Failed to allocate buffer!");
- goto no_buffer;
- }
-
- /* size = height * width * depth/8;*/
- /* size = ((width * 32)>>5) * 4 * height; */
- /* calculate pitch and size by input parameter cpp */
- /* buffer->pitch = width*(cpp/8); */
-
-	/* Modify the calculation of pitch (stride) */
- buffer->pitch = SIZE_ALIGN((width * cpp) >> 3, ALIGNMENT_PITCH_ARGB);
-
- size = buffer->pitch * height;
-
- buffer->tbo = tbm_bo_alloc(priv->bufmgr, size, TBM_BO_DEFAULT);
- if (NULL == buffer->tbo) {
- TPL_ERR("TBM bo alloc failed!");
- free(buffer);
- goto no_buffer;
- }
-
- /* dup tbo, because X will close it */
- /* 2015-04-08 joonbum.ko@samsung.com */
- /* delete tbm_bo_get_handle function call and
- add tbm_bo_export_fd function call */
-
- handle = tbm_bo_get_handle(buffer->tbo, TBM_DEVICE_3D);
- buffer_fd = dup(handle.u32);
-
- /* buffer_fd = tbm_bo_export_fd(buffer->tbo);*/
- /* 2015-04-08 joonbum.ko@samsung.com */
- /* disable the value dma_buf_fd */
- buffer->dma_buf_fd = handle.u32;
- buffer->size = size;
- cookie = xcb_dri3_pixmap_from_buffer_checked(c,
- (pixmap = xcb_generate_id(c)),
- draw,
- buffer->size,
- width, height, buffer->pitch,
- depth, cpp,
- buffer_fd);
- error = xcb_request_check( c, cookie);
- /* 2015-04-08 joonbum.ko@samsung.com */
-	/* buffer_fd is no longer needed here */
- /* close(buffer_fd);*/
-
- if (error) {
- TPL_ERR("No pixmap!");
- goto no_pixmap;
- }
- if (0 == pixmap) {
- TPL_ERR("No pixmap!");
- goto no_pixmap;
- }
-
- buffer->pixmap = pixmap;
- buffer->own_pixmap = TPL_TRUE;
- buffer->width = width;
- buffer->height = height;
- buffer->flags = 0;
-
- return buffer;
-no_pixmap:
- tbm_bo_unref(buffer->tbo);
- free(buffer);
-no_buffer:
- return NULL;
-}
-
-/** dri3_free_render_buffer
- *
- * Free everything associated with one render buffer including pixmap, fence
- * stuff
- */
-static void
-dri3_free_render_buffer(dri3_drawable *pdraw, dri3_buffer *buffer)
-{
- xcb_connection_t *c;
-
- TPL_ASSERT(pdraw);
- TPL_ASSERT(buffer);
- TPL_ASSERT(pdraw->dpy);
-
- c = XGetXCBConnection(pdraw->dpy);
-
- /* 2015-04-08 joonbum.ko@samsung.com */
-	/* if the drawable is a pixmap, only the buffer itself needs to be freed */
- if (!pdraw->is_pixmap) {
- if (buffer->own_pixmap)
- xcb_free_pixmap(c, buffer->pixmap);
- tbm_bo_unref(buffer->tbo);
-		/* a reference was taken at creation and is released here on free; see dri3_get_pixmap_buffer */
- }
-
- buffer = NULL;
-}
-
-
-/** dri3_get_window_buffer
- *
- * Find a front or back buffer, allocating new ones as necessary
- */
-
-/* 2015-04-08 joonbum.ko@samsung.com */
-/* Change the value of old_dma_fd to old_bo_name */
-static dri3_buffer *
-dri3_get_window_buffer(void *loaderPrivate, int cpp)
-{
- dri3_drawable *priv = loaderPrivate;
- xcb_connection_t *c;
- dri3_buffer *backbuffer = NULL;
- int back_buf_id, reuse = 1;
- uint32_t old_bo_name = 0;
-
- TPL_ASSERT(priv);
- TPL_ASSERT(priv->dpy);
-
- c = XGetXCBConnection(priv->dpy);
-
- TRACE_BEGIN("DDK:DRI3:GETBUFFERS:WINDOW");
- TRACE_BEGIN("DDK:DRI3:FINDBACK");
- back_buf_id = dri3_find_back(c, priv);
- TRACE_END();
-
- backbuffer = priv->buffers[back_buf_id];
-
- /* Allocate a new buffer if there isn't an old one, or if that
- * old one is the wrong size.
- */
- if (!backbuffer || backbuffer->width != priv->width ||
- backbuffer->height != priv->height ) {
- dri3_buffer *new_buffer;
-
- /* Allocate the new buffers
- */
- TRACE_BEGIN("DDK:DRI3:ALLOCRENDERBUFFER");
- new_buffer = dri3_alloc_render_buffer(priv,
- priv->width, priv->height, priv->depth, cpp);
- TRACE_END();
-
- if (!new_buffer) {
- TRACE_END();
- return NULL;
- }
- if (backbuffer) {
-			/* [BEGIN: 20141125-xuelian.bai] The size does not match, so this buffer
-			 * must be removed from the buffer cache; save the old buffer's
-			 * bo name first. */
- old_bo_name = tbm_bo_export(backbuffer->tbo);
- /* [END: 20141125-xuelian.bai] */
- TRACE_BEGIN("DDK:DRI3:FREERENDERBUFFER");
- dri3_free_render_buffer(priv, backbuffer);
- TRACE_END();
- }
- backbuffer = new_buffer;
- backbuffer->buffer_type = dri3_buffer_back;
- backbuffer->old_bo_name = old_bo_name;
- priv->buffers[back_buf_id] = backbuffer;
- reuse = 0;
- }
-
- backbuffer->flags = DRI2_BUFFER_FB;
- backbuffer->status = dri3_buffer_busy;
- if (reuse) {
- backbuffer->flags |= DRI2_BUFFER_REUSED;
- }
- /* Return the requested buffer */
- TRACE_END();
-
- TRACE_MARK("%d", tbm_bo_export(backbuffer->tbo));
-
- return backbuffer;
-}
-
-/* 2015-04-07 joonbum.ko@samsung.com */
-/* modify internal flow of dri3_get_pixmap_buffer */
-/* add 3rd argument for stride information */
-static dri3_buffer *
-dri3_get_pixmap_buffer(void *loaderPrivate, Pixmap pixmap,
- int cpp)/*TODO:format*/
-{
- dri3_drawable *pdraw = loaderPrivate;
- dri3_buffer *buffer = NULL;
- xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
- xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
- int *fds;
- Display *dpy;
- xcb_connection_t *c;
- tbm_bo tbo = NULL;
-
- TPL_ASSERT(pdraw);
- TPL_ASSERT(pdraw->dpy);
-
- TRACE_BEGIN("DDK:DRI3:GETBUFFERS:PIXMAP");
-
- dpy = pdraw->dpy;
- c = XGetXCBConnection(dpy);
-
- /* Get an FD for the pixmap object
- */
- bp_cookie = xcb_dri3_buffer_from_pixmap(c, pixmap);
- bp_reply = xcb_dri3_buffer_from_pixmap_reply(c, bp_cookie, NULL);
- if (!bp_reply) {
- goto no_image;
- }
- fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
-
- tbo = tbm_bo_import_fd(pdraw->bufmgr, (tbm_fd)(*fds));
-
- if (!buffer) {
- buffer = calloc(1, sizeof (dri3_buffer));
- if (!buffer)
- goto no_buffer;
- }
-
- buffer->tbo = tbo;
- /* 2015-04-08 joonbum.ko@samsung.com */
- /* disable the value dma_buf_fd */
- buffer->dma_buf_fd = *fds;
- buffer->pixmap = pixmap;
- buffer->own_pixmap = TPL_FALSE;
- buffer->width = bp_reply->width;
- buffer->height = bp_reply->height;
- buffer->buffer_type = dri3_buffer_front;
- buffer->flags = DRI3_BUFFER_REUSED;
- /* 2015-04-07 joonbum.ko@samsung.com */
- /* add buffer information(cpp, pitch, size) */
- buffer->cpp = cpp;
- buffer->pitch = bp_reply->stride;
- buffer->size = buffer->pitch * bp_reply->height;
-
- pdraw->buffers[dri3_max_back] = buffer;
-
- /* 2015-04-08 joonbum.ko@samsung.com */
-	/* the fd is no longer needed after import */
- close(*fds);
- TRACE_END();
- return buffer;
-
- /* 2015-04-09 joonbum.ko@samsung.com */
-	/* change the label order */
-no_image:
- if (buffer)
- free(buffer);
-no_buffer:
- TRACE_END();
- return NULL;
-}
-
-static dri3_buffer *dri3_get_buffers(XID drawable, void *loaderPrivate,
- unsigned int *attachments, int cpp)
-{
- dri3_drawable *priv = loaderPrivate;
- dri3_buffer *buffer = NULL;
-
- TPL_ASSERT(priv);
- TPL_ASSERT(attachments);
-
- TRACE_BEGIN("DDK:DRI3:GETBUFFERS");
-
- if (drawable != priv->xDrawable) {
- TPL_ERR("Drawable mismatch!");
- TRACE_END();
- return NULL;
- }
-
- if (!dri3_update_drawable(loaderPrivate)) {
- TPL_ERR("Update drawable failed!");
- TRACE_END();
- return NULL;
- }
-
- if (*attachments == dri3_buffer_front)
- buffer = dri3_get_pixmap_buffer(loaderPrivate,
- priv->xDrawable, cpp);
- else
- buffer = dri3_get_window_buffer(loaderPrivate, cpp);
-
-	if (NULL == buffer) {
-		TPL_ERR("Get buffer failed!");
-		TRACE_END();
-		return NULL;
-	}
-
- TRACE_END();
-
- return buffer;
-}
-
-/******************************************************
- * dri3_swap_buffers
- * Swap the back buffer with the front buffer.
- * Make the current back buffer visible using the Present extension.
- * If region_t == 0, swap the whole frame; otherwise swap only the given region.
- ******************************************************/
-static int64_t
-dri3_swap_buffers(Display *dpy, void *priv, tpl_buffer_t *frame_buffer,
- int interval, XID region_t)
-{
-
- int64_t ret = -1;
- int64_t target_msc = 0;
- int64_t divisor = 0;
- int64_t remainder = 0;
- xcb_connection_t *c;
- dri3_drawable *pDrawable;
- dri3_buffer *back = NULL;
- int i = 0;
-
- TPL_ASSERT(dpy);
- TPL_ASSERT(priv);
- TPL_ASSERT(frame_buffer);
-
- c = XGetXCBConnection(dpy);
-
- pDrawable = (dri3_drawable *) priv;
- back = (dri3_buffer *) frame_buffer->backend.data;
-
- if ((back == NULL) || (pDrawable == NULL) || (pDrawable->is_pixmap != 0)) {
- TRACE_END();
- return ret;
- }
-
- /* Process any present events that have been received from the X
-	 * server until a complete notify is received.
- */
- if (!dri3_wait_for_notify(c, pDrawable)) {
- TRACE_END();
- return ret;
- }
- /* [BEGIN: 20140119-leiba.sun] Add support for buffer age
-	 * When swapping buffers, increase the buffer age of every back buffer */
- for (i = 0; i < dri3_max_back; i++) {
- if ((pDrawable->buffers[i] != NULL) && (pDrawable->buffers[i]->buffer_age > 0))
- pDrawable->buffers[i]->buffer_age++;
- }
- back->buffer_age = 1;
- /* [END:20150119-leiba.sun] */
- /* set busy flag */
- back->status = dri3_buffer_posted;
-
- /* Compute when we want the frame shown by taking the last known
- * successful MSC and adding in a swap interval for each outstanding
- * swap request
- */
- if (pDrawable->swap_interval != interval)
- pDrawable->swap_interval = interval;
-
- ++pDrawable->send_sbc;
- if (target_msc == 0)
- target_msc = pDrawable->msc + pDrawable->swap_interval *
- (pDrawable->send_sbc - pDrawable->recv_sbc);
-
- back->last_swap = pDrawable->send_sbc;
-
- TRACE_MARK("SWAP:%d", tbm_bo_export(back->tbo)) ;
- xcb_present_pixmap(c,
- pDrawable->xDrawable, /* dst */
- back->pixmap, /* src */
- (uint32_t) pDrawable->send_sbc,
- 0, /* valid */
- region_t, /* update */
- 0, /* x_off */
- 0, /* y_off */
- None, /* target_crtc */
- None,
- 0,
- XCB_PRESENT_OPTION_NONE,
- /*target_msc*/0,
- divisor,
- remainder, 0, NULL);
-
- ret = (int64_t) pDrawable->send_sbc;
-
- xcb_flush(c);
-
- ++(pDrawable->stamp);
-
- return ret;
-}
-
-tpl_bool_t
-__tpl_x11_dri3_buffer_init(tpl_buffer_t *buffer)
-{
- TPL_IGNORE(buffer);
- return TPL_TRUE;
-}
-
-void
-__tpl_x11_dri3_buffer_fini(tpl_buffer_t *buffer)
-{
- dri3_buffer *back;
-
- TPL_ASSERT(buffer);
-
- back = (dri3_buffer *)buffer->backend.data;
-
- if (back) {
- tbm_bo bo = back->tbo;
- tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_READ);
- tbm_bo_unmap(bo);
- tbm_bo_unref(bo);
- buffer->backend.data = NULL;
- free(back);
- }
-}
-
-void *
-__tpl_x11_dri3_buffer_map(tpl_buffer_t *buffer, int size)
-{
- tbm_bo bo;
- tbm_bo_handle handle;
-
- TPL_ASSERT(buffer);
-
- TPL_IGNORE(size);
- bo = ((dri3_buffer *)buffer->backend.data)->tbo;
- TPL_ASSERT(bo);
-
- handle = tbm_bo_get_handle(bo, TBM_DEVICE_CPU);
- return handle.ptr;
-}
-
-void
-__tpl_x11_dri3_buffer_unmap(tpl_buffer_t *buffer, void *ptr, int size)
-{
- TPL_IGNORE(buffer);
- TPL_IGNORE(ptr);
- TPL_IGNORE(size);
-
- /* Do nothing. */
-}
-
-tpl_bool_t
-__tpl_x11_dri3_buffer_lock(tpl_buffer_t *buffer, tpl_lock_usage_t usage)
-{
- tbm_bo bo;
- tbm_bo_handle handle;
- dri3_buffer *back;
-
- TPL_ASSERT(buffer);
- TPL_ASSERT(buffer->backend.data);
-
- back = (dri3_buffer *) buffer->backend.data;
- bo = back->tbo;
-
- if (NULL == bo) {
- TPL_ERR("bo is NULL!");
- return TPL_FALSE;
- }
-
- TRACE_BEGIN("TPL:BUFFERLOCK:%d", tbm_bo_export(bo));
-
- TPL_OBJECT_UNLOCK(buffer);
-
- switch (usage) {
- case TPL_LOCK_USAGE_GPU_READ:
- handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_READ);
- break;
- case TPL_LOCK_USAGE_GPU_WRITE:
- handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_WRITE);
- break;
- case TPL_LOCK_USAGE_CPU_READ:
- handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_READ);
- break;
- case TPL_LOCK_USAGE_CPU_WRITE:
- handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_WRITE);
- break;
- default:
- TPL_ASSERT(TPL_FALSE);
- return TPL_FALSE;
- }
-
- TPL_OBJECT_LOCK(buffer);
-
-	if (handle.u32 == 0 && handle.ptr == NULL) {
-		TRACE_END();
-		return TPL_FALSE;
-	}
- TRACE_END();
- return TPL_TRUE;
-}
-
-void
-__tpl_x11_dri3_buffer_unlock(tpl_buffer_t *buffer)
-{
- dri3_buffer *back;
- tbm_bo bo;
-
- TPL_ASSERT(buffer);
-
- back = (dri3_buffer *) buffer->backend.data;
- bo = back->tbo;
-
- if (NULL == bo) {
- TPL_ERR("bo is NULL!");
- return;
- }
-
- TRACE_BEGIN("TPL:BUFFERUNLOCK:%d", tbm_bo_export(back->tbo));
-
- TPL_OBJECT_UNLOCK(buffer);
- tbm_bo_unmap(bo);
- TPL_OBJECT_LOCK(buffer);
-
- TRACE_END();
-}
-
-static Display *
-__tpl_x11_dri3_get_worker_display(void)
-{
- Display *display;
- pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
-
- pthread_mutex_lock(&mutex);
- TPL_ASSERT(global.display_count > 0);
-
- /* Use dummy display for worker thread. :-) */
- display = global.worker_display;
-
- pthread_mutex_unlock(&mutex);
-
- return display;
-}
-
-static tpl_bool_t
-__tpl_x11_dri3_display_init(tpl_display_t *display)
-{
- pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
-
- TPL_ASSERT(display);
-
- XInitThreads();
- if (display->native_handle == NULL) {
- display->native_handle = XOpenDisplay(NULL);
- TPL_ASSERT(display->native_handle != NULL);
- }
-
- pthread_mutex_lock(&mutex);
-
- if (global.display_count == 0) {
- tpl_bool_t xres = TPL_FALSE;
- Window root = 0;
- drm_magic_t magic;
-
- /* Open a dummy display connection. */
-
- global.worker_display = XOpenDisplay(NULL);
- TPL_ASSERT(global.worker_display != NULL);
-
- /* Get default root window. */
- root = DefaultRootWindow(global.worker_display);
-
- /* Initialize DRI3. */
- xres = dri3_display_init(global.worker_display);
- TPL_ASSERT(xres == TPL_TRUE);
-
-
- /* Initialize buffer manager. */
- global.bufmgr_fd = dri3_open(global.worker_display, root, 0);
- drmGetMagic(global.bufmgr_fd, &magic);
- global.bufmgr = tbm_bufmgr_init(global.bufmgr_fd);
-
- __tpl_list_init(&dri3_drawable_list);
-
- /* [BEGIN: 20141125-xuelian.bai] Add env for setting number of back buffers*/
- {
- const char *backend_env = NULL;
- int count = 0;
- backend_env = tpl_getenv("MALI_EGL_DRI3_BUF_NUM");
- /* 2015-05-13 joonbum.ko@samsung.com */
- /* Change the value of dri3_max_back 5 to 3 */
- if (!backend_env || strlen(backend_env) == 0)
- dri3_max_back = 3; /* Default value is 3*/
- else {
- count = atoi(backend_env);
-				if (count == 1)/* one buffer does not work; the minimum is 2 */
- dri3_max_back = 2;
- else if (count < 20)
- dri3_max_back = count;
- else
- dri3_max_back = 5;
- }
- }
- /* [END: 20141125-xuelian.bai] */
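-		/* For example, MALI_EGL_DRI3_BUF_NUM unset or empty keeps the default of 3,
-		 * a value of 1 is bumped to 2, values from 2 to 19 are used as-is, and 20 or
-		 * more falls back to 5. */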
- }
-
- global.display_count++;
- display->bufmgr_fd = global.bufmgr_fd;
-
- pthread_mutex_unlock(&mutex);
- return TPL_TRUE;
-}
-
-static void
-__tpl_x11_dri3_display_fini(tpl_display_t *display)
-{
- pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
- TPL_IGNORE(display);
- pthread_mutex_lock(&mutex);
-
- if (--global.display_count == 0) {
- tbm_bufmgr_deinit(global.bufmgr);
- close(global.bufmgr_fd);
- XCloseDisplay(global.worker_display);
-
- global.worker_display = NULL;
- global.bufmgr_fd = -1;
- global.bufmgr = NULL;
-
- __tpl_list_fini(&dri3_drawable_list, NULL);
- }
-
- pthread_mutex_unlock(&mutex);
-
-}
-
-static tpl_bool_t
-__tpl_x11_dri3_surface_init(tpl_surface_t *surface)
-{
- Display *display = NULL;
- XID drawable;
- tpl_x11_dri3_surface_t *x11_surface;
-
- TPL_ASSERT(surface);
-
- x11_surface = (tpl_x11_dri3_surface_t *)calloc(1,
- sizeof(tpl_x11_dri3_surface_t));
- if (x11_surface == NULL) {
- TPL_ERR("Failed to allocate buffer!");
- return TPL_FALSE;
- }
-
- x11_surface->latest_post_interval = -1;
- __tpl_list_init(&x11_surface->buffer_cache);
-
- display = (Display *)surface->display->native_handle;
- drawable = (XID)surface->native_handle;
-
- x11_surface->drawable = dri3_create_drawable(display, drawable);
-
- surface->backend.data = (void *)x11_surface;
- if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
- __tpl_x11_display_get_window_info(surface->display,
- surface->native_handle,
- &surface->width, &surface->height, NULL, 0, 0);
- } else {
- __tpl_x11_display_get_pixmap_info(surface->display,
- surface->native_handle,
- &surface->width, &surface->height, NULL);
- }
-
- return TPL_TRUE;
-}
-
-static void
-__tpl_x11_dri3_surface_fini(tpl_surface_t *surface)
-{
- Display *display;
- tpl_x11_dri3_surface_t *x11_surface;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->display);
- TPL_ASSERT(surface->display->native_handle);
-
- display = (Display *) surface->display->native_handle;
- x11_surface = (tpl_x11_dri3_surface_t *) surface->backend.data;
-
- dri3_destroy_drawable(display, (XID)surface->native_handle);
-
- if (x11_surface) {
- __tpl_x11_surface_buffer_cache_clear(&x11_surface->buffer_cache);
-
-
- if (x11_surface->damage)
- XFixesDestroyRegion(display, x11_surface->damage);
-
- free(x11_surface);
- }
-
- surface->backend.data = NULL;
-}
-
-static void
-__tpl_x11_dri3_surface_post_internal(tpl_surface_t *surface,
- tpl_frame_t *frame,
- tpl_bool_t is_worker)
-{
- Display *display = NULL;
- tpl_x11_dri3_surface_t *x11_surface;
- XRectangle *xrects;
- XRectangle xrects_stack[TPL_STACK_XRECTANGLE_SIZE];
-
- TPL_ASSERT(surface);
- TPL_ASSERT(frame);
-
- TRACE_BEGIN("DDK:DRI3:SWAPBUFFERS");
- x11_surface = (tpl_x11_dri3_surface_t *)surface->backend.data;
-
- display = __tpl_x11_dri3_get_worker_display();
-
- if (frame->interval != x11_surface->latest_post_interval) {
- x11_surface->latest_post_interval = frame->interval;/*FIXME:set interval?*/
- }
-
- if (__tpl_region_is_empty(&frame->damage)) {
- dri3_swap_buffers(display, x11_surface->drawable, frame->buffer, 0, 0);
- } else {
- int i;
-
- if (frame->damage.num_rects > TPL_STACK_XRECTANGLE_SIZE) {
- xrects = (XRectangle *)malloc(sizeof(XRectangle) *
- frame->damage.num_rects);
- } else {
- xrects = &xrects_stack[0];
- }
-
- for (i = 0; i < frame->damage.num_rects; i++) {
- const int *rects = &frame->damage.rects[i * 4];
-
- xrects[i].x = rects[0];
- xrects[i].y = frame->buffer->height - rects[1] -
- rects[3];
- xrects[i].width = rects[2];
- xrects[i].height = rects[3];
- }
-
- if (x11_surface->damage == None) {
- x11_surface->damage =
- XFixesCreateRegion(display, xrects,
- frame->damage.num_rects);
- } else {
- XFixesSetRegion(display, x11_surface->damage,
- xrects, frame->damage.num_rects);
- }
-
- dri3_swap_buffers(display, x11_surface->drawable, frame->buffer, 0,
- x11_surface->damage);
- }
- frame->state = TPL_FRAME_STATE_POSTED;
-
- TRACE_END();
-}
-
-static void
-__tpl_x11_dri3_surface_post(tpl_surface_t *surface, tpl_frame_t *frame)
-{
- TPL_ASSERT(surface);
- TPL_ASSERT(frame);
-
- __tpl_x11_dri3_surface_post_internal(surface, frame, TPL_TRUE);
-}
-
-static tpl_bool_t
-__tpl_x11_dri3_surface_begin_frame(tpl_surface_t *surface)
-{
- tpl_frame_t *prev_frame;
-
- TPL_ASSERT(surface);
-
- if (surface->type != TPL_SURFACE_TYPE_WINDOW)
- return TPL_TRUE;
-
- prev_frame = __tpl_surface_get_latest_frame(surface);
-
- if (prev_frame && prev_frame->state != TPL_FRAME_STATE_POSTED) {
- if ((DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
- global.fb_swap_type == TPL_X11_SWAP_TYPE_SYNC) ||
- (!DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
- global.win_swap_type == TPL_X11_SWAP_TYPE_SYNC)) {
- __tpl_surface_wait_all_frames(surface);
- }
- }
-
- return TPL_TRUE;
-}
-
-static tpl_bool_t
-__tpl_x11_dri3_surface_validate_frame(tpl_surface_t *surface)
-{
- tpl_frame_t *prev_frame;
- if (surface->type != TPL_SURFACE_TYPE_WINDOW)
- return TPL_TRUE;
-
- if (surface->frame == NULL)
- return TPL_TRUE;
-
- prev_frame = __tpl_surface_get_latest_frame(surface);
-
- if (prev_frame && prev_frame->state != TPL_FRAME_STATE_POSTED) {
- if ((DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
- global.fb_swap_type == TPL_X11_SWAP_TYPE_LAZY) ||
- (!DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
- global.win_swap_type == TPL_X11_SWAP_TYPE_LAZY)) {
- __tpl_surface_wait_all_frames(surface);
- return TPL_TRUE;
- }
- }
- return TPL_TRUE;
-}
-
-static tpl_bool_t
-__tpl_x11_dri3_surface_end_frame(tpl_surface_t *surface)
-{
- tpl_frame_t *frame;
- tpl_x11_dri3_surface_t *x11_surface;
-
- TPL_ASSERT(surface);
- TPL_ASSERT(surface->backend.data);
-
- frame = __tpl_surface_get_latest_frame(surface);
- x11_surface = (tpl_x11_dri3_surface_t *) surface->backend.data;
-
- if (frame) {
- x11_surface->latest_render_target = frame->buffer;
-
- if ((DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
- global.fb_swap_type == TPL_X11_SWAP_TYPE_ASYNC) ||
- (!DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
- global.win_swap_type == TPL_X11_SWAP_TYPE_ASYNC)) {
- __tpl_x11_dri3_surface_post_internal(surface, frame, TPL_FALSE);
- }
- }
-
- return TPL_TRUE;
-}
-
-/* 2015-04-08 joonbum.ko@samsung.com */
-/* change the key value of tpl_buffer_t from dma_buf_fd to tbo name */
-static tpl_buffer_t *
-__tpl_x11_dri3_surface_get_buffer(tpl_surface_t *surface,
- tpl_bool_t *reset_buffers)
-{
- Drawable drawable;
- dri3_buffer *buffer = NULL;
- tpl_buffer_t *tpl_buffer = NULL;
- uint32_t attachments[1] = { dri3_buffer_back };
- tbm_bo bo;
- tbm_bo_handle bo_handle;
- tpl_x11_dri3_surface_t *x11_surface;
- int cpp = 0;
-
- TPL_ASSERT(surface);
-
- x11_surface = (tpl_x11_dri3_surface_t *)surface->backend.data;
-
- if (surface->type == TPL_SURFACE_TYPE_PIXMAP) {
- attachments[0] = dri3_buffer_front;
- }
-
- drawable = (Drawable)surface->native_handle;
-
- /* [BEGIN: 20141125-xing.huang] Get the current buffer via DRI3. */
-	cpp = 32;/*_mali_surface_specifier_bpp(&(surface->sformat)); the cpp reported by Mali is not reliable, so hard-code 32 */
- /* [END: 20141125-xing.huang] */
-
-	buffer = dri3_get_buffers(drawable, x11_surface->drawable, attachments, cpp);
-	if (buffer == NULL) {
-		TPL_ERR("Failed to get DRI3 buffer!");
-		return NULL;
-	}
-
-	if (DRI2_BUFFER_IS_REUSED(buffer->flags)) {
- tpl_buffer = __tpl_x11_surface_buffer_cache_find(
- &x11_surface->buffer_cache,
- tbm_bo_export(buffer->tbo));
-
- if (tpl_buffer) {
- /* If the buffer name is reused and there's a cache
- * entry for that name, just update the buffer age
- * and return. */
- /* [BEGIN: 20140119-leiba.sun] Add support for buffer age */
- tpl_buffer->age = buffer->buffer_age;
- /* [END:20150119-leiba.sun] */
-
- if (surface->type == TPL_SURFACE_TYPE_PIXMAP)
- tbm_bo_unref (buffer->tbo);
-
- goto done;
- }
- }
-
- if (!tpl_buffer) {
- /* Remove the buffer from the cache. */
- __tpl_x11_surface_buffer_cache_remove(
- &x11_surface->buffer_cache,
- tbm_bo_export(buffer->tbo));
- if (buffer->old_bo_name != 0) {
- __tpl_x11_surface_buffer_cache_remove(
- &x11_surface->buffer_cache,
- buffer->old_bo_name);
- buffer->old_bo_name = 0;
- }
- }
-
- bo = buffer->tbo;
-
- if (bo == NULL) {
- TPL_ERR("bo is NULL!");
- goto done;
- }
-
- bo_handle = tbm_bo_get_handle(bo, TBM_DEVICE_3D);
-
- /* Create tpl buffer. */
- tpl_buffer = __tpl_buffer_alloc(surface, (size_t) tbm_bo_export(buffer->tbo),
- (int)bo_handle.u32,
- buffer->width, buffer->height, buffer->cpp * 8, buffer->pitch);
- if (NULL == tpl_buffer) {
- TPL_ERR("TPL buffer alloc failed!");
- goto done;
- }
-
- if (surface->type != TPL_SURFACE_TYPE_PIXMAP)
- tbm_bo_ref(buffer->tbo);
-
- tpl_buffer->age = DRI2_BUFFER_GET_AGE(buffer->flags);
- tpl_buffer->backend.data = (void *)buffer;
- tpl_buffer->backend.flags = buffer->flags;
- /* [BEGIN: 20140119-leiba.sun] Add support for buffer age
- * save surface for later use */
- tpl_buffer->surface = surface;
- /* [END:20150119-leiba.sun] */
-
- __tpl_x11_surface_buffer_cache_add(&x11_surface->buffer_cache, tpl_buffer);
- tpl_object_unreference(&tpl_buffer->base);
-done:
- if (reset_buffers) {
- /* Users use this output value to check if they have to reset previous buffers. */
- *reset_buffers = !DRI2_BUFFER_IS_REUSED(buffer->flags) ||
- buffer->width != surface->width || buffer->height != surface->height;
- }
-
- return tpl_buffer;
-}
-
-/* [BEGIN: 20140119-leiba.sun] Add support for buffer age */
-int
-__tpl_x11_dri3_get_buffer_age(tpl_buffer_t *buffer)
-{
- dri3_buffer *back;
-
- TPL_ASSERT(buffer);
-
- back = (dri3_buffer *) buffer->backend.data;
-
- TPL_ASSERT(back);
-
- return back->buffer_age;
-}
-/* [END:20150119-leiba.sun] */
-
-
-tpl_bool_t
-__tpl_display_choose_backend_x11_dri3(tpl_handle_t native_dpy)
-{
- TPL_IGNORE(native_dpy);
-	/* An X11 display accepts any type of native handle, so other backends must be chosen before this one. */
- return TPL_TRUE;
-}
-
-void
-__tpl_display_init_backend_x11_dri3(tpl_display_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_X11_DRI3;
- backend->data = NULL;
-
- backend->init = __tpl_x11_dri3_display_init;
- backend->fini = __tpl_x11_dri3_display_fini;
- backend->query_config = __tpl_x11_display_query_config;
- backend->filter_config = NULL;
- backend->get_window_info = __tpl_x11_display_get_window_info;
- backend->get_pixmap_info = __tpl_x11_display_get_pixmap_info;
- backend->flush = __tpl_x11_display_flush;
-}
-
-void
-__tpl_surface_init_backend_x11_dri3(tpl_surface_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_X11_DRI3;
- backend->data = NULL;
-
- backend->init = __tpl_x11_dri3_surface_init;
- backend->fini = __tpl_x11_dri3_surface_fini;
- backend->begin_frame = __tpl_x11_dri3_surface_begin_frame;
- backend->end_frame = __tpl_x11_dri3_surface_end_frame;
- backend->validate_frame = __tpl_x11_dri3_surface_validate_frame;
- backend->get_buffer = __tpl_x11_dri3_surface_get_buffer;
- backend->post = __tpl_x11_dri3_surface_post;
-}
-
-void
-__tpl_buffer_init_backend_x11_dri3(tpl_buffer_backend_t *backend)
-{
- TPL_ASSERT(backend);
-
- backend->type = TPL_BACKEND_X11_DRI3;
- backend->data = NULL;
-
- backend->init = __tpl_x11_dri3_buffer_init;
- backend->fini = __tpl_x11_dri3_buffer_fini;
- backend->map = __tpl_x11_dri3_buffer_map;
- backend->unmap = __tpl_x11_dri3_buffer_unmap;
- backend->lock = __tpl_x11_dri3_buffer_lock;
- backend->unlock = __tpl_x11_dri3_buffer_unlock;
- /* [BEGIN: 20140119-leiba.sun] Add support for buffer age */
- backend->get_buffer_age = __tpl_x11_dri3_get_buffer_age;
- /* [END:20150119-leiba.sun] */
-}
+++ /dev/null
-#ifndef TPL_X11_INTERNAL_H
-#define TPL_X11_INTERNAL_H
-
-#include "tpl.h"
-#include <stdlib.h>
-#include <pthread.h>
-
-#include "tpl_utils.h"
-
-#define TIZEN_FEATURES_ENABLE 0
-
-#define DRI2_BUFFER_FB 0x02
-#define DRI2_BUFFER_MAPPED 0x04
-#define DRI2_BUFFER_REUSED 0x08
-#define DRI2_BUFFER_AGE 0x70 /* 01110000 */
-
-#define DRI2_BUFFER_IS_FB(flag) ((flag & DRI2_BUFFER_FB) ? 1 : 0)
-#define DRI2_BUFFER_IS_REUSED(flag) ((flag & DRI2_BUFFER_REUSED) ? 1 : 0)
-#define DRI2_BUFFER_GET_AGE(flag) ((flag & DRI2_BUFFER_AGE) >> 4)
-
-#define TPL_STACK_XRECTANGLE_SIZE 16
-/* [BEGIN: 20141125-xuelian.bai] DRI3 need lots of buffer cache. or it will get
- * slow */
-#define TPL_BUFFER_CACHE_MAX_ENTRIES 40
-/* [END: 20141125-xuelian.bai] */
-
-#define EGL_X11_WINDOW_SWAP_TYPE_ENV_NAME "EGL_X11_SWAP_TYPE_WINDOW"
-#define EGL_X11_FB_SWAP_TYPE_ENV_NAME "EGL_X11_SWAP_TYPE_FB"
-
-typedef struct _tpl_x11_global tpl_x11_global_t;
-
-typedef enum {
- TPL_X11_SWAP_TYPE_ERROR = -1,
- TPL_X11_SWAP_TYPE_SYNC = 0,
- TPL_X11_SWAP_TYPE_ASYNC,
- TPL_X11_SWAP_TYPE_LAZY,
- TPL_X11_SWAP_TYPE_MAX
-} tpl_x11_swap_type_t;
-
-struct _tpl_x11_global {
- int display_count;
-
- Display *worker_display;
- int bufmgr_fd;
- tbm_bufmgr bufmgr;
-
- tpl_x11_swap_type_t win_swap_type;
- tpl_x11_swap_type_t fb_swap_type;
-};
-
-pthread_mutex_t
-__tpl_x11_get_global_mutex(void);
-
-void
-__tpl_x11_swap_str_to_swap_type(char *str, tpl_x11_swap_type_t *type);
-
-tpl_buffer_t *
-__tpl_x11_surface_buffer_cache_find(tpl_list_t *buffer_cache,
- unsigned int name);
-void
-__tpl_x11_surface_buffer_cache_remove(tpl_list_t *buffer_cache,
- unsigned int name);
-tpl_bool_t
-__tpl_x11_surface_buffer_cache_add(tpl_list_t *buffer_cache,
- tpl_buffer_t *buffer);
-void
-__tpl_x11_surface_buffer_cache_clear(tpl_list_t *buffer_cache);
-tpl_bool_t
-__tpl_x11_display_query_config(tpl_display_t *display,
- tpl_surface_type_t surface_type, int red_size,
- int green_size, int blue_size, int alpha_size,
- int color_depth, int *native_visual_id, tpl_bool_t *is_slow);
-tpl_bool_t
-__tpl_x11_display_get_window_info(tpl_display_t *display, tpl_handle_t window,
- int *width, int *height, tpl_format_t *format, int depth, int a_size);
-tpl_bool_t
-__tpl_x11_display_get_pixmap_info(tpl_display_t *display, tpl_handle_t pixmap,
- int *width, int *height, tpl_format_t *format);
-void
-__tpl_x11_display_flush(tpl_display_t *display);
-tpl_bool_t
-__tpl_x11_buffer_init(tpl_buffer_t *buffer);
-void
-__tpl_x11_buffer_fini(tpl_buffer_t *buffer);
-void *
-__tpl_x11_buffer_map(tpl_buffer_t *buffer, int size);
-void
-__tpl_x11_buffer_unmap(tpl_buffer_t *buffer, void *ptr, int size);
-tpl_bool_t
-__tpl_x11_buffer_lock(tpl_buffer_t *buffer, tpl_lock_usage_t usage);
-void
-__tpl_x11_buffer_unlock(tpl_buffer_t *buffer);
-tpl_bool_t __tpl_x11_buffer_get_reused_flag(tpl_buffer_t *buffer);
-void
-__tpl_x11_display_wait_native(tpl_display_t *display);
-
-#endif /* TPL_X11_INTERNAL_H */
--- /dev/null
+#define inline __inline__
+
+#include <wayland-client.h>
+
+#ifndef USE_TBM_QUEUE
+#define USE_TBM_QUEUE
+#endif
+
+#include <gbm.h>
+#include <gbm/gbm_tbm.h>
+#include <gbm/gbm_tbmint.h>
+
+#undef inline
+
+#include "tpl_internal.h"
+
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+
+#include <tbm_bufmgr.h>
+#include <tbm_surface.h>
+#include <tbm_surface_internal.h>
+#include <tbm_surface_queue.h>
+#include <wayland-tbm-client.h>
+#include <wayland-tbm-server.h>
+
+typedef struct _tpl_gbm_display tpl_gbm_display_t;
+typedef struct _tpl_gbm_surface tpl_gbm_surface_t;
+typedef struct _tpl_gbm_buffer tpl_gbm_buffer_t;
+
+struct _tpl_gbm_display {
+ tbm_bufmgr bufmgr;
+};
+
+struct _tpl_gbm_surface {
+ tbm_surface_queue_h tbm_queue;
+ tbm_surface_h current_buffer;
+ tpl_bool_t reset; /* TRUE if queue was reset by external */
+};
+
+struct _tpl_gbm_buffer {
+ tpl_display_t *display;
+ tpl_gbm_buffer_t **tpl_gbm_surface;
+ tbm_bo bo;
+ struct gbm_bo *gbm_bo;
+ struct wl_listener destroy_listener;
+};
+
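+/* The address of this static variable is used as a process-unique key for
+ * attaching a tpl_gbm_buffer_t to a tbm_surface via the tbm user-data API. */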
+static int tpl_gbm_buffer_key;
+#define KEY_TPL_GBM_BUFFER (unsigned long)(&tpl_gbm_buffer_key)
+
+static void __tpl_gbm_buffer_free(tpl_gbm_buffer_t *gbm_buffer);
+static inline tpl_gbm_buffer_t *
+__tpl_gbm_get_gbm_buffer_from_tbm_surface(tbm_surface_h surface)
+{
+ tpl_gbm_buffer_t *buf = NULL;
+
+ if (!tbm_surface_internal_is_valid(surface))
+ return NULL;
+
+ tbm_surface_internal_get_user_data(surface, KEY_TPL_GBM_BUFFER, (void **)&buf);
+
+ return buf;
+}
+
+static inline void
+__tpl_gbm_set_gbm_buffer_to_tbm_surface(tbm_surface_h surface,
+ tpl_gbm_buffer_t *buf)
+{
+ tbm_surface_internal_add_user_data(surface, KEY_TPL_GBM_BUFFER,
+ (tbm_data_free)__tpl_gbm_buffer_free);
+ tbm_surface_internal_set_user_data(surface, KEY_TPL_GBM_BUFFER, buf);
+}
+
+static TPL_INLINE tpl_bool_t
+__tpl_gbm_display_is_gbm_device(tpl_handle_t native_dpy)
+{
+ TPL_ASSERT(native_dpy);
+
+ /* MAGIC CHECK: A native display handle is a gbm_device if its first
+    de-referenced value is the address of gbm_create_device(). */
+ if (*(void **)native_dpy == gbm_create_device)
+ return TPL_TRUE;
+
+ return TPL_FALSE;
+}
+
+static tpl_result_t
+__tpl_gbm_display_init(tpl_display_t *display)
+{
+ tpl_gbm_display_t *gbm_display = NULL;
+
+ TPL_ASSERT(display);
+
+ /* Do not allow default display in gbm. */
+ if (!display->native_handle) {
+ TPL_ERR("native_handle is NULL. Can not allow default display in gbm.");
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ gbm_display = (tpl_gbm_display_t *) calloc(1, sizeof(tpl_gbm_display_t));
+ if (!gbm_display) return TPL_ERROR_OUT_OF_MEMORY;
+
+ display->bufmgr_fd = dup(gbm_device_get_fd(display->native_handle));
+ gbm_display->bufmgr = tbm_bufmgr_init(display->bufmgr_fd);
+ display->backend.data = gbm_display;
+
+ TPL_LOG_B("GBM", "[INIT] tpl_gbm_display_t(%p) bufmgr_fd(%d) bufmgr(%p)",
+ gbm_display, display->bufmgr_fd, gbm_display->bufmgr);
+ return TPL_ERROR_NONE;
+}
+
+static void
+__tpl_gbm_display_fini(tpl_display_t *display)
+{
+ tpl_gbm_display_t *gbm_display;
+
+ TPL_ASSERT(display);
+
+ gbm_display = (tpl_gbm_display_t *)display->backend.data;
+
+ if (gbm_display) {
+ TPL_LOG_B("GBM", "[FINI] tpl_gbm_display_t(%p) bufmgr(%p)",
+ gbm_display, gbm_display->bufmgr);
+ tbm_bufmgr_deinit(gbm_display->bufmgr);
+ free(gbm_display);
+ } else {
+ TPL_ERR("Failed to finalize gbm_display.");
+ }
+
+ close(display->bufmgr_fd);
+ display->backend.data = NULL;
+}
+
+static tpl_result_t
+__tpl_gbm_display_query_config(tpl_display_t *display,
+ tpl_surface_type_t surface_type, int red_size,
+ int green_size, int blue_size, int alpha_size,
+ int color_depth, int *native_visual_id,
+ tpl_bool_t *is_slow)
+{
+ TPL_ASSERT(display);
+
+ if ((surface_type == TPL_SURFACE_TYPE_WINDOW) && (red_size == 8)
+ && (green_size == 8) && (blue_size == 8)
+ && ((color_depth == 32) || (color_depth == 24))) {
+ if (alpha_size == 8) {
+ if (gbm_device_is_format_supported(
+ (struct gbm_device *)display->native_handle,
+ GBM_FORMAT_ARGB8888, GBM_BO_USE_RENDERING) == 1) {
+ if (native_visual_id)
+ *native_visual_id = GBM_FORMAT_ARGB8888;
+ } else return TPL_ERROR_INVALID_PARAMETER;
+
+ if (is_slow != NULL) *is_slow = TPL_FALSE;
+
+ return TPL_ERROR_NONE;
+ }
+ if (alpha_size == 0) {
+ if (gbm_device_is_format_supported(
+ (struct gbm_device *)display->native_handle,
+ GBM_FORMAT_XRGB8888,
+ GBM_BO_USE_RENDERING) == 1) {
+ if (native_visual_id)
+ *native_visual_id = GBM_FORMAT_XRGB8888;
+ } else return TPL_ERROR_INVALID_PARAMETER;
+
+ if (is_slow != NULL) *is_slow = TPL_FALSE;
+
+ return TPL_ERROR_NONE;
+ }
+ }
+
+ return TPL_ERROR_INVALID_PARAMETER;
+}
+
+static tpl_result_t
+__tpl_gbm_display_filter_config(tpl_display_t *display, int *visual_id,
+ int alpha_size)
+{
+ TPL_IGNORE(display);
+
+ if (visual_id != NULL && *visual_id == GBM_FORMAT_ARGB8888
+ && alpha_size == 0) {
+ *visual_id = GBM_FORMAT_XRGB8888;
+ return TPL_ERROR_NONE;
+ }
+
+ return TPL_ERROR_INVALID_PARAMETER;
+}
+
+static tpl_result_t
+__tpl_gbm_display_get_window_info(tpl_display_t *display, tpl_handle_t window,
+ int *width, int *height, tbm_format *format,
+ int depth, int a_size)
+{
+ TPL_ASSERT(display);
+ TPL_ASSERT(window);
+
+ struct gbm_surface *gbm_surface = (struct gbm_surface *)window;
+ tbm_surface_queue_h surf_queue = (tbm_surface_queue_h)gbm_tbm_get_surface_queue(
+ gbm_surface);
+ if (!surf_queue) {
+ TPL_ERR("Failed to get tbm_surface_queue from gbm_surface.");
+ return TPL_ERROR_INVALID_OPERATION;
+ }
+
+ if (width) *width = tbm_surface_queue_get_width(surf_queue);
+ if (height) *height = tbm_surface_queue_get_height(surf_queue);
+ if (format) *format = tbm_surface_queue_get_format(surf_queue);
+
+ return TPL_ERROR_NONE;
+}
+
+static tpl_result_t
+__tpl_gbm_display_get_pixmap_info(tpl_display_t *display, tpl_handle_t pixmap,
+ int *width, int *height, tbm_format *format)
+{
+ tbm_surface_h tbm_surface = NULL;
+
+ tbm_surface = wayland_tbm_server_get_surface(NULL,
+ (struct wl_resource *)pixmap);
+ if (!tbm_surface) {
+ TPL_ERR("Failed to get tbm_surface_h from native pixmap.");
+ return TPL_ERROR_INVALID_OPERATION;
+ }
+
+ if (width) *width = tbm_surface_get_width(tbm_surface);
+ if (height) *height = tbm_surface_get_height(tbm_surface);
+ if (format) *format = tbm_surface_get_format(tbm_surface);
+
+ return TPL_ERROR_NONE;
+}
+
+static tbm_surface_h
+__tpl_gbm_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
+{
+ tbm_surface_h tbm_surface = NULL;
+
+ TPL_ASSERT(pixmap);
+
+ tbm_surface = wayland_tbm_server_get_surface(NULL,
+ (struct wl_resource *)pixmap);
+ if (!tbm_surface) {
+ TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
+ return NULL;
+ }
+
+ TPL_LOG_B("GBM", "[PIXMAP] wl_resource(%p) tbm_surface(%p) bo(%d)", pixmap,
+ tbm_surface,
+ tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
+
+ return tbm_surface;
+}
+
+static void
+__cb_tbm_surface_queue_reset_callback(tbm_surface_queue_h surface_queue,
+ void *data)
+{
+ tpl_surface_t *surface = NULL;
+ tpl_gbm_surface_t *tpl_gbm_surface = NULL;
+
+ surface = (tpl_surface_t *)data;
+ TPL_CHECK_ON_NULL_RETURN(surface);
+
+ tpl_gbm_surface = (tpl_gbm_surface_t *)surface->backend.data;
+ TPL_CHECK_ON_NULL_RETURN(tpl_gbm_surface);
+
+ TPL_LOG_B("GBM",
+ "[QUEUE_RESET_CB] tpl_gbm_surface_t(%p) surface_queue(%p)",
+ data, surface_queue);
+
+ if (surface->reset_cb)
+ surface->reset_cb(surface->reset_data);
+}
+
+static tpl_result_t
+__tpl_gbm_surface_init(tpl_surface_t *surface)
+{
+ tpl_gbm_surface_t *tpl_gbm_surface = NULL;
+ TPL_ASSERT(surface);
+
+ tpl_gbm_surface = (tpl_gbm_surface_t *) calloc(1, sizeof(tpl_gbm_surface_t));
+ if (!tpl_gbm_surface) {
+ TPL_ERR("Failed to allocate new gbm backend surface.");
+ return TPL_ERROR_OUT_OF_MEMORY;
+ }
+
+ surface->backend.data = (void *)tpl_gbm_surface;
+ tpl_gbm_surface->tbm_queue = NULL;
+ tpl_gbm_surface->current_buffer = NULL;
+
+ if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
+ struct gbm_surface *gbm_surface = (struct gbm_surface *)surface->native_handle;
+ tpl_gbm_surface->tbm_queue =
+ (tbm_surface_queue_h)gbm_tbm_get_surface_queue(gbm_surface);
+ if (!tpl_gbm_surface->tbm_queue) {
+ TPL_ERR("Failed to get tbm_surface_queue from gbm_surface.");
+ goto error;
+ }
+
+ /* Register the reset callback on the tbm_queue. */
+ if (tbm_surface_queue_add_reset_cb(tpl_gbm_surface->tbm_queue,
+ __cb_tbm_surface_queue_reset_callback,
+ (void *)surface)) {
+ TPL_ERR("TBM surface queue add reset cb failed!");
+ goto error;
+ }
+
+ if (__tpl_gbm_display_get_window_info(surface->display,
+ surface->native_handle, &surface->width,
+ &surface->height, NULL, 0, 0) != TPL_ERROR_NONE) {
+ TPL_ERR("Failed to get native window info.");
+ goto error;
+ }
+
+ TPL_LOG_B("GBM", "[INIT] WINDOW|tpl_gbm_surface_t(%p) tbm_queue(%p) (%dx%d)",
+ tpl_gbm_surface, tpl_gbm_surface->tbm_queue,
+ surface->width, surface->height);
+
+ return TPL_ERROR_NONE;
+ } else if (surface->type == TPL_SURFACE_TYPE_PIXMAP) {
+ if (__tpl_gbm_display_get_pixmap_info(surface->display,
+ surface->native_handle, &surface->width,
+ &surface->height, NULL) != TPL_ERROR_NONE) {
+ TPL_ERR("Failed to get native pixmap info.");
+ goto error;
+ }
+
+ TPL_LOG_B("GBM", "[INIT] PIXMAP|tpl_gbm_surface_t(%p) (%dx%d)",
+ tpl_gbm_surface, surface->width, surface->height);
+
+ return TPL_ERROR_NONE;
+ }
+
+error:
+ free(tpl_gbm_surface);
+ surface->backend.data = NULL;
+
+ return TPL_ERROR_INVALID_OPERATION;
+}
+
+static void
+__tpl_gbm_surface_fini(tpl_surface_t *surface)
+{
+ tpl_gbm_surface_t *gbm_surface = NULL;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->display);
+
+ gbm_surface = (tpl_gbm_surface_t *) surface->backend.data;
+ if (!gbm_surface) return;
+
+ if (gbm_surface->current_buffer)
+ tbm_surface_internal_unref(gbm_surface->current_buffer);
+
+ TPL_LOG_B("GBM", "[FINI] tpl_surface_t(%p) tpl_gbm_surface_t(%p)",
+ surface, gbm_surface);
+
+ free(gbm_surface);
+ surface->backend.data = NULL;
+}
+
+static tpl_result_t
+__tpl_gbm_surface_enqueue_buffer(tpl_surface_t *surface,
+ tbm_surface_h tbm_surface, int num_rects,
+ const int *rects, tbm_fd sync_fence)
+{
+ tpl_gbm_buffer_t *gbm_buffer = NULL;
+ tpl_gbm_surface_t *gbm_surface = NULL;
+ int ret = 0;
+ int union_x, union_y;
+ int union_w, union_h;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->display);
+ TPL_ASSERT(surface->display->native_handle);
+ TPL_ASSERT(tbm_surface);
+
+ gbm_surface = (tpl_gbm_surface_t *)surface->backend.data;
+ if (!gbm_surface) {
+ TPL_ERR("tpl_gbm_surface_t is invalid. tpl_surface_t(%p)",
+ surface);
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ if (!tbm_surface_internal_is_valid(tbm_surface)) {
+ TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.", tbm_surface);
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ gbm_buffer = __tpl_gbm_get_gbm_buffer_from_tbm_surface(tbm_surface);
+ if (!gbm_buffer) {
+ TPL_ERR("Filed to get gbm_buffer from tbm_surface(%p).", tbm_surface);
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ TRACE_ASYNC_END((int)gbm_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
+ tbm_bo_export(gbm_buffer->bo));
+
+ TPL_IMAGE_DUMP(tbm_surface, surface->width, surface->height);
+
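+ /* Drop the reference taken in __tpl_gbm_surface_dequeue_buffer(). */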
+ tbm_surface_internal_unref(tbm_surface);
+
+ if (!gbm_surface->tbm_queue) {
+ TPL_ERR("tbm_surface_queue is invalid. tpl_gbm_surface_t(%p)",
+ gbm_surface);
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
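+ /* If a sync fence was passed in, wait for the rendering on this buffer to
+  * finish before enqueueing, then close the fence fd. */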
+ if (sync_fence != -1) {
+ tbm_sync_fence_wait(sync_fence, -1);
+ close(sync_fence);
+ }
+
+ /* If damage rects were given for this tbm_surface, */
+ if (num_rects != 0 && rects != NULL) {
+ int i;
+ int left = surface->width;
+ int bottom = surface->height;
+ int right = 0, top = 0;
+
+ /* Calculate the union of the given damage rectangles. */
+ for (i = 0; i < num_rects; i++) {
+ int rect_i = i * 4;
+ int x = rects[rect_i];
+ int y = rects[rect_i + 1];
+ int w = rects[rect_i + 2];
+ int h = rects[rect_i + 3];
+
+ left = (x < left) ? x : left;
+ bottom = (y < bottom) ? y : bottom;
+ right = ((x + w) > right) ? (x + w) : right;
+ top = ((y + h) > top) ? (y + h) : top;
+ }
+
+ /* Clamp the union so it does not exceed the surface bounds. */
+ left = (left < 0) ? 0 : left;
+ bottom = (bottom < 0) ? 0 : bottom;
+ right = (right > surface->width) ? surface->width : right;
+ top = (top > surface->height) ? surface->height : top;
+
+ /* And set its union rect to tbm_surface as its damage region. */
+ union_w = right - left;
+ union_h = top - bottom;
+ union_x = left;
+ union_y = top;
+ } else {
+ /* If no damage rects were given,
+ * use the full surface size as the damage region. */
+ union_w = surface->width;
+ union_h = surface->height;
+ union_x = 0;
+ union_y = 0;
+ }
+
+ if (!(ret = tbm_surface_internal_set_damage(tbm_surface, union_x, union_y,
+ union_w, union_h)))
+ TPL_WARN("Failed to set damage rect to tbm_surface(%p)", tbm_surface);
+
+ if (tbm_surface_queue_enqueue(gbm_surface->tbm_queue, tbm_surface)
+ != TBM_SURFACE_QUEUE_ERROR_NONE) {
+ TPL_ERR("tbm_surface_queue_enqueue failed. tbm_surface_queue(%p) tbm_surface(%p)",
+ gbm_surface->tbm_queue, tbm_surface);
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ TPL_LOG_B("GBM", "[ENQ] tpl_gbm_surface_t(%p) tbm_surface(%p) bo(%d)",
+ gbm_surface, tbm_surface, tbm_bo_export(gbm_buffer->bo));
+
+ TRACE_MARK("[ENQ] BO_NAME:%d", tbm_bo_export(gbm_buffer->bo));
+ return TPL_ERROR_NONE;
+}
+
+static tpl_bool_t
+__tpl_gbm_surface_validate(tpl_surface_t *surface)
+{
+ TPL_IGNORE(surface);
+
+ return TPL_TRUE;
+}
+
+static tbm_surface_h
+__tpl_gbm_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
+ tbm_fd *sync_fence)
+{
+ tbm_bo bo;
+ tbm_surface_h tbm_surface = NULL;
+ tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_QUEUE;
+ tpl_gbm_buffer_t *gbm_buffer = NULL;
+
+ tpl_gbm_surface_t *gbm_surface = NULL;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->native_handle);
+ TPL_ASSERT(surface->display);
+ TPL_ASSERT(surface->display->native_handle);
+
+ if (sync_fence)
+ *sync_fence = -1;
+
+ gbm_surface = (tpl_gbm_surface_t *)surface->backend.data;
+
+ TRACE_BEGIN("WAITING FOR DEQUEUEABLE");
+ if (tbm_surface_queue_can_dequeue(gbm_surface->tbm_queue, 1) == 1)
+ tsq_err = tbm_surface_queue_dequeue(gbm_surface->tbm_queue, &tbm_surface);
+
+ if (!tbm_surface) {
+ TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
+ tsq_err);
+ TRACE_END();
+ return NULL;
+ }
+ TRACE_END();
+
+ /* Increase the reference count of the tbm_surface.
+ * It is dropped again just before tbm_surface_queue_enqueue() is called. */
+ tbm_surface_internal_ref(tbm_surface);
+
+ gbm_buffer = __tpl_gbm_get_gbm_buffer_from_tbm_surface(tbm_surface);
+ if (gbm_buffer) {
+ TRACE_MARK("[DEQ][REUSED]BO_NAME:%d", tbm_bo_export(gbm_buffer->bo));
+ TRACE_ASYNC_BEGIN((int)gbm_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
+ tbm_bo_export(gbm_buffer->bo));
+ TPL_LOG_B("GBM", "[DEQ][R] tpl_gbm_surface_t(%p) tbm_surface(%p) bo(%d)",
+ gbm_surface, tbm_surface, tbm_bo_export(gbm_buffer->bo));
+ return tbm_surface;
+ }
+
+ if (!(bo = tbm_surface_internal_get_bo(tbm_surface, 0))) {
+ TPL_ERR("Failed to get tbm_bo from tbm_surface");
+ tbm_surface_internal_unref(tbm_surface);
+ return NULL;
+ }
+
+ gbm_buffer = (tpl_gbm_buffer_t *) calloc(1, sizeof(tpl_gbm_buffer_t));
+ if (!gbm_buffer) {
+ TPL_ERR("Mem alloc for gbm_buffer failed!");
+ tbm_surface_internal_unref(tbm_surface);
+ return NULL;
+ }
+
+ gbm_buffer->display = surface->display;
+ gbm_buffer->bo = bo;
+
+ gbm_surface->current_buffer = tbm_surface;
+
+ __tpl_gbm_set_gbm_buffer_to_tbm_surface(tbm_surface, gbm_buffer);
+
+ TRACE_MARK("[DEQ][NEW]BO_NAME:%d", tbm_bo_export(gbm_buffer->bo));
+ TRACE_ASYNC_BEGIN((int)gbm_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
+ tbm_bo_export(gbm_buffer->bo));
+ TPL_LOG_B("GBM", "[DEQ][N] tpl_gbm_surface_t(%p) tbm_surface(%p) bo(%d)",
+ gbm_surface, tbm_surface, tbm_bo_export(bo));
+ return tbm_surface;
+}
+
+static void
+__tpl_gbm_buffer_free(tpl_gbm_buffer_t *gbm_buffer)
+{
+ TPL_ASSERT(gbm_buffer);
+ free(gbm_buffer);
+}
+
+tpl_bool_t
+__tpl_display_choose_backend_gbm(tpl_handle_t native_dpy)
+{
+ if (native_dpy == NULL)
+ return TPL_FALSE;
+
+ if (__tpl_gbm_display_is_gbm_device(native_dpy))
+ return TPL_TRUE;
+
+ return TPL_FALSE;
+}
+
+void
+__tpl_display_init_backend_gbm(tpl_display_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_GBM;
+ backend->data = NULL;
+
+ backend->init = __tpl_gbm_display_init;
+ backend->fini = __tpl_gbm_display_fini;
+ backend->query_config = __tpl_gbm_display_query_config;
+ backend->filter_config = __tpl_gbm_display_filter_config;
+ backend->get_window_info = __tpl_gbm_display_get_window_info;
+ backend->get_pixmap_info = __tpl_gbm_display_get_pixmap_info;
+ backend->get_buffer_from_native_pixmap =
+ __tpl_gbm_display_get_buffer_from_native_pixmap;
+}
+
+void
+__tpl_surface_init_backend_gbm(tpl_surface_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_GBM;
+ backend->data = NULL;
+
+ backend->init = __tpl_gbm_surface_init;
+ backend->fini = __tpl_gbm_surface_fini;
+ backend->validate = __tpl_gbm_surface_validate;
+ backend->dequeue_buffer = __tpl_gbm_surface_dequeue_buffer;
+ backend->enqueue_buffer = __tpl_gbm_surface_enqueue_buffer;
+}
--- /dev/null
+#define inline __inline__
+#include <wayland-client.h>
+#undef inline
+
+#include "tpl_internal.h"
+
+#include <tbm_surface.h>
+#include <tbm_surface_internal.h>
+#include <tbm_surface_queue.h>
+#include <wayland-tbm-client.h>
+
+#include <tbm_sync.h>
+
+#include "wayland-vulkan/wayland-vulkan-client-protocol.h"
+
+#define CLIENT_QUEUE_SIZE 3
+
+typedef struct _tpl_wayland_vk_wsi_display tpl_wayland_vk_wsi_display_t;
+typedef struct _tpl_wayland_vk_wsi_surface tpl_wayland_vk_wsi_surface_t;
+typedef struct _tpl_wayland_vk_wsi_buffer tpl_wayland_vk_wsi_buffer_t;
+
+struct _tpl_wayland_vk_wsi_display {
+ struct wayland_tbm_client *wl_tbm_client;
+ struct {
+ int min_buffer;
+ int max_buffer;
+ int present_modes;
+ } surface_capabilities;
+ struct wayland_vulkan *wl_vk_client;
+};
+
+struct _tpl_wayland_vk_wsi_surface {
+ tbm_surface_queue_h tbm_queue;
+ int buffer_count;
+ int present_mode;
+};
+
+struct _tpl_wayland_vk_wsi_buffer {
+ tpl_display_t *display;
+ struct wl_proxy *wl_proxy;
+ tbm_fd sync_timeline;
+ unsigned int sync_timestamp;
+};
+
+static const struct wl_registry_listener registry_listener;
+static const struct wl_callback_listener sync_listener;
+static const struct wl_callback_listener frame_listener;
+static const struct wl_buffer_listener buffer_release_listener;
+
+#define TPL_BUFFER_CACHE_MAX_ENTRIES 40
+
+static int tpl_wayland_vk_wsi_buffer_key;
+#define KEY_tpl_wayland_vk_wsi_buffer (unsigned long)(&tpl_wayland_vk_wsi_buffer_key)
+
+static void __tpl_wayland_vk_wsi_buffer_free(tpl_wayland_vk_wsi_buffer_t
+ *wayland_vk_wsi_buffer);
+static tpl_result_t __tpl_wayland_vk_wsi_surface_destroy_swapchain(
+ tpl_surface_t *surface);
+
+static TPL_INLINE tpl_wayland_vk_wsi_buffer_t *
+__tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface_h surface)
+{
+ tpl_wayland_vk_wsi_buffer_t *buf = NULL;
+
+ if (!tbm_surface_internal_is_valid(surface))
+ return NULL;
+
+ tbm_surface_internal_get_user_data(surface, KEY_tpl_wayland_vk_wsi_buffer,
+ (void **)&buf);
+ return buf;
+}
+
+static TPL_INLINE void
+__tpl_wayland_vk_wsi_set_wayland_buffer_to_tbm_surface(tbm_surface_h surface,
+ tpl_wayland_vk_wsi_buffer_t *buf)
+{
+ tbm_surface_internal_add_user_data(surface,
+ KEY_tpl_wayland_vk_wsi_buffer,
+ (tbm_data_free)__tpl_wayland_vk_wsi_buffer_free);
+ tbm_surface_internal_set_user_data(surface,
+ KEY_tpl_wayland_vk_wsi_buffer, buf);
+}
+
+static TPL_INLINE tpl_bool_t
+__tpl_wayland_vk_wsi_display_is_wl_display(tpl_handle_t native_dpy)
+{
+ TPL_ASSERT(native_dpy);
+
+ struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
+
+ /* MAGIC CHECK: A native display handle is a wl_display if its first
+    de-referenced value is the address of the wl_display_interface structure. */
+ if (wl_egl_native_dpy == &wl_display_interface) {
+ return TPL_TRUE;
+ }
+
+ if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
+ strlen(wl_display_interface.name)) == 0) {
+ return TPL_TRUE;
+ }
+
+ return TPL_FALSE;
+}
+
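+/* Block until the compositor has processed all pending requests: post a
+ * wl_display_sync() callback and dispatch events until it fires. */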
+static int
+__tpl_wayland_vk_wsi_display_roundtrip(tpl_display_t *display)
+{
+ struct wl_display *wl_dpy;
+ struct wl_callback *callback;
+ int done = 0, ret = 0;
+
+ TPL_ASSERT(display);
+ TPL_ASSERT(display->native_handle);
+ TPL_ASSERT(display->backend.data);
+
+ wl_dpy = (struct wl_display *) display->native_handle;
+
+ callback = wl_display_sync(wl_dpy);
+ wl_callback_add_listener(callback, &sync_listener, &done);
+
+ while (ret != -1 && !done) {
+ ret = wl_display_dispatch(wl_dpy);
+ }
+
+ return ret;
+}
+
+static void
+__tpl_wayland_vk_wsi_support_present_mode_listener(void *data,
+ struct wayland_vulkan *wayland_vulkan,
+ uint32_t mode)
+{
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = data;
+
+ switch (mode) {
+ case WAYLAND_VULKAN_PRESENT_MODE_TYPE_IMMEDIATE:
+ wayland_vk_wsi_display->surface_capabilities.present_modes
+ |= TPL_DISPLAY_PRESENT_MODE_IMMEDIATE;
+ break;
+ case WAYLAND_VULKAN_PRESENT_MODE_TYPE_MAILBOX:
+ wayland_vk_wsi_display->surface_capabilities.present_modes
+ |= TPL_DISPLAY_PRESENT_MODE_MAILBOX;
+ break;
+ case WAYLAND_VULKAN_PRESENT_MODE_TYPE_FIFO:
+ wayland_vk_wsi_display->surface_capabilities.present_modes
+ |= TPL_DISPLAY_PRESENT_MODE_FIFO;
+ break;
+ case WAYLAND_VULKAN_PRESENT_MODE_TYPE_FIFO_RELAXED:
+ wayland_vk_wsi_display->surface_capabilities.present_modes
+ |= TPL_DISPLAY_PRESENT_MODE_FIFO_RELAXED;
+ break;
+ default:
+ TPL_WARN("server sent unknown present type: %d", mode);
+ }
+}
+
+static struct wayland_vulkan_listener wl_vk_listener = {
+ __tpl_wayland_vk_wsi_support_present_mode_listener,
+};
+
+static void
+__tpl_wayland_vk_wsi_registry_handle_global(void *data, struct wl_registry *registry,
+ uint32_t name, const char *interface, uint32_t version)
+{
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = data;
+
+ if (!strcmp(interface, "wayland_vulkan")) {
+ wayland_vk_wsi_display->wl_vk_client =
+ wl_registry_bind(registry, name, &wayland_vulkan_interface, version);
+ }
+}
+
+static const struct wl_registry_listener registry_listener = {
+ __tpl_wayland_vk_wsi_registry_handle_global,
+ NULL
+};
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_display_init(tpl_display_t *display)
+{
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
+
+ TPL_ASSERT(display);
+
+ /* Do not allow default display in wayland. */
+ if (!display->native_handle) {
+ TPL_ERR("Invalid native handle for display.");
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *) calloc(1,
+ sizeof(tpl_wayland_vk_wsi_display_t));
+ if (!wayland_vk_wsi_display) {
+ TPL_ERR("Failed to allocate memory for new tpl_wayland_vk_wsi_display_t.");
+ return TPL_ERROR_OUT_OF_MEMORY;
+ }
+
+ wayland_vk_wsi_display->surface_capabilities.min_buffer = 2;
+ wayland_vk_wsi_display->surface_capabilities.max_buffer = CLIENT_QUEUE_SIZE;
+ wayland_vk_wsi_display->surface_capabilities.present_modes =
+ TPL_DISPLAY_PRESENT_MODE_MAILBOX;
+
+ display->backend.data = wayland_vk_wsi_display;
+
+ if (__tpl_wayland_vk_wsi_display_is_wl_display(display->native_handle)) {
+ struct wl_display *wl_dpy =
+ (struct wl_display *)display->native_handle;
+ struct wl_registry *wl_registry;
+
+ wayland_vk_wsi_display->wl_tbm_client =
+ wayland_tbm_client_init((struct wl_display *) wl_dpy);
+
+ if (!wayland_vk_wsi_display->wl_tbm_client) {
+ TPL_ERR("Wayland TBM initialization failed!");
+ goto free_wl_display;
+ }
+
+ wl_registry = wl_display_get_registry(wl_dpy);
+ /* TODO: check that wl_registry is valid before adding the listener. */
+ wl_registry_add_listener(wl_registry, &registry_listener, wayland_vk_wsi_display);
+ wl_display_roundtrip(wl_dpy);
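+ /* The roundtrip above delivers the registry globals, which binds wl_vk_client
+  * in the registry handler; the second roundtrip below lets the present-mode
+  * events from wl_vk_listener arrive before the registry is destroyed. */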
+
+ if (wayland_vk_wsi_display->wl_vk_client)
+ wayland_vulkan_add_listener(wayland_vk_wsi_display->wl_vk_client,
+ &wl_vk_listener, wayland_vk_wsi_display);
+
+ wl_display_roundtrip(wl_dpy);
+ wl_registry_destroy(wl_registry);
+ } else {
+ goto free_wl_display;
+ }
+
+ return TPL_ERROR_NONE;
+
+free_wl_display:
+ if (wayland_vk_wsi_display) {
+ free(wayland_vk_wsi_display);
+ display->backend.data = NULL;
+ }
+ return TPL_ERROR_INVALID_OPERATION;
+}
+
+static void
+__tpl_wayland_vk_wsi_display_fini(tpl_display_t *display)
+{
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display;
+
+ TPL_ASSERT(display);
+
+ wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)display->backend.data;
+ if (wayland_vk_wsi_display) {
+ wayland_tbm_client_deinit(wayland_vk_wsi_display->wl_tbm_client);
+ if (wayland_vk_wsi_display->wl_vk_client)
+ wayland_vulkan_destroy(wayland_vk_wsi_display->wl_vk_client);
+ free(wayland_vk_wsi_display);
+ }
+ display->backend.data = NULL;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_display_query_config(tpl_display_t *display,
+ tpl_surface_type_t surface_type,
+ int red_size, int green_size,
+ int blue_size, int alpha_size,
+ int color_depth, int *native_visual_id,
+ tpl_bool_t *is_slow)
+{
+ TPL_ASSERT(display);
+
+ if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
+ green_size == 8 && blue_size == 8 &&
+ (color_depth == 32 || color_depth == 24)) {
+
+ if (alpha_size == 8) {
+ if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
+ if (is_slow) *is_slow = TPL_FALSE;
+ return TPL_ERROR_NONE;
+ }
+ if (alpha_size == 0) {
+ if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
+ if (is_slow) *is_slow = TPL_FALSE;
+ return TPL_ERROR_NONE;
+ }
+ }
+
+ return TPL_ERROR_INVALID_PARAMETER;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_display_filter_config(tpl_display_t *display,
+ int *visual_id,
+ int alpha_size)
+{
+ TPL_IGNORE(display);
+ TPL_IGNORE(visual_id);
+ TPL_IGNORE(alpha_size);
+ return TPL_ERROR_NONE;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_display_query_window_supported_buffer_count(
+ tpl_display_t *display,
+ tpl_handle_t window, int *min, int *max)
+{
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
+
+ TPL_ASSERT(display);
+ TPL_ASSERT(window);
+
+ wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)display->backend.data;
+
+ if (!wayland_vk_wsi_display) return TPL_ERROR_INVALID_OPERATION;
+
+ if (min) *min = wayland_vk_wsi_display->surface_capabilities.min_buffer;
+ if (max) *max = wayland_vk_wsi_display->surface_capabilities.max_buffer;
+
+ return TPL_ERROR_NONE;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_display_query_window_supported_present_modes(
+ tpl_display_t *display,
+ tpl_handle_t window, int *modes)
+{
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
+
+ TPL_ASSERT(display);
+ TPL_ASSERT(window);
+
+ wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)display->backend.data;
+
+ if (!wayland_vk_wsi_display) return TPL_ERROR_INVALID_OPERATION;
+
+ if (modes) {
+ *modes = TPL_DISPLAY_PRESENT_MODE_MAILBOX | TPL_DISPLAY_PRESENT_MODE_IMMEDIATE |
+ wayland_vk_wsi_display->surface_capabilities.present_modes;
+ }
+
+ return TPL_ERROR_NONE;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_surface_init(tpl_surface_t *surface)
+{
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
+ TPL_ASSERT(surface->native_handle);
+
+ wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) calloc(1,
+ sizeof(tpl_wayland_vk_wsi_surface_t));
+ if (!wayland_vk_wsi_surface) {
+ TPL_ERR("Failed to allocate memory for new tpl_wayland_vk_wsi_surface_t.");
+ return TPL_ERROR_OUT_OF_MEMORY;
+ }
+
+ surface->backend.data = (void *)wayland_vk_wsi_surface;
+ wayland_vk_wsi_surface->tbm_queue = NULL;
+
+ return TPL_ERROR_NONE;
+}
+
+static void
+__tpl_wayland_vk_wsi_surface_fini(tpl_surface_t *surface)
+{
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->display);
+
+ wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
+ if (wayland_vk_wsi_surface == NULL) return;
+
+ wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)
+ surface->display->backend.data;
+ if (wayland_vk_wsi_display == NULL) return;
+
+ if (wayland_vk_wsi_surface->tbm_queue)
+ __tpl_wayland_vk_wsi_surface_destroy_swapchain(surface);
+
+ free(wayland_vk_wsi_surface);
+ surface->backend.data = NULL;
+}
+
+static void
+__tpl_wayland_vk_wsi_surface_commit_buffer(tpl_surface_t *surface,
+ tbm_surface_h tbm_surface)
+{
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->display);
+ TPL_ASSERT(surface->display->native_handle);
+ TPL_ASSERT(tbm_surface);
+ TPL_ASSERT(tbm_surface_internal_is_valid(tbm_surface));
+
+ struct wl_surface *wl_sfc = NULL;
+ struct wl_callback *frame_callback = NULL;
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface =
+ (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
+ tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer =
+ __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface);
+ TPL_ASSERT(wayland_vk_wsi_buffer);
+
+
+ wl_sfc = (struct wl_surface *)surface->native_handle;
+
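+ /* Take a reference on the tbm_surface (dropped in the wl_buffer release
+  * callback), attach the wl_buffer, damage the full surface, request a frame
+  * callback and commit; afterwards bump the sync timestamp and release the
+  * buffer back to the tbm queue. */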
+ tbm_surface_internal_ref(tbm_surface);
+ wl_surface_attach(wl_sfc, (void *)wayland_vk_wsi_buffer->wl_proxy, 0, 0);
+
+ /* TODO: add num_rects and rects to tpl_wayland_vk_wsi_buffer_t */
+ wl_surface_damage(wl_sfc, 0, 0, surface->width, surface->height);
+
+ frame_callback = wl_surface_frame(wl_sfc);
+ wl_callback_add_listener(frame_callback, &frame_listener, tbm_surface);
+
+ wl_surface_commit(wl_sfc);
+
+ wl_display_flush(surface->display->native_handle);
+ wayland_vk_wsi_buffer->sync_timestamp++;
+
+ tbm_surface_queue_release(wayland_vk_wsi_surface->tbm_queue, tbm_surface);
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_surface_enqueue_buffer(tpl_surface_t *surface,
+ tbm_surface_h tbm_surface,
+ int num_rects, const int *rects,
+ tbm_fd sync_fence)
+{
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->display);
+ TPL_ASSERT(surface->display->native_handle);
+ TPL_ASSERT(tbm_surface);
+
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface =
+ (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
+ tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer = NULL;
+ tbm_surface_queue_error_e tsq_err;
+
+ if (!tbm_surface_internal_is_valid(tbm_surface)) {
+ TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.", tbm_surface);
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ wayland_vk_wsi_buffer =
+ __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface);
+ TPL_ASSERT(wayland_vk_wsi_buffer);
+
+ TPL_IMAGE_DUMP(tbm_surface, surface->width, surface->height);
+
+ tbm_surface_internal_unref(tbm_surface);
+
+ tsq_err = tbm_surface_queue_enqueue(wayland_vk_wsi_surface->tbm_queue,
+ tbm_surface);
+ if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
+ TPL_ERR("Failed to enqeueue tbm_surface. | tsq_err = %d", tsq_err);
+ return TPL_ERROR_INVALID_OPERATION;
+ }
+
+ if (sync_fence != -1) {
+ /* Non-worker-thread mode. */
+ /* TODO: set a maximum wait time. */
+ if (tbm_sync_fence_wait(sync_fence, -1) != 1) {
+ char buf[1024];
+ strerror_r(errno, buf, sizeof(buf));
+ TPL_ERR("Failed to wait sync. | error: %d(%s)", errno, buf);
+ }
+ close(sync_fence);
+ }
+
+ tsq_err = tbm_surface_queue_acquire(wayland_vk_wsi_surface->tbm_queue,
+ &tbm_surface);
+ if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
+ TPL_ERR("Failed to acquire tbm_surface. | tsq_err = %d", tsq_err);
+ return TPL_ERROR_INVALID_OPERATION;
+ }
+
+ __tpl_wayland_vk_wsi_surface_commit_buffer(surface, tbm_surface);
+
+
+ /*
+ * The tbm_surface has been inserted into the free queue above.
+ * In single-threaded use, tbm_surface_queue_can_dequeue() always returns true
+ * and __tpl_wayland_vk_wsi_surface_dequeue_buffer() never calls
+ * wl_display_dispatch(), so the wayland event queue can fill up and cause a
+ * broken pipe. Therefore wl_display_dispatch() must be called here.
+ * TODO: discuss where wl_display_dispatch() belongs (dequeue or a worker thread?).
+ */
+ wl_display_dispatch(surface->display->native_handle);
+
+ return TPL_ERROR_NONE;
+}
+
+static tpl_bool_t
+__tpl_wayland_vk_wsi_surface_validate(tpl_surface_t *surface)
+{
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+
+ return TPL_TRUE;
+}
+
+static tbm_surface_h
+__tpl_wayland_vk_wsi_surface_dequeue_buffer(tpl_surface_t *surface,
+ uint64_t timeout_ns,
+ tbm_fd *sync_fence)
+{
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+ TPL_ASSERT(surface->display);
+
+ tbm_surface_h tbm_surface = NULL;
+ tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer = NULL;
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface =
+ (tpl_wayland_vk_wsi_surface_t *)surface->backend.data;
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display =
+ (tpl_wayland_vk_wsi_display_t *)surface->display->backend.data;
+ struct wl_proxy *wl_proxy = NULL;
+ tbm_surface_queue_error_e tsq_err = 0;
+
+ if (sync_fence)
+ *sync_fence = -1;
+
+ TPL_OBJECT_UNLOCK(surface);
+ while (tbm_surface_queue_can_dequeue(
+ wayland_vk_wsi_surface->tbm_queue, 0) == 0) {
+ /* The application has sent all of its buffers to the server; wait for the server to release one. */
+
+ if (wl_display_dispatch(surface->display->native_handle) == -1) {
+ TPL_OBJECT_LOCK(surface);
+ return NULL;
+ }
+ }
+ TPL_OBJECT_LOCK(surface);
+
+ tsq_err = tbm_surface_queue_dequeue(wayland_vk_wsi_surface->tbm_queue,
+ &tbm_surface);
+
+ if (!tbm_surface) {
+ TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
+ tsq_err);
+ return NULL;
+ }
+
+ tbm_surface_internal_ref(tbm_surface);
+
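+ /* Reused buffer: if a tpl_wayland_vk_wsi_buffer_t is already attached to this
+  * tbm_surface, create a fence on its sync timeline at the current
+  * sync_timestamp (bumped on every commit) so the caller can wait on it, and
+  * return the buffer immediately. */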
+ if ((wayland_vk_wsi_buffer =
+ __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(
+ tbm_surface)) != NULL) {
+ if (sync_fence) {
+ if (wayland_vk_wsi_buffer->sync_timestamp) {
+ /* sync_timestamp > 0 means this buffer has been committed before, so a
+ * fence can be created at that point; on the very first dequeue -1 is
+ * returned below instead. */
+ char name[32];
+ snprintf(name, 32, "%d",
+ tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
+ *sync_fence = tbm_sync_fence_create(wayland_vk_wsi_buffer->sync_timeline,
+ name,
+ wayland_vk_wsi_buffer->sync_timestamp);
+ if (*sync_fence == -1) {
+ char buf[1024];
+ strerror_r(errno, buf, sizeof(buf));
+ TPL_ERR("Failed to create TBM sync fence: %d(%s)", errno, buf);
+ }
+ } else {
+ *sync_fence = -1;
+ }
+ }
+ return tbm_surface;
+ }
+
+ wayland_vk_wsi_buffer = (tpl_wayland_vk_wsi_buffer_t *) calloc(1,
+ sizeof(tpl_wayland_vk_wsi_buffer_t));
+ if (!wayland_vk_wsi_buffer) {
+ TPL_ERR("Mem alloc for wayland_vk_wsi_buffer failed!");
+ tbm_surface_internal_unref(tbm_surface);
+ return NULL;
+ }
+
+ wl_proxy = (struct wl_proxy *)wayland_tbm_client_create_buffer(
+ wayland_vk_wsi_display->wl_tbm_client, tbm_surface);
+ if (!wl_proxy) {
+ TPL_ERR("Failed to create TBM client buffer!");
+ tbm_surface_internal_unref(tbm_surface);
+ free(wayland_vk_wsi_buffer);
+ return NULL;
+ }
+
+ /* A newly created buffer has no pending work, so report an already-signaled fence (-1). */
+ if (sync_fence)
+ *sync_fence = -1;
+ wayland_vk_wsi_buffer->sync_timeline = tbm_sync_timeline_create();
+ if (wayland_vk_wsi_buffer->sync_timeline == -1) {
+ char buf[1024];
+ strerror_r(errno, buf, sizeof(buf));
+ TPL_ERR("Failed to create TBM sync timeline: %d(%s)", errno, buf);
+ wl_proxy_destroy(wl_proxy);
+ tbm_surface_internal_unref(tbm_surface);
+ free(wayland_vk_wsi_buffer);
+ return NULL;
+ }
+ wayland_vk_wsi_buffer->sync_timestamp = 0;
+ wayland_tbm_client_set_sync_timeline(wayland_vk_wsi_display->wl_tbm_client,
+ (void *)wl_proxy,
+ wayland_vk_wsi_buffer->sync_timeline);
+
+ wl_buffer_add_listener((void *)wl_proxy, &buffer_release_listener,
+ tbm_surface);
+
+ wl_display_flush((struct wl_display *)surface->display->native_handle);
+
+ wayland_vk_wsi_buffer->display = surface->display;
+ wayland_vk_wsi_buffer->wl_proxy = wl_proxy;
+
+ __tpl_wayland_vk_wsi_set_wayland_buffer_to_tbm_surface(tbm_surface,
+ wayland_vk_wsi_buffer);
+
+ return tbm_surface;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_surface_get_swapchain_buffers(tpl_surface_t *surface,
+ tbm_surface_h **buffers,
+ int *buffer_count)
+{
+ tbm_surface_h buffer = NULL;
+ tbm_surface_h *swapchain_buffers = NULL;
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
+ tbm_surface_queue_error_e tsq_err;
+ int i, dequeue_count;
+ tpl_result_t ret = TPL_ERROR_NONE;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+ TPL_ASSERT(surface->display);
+ TPL_ASSERT(buffers);
+ TPL_ASSERT(buffer_count);
+
+ wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *)surface->backend.data;
+ swapchain_buffers = (tbm_surface_h *)calloc(
+ wayland_vk_wsi_surface->buffer_count, sizeof(tbm_surface_h));
+ if (!swapchain_buffers) {
+ TPL_ERR("Failed to allocate memory for buffers.");
+ return TPL_ERROR_OUT_OF_MEMORY;
+ }
+
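+ /* Dequeue every buffer once to collect the tbm_surfaces backing the queue,
+  * then release them all again below so they stay available for normal use. */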
+ for (i = 0 ; i < wayland_vk_wsi_surface->buffer_count ; i++) {
+ tsq_err = tbm_surface_queue_dequeue(wayland_vk_wsi_surface->tbm_queue, &buffer);
+ if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
+ TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
+ tsq_err);
+ dequeue_count = i;
+ ret = TPL_ERROR_OUT_OF_MEMORY;
+ goto get_buffer_fail;
+ }
+ swapchain_buffers[i] = buffer;
+ }
+
+ for (i = 0 ; i < wayland_vk_wsi_surface->buffer_count ; i++) {
+ tsq_err = tbm_surface_queue_release(wayland_vk_wsi_surface->tbm_queue,
+ swapchain_buffers[i]);
+ if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
+ TPL_ERR("Failed to release tbm_surface. | tsq_err = %d", tsq_err);
+ ret = TPL_ERROR_INVALID_OPERATION;
+ goto release_buffer_fail;
+ }
+ }
+
+ *buffers = swapchain_buffers;
+ *buffer_count = wayland_vk_wsi_surface->buffer_count;
+ return TPL_ERROR_NONE;
+
+get_buffer_fail:
+ for (i = 0 ; i < dequeue_count ; i++) {
+ tsq_err = tbm_surface_queue_release(wayland_vk_wsi_surface->tbm_queue,
+ swapchain_buffers[i]);
+ if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
+ TPL_ERR("Failed to release tbm_surface. | tsq_err = %d", tsq_err);
+ goto release_buffer_fail;
+ }
+ }
+
+release_buffer_fail:
+ free(swapchain_buffers);
+ return ret;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_surface_create_swapchain(tpl_surface_t *surface,
+ tbm_format format, int width,
+ int height, int buffer_count, int present_mode)
+{
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display = NULL;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+ TPL_ASSERT(surface->display);
+
+ wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
+ TPL_ASSERT(wayland_vk_wsi_surface);
+
+ wayland_vk_wsi_display = (tpl_wayland_vk_wsi_display_t *)
+ surface->display->backend.data;
+ TPL_ASSERT(wayland_vk_wsi_display);
+
+ if ((buffer_count < wayland_vk_wsi_display->surface_capabilities.min_buffer)
+ || (buffer_count > wayland_vk_wsi_display->surface_capabilities.max_buffer)) {
+ TPL_ERR("Invalid buffer_count!");
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+
+ /* FIXME: vblank currently has performance problems, so force every present mode to MAILBOX. */
+ present_mode = TPL_DISPLAY_PRESENT_MODE_MAILBOX;
+
+ if ((present_mode & wayland_vk_wsi_display->surface_capabilities.present_modes) == 0) {
+ /* The server does not support the requested mode; check whether the client can handle it. */
+ switch (present_mode) {
+ case TPL_DISPLAY_PRESENT_MODE_MAILBOX:
+ case TPL_DISPLAY_PRESENT_MODE_IMMEDIATE:
+ break;
+ default:
+ TPL_ERR("Unsupported present mode: %d", present_mode);
+ return TPL_ERROR_INVALID_PARAMETER;
+ }
+ }
+
+ wayland_vk_wsi_surface->present_mode = present_mode;
+
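+ /* NOTE: the requested format parameter is not used here; the queue is always
+  * created with TBM_FORMAT_ARGB8888. */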
+ wayland_vk_wsi_surface->tbm_queue = tbm_surface_queue_create(buffer_count,
+ width,
+ height,
+ TBM_FORMAT_ARGB8888,
+ 0);
+
+ if (!wayland_vk_wsi_surface->tbm_queue) {
+ TPL_ERR("TBM surface queue creation failed!");
+ return TPL_ERROR_OUT_OF_MEMORY;
+ }
+
+ wayland_vk_wsi_surface->buffer_count = buffer_count;
+
+ surface->width = width;
+ surface->height = height;
+
+ return TPL_ERROR_NONE;
+}
+
+static tpl_result_t
+__tpl_wayland_vk_wsi_surface_destroy_swapchain(tpl_surface_t *surface)
+{
+ tpl_wayland_vk_wsi_surface_t *wayland_vk_wsi_surface = NULL;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+ TPL_ASSERT(surface->display);
+
+ wayland_vk_wsi_surface = (tpl_wayland_vk_wsi_surface_t *) surface->backend.data;
+ TPL_ASSERT(wayland_vk_wsi_surface);
+
+ if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
+
+ wl_display_flush(surface->display->native_handle);
+ __tpl_wayland_vk_wsi_display_roundtrip(surface->display);
+
+ tbm_surface_queue_destroy(wayland_vk_wsi_surface->tbm_queue);
+ wayland_vk_wsi_surface->tbm_queue = NULL;
+ }
+
+ return TPL_ERROR_NONE;
+}
+
+static void
+__tpl_wayland_vk_wsi_buffer_free(tpl_wayland_vk_wsi_buffer_t
+ *wayland_vk_wsi_buffer)
+{
+ TPL_ASSERT(wayland_vk_wsi_buffer);
+ TPL_ASSERT(wayland_vk_wsi_buffer->display);
+
+ tpl_wayland_vk_wsi_display_t *wayland_vk_wsi_display =
+ (tpl_wayland_vk_wsi_display_t *)wayland_vk_wsi_buffer->display->backend.data;
+
+ wl_display_flush((struct wl_display *)
+ wayland_vk_wsi_buffer->display->native_handle);
+
+ if (wayland_vk_wsi_buffer->wl_proxy)
+ wayland_tbm_client_destroy_buffer(wayland_vk_wsi_display->wl_tbm_client,
+ (void *)wayland_vk_wsi_buffer->wl_proxy);
+
+ if (wayland_vk_wsi_buffer->sync_timeline != -1)
+ close(wayland_vk_wsi_buffer->sync_timeline);
+
+ free(wayland_vk_wsi_buffer);
+}
+
+tpl_bool_t
+__tpl_display_choose_backend_wayland_vk_wsi(tpl_handle_t native_dpy)
+{
+ if (!native_dpy) return TPL_FALSE;
+
+ if (__tpl_wayland_vk_wsi_display_is_wl_display(native_dpy))
+ return TPL_TRUE;
+
+ return TPL_FALSE;
+}
+
+void
+__tpl_display_init_backend_wayland_vk_wsi(tpl_display_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_WAYLAND_VULKAN_WSI;
+ backend->data = NULL;
+
+ backend->init = __tpl_wayland_vk_wsi_display_init;
+ backend->fini = __tpl_wayland_vk_wsi_display_fini;
+ backend->query_config = __tpl_wayland_vk_wsi_display_query_config;
+ backend->filter_config = __tpl_wayland_vk_wsi_display_filter_config;
+ backend->query_window_supported_buffer_count =
+ __tpl_wayland_vk_wsi_display_query_window_supported_buffer_count;
+ backend->query_window_supported_present_modes =
+ __tpl_wayland_vk_wsi_display_query_window_supported_present_modes;
+}
+
+void
+__tpl_surface_init_backend_wayland_vk_wsi(tpl_surface_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_WAYLAND_VULKAN_WSI;
+ backend->data = NULL;
+
+ backend->init = __tpl_wayland_vk_wsi_surface_init;
+ backend->fini = __tpl_wayland_vk_wsi_surface_fini;
+ backend->validate = __tpl_wayland_vk_wsi_surface_validate;
+ backend->dequeue_buffer = __tpl_wayland_vk_wsi_surface_dequeue_buffer;
+ backend->enqueue_buffer = __tpl_wayland_vk_wsi_surface_enqueue_buffer;
+ backend->get_swapchain_buffers =
+ __tpl_wayland_vk_wsi_surface_get_swapchain_buffers;
+ backend->create_swapchain = __tpl_wayland_vk_wsi_surface_create_swapchain;
+ backend->destroy_swapchain = __tpl_wayland_vk_wsi_surface_destroy_swapchain;
+}
+
+static void
+__cb_client_sync_callback(void *data, struct wl_callback *callback,
+ uint32_t serial)
+{
+ int *done;
+
+ TPL_ASSERT(data);
+
+ done = data;
+ *done = 1;
+
+ wl_callback_destroy(callback);
+}
+
+static const struct wl_callback_listener sync_listener = {
+ __cb_client_sync_callback
+};
+
+static void
+__cb_client_frame_callback(void *data, struct wl_callback *callback,
+ uint32_t time)
+{
+ /* The buffer reclaim logic was moved to buffer_release_callback(), which is a
+ better point than frame_callback() to delete or reuse a buffer.
+ This callback is kept because buffer_release_callback() only works while
+ frame_callback() is activated. */
+ TPL_IGNORE(data);
+ TPL_IGNORE(time);
+
+ wl_callback_destroy(callback);
+}
+
+static const struct wl_callback_listener frame_listener = {
+ __cb_client_frame_callback
+};
+
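+/* Called when the compositor releases the wl_buffer: drop the reference taken
+ * on the tbm_surface in __tpl_wayland_vk_wsi_surface_commit_buffer(). */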
+static void
+__cb_client_buffer_release_callback(void *data, struct wl_proxy *proxy)
+{
+ tpl_wayland_vk_wsi_buffer_t *wayland_vk_wsi_buffer = NULL;
+ tbm_surface_h tbm_surface = NULL;
+
+ TPL_ASSERT(data);
+
+ tbm_surface = (tbm_surface_h) data;
+
+ wayland_vk_wsi_buffer =
+ __tpl_wayland_vk_wsi_get_wayland_buffer_from_tbm_surface(tbm_surface);
+
+ if (wayland_vk_wsi_buffer)
+ tbm_surface_internal_unref(tbm_surface);
+}
+
+static const struct wl_buffer_listener buffer_release_listener = {
+ (void *)__cb_client_buffer_release_callback,
+};
--- /dev/null
+#include <stdlib.h>
+#include <stdint.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <pthread.h>
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <X11/Xproto.h>
+
+#include <dri2/dri2.h>
+#include <tbm_bufmgr.h>
+
+#include "tpl_internal.h"
+
+#include "tpl_x11_internal.h"
+
+static pthread_mutex_t global_mutex = PTHREAD_MUTEX_INITIALIZER;
+
+pthread_mutex_t
+__tpl_x11_get_global_mutex()
+{
+ return global_mutex;
+}
+
+void
+__tpl_x11_swap_str_to_swap_type(char *str, tpl_x11_swap_type_t *type)
+{
+ int swap_type;
+
+ TPL_ASSERT(type);
+
+ if (str == NULL)
+ return;
+
+ swap_type = strtol(str, NULL, 0);
+
+ switch (swap_type) {
+ case TPL_X11_SWAP_TYPE_SYNC:
+ case TPL_X11_SWAP_TYPE_ASYNC:
+ case TPL_X11_SWAP_TYPE_LAZY:
+ *type = swap_type;
+ break;
+ default:
+ break;
+ }
+}
+
+tpl_buffer_t *
+__tpl_x11_surface_buffer_cache_find(tpl_list_t *buffer_cache,
+ unsigned int name)
+{
+ tpl_list_node_t *node;
+
+ TPL_ASSERT(buffer_cache);
+
+ node = __tpl_list_get_front_node(buffer_cache);
+
+ while (node) {
+ tpl_buffer_t *buffer = (tpl_buffer_t *) __tpl_list_node_get_data(node);
+
+ TPL_ASSERT(buffer);
+
+ if (buffer->key == name)
+ return buffer;
+
+ node = __tpl_list_node_next(node);
+ }
+
+ return NULL;
+}
+
+void
+__tpl_x11_surface_buffer_cache_remove(tpl_list_t *buffer_cache,
+ unsigned int name)
+{
+ tpl_list_node_t *node;
+
+ TPL_ASSERT(buffer_cache);
+
+ node = __tpl_list_get_front_node(buffer_cache);
+
+ while (node) {
+ tpl_buffer_t *buffer = (tpl_buffer_t *) __tpl_list_node_get_data(node);
+
+ TPL_ASSERT(buffer);
+
+ if (buffer->key == name) {
+ tpl_object_unreference(&buffer->base);
+ __tpl_list_remove(node, NULL);
+ return;
+ }
+
+ node = __tpl_list_node_next(node);
+ }
+}
+
+tpl_bool_t
+__tpl_x11_surface_buffer_cache_add(tpl_list_t *buffer_cache,
+ tpl_buffer_t *buffer)
+{
+ TPL_ASSERT(buffer_cache);
+ TPL_ASSERT(buffer);
+
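+ /* Evict the oldest entry when the cache is full, then take a reference on the
+  * new buffer and append it to the back of the list. */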
+ if (__tpl_list_get_count(buffer_cache) >= TPL_BUFFER_CACHE_MAX_ENTRIES) {
+ tpl_buffer_t *evict = __tpl_list_pop_front(buffer_cache, NULL);
+
+ TPL_ASSERT(evict);
+
+ tpl_object_unreference(&evict->base);
+ }
+
+ if (-1 == tpl_object_reference(&buffer->base))
+ return TPL_FALSE;
+
+ return __tpl_list_push_back(buffer_cache, (void *)buffer);
+}
+
+void
+__tpl_x11_surface_buffer_cache_clear(tpl_list_t *buffer_cache)
+{
+ TPL_ASSERT(buffer_cache);
+
+ __tpl_list_fini(buffer_cache, (tpl_free_func_t)tpl_object_unreference);
+}
+
+
+tpl_bool_t
+__tpl_x11_display_query_config(tpl_display_t *display,
+ tpl_surface_type_t surface_type, int red_size,
+ int green_size, int blue_size, int alpha_size,
+ int color_depth, int *native_visual_id, tpl_bool_t *is_slow)
+{
+ Display *native_display;
+
+ TPL_IGNORE(alpha_size);
+
+ TPL_ASSERT(display);
+ TPL_ASSERT(display->native_handle);
+
+ native_display = (Display *)display->native_handle;
+
+ if (red_size != TPL_DONT_CARE || green_size != TPL_DONT_CARE ||
+ blue_size != TPL_DONT_CARE || color_depth != TPL_DONT_CARE) {
+ if (surface_type == TPL_SURFACE_TYPE_WINDOW) {
+ XVisualInfo *visual_formats;
+ int num_visual_formats;
+ int i;
+
+ visual_formats = XGetVisualInfo(native_display, 0, NULL,
+ &num_visual_formats);
+ TPL_ASSERT(visual_formats);
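+ /* Derive per-channel bit widths from the visual's channel masks: the
+  * difference between consecutive leading-zero counts gives each component
+  * size (assuming R, G, B are packed from high bits to low). */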
+ for (i = 0; i < num_visual_formats; i++) {
+ int clz[3];
+ int col_size[3];
+
+ clz[0] = __tpl_util_clz(visual_formats[i].red_mask);
+ clz[1] = __tpl_util_clz(visual_formats[i].green_mask);
+ clz[2] = __tpl_util_clz(visual_formats[i].blue_mask);
+
+ col_size[0] = clz[1] - clz[0];
+ col_size[1] = clz[2] - clz[1];
+ col_size[2] = 32 - clz[2];
+
+ if ((red_size == TPL_DONT_CARE || col_size[0] == red_size) &&
+ (green_size == TPL_DONT_CARE || col_size[1] == green_size) &&
+ (blue_size == TPL_DONT_CARE || col_size[2] == blue_size)) {
+ if (native_visual_id != NULL)
+ *native_visual_id = visual_formats[i].visualid;
+
+ if (is_slow != NULL)
+ *is_slow = TPL_FALSE;
+
+ return TPL_TRUE;
+ }
+ }
+ XFree(visual_formats);
+ visual_formats = NULL;
+ }
+
+ if (surface_type == TPL_SURFACE_TYPE_PIXMAP) {
+ XPixmapFormatValues *pixmap_formats;
+ int num_pixmap_formats;
+ int i;
+
+ pixmap_formats = XListPixmapFormats(native_display, &num_pixmap_formats);
+ TPL_ASSERT(pixmap_formats);
+ for (i = 0; i < num_pixmap_formats; i++) {
+ if (color_depth == TPL_DONT_CARE ||
+ pixmap_formats[i].depth == color_depth) {
+ if (is_slow != NULL)
+ *is_slow = TPL_FALSE;
+
+ return TPL_TRUE;
+ }
+ }
+ XFree(pixmap_formats);
+ pixmap_formats = NULL;
+ }
+
+ return TPL_FALSE;
+
+ }
+
+ return TPL_TRUE;
+}
+
+#if 0
+static void tpl_handle_and_free_error( Display *dpy, xcb_generic_error_t *error,
+ const char *request_string )
+{
+ char error_txt[256];
+
+ if ( error ) {
+ int len = sizeof(error_txt) / sizeof(error_txt[0]);
+
+ XGetErrorText( dpy, error->error_code, error_txt, len );
+ error_txt[ len - 1] = '\0';
+ TPL_WARN("%s failed \"[%d]:%s\"", request_string, error->error_code,
+ error_txt );
+ free(error);
+ } else {
+ TPL_WARN("%s failed \"Unknown error\"", request_string );
+ }
+}
+
+static tpl_bool_t tpl_check_reply_for_error(Display *dpy,
+ xcb_generic_reply_t *reply, xcb_generic_error_t *error,
+ const char *request_string)
+{
+ tpl_bool_t retval = TPL_FALSE;
+
+ if (error || reply == NULL) {
+ tpl_handle_and_free_error( dpy, error, request_string );
+ } else {
+ retval = TPL_TRUE;
+ }
+
+ return retval;
+}
+static XVisualInfo *tpl_find_visual( Display *dpy, xcb_visualid_t visual_id )
+{
+ XVisualInfo *visual_info;
+ XVisualInfo visual_info_template;
+ int matching_count;
+
+ visual_info_template.visualid = visual_id;
+
+ visual_info = XGetVisualInfo(dpy, VisualIDMask, &visual_info_template,
+ &matching_count);
+
+
+ return visual_info;
+}
+static int tpl_get_alpha_offset( int offset_r, int offset_g, int offset_b,
+ int bpp )
+{
+ int ret = -1;
+
+ TPL_CHECK_ON_FALSE_ASSERT_FAIL( bpp == 32,
+ "alpha only supported for 32bits pixel formats");
+
+ if ( offset_r != 0 && offset_g != 0 && offset_b != 0 ) {
+ ret = 0;
+ } else if ( offset_r != 24 && offset_g != 24 && offset_b != 24 ) {
+ ret = 24;
+ } else {
+ TPL_CHECK_ON_FALSE_ASSERT_FAIL(TPL_FALSE,
+ "Alpha component has to be at either the offset 0 or 24");
+ }
+
+ return ret;
+}
+static int tpl_get_offset( unsigned long mask, int depth )
+{
+ int res = -1;
+ int count;
+
+ for (count = 0; count < depth; count++) {
+ if (mask & 1) {
+ res = count;
+ break;
+ }
+ mask = mask >> 1;
+ }
+
+ return res;
+}
+/* Convert the given combination of offsets and bpp into a color buffer format */
+static tpl_format_t tpl_offsets_to_color_buffer_format( int offset_r,
+ int offset_g, int offset_b, int offset_a, int bpp )
+{
+ tpl_format_t retval = TPL_FORMAT_INVALID;
+
+ if ( offset_b == 11 && offset_g == 5 && offset_r == 0 && offset_a == -1 &&
+ bpp == 16) {
+ retval = TPL_FORMAT_BGR565;
+ } else if ( offset_r == 11 && offset_g == 5 && offset_b == 0 &&
+ offset_a == -1 && bpp == 16) {
+ retval = TPL_FORMAT_RGB565;
+ }
+
+ else if ( offset_a == 24 && offset_b == 16 && offset_g == 8 &&
+ offset_r == 0 && bpp == 32) {
+ retval = TPL_FORMAT_ABGR8888;
+ } else if ( offset_a == 24 && offset_r == 16 && offset_g == 8 &&
+ offset_b == 0 && bpp == 32) {
+ retval = TPL_FORMAT_ARGB8888;
+ } else if ( offset_b == 24 && offset_g == 16 && offset_r == 8 &&
+ offset_a == 0 && bpp == 32) {
+ retval = TPL_FORMAT_BGRA8888;
+ } else if ( offset_r == 24 && offset_g == 16 && offset_b == 8 &&
+ offset_a == 0 && bpp == 32) {
+ retval = TPL_FORMAT_RGBA8888;
+ }
+
+ else if ( offset_b == 16 && offset_g == 8 && offset_r == 0 &&
+ offset_a == -1 && bpp == 32) {
+ retval = TPL_FORMAT_XBGR8888;
+ } else if ( offset_r == 16 && offset_g == 8 && offset_b == 0 &&
+ offset_a == -1 && bpp == 32) {
+ retval = TPL_FORMAT_XRGB8888;
+ } else if ( offset_b == 24 && offset_g == 16 && offset_r == 8 &&
+ offset_a == -1 && bpp == 32) {
+ retval = TPL_FORMAT_BGRX8888;
+ } else if ( offset_r == 24 && offset_g == 16 && offset_b == 8 &&
+ offset_a == -1 && bpp == 32) {
+ retval = TPL_FORMAT_RGBX8888;
+ }
+
+ else if ( offset_b == 16 && offset_g == 8 && offset_r == 0 &&
+ offset_a == -1 && bpp == 24) {
+ retval = TPL_FORMAT_BGR888;
+ } else if ( offset_r == 16 && offset_g == 8 && offset_b == 0 &&
+ offset_a == -1 && bpp == 24) {
+ retval = TPL_FORMAT_RGB888;
+ }
+
+ else if ( offset_a == 12 && offset_b == 8 && offset_g == 4 &&
+ offset_r == 0 && bpp == 16) {
+ retval = TPL_FORMAT_ABGR4444;
+ } else if ( offset_a == 12 && offset_r == 8 && offset_g == 4 &&
+ offset_b == 0 && bpp == 16) {
+ retval = TPL_FORMAT_ARGB4444;
+ } else if ( offset_b == 12 && offset_g == 8 && offset_r == 4 &&
+ offset_a == 0 && bpp == 16) {
+ retval = TPL_FORMAT_BGRA4444;
+ } else if ( offset_r == 12 && offset_g == 8 && offset_b == 4 &&
+ offset_a == 0 && bpp == 16) {
+ retval = TPL_FORMAT_RGBA4444;
+ }
+
+ else if ( offset_a == 15 && offset_b == 10 && offset_g == 5 &&
+ offset_r == 0 && bpp == 16) {
+ retval = TPL_FORMAT_ABGR1555;
+ } else if ( offset_a == 15 && offset_r == 10 && offset_g == 5 &&
+ offset_b == 0 && bpp == 16) {
+ retval = TPL_FORMAT_ARGB1555;
+ } else if ( offset_b == 11 && offset_g == 6 && offset_r == 1 &&
+ offset_a == 0 && bpp == 16) {
+ retval = TPL_FORMAT_BGRA5551;
+ } else if ( offset_r == 11 && offset_g == 6 && offset_b == 1 &&
+ offset_a == 0 && bpp == 16) {
+ retval = TPL_FORMAT_RGBA5551;
+ }
+
+ else {
+ TPL_WARN("Format not supported: offset_r=%d, offset_g=%d, offset_b=%d, offset_a=%d, bpp=%d",
+ offset_r, offset_g, offset_b, offset_a, bpp);
+ }
+
+ return retval;
+}
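+/* Illustrative example (added for clarity, values assumed): for a typical 32bpp
+ * ARGB visual with red_mask 0x00ff0000, green_mask 0x0000ff00 and blue_mask
+ * 0x000000ff, tpl_get_offset() yields offsets r=16, g=8, b=0;
+ * tpl_get_alpha_offset(16, 8, 0, 32) returns 24 because no color channel starts
+ * at bit 24; and tpl_offsets_to_color_buffer_format(16, 8, 0, 24, 32) then maps
+ * this combination to TPL_FORMAT_ARGB8888. */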
+#endif
+
+tpl_bool_t
+__tpl_x11_display_get_window_info(tpl_display_t *display, tpl_handle_t window,
+ int *width, int *height, tpl_format_t *format, int depth, int a_size)
+{
+ Status x_res;
+ XWindowAttributes att;
+
+ TPL_IGNORE(depth);
+ TPL_IGNORE(a_size);
+
+ TPL_ASSERT(display);
+ TPL_ASSERT(display->native_handle);
+
+ x_res = XGetWindowAttributes((Display *)display->native_handle, (Window)window,
+ &att);
+
+	/* XGetWindowAttributes() returns zero on failure. */
+	if (x_res != 0) {
+ if (format != NULL) {
+ switch (att.depth) {
+ case 32:
+ *format = TPL_FORMAT_ARGB8888;
+ break;
+ case 24:
+ *format = TPL_FORMAT_XRGB8888;
+ break;
+ case 16:
+ *format = TPL_FORMAT_RGB565;
+ break;
+ default:
+ *format = TPL_FORMAT_INVALID;
+ break;
+ }
+ }
+ if (width != NULL) *width = att.width;
+ if (height != NULL) *height = att.height;
+ return TPL_TRUE;
+ }
+
+ return TPL_FALSE;
+}
+
+tpl_bool_t
+__tpl_x11_display_get_pixmap_info(tpl_display_t *display, tpl_handle_t pixmap,
+ int *width, int *height, tpl_format_t *format)
+{
+ Status x_res;
+ Window root = None;
+ int x, y;
+ unsigned int w, h, bw, d;
+
+ TPL_ASSERT(display);
+ TPL_ASSERT(display->native_handle);
+
+ x_res = XGetGeometry((Display *)display->native_handle, (Pixmap)pixmap, &root,
+ &x, &y, &w, &h, &bw, &d);
+
+	/* XGetGeometry() returns zero on failure. */
+	if (x_res != 0) {
+ if (format != NULL) {
+ switch (d) {
+ case 32:
+ *format = TPL_FORMAT_ARGB8888;
+ break;
+ case 24:
+ *format = TPL_FORMAT_XRGB8888;
+ break;
+ case 16:
+ *format = TPL_FORMAT_RGB565;
+ break;
+ default:
+ *format = TPL_FORMAT_INVALID;
+ break;
+ }
+ }
+ if (width != NULL) *width = w;
+ if (height != NULL) *height = h;
+ if (format != NULL)
+ *format = TPL_FORMAT_ARGB8888;/*TODO: temp for argb8888*/
+ return TPL_TRUE;
+ }
+
+ return TPL_FALSE;
+}
+
+void
+__tpl_x11_display_flush(tpl_display_t *display)
+{
+ Display *native_display;
+
+ TPL_ASSERT(display);
+ TPL_ASSERT(display->native_handle);
+
+ native_display = (Display *) display->native_handle;
+ XFlush(native_display);
+ XSync(native_display, False);
+}
+
+tpl_bool_t
+__tpl_x11_buffer_init(tpl_buffer_t *buffer)
+{
+ TPL_IGNORE(buffer);
+
+ return TPL_TRUE;
+}
+
+void
+__tpl_x11_buffer_fini(tpl_buffer_t *buffer)
+{
+ TPL_ASSERT(buffer);
+
+ if (buffer->backend.data) {
+ tbm_bo_map((tbm_bo)buffer->backend.data, TBM_DEVICE_3D, TBM_OPTION_READ);
+ tbm_bo_unmap((tbm_bo)buffer->backend.data);
+ tbm_bo_unref((tbm_bo)buffer->backend.data);
+ buffer->backend.data = NULL;
+ }
+}
+
+void *
+__tpl_x11_buffer_map(tpl_buffer_t *buffer, int size)
+{
+ tbm_bo bo;
+ tbm_bo_handle handle;
+
+ TPL_ASSERT(buffer);
+ TPL_ASSERT(buffer->backend.data);
+
+ bo = (tbm_bo) buffer->backend.data;
+ handle = tbm_bo_get_handle(bo, TBM_DEVICE_CPU);
+
+ return handle.ptr;
+}
+
+void
+__tpl_x11_buffer_unmap(tpl_buffer_t *buffer, void *ptr, int size)
+{
+ TPL_IGNORE(buffer);
+ TPL_IGNORE(ptr);
+ TPL_IGNORE(size);
+
+ /* Do nothing. */
+}
+
+tpl_bool_t
+__tpl_x11_buffer_lock(tpl_buffer_t *buffer, tpl_lock_usage_t usage)
+{
+ tbm_bo bo;
+ tbm_bo_handle handle;
+
+ TPL_ASSERT(buffer);
+ TPL_ASSERT(buffer->backend.data);
+
+ bo = (tbm_bo) buffer->backend.data;
+
+ TPL_OBJECT_UNLOCK(buffer);
+
+ switch (usage) {
+ case TPL_LOCK_USAGE_GPU_READ:
+ handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_READ);
+ break;
+ case TPL_LOCK_USAGE_GPU_WRITE:
+ handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_WRITE);
+ break;
+ case TPL_LOCK_USAGE_CPU_READ:
+ handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_READ);
+ break;
+ case TPL_LOCK_USAGE_CPU_WRITE:
+ handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_WRITE);
+ break;
+ default:
+ TPL_ASSERT(TPL_FALSE);
+ return TPL_FALSE;
+ }
+
+ TPL_OBJECT_LOCK(buffer);
+
+	/* tbm_bo_map() returns a zeroed handle when mapping fails. */
+	if (handle.u32 == 0 && handle.ptr == NULL)
+		return TPL_FALSE;
+
+ return TPL_TRUE;
+}
+
+void
+__tpl_x11_buffer_unlock(tpl_buffer_t *buffer)
+{
+ tbm_bo bo;
+
+ TPL_ASSERT(buffer);
+ TPL_ASSERT(buffer->backend.data);
+
+ bo = (tbm_bo) buffer->backend.data;
+
+ TPL_OBJECT_UNLOCK(buffer);
+ tbm_bo_unmap(bo);
+ TPL_OBJECT_LOCK(buffer);
+}
+
+tpl_bool_t __tpl_x11_buffer_get_reused_flag(tpl_buffer_t *buffer)
+{
+ TPL_ASSERT(buffer);
+
+ if (DRI2_BUFFER_IS_REUSED(buffer->backend.flags))
+ return TPL_TRUE;
+ else
+ return TPL_FALSE;
+}
+
+void __tpl_x11_display_wait_native(tpl_display_t *display)
+{
+ Display *xlib_display;
+
+ TPL_ASSERT(display);
+
+ xlib_display = (Display *) display->native_handle;
+ if (xlib_display != NULL) {
+ /* Leave events in the queue since we only care they have arrived. */
+ XSync(xlib_display, 0);
+ }
+}
--- /dev/null
+#include <stdlib.h>
+#include <stdint.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <pthread.h>
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <X11/Xproto.h>
+#include <X11/Xlib-xcb.h>
+
+#include <libdrm/drm.h>
+#include <xf86drm.h>
+
+#include <dri2/dri2.h>
+#include <tbm_bufmgr.h>
+
+#include "tpl_internal.h"
+
+#include "tpl_x11_internal.h"
+
+
+typedef struct _tpl_x11_dri2_surface tpl_x11_dri2_surface_t;
+
+
+struct _tpl_x11_dri2_surface {
+ int latest_post_interval;
+ XserverRegion damage;
+ tpl_list_t buffer_cache;
+ tpl_buffer_t *latest_render_target;
+};
+
+
+
+static tpl_x11_global_t global = {
+ 0,
+ NULL,
+ -1,
+ NULL,
+ TPL_X11_SWAP_TYPE_ASYNC,
+ TPL_X11_SWAP_TYPE_SYNC
+};
+
+static Display *
+__tpl_x11_dri2_get_worker_display(void)
+{
+ Display *display;
+ pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
+
+ pthread_mutex_lock(&mutex);
+ TPL_ASSERT(global.display_count > 0);
+
+ /* Use dummy display for worker thread. :-) */
+ display = global.worker_display;
+
+ pthread_mutex_unlock(&mutex);
+
+ return display;
+}
+
+static void
+__tpl_x11_dri2_surface_post_internal(tpl_surface_t *surface, tpl_frame_t *frame,
+ tpl_bool_t is_worker)
+{
+ Display *display;
+ Drawable drawable;
+ CARD64 swap_count;
+ tpl_x11_dri2_surface_t *x11_surface;
+ XRectangle *xrects;
+ XRectangle xrects_stack[TPL_STACK_XRECTANGLE_SIZE];
+ int interval = frame->interval;
+
+ TPL_ASSERT(frame);
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+
+ x11_surface = (tpl_x11_dri2_surface_t *)surface->backend.data;
+
+ if (is_worker)
+ display = __tpl_x11_dri2_get_worker_display();
+ else
+ display = surface->display->native_handle;
+
+ drawable = (Drawable)surface->native_handle;
+
+ if (interval < 1)
+ interval = 1;
+
+ if (interval != x11_surface->latest_post_interval) {
+ DRI2SwapInterval(display, drawable, interval);
+ x11_surface->latest_post_interval = interval;
+ }
+
+ if (__tpl_region_is_empty(&frame->damage)) {
+ DRI2SwapBuffers(display, drawable, 0, 0, 0, &swap_count);
+ } else {
+ int i;
+
+ if (frame->damage.num_rects > TPL_STACK_XRECTANGLE_SIZE) {
+ xrects = (XRectangle *) malloc(sizeof(XRectangle) *
+ frame->damage.num_rects);
+ } else {
+ xrects = &xrects_stack[0];
+ }
+
+ for (i = 0; i < frame->damage.num_rects; i++) {
+ const int *rects = &frame->damage.rects[i * 4];
+
+ xrects[i].x = rects[0];
+ xrects[i].y = frame->buffer->height - rects[1] - rects[3];
+ xrects[i].width = rects[2];
+ xrects[i].height = rects[3];
+ }
+
+ if (x11_surface->damage == None) {
+ x11_surface->damage =
+ XFixesCreateRegion(display, xrects, frame->damage.num_rects);
+ } else {
+ XFixesSetRegion(display, x11_surface->damage,
+ xrects, frame->damage.num_rects);
+ }
+
+		DRI2SwapBuffersWithRegion(display, drawable, x11_surface->damage, &swap_count);
+
+		if (xrects != &xrects_stack[0])
+			free(xrects);
+	}
+
+ frame->state = TPL_FRAME_STATE_POSTED;
+}
+
+static tpl_bool_t
+__tpl_x11_dri2_display_init(tpl_display_t *display)
+{
+ pthread_mutex_t mutex;
+
+ TPL_ASSERT(display);
+
+ mutex = __tpl_x11_get_global_mutex();
+
+ if (display->native_handle == NULL) {
+ display->native_handle = XOpenDisplay(NULL);
+ if (NULL == display->native_handle) {
+ TPL_ERR("XOpenDisplay failed!");
+ return TPL_FALSE;
+ }
+ }
+
+ display->xcb_connection = XGetXCBConnection( (Display *)
+ display->native_handle );
+ if ( NULL == display->xcb_connection ) {
+ TPL_WARN("XGetXCBConnection failed");
+ }
+
+ pthread_mutex_lock(&mutex);
+
+ if (global.display_count == 0) {
+ Bool xres = False;
+ char *drv = NULL;
+ char *dev = NULL;
+ int major = -1;
+ int minor = -1;
+ int event_base = -1;
+ int error_base = -1;
+ Window root = 0;
+ drm_magic_t magic;
+
+ /* Open a dummy display connection. */
+ global.worker_display = XOpenDisplay(NULL);
+ if (NULL == global.worker_display) {
+ TPL_ERR("XOpenDisplay failed!");
+ return TPL_FALSE;
+ }
+
+ /* Get default root window. */
+ root = DefaultRootWindow(global.worker_display);
+
+ /* Initialize DRI2. */
+ xres = DRI2QueryExtension(global.worker_display, &event_base, &error_base);
+ if (True != xres) {
+ TPL_ERR("DRI2QueryExtension failed!");
+ return TPL_FALSE;
+ }
+
+ xres = DRI2QueryVersion(global.worker_display, &major, &minor);
+ if (True != xres) {
+ TPL_ERR("DRI2QueryVersion failed!");
+ return TPL_FALSE;
+ }
+
+ xres = DRI2Connect(global.worker_display, root, &drv, &dev);
+ if (True != xres) {
+ TPL_ERR("DRI2Connect failed!");
+ return TPL_FALSE;
+ }
+
+ /* Initialize buffer manager. */
+ global.bufmgr_fd = open(dev, O_RDWR);
+ drmGetMagic(global.bufmgr_fd, &magic);
+
+ /* DRI2 authentication. */
+ xres = DRI2Authenticate(global.worker_display, root, magic);
+ if (True != xres) {
+			TPL_ERR("DRI2Authenticate failed!");
+ return TPL_FALSE;
+ }
+
+ global.bufmgr = tbm_bufmgr_init(global.bufmgr_fd);
+
+ /* Initialize swap type configuration. */
+ __tpl_x11_swap_str_to_swap_type(tpl_getenv(EGL_X11_WINDOW_SWAP_TYPE_ENV_NAME),
+ &global.win_swap_type);
+
+ __tpl_x11_swap_str_to_swap_type(tpl_getenv(EGL_X11_FB_SWAP_TYPE_ENV_NAME),
+ &global.fb_swap_type);
+ }
+
+ global.display_count++;
+ display->bufmgr_fd = global.bufmgr_fd;
+
+ pthread_mutex_unlock(&mutex);
+ return TPL_TRUE;
+}
+
+static void
+__tpl_x11_dri2_display_fini(tpl_display_t *display)
+{
+
+ pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
+
+ TPL_IGNORE(display);
+
+ pthread_mutex_lock(&mutex);
+
+ if (--global.display_count == 0) {
+ tbm_bufmgr_deinit(global.bufmgr);
+ close(global.bufmgr_fd);
+ XCloseDisplay(global.worker_display);
+
+ global.worker_display = NULL;
+ global.bufmgr_fd = -1;
+ global.bufmgr = NULL;
+ }
+
+ pthread_mutex_unlock(&mutex);
+
+}
+
+static tpl_bool_t
+__tpl_x11_dri2_surface_init(tpl_surface_t *surface)
+{
+ Display *display;
+ Drawable drawable;
+ tpl_x11_dri2_surface_t *x11_surface;
+ tpl_format_t format = TPL_FORMAT_INVALID;
+
+ TPL_ASSERT(surface);
+
+ if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
+ if (!__tpl_x11_display_get_window_info(surface->display, surface->native_handle,
+ &surface->width, &surface->height, NULL, 0, 0))
+ return TPL_FALSE;
+ } else {
+ if (!__tpl_x11_display_get_pixmap_info(surface->display, surface->native_handle,
+ &surface->width, &surface->height, &format))
+ return TPL_FALSE;
+ }
+
+ x11_surface = (tpl_x11_dri2_surface_t *) calloc(1,
+ sizeof(tpl_x11_dri2_surface_t));
+
+ if (x11_surface == NULL) {
+ TPL_ERR("Failed to allocate memory for X11 surface!");
+ return TPL_FALSE;
+ }
+
+ x11_surface->latest_post_interval = -1;
+ __tpl_list_init(&x11_surface->buffer_cache);
+
+ display = (Display *)surface->display->native_handle;
+ drawable = (Drawable)surface->native_handle;
+ DRI2CreateDrawable(display, drawable);
+
+ surface->backend.data = (void *)x11_surface;
+
+ return TPL_TRUE;
+}
+
+static void
+__tpl_x11_dri2_surface_fini(tpl_surface_t *surface)
+{
+ Display *display;
+ Drawable drawable;
+ tpl_x11_dri2_surface_t *x11_surface;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->display);
+ TPL_ASSERT(surface->display->native_handle);
+
+ display = (Display *)surface->display->native_handle;
+ drawable = (Drawable)surface->native_handle;
+ x11_surface = (tpl_x11_dri2_surface_t *)surface->backend.data;
+
+ if (x11_surface) {
+ __tpl_x11_surface_buffer_cache_clear(&x11_surface->buffer_cache);
+
+ if (x11_surface->damage)
+ XFixesDestroyRegion(display, x11_surface->damage);
+
+ free(x11_surface);
+ }
+
+ DRI2DestroyDrawable(display, drawable);
+ surface->backend.data = NULL;
+}
+
+
+static void
+__tpl_x11_dri2_surface_post(tpl_surface_t *surface, tpl_frame_t *frame)
+{
+ TPL_ASSERT(frame);
+ TPL_ASSERT(surface);
+
+ __tpl_x11_dri2_surface_post_internal(surface, frame, TPL_TRUE);
+}
+
+static void
+__tpl_x11_surface_begin_frame(tpl_surface_t *surface)
+{
+ tpl_frame_t *prev_frame;
+
+ TPL_ASSERT(surface);
+
+ if (surface->type != TPL_SURFACE_TYPE_WINDOW) {
+ TPL_ERR("Surface type is not of window type!");
+ return;
+ }
+
+ prev_frame = __tpl_surface_get_latest_frame(surface);
+
+ if (prev_frame && prev_frame->state != TPL_FRAME_STATE_POSTED) {
+ if ((DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
+ global.fb_swap_type == TPL_X11_SWAP_TYPE_SYNC) ||
+ (!DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
+ global.win_swap_type == TPL_X11_SWAP_TYPE_SYNC)) {
+ __tpl_surface_wait_all_frames(surface);
+ }
+ }
+}
+
+static tpl_bool_t
+__tpl_x11_surface_validate_frame(tpl_surface_t *surface)
+{
+ tpl_x11_dri2_surface_t *x11_surface;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+
+ x11_surface = (tpl_x11_dri2_surface_t *) surface->backend.data;
+
+ if (surface->type != TPL_SURFACE_TYPE_WINDOW)
+ return TPL_TRUE;
+
+ if (NULL == surface->frame)
+ return TPL_TRUE;
+
+ if ((DRI2_BUFFER_IS_FB(surface->frame->buffer->backend.flags) &&
+ global.fb_swap_type == TPL_X11_SWAP_TYPE_LAZY) ||
+ (!DRI2_BUFFER_IS_FB(surface->frame->buffer->backend.flags) &&
+ global.win_swap_type == TPL_X11_SWAP_TYPE_LAZY)) {
+ if (x11_surface->latest_render_target == surface->frame->buffer) {
+ __tpl_surface_wait_all_frames(surface);
+ return TPL_FALSE;
+ }
+ }
+
+ return TPL_TRUE;
+}
+
+static void
+__tpl_x11_surface_end_frame(tpl_surface_t *surface)
+{
+ tpl_frame_t *frame;
+ tpl_x11_dri2_surface_t *x11_surface;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+
+ frame = __tpl_surface_get_latest_frame(surface);
+ x11_surface = (tpl_x11_dri2_surface_t *) surface->backend.data;
+
+ if (frame) {
+ x11_surface->latest_render_target = frame->buffer;
+
+ if ((DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
+ global.fb_swap_type == TPL_X11_SWAP_TYPE_ASYNC) ||
+ (!DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
+ global.win_swap_type == TPL_X11_SWAP_TYPE_ASYNC)) {
+ __tpl_x11_dri2_surface_post_internal(surface, frame, TPL_FALSE);
+ }
+ }
+}
+
+static tpl_buffer_t *
+__tpl_x11_dri2_surface_get_buffer(tpl_surface_t *surface,
+ tpl_bool_t *reset_buffers)
+{
+ tpl_buffer_t *buffer = NULL;
+ Display *display;
+ Drawable drawable;
+ DRI2Buffer *dri2_buffers;
+ uint32_t attachments[1] = { DRI2BufferBackLeft };
+ tbm_bo bo;
+ tbm_bo_handle bo_handle;
+ int width, height, num_buffers;
+ tpl_x11_dri2_surface_t *x11_surface;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+
+ x11_surface = (tpl_x11_dri2_surface_t *)surface->backend.data;
+
+ if (surface->type == TPL_SURFACE_TYPE_PIXMAP)
+ attachments[0] = DRI2BufferFrontLeft;
+
+ display = (Display *)surface->display->native_handle;
+ drawable = (Drawable)surface->native_handle;
+
+ /* Get the current buffer via DRI2. */
+ dri2_buffers = DRI2GetBuffers(display, drawable,
+ &width, &height, attachments, 1, &num_buffers);
+ if (dri2_buffers == NULL) {
+ TPL_ERR("DRI2GetBuffers failed!");
+ goto err_buffer;
+ }
+
+ if (DRI2_BUFFER_IS_REUSED(dri2_buffers[0].flags)) {
+		/* The buffer is flagged as reused, so it should already be in the buffer cache.
+		 * However, the server sometimes reports the reused flag for a buffer we have not
+		 * seen before; in that case fall through and import it as a new buffer. */
+ buffer = __tpl_x11_surface_buffer_cache_find(&x11_surface->buffer_cache,
+ dri2_buffers[0].name);
+
+ if (buffer) {
+ /* Need to update buffer flag */
+ buffer->backend.flags = dri2_buffers[0].flags;
+ /* just update the buffer age. */
+#if (TIZEN_FEATURES_ENABLE)
+ buffer->age = DRI2_BUFFER_GET_AGE(dri2_buffers[0].flags);
+#endif
+ goto done;
+ }
+ } else {
+		/* Buffer configuration of the server has changed. We have to reset all previously
+		 * received buffers. */
+ __tpl_x11_surface_buffer_cache_clear(&x11_surface->buffer_cache);
+ }
+
+ /* Create a TBM buffer object for the buffer name. */
+ bo = tbm_bo_import(global.bufmgr, dri2_buffers[0].name);
+
+ if (bo == NULL) {
+ TPL_ERR("TBM bo import failed!");
+ goto done;
+ }
+
+ bo_handle = tbm_bo_get_handle(bo, TBM_DEVICE_3D);
+
+ /* Create tpl buffer. */
+ buffer = __tpl_buffer_alloc(surface, (size_t) dri2_buffers[0].name,
+ (int) bo_handle.u32,
+ width, height, dri2_buffers[0].cpp * 8, dri2_buffers[0].pitch);
+ if (NULL == buffer) {
+ TPL_ERR("TPL buffer alloc failed!");
+ goto err_buffer;
+ }
+
+#if (TIZEN_FEATURES_ENABLE)
+ buffer->age = DRI2_BUFFER_GET_AGE(dri2_buffers[0].flags);
+#endif
+ buffer->backend.data = (void *)bo;
+ buffer->backend.flags = dri2_buffers[0].flags;
+
+ /* Add the buffer to the buffer cache. The cache will hold a reference to the buffer. */
+ __tpl_x11_surface_buffer_cache_add(&x11_surface->buffer_cache, buffer);
+ tpl_object_unreference(&buffer->base);
+
+done:
+ if (reset_buffers) {
+ /* Users use this output value to check if they have to reset previous buffers. */
+ *reset_buffers = !DRI2_BUFFER_IS_REUSED(dri2_buffers[0].flags) ||
+ width != surface->width || height != surface->height;
+ }
+
+ XFree(dri2_buffers);
+err_buffer:
+ return buffer;
+}
+
+tpl_bool_t
+__tpl_display_choose_backend_x11_dri2(tpl_handle_t native_dpy)
+{
+	TPL_IGNORE(native_dpy);
+
+	/* X11 display accepts any type of handle. So other backends must be chosen before this. */
+	return TPL_TRUE;
+}
+
+void
+__tpl_display_init_backend_x11_dri2(tpl_display_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_X11_DRI2;
+ backend->data = NULL;
+
+ backend->init = __tpl_x11_dri2_display_init;
+ backend->fini = __tpl_x11_dri2_display_fini;
+ backend->query_config = __tpl_x11_display_query_config;
+ backend->get_window_info = __tpl_x11_display_get_window_info;
+ backend->get_pixmap_info = __tpl_x11_display_get_pixmap_info;
+ backend->flush = __tpl_x11_display_flush;
+ backend->wait_native = __tpl_x11_display_wait_native;
+}
+
+void
+__tpl_surface_init_backend_x11_dri2(tpl_surface_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_X11_DRI2;
+ backend->data = NULL;
+
+ backend->init = __tpl_x11_dri2_surface_init;
+ backend->fini = __tpl_x11_dri2_surface_fini;
+ backend->begin_frame = __tpl_x11_surface_begin_frame;
+ backend->end_frame = __tpl_x11_surface_end_frame;
+ backend->validate_frame = __tpl_x11_surface_validate_frame;
+ backend->get_buffer = __tpl_x11_dri2_surface_get_buffer;
+ backend->post = __tpl_x11_dri2_surface_post;
+}
+
+void
+__tpl_buffer_init_backend_x11_dri2(tpl_buffer_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_X11_DRI2;
+ backend->data = NULL;
+
+ backend->init = __tpl_x11_buffer_init;
+ backend->fini = __tpl_x11_buffer_fini;
+ backend->map = __tpl_x11_buffer_map;
+ backend->unmap = __tpl_x11_buffer_unmap;
+ backend->lock = __tpl_x11_buffer_lock;
+ backend->unlock = __tpl_x11_buffer_unlock;
+}
--- /dev/null
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdint.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <pthread.h>
+#include <dlfcn.h>
+
+#include <X11/Xlib-xcb.h>
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <X11/Xproto.h>
+#include <X11/extensions/Xfixes.h>
+
+
+#include <libdrm/drm.h>
+#include <xf86drm.h>
+
+#include <xcb/xcb.h>
+#include <xcb/dri3.h>
+#include <xcb/xcbext.h>
+#include <xcb/present.h>
+#include <xcb/sync.h>
+
+#include <tbm_bufmgr.h>
+
+#include "tpl_internal.h"
+
+#include "tpl_x11_internal.h"
+
+static int dri3_max_back = 0; /* maximum number of back buffers */
+#define DRI3_NUM_BUFFERS 20
+#define DRI3_BUFFER_REUSED 0x08
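+/* Buffer slot layout (descriptive note): buffers[0 .. dri3_max_back - 1] hold
+ * the back buffers cycled by dri3_find_back(), while buffers[dri3_max_back]
+ * holds the front (pixmap) buffer; DRI3_NUM_BUFFERS only bounds the array. */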
+
+/* 2015-04-15 joonbum.ko@samsung.com */
+/* Add macro function for pitch align calculation.*/
+#define SIZE_ALIGN( value, base ) (((value) + ((base) - 1)) & ~((base) - 1))
+#define ALIGNMENT_PITCH_ARGB 64
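+/* Illustrative example (added for clarity, values assumed): for a surface
+ * 1080 pixels wide with cpp = 32, dri3_alloc_render_buffer() computes
+ * SIZE_ALIGN((1080 * 32) >> 3, ALIGNMENT_PITCH_ARGB) = SIZE_ALIGN(4320, 64)
+ * = 4352 bytes, i.e. the byte pitch rounded up to the next multiple of 64. */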
+
+
+#define USE_FENCE 0
+
+typedef struct _tpl_x11_dri3_surface tpl_x11_dri3_surface_t;
+
+struct _tpl_x11_dri3_surface {
+ int latest_post_interval;
+ XserverRegion damage;
+ tpl_list_t buffer_cache;
+ tpl_buffer_t *latest_render_target;
+
+ void *drawable;
+};
+
+enum dri3_buffer_type {
+ dri3_buffer_back = 0,
+ dri3_buffer_front = 1
+};
+
+enum dri3_buffer_status {
+ dri3_buffer_idle = 0,
+ dri3_buffer_busy = 1,
+ dri3_buffer_posted = 2
+};
+
+typedef struct _dri3_buffer {
+ tbm_bo tbo;
+ uint32_t pixmap;
+ enum dri3_buffer_status status; /* Set on swap, cleared on IdleNotify */
+ void *driverPrivate;
+
+ /*param of buffer */
+ uint32_t size;
+ uint32_t pitch;
+ uint32_t cpp;
+ uint32_t flags;
+ int32_t width, height;
+ uint64_t last_swap;
+ int32_t own_pixmap; /* We allocated the pixmap ID,
+ free on destroy */
+ uint32_t dma_buf_fd; /* fd of dma buffer */
+ /* [BEGIN: 20141125-xuelian.bai] Add old dma fd to save old fd
+ * before use new fd */
+ /* 2015-04-08 joonbum.ko@samsung.com */
+ /* Change old buffer name to old_bo_name from old_dma_fd */
+ /* uint32_t old_dma_fd; */
+ uint32_t old_bo_name;
+ /* [END: 20141125-xuelian.bai] */
+ enum dri3_buffer_type buffer_type; /* back=0,front=1 */
+
+ /* [BEGIN: 20140119-leiba.sun] Add support for buffer age */
+ uint32_t buffer_age;
+ /* [END:20150119-leiba.sun] */
+} dri3_buffer;
+
+typedef struct _dri3_drawable {
+ Display *dpy;
+ XID xDrawable;
+
+ tbm_bufmgr bufmgr; /* tbm bufmgr */
+
+ int32_t width, height, depth;
+ int32_t swap_interval;
+ uint8_t have_back;
+ uint8_t have_fake_front;
+ tpl_bool_t is_pixmap; /*whether the drawable is pixmap*/
+ uint8_t flipping; /*whether the drawable can use pageFlip*/
+
+ uint32_t present_capabilities; /* Present extension capabilities*/
+ uint64_t send_sbc; /* swap buffer counter */
+ uint64_t recv_sbc;
+ uint64_t ust, msc; /* Last received UST/MSC values */
+ uint32_t send_msc_serial; /* Serial numbers for tracking
+ wait_for_msc events */
+ uint32_t recv_msc_serial;
+
+ dri3_buffer *buffers[DRI3_NUM_BUFFERS]; /*buffer array of all buffers*/
+ int cur_back;
+
+ uint32_t stamp;
+ xcb_present_event_t eid;
+ xcb_special_event_t *special_event;
+} dri3_drawable;
+
+typedef struct _dri3_drawable_node {
+ XID xDrawable;
+ dri3_drawable *drawable;
+} dri3_drawable_node;
+
+static tpl_x11_global_t global = {
+ 0,
+ NULL,
+ -1,
+ NULL,
+ TPL_X11_SWAP_TYPE_LAZY,
+ TPL_X11_SWAP_TYPE_LAZY
+};
+
+static tpl_list_t dri3_drawable_list;
+static void
+dri3_free_render_buffer(dri3_drawable *pdraw, dri3_buffer *buffer);
+static void dri3_flush_present_events(dri3_drawable *priv);
+/* Wrapper around xcb_dri3_open*/
+static int
+dri3_open(Display *dpy, Window root, CARD32 provider)
+{
+ xcb_dri3_open_cookie_t cookie;
+ xcb_dri3_open_reply_t *reply;
+ xcb_connection_t *c;
+ int fd;
+
+ TPL_ASSERT(dpy);
+
+ c = XGetXCBConnection(dpy);
+
+ cookie = xcb_dri3_open(c,
+ root,
+ provider);
+
+ reply = xcb_dri3_open_reply(c, cookie, NULL);
+ if (!reply) {
+ TPL_ERR("XCB DRI3 open failed!");
+ return -1;
+ }
+
+ if (reply->nfd != 1) {
+ TPL_ERR("XCB DRI3 open reply failed!");
+ free(reply);
+ return -1;
+ }
+
+ fd = xcb_dri3_open_reply_fds(c, reply)[0];
+ fcntl(fd, F_SETFD, FD_CLOEXEC);
+
+ free(reply);
+ return fd;
+}
+
+static tpl_bool_t
+dri3_display_init(Display *dpy)
+{
+ /* Initialize DRI3 & DRM */
+ xcb_connection_t *c;
+ xcb_dri3_query_version_cookie_t dri3_cookie;
+ xcb_dri3_query_version_reply_t *dri3_reply;
+ xcb_present_query_version_cookie_t present_cookie;
+ xcb_present_query_version_reply_t *present_reply;
+ xcb_generic_error_t *error;
+ const xcb_query_extension_reply_t *extension;
+ xcb_extension_t xcb_dri3_id = { "DRI3", 0 };
+ xcb_extension_t xcb_present_id = { "Present", 0 };
+
+ TPL_ASSERT(dpy);
+
+ c = XGetXCBConnection(dpy);
+
+ xcb_prefetch_extension_data(c, &xcb_dri3_id);
+ xcb_prefetch_extension_data(c, &xcb_present_id);
+
+ extension = xcb_get_extension_data(c, &xcb_dri3_id);
+ if (!(extension && extension->present)) {
+ TPL_ERR("XCB get extension failed!");
+ return TPL_FALSE;
+ }
+
+ extension = xcb_get_extension_data(c, &xcb_present_id);
+ if (!(extension && extension->present)) {
+ TPL_ERR("XCB get extension failed!");
+ return TPL_FALSE;
+ }
+
+ dri3_cookie = xcb_dri3_query_version(c,
+ XCB_DRI3_MAJOR_VERSION,
+ XCB_DRI3_MINOR_VERSION);
+ dri3_reply = xcb_dri3_query_version_reply(c, dri3_cookie, &error);
+ if (!dri3_reply) {
+ TPL_ERR("XCB version query failed!");
+ free(error);
+ return TPL_FALSE;
+ }
+ free(dri3_reply);
+
+ present_cookie = xcb_present_query_version(c,
+ XCB_PRESENT_MAJOR_VERSION,
+ XCB_PRESENT_MINOR_VERSION);
+ present_reply = xcb_present_query_version_reply(c, present_cookie, &error);
+ if (!present_reply) {
+ TPL_ERR("Present version query failed!");
+ free(error);
+ return TPL_FALSE;
+ }
+ free(present_reply);
+ return TPL_TRUE;
+}
+
+static void *
+dri3_create_drawable(Display *dpy, XID xDrawable)
+{
+ dri3_drawable *pdraw = NULL;
+ xcb_connection_t *c;
+ xcb_get_geometry_cookie_t geom_cookie;
+ xcb_get_geometry_reply_t *geom_reply;
+ int i;
+ tpl_list_node_t *node;
+ dri3_drawable_node *drawable_node;
+
+ TPL_ASSERT(dpy);
+
+ c = XGetXCBConnection(dpy);
+
+ /* Check drawable list to find that if it has been created*/
+ node = __tpl_list_get_front_node(&dri3_drawable_list);
+ while (node) {
+ dri3_drawable_node *drawable = (dri3_drawable_node *) __tpl_list_node_get_data(
+ node);
+
+ if (drawable->xDrawable == xDrawable) {
+ pdraw = drawable->drawable;
+ return (void *)pdraw;/* Reuse old drawable */
+ }
+ node = __tpl_list_node_next(node);
+ }
+ pdraw = calloc(1, sizeof(*pdraw));
+ if (NULL == pdraw) {
+ TPL_ERR("Failed to allocate memory!");
+ return NULL;
+ }
+
+ geom_cookie = xcb_get_geometry(c, xDrawable);
+ geom_reply = xcb_get_geometry_reply(c, geom_cookie, NULL);
+ if (NULL == geom_reply) {
+ TPL_ERR("XCB get geometry failed!");
+ free(pdraw);
+ return NULL;
+ }
+
+ pdraw->bufmgr = global.bufmgr;
+ pdraw->width = geom_reply->width;
+ pdraw->height = geom_reply->height;
+ pdraw->depth = geom_reply->depth;
+ pdraw->is_pixmap = TPL_FALSE;
+
+ free(geom_reply);
+ pdraw->dpy = global.worker_display;
+ pdraw->xDrawable = xDrawable;
+
+ for (i = 0; i < dri3_max_back + 1; i++)
+ pdraw->buffers[i] = NULL;
+
+ /* Add new allocated drawable to drawable list */
+ drawable_node = calloc(1, sizeof(dri3_drawable_node));
+ if (NULL == drawable_node) {
+ TPL_ERR("Failed to allocate memory for drawable node!");
+ free(pdraw);
+ return NULL;
+ }
+
+ drawable_node->drawable = pdraw;
+ drawable_node->xDrawable = xDrawable;
+ if (TPL_TRUE != __tpl_list_push_back(&dri3_drawable_list,
+ (void *)drawable_node)) {
+ TPL_ERR("List operation failed!");
+ free(pdraw);
+ free(drawable_node);
+ return NULL;
+ }
+
+ return (void *)pdraw;
+}
+
+static void
+dri3_destroy_drawable(Display *dpy, XID xDrawable)
+{
+ dri3_drawable *pdraw;
+ xcb_connection_t *c;
+ int i;
+ tpl_list_node_t *node;
+ dri3_drawable_node *drawable;
+
+ TPL_ASSERT(dpy);
+
+ c = XGetXCBConnection(dpy);
+
+ /* Remove drawable from list */
+ node = __tpl_list_get_front_node(&dri3_drawable_list);
+ while (node) {
+ drawable = (dri3_drawable_node *) __tpl_list_node_get_data(node);
+
+ if (drawable->xDrawable == xDrawable) {
+ pdraw = drawable->drawable;
+
+ if (!pdraw)
+ return;
+
+ for (i = 0; i < dri3_max_back + 1; i++) {
+ if (pdraw->buffers[i])
+ dri3_free_render_buffer(pdraw, pdraw->buffers[i]);
+ }
+
+ if (pdraw->special_event)
+ xcb_unregister_for_special_event(c, pdraw->special_event);
+ free(pdraw);
+ pdraw = NULL;
+ __tpl_list_remove(node, free);
+ return;
+ }
+
+ node = __tpl_list_node_next(node);
+ }
+
+	/* If the drawable was not found, it has already been freed. */
+ return;
+}
+
+/** dri3_update_drawable
+ *
+ * Called the first time we use the drawable and then
+ * after we receive present configure notify events to
+ * track the geometry of the drawable
+ */
+static int
+dri3_update_drawable(void *loaderPrivate)
+{
+ dri3_drawable *priv = loaderPrivate;
+ xcb_connection_t *c;
+ xcb_extension_t xcb_present_id = { "Present", 0 };
+
+ TPL_ASSERT(priv);
+ TPL_ASSERT(priv->dpy);
+
+ c = XGetXCBConnection(priv->dpy);
+
+ /* First time through, go get the current drawable geometry
+ */ /*TODO*/
+ if (priv->special_event == NULL) {
+ xcb_get_geometry_cookie_t geom_cookie;
+ xcb_get_geometry_reply_t *geom_reply;
+ xcb_void_cookie_t cookie;
+ xcb_generic_error_t *error;
+ xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
+ xcb_present_query_capabilities_reply_t *present_capabilities_reply;
+
+
+ /* Try to select for input on the window.
+ *
+ * If the drawable is a window, this will get our events
+ * delivered.
+ *
+ * Otherwise, we'll get a BadWindow error back from this
+ * request which will let us know that the drawable is a
+ * pixmap instead.
+ */
+ cookie = xcb_present_select_input_checked(c,
+ (priv->eid = xcb_generate_id(c)),
+ priv->xDrawable,
+ XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
+ XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
+ XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
+
+ present_capabilities_cookie = xcb_present_query_capabilities(c,
+ priv->xDrawable);
+
+ /* Create an XCB event queue to hold present events outside of the usual
+ * application event queue
+ */
+ priv->special_event = xcb_register_for_special_xge(c,
+ &xcb_present_id,
+ priv->eid,
+ &priv->stamp);
+
+ geom_cookie = xcb_get_geometry(c, priv->xDrawable);
+
+ geom_reply = xcb_get_geometry_reply(c, geom_cookie, NULL);
+ if (NULL == geom_reply) {
+ TPL_ERR("Failed to get geometry reply!");
+ return TPL_FALSE;
+ }
+
+ priv->width = geom_reply->width;
+ priv->height = geom_reply->height;
+ priv->depth = geom_reply->depth;
+ priv->is_pixmap = TPL_FALSE;
+
+ free(geom_reply);
+
+ /* Check to see if our select input call failed. If it failed
+ * with a BadWindow error, then assume the drawable is a pixmap.
+ * Destroy the special event queue created above and mark the
+ * drawable as a pixmap
+ */
+
+ error = xcb_request_check(c, cookie);
+
+ present_capabilities_reply = xcb_present_query_capabilities_reply(c,
+ present_capabilities_cookie,
+ NULL);
+
+ if (present_capabilities_reply) {
+ priv->present_capabilities = present_capabilities_reply->capabilities;
+ free(present_capabilities_reply);
+ } else
+ priv->present_capabilities = 0;
+
+ if (error) {
+ if (error->error_code != BadWindow) {
+ free(error);
+ return TPL_FALSE;
+ }
+			priv->is_pixmap = TPL_TRUE;
+			xcb_unregister_for_special_event(c, priv->special_event);
+			priv->special_event = NULL;
+			free(error);
+		}
+ }
+ dri3_flush_present_events(priv);
+ return TPL_TRUE;
+}
+
+/******************************************
+ * dri3_handle_present_event
+ * Process Present event from xserver
+ *****************************************/
+static void
+dri3_handle_present_event(dri3_drawable *priv, xcb_present_generic_event_t *ge)
+{
+ TPL_ASSERT(priv);
+ TPL_ASSERT(ge);
+
+ switch (ge->evtype) {
+ case XCB_PRESENT_CONFIGURE_NOTIFY: {
+ TRACE_BEGIN("DRI3:PRESENT_CONFIGURE_NOTIFY");
+ xcb_present_configure_notify_event_t *ce = (void *) ge;
+ priv->width = ce->width;
+ priv->height = ce->height;
+ TRACE_END();
+ break;
+ }
+
+ case XCB_PRESENT_COMPLETE_NOTIFY: {
+ TRACE_BEGIN("DRI3:PRESENT_COMPLETE_NOTIFY");
+ xcb_present_complete_notify_event_t *ce = (void *) ge;
+ /* Compute the processed SBC number from the received
+ * 32-bit serial number merged with the upper 32-bits
+ * of the sent 64-bit serial number while checking for
+ * wrap
+ */
+ if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
+ priv->recv_sbc =
+ (priv->send_sbc & 0xffffffff00000000LL) |
+ ce->serial;
+ if (priv->recv_sbc > priv->send_sbc)
+ priv->recv_sbc -= 0x100000000;
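+			/* Example (added for clarity, values assumed): with send_sbc
+			 * 0x100000005 and serial 0x00000003 the merge above yields
+			 * 0x100000003, which is not greater than send_sbc, so no wrap
+			 * correction applies; a serial of 0xffffffff would instead give
+			 * 0x1ffffffff > send_sbc, so 0x100000000 is subtracted. */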
+ switch (ce->mode) {
+ case XCB_PRESENT_COMPLETE_MODE_FLIP:
+ priv->flipping = 1;
+ break;
+ case XCB_PRESENT_COMPLETE_MODE_COPY:
+ priv->flipping = 0;
+ break;
+ }
+ } else {
+ priv->recv_msc_serial = ce->serial;
+ }
+
+ priv->ust = ce->ust;
+ priv->msc = ce->msc;
+ TRACE_END();
+ break;
+ }
+
+ case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
+ xcb_present_idle_notify_event_t *ie = (void *) ge;
+ uint32_t b;
+
+ for (b = 0; b < sizeof (priv->buffers) / sizeof (priv->buffers[0]); b++) {
+ dri3_buffer *buf = priv->buffers[b];
+
+ if (buf && buf->pixmap == ie->pixmap) {
+ TRACE_MARK("IDLE:%d", tbm_bo_export(priv->buffers[b]->tbo));
+ buf->status = dri3_buffer_idle;
+ break;
+ }
+ }
+ break;
+ }
+ }
+ free(ge);
+}
+
+/******************************************************
+* dri3_flush_present_events
+*
+* Process any present events that have been received from the X server
+* called when get buffer or swap buffer
+******************************************************/
+static void
+dri3_flush_present_events(dri3_drawable *priv)
+{
+ xcb_connection_t *c;
+
+ TPL_ASSERT(priv);
+ TPL_ASSERT(priv->dpy);
+
+ c = XGetXCBConnection(priv->dpy);
+
+ TRACE_BEGIN("DRI3:FLUSH_PRESENT_EVENTS");
+ /* Check to see if any configuration changes have occurred
+ * since we were last invoked
+ */
+ if (priv->special_event) {
+ xcb_generic_event_t *ev;
+
+ while ((ev = xcb_poll_for_special_event(c, priv->special_event)) != NULL) {
+ xcb_present_generic_event_t *ge = (void *) ev;
+ dri3_handle_present_event(priv, ge);
+ }
+ }
+ TRACE_END();
+}
+
+static tpl_bool_t
+dri3_wait_for_notify(xcb_connection_t *c, dri3_drawable *priv)
+{
+ xcb_generic_event_t *ev;
+ xcb_present_generic_event_t *ge;
+
+ TPL_ASSERT(c);
+ TPL_ASSERT(priv);
+
+ TRACE_BEGIN("TPL:DRI3:WAIT_FOR_NOTIFY");
+
+ if (((uint32_t)priv->send_sbc) == 0) {
+ TRACE_END();
+ return TPL_TRUE;
+ }
+ for (;;) {
+ if ( (uint32_t)priv->send_sbc <= (uint32_t)priv->recv_sbc ) {
+ TRACE_END();
+ return TPL_TRUE;
+ }
+
+ xcb_flush(c);
+ ev = xcb_wait_for_special_event(c, priv->special_event);
+ if (!ev) {
+ TRACE_END();
+ return TPL_FALSE;
+ }
+ ge = (void *) ev;
+ dri3_handle_present_event(priv, ge);
+ }
+}
+
+/** dri3_find_back
+ *
+ * Find an idle back buffer. If there isn't one, then
+ * wait for a present idle notify event from the X server
+ */
+static int
+dri3_find_back(xcb_connection_t *c, dri3_drawable *priv)
+{
+ int b;
+ xcb_generic_event_t *ev;
+ xcb_present_generic_event_t *ge;
+
+ TPL_ASSERT(c);
+ TPL_ASSERT(priv);
+
+ for (;;) {
+ for (b = 0; b < dri3_max_back; b++) {
+ int id = (b + priv->cur_back + 1) % dri3_max_back;
+ int pre_id = (id + dri3_max_back - 2) % dri3_max_back;
+
+ dri3_buffer *buffer = priv->buffers[id];
+ dri3_buffer *pre_buffer = priv->buffers[pre_id];
+
+ if (pre_buffer && pre_buffer->status != dri3_buffer_posted)
+ pre_buffer->status = dri3_buffer_idle;
+
+ if (!buffer || buffer->status == dri3_buffer_idle) {
+ priv->cur_back = id;
+ return id;
+ }
+ }
+
+ xcb_flush(c);
+ TRACE_BEGIN("DDK:DRI3:XCBWAIT");
+ ev = xcb_wait_for_special_event(c, priv->special_event);
+ TRACE_END();
+
+ if (!ev) {
+ return -1;
+ }
+
+ ge = (void *) ev;
+ dri3_handle_present_event(priv, ge);
+ }
+}
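+/* Illustrative example (added for clarity): with dri3_max_back = 3 and
+ * cur_back = 0, the loop above probes slot ids 1, 2, 0 in that order and picks
+ * the first slot that is empty or idle; if none is available it blocks in
+ * xcb_wait_for_special_event() until an IDLE notify frees one. */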
+
+/** dri3_alloc_render_buffer
+ *
+ * allocate a render buffer and create an X pixmap from that
+ *
+ */
+static dri3_buffer *
+dri3_alloc_render_buffer(dri3_drawable *priv,
+ int width, int height, int depth, int cpp)
+{
+ Display *dpy;
+ Drawable draw;
+ dri3_buffer *buffer = NULL;
+ xcb_connection_t *c;
+ xcb_pixmap_t pixmap = 0;
+ int buffer_fd;
+ int size;
+ tbm_bo_handle handle;
+ xcb_void_cookie_t cookie;
+ xcb_generic_error_t *error;
+
+ TPL_ASSERT(priv);
+ TPL_ASSERT(priv->dpy);
+
+ dpy = priv->dpy;
+ draw = priv->xDrawable;
+
+ c = XGetXCBConnection(dpy);
+
+ /* Allocate the image from the driver
+ */
+ buffer = calloc(1, sizeof (dri3_buffer));
+ if (!buffer) {
+ TPL_ERR("Failed to allocate buffer!");
+ goto no_buffer;
+ }
+
+ /* size = height * width * depth/8;*/
+ /* size = ((width * 32)>>5) * 4 * height; */
+ /* calculate pitch and size by input parameter cpp */
+ /* buffer->pitch = width*(cpp/8); */
+
+	/* Modify the calculation of pitch (stride). */
+ buffer->pitch = SIZE_ALIGN((width * cpp) >> 3, ALIGNMENT_PITCH_ARGB);
+
+ size = buffer->pitch * height;
+
+ buffer->tbo = tbm_bo_alloc(priv->bufmgr, size, TBM_BO_DEFAULT);
+ if (NULL == buffer->tbo) {
+ TPL_ERR("TBM bo alloc failed!");
+ free(buffer);
+ goto no_buffer;
+ }
+
+ /* dup tbo, because X will close it */
+ /* 2015-04-08 joonbum.ko@samsung.com */
+ /* delete tbm_bo_get_handle function call and
+ add tbm_bo_export_fd function call */
+
+ handle = tbm_bo_get_handle(buffer->tbo, TBM_DEVICE_3D);
+ buffer_fd = dup(handle.u32);
+
+ /* buffer_fd = tbm_bo_export_fd(buffer->tbo);*/
+ /* 2015-04-08 joonbum.ko@samsung.com */
+ /* disable the value dma_buf_fd */
+ buffer->dma_buf_fd = handle.u32;
+ buffer->size = size;
+ cookie = xcb_dri3_pixmap_from_buffer_checked(c,
+ (pixmap = xcb_generate_id(c)),
+ draw,
+ buffer->size,
+ width, height, buffer->pitch,
+ depth, cpp,
+ buffer_fd);
+ error = xcb_request_check( c, cookie);
+ /* 2015-04-08 joonbum.ko@samsung.com */
+ /* buffer_fd is unuseful */
+ /* close(buffer_fd);*/
+
+ if (error) {
+ TPL_ERR("No pixmap!");
+ goto no_pixmap;
+ }
+ if (0 == pixmap) {
+ TPL_ERR("No pixmap!");
+ goto no_pixmap;
+ }
+
+ buffer->pixmap = pixmap;
+ buffer->own_pixmap = TPL_TRUE;
+ buffer->width = width;
+ buffer->height = height;
+ buffer->flags = 0;
+
+ return buffer;
+no_pixmap:
+ tbm_bo_unref(buffer->tbo);
+ free(buffer);
+no_buffer:
+ return NULL;
+}
+
+/** dri3_free_render_buffer
+ *
+ * Free everything associated with one render buffer including pixmap, fence
+ * stuff
+ */
+static void
+dri3_free_render_buffer(dri3_drawable *pdraw, dri3_buffer *buffer)
+{
+ xcb_connection_t *c;
+
+ TPL_ASSERT(pdraw);
+ TPL_ASSERT(buffer);
+ TPL_ASSERT(pdraw->dpy);
+
+ c = XGetXCBConnection(pdraw->dpy);
+
+ /* 2015-04-08 joonbum.ko@samsung.com */
+ /* if drawable type is pixmap, it requires only free buffer */
+ if (!pdraw->is_pixmap) {
+ if (buffer->own_pixmap)
+ xcb_free_pixmap(c, buffer->pixmap);
+ tbm_bo_unref(buffer->tbo);
+ /* added a ref when created and unref while free, see dri3_get_pixmap_buffer */
+ }
+
+ buffer = NULL;
+}
+
+
+/** dri3_get_window_buffer
+ *
+ * Find a front or back buffer, allocating new ones as necessary
+ */
+
+/* 2015-04-08 joonbum.ko@samsung.com */
+/* Change the value of old_dma_fd to old_bo_name */
+static dri3_buffer *
+dri3_get_window_buffer(void *loaderPrivate, int cpp)
+{
+ dri3_drawable *priv = loaderPrivate;
+ xcb_connection_t *c;
+ dri3_buffer *backbuffer = NULL;
+ int back_buf_id, reuse = 1;
+ uint32_t old_bo_name = 0;
+
+ TPL_ASSERT(priv);
+ TPL_ASSERT(priv->dpy);
+
+ c = XGetXCBConnection(priv->dpy);
+
+ TRACE_BEGIN("DDK:DRI3:GETBUFFERS:WINDOW");
+ TRACE_BEGIN("DDK:DRI3:FINDBACK");
+ back_buf_id = dri3_find_back(c, priv);
+ TRACE_END();
+
+ backbuffer = priv->buffers[back_buf_id];
+
+ /* Allocate a new buffer if there isn't an old one, or if that
+ * old one is the wrong size.
+ */
+ if (!backbuffer || backbuffer->width != priv->width ||
+ backbuffer->height != priv->height ) {
+ dri3_buffer *new_buffer;
+
+ /* Allocate the new buffers
+ */
+ TRACE_BEGIN("DDK:DRI3:ALLOCRENDERBUFFER");
+ new_buffer = dri3_alloc_render_buffer(priv,
+ priv->width, priv->height, priv->depth, cpp);
+ TRACE_END();
+
+ if (!new_buffer) {
+ TRACE_END();
+ return NULL;
+ }
+ if (backbuffer) {
+			/* [BEGIN: 20141125-xuelian.bai] Size does not match, so this buffer
+			 * must be removed from the buffer cache; save the bo name of the
+			 * old buffer first. */
+ old_bo_name = tbm_bo_export(backbuffer->tbo);
+ /* [END: 20141125-xuelian.bai] */
+ TRACE_BEGIN("DDK:DRI3:FREERENDERBUFFER");
+ dri3_free_render_buffer(priv, backbuffer);
+ TRACE_END();
+ }
+ backbuffer = new_buffer;
+ backbuffer->buffer_type = dri3_buffer_back;
+ backbuffer->old_bo_name = old_bo_name;
+ priv->buffers[back_buf_id] = backbuffer;
+ reuse = 0;
+ }
+
+ backbuffer->flags = DRI2_BUFFER_FB;
+ backbuffer->status = dri3_buffer_busy;
+ if (reuse) {
+ backbuffer->flags |= DRI2_BUFFER_REUSED;
+ }
+ /* Return the requested buffer */
+ TRACE_END();
+
+ TRACE_MARK("%d", tbm_bo_export(backbuffer->tbo));
+
+ return backbuffer;
+}
+
+/* 2015-04-07 joonbum.ko@samsung.com */
+/* modify internal flow of dri3_get_pixmap_buffer */
+/* add 3rd argument for stride information */
+static dri3_buffer *
+dri3_get_pixmap_buffer(void *loaderPrivate, Pixmap pixmap,
+ int cpp)/*TODO:format*/
+{
+ dri3_drawable *pdraw = loaderPrivate;
+ dri3_buffer *buffer = NULL;
+ xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
+ xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
+ int *fds;
+ Display *dpy;
+ xcb_connection_t *c;
+ tbm_bo tbo = NULL;
+
+ TPL_ASSERT(pdraw);
+ TPL_ASSERT(pdraw->dpy);
+
+ TRACE_BEGIN("DDK:DRI3:GETBUFFERS:PIXMAP");
+
+ dpy = pdraw->dpy;
+ c = XGetXCBConnection(dpy);
+
+ /* Get an FD for the pixmap object
+ */
+ bp_cookie = xcb_dri3_buffer_from_pixmap(c, pixmap);
+ bp_reply = xcb_dri3_buffer_from_pixmap_reply(c, bp_cookie, NULL);
+ if (!bp_reply) {
+ goto no_image;
+ }
+ fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
+
+ tbo = tbm_bo_import_fd(pdraw->bufmgr, (tbm_fd)(*fds));
+
+ if (!buffer) {
+ buffer = calloc(1, sizeof (dri3_buffer));
+ if (!buffer)
+ goto no_buffer;
+ }
+
+ buffer->tbo = tbo;
+ /* 2015-04-08 joonbum.ko@samsung.com */
+ /* disable the value dma_buf_fd */
+ buffer->dma_buf_fd = *fds;
+ buffer->pixmap = pixmap;
+ buffer->own_pixmap = TPL_FALSE;
+ buffer->width = bp_reply->width;
+ buffer->height = bp_reply->height;
+ buffer->buffer_type = dri3_buffer_front;
+ buffer->flags = DRI3_BUFFER_REUSED;
+ /* 2015-04-07 joonbum.ko@samsung.com */
+ /* add buffer information(cpp, pitch, size) */
+ buffer->cpp = cpp;
+ buffer->pitch = bp_reply->stride;
+ buffer->size = buffer->pitch * bp_reply->height;
+
+ pdraw->buffers[dri3_max_back] = buffer;
+
+ /* 2015-04-08 joonbum.ko@samsung.com */
+	/* the fd is no longer needed once the bo has been imported */
+ close(*fds);
+ TRACE_END();
+ return buffer;
+
+ /* 2015-04-09 joonbum.ko@samsung.com */
+ /* change the lable order */
+no_image:
+ if (buffer)
+ free(buffer);
+no_buffer:
+ TRACE_END();
+ return NULL;
+}
+
+static dri3_buffer *dri3_get_buffers(XID drawable, void *loaderPrivate,
+ unsigned int *attachments, int cpp)
+{
+ dri3_drawable *priv = loaderPrivate;
+ dri3_buffer *buffer = NULL;
+
+ TPL_ASSERT(priv);
+ TPL_ASSERT(attachments);
+
+ TRACE_BEGIN("DDK:DRI3:GETBUFFERS");
+
+ if (drawable != priv->xDrawable) {
+ TPL_ERR("Drawable mismatch!");
+ TRACE_END();
+ return NULL;
+ }
+
+ if (!dri3_update_drawable(loaderPrivate)) {
+ TPL_ERR("Update drawable failed!");
+ TRACE_END();
+ return NULL;
+ }
+
+ if (*attachments == dri3_buffer_front)
+ buffer = dri3_get_pixmap_buffer(loaderPrivate,
+ priv->xDrawable, cpp);
+ else
+ buffer = dri3_get_window_buffer(loaderPrivate, cpp);
+
+	if (NULL == buffer) {
+		TPL_ERR("Get buffer failed!");
+		TRACE_END();
+		return NULL;
+	}
+
+ TRACE_END();
+
+ return buffer;
+}
+
+/******************************************************
+ * dri3_swap_buffers
+ * swap back buffer with front buffer
+ * Make the current back buffer visible using the present extension
+ * if (region_t == 0), swap the whole frame; otherwise swap only the given region
+ ******************************************************/
+static int64_t
+dri3_swap_buffers(Display *dpy, void *priv, tpl_buffer_t *frame_buffer,
+ int interval, XID region_t)
+{
+
+ int64_t ret = -1;
+ int64_t target_msc = 0;
+ int64_t divisor = 0;
+ int64_t remainder = 0;
+ xcb_connection_t *c;
+ dri3_drawable *pDrawable;
+ dri3_buffer *back = NULL;
+ int i = 0;
+
+ TPL_ASSERT(dpy);
+ TPL_ASSERT(priv);
+ TPL_ASSERT(frame_buffer);
+
+ c = XGetXCBConnection(dpy);
+
+ pDrawable = (dri3_drawable *) priv;
+ back = (dri3_buffer *) frame_buffer->backend.data;
+
+ if ((back == NULL) || (pDrawable == NULL) || (pDrawable->is_pixmap != 0)) {
+ TRACE_END();
+ return ret;
+ }
+
+ /* Process any present events that have been received from the X
+ * server until receive complete notify.
+ */
+ if (!dri3_wait_for_notify(c, pDrawable)) {
+ TRACE_END();
+ return ret;
+ }
+ /* [BEGIN: 20140119-leiba.sun] Add support for buffer age
+ * When swap buffer, increase buffer age of every back buffer */
+ for (i = 0; i < dri3_max_back; i++) {
+ if ((pDrawable->buffers[i] != NULL) && (pDrawable->buffers[i]->buffer_age > 0))
+ pDrawable->buffers[i]->buffer_age++;
+ }
+ back->buffer_age = 1;
+ /* [END:20150119-leiba.sun] */
+ /* set busy flag */
+ back->status = dri3_buffer_posted;
+
+ /* Compute when we want the frame shown by taking the last known
+ * successful MSC and adding in a swap interval for each outstanding
+ * swap request
+ */
+ if (pDrawable->swap_interval != interval)
+ pDrawable->swap_interval = interval;
+
+ ++pDrawable->send_sbc;
+ if (target_msc == 0)
+ target_msc = pDrawable->msc + pDrawable->swap_interval *
+ (pDrawable->send_sbc - pDrawable->recv_sbc);
+
+ back->last_swap = pDrawable->send_sbc;
+
+ TRACE_MARK("SWAP:%d", tbm_bo_export(back->tbo)) ;
+ xcb_present_pixmap(c,
+ pDrawable->xDrawable, /* dst */
+ back->pixmap, /* src */
+ (uint32_t) pDrawable->send_sbc,
+ 0, /* valid */
+ region_t, /* update */
+ 0, /* x_off */
+ 0, /* y_off */
+ None, /* target_crtc */
+ None,
+ 0,
+ XCB_PRESENT_OPTION_NONE,
+ /*target_msc*/0,
+ divisor,
+ remainder, 0, NULL);
+
+ ret = (int64_t) pDrawable->send_sbc;
+
+ xcb_flush(c);
+
+ ++(pDrawable->stamp);
+
+ return ret;
+}
+
+tpl_bool_t
+__tpl_x11_dri3_buffer_init(tpl_buffer_t *buffer)
+{
+ TPL_IGNORE(buffer);
+ return TPL_TRUE;
+}
+
+void
+__tpl_x11_dri3_buffer_fini(tpl_buffer_t *buffer)
+{
+ dri3_buffer *back;
+
+ TPL_ASSERT(buffer);
+
+ back = (dri3_buffer *)buffer->backend.data;
+
+ if (back) {
+ tbm_bo bo = back->tbo;
+ tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_READ);
+ tbm_bo_unmap(bo);
+ tbm_bo_unref(bo);
+ buffer->backend.data = NULL;
+ free(back);
+ }
+}
+
+void *
+__tpl_x11_dri3_buffer_map(tpl_buffer_t *buffer, int size)
+{
+ tbm_bo bo;
+ tbm_bo_handle handle;
+
+ TPL_ASSERT(buffer);
+
+ TPL_IGNORE(size);
+ bo = ((dri3_buffer *)buffer->backend.data)->tbo;
+ TPL_ASSERT(bo);
+
+ handle = tbm_bo_get_handle(bo, TBM_DEVICE_CPU);
+ return handle.ptr;
+}
+
+void
+__tpl_x11_dri3_buffer_unmap(tpl_buffer_t *buffer, void *ptr, int size)
+{
+ TPL_IGNORE(buffer);
+ TPL_IGNORE(ptr);
+ TPL_IGNORE(size);
+
+ /* Do nothing. */
+}
+
+tpl_bool_t
+__tpl_x11_dri3_buffer_lock(tpl_buffer_t *buffer, tpl_lock_usage_t usage)
+{
+ tbm_bo bo;
+ tbm_bo_handle handle;
+ dri3_buffer *back;
+
+ TPL_ASSERT(buffer);
+ TPL_ASSERT(buffer->backend.data);
+
+ back = (dri3_buffer *) buffer->backend.data;
+ bo = back->tbo;
+
+ if (NULL == bo) {
+ TPL_ERR("bo is NULL!");
+ return TPL_FALSE;
+ }
+
+ TRACE_BEGIN("TPL:BUFFERLOCK:%d", tbm_bo_export(bo));
+
+ TPL_OBJECT_UNLOCK(buffer);
+
+ switch (usage) {
+ case TPL_LOCK_USAGE_GPU_READ:
+ handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_READ);
+ break;
+ case TPL_LOCK_USAGE_GPU_WRITE:
+ handle = tbm_bo_map(bo, TBM_DEVICE_3D, TBM_OPTION_WRITE);
+ break;
+ case TPL_LOCK_USAGE_CPU_READ:
+ handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_READ);
+ break;
+ case TPL_LOCK_USAGE_CPU_WRITE:
+ handle = tbm_bo_map(bo, TBM_DEVICE_CPU, TBM_OPTION_WRITE);
+ break;
+ default:
+ TPL_ASSERT(TPL_FALSE);
+ return TPL_FALSE;
+ }
+
+ TPL_OBJECT_LOCK(buffer);
+
+	/* tbm_bo_map() returns a zeroed handle when mapping fails. */
+	if (handle.u32 == 0 && handle.ptr == NULL) {
+ TRACE_END();
+ return TPL_FALSE;
+ }
+ TRACE_END();
+ return TPL_TRUE;
+}
+
+void
+__tpl_x11_dri3_buffer_unlock(tpl_buffer_t *buffer)
+{
+ dri3_buffer *back;
+ tbm_bo bo;
+
+ TPL_ASSERT(buffer);
+
+ back = (dri3_buffer *) buffer->backend.data;
+ bo = back->tbo;
+
+ if (NULL == bo) {
+ TPL_ERR("bo is NULL!");
+ return;
+ }
+
+ TRACE_BEGIN("TPL:BUFFERUNLOCK:%d", tbm_bo_export(back->tbo));
+
+ TPL_OBJECT_UNLOCK(buffer);
+ tbm_bo_unmap(bo);
+ TPL_OBJECT_LOCK(buffer);
+
+ TRACE_END();
+}
+
+static Display *
+__tpl_x11_dri3_get_worker_display(void)
+{
+ Display *display;
+ pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
+
+ pthread_mutex_lock(&mutex);
+ TPL_ASSERT(global.display_count > 0);
+
+ /* Use dummy display for worker thread. :-) */
+ display = global.worker_display;
+
+ pthread_mutex_unlock(&mutex);
+
+ return display;
+}
+
+static tpl_bool_t
+__tpl_x11_dri3_display_init(tpl_display_t *display)
+{
+ pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
+
+ TPL_ASSERT(display);
+
+ XInitThreads();
+ if (display->native_handle == NULL) {
+ display->native_handle = XOpenDisplay(NULL);
+ TPL_ASSERT(display->native_handle != NULL);
+ }
+
+ pthread_mutex_lock(&mutex);
+
+ if (global.display_count == 0) {
+ tpl_bool_t xres = TPL_FALSE;
+ Window root = 0;
+ drm_magic_t magic;
+
+ /* Open a dummy display connection. */
+
+ global.worker_display = XOpenDisplay(NULL);
+ TPL_ASSERT(global.worker_display != NULL);
+
+ /* Get default root window. */
+ root = DefaultRootWindow(global.worker_display);
+
+ /* Initialize DRI3. */
+ xres = dri3_display_init(global.worker_display);
+ TPL_ASSERT(xres == TPL_TRUE);
+
+
+ /* Initialize buffer manager. */
+ global.bufmgr_fd = dri3_open(global.worker_display, root, 0);
+ drmGetMagic(global.bufmgr_fd, &magic);
+ global.bufmgr = tbm_bufmgr_init(global.bufmgr_fd);
+
+ __tpl_list_init(&dri3_drawable_list);
+
+ /* [BEGIN: 20141125-xuelian.bai] Add env for setting number of back buffers*/
+ {
+ const char *backend_env = NULL;
+ int count = 0;
+ backend_env = tpl_getenv("MALI_EGL_DRI3_BUF_NUM");
+ /* 2015-05-13 joonbum.ko@samsung.com */
+ /* Change the value of dri3_max_back 5 to 3 */
+ if (!backend_env || strlen(backend_env) == 0)
+ dri3_max_back = 3; /* Default value is 3*/
+ else {
+ count = atoi(backend_env);
+				if (count == 1) /* one buffer does not work; the minimum is 2 */
+ dri3_max_back = 2;
+ else if (count < 20)
+ dri3_max_back = count;
+ else
+ dri3_max_back = 5;
+ }
+ }
+ /* [END: 20141125-xuelian.bai] */
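+		/* Usage note (added for clarity): the back buffer count can be tuned
+		 * at runtime, e.g. "export MALI_EGL_DRI3_BUF_NUM=4"; an unset or empty
+		 * value falls back to 3, a value of 1 is bumped to 2, and values of 20
+		 * or more are clamped to 5. */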
+ }
+
+ global.display_count++;
+ display->bufmgr_fd = global.bufmgr_fd;
+
+ pthread_mutex_unlock(&mutex);
+ return TPL_TRUE;
+}
+
+static void
+__tpl_x11_dri3_display_fini(tpl_display_t *display)
+{
+ pthread_mutex_t mutex = __tpl_x11_get_global_mutex();
+ TPL_IGNORE(display);
+ pthread_mutex_lock(&mutex);
+
+ if (--global.display_count == 0) {
+ tbm_bufmgr_deinit(global.bufmgr);
+ close(global.bufmgr_fd);
+ XCloseDisplay(global.worker_display);
+
+ global.worker_display = NULL;
+ global.bufmgr_fd = -1;
+ global.bufmgr = NULL;
+
+ __tpl_list_fini(&dri3_drawable_list, NULL);
+ }
+
+ pthread_mutex_unlock(&mutex);
+
+}
+
+static tpl_bool_t
+__tpl_x11_dri3_surface_init(tpl_surface_t *surface)
+{
+ Display *display = NULL;
+ XID drawable;
+ tpl_x11_dri3_surface_t *x11_surface;
+
+ TPL_ASSERT(surface);
+
+ x11_surface = (tpl_x11_dri3_surface_t *)calloc(1,
+ sizeof(tpl_x11_dri3_surface_t));
+ if (x11_surface == NULL) {
+ TPL_ERR("Failed to allocate buffer!");
+ return TPL_FALSE;
+ }
+
+ x11_surface->latest_post_interval = -1;
+ __tpl_list_init(&x11_surface->buffer_cache);
+
+ display = (Display *)surface->display->native_handle;
+ drawable = (XID)surface->native_handle;
+
+ x11_surface->drawable = dri3_create_drawable(display, drawable);
+
+ surface->backend.data = (void *)x11_surface;
+ if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
+ __tpl_x11_display_get_window_info(surface->display,
+ surface->native_handle,
+ &surface->width, &surface->height, NULL, 0, 0);
+ } else {
+ __tpl_x11_display_get_pixmap_info(surface->display,
+ surface->native_handle,
+ &surface->width, &surface->height, NULL);
+ }
+
+ return TPL_TRUE;
+}
+
+static void
+__tpl_x11_dri3_surface_fini(tpl_surface_t *surface)
+{
+ Display *display;
+ tpl_x11_dri3_surface_t *x11_surface;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->display);
+ TPL_ASSERT(surface->display->native_handle);
+
+ display = (Display *) surface->display->native_handle;
+ x11_surface = (tpl_x11_dri3_surface_t *) surface->backend.data;
+
+ dri3_destroy_drawable(display, (XID)surface->native_handle);
+
+ if (x11_surface) {
+ __tpl_x11_surface_buffer_cache_clear(&x11_surface->buffer_cache);
+
+
+ if (x11_surface->damage)
+ XFixesDestroyRegion(display, x11_surface->damage);
+
+ free(x11_surface);
+ }
+
+ surface->backend.data = NULL;
+}
+
+static void
+__tpl_x11_dri3_surface_post_internal(tpl_surface_t *surface,
+ tpl_frame_t *frame,
+ tpl_bool_t is_worker)
+{
+ Display *display = NULL;
+ tpl_x11_dri3_surface_t *x11_surface;
+ XRectangle *xrects;
+ XRectangle xrects_stack[TPL_STACK_XRECTANGLE_SIZE];
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(frame);
+
+ TRACE_BEGIN("DDK:DRI3:SWAPBUFFERS");
+ x11_surface = (tpl_x11_dri3_surface_t *)surface->backend.data;
+
+ display = __tpl_x11_dri3_get_worker_display();
+
+ if (frame->interval != x11_surface->latest_post_interval) {
+ x11_surface->latest_post_interval = frame->interval;/*FIXME:set interval?*/
+ }
+
+ if (__tpl_region_is_empty(&frame->damage)) {
+ dri3_swap_buffers(display, x11_surface->drawable, frame->buffer, 0, 0);
+ } else {
+ int i;
+
+ if (frame->damage.num_rects > TPL_STACK_XRECTANGLE_SIZE) {
+ xrects = (XRectangle *)malloc(sizeof(XRectangle) *
+ frame->damage.num_rects);
+ } else {
+ xrects = &xrects_stack[0];
+ }
+
+ for (i = 0; i < frame->damage.num_rects; i++) {
+ const int *rects = &frame->damage.rects[i * 4];
+
+ xrects[i].x = rects[0];
+ xrects[i].y = frame->buffer->height - rects[1] -
+ rects[3];
+ xrects[i].width = rects[2];
+ xrects[i].height = rects[3];
+ }
+
+ if (x11_surface->damage == None) {
+ x11_surface->damage =
+ XFixesCreateRegion(display, xrects,
+ frame->damage.num_rects);
+ } else {
+ XFixesSetRegion(display, x11_surface->damage,
+ xrects, frame->damage.num_rects);
+ }
+
+		dri3_swap_buffers(display, x11_surface->drawable, frame->buffer, 0,
+				  x11_surface->damage);
+
+		if (xrects != &xrects_stack[0])
+			free(xrects);
+	}
+ frame->state = TPL_FRAME_STATE_POSTED;
+
+ TRACE_END();
+}
+
+static void
+__tpl_x11_dri3_surface_post(tpl_surface_t *surface, tpl_frame_t *frame)
+{
+ TPL_ASSERT(surface);
+ TPL_ASSERT(frame);
+
+ __tpl_x11_dri3_surface_post_internal(surface, frame, TPL_TRUE);
+}
+
+static tpl_bool_t
+__tpl_x11_dri3_surface_begin_frame(tpl_surface_t *surface)
+{
+ tpl_frame_t *prev_frame;
+
+ TPL_ASSERT(surface);
+
+ if (surface->type != TPL_SURFACE_TYPE_WINDOW)
+ return TPL_TRUE;
+
+ prev_frame = __tpl_surface_get_latest_frame(surface);
+
+ if (prev_frame && prev_frame->state != TPL_FRAME_STATE_POSTED) {
+ if ((DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
+ global.fb_swap_type == TPL_X11_SWAP_TYPE_SYNC) ||
+ (!DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
+ global.win_swap_type == TPL_X11_SWAP_TYPE_SYNC)) {
+ __tpl_surface_wait_all_frames(surface);
+ }
+ }
+
+ return TPL_TRUE;
+}
+
+static tpl_bool_t
+__tpl_x11_dri3_surface_validate_frame(tpl_surface_t *surface)
+{
+ tpl_frame_t *prev_frame;
+ if (surface->type != TPL_SURFACE_TYPE_WINDOW)
+ return TPL_TRUE;
+
+ if (surface->frame == NULL)
+ return TPL_TRUE;
+
+ prev_frame = __tpl_surface_get_latest_frame(surface);
+
+ if (prev_frame && prev_frame->state != TPL_FRAME_STATE_POSTED) {
+ if ((DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
+ global.fb_swap_type == TPL_X11_SWAP_TYPE_LAZY) ||
+ (!DRI2_BUFFER_IS_FB(prev_frame->buffer->backend.flags) &&
+ global.win_swap_type == TPL_X11_SWAP_TYPE_LAZY)) {
+ __tpl_surface_wait_all_frames(surface);
+ return TPL_TRUE;
+ }
+ }
+ return TPL_TRUE;
+}
+
+static tpl_bool_t
+__tpl_x11_dri3_surface_end_frame(tpl_surface_t *surface)
+{
+ tpl_frame_t *frame;
+ tpl_x11_dri3_surface_t *x11_surface;
+
+ TPL_ASSERT(surface);
+ TPL_ASSERT(surface->backend.data);
+
+ frame = __tpl_surface_get_latest_frame(surface);
+ x11_surface = (tpl_x11_dri3_surface_t *) surface->backend.data;
+
+ if (frame) {
+ x11_surface->latest_render_target = frame->buffer;
+
+ if ((DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
+ global.fb_swap_type == TPL_X11_SWAP_TYPE_ASYNC) ||
+ (!DRI2_BUFFER_IS_FB(frame->buffer->backend.flags) &&
+ global.win_swap_type == TPL_X11_SWAP_TYPE_ASYNC)) {
+ __tpl_x11_dri3_surface_post_internal(surface, frame, TPL_FALSE);
+ }
+ }
+
+ return TPL_TRUE;
+}
+
+/* 2015-04-08 joonbum.ko@samsung.com */
+/* change the key value of tpl_buffer_t from dma_buf_fd to tbo name */
+static tpl_buffer_t *
+__tpl_x11_dri3_surface_get_buffer(tpl_surface_t *surface,
+ tpl_bool_t *reset_buffers)
+{
+ Drawable drawable;
+ dri3_buffer *buffer = NULL;
+ tpl_buffer_t *tpl_buffer = NULL;
+ uint32_t attachments[1] = { dri3_buffer_back };
+ tbm_bo bo;
+ tbm_bo_handle bo_handle;
+ tpl_x11_dri3_surface_t *x11_surface;
+ int cpp = 0;
+
+ TPL_ASSERT(surface);
+
+ x11_surface = (tpl_x11_dri3_surface_t *)surface->backend.data;
+
+ if (surface->type == TPL_SURFACE_TYPE_PIXMAP) {
+ attachments[0] = dri3_buffer_front;
+ }
+
+ drawable = (Drawable)surface->native_handle;
+
+ /* [BEGIN: 20141125-xing.huang] Get the current buffer via DRI3. */
+	cpp = 32; /* _mali_surface_specifier_bpp(&(surface->sformat)); the cpp reported by mali is not reliable, so use 32 */
+ /* [END: 20141125-xing.huang] */
+
+	buffer = dri3_get_buffers(drawable, x11_surface->drawable, attachments, cpp);
+	if (NULL == buffer) {
+		TPL_ERR("dri3_get_buffers failed!");
+		return NULL;
+	}
+
+	if (DRI2_BUFFER_IS_REUSED(buffer->flags)) {
+ tpl_buffer = __tpl_x11_surface_buffer_cache_find(
+ &x11_surface->buffer_cache,
+ tbm_bo_export(buffer->tbo));
+
+ if (tpl_buffer) {
+ /* If the buffer name is reused and there's a cache
+ * entry for that name, just update the buffer age
+ * and return. */
+ /* [BEGIN: 20150119-leiba.sun] Add support for buffer age */
+ tpl_buffer->age = buffer->buffer_age;
+ /* [END:20150119-leiba.sun] */
+
+ if (surface->type == TPL_SURFACE_TYPE_PIXMAP)
+ tbm_bo_unref (buffer->tbo);
+
+ goto done;
+ }
+ }
+
+ if (!tpl_buffer) {
+ /* No reusable cache entry: remove stale entries keyed by this bo name and by its previous name. */
+ __tpl_x11_surface_buffer_cache_remove(
+ &x11_surface->buffer_cache,
+ tbm_bo_export(buffer->tbo));
+ if (buffer->old_bo_name != 0) {
+ __tpl_x11_surface_buffer_cache_remove(
+ &x11_surface->buffer_cache,
+ buffer->old_bo_name);
+ buffer->old_bo_name = 0;
+ }
+ }
+
+ bo = buffer->tbo;
+
+ if (bo == NULL) {
+ TPL_ERR("bo is NULL!");
+ goto done;
+ }
+
+ bo_handle = tbm_bo_get_handle(bo, TBM_DEVICE_3D);
+
+ /* Create tpl buffer. */
+ tpl_buffer = __tpl_buffer_alloc(surface, (size_t) tbm_bo_export(buffer->tbo),
+ (int)bo_handle.u32,
+ buffer->width, buffer->height, buffer->cpp * 8, buffer->pitch);
+ if (NULL == tpl_buffer) {
+ TPL_ERR("TPL buffer alloc failed!");
+ goto done;
+ }
+
+ if (surface->type != TPL_SURFACE_TYPE_PIXMAP)
+ tbm_bo_ref(buffer->tbo);
+
+ tpl_buffer->age = DRI2_BUFFER_GET_AGE(buffer->flags);
+ tpl_buffer->backend.data = (void *)buffer;
+ tpl_buffer->backend.flags = buffer->flags;
+ /* [BEGIN: 20150119-leiba.sun] Add support for buffer age;
+ * save the surface for later use */
+ tpl_buffer->surface = surface;
+ /* [END:20150119-leiba.sun] */
+
+ __tpl_x11_surface_buffer_cache_add(&x11_surface->buffer_cache, tpl_buffer);
+ tpl_object_unreference(&tpl_buffer->base);
+done:
+ if (reset_buffers) {
+ /* Callers use this output value to check whether they need to reset previously returned buffers. */
+ *reset_buffers = !DRI2_BUFFER_IS_REUSED(buffer->flags) ||
+ buffer->width != surface->width || buffer->height != surface->height;
+ }
+
+ return tpl_buffer;
+}
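+
+/* Illustrative usage (hypothetical caller; reaching this function through the
+ * surface->backend.get_buffer hook is assumed):
+ *
+ *   tpl_bool_t reset = TPL_FALSE;
+ *   tpl_buffer_t *buf = surface->backend.get_buffer(surface, &reset);
+ *   if (reset) {
+ *       // buffers returned earlier are stale and must not be reused
+ *   }
+ */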
+
+/* [BEGIN: 20150119-leiba.sun] Add support for buffer age */
+int
+__tpl_x11_dri3_get_buffer_age(tpl_buffer_t *buffer)
+{
+ dri3_buffer *back;
+
+ TPL_ASSERT(buffer);
+
+ back = (dri3_buffer *) buffer->backend.data;
+
+ TPL_ASSERT(back);
+
+ return back->buffer_age;
+}
+/* [END:20150119-leiba.sun] */
+
+
+tpl_bool_t
+__tpl_display_choose_backend_x11_dri3(tpl_handle_t native_dpy)
+{
+ TPL_IGNORE(native_dpy);
+ /* An X11 display accepts any type of handle, so all other backends must be chosen before this one. */
+ return TPL_TRUE;
+}
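+
+/* A minimal sketch of why ordering matters (hypothetical dispatcher; the
+ * wayland/gbm chooser names below are placeholders, not actual symbols):
+ *
+ *   if (choose_backend_wayland(native_dpy))      -> use the wayland backend
+ *   else if (choose_backend_gbm(native_dpy))     -> use the gbm backend
+ *   else if (__tpl_display_choose_backend_x11_dri3(native_dpy))
+ *                                                -> fall back to X11 DRI3
+ */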
+
+void
+__tpl_display_init_backend_x11_dri3(tpl_display_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_X11_DRI3;
+ backend->data = NULL;
+
+ backend->init = __tpl_x11_dri3_display_init;
+ backend->fini = __tpl_x11_dri3_display_fini;
+ backend->query_config = __tpl_x11_display_query_config;
+ backend->filter_config = NULL;
+ backend->get_window_info = __tpl_x11_display_get_window_info;
+ backend->get_pixmap_info = __tpl_x11_display_get_pixmap_info;
+ backend->flush = __tpl_x11_display_flush;
+}
+
+void
+__tpl_surface_init_backend_x11_dri3(tpl_surface_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_X11_DRI3;
+ backend->data = NULL;
+
+ backend->init = __tpl_x11_dri3_surface_init;
+ backend->fini = __tpl_x11_dri3_surface_fini;
+ backend->begin_frame = __tpl_x11_dri3_surface_begin_frame;
+ backend->end_frame = __tpl_x11_dri3_surface_end_frame;
+ backend->validate_frame = __tpl_x11_dri3_surface_validate_frame;
+ backend->get_buffer = __tpl_x11_dri3_surface_get_buffer;
+ backend->post = __tpl_x11_dri3_surface_post;
+}
+
+void
+__tpl_buffer_init_backend_x11_dri3(tpl_buffer_backend_t *backend)
+{
+ TPL_ASSERT(backend);
+
+ backend->type = TPL_BACKEND_X11_DRI3;
+ backend->data = NULL;
+
+ backend->init = __tpl_x11_dri3_buffer_init;
+ backend->fini = __tpl_x11_dri3_buffer_fini;
+ backend->map = __tpl_x11_dri3_buffer_map;
+ backend->unmap = __tpl_x11_dri3_buffer_unmap;
+ backend->lock = __tpl_x11_dri3_buffer_lock;
+ backend->unlock = __tpl_x11_dri3_buffer_unlock;
+ /* [BEGIN: 20150119-leiba.sun] Add support for buffer age */
+ backend->get_buffer_age = __tpl_x11_dri3_get_buffer_age;
+ /* [END:20150119-leiba.sun] */
+}
--- /dev/null
+#ifndef TPL_X11_INTERNAL_H
+#define TPL_X11_INTERNAL_H
+
+#include "tpl.h"
+#include <stdlib.h>
+#include <pthread.h>
+
+#include "tpl_utils.h"
+
+#define TIZEN_FEATURES_ENABLE 0
+
+#define DRI2_BUFFER_FB 0x02
+#define DRI2_BUFFER_MAPPED 0x04
+#define DRI2_BUFFER_REUSED 0x08
+#define DRI2_BUFFER_AGE 0x70 /* 01110000 */
+
+#define DRI2_BUFFER_IS_FB(flag) ((flag & DRI2_BUFFER_FB) ? 1 : 0)
+#define DRI2_BUFFER_IS_REUSED(flag) ((flag & DRI2_BUFFER_REUSED) ? 1 : 0)
+#define DRI2_BUFFER_GET_AGE(flag) ((flag & DRI2_BUFFER_AGE) >> 4)
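+/* Worked example of the flag layout (illustrative value only): for
+ * flags == 0x2A (binary 00101010),
+ *   DRI2_BUFFER_IS_FB(0x2A)     == 1,
+ *   DRI2_BUFFER_IS_REUSED(0x2A) == 1,
+ *   DRI2_BUFFER_GET_AGE(0x2A)   == (0x20 >> 4) == 2. */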
+
+#define TPL_STACK_XRECTANGLE_SIZE 16
+/* [BEGIN: 20141125-xuelian.bai] DRI3 needs a large buffer cache, otherwise it
+ * will get slow */
+#define TPL_BUFFER_CACHE_MAX_ENTRIES 40
+/* [END: 20141125-xuelian.bai] */
+
+#define EGL_X11_WINDOW_SWAP_TYPE_ENV_NAME "EGL_X11_SWAP_TYPE_WINDOW"
+#define EGL_X11_FB_SWAP_TYPE_ENV_NAME "EGL_X11_SWAP_TYPE_FB"
+
+typedef struct _tpl_x11_global tpl_x11_global_t;
+
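+/* As used by the DRI3 surface backend in this patch: SYNC waits for all
+ * pending frames in begin_frame, LAZY waits in validate_frame, and ASYNC
+ * posts the frame immediately from end_frame. The EGL_X11_SWAP_TYPE_*
+ * environment variables above, together with __tpl_x11_swap_str_to_swap_type()
+ * declared below, suggest the swap type is selected at run time. */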
+typedef enum {
+ TPL_X11_SWAP_TYPE_ERROR = -1,
+ TPL_X11_SWAP_TYPE_SYNC = 0,
+ TPL_X11_SWAP_TYPE_ASYNC,
+ TPL_X11_SWAP_TYPE_LAZY,
+ TPL_X11_SWAP_TYPE_MAX
+} tpl_x11_swap_type_t;
+
+struct _tpl_x11_global {
+ int display_count;
+
+ Display *worker_display;
+ int bufmgr_fd;
+ tbm_bufmgr bufmgr;
+
+ tpl_x11_swap_type_t win_swap_type;
+ tpl_x11_swap_type_t fb_swap_type;
+};
+
+pthread_mutex_t
+__tpl_x11_get_global_mutex(void);
+
+void
+__tpl_x11_swap_str_to_swap_type(char *str, tpl_x11_swap_type_t *type);
+
+tpl_buffer_t *
+__tpl_x11_surface_buffer_cache_find(tpl_list_t *buffer_cache,
+ unsigned int name);
+void
+__tpl_x11_surface_buffer_cache_remove(tpl_list_t *buffer_cache,
+ unsigned int name);
+tpl_bool_t
+__tpl_x11_surface_buffer_cache_add(tpl_list_t *buffer_cache,
+ tpl_buffer_t *buffer);
+void
+__tpl_x11_surface_buffer_cache_clear(tpl_list_t *buffer_cache);
+tpl_bool_t
+__tpl_x11_display_query_config(tpl_display_t *display,
+ tpl_surface_type_t surface_type, int red_size,
+ int green_size, int blue_size, int alpha_size,
+ int color_depth, int *native_visual_id, tpl_bool_t *is_slow);
+tpl_bool_t
+__tpl_x11_display_get_window_info(tpl_display_t *display, tpl_handle_t window,
+ int *width, int *height, tpl_format_t *format, int depth, int a_size);
+tpl_bool_t
+__tpl_x11_display_get_pixmap_info(tpl_display_t *display, tpl_handle_t pixmap,
+ int *width, int *height, tpl_format_t *format);
+void
+__tpl_x11_display_flush(tpl_display_t *display);
+tpl_bool_t
+__tpl_x11_buffer_init(tpl_buffer_t *buffer);
+void
+__tpl_x11_buffer_fini(tpl_buffer_t *buffer);
+void *
+__tpl_x11_buffer_map(tpl_buffer_t *buffer, int size);
+void
+__tpl_x11_buffer_unmap(tpl_buffer_t *buffer, void *ptr, int size);
+tpl_bool_t
+__tpl_x11_buffer_lock(tpl_buffer_t *buffer, tpl_lock_usage_t usage);
+void
+__tpl_x11_buffer_unlock(tpl_buffer_t *buffer);
+tpl_bool_t
+__tpl_x11_buffer_get_reused_flag(tpl_buffer_t *buffer);
+void
+__tpl_x11_display_wait_native(tpl_display_t *display);
+
+#endif /* TPL_X11_INTERNAL_H */