1 #define inline __inline__
3 #include <wayland-client.h>
4 #include <wayland-egl-backend.h>
5 #include "wayland-egl-tizen/wayland-egl-tizen.h"
6 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
10 #include "tpl_internal.h"
16 #include <tbm_bufmgr.h>
17 #include <tbm_surface.h>
18 #include <tbm_surface_internal.h>
19 #include <tbm_surface_queue.h>
20 #include <wayland-tbm-client.h>
21 #include <wayland-tbm-server.h>
22 #include <tdm_client.h>
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
/* Private typedefs for the three backend state structures defined below. */
32 typedef struct _tpl_wayland_egl_display tpl_wayland_egl_display_t;
33 typedef struct _tpl_wayland_egl_surface tpl_wayland_egl_surface_t;
34 typedef struct _tpl_wayland_egl_buffer tpl_wayland_egl_buffer_t;
/* Per-display backend state; allocated in __tpl_wayland_egl_display_init()
 * and stored in display->backend.data.
 * NOTE(review): this listing has gaps (missing closing braces etc.);
 * members documented as visible. */
36 struct _tpl_wayland_egl_display {
37 struct wayland_tbm_client *wl_tbm_client;
38 struct wl_proxy *wl_tbm; /* wayland_tbm_client proxy */
39 tdm_client *tdm_client;
40 struct wl_display *wl_dpy;
41 struct wl_event_queue *wl_tbm_event_queue;
42 #if TIZEN_FEATURE_ENABLE
43 struct tizen_surface_shm *tizen_surface_shm; /* used for surface buffer_flush */
/* wl_event_mutex guards wl_tbm event-queue dispatching; tdm_mutex guards
 * tdm_client / tdm_vblank access (see wait_vblank / commit). */
45 pthread_mutex_t wl_event_mutex;
46 pthread_mutex_t tdm_mutex;
/* Per-surface backend state; allocated in __tpl_wayland_egl_surface_init()
 * and stored in surface->backend.data. */
49 struct _tpl_wayland_egl_surface {
51 tbm_surface_queue_h tbm_queue;
52 tbm_surface_h current_buffer;
54 tpl_bool_t reset; /* TRUE if queue reseted by external */
55 tdm_client_vblank *tdm_vblank; /* vblank object for each wl_surface */
56 tpl_bool_t vblank_done;
57 tpl_bool_t is_activated;
58 tpl_bool_t set_serial_is_used; /* Will be deprecated */
61 tpl_list_t *attached_buffers; /* list for tracking [ACQ]~[REL] buffers */
62 tpl_list_t *dequeued_buffers; /* list for tracking [DEQ]~[ENQ] buffers */
63 #if TIZEN_FEATURE_ENABLE
64 struct tizen_surface_shm_flusher *tizen_surface_shm_flusher; /* wl_proxy for buffer flush */
/* Per-buffer backend state, attached to each tbm_surface via
 * tbm_surface_internal user data (KEY_tpl_wayland_egl_buffer). */
68 struct _tpl_wayland_egl_buffer {
69 tpl_wayland_egl_display_t *display;
70 tpl_wayland_egl_surface_t *wayland_egl_surface;
76 tpl_bool_t w_rotated; /* TRUE if need to call wayland_tbm_client_set_buffer_transform */
77 tpl_bool_t is_new; /* for frontbuffer mode */
78 tpl_bool_t need_to_release; /* for checking need release */
79 struct wl_proxy *wl_proxy; /* wl_buffer proxy */
80 unsigned int serial; /* increase while dequeue */
83 static tpl_list_t *committed_wl_buffers = NULL;
84 static pthread_mutex_t g_list_mutex;
86 static const struct wl_buffer_listener buffer_release_listener;
88 static int tpl_wayland_egl_buffer_key;
89 #define KEY_tpl_wayland_egl_buffer (unsigned long)(&tpl_wayland_egl_buffer_key)
/* Forward declarations for the buffer-flusher helpers and the buffer
 * destructor used as the tbm user-data free callback.
 * NOTE(review): the return-type lines are not visible in this listing. */
91 #if TIZEN_FEATURE_ENABLE
93 __tpl_wayland_egl_display_buffer_flusher_init(
94 tpl_wayland_egl_display_t *wayland_egl_display);
96 __tpl_wayland_egl_display_buffer_flusher_fini(
97 tpl_wayland_egl_display_t *wayland_egl_display);
99 __tpl_wayland_egl_surface_buffer_flusher_init(tpl_surface_t *surface);
101 __tpl_wayland_egl_surface_buffer_flusher_fini(tpl_surface_t *surface);
104 __tpl_wayland_egl_buffer_free(tpl_wayland_egl_buffer_t *wayland_egl_buffer);
/* Allocates and zero/default-initializes a struct tizen_private, the
 * driver_private payload hung off a wl_egl_window. All callbacks start NULL;
 * surface_init() fills them in later.
 * NOTE(review): the calloc NULL-check and the return statement fall in a gap
 * of this listing (lines 111, 127-131) — presumably present in the original. */
106 static struct tizen_private *
107 tizen_private_create()
109 struct tizen_private *private = NULL;
110 private = (struct tizen_private *)calloc(1, sizeof(struct tizen_private));
112 private->magic = WL_EGL_TIZEN_MAGIC;
113 private->rotation = 0;
114 private->frontbuffer_mode = 0;
115 private->transform = 0;
116 private->window_transform = 0;
119 private->data = NULL;
120 private->rotate_callback = NULL;
121 private->get_rotation_capability = NULL;
122 private->set_window_serial_callback = NULL;
123 private->set_frontbuffer_callback = NULL;
124 private->create_commit_sync_fd = NULL;
125 private->create_presentation_sync_fd = NULL;
126 private->merge_sync_fds = NULL;
/* Returns the tizen_private attached to a wl_egl_window, or (per the
 * NULL-check) presumably NULL when the window or its driver_private is
 * unset — the NULL-return line falls in a gap of this listing. */
132 static struct tizen_private *
133 _get_tizen_private(struct wl_egl_window * wl_egl_window)
135 if (wl_egl_window && wl_egl_window->driver_private)
136 return (struct tizen_private *)wl_egl_window->driver_private;
/* Fetches the tpl_wayland_egl_buffer_t previously attached to a tbm_surface
 * under KEY_tpl_wayland_egl_buffer; validates the surface first. */
141 static TPL_INLINE tpl_wayland_egl_buffer_t *
142 __tpl_wayland_egl_get_wayland_buffer_from_tbm_surface(tbm_surface_h surface)
144 tpl_wayland_egl_buffer_t *buf = NULL;
146 if (!tbm_surface_internal_is_valid(surface))
149 tbm_surface_internal_get_user_data(surface, KEY_tpl_wayland_egl_buffer,
/* Attaches a tpl_wayland_egl_buffer_t to a tbm_surface; registers
 * __tpl_wayland_egl_buffer_free as the destructor so the wrapper is freed
 * when the tbm_surface's user data is dropped. */
155 static TPL_INLINE void
156 __tpl_wayland_egl_set_wayland_buffer_to_tbm_surface(tbm_surface_h surface,
157 tpl_wayland_egl_buffer_t *buf)
159 tbm_surface_internal_add_user_data(surface, KEY_tpl_wayland_egl_buffer,
160 (tbm_data_free)__tpl_wayland_egl_buffer_free);
162 tbm_surface_internal_set_user_data(surface, KEY_tpl_wayland_egl_buffer,
/* Heuristically decides whether a native display handle is a wl_display:
 * first by pointer identity against wl_display_interface, then by comparing
 * the interface name string (handles multiple copies of libwayland). */
166 static TPL_INLINE tpl_bool_t
167 __tpl_wayland_egl_display_is_wl_display(tpl_handle_t native_dpy)
169 TPL_ASSERT(native_dpy);
171 struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
173 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
174 is a memory address pointing the structure of wl_display_interface. */
175 if (wl_egl_native_dpy == &wl_display_interface)
178 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
179 strlen(wl_display_interface.name)) == 0) {
/* Initializes the wayland-egl display backend: allocates the per-display
 * state, connects wayland-tbm, creates a private wl_event_queue for tbm
 * events, optionally creates a tdm_client for vblank waiting (controlled by
 * env TPL_WAIT_VBLANK, default enabled), initializes the mutexes and the
 * process-global committed_wl_buffers list.
 * Returns TPL_ERROR_NONE on success; on failure runs the free_wl_display
 * cleanup path (label not visible in this listing) and returns an error.
 * NOTE(review): the error path destroys wl_event_mutex/tdm_mutex even when
 * a goto fired before they were pthread_mutex_init'ed — verify against the
 * complete file. */
187 __tpl_wayland_egl_display_init(tpl_display_t *display)
189 tpl_wayland_egl_display_t *wayland_egl_display = NULL;
193 /* Do not allow default display in wayland. */
194 if (!display->native_handle) {
195 TPL_ERR("Invalid native handle for display.");
196 return TPL_ERROR_INVALID_PARAMETER;
199 wayland_egl_display = (tpl_wayland_egl_display_t *) calloc(1,
200 sizeof(tpl_wayland_egl_display_t));
201 if (!wayland_egl_display) {
202 TPL_ERR("Failed to allocate memory for new tpl_wayland_egl_display_t.");
203 return TPL_ERROR_OUT_OF_MEMORY;
206 display->backend.data = wayland_egl_display;
207 display->bufmgr_fd = -1;
209 if (__tpl_wayland_egl_display_is_wl_display(display->native_handle)) {
210 tdm_error tdm_err = 0;
211 struct wl_display *wl_dpy =
212 (struct wl_display *)display->native_handle;
213 wayland_egl_display->wl_tbm_client =
214 wayland_tbm_client_init((struct wl_display *) wl_dpy);
215 char *env = tpl_getenv("TPL_WAIT_VBLANK");
217 if (!wayland_egl_display->wl_tbm_client) {
218 TPL_ERR("Wayland TBM initialization failed!");
219 goto free_wl_display;
/* Dedicated queue so tbm events can be dispatched independently of the
 * application's default queue. */
222 wayland_egl_display->wl_tbm_event_queue =
223 wl_display_create_queue(wl_dpy);
224 if (!wayland_egl_display->wl_tbm_event_queue) {
225 TPL_ERR("Failed to create wl_event_queue.");
226 goto free_wl_display;
229 if (!wayland_tbm_client_set_event_queue(
230 wayland_egl_display->wl_tbm_client,
231 wayland_egl_display->wl_tbm_event_queue)) {
232 TPL_ERR("Failed to set event_queue to wl_tbm.");
233 goto free_wl_display;
/* Unset or truthy TPL_WAIT_VBLANK enables tdm vblank waiting. */
236 if (env == NULL || atoi(env)) {
237 TPL_LOG_B("WL_EGL", "[INIT] ENABLE wait vblank.");
238 wayland_egl_display->tdm_client = tdm_client_create(&tdm_err);
239 if (!wayland_egl_display->tdm_client) {
240 TPL_ERR("tdm client initialization failed! tdm_err=%d", tdm_err);
241 goto free_wl_display;
244 TPL_LOG_B("WL_EGL", "[INIT] DISABLE wait vblank.");
245 wayland_egl_display->tdm_client = NULL;
248 if (pthread_mutex_init(&wayland_egl_display->wl_event_mutex, NULL)) {
249 TPL_ERR("Failed to initialize wl_event_mutex.");
250 goto free_wl_display;
253 if (pthread_mutex_init(&wayland_egl_display->tdm_mutex, NULL)) {
254 TPL_ERR("Failed to initialize tdm_mutex.");
255 goto free_wl_display;
/* Lazily create the process-global committed-buffer list; failures here
 * are logged but deliberately non-fatal. */
258 if (!committed_wl_buffers) {
259 committed_wl_buffers = __tpl_list_alloc();
260 if (!committed_wl_buffers)
261 TPL_ERR("Failed to allocate committed_wl_buffers list.");
262 if (pthread_mutex_init(&g_list_mutex, NULL) != 0)
263 TPL_ERR("g_list_mutex init failed.");
266 wayland_egl_display->wl_dpy = wl_dpy;
267 #if TIZEN_FEATURE_ENABLE
268 __tpl_wayland_egl_display_buffer_flusher_init(wayland_egl_display);
272 TPL_ERR("Invalid native handle for display.");
273 goto free_wl_display;
276 TPL_LOG_B("WL_EGL", "[INIT] tpl_wayland_egl_display_t(%p) wl_tbm_client(%p)",
277 wayland_egl_display, wayland_egl_display->wl_tbm_client);
279 return TPL_ERROR_NONE;
/* ---- free_wl_display error path (label line not visible in listing) ---- */
282 if (wayland_egl_display->tdm_client)
283 tdm_client_destroy(wayland_egl_display->tdm_client);
284 if (wayland_egl_display->wl_tbm_client)
285 wayland_tbm_client_set_event_queue(wayland_egl_display->wl_tbm_client, NULL);
286 if (wayland_egl_display->wl_tbm_client)
287 wayland_tbm_client_deinit(wayland_egl_display->wl_tbm_client);
288 if (wayland_egl_display->wl_tbm_event_queue)
289 wl_event_queue_destroy(wayland_egl_display->wl_tbm_event_queue);
291 pthread_mutex_destroy(&wayland_egl_display->wl_event_mutex);
292 pthread_mutex_destroy(&wayland_egl_display->tdm_mutex);
294 wayland_egl_display->wl_tbm_event_queue = NULL;
295 wayland_egl_display->wl_tbm_client = NULL;
296 wayland_egl_display->tdm_client = NULL;
297 wayland_egl_display->wl_dpy = NULL;
299 free(wayland_egl_display);
300 display->backend.data = NULL;
301 return TPL_ERROR_INVALID_OPERATION;
/* Tears down the wayland-egl display backend: destroys the tdm client, the
 * wayland-tbm client and its event queue (each under its mutex), then both
 * per-display mutexes, and finally frees the process-global
 * committed_wl_buffers list and destroys g_list_mutex.
 * Mutex lock results are checked so unlock is only attempted after a
 * successful lock. */
305 __tpl_wayland_egl_display_fini(tpl_display_t *display)
307 tpl_wayland_egl_display_t *wayland_egl_display;
311 wayland_egl_display = (tpl_wayland_egl_display_t *)display->backend.data;
312 if (wayland_egl_display) {
314 int tdm_lock_res = 0;
315 TPL_LOG_B("WL_EGL", "[FINI] tpl_wayland_egl_display_t(%p) wl_tbm_client(%p)",
316 wayland_egl_display, wayland_egl_display->wl_tbm_client);
318 lock_res = pthread_mutex_lock(&wayland_egl_display->wl_event_mutex);
319 #if TIZEN_FEATURE_ENABLE
320 __tpl_wayland_egl_display_buffer_flusher_fini(wayland_egl_display);
322 tdm_lock_res = pthread_mutex_lock(&wayland_egl_display->tdm_mutex);
323 if (wayland_egl_display->tdm_client)
324 tdm_client_destroy(wayland_egl_display->tdm_client);
325 if (tdm_lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->tdm_mutex);
327 if (wayland_egl_display->wl_tbm_client)
328 wayland_tbm_client_set_event_queue(wayland_egl_display->wl_tbm_client, NULL);
330 if (wayland_egl_display->wl_tbm_client)
331 wayland_tbm_client_deinit(wayland_egl_display->wl_tbm_client);
333 if (wayland_egl_display->wl_tbm_event_queue)
334 wl_event_queue_destroy(wayland_egl_display->wl_tbm_event_queue);
336 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
337 if (pthread_mutex_destroy(&wayland_egl_display->wl_event_mutex))
338 TPL_ERR("Failed to destroy wl_event_mutex(%p)",
339 &wayland_egl_display->wl_event_mutex);
341 if (pthread_mutex_destroy(&wayland_egl_display->tdm_mutex))
342 TPL_ERR("Failed to destroy tdm_mutex(%p)",
343 &wayland_egl_display->tdm_mutex);
345 wayland_egl_display->wl_tbm_event_queue = NULL;
346 wayland_egl_display->wl_tbm_client = NULL;
347 wayland_egl_display->tdm_client = NULL;
348 wayland_egl_display->wl_dpy = NULL;
349 free(wayland_egl_display);
/* Global teardown: runs even when this display had no backend data. */
352 if (pthread_mutex_lock(&g_list_mutex) == 0) {
353 if (committed_wl_buffers)
354 __tpl_list_free(committed_wl_buffers, NULL);
355 committed_wl_buffers = NULL;
356 pthread_mutex_unlock(&g_list_mutex);
359 if (pthread_mutex_destroy(&g_list_mutex) != 0)
360 TPL_ERR("Failed to destroy g_list_mutex");
362 display->backend.data = NULL;
/* Maps an EGL-style config request to a TBM pixel format: only window
 * surfaces with 8/8/8 RGB at 24- or 32-bit depth are supported; alpha 8
 * selects ARGB8888 and alpha 0 selects XRGB8888. Output pointers
 * (native_visual_id, is_slow) are optional. Anything else is rejected
 * with TPL_ERROR_INVALID_PARAMETER. */
366 __tpl_wayland_egl_display_query_config(tpl_display_t *display,
367 tpl_surface_type_t surface_type,
368 int red_size, int green_size,
369 int blue_size, int alpha_size,
370 int color_depth, int *native_visual_id,
375 if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
376 green_size == 8 && blue_size == 8 &&
377 (color_depth == 32 || color_depth == 24)) {
379 if (alpha_size == 8) {
380 if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
381 if (is_slow) *is_slow = TPL_FALSE;
382 return TPL_ERROR_NONE;
384 if (alpha_size == 0) {
385 if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
386 if (is_slow) *is_slow = TPL_FALSE;
387 return TPL_ERROR_NONE;
391 return TPL_ERROR_INVALID_PARAMETER;
/* Config filter hook: this backend accepts every config unchanged, so the
 * parameters are deliberately ignored. */
395 __tpl_wayland_egl_display_filter_config(tpl_display_t *display, int *visual_id,
399 TPL_IGNORE(visual_id);
400 TPL_IGNORE(alpha_size);
401 return TPL_ERROR_NONE;
/* Reports size/format of a wl_egl_window. The window itself carries no
 * format: when a tpl_surface is already bound (via tizen_private->data) its
 * format is reused, otherwise the format is derived from the requested
 * alpha size (8 -> ARGB8888, 0 -> XRGB8888). All out-params optional. */
405 __tpl_wayland_egl_display_get_window_info(tpl_display_t *display,
406 tpl_handle_t window, int *width,
407 int *height, tbm_format *format,
408 int depth, int a_size)
413 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
414 struct tizen_private *tizen_private = _get_tizen_private(wl_egl_window);
417 /* Wayland-egl window doesn't have native format information.
418 It is fixed from 'EGLconfig' when called eglCreateWindowSurface().
419 So we use the tpl_surface format instead. */
420 if (tizen_private && tizen_private->data) {
421 tpl_surface_t *surface = (tpl_surface_t *)tizen_private->data;
422 *format = surface->format;
424 if (a_size == 8) *format = TBM_FORMAT_ARGB8888;
425 else if (a_size == 0) *format = TBM_FORMAT_XRGB8888;
428 if (width != NULL) *width = wl_egl_window->width;
429 if (height != NULL) *height = wl_egl_window->height;
431 return TPL_ERROR_NONE;
/* Reports size/format of a native pixmap by resolving it to a tbm_surface
 * through wayland-tbm-server and querying that surface. Out-params are
 * optional; fails when the pixmap cannot be resolved. */
435 __tpl_wayland_egl_display_get_pixmap_info(tpl_display_t *display,
436 tpl_handle_t pixmap, int *width,
437 int *height, tbm_format *format)
439 tbm_surface_h tbm_surface = NULL;
441 tbm_surface = wayland_tbm_server_get_surface(NULL,
442 (struct wl_resource *)pixmap);
444 TPL_ERR("Failed to get tbm_surface_h from native pixmap.");
445 return TPL_ERROR_INVALID_OPERATION;
448 if (width) *width = tbm_surface_get_width(tbm_surface);
449 if (height) *height = tbm_surface_get_height(tbm_surface);
450 if (format) *format = tbm_surface_get_format(tbm_surface);
452 return TPL_ERROR_NONE;
/* Resolves a native pixmap handle to its backing tbm_surface via
 * wayland-tbm-server; the success-return line falls in a gap of this
 * listing. */
456 __tpl_wayland_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
458 tbm_surface_h tbm_surface = NULL;
462 tbm_surface = wayland_tbm_server_get_surface(NULL,
463 (struct wl_resource *)pixmap);
465 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
/* Forward declarations of the wl_egl_window callbacks installed by
 * surface_init(); bodies appear later in the file. Return-type lines are
 * not visible in this listing. */
473 __cb_client_window_resize_callback(struct wl_egl_window *wl_egl_window,
477 __cb_client_window_destroy_callback(void *private);
480 __cb_client_window_rotate_callback(struct wl_egl_window *wl_egl_window,
484 __cb_client_window_get_rotation_capability(struct wl_egl_window *wl_egl_window,
488 __cb_client_window_set_frontbuffer_mode(struct wl_egl_window *wl_egl_window,
489 void *private, int set);
492 __cb_client_window_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
493 void *private, unsigned int serial);
/* tbm_surface_queue reset callback (registered in surface_init): marks the
 * surface's `reset` flag under the object lock and forwards the event to the
 * frontend's reset_cb, if any. `data` is the owning tpl_surface_t. */
496 __cb_tbm_surface_queue_reset_callback(tbm_surface_queue_h surface_queue,
499 tpl_surface_t *surface = NULL;
500 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
502 surface = (tpl_surface_t *)data;
503 TPL_CHECK_ON_NULL_RETURN(surface);
505 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
506 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface);
509 "[QUEUE_RESET_CB] tpl_wayland_egl_surface_t(%p) surface_queue(%p)",
510 data, surface_queue);
512 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
514 wayland_egl_surface->reset = TPL_TRUE;
516 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
518 if (surface->reset_cb)
519 surface->reset_cb(surface->reset_data);
/* Creates a per-surface tdm vblank object on the "primary" output and
 * configures it (fake vblank enabled, async mode). Stores the result in
 * wayland_egl_surface->tdm_vblank. Returns TPL_ERROR_NONE on success,
 * TPL_ERROR_INVALID_OPERATION on any tdm failure. */
523 __tpl_wayland_egl_surface_create_vblank(tpl_wayland_egl_surface_t
524 *wayland_egl_surface,
525 tdm_client *tdm_client)
527 tdm_client_output *tdm_output = NULL;
528 tdm_error tdm_err_ret;
530 TPL_ASSERT(wayland_egl_surface);
531 TPL_ASSERT(tdm_client);
533 tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err_ret);
535 TPL_ERR("Failed to get tdm client output. tdm_err(%d)", tdm_err_ret);
536 return TPL_ERROR_INVALID_OPERATION;
539 wayland_egl_surface->tdm_vblank =
540 tdm_client_output_create_vblank(tdm_output, &tdm_err_ret);
541 if (!wayland_egl_surface->tdm_vblank) {
542 TPL_ERR("Failed to create tdm vblank object. tdm_err(%d)", tdm_err_ret);
543 return TPL_ERROR_INVALID_OPERATION;
/* Fake vblank keeps frame pacing alive when the output is off; sync=0
 * makes tdm_client_vblank_wait non-blocking (events handled later). */
546 tdm_client_vblank_set_enable_fake(wayland_egl_surface->tdm_vblank, 1);
547 tdm_client_vblank_set_sync(wayland_egl_surface->tdm_vblank, 0);
550 "[TDM_VBLANK_INIT] tpl_wayland_egl_surface_t(%p) tdm_vblank(%p)",
551 wayland_egl_surface, wayland_egl_surface->tdm_vblank);
553 return TPL_ERROR_NONE;
/* Initializes the per-surface backend state for a window surface:
 *  - allocates tpl_wayland_egl_surface_t and its tracking lists,
 *  - creates the tbm_surface_queue (tiled when the bufmgr supports tiled
 *    memory; a plain sequence queue when wl_egl_window->surface is NULL),
 *  - installs the queue-reset callback and GUARANTEE_CYCLE mode,
 *  - wires the wl_egl_window / tizen_private callbacks back to this surface,
 *  - creates the per-surface tdm vblank object when a tdm_client exists.
 * On failure unwinds via the chain of labels at the bottom and returns an
 * error code; on success returns TPL_ERROR_NONE. */
557 __tpl_wayland_egl_surface_init(tpl_surface_t *surface)
559 tpl_wayland_egl_display_t *wayland_egl_display;
560 tpl_wayland_egl_surface_t *wayland_egl_surface;
561 struct wl_egl_window *wl_egl_window;
562 struct tizen_private *tizen_private = NULL;
563 tbm_bufmgr bufmgr = NULL;
564 unsigned int capability;
565 int flags = TBM_BO_DEFAULT;
568 TPL_ASSERT(surface->display);
569 TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
570 TPL_ASSERT(surface->native_handle);
572 wayland_egl_display =
573 (tpl_wayland_egl_display_t *)surface->display->backend.data;
574 wl_egl_window = (struct wl_egl_window *)surface->native_handle;
576 wayland_egl_surface = (tpl_wayland_egl_surface_t *) calloc(1,
577 sizeof(tpl_wayland_egl_surface_t));
578 if (!wayland_egl_surface) {
579 TPL_ERR("Failed to allocate memory for new tpl_wayland_egl_surface_t.");
580 return TPL_ERROR_OUT_OF_MEMORY;
583 if (__tpl_object_init(&wayland_egl_surface->base, TPL_OBJECT_SURFACE,
584 NULL) != TPL_ERROR_NONE) {
585 TPL_ERR("Failed to initialize backend surface's base class!");
586 goto tpl_object_init_fail;
589 surface->backend.data = (void *)wayland_egl_surface;
590 wayland_egl_surface->tbm_queue = NULL;
591 wayland_egl_surface->resized = TPL_FALSE;
592 wayland_egl_surface->reset = TPL_FALSE;
593 wayland_egl_surface->vblank_done = TPL_TRUE;
594 wayland_egl_surface->is_activated = TPL_FALSE;
595 wayland_egl_surface->current_buffer = NULL;
596 wayland_egl_surface->set_serial_is_used = TPL_FALSE;
597 wayland_egl_surface->serial = 0;
599 wayland_egl_surface->attached_buffers = __tpl_list_alloc();
600 if (!wayland_egl_surface->attached_buffers) {
601 TPL_ERR("Failed to allocate attached buffers tracking lists.");
602 goto alloc_attached_buffers_fail;
605 wayland_egl_surface->dequeued_buffers = __tpl_list_alloc();
606 if (!wayland_egl_surface->dequeued_buffers) {
607 TPL_ERR("Failed to allocate dequeue buffers tracking lists.");
608 goto alloc_dequeue_buffers_fail;
/* Probe the buffer manager once just to learn tiled-memory capability. */
611 bufmgr = tbm_bufmgr_init(-1);
612 capability = tbm_bufmgr_get_capability(bufmgr);
613 tbm_bufmgr_deinit(bufmgr);
615 if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY)
616 flags = TBM_BO_TILED;
618 if (wl_egl_window->surface) {
619 if (flags == TBM_BO_TILED) {
620 wayland_egl_surface->tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
621 wayland_egl_display->wl_tbm_client,
622 wl_egl_window->surface,
623 surface->num_buffers,
624 wl_egl_window->width,
625 wl_egl_window->height,
628 wayland_egl_surface->tbm_queue = wayland_tbm_client_create_surface_queue(
629 wayland_egl_display->wl_tbm_client,
630 wl_egl_window->surface,
631 surface->num_buffers,
632 wl_egl_window->width,
633 wl_egl_window->height,
637 /*Why wl_surface is NULL ?*/
638 wayland_egl_surface->tbm_queue = tbm_surface_queue_sequence_create(
639 surface->num_buffers,
640 wl_egl_window->width,
641 wl_egl_window->height,
645 if (!wayland_egl_surface->tbm_queue) {
646 TPL_ERR("TBM surface queue creation failed!");
647 goto queue_create_fail;
650 if (tbm_surface_queue_set_modes(wayland_egl_surface->tbm_queue,
651 TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
652 TBM_SURFACE_QUEUE_ERROR_NONE) {
653 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
654 wayland_egl_surface->tbm_queue);
655 goto guarantee_mode_set_fail;
658 /* Set reset_callback to tbm_queue */
659 if (tbm_surface_queue_add_reset_cb(wayland_egl_surface->tbm_queue,
660 __cb_tbm_surface_queue_reset_callback,
662 TPL_ERR("TBM surface queue add reset cb failed!");
663 goto add_reset_cb_fail;
/* Reuse the window's existing tizen_private, or create one. */
667 if (wl_egl_window->driver_private)
668 tizen_private = _get_tizen_private(wl_egl_window);
670 tizen_private = tizen_private_create();
671 wl_egl_window->driver_private = (void *)tizen_private;
674 if (!tizen_private) {
675 TPL_ERR("Failed to create tizen_private for wl_egl_window(%p)", wl_egl_window);
676 goto tizen_private_create_fail;
679 surface->width = wl_egl_window->width;
680 surface->height = wl_egl_window->height;
681 surface->rotation = tizen_private->rotation;
682 surface->rotation_capability = TPL_FALSE;
/* -1 forces the first commit to push a buffer transform. */
684 wayland_egl_surface->latest_transform = -1;
685 wl_egl_window->resize_callback = (void *)__cb_client_window_resize_callback;
686 wl_egl_window->destroy_window_callback = (void *)__cb_client_window_destroy_callback;
688 tizen_private->data = (void *)surface;
689 tizen_private->rotate_callback = (void *)__cb_client_window_rotate_callback;
690 tizen_private->get_rotation_capability = (void *)
691 __cb_client_window_get_rotation_capability;
692 tizen_private->set_frontbuffer_callback = (void *)
693 __cb_client_window_set_frontbuffer_mode;
694 tizen_private->set_window_serial_callback = (void *)
695 __cb_client_window_set_window_serial_callback;
697 /* tdm_vblank object decide to be maintained every tpl_wayland_egl_surface
698 for the case where the several surfaces is created in one display connection. */
699 if (wayland_egl_display->tdm_client) {
700 int tdm_lock_res = pthread_mutex_lock(&wayland_egl_display->tdm_mutex);
701 if (TPL_ERROR_NONE != __tpl_wayland_egl_surface_create_vblank(
703 wayland_egl_display->tdm_client)) {
704 TPL_ERR("TBM surface create vblank failed!");
705 if (tdm_lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->tdm_mutex);
706 goto create_vblank_fail;
708 if (tdm_lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->tdm_mutex);
710 #if TIZEN_FEATURE_ENABLE
711 __tpl_wayland_egl_surface_buffer_flusher_init(surface);
714 "[INIT] tpl_surface_t(%p) tpl_wayland_egl_surface_t(%p) tbm_queue(%p)",
715 surface, wayland_egl_surface,
716 wayland_egl_surface->tbm_queue);
718 "[INIT] tpl_wayland_egl_surface_t(%p) wl_egl_window(%p) (%dx%d)",
719 wayland_egl_surface, wl_egl_window, surface->width, surface->height);
721 return TPL_ERROR_NONE;
/* ---- error unwind: each label undoes the step directly above it ---- */
723 tizen_private_create_fail:
725 tbm_surface_queue_remove_reset_cb(wayland_egl_surface->tbm_queue,
726 __cb_tbm_surface_queue_reset_callback,
728 guarantee_mode_set_fail:
730 tbm_surface_queue_destroy(wayland_egl_surface->tbm_queue);
731 wayland_egl_surface->tbm_queue = NULL;
733 __tpl_list_free(wayland_egl_surface->dequeued_buffers, NULL);
734 alloc_dequeue_buffers_fail:
735 __tpl_list_free(wayland_egl_surface->attached_buffers, NULL);
736 alloc_attached_buffers_fail:
737 __tpl_object_fini(&wayland_egl_surface->base);
738 tpl_object_init_fail:
739 free(wayland_egl_surface);
740 surface->backend.data = NULL;
741 return TPL_ERROR_INVALID_OPERATION;
/* Tears down the per-surface backend state: detaches all callbacks from the
 * wl_egl_window/tizen_private, destroys the tdm vblank object, releases any
 * buffers still tracked between commit and release (attached_buffers) back
 * to the tbm_queue, destroys the queue and the dequeued_buffers list, and
 * finally frees the backend object. */
745 __tpl_wayland_egl_surface_fini(tpl_surface_t *surface)
747 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
748 tpl_wayland_egl_display_t *wayland_egl_display = NULL;
751 TPL_ASSERT(surface->display);
753 wayland_egl_surface = (tpl_wayland_egl_surface_t *) surface->backend.data;
754 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface);
756 wayland_egl_display = (tpl_wayland_egl_display_t *)
757 surface->display->backend.data;
758 TPL_CHECK_ON_NULL_RETURN(wayland_egl_display);
760 if (surface->type == TPL_SURFACE_TYPE_WINDOW) {
761 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)
762 surface->native_handle;
763 struct tizen_private *tizen_private = _get_tizen_private(wl_egl_window);
766 TPL_ASSERT(wl_egl_window);
767 /* TPL_ASSERT(wl_egl_window->surface); */ /* to be enabled once evas/gl patch is in place */
/* Clear every callback so the window cannot call back into freed state. */
770 tizen_private->data = NULL;
771 tizen_private->rotate_callback = NULL;
772 tizen_private->get_rotation_capability = NULL;
773 tizen_private->set_frontbuffer_callback = NULL;
774 tizen_private->set_window_serial_callback = NULL;
776 tizen_private = NULL;
779 wl_egl_window->driver_private = NULL;
780 wl_egl_window->resize_callback = NULL;
781 wl_egl_window->destroy_window_callback = NULL;
782 #if TIZEN_FEATURE_ENABLE
783 __tpl_wayland_egl_surface_buffer_flusher_fini(surface);
785 if (wayland_egl_surface->tdm_vblank) {
786 int tdm_lock_res = pthread_mutex_lock(&wayland_egl_display->tdm_mutex);
788 "[TDM_VBLANK_FINI] tpl_wayland_egl_surface_t(%p) tdm_vblank(%p)",
789 wayland_egl_surface, wayland_egl_surface->tdm_vblank);
790 tdm_client_vblank_destroy(wayland_egl_surface->tdm_vblank);
791 if (tdm_lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->tdm_mutex);
794 wl_display_flush(wayland_egl_display->wl_dpy);
796 lock_res = pthread_mutex_lock(&wayland_egl_display->wl_event_mutex);
797 wl_display_dispatch_queue_pending(wayland_egl_display->wl_dpy,
798 wayland_egl_display->wl_tbm_event_queue);
799 /* When surface is destroyed, unreference tbm_surface which tracked by
800 * the list of attached_buffers in order to free the created resources.
801 * (tpl_wayland_egl_buffer_t or wl_buffer) */
802 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
803 if (wayland_egl_surface->attached_buffers) {
804 while (!__tpl_list_is_empty(wayland_egl_surface->attached_buffers)) {
805 tbm_surface_queue_error_e tsq_err;
806 tbm_surface_h tbm_surface =
807 __tpl_list_pop_front(wayland_egl_surface->attached_buffers, NULL);
808 TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE_CB] BO_NAME:%d",
809 tbm_bo_export(tbm_surface_internal_get_bo(
811 tbm_surface_internal_unref(tbm_surface);
812 tsq_err = tbm_surface_queue_release(wayland_egl_surface->tbm_queue, tbm_surface);
813 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
814 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
815 tbm_surface, tsq_err);
818 __tpl_list_free(wayland_egl_surface->attached_buffers, NULL);
819 wayland_egl_surface->attached_buffers = NULL;
821 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
823 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
826 "[FINI] tpl_wayland_egl_surface_t(%p) wl_egl_window(%p) tbm_queue(%p)",
827 wayland_egl_surface, wl_egl_window, wayland_egl_surface->tbm_queue);
828 tbm_surface_queue_destroy(wayland_egl_surface->tbm_queue);
829 wayland_egl_surface->tbm_queue = NULL;
832 /* the list of dequeued_buffers just does deletion */
833 if (wayland_egl_surface->dequeued_buffers) {
834 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
835 __tpl_list_free(wayland_egl_surface->dequeued_buffers, NULL);
836 wayland_egl_surface->dequeued_buffers = NULL;
837 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
840 __tpl_object_fini(&wayland_egl_surface->base);
841 free(wayland_egl_surface);
842 surface->backend.data = NULL;
/* Blocks (with the surface object lock temporarily dropped) until the
 * pending tdm vblank for this surface has been handled: pumps tdm client
 * events while vblank_done is FALSE and a wait is outstanding. A timeout
 * (>1000ms, per the error text) or any tdm error force-completes the wait
 * so rendering cannot deadlock on a missing vblank event.
 * NOTE(review): "timeour" in the log message is a typo in the original
 * runtime string; left untouched here. */
846 __tpl_wayland_egl_surface_wait_vblank(tpl_surface_t *surface)
848 tdm_error tdm_err = 0;
849 tpl_wayland_egl_display_t *wayland_egl_display =
850 (tpl_wayland_egl_display_t *)surface->display->backend.data;
851 tpl_wayland_egl_surface_t *wayland_egl_surface =
852 (tpl_wayland_egl_surface_t *)surface->backend.data;
853 int tdm_lock_res = 0;
855 TPL_OBJECT_UNLOCK(surface);
856 tdm_lock_res = pthread_mutex_lock(&wayland_egl_display->tdm_mutex);
857 while (wayland_egl_surface->vblank_done == TPL_FALSE &&
858 wayland_egl_surface->tdm_vblank != NULL &&
859 tdm_client_vblank_is_waiting(wayland_egl_surface->tdm_vblank)) {
860 tdm_err = tdm_client_handle_events_timeout(wayland_egl_display->tdm_client,
862 if (tdm_err == TDM_ERROR_TIMEOUT) {
863 TPL_ERR("Failed to tdm_client_handle_events. timeour (>1000)");
864 wayland_egl_surface->vblank_done = TPL_TRUE;
868 if (tdm_err != TDM_ERROR_NONE) {
869 TPL_ERR("Failed to tdm_client_handle_events.");
873 if (tdm_lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->tdm_mutex);
874 TPL_OBJECT_LOCK(surface);
878 __cb_tdm_client_wait_vblank(tdm_client_vblank *vblank, tdm_error error,
879 unsigned int sequence, unsigned int tv_sec,
880 unsigned int tv_usec, void *user_data)
882 tpl_wayland_egl_surface_t *wayland_egl_surface =
883 (tpl_wayland_egl_surface_t *)user_data;
884 wayland_egl_surface->vblank_done = TPL_TRUE;
885 TRACE_MARK("TDM_CLIENT_VBLANK");
/* Presents an enqueued tbm_surface: pushes transform/serial metadata through
 * wayland-tbm, attaches the wl_buffer, posts damage (full-buffer when no
 * rects; otherwise each rect with bottom-left -> top-left Y inversion),
 * commits the wl_surface and flushes the display. Afterwards the buffer is
 * tracked in attached_buffers (until release_cb) and its wl_buffer in the
 * global committed_wl_buffers list, and — when tdm is enabled and
 * post_interval > 0 — a vblank wait is armed with
 * __cb_tdm_client_wait_vblank.
 * NOTE(review): the choice between wl_surface_damage and
 * wl_surface_damage_buffer appears version-gated on the wl_surface proxy;
 * the branch lines fall in gaps of this listing. */
889 __tpl_wayland_egl_surface_commit(tpl_surface_t *surface,
890 tbm_surface_h tbm_surface,
891 int num_rects, const int *rects)
893 tpl_wayland_egl_buffer_t *wayland_egl_buffer = NULL;
894 struct wl_egl_window *wl_egl_window =
895 (struct wl_egl_window *)surface->native_handle;
896 tpl_wayland_egl_display_t *wayland_egl_display =
897 (tpl_wayland_egl_display_t *) surface->display->backend.data;
898 tpl_wayland_egl_surface_t *wayland_egl_surface =
899 (tpl_wayland_egl_surface_t *) surface->backend.data;
900 tdm_error tdm_err = 0;
904 __tpl_wayland_egl_get_wayland_buffer_from_tbm_surface(tbm_surface);
905 TPL_ASSERT(wayland_egl_buffer);
907 TRACE_MARK("[COMMIT] BO_NAME:%d", tbm_bo_export(wayland_egl_buffer->bo));
909 TPL_IMAGE_DUMP(tbm_surface, surface->width, surface->height);
911 version = wl_proxy_get_version((struct wl_proxy *)wl_egl_window->surface);
/* Window rotation changed since last commit: tell the compositor via tbm. */
913 if (wayland_egl_buffer->w_rotated == TPL_TRUE) {
915 wayland_tbm_client_set_buffer_transform(
916 wayland_egl_display->wl_tbm_client,
917 (void *)wayland_egl_buffer->wl_proxy,
918 wayland_egl_buffer->window_transform);
920 wayland_egl_buffer->w_rotated = TPL_FALSE;
/* Only re-send the buffer transform when it actually changed. */
923 if (wayland_egl_surface->latest_transform != wayland_egl_buffer->transform) {
924 wayland_egl_surface->latest_transform = wayland_egl_buffer->transform;
926 wl_surface_set_buffer_transform(wl_egl_window->surface,
927 wayland_egl_buffer->transform);
931 wl_surface_attach(wl_egl_window->surface, (void *)wayland_egl_buffer->wl_proxy,
932 wayland_egl_buffer->dx, wayland_egl_buffer->dy);
934 wl_egl_window->attached_width = wayland_egl_buffer->width;
935 wl_egl_window->attached_height = wayland_egl_buffer->height;
937 if (num_rects < 1 || rects == NULL) {
939 wl_surface_damage(wl_egl_window->surface,
940 wayland_egl_buffer->dx,
941 wayland_egl_buffer->dy,
942 wayland_egl_buffer->width,
943 wayland_egl_buffer->height);
945 wl_surface_damage_buffer(wl_egl_window->surface,
948 wayland_egl_buffer->width,
949 wayland_egl_buffer->height);
954 for (i = 0; i < num_rects; i++) {
955 /* The rectangles are specified relative to the bottom-left of the
956 * GL surface. So, these rectanglesd has to be converted to
957 * WINDOW(Top-left) coord like below.
958 * y = [WINDOW.HEIGHT] - (RECT.Y + RECT.HEIGHT) */
960 wayland_egl_buffer->height - (rects[i * 4 + 1] + rects[i * 4 + 3]);
962 wl_surface_damage(wl_egl_window->surface,
963 rects[i * 4 + 0], inverted_y,
964 rects[i * 4 + 2], rects[i * 4 + 3]);
966 wl_surface_damage_buffer(wl_egl_window->surface,
967 rects[i * 4 + 0], inverted_y,
968 rects[i * 4 + 2], rects[i * 4 + 3]);
973 wayland_tbm_client_set_buffer_serial(wayland_egl_display->wl_tbm_client,
974 (void *)wayland_egl_buffer->wl_proxy,
975 wayland_egl_buffer->serial);
977 wayland_egl_buffer->need_to_release = TPL_TRUE;
979 wl_surface_commit(wl_egl_window->surface);
981 wl_display_flush(wayland_egl_display->wl_dpy);
984 "[COMMIT] wl_surface(%p) wl_egl_window(%p) wl_buffer(%p)(%dx%d)",
985 wl_egl_window->surface, wl_egl_window,
986 wayland_egl_buffer->wl_proxy,
987 wayland_egl_buffer->width, wayland_egl_buffer->height);
989 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
990 if (wayland_egl_surface->attached_buffers) {
991 /* Start tracking of this tbm_surface until release_cb called. */
992 __tpl_list_push_back(wayland_egl_surface->attached_buffers,
993 (void *)tbm_surface);
995 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
997 if (pthread_mutex_lock(&g_list_mutex) == 0) {
998 if (committed_wl_buffers) {
999 /* Start tracking of wl_buffer which is committed by this wayland_egl_surface */
1000 __tpl_list_push_back(committed_wl_buffers,
1001 (void *)wayland_egl_buffer->wl_proxy);
1003 pthread_mutex_unlock(&g_list_mutex);
1006 /* TPL_WAIT_VBLANK = 1 */
1007 if (wayland_egl_display->tdm_client && surface->post_interval > 0) {
1008 int tdm_lock_res = pthread_mutex_lock(&wayland_egl_display->tdm_mutex);
1009 tdm_err = tdm_client_vblank_wait(wayland_egl_surface->tdm_vblank,
1010 surface->post_interval, /* interval */
1011 __cb_tdm_client_wait_vblank, /* handler */
1012 surface->backend.data); /* user_data */
1014 if (tdm_err == TDM_ERROR_NONE)
1015 wayland_egl_surface->vblank_done = TPL_FALSE;
1017 TPL_ERR("Failed to tdm_client_wait_vblank. error:%d", tdm_err);
1018 if (tdm_lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->tdm_mutex);
1021 TRACE_ASYNC_BEGIN((intptr_t)tbm_surface, "[COMMIT ~ RELEASE_CB] BO_NAME:%d",
1022 tbm_bo_export(wayland_egl_buffer->bo));
/*
 * Enqueue a client-rendered buffer and push it to the display server.
 *
 * Flow: (optionally) wait on sync_fence -> stop [DEQ]~[ENQ] tracking ->
 * tbm_surface_queue_enqueue -> wait for the previous frame's vblank ->
 * tbm_surface_queue_acquire -> __tpl_wayland_egl_surface_commit.
 *
 * surface        : tpl_surface that owns the tbm_queue.
 * tbm_surface    : buffer previously handed out by dequeue_buffer.
 * num_rects/rects: damage rectangles, forwarded to the commit path.
 * sync_fence     : render-done fence fd, or -1 for none. When >= 0 this
 *                  function blocks in tbm_sync_fence_wait until it signals.
 *
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_PARAMETER for a bad
 * surface/buffer, TPL_ERROR_INVALID_OPERATION when enqueue/acquire fails.
 */
1026 __tpl_wayland_egl_surface_enqueue_buffer(tpl_surface_t *surface,
1027 tbm_surface_h tbm_surface,
1028 int num_rects, const int *rects, tbm_fd sync_fence)
1030 TPL_ASSERT(surface);
1031 TPL_ASSERT(surface->display);
1032 TPL_ASSERT(tbm_surface);
1033 TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
1035 tpl_wayland_egl_surface_t *wayland_egl_surface =
1036 (tpl_wayland_egl_surface_t *) surface->backend.data;
1037 tpl_wayland_egl_buffer_t *wayland_egl_buffer = NULL;
1038 tbm_surface_queue_error_e tsq_err;
1040 if (!wayland_egl_surface) return TPL_ERROR_INVALID_PARAMETER;
1042 if (!tbm_surface_internal_is_valid(tbm_surface)) {
1043 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
1045 return TPL_ERROR_INVALID_PARAMETER;
1048 TRACE_MARK("[ENQ] BO_NAME:%d",
1049 tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
1052 "[ENQ] tpl_wayland_egl_surface_t(%p) tbm_queue(%p) tbm_surface(%p) bo(%d)",
1053 wayland_egl_surface, wayland_egl_surface->tbm_queue,
1054 tbm_surface, tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
1056 wayland_egl_buffer =
1057 __tpl_wayland_egl_get_wayland_buffer_from_tbm_surface(tbm_surface);
1059 if (!wayland_egl_buffer) return TPL_ERROR_INVALID_PARAMETER;
1061 /* In frontbuffer mode, will skip tbm_surface_queue_enqueue, acquire, and
1062 * commit if surface->frontbuffer that is already set and the tbm_surface
1063 * client want to enqueue are the same.
1065 if (surface->is_frontbuffer_mode && surface->frontbuffer == tbm_surface) {
1066 /* The first buffer to be activated in frontbuffer mode must be
1067 * committed. Subsequent frames do not need to be committed because
1068 * the buffer is already displayed.
1070 if (wayland_egl_buffer->is_new) {
1071 TPL_LOG_D("[FRONTBUFFER RENDERING MODE]", "tbm_surface(%p) bo(%d)",
1072 tbm_surface, tbm_bo_export(wayland_egl_buffer->bo));
1075 "[ENQ_SKIP][F] Client already uses frontbuffer(%p)",
1076 surface->frontbuffer);
1077 TRACE_ASYNC_END((intptr_t)wayland_egl_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
1078 tbm_bo_export(wayland_egl_buffer->bo));
1079 return TPL_ERROR_NONE;
/* From here on this buffer is no longer "new"; a later frontbuffer enqueue
 * of the same buffer takes the skip path above. */
1083 wayland_egl_buffer->is_new = TPL_FALSE;
/* Block until GPU rendering finished when the caller handed us a fence. */
1085 if (sync_fence != -1) {
1086 tbm_sync_fence_wait(sync_fence, -1);
1090 if (wayland_egl_surface->dequeued_buffers) {
1091 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
1092 /* Stop tracking of this render_done tbm_surface. */
1093 __tpl_list_remove_data(wayland_egl_surface->dequeued_buffers,
1094 (void *)tbm_surface, TPL_FIRST, NULL);
1095 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
1098 tsq_err = tbm_surface_queue_enqueue(wayland_egl_surface->tbm_queue,
1100 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1101 TPL_ERR("Failed to enqueue tbm_surface(%p) to tbm_queue(%p) | tsq_err = %d",
1102 tbm_surface, wayland_egl_surface->tbm_queue, tsq_err);
1103 return TPL_ERROR_INVALID_OPERATION;
1106 TRACE_ASYNC_END((intptr_t)wayland_egl_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
1107 tbm_bo_export(wayland_egl_buffer->bo));
/* Throttle to the display refresh: wait for the vblank event requested by
 * the previous commit before acquiring/committing this frame. */
1109 TRACE_BEGIN("[WAIT_VBLANK]");
1110 if (wayland_egl_surface->vblank_done == TPL_FALSE && surface->post_interval > 0)
1111 __tpl_wayland_egl_surface_wait_vblank(surface);
/* This backend acquires right after enqueue; the commit below hands the
 * buffer to the compositor, and release_cb returns it to the queue. */
1114 tsq_err = tbm_surface_queue_acquire(wayland_egl_surface->tbm_queue,
1116 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1117 TPL_ERR("Failed to acquire tbm_surface(%p) from tbm_queue(%p). | tsq_err = %d",
1118 tbm_surface, wayland_egl_surface->tbm_queue, tsq_err);
1119 return TPL_ERROR_INVALID_OPERATION;
1122 __tpl_wayland_egl_surface_commit(surface, tbm_surface, num_rects, rects);
1124 return TPL_ERROR_NONE;
/*
 * Backend validate hook: reports whether buffers dequeued from this surface
 * are still usable. Returns TPL_FALSE when the window was resized by
 * wl_egl_window or the tbm_queue was reset externally, signalling the caller
 * to dequeue a fresh buffer.
 */
1128 __tpl_wayland_egl_surface_validate(tpl_surface_t *surface)
1130 tpl_bool_t retval = TPL_TRUE;
1132 TPL_ASSERT(surface);
1133 TPL_ASSERT(surface->backend.data);
1135 tpl_wayland_egl_surface_t *wayland_egl_surface =
1136 (tpl_wayland_egl_surface_t *)surface->backend.data;
/* Valid only when neither a resize nor an external queue reset is pending. */
1138 retval = !(wayland_egl_surface->resized || wayland_egl_surface->reset);
/*
 * Block until the surface's tbm_queue has a dequeuable buffer.
 *
 * Fast path: dispatch already-pending events on wl_tbm_event_queue and
 * return TPL_ERROR_NONE if a buffer became available. Otherwise the tpl
 * object lock is dropped (TPL_OBJECT_UNLOCK) while blocking in
 * wl_display_dispatch_queue so release events can be processed, then
 * re-taken before returning.
 *
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_OPERATION when the
 * wayland dispatch fails (display error / protocol error).
 */
1144 __tpl_wayland_egl_surface_wait_dequeuable(tpl_surface_t *surface)
1146 tpl_wayland_egl_display_t *wayland_egl_display = NULL;
1147 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
1148 tpl_result_t ret = TPL_ERROR_NONE;
1150 wayland_egl_display = (tpl_wayland_egl_display_t *)
1151 surface->display->backend.data;
1152 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
/* Drain events that already arrived without blocking. */
1154 wl_display_dispatch_queue_pending(wayland_egl_display->wl_dpy,
1155 wayland_egl_display->wl_tbm_event_queue);
1157 if (tbm_surface_queue_can_dequeue(wayland_egl_surface->tbm_queue, 0)) {
1158 return TPL_ERROR_NONE;
1161 TRACE_BEGIN("WAITING FOR DEQUEUEABLE");
1162 TPL_OBJECT_UNLOCK(surface);
1164 /* Dispatching "wayland_egl_display->wl_tbm_event_queue" handles
1165 * wl_buffer_release event, wl_tbm event, and wl_tbm_queue event.
1167 * 1. wl_tbm proxy handles what received below wayland events.
1168 * - buffer_attached_with_id
1169 * - buffer_attached_with_fd
1170 * 2. wl_tbm_queue handles what received below wayland events.
1174 while (tbm_surface_queue_can_dequeue(
1175 wayland_egl_surface->tbm_queue, 0) == 0) {
1176 /* Application sent all buffers to the server. Wait for server response. */
1177 if (wl_display_dispatch_queue(wayland_egl_display->wl_dpy,
1178 wayland_egl_display->wl_tbm_event_queue) == -1) {
1181 strerror_r(errno, buf, sizeof(buf));
1183 TPL_ERR("falied to wl_display_dispatch_queue. error:%d(%s)", errno,
/* On dispatch failure, log protocol-error details if that is the cause. */
1186 dpy_err = wl_display_get_error(wayland_egl_display->wl_dpy);
1187 if (dpy_err == EPROTO) {
1188 const struct wl_interface *err_interface;
1189 uint32_t err_proxy_id, err_code;
1190 err_code = wl_display_get_protocol_error(wayland_egl_display->wl_dpy,
1191 &err_interface, &err_proxy_id);
1192 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
1193 err_interface->name, err_code, err_proxy_id);
1196 ret = TPL_ERROR_INVALID_OPERATION;
1197 TPL_OBJECT_LOCK(surface);
/* Re-acquire the object lock dropped before the blocking wait. */
1204 TPL_OBJECT_LOCK(surface);
/*
 * Return a dequeued-but-unused buffer to the tbm_queue without presenting it.
 * Stops [DEQ]~[ENQ] tracking, drops the internal reference taken at dequeue
 * time (tbm_surface_internal_unref) and calls tbm_surface_queue_cancel_dequeue.
 *
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_PARAMETER for a bad
 * surface/buffer, TPL_ERROR_INVALID_OPERATION when cancel_dequeue fails.
 */
1212 __tpl_wayland_egl_surface_cancel_dequeued_buffer(tpl_surface_t *surface,
1213 tbm_surface_h tbm_surface)
1215 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
1216 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1218 wayland_egl_surface = (tpl_wayland_egl_surface_t *)surface->backend.data;
1219 if (!wayland_egl_surface) {
1220 TPL_ERR("Invalid backend surface. surface(%p) wayland_egl_surface(%p)",
1221 surface, wayland_egl_surface);
1222 return TPL_ERROR_INVALID_PARAMETER;
1225 if (wayland_egl_surface->dequeued_buffers) {
1226 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
1227 /* Stop tracking of this render_done tbm_surface. */
1228 __tpl_list_remove_data(wayland_egl_surface->dequeued_buffers,
1229 (void *)tbm_surface, TPL_FIRST, NULL);
1230 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
1233 if (!tbm_surface_internal_is_valid(tbm_surface)) {
1234 TPL_WARN("Invalid buffer. tbm_surface(%p)", tbm_surface);
1235 return TPL_ERROR_INVALID_PARAMETER;
/* Drop the reference added in dequeue_buffer before giving it back. */
1238 tbm_surface_internal_unref(tbm_surface);
1240 tsq_err = tbm_surface_queue_cancel_dequeue(wayland_egl_surface->tbm_queue,
1242 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1243 TPL_ERR("Failed to cancel dequeue tbm_surface(%p) to tbm_queue(%p)",
1244 tbm_surface, wayland_egl_surface->tbm_queue);
1245 return TPL_ERROR_INVALID_OPERATION;
1248 TPL_LOG_B("WL_EGL", "[CANCEL BUFFER] tpl_surface(%p) tbm_surface(%p)",
1249 surface, tbm_surface);
1251 return TPL_ERROR_NONE;
/*
 * Hand the client a renderable tbm_surface from the surface's tbm_queue.
 *
 * Steps (under wl_event_mutex):
 *  1. Apply a pending wl_egl_window resize by resetting the tbm_queue.
 *  2. Wait until the queue is dequeuable (may dispatch wayland events).
 *  3. Query ACTIVATED/DEACTIVATED from wayland_tbm_client (must happen
 *     between the wait and the dequeue -- see comment below).
 *  4. In frontbuffer mode, return the existing frontbuffer if still valid.
 *  5. Dequeue; either reuse the tpl_wayland_egl_buffer_t already attached to
 *     the tbm_surface or allocate a new one plus its wl_buffer proxy.
 *
 * Returns the dequeued tbm_surface_h, or NULL on failure (wait/dequeue/
 * allocation errors; exact NULL-return lines are not visible in this
 * sampled view). The returned surface is internally ref'd here and
 * unref'd in cancel_dequeue or the release callback.
 *
 * NOTE(review): timeout_ns appears unused in the visible lines -- confirm
 * against the full source.
 */
1254 static tbm_surface_h
1255 __tpl_wayland_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
1258 TPL_ASSERT(surface);
1259 TPL_ASSERT(surface->backend.data);
1260 TPL_ASSERT(surface->display);
1261 TPL_ASSERT(surface->display->backend.data);
1262 TPL_OBJECT_CHECK_RETURN(surface, NULL);
1264 tbm_surface_h tbm_surface = NULL;
1265 tpl_wayland_egl_buffer_t *wayland_egl_buffer = NULL;
1266 tpl_wayland_egl_surface_t *wayland_egl_surface =
1267 (tpl_wayland_egl_surface_t *)surface->backend.data;
1268 tpl_wayland_egl_display_t *wayland_egl_display =
1269 (tpl_wayland_egl_display_t *)surface->display->backend.data;
1270 struct wl_proxy *wl_proxy = NULL;
1271 struct wl_egl_window *wl_egl_window =
1272 (struct wl_egl_window *)surface->native_handle;
1273 struct tizen_private *tizen_private = _get_tizen_private(wl_egl_window);
1274 tbm_surface_queue_error_e tsq_err = 0;
1275 int is_activated = 0;
1278 TPL_ASSERT(tizen_private);
/* Serialize against wayland event handling for the whole dequeue. */
1283 lock_res = pthread_mutex_lock(&wayland_egl_display->wl_event_mutex);
1285 /* Check whether the surface was resized by wayland_egl */
1286 if (wayland_egl_surface->resized == TPL_TRUE) {
1287 int width, height, format;
1288 width = wl_egl_window->width;
1289 height = wl_egl_window->height;
1290 format = tbm_surface_queue_get_format(wayland_egl_surface->tbm_queue);
1292 tbm_surface_queue_reset(wayland_egl_surface->tbm_queue, width, height, format);
1293 surface->width = width;
1294 surface->height = height;
1296 wayland_egl_surface->resized = TPL_FALSE;
1297 wayland_egl_surface->reset = TPL_FALSE;
1300 if (__tpl_wayland_egl_surface_wait_dequeuable(surface)) {
1301 TPL_ERR("Failed to wait dequeueable buffer");
1302 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
1306 /* wayland client can check their states (ACTIVATED or DEACTIVATED) with
1307 * below function [wayland_tbm_client_queue_check_activate()].
1308 * This function has to be called between
1309 * __tpl_wayland_egl_surface_wait_dequeuable and tbm_surface_queue_dequeue()
1310 * in order to know what state the buffer will be dequeued next.
1312 * ACTIVATED state means non-composite mode. Client can get buffers which
1313 can be displayed directly(without compositing).
1314 * DEACTIVATED state means composite mode. Client's buffer will be displayed
1315 by compositor(E20) with compositing.
1317 is_activated = wayland_tbm_client_queue_check_activate(
1318 wayland_egl_display->wl_tbm_client,
1319 wayland_egl_surface->tbm_queue);
1321 if (surface->is_frontbuffer_mode && surface->frontbuffer != NULL) {
1322 /* If surface->frontbuffer is already set in frontbuffer mode,
1323 * it will return that frontbuffer if it is still activated,
1324 * otherwise dequeue the new buffer after initializing
1325 * surface->frontbuffer to NULL. */
1326 if (is_activated && !wayland_egl_surface->reset) {
1328 "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
1329 surface->frontbuffer,
1330 tbm_bo_export(tbm_surface_internal_get_bo(
1331 surface->frontbuffer, 0)));
1332 wayland_egl_buffer =
1333 __tpl_wayland_egl_get_wayland_buffer_from_tbm_surface(
1334 surface->frontbuffer);
1335 if (wayland_egl_buffer)
1336 TRACE_ASYNC_BEGIN((intptr_t)wayland_egl_buffer,
1337 "[DEQ]~[ENQ] BO_NAME:%d",
1338 tbm_bo_export(wayland_egl_buffer->bo));
1339 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
1340 return surface->frontbuffer;
1342 surface->frontbuffer = NULL;
1344 surface->frontbuffer = NULL;
1347 tsq_err = tbm_surface_queue_dequeue(wayland_egl_surface->tbm_queue,
1350 TPL_ERR("Failed to get tbm_surface from tbm_surface_queue | tsq_err = %d",
1352 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
/* Hold an internal ref until cancel_dequeue or the release callback. */
1356 tbm_surface_internal_ref(tbm_surface);
1358 if (surface->is_frontbuffer_mode && is_activated)
1359 surface->frontbuffer = tbm_surface;
/* Reuse path: this tbm_surface already carries a tpl_wayland_egl_buffer_t
 * from a previous cycle; refresh its per-frame state only. */
1361 if ((wayland_egl_buffer =
1362 __tpl_wayland_egl_get_wayland_buffer_from_tbm_surface(tbm_surface)) != NULL) {
1363 TRACE_MARK("[DEQ][REUSED]BO_NAME:%d", tbm_bo_export(wayland_egl_buffer->bo));
1364 TRACE_ASYNC_BEGIN((intptr_t)wayland_egl_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
1365 tbm_bo_export(wayland_egl_buffer->bo));
1367 wayland_egl_buffer->dx = wl_egl_window->dx;
1368 wayland_egl_buffer->dy = wl_egl_window->dy;
1369 wayland_egl_buffer->width = wl_egl_window->width;
1370 wayland_egl_buffer->height = wl_egl_window->height;
1371 wayland_egl_buffer->transform = tizen_private->transform;
/* Window transform changed since last cycle -> must re-send it on commit. */
1373 if (wayland_egl_buffer->window_transform != tizen_private->window_transform) {
1374 wayland_egl_buffer->window_transform = tizen_private->window_transform;
1375 wayland_egl_buffer->w_rotated = TPL_TRUE;
1377 wayland_egl_buffer->w_rotated = TPL_FALSE;
/* An activation-state flip makes the buffer "new" so frontbuffer mode
 * commits it at least once. */
1380 if (wayland_egl_surface->is_activated != is_activated)
1381 wayland_egl_buffer->is_new = TPL_TRUE;
1383 wayland_egl_surface->reset = TPL_FALSE;
1384 wayland_egl_surface->is_activated = is_activated;
1386 if (wayland_egl_surface->dequeued_buffers) {
1387 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
1388 /* Start tracking of this tbm_surface until enqueue */
1389 __tpl_list_push_back(wayland_egl_surface->dequeued_buffers,
1390 (void *)tbm_surface);
1391 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
1395 "[DEQ][R] tpl_wayland_surface_t(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
1396 wayland_egl_surface,
1397 wayland_egl_buffer->wl_proxy,
1398 tbm_surface, tbm_bo_export(wayland_egl_buffer->bo));
1400 "[DEQ] size(%dx%d) transform(%d) w_transform(%d) w_rotated(%s)",
1401 wayland_egl_buffer->width, wayland_egl_buffer->height,
1402 wayland_egl_buffer->transform,
1403 wayland_egl_buffer->window_transform,
1404 wayland_egl_buffer->w_rotated ? "[TRUE]" : "[FALSE]");
1406 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
/* New-buffer path: first time this tbm_surface is dequeued -- create the
 * backend bookkeeping struct and its wl_buffer proxy. */
1410 wayland_egl_buffer = (tpl_wayland_egl_buffer_t *) calloc(1,
1411 sizeof(tpl_wayland_egl_buffer_t));
1412 if (!wayland_egl_buffer) {
1413 TPL_ERR("Mem alloc for wayland_egl_buffer failed!");
1414 tbm_surface_internal_unref(tbm_surface);
1415 tbm_surface_queue_cancel_dequeue(wayland_egl_surface->tbm_queue, tbm_surface);
1416 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
1421 (struct wl_proxy *)wayland_tbm_client_create_buffer(
1422 wayland_egl_display->wl_tbm_client, tbm_surface);
1424 TPL_ERR("Failed to create TBM client buffer!");
1425 tbm_surface_internal_unref(tbm_surface);
1426 tbm_surface_queue_cancel_dequeue(wayland_egl_surface->tbm_queue, tbm_surface);
1427 free(wayland_egl_buffer);
1428 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
/* Get buffer-release notifications from the compositor for this wl_buffer. */
1432 wl_buffer_add_listener((void *)wl_proxy, &buffer_release_listener,
1435 wl_display_flush(wayland_egl_display->wl_dpy);
/* Serial source: explicit per-surface serial (deprecated path) or the
 * monotonically increasing tizen_private counter. */
1437 if (wayland_egl_surface->set_serial_is_used) {
1438 wayland_egl_buffer->serial = wayland_egl_surface->serial;
1440 ++tizen_private->serial;
1441 wayland_egl_buffer->serial = tizen_private->serial;
1444 wayland_egl_buffer->dx = wl_egl_window->dx;
1445 wayland_egl_buffer->dy = wl_egl_window->dy;
1446 wayland_egl_buffer->width = wl_egl_window->width;
1447 wayland_egl_buffer->height = wl_egl_window->height;
1448 wayland_egl_buffer->display = wayland_egl_display;
1449 wayland_egl_buffer->wl_proxy = wl_proxy;
1450 wayland_egl_buffer->bo = tbm_surface_internal_get_bo(tbm_surface, 0);
1451 wayland_egl_buffer->wayland_egl_surface = wayland_egl_surface;
1452 wayland_egl_buffer->transform = tizen_private->transform;
/* -1 sentinel guarantees the comparison below marks w_rotated on the very
 * first frame whenever a window_transform is set. */
1453 wayland_egl_buffer->window_transform = -1;
1455 if (wayland_egl_buffer->window_transform != tizen_private->window_transform) {
1456 wayland_egl_buffer->window_transform = tizen_private->window_transform;
1457 wayland_egl_buffer->w_rotated = TPL_TRUE;
1459 wayland_egl_buffer->w_rotated = TPL_FALSE;
1462 /* 'is_new' flag is to check whether it is a new buffer need to commit
1463 * in frontbuffer mode. */
1464 wayland_egl_buffer->is_new = TPL_TRUE;
1465 wayland_egl_surface->is_activated = is_activated;
1467 wayland_egl_surface->current_buffer = tbm_surface;
1468 wayland_egl_surface->reset = TPL_FALSE;
/* Attach the bookkeeping struct so later dequeues take the reuse path. */
1470 __tpl_wayland_egl_set_wayland_buffer_to_tbm_surface(tbm_surface,
1471 wayland_egl_buffer);
1473 TRACE_MARK("[DEQ][NEW]BO_NAME:%d", tbm_bo_export(wayland_egl_buffer->bo));
1474 TRACE_ASYNC_BEGIN((intptr_t)wayland_egl_buffer, "[DEQ]~[ENQ] BO_NAME:%d",
1475 tbm_bo_export(wayland_egl_buffer->bo));
1477 "[DEQ][N] tpl_wayland_egl_buffer_t(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
1478 wayland_egl_buffer, wayland_egl_buffer->wl_proxy, tbm_surface,
1479 tbm_bo_export(wayland_egl_buffer->bo));
1482 "[DEQ] size(%dx%d) transform(%d) w_transform(%d) w_rotated(%s)",
1483 wayland_egl_buffer->width, wayland_egl_buffer->height,
1484 wayland_egl_buffer->transform,
1485 wayland_egl_buffer->window_transform,
1486 wayland_egl_buffer->w_rotated ? "[TRUE]" : "[FALSE]");
1488 if (wayland_egl_surface->dequeued_buffers) {
1489 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
1490 __tpl_list_push_back(wayland_egl_surface->dequeued_buffers,
1491 (void *)tbm_surface);
1492 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
1495 if (lock_res == 0) pthread_mutex_unlock(&wayland_egl_display->wl_event_mutex);
/*
 * Destroy a tpl_wayland_egl_buffer_t: flush any pending wayland requests,
 * destroy the wl_buffer proxy via wayland_tbm_client, then free the struct.
 * The caller must not use the pointer afterwards.
 */
1500 __tpl_wayland_egl_buffer_free(tpl_wayland_egl_buffer_t *wayland_egl_buffer)
1502 TPL_ASSERT(wayland_egl_buffer);
1503 TPL_ASSERT(wayland_egl_buffer->display);
1505 tpl_wayland_egl_display_t *wayland_egl_display = wayland_egl_buffer->display;
1507 TPL_LOG_B("WL_EGL", "[FREE] tpl_wayland_egl_buffer_t(%p) wl_buffer(%p)",
1508 wayland_egl_buffer, wayland_egl_buffer->wl_proxy);
/* Flush outstanding requests before tearing the proxy down. */
1509 wl_display_flush(wayland_egl_display->wl_dpy);
1511 if (wayland_egl_buffer->wl_proxy)
1512 wayland_tbm_client_destroy_buffer(wayland_egl_display->wl_tbm_client,
1513 (void *)wayland_egl_buffer->wl_proxy);
1515 free(wayland_egl_buffer);
/*
 * Backend-selection probe: TPL_TRUE when the native display handle is a
 * wl_display this backend can drive, TPL_FALSE otherwise (NULL handle or
 * not a wayland display).
 */
1519 __tpl_display_choose_backend_wayland_egl(tpl_handle_t native_dpy)
1521 if (!native_dpy) return TPL_FALSE;
1523 if (__tpl_wayland_egl_display_is_wl_display(native_dpy))
/*
 * Populate the display-backend vtable with this file's wayland-egl
 * implementations. Called once per tpl_display at backend selection time;
 * backend->data stays NULL until __tpl_wayland_egl_display_init runs.
 */
1530 __tpl_display_init_backend_wayland_egl(tpl_display_backend_t *backend)
1532 TPL_ASSERT(backend);
1534 backend->type = TPL_BACKEND_WAYLAND;
1535 backend->data = NULL;
1537 backend->init = __tpl_wayland_egl_display_init;
1538 backend->fini = __tpl_wayland_egl_display_fini;
1539 backend->query_config = __tpl_wayland_egl_display_query_config;
1540 backend->filter_config = __tpl_wayland_egl_display_filter_config;
1541 backend->get_window_info = __tpl_wayland_egl_display_get_window_info;
1542 backend->get_pixmap_info = __tpl_wayland_egl_display_get_pixmap_info;
1543 backend->get_buffer_from_native_pixmap =
1544 __tpl_wayland_egl_display_get_buffer_from_native_pixmap;
/*
 * Populate the surface-backend vtable with this file's wayland-egl
 * implementations (init/fini, validate, dequeue/enqueue/cancel).
 */
1548 __tpl_surface_init_backend_wayland_egl(tpl_surface_backend_t *backend)
1550 TPL_ASSERT(backend);
1552 backend->type = TPL_BACKEND_WAYLAND;
1553 backend->data = NULL;
1555 backend->init = __tpl_wayland_egl_surface_init;
1556 backend->fini = __tpl_wayland_egl_surface_fini;
1557 backend->validate = __tpl_wayland_egl_surface_validate;
1558 backend->cancel_dequeued_buffer =
1559 __tpl_wayland_egl_surface_cancel_dequeued_buffer;
1560 backend->dequeue_buffer = __tpl_wayland_egl_surface_dequeue_buffer;
1561 backend->enqueue_buffer = __tpl_wayland_egl_surface_enqueue_buffer;
/*
 * wl_buffer.release handler: the compositor is done scanning out this
 * buffer, so return it to the tbm_queue.
 *
 * data : the tbm_surface_h registered via wl_buffer_add_listener.
 * proxy: the released wl_buffer.
 *
 * Guarded by g_list_mutex: the proxy must still be present in the global
 * committed_wl_buffers list (i.e. a commit is outstanding); otherwise the
 * event is a duplicate/stale release and is only logged. On the normal path
 * this stops [COMMIT]~[RELEASE_CB] tracking, releases the surface back to
 * the tbm_queue, drops the internal ref taken at dequeue, and removes the
 * proxy from committed_wl_buffers.
 */
1565 __cb_client_buffer_release_callback(void *data, struct wl_proxy *proxy)
1567 tbm_surface_h tbm_surface = NULL;
1568 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1570 if (proxy && (pthread_mutex_lock(&g_list_mutex) == 0)) {
1571 if (committed_wl_buffers) {
1572 /* Look for the given wl_proxy in the global list(committed_wl_buffers),
1573 * whether its release event has not been processed since wl_surface_commit
1574 * with this wl_proxy */
1575 tpl_list_node_t *node =
1576 __tpl_list_find_node(committed_wl_buffers, (void *)proxy,
1579 /* If the proxy can not be found in the committed_wl_buffers list,
1580 * it has not been committed or has already been released.
1581 * In this case, it is not an error, but the log will be printed. */
1583 TPL_ERR("wl_buffer(%p) already has been released.", proxy);
1584 pthread_mutex_unlock(&g_list_mutex);
1590 tbm_surface = (tbm_surface_h) data;
1592 TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE_CB] BO_NAME:%d",
1593 tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
1594 TPL_LOG_B("WL_EGL", "[RELEASE_CB] wl_buffer(%p) tbm_surface(%p) bo(%d)",
1596 tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
1598 if (tbm_surface_internal_is_valid(tbm_surface)) {
1599 tpl_wayland_egl_surface_t *wayland_egl_surface = NULL;
1600 tpl_wayland_egl_buffer_t *wayland_egl_buffer = NULL;
1602 wayland_egl_buffer =
1603 __tpl_wayland_egl_get_wayland_buffer_from_tbm_surface(tbm_surface);
1605 if (wayland_egl_buffer) {
/* need_to_release is set at commit time; a FALSE value means the buffer
 * was already handed back (e.g. by a queue flush). */
1606 if (wayland_egl_buffer->need_to_release) {
1607 wayland_egl_surface = wayland_egl_buffer->wayland_egl_surface;
1609 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
1610 if (wayland_egl_surface->attached_buffers) {
1611 /* Stop tracking of this released tbm_surface. */
1612 __tpl_list_remove_data(wayland_egl_surface->attached_buffers,
1613 (void *)tbm_surface, TPL_FIRST, NULL);
1615 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
1617 tsq_err = tbm_surface_queue_release(wayland_egl_surface->tbm_queue,
1619 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
1620 TPL_ERR("Failed to release tbm_surface(%p) to tbm_queue(%p) tsq_err(%d)",
1621 tbm_surface, wayland_egl_surface->tbm_queue, tsq_err);
1623 wayland_egl_buffer->need_to_release = TPL_FALSE;
/* Drop the internal ref taken when the buffer was dequeued. */
1625 tbm_surface_internal_unref(tbm_surface);
1627 /* This wl_buffer should be removed from committed_wl_buffers list. */
1628 __tpl_list_remove_data(committed_wl_buffers, (void *)proxy,
1632 TPL_WARN("No need to release buffer | wl_buffer(%p) tbm_surface(%p) bo(%d)",
1634 tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0)));
1638 TPL_ERR("Failed to process release_event. Invalid tbm_surface(%p)", tbm_surface);
1641 pthread_mutex_unlock(&g_list_mutex);
/* Listener hooked onto every wl_buffer created in dequeue_buffer; its only
 * event is release, handled above. */
1644 static const struct wl_buffer_listener buffer_release_listener = {
1645 (void *)__cb_client_buffer_release_callback,
/*
 * wl_egl_window destroy hook (installed into tizen_private): severs all links
 * between the native window and the tpl_surface, clears the other tizen
 * callbacks, and frees the tizen_private struct itself. After this runs the
 * surface has no native handle.
 */
1649 __cb_client_window_destroy_callback(void *private)
1651 struct tizen_private *tizen_private = (struct tizen_private *)private;
1652 tpl_surface_t *surface = NULL;
1653 struct wl_egl_window *wl_egl_window = NULL;
1655 if (!tizen_private) {
1656 TPL_WARN("[DESTROY_CB] Already destroyed surface");
1660 surface = (tpl_surface_t *)tizen_private->data;
1662 TPL_LOG_B("WL_EGL", "[DESTROY_CB] wl_egl_window(%p) tpl_surface(%p)",
1663 surface->native_handle, surface);
1664 wl_egl_window = (struct wl_egl_window *)surface->native_handle;
/* Break both directions of the window <-> surface association. */
1666 wl_egl_window->driver_private = NULL;
1667 surface->native_handle = NULL;
1670 tizen_private->set_window_serial_callback = NULL;
1671 tizen_private->rotate_callback = NULL;
1672 tizen_private->get_rotation_capability = NULL;
1673 tizen_private->set_frontbuffer_callback = NULL;
1674 tizen_private->data = NULL;
1675 free(tizen_private);
1676 tizen_private = NULL;
/*
 * wl_egl_window resize hook: compares the window's requested size against
 * the tbm_queue's current size and only flags 'resized'; the actual queue
 * reset happens lazily in dequeue_buffer.
 */
1680 __cb_client_window_resize_callback(struct wl_egl_window *wl_egl_window,
1683 TPL_ASSERT(private);
1684 TPL_ASSERT(wl_egl_window);
1686 int cur_w, cur_h, req_w, req_h;
1687 struct tizen_private *tizen_private = (struct tizen_private *)private;
1688 tpl_surface_t *surface = (tpl_surface_t *)tizen_private->data;
1689 tpl_wayland_egl_surface_t *wayland_egl_surface =
1690 (tpl_wayland_egl_surface_t *)surface->backend.data;
1692 cur_w = tbm_surface_queue_get_width(wayland_egl_surface->tbm_queue);
1693 cur_h = tbm_surface_queue_get_height(wayland_egl_surface->tbm_queue);
1694 req_w = wl_egl_window->width;
1695 req_h = wl_egl_window->height;
1697 TPL_LOG_B("WL_EGL", "[RESIZE_CB] wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1698 wl_egl_window, cur_w, cur_h, req_w, req_h);
1700 /* Check whether the surface was resized by wayland_egl */
1701 if ((req_w != cur_w) || (req_h != cur_h))
1702 wayland_egl_surface->resized = TPL_TRUE;
/*
 * wl_egl_window rotate hook: copies the rotation requested through
 * tizen_private onto the tpl_surface.
 */
1706 __cb_client_window_rotate_callback(struct wl_egl_window *wl_egl_window,
1709 TPL_ASSERT(private);
1710 TPL_ASSERT(wl_egl_window);
1713 struct tizen_private *tizen_private = (struct tizen_private *)private;
1714 tpl_surface_t *surface = (tpl_surface_t *)tizen_private->data;
1716 rotation = tizen_private->rotation;
1718 TPL_LOG_B("WL_EGL", "[ROTATE_CB] wl_egl_window(%p) (%d) -> (%d)",
1719 wl_egl_window, surface->rotation, rotation);
1720 /* Check whether the surface was resized by wayland_egl */
1721 surface->rotation = rotation;
/*
 * wl_egl_window query hook: translate the surface's rotation_capability
 * flag into the WL_EGL_WINDOW_TIZEN_CAPABILITY_* enum expected by
 * wayland-egl-tizen.
 */
1728 int rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1729 TPL_ASSERT(private);
1730 TPL_ASSERT(wl_egl_window);
1731 struct tizen_private *tizen_private = (struct tizen_private *)private;
1732 tpl_surface_t *surface = (tpl_surface_t *)tizen_private->data;
1734 if (TPL_TRUE == surface->rotation_capability)
1735 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1737 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1739 return rotation_capability;
/*
 * wl_egl_window hook: toggle frontbuffer rendering mode on the tpl_surface
 * (set != 0 enables it; the dequeue/enqueue paths then reuse one buffer).
 */
1743 __cb_client_window_set_frontbuffer_mode(struct wl_egl_window *wl_egl_window,
1744 void *private, int set)
1746 TPL_ASSERT(private);
1747 TPL_ASSERT(wl_egl_window);
1748 struct tizen_private *tizen_private = (struct tizen_private *)private;
1749 tpl_surface_t *surface = (tpl_surface_t *)tizen_private->data;
1752 surface->is_frontbuffer_mode = TPL_TRUE;
1754 surface->is_frontbuffer_mode = TPL_FALSE;
/*
 * wl_egl_window hook (deprecated path, see set_serial_is_used): let the app
 * pin the serial assigned to subsequently dequeued buffers instead of using
 * the auto-incrementing tizen_private counter.
 */
1758 __cb_client_window_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1759 void *private, unsigned int serial)
1761 TPL_ASSERT(private);
1762 TPL_ASSERT(wl_egl_window);
1764 struct tizen_private *tizen_private = (struct tizen_private *)private;
1765 tpl_surface_t *surface = (tpl_surface_t *)tizen_private->data;
1767 TPL_ASSERT(surface->backend.data);
1769 tpl_wayland_egl_surface_t *wayland_egl_surface =
1770 (tpl_wayland_egl_surface_t *)surface->backend.data;
1772 wayland_egl_surface->set_serial_is_used = TPL_TRUE;
1773 wayland_egl_surface->serial = serial;
1777 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
1779 #if TIZEN_FEATURE_ENABLE
/*
 * wl_registry.global handler: binds the tizen_surface_shm global (used for
 * compositor-initiated buffer flushes) at min(advertised version,
 * IMPL_TIZEN_SURFACE_SHM_VERSION). All other globals are ignored.
 */
1781 __cb_resistry_global_callback(void *data, struct wl_registry *wl_registry,
1782 uint32_t name, const char *interface,
1785 tpl_wayland_egl_display_t *wayland_egl_display = data;
1787 if (!strcmp(interface, "tizen_surface_shm")) {
1788 wayland_egl_display->tizen_surface_shm =
1789 wl_registry_bind(wl_registry,
1791 &tizen_surface_shm_interface,
1792 ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
1793 version : IMPL_TIZEN_SURFACE_SHM_VERSION));
/* wl_registry.global_remove handler: intentionally a no-op; required only to
 * fill the second slot of wl_registry_listener. */
1799 __cb_resistry_global_remove_callback(void *data,
1800 struct wl_registry *wl_registry,
/* Registry listener used only during buffer_flusher_init's roundtrip. */
1805 static const struct wl_registry_listener registry_listener = {
1806 __cb_resistry_global_callback,
1807 __cb_resistry_global_remove_callback
/*
 * Discover and bind the tizen_surface_shm global for this display.
 *
 * Uses a private event queue plus a wl_display proxy wrapper so the
 * registry roundtrip cannot steal events belonging to the application's
 * default queue. The wrapper and the registry are temporary; after the
 * roundtrip the bound tizen_surface_shm proxy is moved onto the client's
 * default queue and the temporaries are destroyed.
 *
 * Best-effort: if the compositor does not advertise tizen_surface_shm,
 * wayland_egl_display->tizen_surface_shm simply stays NULL.
 */
1811 __tpl_wayland_egl_display_buffer_flusher_init(
1812 tpl_wayland_egl_display_t *wayland_egl_display)
1814 struct wl_registry *registry = NULL;
1815 struct wl_event_queue *queue = NULL;
1816 struct wl_display *display_wrapper = NULL;
1819 queue = wl_display_create_queue(wayland_egl_display->wl_dpy);
1821 TPL_ERR("Failed to create wl_queue");
1825 display_wrapper = wl_proxy_create_wrapper(wayland_egl_display->wl_dpy);
1826 if (!display_wrapper) {
1827 TPL_ERR("Failed to create a proxy wrapper of wl_display");
/* Route the registry (created from the wrapper) onto our private queue. */
1831 wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
1833 registry = wl_display_get_registry(display_wrapper);
1835 TPL_ERR("Failed to create wl_registry");
/* The wrapper is only needed to create the registry; drop it early. */
1839 wl_proxy_wrapper_destroy(display_wrapper);
1840 display_wrapper = NULL;
1842 if (wl_registry_add_listener(registry, &registry_listener,
1843 wayland_egl_display)) {
1844 TPL_ERR("Failed to wl_registry_add_listener");
/* Roundtrip delivers the global events, triggering the bind above. */
1848 ret = wl_display_roundtrip_queue(wayland_egl_display->wl_dpy, queue);
1850 TPL_ERR("Failed to wl_display_roundtrip_queue ret:%d, err:%d", ret, errno);
1854 /* set tizen_surface_shm's queue as client's default queue */
1855 if (wayland_egl_display->tizen_surface_shm)
1856 wl_proxy_set_queue((struct wl_proxy *)wayland_egl_display->tizen_surface_shm,
/* Common cleanup for success and failure paths. */
1860 if (display_wrapper)
1861 wl_proxy_wrapper_destroy(display_wrapper);
1863 wl_registry_destroy(registry);
1865 wl_event_queue_destroy(queue);
/*
 * Counterpart of buffer_flusher_init: destroy the tizen_surface_shm proxy
 * if it was bound and clear the pointer. Safe to call when it was never
 * bound.
 */
1869 __tpl_wayland_egl_display_buffer_flusher_fini(
1870 tpl_wayland_egl_display_t *wayland_egl_display)
1872 if (wayland_egl_display->tizen_surface_shm) {
1873 tizen_surface_shm_destroy(wayland_egl_display->tizen_surface_shm);
1874 wayland_egl_display->tizen_surface_shm = NULL;
/*
 * tizen_surface_shm_flusher.flush handler: the compositor asked this surface
 * to give back its buffers (e.g. to reclaim memory). Flushes the tbm_queue,
 * then force-releases every buffer still tracked in attached_buffers --
 * ending their [COMMIT]~[RELEASE_CB] trace spans, removing their proxies
 * from the global committed_wl_buffers list, unref'ing and releasing each
 * one -- since their wl_buffer.release events will never be honored after
 * the flush. Holds g_list_mutex and the surface object lock throughout.
 */
1878 static void __cb_tizen_surface_shm_flusher_flush_callback(void *data,
1879 struct tizen_surface_shm_flusher *tizen_surface_shm_flusher)
1881 tpl_surface_t *surface = data;
1882 tpl_wayland_egl_surface_t *wayland_egl_surface;
1884 TPL_CHECK_ON_NULL_RETURN(surface);
1885 wayland_egl_surface = surface->backend.data;
1886 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface);
1887 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface->tbm_queue);
1889 TPL_LOG_B("WL_EGL", "[FLUSH_CB] tpl_wayland_egl_surface_t(%p)",
1890 wayland_egl_surface);
1892 tbm_surface_queue_flush(wayland_egl_surface->tbm_queue);
1894 /* Only when client call tpl_surface_dequeue_buffer(), client can do
1895 * unreference tbm_surface although there are release events in the event queue,
1896 * After tbm_surface_queue_flush, queue has no tbm_surface, client can do
1897 * unreference attached buffers using the list of attached_buffers.
1898 * Then, client does not need to wait for release_callback to unreference
1902 if (pthread_mutex_lock(&g_list_mutex) == 0) {
1903 TPL_OBJECT_LOCK(&wayland_egl_surface->base);
1904 if (wayland_egl_surface->attached_buffers) {
1905 while (!__tpl_list_is_empty(wayland_egl_surface->attached_buffers)) {
1906 tbm_surface_queue_error_e tsq_err;
1907 tbm_surface_h tbm_surface =
1908 __tpl_list_pop_front(wayland_egl_surface->attached_buffers, NULL);
1909 tpl_wayland_egl_buffer_t *wayland_egl_buffer =
1910 __tpl_wayland_egl_get_wayland_buffer_from_tbm_surface(tbm_surface);
1912 TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE_CB] BO_NAME:%d",
1913 tbm_bo_export(tbm_surface_internal_get_bo(
/* The release event will never arrive; also forget the committed proxy. */
1916 if (wayland_egl_buffer) {
1917 __tpl_list_remove_data(committed_wl_buffers, (void *)wayland_egl_buffer->wl_proxy,
1921 tbm_surface_internal_unref(tbm_surface);
1922 tsq_err = tbm_surface_queue_release(wayland_egl_surface->tbm_queue, tbm_surface);
1923 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
1924 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
1925 tbm_surface, tsq_err);
1928 TPL_OBJECT_UNLOCK(&wayland_egl_surface->base);
1929 pthread_mutex_unlock(&g_list_mutex);
/*
 * tizen_surface_shm_flusher.free_flush handler: lighter variant of the
 * flush above -- only frees the queue's unused buffers via
 * tbm_surface_queue_free_flush; buffers in flight are untouched.
 */
1933 static void __cb_tizen_surface_shm_flusher_free_flush_callback(void *data,
1934 struct tizen_surface_shm_flusher *tizen_surface_shm_flusher)
1936 tpl_surface_t *surface = data;
1937 tpl_wayland_egl_surface_t *wayland_egl_surface;
1939 TPL_CHECK_ON_NULL_RETURN(surface);
1940 wayland_egl_surface = surface->backend.data;
1941 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface);
1942 TPL_CHECK_ON_NULL_RETURN(wayland_egl_surface->tbm_queue);
1944 TPL_LOG_B("WL_EGL", "[FLUSH_CB] tpl_wayland_egl_surface_t(%p)",
1945 wayland_egl_surface);
1947 tbm_surface_queue_free_flush(wayland_egl_surface->tbm_queue);
/* Event table for the per-surface flusher: flush + free_flush handlers. */
1950 static const struct tizen_surface_shm_flusher_listener
1951 tizen_surface_shm_flusher_listener = {
1952 __cb_tizen_surface_shm_flusher_flush_callback,
1953 __cb_tizen_surface_shm_flusher_free_flush_callback
/*
 * Create the per-surface tizen_surface_shm_flusher for this wl_surface and
 * hook up the flush/free_flush listener. No-op when the display-level
 * tizen_surface_shm global was not bound.
 */
1957 __tpl_wayland_egl_surface_buffer_flusher_init(tpl_surface_t *surface)
1959 tpl_wayland_egl_display_t *wayland_egl_display = surface->display->backend.data;
1960 tpl_wayland_egl_surface_t *wayland_egl_surface = surface->backend.data;
1961 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)
1962 surface->native_handle;
1964 if (!wayland_egl_display->tizen_surface_shm)
1967 wayland_egl_surface->tizen_surface_shm_flusher =
1968 tizen_surface_shm_get_flusher(wayland_egl_display->tizen_surface_shm,
1969 wl_egl_window->surface);
1970 tizen_surface_shm_flusher_add_listener(
1971 wayland_egl_surface->tizen_surface_shm_flusher,
1972 &tizen_surface_shm_flusher_listener, surface);
1976 __tpl_wayland_egl_surface_buffer_flusher_fini(tpl_surface_t *surface)
1978 tpl_wayland_egl_surface_t *wayland_egl_surface = surface->backend.data;
1980 if (wayland_egl_surface->tizen_surface_shm_flusher) {
1981 tizen_surface_shm_flusher_destroy(
1982 wayland_egl_surface->tizen_surface_shm_flusher);
1983 wayland_egl_surface->tizen_surface_shm_flusher = NULL;