2 #include "tpl_internal.h"
7 #include <sys/eventfd.h>
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
19 #include <tdm_client.h>
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
24 #include <tizen-surface-client-protocol.h>
25 #include <presentation-time-client-protocol.h>
26 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
28 #include "tpl_utils_gthread.h"
30 static int wl_egl_buffer_key;
31 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
33 /* In wayland, application and compositor create its own drawing buffers. Recommend size is more than 2. */
34 #define CLIENT_QUEUE_SIZE 3
35 #define BUFFER_ARRAY_SIZE (CLIENT_QUEUE_SIZE * 2)
37 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
38 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
39 typedef struct _tpl_wl_egl_buffer tpl_wl_egl_buffer_t;
/* Backend-private state for one wl_display connection, owned and serviced
 * by the dedicated wl_egl thread.
 * NOTE(review): this extract elides lines; the struct's closing brace is
 * not visible in this view. */
41 struct _tpl_wl_egl_display {
42 tpl_gsource *disp_source;
44 tpl_gmutex wl_event_mutex;
46 struct wl_display *wl_display;
47 struct wl_event_queue *ev_queue; /* private event queue for backend-managed proxies */
48 struct wayland_tbm_client *wl_tbm_client;
49 int last_error; /* errno of the last wl_display error*/
51 tpl_bool_t wl_initialized;
52 tpl_bool_t tdm_initialized;
54 tdm_client *tdm_client;
55 tpl_gsource *tdm_source; /* gsource watching the tdm_client fd */
58 tpl_bool_t use_wait_vblank;
59 tpl_bool_t use_explicit_sync;
62 struct tizen_surface_shm *tss; /* used for surface buffer_flush */
63 struct wp_presentation *presentation; /* for presentation feedback */
64 struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
/* Backend-private state for one EGL window surface (one wl_egl_window).
 * NOTE(review): interior lines and the closing brace are elided in this view. */
67 struct _tpl_wl_egl_surface {
68 tpl_gsource *surf_source;
70 tbm_surface_queue_h tbm_queue;
72 struct wl_egl_window *wl_egl_window;
73 struct wl_surface *wl_surface;
74 struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
75 struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
77 tdm_client_vblank *vblank;
79 /* surface information */
90 tpl_wl_egl_display_t *wl_egl_display; /* owning display (not owned here) */
91 tpl_surface_t *tpl_surface;
93 /* wl_egl_buffer array for buffer tracing */
94 tpl_wl_egl_buffer_t *buffers[BUFFER_ARRAY_SIZE];
95 int buffer_cnt; /* the number of using wl_egl_buffers */
96 tpl_gmutex buffers_mutex; /* guards buffers[] and buffer_cnt */
98 tpl_list_t *vblank_waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
99 tpl_list_t *presentation_feedbacks; /* for tracing presentation feedbacks */
111 tpl_gmutex surf_mutex;
114 /* for waiting draw done */
115 tpl_bool_t use_render_done_fence;
116 tpl_bool_t is_activated;
117 tpl_bool_t reset; /* TRUE if queue reseted by external */
118 tpl_bool_t need_to_enqueue;
119 tpl_bool_t prerotation_capability;
120 tpl_bool_t vblank_done;
121 tpl_bool_t set_serial_is_used;
124 typedef enum buffer_status {
/* Per-buffer bookkeeping wrapped around a tbm_surface while it cycles
 * through the client queue and the compositor.
 * NOTE(review): interior lines and the closing brace are elided in this view. */
134 struct _tpl_wl_egl_buffer {
135 tbm_surface_h tbm_surface;
138 struct wl_proxy *wl_buffer;
139 int dx, dy; /* position to attach to wl_surface */
140 int width, height; /* size to attach to wl_surface */
142 buffer_status_t status; /* for tracing buffer status */
143 int idx; /* position index in buffers array of wl_egl_surface */
145 /* for damage region */
149 /* for wayland_tbm_client_set_buffer_transform */
151 tpl_bool_t w_rotated;
153 /* for wl_surface_set_buffer_transform */
156 /* for wayland_tbm_client_set_buffer_serial */
159 /* for checking need_to_commit (frontbuffer mode) */
160 tpl_bool_t need_to_commit;
162 /* for checking draw done */
163 tpl_bool_t draw_done;
166 /* to get release event via zwp_linux_buffer_release_v1 */
167 struct zwp_linux_buffer_release_v1 *buffer_release;
169 /* each buffers own its release_fence_fd, until it passes ownership
171 int32_t release_fence_fd;
173 /* each buffers own its acquire_fence_fd.
174 * If it use zwp_linux_buffer_release_v1 the ownership of this fd
175 * will be passed to display server
176 * Otherwise it will be used as a fence waiting for render done
178 int32_t acquire_fence_fd;
180 /* Fd to send a signal when wl_surface_commit with this buffer */
181 int32_t commit_sync_fd;
183 /* Fd to send a siganl when receive the
184 * presentation feedback from display server */
185 int32_t presentation_sync_fd;
187 tpl_gsource *waiting_source;
192 tpl_wl_egl_surface_t *wl_egl_surface; /* back-pointer to owning surface */
/* Tracks one outstanding wp_presentation feedback request.
 * NOTE(review): most members of this struct are elided in this view. */
195 struct pst_feedback {
196 /* to get presentation feedback from display server */
197 struct wp_presentation_feedback *presentation_feedback;
202 tpl_wl_egl_surface_t *wl_egl_surface;
207 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
209 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
211 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
212 static tpl_wl_egl_buffer_t *
213 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
215 _write_to_eventfd(int eventfd);
217 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
219 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
221 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
222 tpl_wl_egl_buffer_t *wl_egl_buffer);
/* Returns whether the native display handle is a wl_display: the first
 * pointer-sized value of a wl_display is its wl_interface, so compare it
 * against wl_display_interface by address, then by interface name.
 * NOTE(review): the return statements are elided in this view. */
225 _check_native_handle_is_wl_display(tpl_handle_t display)
227 struct wl_interface *wl_egl_native_dpy = *(void **) display;
229 if (!wl_egl_native_dpy) {
230 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
234 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
235 is a memory address pointing the structure of wl_display_interface. */
236 if (wl_egl_native_dpy == &wl_display_interface)
/* Fallback: some loaders copy the interface struct, so compare by name too. */
239 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
240 strlen(wl_display_interface.name)) == 0) {
/* gsource dispatch for the tdm_client fd: pump pending TDM events on the
 * wl_egl thread. On unrecoverable failure the source removes itself. */
248 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
250 tpl_wl_egl_display_t *wl_egl_display = NULL;
251 tdm_error tdm_err = TDM_ERROR_NONE;
255 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
256 if (!wl_egl_display) {
257 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
258 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
262 tdm_err = tdm_client_handle_events(wl_egl_display->tdm_client);
264 /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
265 * When tdm_source is no longer available due to an unexpected situation,
266 * wl_egl_thread must remove it from the thread and destroy it.
267 * In that case, tdm_vblank can no longer be used for surfaces and displays
268 * that used this tdm_source. */
269 if (tdm_err != TDM_ERROR_NONE) {
270 TPL_ERR("Error occured in tdm_client_handle_events. tdm_err(%d)",
272 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
/* TPL_FALSE: destroy without waiting — we are on the owning thread already. */
274 tpl_gsource_destroy(gsource, TPL_FALSE);
276 wl_egl_display->tdm_source = NULL;
/* gsource finalize for the tdm source: destroy the tdm_client and mark
 * the display's TDM state uninitialized. */
285 __thread_func_tdm_finalize(tpl_gsource *gsource)
287 tpl_wl_egl_display_t *wl_egl_display = NULL;
289 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
291 TPL_LOG_T("WL_EGL", "tdm_destroy| wl_egl_display(%p) tdm_client(%p)",
292 wl_egl_display, wl_egl_display->tdm_client);
294 if (wl_egl_display->tdm_client) {
295 tdm_client_destroy(wl_egl_display->tdm_client);
296 wl_egl_display->tdm_client = NULL;
297 wl_egl_display->tdm_display_fd = -1;
300 wl_egl_display->tdm_initialized = TPL_FALSE;
/* gsource vtable for the tdm_client fd (no prepare/check needed:
 * readiness comes from fd polling alone). */
303 static tpl_gsource_functions tdm_funcs = {
306 .dispatch = __thread_func_tdm_dispatch,
307 .finalize = __thread_func_tdm_finalize,
/* Create a tdm_client and stash its fd on wl_egl_display; runs on the
 * wl_egl thread. The tdm gsource itself is created later by the caller.
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_OPERATION otherwise. */
311 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
313 tdm_client *tdm_client = NULL;
314 int tdm_display_fd = -1;
315 tdm_error tdm_err = TDM_ERROR_NONE;
317 tdm_client = tdm_client_create(&tdm_err);
318 if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
319 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
320 return TPL_ERROR_INVALID_OPERATION;
323 tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
324 if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
325 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
326 tdm_client_destroy(tdm_client); /* avoid leaking the client on failure */
327 return TPL_ERROR_INVALID_OPERATION;
330 wl_egl_display->tdm_display_fd = tdm_display_fd;
331 wl_egl_display->tdm_client = tdm_client;
332 wl_egl_display->tdm_source = NULL;
333 wl_egl_display->tdm_initialized = TPL_TRUE;
335 TPL_INFO("[TDM_CLIENT_INIT]",
336 "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
337 wl_egl_display, tdm_client, tdm_display_fd);
339 return TPL_ERROR_NONE;
342 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
/* wl_registry global handler: bind the optional protocol extensions this
 * backend uses (tizen_surface_shm, wp_presentation, and — gated by the
 * TPL_EFS env var — zwp_linux_explicit_synchronization_v1).
 * NOTE(review): "resistry" is a pre-existing typo in the identifier; it is
 * used consistently, so renaming is out of scope here. */
345 __cb_wl_resistry_global_callback(void *data, struct wl_registry *wl_registry,
346 uint32_t name, const char *interface,
349 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
351 if (!strcmp(interface, "tizen_surface_shm")) {
/* Bind at most IMPL_TIZEN_SURFACE_SHM_VERSION, clamped to what the server offers. */
352 wl_egl_display->tss = wl_registry_bind(wl_registry,
354 &tizen_surface_shm_interface,
355 ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
356 version : IMPL_TIZEN_SURFACE_SHM_VERSION));
357 } else if (!strcmp(interface, wp_presentation_interface.name)) {
358 wl_egl_display->presentation =
359 wl_registry_bind(wl_registry,
360 name, &wp_presentation_interface, 1);
361 TPL_DEBUG("bind wp_presentation_interface");
362 } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
/* Explicit sync is opt-in: only bound when TPL_EFS is set to a non-zero value. */
363 char *env = tpl_getenv("TPL_EFS");
364 if (env && atoi(env)) {
365 wl_egl_display->explicit_sync =
366 wl_registry_bind(wl_registry, name,
367 &zwp_linux_explicit_synchronization_v1_interface, 1);
368 wl_egl_display->use_explicit_sync = TPL_TRUE;
369 TPL_DEBUG("bind zwp_linux_explicit_synchronization_v1_interface");
371 wl_egl_display->use_explicit_sync = TPL_FALSE;
/* wl_registry global_remove handler: intentionally a no-op — the bound
 * globals are torn down in _thread_wl_display_fini instead. */
377 __cb_wl_resistry_global_remove_callback(void *data,
378 struct wl_registry *wl_registry,
/* Listener pairing the global/global_remove registry callbacks above. */
383 static const struct wl_registry_listener registry_listener = {
384 __cb_wl_resistry_global_callback,
385 __cb_wl_resistry_global_remove_callback
/* Log a wl_display failure for func_name once per distinct errno
 * (suppresses repeats via last_error). For EPROTO, also log the
 * offending interface, error code and proxy id. */
389 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
390 const char *func_name)
394 strerror_r(errno, buf, sizeof(buf));
/* Same errno as last time: already reported, skip duplicate logging. */
396 if (wl_egl_display->last_error == errno)
399 TPL_ERR("falied to %s. error:%d(%s)", func_name, errno, buf);
401 dpy_err = wl_display_get_error(wl_egl_display->wl_display);
402 if (dpy_err == EPROTO) {
403 const struct wl_interface *err_interface;
404 uint32_t err_proxy_id, err_code;
405 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
408 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
409 err_interface->name, err_code, err_proxy_id);
/* Remember this errno so subsequent identical failures are not re-logged. */
412 wl_egl_display->last_error = errno;
/* Thread-side wayland init: create the backend's private event queue,
 * bind registry globals via a temporary queue + display wrapper, init
 * wayland-tbm, and move all bound proxies onto ev_queue. The temporary
 * registry/queue/wrapper are released on both success and error paths. */
416 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
418 struct wl_registry *registry = NULL;
419 struct wl_event_queue *queue = NULL; /* temporary queue, used only for the registry roundtrip */
420 struct wl_display *display_wrapper = NULL;
421 struct wl_proxy *wl_tbm = NULL;
422 struct wayland_tbm_client *wl_tbm_client = NULL;
424 tpl_result_t result = TPL_ERROR_NONE;
426 queue = wl_display_create_queue(wl_egl_display->wl_display);
428 TPL_ERR("Failed to create wl_queue wl_display(%p)",
429 wl_egl_display->wl_display);
430 result = TPL_ERROR_INVALID_OPERATION;
434 wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
435 if (!wl_egl_display->ev_queue) {
436 TPL_ERR("Failed to create wl_queue wl_display(%p)",
437 wl_egl_display->wl_display);
438 result = TPL_ERROR_INVALID_OPERATION;
/* Wrap the display so the registry can be created on the temporary queue
 * without redirecting the application's own display events. */
442 display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
443 if (!display_wrapper) {
444 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
445 wl_egl_display->wl_display);
446 result = TPL_ERROR_INVALID_OPERATION;
450 wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
452 registry = wl_display_get_registry(display_wrapper);
454 TPL_ERR("Failed to create wl_registry");
455 result = TPL_ERROR_INVALID_OPERATION;
/* The wrapper is only needed to create the registry; drop it now. */
459 wl_proxy_wrapper_destroy(display_wrapper);
460 display_wrapper = NULL;
462 wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
463 if (!wl_tbm_client) {
464 TPL_ERR("Failed to initialize wl_tbm_client.");
465 result = TPL_ERROR_INVALID_CONNECTION;
469 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
471 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
472 result = TPL_ERROR_INVALID_CONNECTION;
476 wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
477 wl_egl_display->wl_tbm_client = wl_tbm_client;
/* NOTE(review): "®istry_listener" below looks like mis-encoded
 * "&registry_listener" — confirm against the upstream file and repair
 * the encoding; left byte-identical here. */
479 if (wl_registry_add_listener(registry, ®istry_listener,
481 TPL_ERR("Failed to wl_registry_add_listener");
482 result = TPL_ERROR_INVALID_OPERATION;
/* Roundtrip on the temporary queue so all global callbacks fire before
 * we re-home the bound proxies below. */
486 ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
488 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
489 result = TPL_ERROR_INVALID_OPERATION;
493 /* set tizen_surface_shm's queue as client's private queue */
494 if (wl_egl_display->tss) {
495 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
496 wl_egl_display->ev_queue);
497 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
500 if (wl_egl_display->presentation) {
501 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
502 wl_egl_display->ev_queue);
503 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
504 wl_egl_display->presentation);
507 if (wl_egl_display->explicit_sync) {
508 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
509 wl_egl_display->ev_queue);
510 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
511 wl_egl_display->explicit_sync);
514 wl_egl_display->wl_initialized = TPL_TRUE;
516 TPL_INFO("[WAYLAND_INIT]",
517 "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
518 wl_egl_display, wl_egl_display->wl_display,
519 wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
520 TPL_INFO("[WAYLAND_INIT]",
521 "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
522 wl_egl_display->tss, wl_egl_display->presentation,
523 wl_egl_display->explicit_sync);
/* Shared cleanup tail (reached on success and error paths alike). */
527 wl_proxy_wrapper_destroy(display_wrapper);
529 wl_registry_destroy(registry);
531 wl_event_queue_destroy(queue);
/* Thread-side wayland teardown: cancel any pending read, flush remaining
 * events, then destroy the bound globals, wayland-tbm client, and the
 * private event queue (mirror of _thread_wl_display_init). */
537 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
539 /* If wl_egl_display is in prepared state, cancel it */
540 if (wl_egl_display->prepared) {
541 wl_display_cancel_read(wl_egl_display->wl_display);
542 wl_egl_display->prepared = TPL_FALSE;
/* Drain events already queued so their handlers run before teardown. */
545 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
546 wl_egl_display->ev_queue) == -1) {
547 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
550 if (wl_egl_display->tss) {
551 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
552 "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
553 wl_egl_display, wl_egl_display->tss);
554 tizen_surface_shm_destroy(wl_egl_display->tss);
555 wl_egl_display->tss = NULL;
558 if (wl_egl_display->presentation) {
559 TPL_INFO("[WP_PRESENTATION_DESTROY]",
560 "wl_egl_display(%p) wp_presentation(%p) fini.",
561 wl_egl_display, wl_egl_display->presentation);
562 wp_presentation_destroy(wl_egl_display->presentation);
563 wl_egl_display->presentation = NULL;
566 if (wl_egl_display->explicit_sync) {
567 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
568 "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
569 wl_egl_display, wl_egl_display->explicit_sync);
570 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
571 wl_egl_display->explicit_sync = NULL;
574 if (wl_egl_display->wl_tbm_client) {
575 struct wl_proxy *wl_tbm = NULL;
577 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
578 wl_egl_display->wl_tbm_client);
/* Detach wl_tbm from ev_queue before that queue is destroyed below. */
580 wl_proxy_set_queue(wl_tbm, NULL);
583 TPL_INFO("[WL_TBM_DEINIT]",
584 "wl_egl_display(%p) wl_tbm_client(%p)",
585 wl_egl_display, wl_egl_display->wl_tbm_client);
586 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
587 wl_egl_display->wl_tbm_client = NULL;
590 wl_event_queue_destroy(wl_egl_display->ev_queue);
592 wl_egl_display->wl_initialized = TPL_FALSE;
594 TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
595 wl_egl_display, wl_egl_display->wl_display);
/* Entry point run on the freshly created wl_egl thread: initialize the
 * wayland side, then best-effort TDM (vblank waiting is simply disabled
 * if tdm-client init fails). Returns the display as the thread's data. */
599 _thread_init(void *data)
601 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
603 if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
604 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
605 wl_egl_display, wl_egl_display->wl_display);
608 if (_thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
/* Non-fatal: surfaces fall back to running without vblank waits. */
609 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VLANK:DISABLED");
612 return wl_egl_display;
/* gsource prepare: arm wl_display_prepare_read_queue for ev_queue,
 * dispatching any already-queued events first (the standard wayland
 * prepare-read loop), then flush outgoing requests. */
616 __thread_func_disp_prepare(tpl_gsource *gsource)
618 tpl_wl_egl_display_t *wl_egl_display =
619 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
621 /* If this wl_egl_display is already prepared,
622 * do nothing in this function. */
623 if (wl_egl_display->prepared)
626 /* If there is a last_error, there is no need to poll,
627 * so skip directly to dispatch.
628 * prepare -> dispatch */
629 if (wl_egl_display->last_error)
/* prepare_read_queue fails while events are pending; dispatch them and retry. */
632 while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
633 wl_egl_display->ev_queue) != 0) {
634 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
635 wl_egl_display->ev_queue) == -1) {
636 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
640 wl_egl_display->prepared = TPL_TRUE;
642 wl_display_flush(wl_egl_display->wl_display);
/* gsource check: complete the prepare-read handshake — read events if
 * the fd is readable, otherwise cancel the read. Always clears the
 * prepared flag before returning. */
648 __thread_func_disp_check(tpl_gsource *gsource)
650 tpl_wl_egl_display_t *wl_egl_display =
651 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
652 tpl_bool_t ret = TPL_FALSE;
654 if (!wl_egl_display->prepared)
657 /* If prepared, but last_error is set,
658 * cancel_read is executed and FALSE is returned.
659 * That can lead to G_SOURCE_REMOVE by calling disp_prepare again
660 * and skipping disp_check from prepare to disp_dispatch.
661 * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
662 if (wl_egl_display->prepared && wl_egl_display->last_error) {
663 wl_display_cancel_read(wl_egl_display->wl_display);
667 if (tpl_gsource_check_io_condition(gsource)) {
668 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
669 _wl_display_print_err(wl_egl_display, "read_event");
/* fd not readable: the pending read must be explicitly cancelled. */
672 wl_display_cancel_read(wl_egl_display->wl_display);
676 wl_egl_display->prepared = TPL_FALSE;
/* gsource dispatch: deliver events read in check to ev_queue handlers,
 * under wl_event_mutex so other threads touching wayland state are
 * serialized; then flush outgoing requests. */
682 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
684 tpl_wl_egl_display_t *wl_egl_display =
685 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
689 /* If there is last_error, SOURCE_REMOVE should be returned
690 * to remove the gsource from the main loop.
691 * This is because wl_egl_display is not valid since last_error was set.*/
692 if (wl_egl_display->last_error) {
696 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
697 if (tpl_gsource_check_io_condition(gsource)) {
698 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
699 wl_egl_display->ev_queue) == -1) {
700 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
704 wl_display_flush(wl_egl_display->wl_display);
705 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* gsource finalize: tear down the wayland-side state if it was brought up. */
711 __thread_func_disp_finalize(tpl_gsource *gsource)
713 tpl_wl_egl_display_t *wl_egl_display =
714 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
716 if (wl_egl_display->wl_initialized)
717 _thread_wl_display_fini(wl_egl_display);
719 TPL_LOG_T("WL_EGL", "finalize| wl_egl_display(%p) tpl_gsource(%p)",
720 wl_egl_display, gsource);
/* gsource vtable for the wl_display fd (full prepare/check/dispatch cycle,
 * matching wayland's prepare-read protocol). */
726 static tpl_gsource_functions disp_funcs = {
727 .prepare = __thread_func_disp_prepare,
728 .check = __thread_func_disp_check,
729 .dispatch = __thread_func_disp_dispatch,
730 .finalize = __thread_func_disp_finalize,
/* Backend entry: validate the native wl_display, allocate wl_egl_display,
 * spawn the wl_egl thread (which runs _thread_init), and attach the
 * display and tdm gsources. On any failure after thread creation, the
 * sources/thread/allocation are unwound in the error tail. */
734 __tpl_wl_egl_display_init(tpl_display_t *display)
736 tpl_wl_egl_display_t *wl_egl_display = NULL;
740 /* Do not allow default display in wayland. */
741 if (!display->native_handle) {
742 TPL_ERR("Invalid native handle for display.");
743 return TPL_ERROR_INVALID_PARAMETER;
746 if (!_check_native_handle_is_wl_display(display->native_handle)) {
747 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
748 return TPL_ERROR_INVALID_PARAMETER;
751 wl_egl_display = (tpl_wl_egl_display_t *) calloc(1,
752 sizeof(tpl_wl_egl_display_t));
753 if (!wl_egl_display) {
754 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
755 return TPL_ERROR_OUT_OF_MEMORY;
758 display->backend.data = wl_egl_display;
759 display->bufmgr_fd = -1;
/* Explicit field init (calloc already zeroed, but keep intent visible). */
761 wl_egl_display->tdm_initialized = TPL_FALSE;
762 wl_egl_display->wl_initialized = TPL_FALSE;
764 wl_egl_display->ev_queue = NULL;
765 wl_egl_display->wl_display = (struct wl_display *)display->native_handle;
766 wl_egl_display->last_error = 0;
767 wl_egl_display->use_explicit_sync = TPL_FALSE; // default disabled
768 wl_egl_display->prepared = TPL_FALSE;
770 /* Wayland Interfaces */
771 wl_egl_display->tss = NULL;
772 wl_egl_display->presentation = NULL;
773 wl_egl_display->explicit_sync = NULL;
774 wl_egl_display->wl_tbm_client = NULL;
776 wl_egl_display->use_wait_vblank = TPL_TRUE; // default enabled
/* TPL_WAIT_VBLANK=0 lets users opt out of vblank-synchronized commits. */
778 char *env = tpl_getenv("TPL_WAIT_VBLANK");
779 if (env && !atoi(env)) {
780 wl_egl_display->use_wait_vblank = TPL_FALSE;
784 tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
787 wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
788 (tpl_gthread_func)_thread_init, (void *)wl_egl_display);
789 if (!wl_egl_display->thread) {
790 TPL_ERR("Failed to create wl_egl_thread");
794 wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
795 (void *)wl_egl_display,
796 wl_display_get_fd(wl_egl_display->wl_display),
797 &disp_funcs, SOURCE_TYPE_NORMAL);
798 if (!wl_egl_display->disp_source) {
799 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
800 display->native_handle,
801 wl_egl_display->thread);
805 wl_egl_display->tdm_source = tpl_gsource_create(wl_egl_display->thread,
806 (void *)wl_egl_display,
807 wl_egl_display->tdm_display_fd,
808 &tdm_funcs, SOURCE_TYPE_NORMAL);
809 if (!wl_egl_display->tdm_source) {
810 TPL_ERR("Failed to create tdm_gsource\n");
814 TPL_INFO("[DISPLAY_INIT]",
815 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
817 wl_egl_display->thread,
818 wl_egl_display->wl_display);
820 TPL_INFO("[DISPLAY_INIT]",
821 "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
822 wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
823 wl_egl_display->tss ? "TRUE" : "FALSE",
824 wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
826 return TPL_ERROR_NONE;
/* Error tail: destroy whatever was created, in reverse order.
 * TPL_TRUE = wait for the source's finalize to run on the thread. */
829 if (wl_egl_display->thread) {
830 if (wl_egl_display->tdm_source)
831 tpl_gsource_destroy(wl_egl_display->tdm_source, TPL_TRUE);
832 if (wl_egl_display->disp_source)
833 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
835 tpl_gthread_destroy(wl_egl_display->thread);
838 wl_egl_display->thread = NULL;
839 free(wl_egl_display);
841 display->backend.data = NULL;
842 return TPL_ERROR_INVALID_OPERATION;
/* Backend exit: destroy the tdm and display gsources (waiting for their
 * finalizers), stop the wl_egl thread, and free wl_egl_display. */
846 __tpl_wl_egl_display_fini(tpl_display_t *display)
848 tpl_wl_egl_display_t *wl_egl_display;
852 wl_egl_display = (tpl_wl_egl_display_t *)display->backend.data;
853 if (wl_egl_display) {
854 TPL_INFO("[DISPLAY_FINI]",
855 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
857 wl_egl_display->thread,
858 wl_egl_display->wl_display);
860 if (wl_egl_display->tdm_source && wl_egl_display->tdm_initialized) {
861 tpl_gsource_destroy(wl_egl_display->tdm_source, TPL_TRUE);
862 wl_egl_display->tdm_source = NULL;
865 if (wl_egl_display->disp_source) {
866 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
867 wl_egl_display->disp_source = NULL;
870 if (wl_egl_display->thread) {
871 tpl_gthread_destroy(wl_egl_display->thread);
872 wl_egl_display->thread = NULL;
875 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
877 free(wl_egl_display);
880 display->backend.data = NULL;
/* Report whether an 8:8:8 window config is supported: alpha 8 maps to
 * TBM_FORMAT_ARGB8888, alpha 0 to TBM_FORMAT_XRGB8888; everything else
 * is rejected with TPL_ERROR_INVALID_PARAMETER. */
884 __tpl_wl_egl_display_query_config(tpl_display_t *display,
885 tpl_surface_type_t surface_type,
886 int red_size, int green_size,
887 int blue_size, int alpha_size,
888 int color_depth, int *native_visual_id,
893 if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
894 green_size == 8 && blue_size == 8 &&
895 (color_depth == 32 || color_depth == 24)) {
897 if (alpha_size == 8) {
898 if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
899 if (is_slow) *is_slow = TPL_FALSE;
900 return TPL_ERROR_NONE;
902 if (alpha_size == 0) {
903 if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
904 if (is_slow) *is_slow = TPL_FALSE;
905 return TPL_ERROR_NONE;
909 return TPL_ERROR_INVALID_PARAMETER;
/* No-op config filter: this backend accepts every config unchanged. */
913 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
917 TPL_IGNORE(visual_id);
918 TPL_IGNORE(alpha_size);
919 return TPL_ERROR_NONE;
/* Query width/height/format of a wl_egl_window. Prefer the format stored
 * on an already-attached wl_egl_surface (via tizen_private->data);
 * otherwise derive it from the requested alpha size. */
923 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
924 tpl_handle_t window, int *width,
925 int *height, tbm_format *format,
926 int depth, int a_size)
928 tpl_result_t ret = TPL_ERROR_NONE;
929 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
934 if (!wl_egl_window) {
935 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
936 return TPL_ERROR_INVALID_PARAMETER;
939 if (width) *width = wl_egl_window->width;
940 if (height) *height = wl_egl_window->height;
942 struct tizen_private *tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
943 if (tizen_private && tizen_private->data) {
944 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
945 *format = wl_egl_surface->format;
/* No attached surface yet: pick format from alpha size
 * (elided lines presumably branch on a_size — TODO confirm upstream). */
948 *format = TBM_FORMAT_ARGB8888;
950 *format = TBM_FORMAT_XRGB8888;
/* Query width/height/format of a native pixmap by resolving it to a
 * tbm_surface through wayland-tbm-server. */
958 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
959 tpl_handle_t pixmap, int *width,
960 int *height, tbm_format *format)
962 tbm_surface_h tbm_surface = NULL;
965 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
966 return TPL_ERROR_INVALID_PARAMETER;
969 tbm_surface = wayland_tbm_server_get_surface(NULL,
970 (struct wl_resource *)pixmap);
972 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
973 return TPL_ERROR_INVALID_PARAMETER;
976 if (width) *width = tbm_surface_get_width(tbm_surface);
977 if (height) *height = tbm_surface_get_height(tbm_surface);
978 if (format) *format = tbm_surface_get_format(tbm_surface);
980 return TPL_ERROR_NONE;
/* Resolve a native pixmap handle to its backing tbm_surface.
 * NOTE(review): the success return is elided in this view. */
984 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
986 tbm_surface_h tbm_surface = NULL;
990 tbm_surface = wayland_tbm_server_get_surface(NULL,
991 (struct wl_resource *)pixmap);
993 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
/* Backend chooser: same wl_display magic check as
 * _check_native_handle_is_wl_display (compare the handle's first pointer
 * with wl_display_interface by address, then by name).
 * NOTE(review): return statements are elided in this view. */
1001 __tpl_display_choose_backend_wl_egl_thread2(tpl_handle_t native_dpy)
1003 struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1004 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1006 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
1007 is a memory address pointing the structure of wl_display_interface. */
1008 if (wl_egl_native_dpy == &wl_display_interface)
1011 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1012 strlen(wl_display_interface.name)) == 0) {
1019 /* -- BEGIN -- wl_egl_window callback functions */
/* wl_egl_window destroy callback: the app destroyed its native window
 * while a tpl surface is still alive (abnormal order). Detach every
 * window/tizen_private link from wl_egl_surface under surf_mutex and
 * free tizen_private so later surface teardown won't touch freed state. */
1021 __cb_destroy_callback(void *private)
1023 struct tizen_private *tizen_private = (struct tizen_private *)private;
1024 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1026 if (!tizen_private) {
1027 TPL_LOG_B("WL_EGL", "[DESTROY_CB] Already destroyed surface");
1031 wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1032 if (wl_egl_surface) {
1033 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1034 wl_egl_surface->wl_egl_window);
1035 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
1037 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1038 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1039 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1040 wl_egl_surface->wl_egl_window->driver_private = NULL;
1041 wl_egl_surface->wl_egl_window = NULL;
1042 wl_egl_surface->wl_surface = NULL;
1044 tizen_private->set_window_serial_callback = NULL;
1045 tizen_private->rotate_callback = NULL;
1046 tizen_private->get_rotation_capability = NULL;
1047 tizen_private->set_frontbuffer_callback = NULL;
1048 tizen_private->create_commit_sync_fd = NULL;
1049 tizen_private->create_presentation_sync_fd = NULL;
1050 tizen_private->data = NULL;
1052 free(tizen_private);
1053 tizen_private = NULL;
1054 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* wl_egl_window resize callback: reset the tbm_surface_queue to the
 * window's newly requested size, keeping the current format. */
1059 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1061 TPL_ASSERT(private);
1062 TPL_ASSERT(wl_egl_window);
1064 struct tizen_private *tizen_private = (struct tizen_private *)private;
1065 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1066 int cur_w, cur_h, req_w, req_h, format;
1068 if (!wl_egl_surface) {
1069 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1074 format = wl_egl_surface->format;
1075 cur_w = wl_egl_surface->width;
1076 cur_h = wl_egl_surface->height;
1077 req_w = wl_egl_window->width;
1078 req_h = wl_egl_window->height;
1080 TPL_INFO("[WINDOW_RESIZE]",
1081 "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1082 wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1084 if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1085 != TBM_SURFACE_QUEUE_ERROR_NONE) {
1086 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1090 /* -- END -- wl_egl_window callback functions */
1092 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1094 /* There is no usecase for using prerotation callback below */
/* wl_egl_window rotate callback: record the window's new rotation on
 * the surface for later buffer-transform handling. */
1096 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1098 TPL_ASSERT(private);
1099 TPL_ASSERT(wl_egl_window);
1101 struct tizen_private *tizen_private = (struct tizen_private *)private;
1102 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1103 int rotation = tizen_private->rotation;
1105 if (!wl_egl_surface) {
1106 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1111 TPL_INFO("[WINDOW_ROTATE]",
1112 "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1113 wl_egl_surface, wl_egl_window,
1114 wl_egl_surface->rotation, rotation);
1116 wl_egl_surface->rotation = rotation;
1119 /* There is no usecase for using prerotation callback below */
/* wl_egl_window query callback: report whether this surface supports
 * prerotation (CAPABILITY_NONE is returned only on invalid input). */
1121 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1124 TPL_ASSERT(private);
1125 TPL_ASSERT(wl_egl_window);
1127 int rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1128 struct tizen_private *tizen_private = (struct tizen_private *)private;
1129 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1131 if (!wl_egl_surface) {
1132 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1134 return rotation_capability;
1137 if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1138 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1140 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1143 return rotation_capability;
/* wl_egl_window callback: store an app-provided buffer serial on the
 * surface and flag that serials are now caller-managed. */
1147 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1148 void *private, unsigned int serial)
1150 TPL_ASSERT(private);
1151 TPL_ASSERT(wl_egl_window);
1153 struct tizen_private *tizen_private = (struct tizen_private *)private;
1154 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1156 if (!wl_egl_surface) {
1157 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1162 wl_egl_surface->set_serial_is_used = TPL_TRUE;
1163 wl_egl_surface->serial = serial;
/* wl_egl_window callback: return a dup'd eventfd that is signaled when a
 * buffer of this surface is committed. The surface keeps the master fd
 * (created lazily, EFD_CLOEXEC); each caller receives its own dup and
 * owns closing it. Returns -1 on failure. */
1167 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1169 TPL_ASSERT(private);
1170 TPL_ASSERT(wl_egl_window);
1172 int commit_sync_fd = -1;
1174 struct tizen_private *tizen_private = (struct tizen_private *)private;
1175 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1177 if (!wl_egl_surface) {
1178 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
1182 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
/* Fast path: master fd already exists, just hand out another dup. */
1184 if (wl_egl_surface->commit_sync.fd != -1) {
1185 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1186 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1187 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1188 TPL_DEBUG("[DUP_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1189 wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1190 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1191 return commit_sync_fd;
1194 wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1195 if (wl_egl_surface->commit_sync.fd == -1) {
1196 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)", wl_egl_surface);
1197 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1201 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1203 TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1204 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1205 TPL_DEBUG("[CREATE_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d)",
1206 wl_egl_surface, commit_sync_fd);
1208 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1210 return commit_sync_fd;
/* wl_egl_window tizen-private callback: same dup()'d-eventfd pattern as
 * __cb_create_commit_sync_fd, but for wp_presentation feedback.
 * The returned fd is a dup of the surface-owned presentation_sync eventfd;
 * the caller owns closing the dup. Returns -1 on failure.
 * Guarded by presentation_sync.mutex. */
1214 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1216 TPL_ASSERT(private);
1217 TPL_ASSERT(wl_egl_window);
1219 int presentation_sync_fd = -1;
1221 struct tizen_private *tizen_private = (struct tizen_private *)private;
1222 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1224 if (!wl_egl_surface) {
1225 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
1229 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Fast path: eventfd already exists — dup and return. */
1230 if (wl_egl_surface->presentation_sync.fd != -1) {
1231 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1232 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1233 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1234 TPL_DEBUG("[DUP_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1235 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1236 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1237 return presentation_sync_fd;
/* Slow path: lazily create the backing eventfd. */
1240 wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1241 if (wl_egl_surface->presentation_sync.fd == -1) {
1242 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)", wl_egl_surface);
1243 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1247 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1248 TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1249 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1250 TPL_DEBUG("[CREATE_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1251 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1253 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1255 return presentation_sync_fd;
1257 /* -- END -- wl_egl_window tizen private callback functions */
1259 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
/* tizen_surface_shm_flusher listener: compositor asked the client to flush
 * its buffers; forwards the request to tbm_surface_queue_flush(). */
1260 static void __cb_tss_flusher_flush_callback(void *data,
1261 struct tizen_surface_shm_flusher *tss_flusher)
1263 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1264 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1266 TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1267 wl_egl_surface, wl_egl_surface->tbm_queue);
1269 tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1270 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1271 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* tizen_surface_shm_flusher listener: like the flush callback above, but
 * only releases free (unused) buffers via tbm_surface_queue_free_flush(). */
1276 static void __cb_tss_flusher_free_flush_callback(void *data,
1277 struct tizen_surface_shm_flusher *tss_flusher)
1279 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1280 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1282 TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1283 wl_egl_surface, wl_egl_surface->tbm_queue);
1285 tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1286 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1287 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* Listener vtable registered in _thread_wl_egl_surface_init() when the
 * display exposes tizen_surface_shm. */
1292 static const struct tizen_surface_shm_flusher_listener
1293 tss_flusher_listener = {
1294 __cb_tss_flusher_flush_callback,
1295 __cb_tss_flusher_free_flush_callback
1297 /* -- END -- tizen_surface_shm_flusher_listener */
1300 /* -- BEGIN -- tbm_surface_queue callback funstions */
/* tbm_surface_queue reset callback: runs when the queue is reset (resize or
 * activation-state change). Sets wl_egl_surface->reset so the next frame
 * picks up the new size/state, and notifies the app via surface->reset_cb.
 * NOTE(review): extraction dropped some lines (params, braces, `else`). */
1302 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1305 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1306 tpl_wl_egl_display_t *wl_egl_display = NULL;
1307 tpl_surface_t *surface = NULL;
1308 tpl_bool_t is_activated = TPL_FALSE;
1311 wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1312 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1314 wl_egl_display = wl_egl_surface->wl_egl_display;
1315 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1317 surface = wl_egl_surface->tpl_surface;
1318 TPL_CHECK_ON_NULL_RETURN(surface);
1320 /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
1321 * the changed window size at the next frame. */
1322 width = tbm_surface_queue_get_width(tbm_queue);
1323 height = tbm_surface_queue_get_height(tbm_queue);
1324 if (surface->width != width || surface->height != height) {
1325 TPL_INFO("[QUEUE_RESIZE]",
1326 "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1327 wl_egl_surface, tbm_queue,
1328 surface->width, surface->height, width, height);
1331 /* When queue_reset_callback is called, if is_activated is different from
1332 * its previous state change the reset flag to TPL_TRUE to get a new buffer
1333 * with the changed state(ACTIVATED/DEACTIVATED) at the next frame. */
1334 is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1335 wl_egl_surface->tbm_queue);
1336 if (wl_egl_surface->is_activated != is_activated) {
1338 TPL_INFO("[ACTIVATED]",
1339 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1340 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1342 TPL_LOG_T("[DEACTIVATED]",
1343 " wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1344 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1348 wl_egl_surface->reset = TPL_TRUE;
/* Let the application know it must re-validate the surface. */
1350 if (surface->reset_cb)
1351 surface->reset_cb(surface->reset_data);
/* tbm_surface_queue acquirable callback: a rendered buffer is ready to be
 * acquired. Wakes the surface's gsource thread with message 2, which is
 * handled by __thread_func_surf_dispatch() -> _thread_surface_queue_acquire(). */
1355 __cb_tbm_queue_acquirable_callback(tbm_surface_queue_h tbm_queue,
1358 TPL_IGNORE(tbm_queue);
1360 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1361 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1363 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1365 tpl_gsource_send_message(wl_egl_surface->surf_source, 2);
1367 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1369 /* -- END -- tbm_surface_queue callback funstions */
/* Thread-side teardown of a wl_egl_surface (runs on the wayland-egl thread
 * via __thread_func_surf_finalize). Drains presentation feedbacks, closes
 * sync eventfds, releases/cancels every tracked buffer, then destroys the
 * per-surface protocol objects and the tbm_queue. Holds surf_mutex for the
 * whole teardown. NOTE(review): extraction dropped several lines (braces,
 * `else` branches, `idx` handling) — code kept byte-identical. */
1372 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1374 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1376 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1378 TPL_INFO("[SURFACE_FINI]",
1379 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1380 wl_egl_surface, wl_egl_surface->wl_egl_window,
1381 wl_egl_surface->wl_surface);
1383 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Signal and close every outstanding presentation feedback so waiters on
 * the dup'd fds are not left blocked forever. */
1385 if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1386 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1387 struct pst_feedback *pst_feedback =
1388 (struct pst_feedback *)__tpl_list_pop_front(
1389 wl_egl_surface->presentation_feedbacks, NULL);
1391 _write_to_eventfd(pst_feedback->pst_sync_fd);
1392 close(pst_feedback->pst_sync_fd);
1393 pst_feedback->pst_sync_fd = -1;
1395 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1396 pst_feedback->presentation_feedback = NULL;
1402 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1403 wl_egl_surface->presentation_feedbacks = NULL;
/* Unblock anyone polling the surface's presentation_sync eventfd. */
1406 if (wl_egl_surface->presentation_sync.fd != -1) {
1407 _write_to_eventfd(wl_egl_surface->presentation_sync.fd);
1408 close(wl_egl_surface->presentation_sync.fd);
1409 wl_egl_surface->presentation_sync.fd = -1;
1412 if (wl_egl_surface->vblank_waiting_buffers) {
1413 __tpl_list_free(wl_egl_surface->vblank_waiting_buffers, NULL);
1414 wl_egl_surface->vblank_waiting_buffers = NULL;
1417 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1421 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1422 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
1424 tpl_bool_t need_to_release = TPL_FALSE;
1425 tpl_bool_t need_to_cancel = TPL_FALSE;
/* Walk the buffers[] slots until every tracked buffer is detached:
 * in-flight buffers are released back to the queue, buffers the client
 * still holds (DEQUEUED) are cancel-dequeued. */
1427 while (wl_egl_surface->buffer_cnt) {
1428 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
1429 wl_egl_buffer = wl_egl_surface->buffers[idx];
1430 if (wl_egl_buffer) {
1431 TPL_DEBUG("[idx:%d] wl_egl_buffer(%p) tbm_surface(%p) status(%d)",
1433 wl_egl_buffer->tbm_surface, wl_egl_buffer->status);
1435 wl_egl_surface->buffers[idx] = NULL;
1436 wl_egl_surface->buffer_cnt--;
1438 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
1442 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
1444 tpl_gmutex_lock(&wl_egl_buffer->mutex);
1446 need_to_release = (wl_egl_buffer->status == ACQUIRED ||
1447 wl_egl_buffer->status == WAITING_SIGNALED ||
1448 wl_egl_buffer->status == WAITING_VBLANK ||
1449 wl_egl_buffer->status == COMMITTED);
1451 need_to_cancel = wl_egl_buffer->status == DEQUEUED;
/* A buffer still waiting on its render-done fence must finish before we
 * can safely release it. */
1453 if (wl_egl_buffer->status == WAITING_SIGNALED)
1454 tpl_gcond_wait(&wl_egl_buffer->cond, &wl_egl_buffer->mutex);
1456 if (need_to_release) {
1457 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
1458 wl_egl_buffer->tbm_surface);
1459 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
1460 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
1461 wl_egl_buffer->tbm_surface, tsq_err);
1464 if (need_to_cancel) {
1465 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
1466 wl_egl_buffer->tbm_surface);
1467 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
1468 TPL_ERR("Failed to release tbm_surface(%p) tsq_err(%d)",
1469 wl_egl_buffer->tbm_surface, tsq_err);
1472 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the ref taken when the buffer entered the release/cancel path. */
1474 if (need_to_release || need_to_cancel)
1475 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
/* Destroy per-surface protocol/vblank objects, then the queue itself. */
1481 if (wl_egl_surface->surface_sync) {
1482 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1483 "wl_egl_surface(%p) surface_sync(%p)",
1484 wl_egl_surface, wl_egl_surface->surface_sync);
1485 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1486 wl_egl_surface->surface_sync = NULL;
1489 if (wl_egl_surface->tss_flusher) {
1490 TPL_INFO("[FLUSHER_DESTROY]",
1491 "wl_egl_surface(%p) tss_flusher(%p)",
1492 wl_egl_surface, wl_egl_surface->tss_flusher);
1493 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1494 wl_egl_surface->tss_flusher = NULL;
1497 if (wl_egl_surface->vblank) {
1498 TPL_INFO("[VBLANK_DESTROY]",
1499 "wl_egl_surface(%p) vblank(%p)",
1500 wl_egl_surface, wl_egl_surface->vblank);
1501 tdm_client_vblank_destroy(wl_egl_surface->vblank);
1502 wl_egl_surface->vblank = NULL;
1505 if (wl_egl_surface->tbm_queue) {
1506 TPL_INFO("[TBM_QUEUE_DESTROY]",
1507 "wl_egl_surface(%p) tbm_queue(%p)",
1508 wl_egl_surface, wl_egl_surface->tbm_queue);
1509 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1510 wl_egl_surface->tbm_queue = NULL;
1513 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource dispatch for the surface: message 1 = initialize on the thread
 * (signals surf_cond so __tpl_wl_egl_surface_init can continue), message 2 =
 * acquire-and-commit newly acquirable buffers.
 * NOTE(review): the `if (message == 1)` line was dropped by extraction. */
1517 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1519 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1521 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1523 /* Initialize surface */
1525 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1526 TPL_DEBUG("wl_egl_surface(%p) initialize message received!",
1528 _thread_wl_egl_surface_init(wl_egl_surface);
1529 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1530 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1531 } else if (message == 2) {
1532 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1533 TPL_DEBUG("wl_egl_surface(%p) acquirable message received!",
1535 _thread_surface_queue_acquire(wl_egl_surface);
1536 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource finalize for the surface: runs _thread_wl_egl_surface_fini() on
 * the wayland-egl thread when the gsource is destroyed. */
1543 __thread_func_surf_finalize(tpl_gsource *gsource)
1545 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1547 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1548 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1550 _thread_wl_egl_surface_fini(wl_egl_surface);
1552 TPL_DEBUG("[FINALIZE] gsource(%p) wl_egl_surface(%p)",
1553 gsource, wl_egl_surface);
/* gsource vtable for per-surface sources created in __tpl_wl_egl_surface_init(). */
1556 static tpl_gsource_functions surf_funcs = {
1559 .dispatch = __thread_func_surf_dispatch,
1560 .finalize = __thread_func_surf_finalize,
/* Backend entry point: creates the per-surface state for a window surface.
 * Allocates tpl_wl_egl_surface_t, creates its gsource on the wayland-egl
 * thread, wires the wl_egl_window tizen-private callbacks, then sends
 * message 1 and blocks on surf_cond until the thread-side init (tbm_queue
 * creation etc.) completes. Returns TPL_ERROR_NONE on success.
 * NOTE(review): extraction dropped lines (assignment of wl_egl_display at
 * 1578, the surf_source NULL check, `int i` declaration) — kept byte-identical. */
1564 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1566 tpl_wl_egl_display_t *wl_egl_display = NULL;
1567 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1568 tpl_gsource *surf_source = NULL;
1570 struct wl_egl_window *wl_egl_window =
1571 (struct wl_egl_window *)surface->native_handle;
1573 TPL_ASSERT(surface);
1574 TPL_ASSERT(surface->display);
1575 TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1576 TPL_ASSERT(surface->native_handle);
1579 (tpl_wl_egl_display_t *)surface->display->backend.data;
1580 if (!wl_egl_display) {
1581 TPL_ERR("Invalid parameter. wl_egl_display(%p)",
1583 return TPL_ERROR_INVALID_PARAMETER;
1586 wl_egl_surface = (tpl_wl_egl_surface_t *) calloc(1,
1587 sizeof(tpl_wl_egl_surface_t));
1588 if (!wl_egl_surface) {
1589 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1590 return TPL_ERROR_OUT_OF_MEMORY;
1593 surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1594 -1, &surf_funcs, SOURCE_TYPE_NORMAL);
1596 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1598 goto surf_source_create_fail;
1601 surface->backend.data = (void *)wl_egl_surface;
1602 surface->width = wl_egl_window->width;
1603 surface->height = wl_egl_window->height;
1604 surface->rotation = 0;
1606 wl_egl_surface->tpl_surface = surface;
1607 wl_egl_surface->width = wl_egl_window->width;
1608 wl_egl_surface->height = wl_egl_window->height;
1609 wl_egl_surface->format = surface->format;
1611 wl_egl_surface->surf_source = surf_source;
1612 wl_egl_surface->wl_egl_window = wl_egl_window;
1613 wl_egl_surface->wl_surface = wl_egl_window->surface;
1615 wl_egl_surface->wl_egl_display = wl_egl_display;
/* Default flags/counters for a freshly created surface. */
1617 wl_egl_surface->reset = TPL_FALSE;
1618 wl_egl_surface->is_activated = TPL_FALSE;
1619 wl_egl_surface->need_to_enqueue = TPL_TRUE;
1620 wl_egl_surface->prerotation_capability = TPL_FALSE;
1621 wl_egl_surface->vblank_done = TPL_TRUE;
1622 wl_egl_surface->use_render_done_fence = TPL_FALSE;
1623 wl_egl_surface->set_serial_is_used = TPL_FALSE;
1625 wl_egl_surface->latest_transform = 0;
1626 wl_egl_surface->render_done_cnt = 0;
1627 wl_egl_surface->serial = 0;
1629 wl_egl_surface->vblank = NULL;
1630 wl_egl_surface->tss_flusher = NULL;
1631 wl_egl_surface->surface_sync = NULL;
1633 wl_egl_surface->post_interval = surface->post_interval;
/* Sync eventfds are created lazily by the tizen-private callbacks. */
1635 wl_egl_surface->commit_sync.fd = -1;
1636 wl_egl_surface->presentation_sync.fd = -1;
1640 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
1641 wl_egl_surface->buffers[i] = NULL;
1642 wl_egl_surface->buffer_cnt = 0;
1646 struct tizen_private *tizen_private = NULL;
1648 if (wl_egl_window->driver_private)
1649 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1651 tizen_private = tizen_private_create();
1652 wl_egl_window->driver_private = (void *)tizen_private;
/* Hook the tizen-private callbacks so wayland-egl-tizen calls back into
 * this backend for rotation/serial/sync-fd requests. */
1655 if (tizen_private) {
1656 tizen_private->data = (void *)wl_egl_surface;
1657 tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1658 tizen_private->get_rotation_capability = (void *)
1659 __cb_get_rotation_capability;
1660 tizen_private->set_window_serial_callback = (void *)
1661 __cb_set_window_serial_callback;
1662 tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1663 tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1665 wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1666 wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1670 tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1671 tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1673 tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1675 tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1676 tpl_gcond_init(&wl_egl_surface->surf_cond);
1678 /* Initialize in thread */
1679 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1680 tpl_gsource_send_message(wl_egl_surface->surf_source, 1);
1681 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1682 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1684 TPL_ASSERT(wl_egl_surface->tbm_queue);
1686 TPL_INFO("[SURFACE_INIT]",
1687 "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1688 surface, wl_egl_surface, wl_egl_surface->surf_source);
1690 return TPL_ERROR_NONE;
1692 surf_source_create_fail:
1693 free(wl_egl_surface);
1694 surface->backend.data = NULL;
1695 return TPL_ERROR_INVALID_OPERATION;
/* Creates the surface's tbm_surface_queue via wayland-tbm, choosing the
 * tiled variant when the buffer manager reports tiled-memory capability.
 * Sets GUARANTEE_CYCLE mode and registers the reset/acquirable callbacks.
 * Returns NULL on any failure (queue is destroyed on partial failure).
 * NOTE(review): extraction dropped several argument lines and the error
 * returns — code kept byte-identical. */
1698 static tbm_surface_queue_h
1699 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1700 struct wayland_tbm_client *wl_tbm_client,
1703 tbm_surface_queue_h tbm_queue = NULL;
1704 tbm_bufmgr bufmgr = NULL;
1705 unsigned int capability;
1707 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1708 int width = wl_egl_surface->width;
1709 int height = wl_egl_surface->height;
1710 int format = wl_egl_surface->format;
1712 if (!wl_tbm_client || !wl_surface) {
1713 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1714 wl_tbm_client, wl_surface);
/* Probe the bufmgr capability once; only needed to pick the queue flavor. */
1718 bufmgr = tbm_bufmgr_init(-1);
1719 capability = tbm_bufmgr_get_capability(bufmgr);
1720 tbm_bufmgr_deinit(bufmgr);
1722 if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1723 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1731 tbm_queue = wayland_tbm_client_create_surface_queue(
1741 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
1746 if (tbm_surface_queue_set_modes(
1747 tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1748 TBM_SURFACE_QUEUE_ERROR_NONE) {
1749 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1751 tbm_surface_queue_destroy(tbm_queue);
1755 if (tbm_surface_queue_add_reset_cb(
1757 __cb_tbm_queue_reset_callback,
1758 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1759 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1761 tbm_surface_queue_destroy(tbm_queue);
1765 if (tbm_surface_queue_add_acquirable_cb(
1767 __cb_tbm_queue_acquirable_callback,
1768 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1769 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1771 tbm_surface_queue_destroy(tbm_queue);
/* Creates a tdm_client vblank object on the "primary" output, configured
 * with fake-vblank enabled and async (non-sync) delivery. Returns NULL on
 * failure. NOTE(review): extraction dropped the NULL-check `if` and the
 * early returns — code kept byte-identical. */
1778 static tdm_client_vblank*
1779 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1781 tdm_client_vblank *vblank = NULL;
1782 tdm_client_output *tdm_output = NULL;
1783 tdm_error tdm_err = TDM_ERROR_NONE;
1786 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1790 tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1791 if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1792 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1796 vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1797 if (!vblank || tdm_err != TDM_ERROR_NONE) {
1798 TPL_ERR("Failed to create vblank. tdm_err(%d)", tdm_err);
/* enable_fake=1: deliver software vblanks when the output is off;
 * sync=0: events are delivered asynchronously. */
1802 tdm_client_vblank_set_enable_fake(vblank, 1);
1803 tdm_client_vblank_set_sync(vblank, 0);
/* Thread-side half of surface init (runs in the wayland-egl thread when
 * message 1 is dispatched). Creates the tbm_queue, optional tdm vblank,
 * optional shm flusher, and optional explicit-sync surface object, then
 * allocates the helper lists. Failures of optional features degrade
 * gracefully (e.g. use_explicit_sync is cleared on surface_sync failure). */
1809 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1811 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1813 wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
1815 wl_egl_display->wl_tbm_client,
1817 if (!wl_egl_surface->tbm_queue) {
1818 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
1819 wl_egl_surface, wl_egl_display->wl_tbm_client);
1823 TPL_INFO("[QUEUE_CREATION]",
1824 "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
1825 wl_egl_surface, wl_egl_surface->wl_surface,
1826 wl_egl_display->wl_tbm_client);
1827 TPL_INFO("[QUEUE_CREATION]",
1828 "tbm_queue(%p) size(%d x %d) X %d format(%d)",
1829 wl_egl_surface->tbm_queue,
1830 wl_egl_surface->width,
1831 wl_egl_surface->height,
1833 wl_egl_surface->format);
/* Optional: per-surface vblank for frame pacing. */
1835 wl_egl_surface->vblank = _thread_create_tdm_client_vblank(
1836 wl_egl_display->tdm_client);
1837 if (wl_egl_surface->vblank) {
1838 TPL_INFO("[VBLANK_INIT]",
1839 "wl_egl_surface(%p) tdm_client(%p) vblank(%p)",
1840 wl_egl_surface, wl_egl_display->tdm_client,
1841 wl_egl_surface->vblank);
/* Optional: compositor-driven buffer flushing via tizen_surface_shm. */
1844 if (wl_egl_display->tss) {
1845 wl_egl_surface->tss_flusher =
1846 tizen_surface_shm_get_flusher(wl_egl_display->tss,
1847 wl_egl_surface->wl_surface);
1850 if (wl_egl_surface->tss_flusher) {
1851 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
1852 &tss_flusher_listener,
1854 TPL_INFO("[FLUSHER_INIT]",
1855 "wl_egl_surface(%p) tss_flusher(%p)",
1856 wl_egl_surface, wl_egl_surface->tss_flusher);
/* Optional: explicit fence sync object; disable the feature display-wide
 * if creation fails so later frames don't retry. */
1859 if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
1860 wl_egl_surface->surface_sync =
1861 zwp_linux_explicit_synchronization_v1_get_synchronization(
1862 wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
1863 if (wl_egl_surface->surface_sync) {
1864 TPL_INFO("[EXPLICIT_SYNC_INIT]",
1865 "wl_egl_surface(%p) surface_sync(%p)",
1866 wl_egl_surface, wl_egl_surface->surface_sync);
1868 TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
1870 wl_egl_display->use_explicit_sync = TPL_FALSE;
1874 wl_egl_surface->vblank_waiting_buffers = __tpl_list_alloc();
1875 wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
/* Backend entry point: destroys the per-surface state. Destroys the
 * gsource (which runs _thread_wl_egl_surface_fini on the thread and waits),
 * unhooks the wl_egl_window tizen-private callbacks, frees driver_private,
 * clears the sync mutexes (lock/unlock first to drain any holder), and
 * frees the wl_egl_surface. */
1879 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
1881 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1882 tpl_wl_egl_display_t *wl_egl_display = NULL;
1884 TPL_ASSERT(surface);
1885 TPL_ASSERT(surface->display);
1887 TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
1889 wl_egl_surface = (tpl_wl_egl_surface_t *) surface->backend.data;
1890 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1892 wl_egl_display = wl_egl_surface->wl_egl_display;
1893 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1895 TPL_INFO("[SURFACE_FINI][BEGIN]",
1896 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1898 wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
/* TPL_TRUE: wait for the thread-side finalize to complete. */
1900 if (wl_egl_surface->surf_source)
1901 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
1902 wl_egl_surface->surf_source = NULL;
1904 _print_buffer_lists(wl_egl_surface);
1906 if (wl_egl_surface->wl_egl_window) {
1907 struct tizen_private *tizen_private = NULL;
1908 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
1909 TPL_INFO("[WL_EGL_WINDOW_FINI]",
1910 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1911 wl_egl_surface, wl_egl_window,
1912 wl_egl_surface->wl_surface);
1913 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
/* Unhook every callback before freeing so late calls from wayland-egl
 * cannot reach freed memory. */
1914 if (tizen_private) {
1915 tizen_private->set_window_serial_callback = NULL;
1916 tizen_private->rotate_callback = NULL;
1917 tizen_private->get_rotation_capability = NULL;
1918 tizen_private->create_presentation_sync_fd = NULL;
1919 tizen_private->create_commit_sync_fd = NULL;
1920 tizen_private->set_frontbuffer_callback = NULL;
1921 tizen_private->merge_sync_fds = NULL;
1922 tizen_private->data = NULL;
1923 free(tizen_private);
1925 wl_egl_window->driver_private = NULL;
1928 wl_egl_window->destroy_window_callback = NULL;
1929 wl_egl_window->resize_callback = NULL;
1931 wl_egl_surface->wl_egl_window = NULL;
1934 wl_egl_surface->wl_surface = NULL;
1935 wl_egl_surface->wl_egl_display = NULL;
1936 wl_egl_surface->tpl_surface = NULL;
/* Lock/unlock drains any concurrent holder before clearing the mutex. */
1938 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
1939 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1940 tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
1942 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
1943 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1944 tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
1946 tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
1947 tpl_gcond_clear(&wl_egl_surface->surf_cond);
1949 TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
1951 free(wl_egl_surface);
1952 surface->backend.data = NULL;
1956 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
1959 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1961 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
1963 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
1965 TPL_CHECK_ON_TRUE_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
1967 TPL_INFO("[SET_PREROTATION_CAPABILITY]",
1968 "wl_egl_surface(%p) prerotation capability set to [%s]",
1969 wl_egl_surface, (set ? "TRUE" : "FALSE"));
1971 wl_egl_surface->prerotation_capability = set;
1972 return TPL_ERROR_NONE;
1976 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
1979 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1981 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
1983 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
1985 TPL_CHECK_ON_TRUE_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
1987 TPL_INFO("[SET_POST_INTERVAL]",
1988 "wl_egl_surface(%p) post_interval(%d -> %d)",
1989 wl_egl_surface, wl_egl_surface->post_interval, post_interval);
1991 wl_egl_surface->post_interval = post_interval;
1993 return TPL_ERROR_NONE;
/* Backend entry point: a surface is valid only while its reset flag is
 * clear (the flag is raised by the tbm_queue reset callback on resize or
 * activation change). Returns TPL_TRUE when no reset is pending. */
1997 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
1999 tpl_bool_t retval = TPL_TRUE;
2001 TPL_ASSERT(surface);
2002 TPL_ASSERT(surface->backend.data);
2004 tpl_wl_egl_surface_t *wl_egl_surface =
2005 (tpl_wl_egl_surface_t *)surface->backend.data;
2007 retval = !(wl_egl_surface->reset);
/* Backend entry point: reports the current queue dimensions through the
 * out parameters. NOTE(review): the dropped lines 2018/2020 presumably
 * guard each write with `if (width)` / `if (height)` — confirm upstream. */
2013 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2015 tpl_wl_egl_surface_t *wl_egl_surface =
2016 (tpl_wl_egl_surface_t *)surface->backend.data;
2019 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2021 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2024 #define CAN_DEQUEUE_TIMEOUT_MS 10000
/* Recovery path for CAN_DEQUEUE_TIMEOUT: flushes the tbm_queue, then
 * force-releases every buffer still marked COMMITTED (the compositor never
 * released them) and drops the ref held for each. Returns TPL_ERROR_NONE,
 * or TPL_ERROR_INVALID_OPERATION when the queue flush itself fails. */
2027 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2029 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2031 _print_buffer_lists(wl_egl_surface);
2033 if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2034 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2035 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2036 wl_egl_surface->tbm_queue, tsq_err);
2037 return TPL_ERROR_INVALID_OPERATION;
2042 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2043 for (i = 0; i < BUFFER_ARRAY_SIZE; i++) {
/* Snapshot the slot under buffers_mutex; status is checked outside it.
 * NOTE(review): status check/update outside the buffer mutex — presumably
 * safe on this path; verify against the commit thread. */
2044 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2045 wl_egl_buffer = wl_egl_surface->buffers[i];
2046 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2047 if (wl_egl_buffer && wl_egl_buffer->status == COMMITTED) {
2048 wl_egl_buffer->status = RELEASED;
2049 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2050 wl_egl_buffer->tbm_surface);
2051 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2052 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2053 wl_egl_buffer->tbm_surface, tsq_err);
2054 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2059 TPL_INFO("[FORCE_FLUSH]",
2060 "wl_egl_surface(%p) tbm_queue(%p)",
2061 wl_egl_surface, wl_egl_surface->tbm_queue);
2063 return TPL_ERROR_NONE;
/* Re-initializes per-frame state of a wl_egl_buffer at dequeue time:
 * resets fences/flags, captures the window transform, and assigns the
 * buffer serial (application-set serial when set_serial_is_used, otherwise
 * the next auto-incremented tizen_private serial). */
2067 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2068 tpl_wl_egl_surface_t *wl_egl_surface)
2070 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2071 struct tizen_private *tizen_private =
2072 (struct tizen_private *)wl_egl_window->driver_private;
2074 TPL_ASSERT(tizen_private);
2076 wl_egl_buffer->draw_done = TPL_FALSE;
2077 wl_egl_buffer->need_to_commit = TPL_TRUE;
2079 wl_egl_buffer->acquire_fence_fd = -1;
2080 wl_egl_buffer->release_fence_fd = -1;
2081 wl_egl_buffer->commit_sync_fd = -1;
2082 wl_egl_buffer->presentation_sync_fd = -1;
2084 wl_egl_buffer->buffer_release = NULL;
2086 wl_egl_buffer->transform = tizen_private->transform;
/* Track window-transform changes so the commit path can re-apply them. */
2088 if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2089 wl_egl_buffer->w_transform = tizen_private->window_transform;
2090 wl_egl_buffer->w_rotated = TPL_TRUE;
2093 if (wl_egl_surface->set_serial_is_used) {
2094 wl_egl_buffer->serial = wl_egl_surface->serial;
2096 wl_egl_buffer->serial = ++tizen_private->serial;
/* Damage rects from the previous frame are stale — drop them. */
2099 if (wl_egl_buffer->rects) {
2100 free(wl_egl_buffer->rects);
2101 wl_egl_buffer->rects = NULL;
2102 wl_egl_buffer->num_rects = 0;
2106 static tpl_wl_egl_buffer_t *
2107 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2109 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2110 tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2111 (void **)&wl_egl_buffer);
2112 return wl_egl_buffer;
/* Returns the wl_egl_buffer bound to tbm_surface, creating and registering
 * one on first sight: attaches it as tbm user data (freed via
 * __cb_wl_egl_buffer_free), records geometry/bo name, and inserts it into
 * the first free slot of wl_egl_surface->buffers[]. Always finishes by
 * re-initializing per-frame state via _wl_egl_buffer_init().
 * NOTE(review): extraction dropped lines (user-data set argument, `int i`
 * declaration, braces) — code kept byte-identical. */
2115 static tpl_wl_egl_buffer_t *
2116 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2117 tbm_surface_h tbm_surface)
2119 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2120 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2122 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2124 if (!wl_egl_buffer) {
2125 wl_egl_buffer = (tpl_wl_egl_buffer_t *)calloc(1, sizeof(tpl_wl_egl_buffer_t));
2126 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
2128 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2129 (tbm_data_free)__cb_wl_egl_buffer_free);
2130 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2133 wl_egl_buffer->wl_buffer = NULL;
2134 wl_egl_buffer->tbm_surface = tbm_surface;
2135 wl_egl_buffer->bo_name = _get_tbm_surface_bo_name(tbm_surface);
2136 wl_egl_buffer->wl_egl_surface = wl_egl_surface;
2138 wl_egl_buffer->status = RELEASED;
2140 wl_egl_buffer->dx = wl_egl_window->dx;
2141 wl_egl_buffer->dy = wl_egl_window->dy;
2142 wl_egl_buffer->width = tbm_surface_get_width(tbm_surface);
2143 wl_egl_buffer->height = tbm_surface_get_height(tbm_surface);
2145 tpl_gmutex_init(&wl_egl_buffer->mutex);
2146 tpl_gcond_init(&wl_egl_buffer->cond);
2148 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
/* Find the first empty tracking slot for this buffer. */
2151 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
2152 if (wl_egl_surface->buffers[i] == NULL) break;
2154 wl_egl_surface->buffer_cnt++;
2155 wl_egl_surface->buffers[i] = wl_egl_buffer;
2156 wl_egl_buffer->idx = i;
2158 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2160 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2161 "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2162 wl_egl_surface, wl_egl_buffer, tbm_surface,
2163 wl_egl_buffer->bo_name);
2166 _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2168 return wl_egl_buffer;
/* Backend dequeue entry point (surface->backend.dequeue_buffer).
 * Waits until the tbm_queue can dequeue, queries the compositor
 * activation state, then dequeues a tbm_surface for the client to
 * render into. In frontbuffer mode the already-set frontbuffer may be
 * returned directly. When explicit sync is used, the buffer's release
 * fence fd is handed out through release_fence.
 * NOTE(review): this excerpt is missing some original lines (closing
 * braces, NULL returns, the final return); code below is kept verbatim. */
2171 static tbm_surface_h
2172 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2173 int32_t *release_fence)
2175 TPL_ASSERT(surface);
2176 TPL_ASSERT(surface->backend.data);
2177 TPL_ASSERT(surface->display);
2178 TPL_ASSERT(surface->display->backend.data);
2179 TPL_OBJECT_CHECK_RETURN(surface, NULL);
2181 tpl_wl_egl_surface_t *wl_egl_surface =
2182 (tpl_wl_egl_surface_t *)surface->backend.data;
2183 tpl_wl_egl_display_t *wl_egl_display =
2184 (tpl_wl_egl_display_t *)surface->display->backend.data;
2185 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2187 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2188 tpl_bool_t is_activated = 0;
2190 tbm_surface_h tbm_surface = NULL;
/* Drop the tpl object lock while blocking (up to CAN_DEQUEUE_TIMEOUT_MS)
 * so other tpl calls on this surface are not stalled by the wait. */
2192 TPL_OBJECT_UNLOCK(surface);
2193 tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2194 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2195 TPL_OBJECT_LOCK(surface);
2197 /* After the can dequeue state, lock the wl_event_mutex to prevent other
2198 * events from being processed in wayland_egl_thread
2199 * during below dequeue procedure. */
2200 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
/* On timeout, try to recover by force-flushing the queue; a flush
 * failure aborts the dequeue (unlock path visible below). */
2202 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2203 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2204 wl_egl_surface->tbm_queue, surface);
2205 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2206 TPL_ERR("Failed to timeout reset. tbm_queue(%p) surface(%p)",
2207 wl_egl_surface->tbm_queue, surface);
2208 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2211 tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2215 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2216 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2217 wl_egl_surface->tbm_queue, surface);
2218 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2222 /* wayland client can check their states (ACTIVATED or DEACTIVATED) with
2223 * below function [wayland_tbm_client_queue_check_activate()].
2224 * This function has to be called before tbm_surface_queue_dequeue()
2225 * in order to know what state the buffer will be dequeued next.
2227 * ACTIVATED state means non-composite mode. Client can get buffers which
2228 can be displayed directly(without compositing).
2229 * DEACTIVATED state means composite mode. Client's buffer will be displayed
2230 by compositor(E20) with compositing.
2232 is_activated = wayland_tbm_client_queue_check_activate(
2233 wl_egl_display->wl_tbm_client,
2234 wl_egl_surface->tbm_queue);
2236 wl_egl_surface->is_activated = is_activated;
/* Refresh cached surface dimensions from the queue (it may have been
 * resized/reset since the last dequeue). */
2238 surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2239 surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2240 wl_egl_surface->width = surface->width;
2241 wl_egl_surface->height = surface->height;
2243 if (surface->is_frontbuffer_mode && surface->frontbuffer != NULL) {
2244 /* If surface->frontbuffer is already set in frontbuffer mode,
2245 * it will return that frontbuffer if it is still activated,
2246 * otherwise dequeue the new buffer after initializing
2247 * surface->frontbuffer to NULL. */
2248 if (is_activated && !wl_egl_surface->reset) {
2249 bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2252 "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2253 surface->frontbuffer, bo_name);
2254 TRACE_ASYNC_BEGIN((int)surface->frontbuffer,
2255 "[DEQ]~[ENQ] BO_NAME:%d",
2257 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2258 return surface->frontbuffer;
2260 surface->frontbuffer = NULL;
2261 wl_egl_surface->need_to_enqueue = TPL_TRUE;
2264 surface->frontbuffer = NULL;
2267 tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2270 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p)| tsq_err = %d",
2271 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2272 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Hold an extra ref for the lifetime of this dequeue->enqueue cycle;
 * released on cancel/enqueue paths. */
2276 tbm_surface_internal_ref(tbm_surface);
2278 wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2279 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2281 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2282 wl_egl_buffer->status = DEQUEUED;
2284 /* If wl_egl_buffer->release_fence_fd is -1,
2285 * the tbm_surface can be used immediately.
2286 * If not, user(EGL) have to wait until signaled. */
2287 if (release_fence) {
2288 if (wl_egl_surface->surface_sync) {
2289 *release_fence = wl_egl_buffer->release_fence_fd;
2290 TPL_DEBUG("wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2291 wl_egl_surface, wl_egl_buffer, *release_fence);
2293 *release_fence = -1;
2297 if (surface->is_frontbuffer_mode && is_activated)
2298 surface->frontbuffer = tbm_surface;
2300 wl_egl_surface->reset = TPL_FALSE;
2302 TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2303 TRACE_ASYNC_BEGIN((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", wl_egl_buffer->bo_name);
2304 TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2305 wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name, release_fence ? *release_fence : -1);
2307 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2308 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* NOTE(review): the final `return tbm_surface;` lies outside this excerpt. */
/* Backend cancel-dequeue entry point: returns a previously dequeued
 * tbm_surface to the queue without enqueuing it. Marks the matching
 * wl_egl_buffer RELEASED, drops the ref taken at dequeue, and calls
 * tbm_surface_queue_cancel_dequeue().
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_PARAMETER for an
 * invalid tbm_surface, TPL_ERROR_INVALID_OPERATION on queue failure. */
2314 __tpl_wl_egl_surface_cancel_dequeued_buffer(tpl_surface_t *surface,
2315 tbm_surface_h tbm_surface)
2317 TPL_ASSERT(surface);
2318 TPL_ASSERT(surface->backend.data);
2320 tpl_wl_egl_surface_t *wl_egl_surface =
2321 (tpl_wl_egl_surface_t *)surface->backend.data;
2322 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2323 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2325 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2326 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2327 return TPL_ERROR_INVALID_PARAMETER;
2330 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2331 if (wl_egl_buffer) {
/* Mark the buffer released under its own mutex so the wayland thread
 * observes a consistent status. */
2332 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2333 wl_egl_buffer->status = RELEASED;
2334 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the reference taken in __tpl_wl_egl_surface_dequeue_buffer(). */
2337 tbm_surface_internal_unref(tbm_surface);
2339 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2341 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2342 TPL_ERR("Failed to release tbm_surface(%p) surface(%p)",
2343 tbm_surface, surface);
2344 return TPL_ERROR_INVALID_OPERATION;
2347 TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2348 wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2350 return TPL_ERROR_NONE;
/* Backend enqueue entry point: hands a rendered tbm_surface back to the
 * tbm_queue for the wayland thread to acquire and commit. Saves damage
 * rects and the acquire fence into the wl_egl_buffer, transfers any
 * pending presentation/commit sync fds from the surface to the buffer,
 * then enqueues. In frontbuffer mode a repeat enqueue of the current
 * frontbuffer is skipped.
 * Ownership: acquire_fence is taken over by wl_egl_buffer (or closed on
 * the skip path). */
2354 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2355 tbm_surface_h tbm_surface,
2356 int num_rects, const int *rects, int32_t acquire_fence)
2358 TPL_ASSERT(surface);
2359 TPL_ASSERT(surface->display);
2360 TPL_ASSERT(surface->backend.data);
2361 TPL_ASSERT(tbm_surface);
2362 TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2364 tpl_wl_egl_surface_t *wl_egl_surface =
2365 (tpl_wl_egl_surface_t *) surface->backend.data;
2366 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2367 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2370 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2371 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2373 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2374 return TPL_ERROR_INVALID_PARAMETER;
2377 bo_name = _get_tbm_surface_bo_name(tbm_surface);
2379 TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2382 "[ENQ] wl_egl_surface(%p) tbm_surface(%p) bo(%d) fence(%d)",
2383 wl_egl_surface, tbm_surface, bo_name, acquire_fence);
2385 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2386 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2388 /* If there are received region information, save it to wl_egl_buffer */
2389 if (num_rects && rects) {
/* Replace any damage rects left over from a previous enqueue. */
2390 if (wl_egl_buffer->rects != NULL) {
2391 free(wl_egl_buffer->rects);
2392 wl_egl_buffer->rects = NULL;
2393 wl_egl_buffer->num_rects = 0;
/* Each rect is 4 ints (x, y, w, h). */
2396 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2397 wl_egl_buffer->num_rects = num_rects;
2399 if (!wl_egl_buffer->rects) {
2400 TPL_ERR("Failed to allocate memory fo damage rects info.");
2401 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2402 return TPL_ERROR_OUT_OF_MEMORY;
2405 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
2408 if (!wl_egl_surface->need_to_enqueue ||
2409 !wl_egl_buffer->need_to_commit) {
2410 TPL_WARN("[ENQ_SKIP][Frontbuffer:%s] tbm_surface(%p) need not to enqueue",
2411 ((surface->frontbuffer == tbm_surface) ? "ON" : "OFF"), tbm_surface);
2412 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2413 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2414 return TPL_ERROR_NONE;
2417 /* In frontbuffer mode, will skip tbm_surface_queue_enqueue, acquire, and
2418 * commit if surface->frontbuffer that is already set and the tbm_surface
2419 * client want to enqueue are the same.
2421 if (surface->is_frontbuffer_mode) {
2422 /* The first buffer to be activated in frontbuffer mode must be
2423 * committed. Subsequence frames do not need to be committed because
2424 * the buffer is already displayed.
2426 if (surface->frontbuffer == tbm_surface)
2427 wl_egl_surface->need_to_enqueue = TPL_FALSE;
/* Frontbuffer path does not forward the fence; close it to avoid an
 * fd leak. */
2429 if (acquire_fence != -1) {
2430 close(acquire_fence);
/* Take ownership of acquire_fence, closing any fence not yet consumed
 * from a prior enqueue. */
2435 if (wl_egl_buffer->acquire_fence_fd != -1)
2436 close(wl_egl_buffer->acquire_fence_fd);
2438 wl_egl_buffer->acquire_fence_fd = acquire_fence;
2439 wl_egl_buffer->status = ENQUEUED;
/* Move the per-surface presentation sync fd onto this buffer so the
 * feedback for exactly this frame signals it. */
2441 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2442 if (wl_egl_surface->presentation_sync.fd != -1) {
2443 wl_egl_buffer->presentation_sync_fd = wl_egl_surface->presentation_sync.fd;
2444 wl_egl_surface->presentation_sync.fd = -1;
2446 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Same hand-off for the commit sync fd. */
2448 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2449 if (wl_egl_surface->commit_sync.fd != -1) {
2450 wl_egl_buffer->commit_sync_fd = wl_egl_surface->commit_sync.fd;
2451 wl_egl_surface->commit_sync.fd = -1;
2452 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2453 _get_tbm_surface_bo_name(tbm_surface));
2455 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2457 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2459 tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
2461 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2462 tbm_surface_internal_unref(tbm_surface);
2463 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2464 tbm_surface, wl_egl_surface, tsq_err);
2465 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2466 return TPL_ERROR_INVALID_OPERATION;
/* Drop the ref taken at dequeue; the queue now owns the surface. */
2469 tbm_surface_internal_unref(tbm_surface);
2471 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2473 return TPL_ERROR_NONE;
/* Dispatch callback of the disposable gsource created on a buffer's
 * acquire fence: runs in the wayland-egl thread when the fence signals
 * (render done). Moves the buffer to WAITING_VBLANK and either commits
 * it immediately or parks it on vblank_waiting_buffers when vblank
 * waiting is enabled and a vblank is still pending. */
2477 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2479 tpl_wl_egl_buffer_t *wl_egl_buffer =
2480 (tpl_wl_egl_buffer_t *)tpl_gsource_get_data(gsource);
2481 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2482 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2483 tbm_surface_h tbm_surface = wl_egl_buffer->tbm_surface;
2485 wl_egl_surface->render_done_cnt++;
2487 TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2488 wl_egl_buffer->acquire_fence_fd);
2490 TPL_DEBUG("[RENDER DONE] wl_egl_buffer(%p) tbm_surface(%p)",
2491 wl_egl_buffer, tbm_surface);
/* Wake any waiter blocked on this buffer's condition before changing
 * its status. */
2493 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2494 tpl_gcond_signal(&wl_egl_buffer->cond);
2495 wl_egl_buffer->status = WAITING_VBLANK;
2496 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2498 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
/* Commit now if no vblank gating, otherwise queue for the next vblank. */
2500 if (!wl_egl_display->use_wait_vblank || wl_egl_surface->vblank_done)
2501 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2503 __tpl_list_push_back(wl_egl_surface->vblank_waiting_buffers,
2506 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* Finalize callback of the fence-wait gsource: closes the acquire fence
 * fd and clears the buffer's waiting_source pointer once the disposable
 * source is torn down. */
2512 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2514 tpl_wl_egl_buffer_t *wl_egl_buffer =
2515 (tpl_wl_egl_buffer_t *)tpl_gsource_get_data(gsource);
2517 TPL_DEBUG("[FINALIZE] wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2518 wl_egl_buffer, wl_egl_buffer->waiting_source,
2519 wl_egl_buffer->acquire_fence_fd);
2521 close(wl_egl_buffer->acquire_fence_fd);
2522 wl_egl_buffer->acquire_fence_fd = -1;
2523 wl_egl_buffer->waiting_source = NULL;
/* gsource vtable for the per-buffer acquire-fence wait source
 * (created in _thread_surface_queue_acquire). */
2526 static tpl_gsource_functions buffer_funcs = {
2529 .dispatch = __thread_func_waiting_source_dispatch,
2530 .finalize = __thread_func_waiting_source_finalize,
/* Runs in the wayland-egl thread: drains every acquirable buffer from
 * the tbm_queue. For each buffer it ensures a wl_buffer exists, then
 * either commits it, defers it to the acquire-fence wait source (when a
 * fence must signal first and no surface_sync is present), or parks it
 * until the next vblank.
 * Returns TPL_ERROR_NONE, or TPL_ERROR_INVALID_OPERATION if acquire
 * fails. */
2534 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2536 tbm_surface_h tbm_surface = NULL;
2537 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2538 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2539 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2540 tpl_bool_t ready_to_commit = TPL_FALSE;
2542 while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2543 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2545 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2546 TPL_ERR("Failed to acquire from tbm_queue(%p)",
2547 wl_egl_surface->tbm_queue);
2548 return TPL_ERROR_INVALID_OPERATION;
/* Hold a ref across the acquire->commit->release window. */
2551 tbm_surface_internal_ref(tbm_surface);
2553 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2554 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2555 "wl_egl_buffer sould be not NULL");
2557 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2559 wl_egl_buffer->status = ACQUIRED;
/* Lazily create the wl_buffer proxy for this tbm_surface on first
 * acquire. */
2561 if (wl_egl_buffer->wl_buffer == NULL) {
2562 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2563 wl_egl_buffer->wl_buffer =
2564 (struct wl_proxy *)wayland_tbm_client_create_buffer(
2565 wl_egl_display->wl_tbm_client, tbm_surface);
2567 if (!wl_egl_buffer->wl_buffer) {
2568 TPL_WARN("Failed to create wl_buffer. wl_tbm_client(%p) tbm_surface(%p)",
2569 wl_egl_display->wl_tbm_client, tbm_surface);
2573 if (wl_egl_buffer->acquire_fence_fd != -1) {
/* With surface_sync the fence is forwarded to the compositor at
 * commit time, so no local wait is needed. */
2574 if (wl_egl_surface->surface_sync)
2575 ready_to_commit = TPL_TRUE;
/* Otherwise wait for the fence in a disposable gsource; replace a
 * stale wait source if one is still attached. */
2577 if (wl_egl_buffer->waiting_source) {
2578 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2579 wl_egl_buffer->waiting_source = NULL;
2582 wl_egl_buffer->waiting_source =
2583 tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2584 wl_egl_buffer->acquire_fence_fd, &buffer_funcs,
2585 SOURCE_TYPE_DISPOSABLE);
2586 wl_egl_buffer->status = WAITING_SIGNALED;
2588 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2589 wl_egl_buffer->acquire_fence_fd);
2591 ready_to_commit = TPL_FALSE;
2595 if (ready_to_commit) {
2596 if (!wl_egl_display->use_wait_vblank || wl_egl_surface->vblank_done)
2597 ready_to_commit = TPL_TRUE;
/* Vblank pending: park the buffer; __cb_tdm_client_vblank commits
 * it later. */
2599 wl_egl_buffer->status = WAITING_VBLANK;
2600 __tpl_list_push_back(wl_egl_surface->vblank_waiting_buffers, wl_egl_buffer);
2601 ready_to_commit = TPL_FALSE;
2605 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2607 if (ready_to_commit)
2608 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2611 return TPL_ERROR_NONE;
2614 /* -- BEGIN -- tdm_client vblank callback function */
/* tdm_client vblank handler: marks the surface's vblank as done and
 * commits the oldest buffer waiting for vblank (if any). A TDM timeout
 * is logged but treated as a normal vblank. */
2616 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
2617 unsigned int sequence, unsigned int tv_sec,
2618 unsigned int tv_usec, void *user_data)
2620 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)user_data;
2621 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2623 TRACE_ASYNC_END((int)wl_egl_surface, "WAIT_VBLANK");
2624 TPL_DEBUG("[VBLANK] wl_egl_surface(%p)", wl_egl_surface);
2626 if (error == TDM_ERROR_TIMEOUT)
2627 TPL_WARN("[TDM_ERROR_TIMEOUT] It will keep going. wl_egl_surface(%p)",
2630 wl_egl_surface->vblank_done = TPL_TRUE;
2632 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
/* Pop the front buffer queued by the fence-wait/acquire paths. */
2633 wl_egl_buffer = (tpl_wl_egl_buffer_t *)__tpl_list_pop_front(
2634 wl_egl_surface->vblank_waiting_buffers,
2637 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2638 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2640 /* -- END -- tdm_client vblank callback function */
/* zwp_linux_buffer_release_v1 "fenced_release" handler: the compositor
 * released the buffer together with a release fence fd. Stores the
 * fence on the wl_egl_buffer, marks it RELEASED, returns it to the
 * tbm_queue and drops the acquire-time ref. Only acts on buffers in
 * COMMITTED state. */
2643 __cb_buffer_fenced_release(void *data,
2644 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
2646 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2647 tbm_surface_h tbm_surface = NULL;
2649 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2651 tbm_surface = wl_egl_buffer->tbm_surface;
2653 if (tbm_surface_internal_is_valid(tbm_surface)) {
2654 if (wl_egl_buffer->status == COMMITTED) {
2655 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2656 tbm_surface_queue_error_e tsq_err;
2658 tpl_gmutex_lock(&wl_egl_buffer->mutex);
/* The release object is one-shot; destroy it now that it fired. */
2660 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
2661 wl_egl_buffer->buffer_release = NULL;
/* Ownership of `fence` passes to the buffer; the next dequeue hands
 * it out as release_fence. */
2663 wl_egl_buffer->release_fence_fd = fence;
2664 wl_egl_buffer->status = RELEASED;
2666 TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
2667 _get_tbm_surface_bo_name(tbm_surface),
2669 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2670 _get_tbm_surface_bo_name(tbm_surface));
2673 "[FENCED_RELEASE] wl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2674 wl_egl_buffer->wl_buffer, tbm_surface,
2675 _get_tbm_surface_bo_name(tbm_surface),
2678 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2680 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2682 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2683 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
2685 tbm_surface_internal_unref(tbm_surface);
2688 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* zwp_linux_buffer_release_v1 "immediate_release" handler: identical to
 * the fenced variant except no fence accompanies the release, so
 * release_fence_fd is reset to -1 (buffer reusable immediately). */
2693 __cb_buffer_immediate_release(void *data,
2694 struct zwp_linux_buffer_release_v1 *release)
2696 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2697 tbm_surface_h tbm_surface = NULL;
2699 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2701 tbm_surface = wl_egl_buffer->tbm_surface;
2703 if (tbm_surface_internal_is_valid(tbm_surface)) {
2704 if (wl_egl_buffer->status == COMMITTED) {
2705 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2706 tbm_surface_queue_error_e tsq_err;
2708 tpl_gmutex_lock(&wl_egl_buffer->mutex);
/* One-shot release object: destroy after it fires. */
2710 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
2711 wl_egl_buffer->buffer_release = NULL;
/* No fence: -1 signals "usable immediately" to the dequeue path. */
2713 wl_egl_buffer->release_fence_fd = -1;
2714 wl_egl_buffer->status = RELEASED;
2716 TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
2717 _get_tbm_surface_bo_name(tbm_surface));
2718 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2719 _get_tbm_surface_bo_name(tbm_surface));
2722 "[IMMEDIATE_RELEASE] wl_buffer(%p) tbm_surface(%p) bo(%d)",
2723 wl_egl_buffer->wl_buffer, tbm_surface,
2724 _get_tbm_surface_bo_name(tbm_surface));
2726 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2728 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2730 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2731 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
2733 tbm_surface_internal_unref(tbm_surface);
2736 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* Listener for per-commit zwp_linux_buffer_release_v1 objects
 * (explicit sync path). Note: "listner" spelling is kept as-is since
 * other code references this identifier. */
2740 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
2741 __cb_buffer_fenced_release,
2742 __cb_buffer_immediate_release,
/* wl_buffer "release" handler (non-explicit-sync path): the compositor
 * is done with the buffer. If it was COMMITTED, returns it to the
 * tbm_queue, marks it RELEASED, and drops the acquire-time ref (only
 * when the queue release succeeded). */
2746 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
2748 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2749 tbm_surface_h tbm_surface = NULL;
2751 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer)
2753 tbm_surface = wl_egl_buffer->tbm_surface;
2755 if (tbm_surface_internal_is_valid(tbm_surface)) {
2756 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
/* Start from INVALID so a non-COMMITTED buffer skips the unref below. */
2757 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
2759 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2761 if (wl_egl_buffer->status == COMMITTED) {
2763 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2765 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2766 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
2768 wl_egl_buffer->status = RELEASED;
2770 TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
2771 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2772 _get_tbm_surface_bo_name(tbm_surface));
2774 TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
2775 wl_egl_buffer->wl_buffer, tbm_surface,
2776 _get_tbm_surface_bo_name(tbm_surface));
2779 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Unref only when the queue actually took the buffer back. */
2781 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
2782 tbm_surface_internal_unref(tbm_surface);
2784 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* wl_buffer listener; the cast adapts __cb_wl_buffer_release's
 * struct wl_proxy * parameter to the listener's wl_buffer * signature. */
2788 static const struct wl_buffer_listener wl_buffer_release_listener = {
2789 (void *)__cb_wl_buffer_release,
/* wp_presentation_feedback "sync_output" event: intentionally unused;
 * only presented/discarded matter to this backend. */
2793 __cb_presentation_feedback_sync_output(void *data,
2794 struct wp_presentation_feedback *presentation_feedback,
2795 struct wl_output *output)
2798 TPL_IGNORE(presentation_feedback);
/* wp_presentation_feedback "presented" event: the frame reached the
 * screen. Signals the per-frame presentation sync eventfd, closes it,
 * destroys the feedback object and removes the pst_feedback entry from
 * the surface's pending list. Timing arguments are ignored. */
2804 __cb_presentation_feedback_presented(void *data,
2805 struct wp_presentation_feedback *presentation_feedback,
2809 uint32_t refresh_nsec,
2814 TPL_IGNORE(tv_sec_hi);
2815 TPL_IGNORE(tv_sec_lo);
2816 TPL_IGNORE(tv_nsec);
2817 TPL_IGNORE(refresh_nsec);
2822 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
2823 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
2825 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2827 TPL_DEBUG("[FEEDBACK][PRESENTED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
2828 pst_feedback, presentation_feedback, pst_feedback->bo_name);
/* Wake the waiter on the presentation sync eventfd, then close it. */
2830 if (pst_feedback->pst_sync_fd != -1) {
2831 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
2833 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
2834 pst_feedback->pst_sync_fd);
2837 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
2838 "[PRESENTATION_SYNC] bo(%d)",
2839 pst_feedback->bo_name);
2841 close(pst_feedback->pst_sync_fd);
2842 pst_feedback->pst_sync_fd = -1;
2845 wp_presentation_feedback_destroy(presentation_feedback);
2847 pst_feedback->presentation_feedback = NULL;
2848 pst_feedback->wl_egl_surface = NULL;
2849 pst_feedback->bo_name = 0;
2851 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
2856 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* wp_presentation_feedback "discarded" event: the frame was never shown.
 * Handled the same as "presented": the sync eventfd is still signaled
 * (so waiters don't hang) and the feedback entry is cleaned up. */
2860 __cb_presentation_feedback_discarded(void *data,
2861 struct wp_presentation_feedback *presentation_feedback)
2863 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
2864 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
2866 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2868 TPL_DEBUG("[FEEDBACK][DISCARDED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
2869 pst_feedback, presentation_feedback, pst_feedback->bo_name);
2871 if (pst_feedback->pst_sync_fd != -1) {
2872 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
2874 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
2875 pst_feedback->pst_sync_fd);
2878 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
2879 "[PRESENTATION_SYNC] bo(%d)",
2880 pst_feedback->bo_name);
2882 close(pst_feedback->pst_sync_fd);
2883 pst_feedback->pst_sync_fd = -1;
2886 wp_presentation_feedback_destroy(presentation_feedback);
2888 pst_feedback->presentation_feedback = NULL;
2889 pst_feedback->wl_egl_surface = NULL;
2890 pst_feedback->bo_name = 0;
2892 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
2897 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Listener for per-frame wp_presentation_feedback objects. */
2900 static const struct wp_presentation_feedback_listener feedback_listener = {
2901 __cb_presentation_feedback_sync_output, /* sync_output feedback -*/
2902 __cb_presentation_feedback_presented,
2903 __cb_presentation_feedback_discarded
/* Arms a tdm_client vblank wait for the surface (creating the vblank
 * object lazily on first use). On success clears vblank_done so commits
 * are deferred until __cb_tdm_client_vblank fires.
 * Returns TPL_ERROR_NONE, TPL_ERROR_OUT_OF_MEMORY if vblank creation
 * fails, or TPL_ERROR_INVALID_OPERATION on tdm_client_vblank_wait
 * failure. */
2907 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
2909 tdm_error tdm_err = TDM_ERROR_NONE;
2910 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2912 if (wl_egl_surface->vblank == NULL) {
2913 wl_egl_surface->vblank =
2914 _thread_create_tdm_client_vblank(wl_egl_display->tdm_client);
2915 if (!wl_egl_surface->vblank) {
2916 TPL_WARN("Failed to create vblank. wl_egl_surface(%p)",
2918 return TPL_ERROR_OUT_OF_MEMORY;
/* post_interval controls how many vblanks to wait between commits. */
2922 tdm_err = tdm_client_vblank_wait(wl_egl_surface->vblank,
2923 wl_egl_surface->post_interval,
2924 __cb_tdm_client_vblank,
2925 (void *)wl_egl_surface);
2927 if (tdm_err == TDM_ERROR_NONE) {
2928 wl_egl_surface->vblank_done = TPL_FALSE;
2929 TRACE_ASYNC_BEGIN((int)wl_egl_surface, "WAIT_VBLANK");
2931 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
2932 return TPL_ERROR_INVALID_OPERATION;
2935 return TPL_ERROR_NONE;
/* Runs in the wayland-egl thread: performs the full wl_surface commit
 * sequence for one acquired buffer — create wl_buffer (lazy), request
 * presentation feedback, apply transform, attach, damage, set buffer
 * serial, forward the acquire fence / create the release object for
 * explicit sync, commit + flush, optionally arm vblank wait, and signal
 * the per-frame commit sync eventfd.
 * NOTE(review): some original lines (braces, a few arguments, `else`
 * keywords, local declarations such as `version`) are missing from this
 * excerpt; code is kept verbatim. */
2939 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
2940 tpl_wl_egl_buffer_t *wl_egl_buffer)
2942 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2943 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
2944 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2947 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2948 "wl_egl_buffer sould be not NULL");
/* Lazily create the wl_buffer if the acquire path did not already. */
2950 if (wl_egl_buffer->wl_buffer == NULL) {
2951 wl_egl_buffer->wl_buffer =
2952 (struct wl_proxy *)wayland_tbm_client_create_buffer(
2953 wl_egl_display->wl_tbm_client,
2954 wl_egl_buffer->tbm_surface);
2956 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
2957 "[FATAL] Failed to create wl_buffer");
2959 wl_buffer_add_listener((void *)wl_egl_buffer->wl_buffer,
2960 &wl_buffer_release_listener, wl_egl_buffer);
/* wl_surface version gates damage vs damage_buffer below. */
2962 version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
2964 /* create presentation feedback and add listener */
2965 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2966 if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
2968 struct pst_feedback *pst_feedback = NULL;
2969 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
2971 pst_feedback->presentation_feedback =
2972 wp_presentation_feedback(wl_egl_display->presentation,
2975 pst_feedback->wl_egl_surface = wl_egl_surface;
2976 pst_feedback->bo_name = wl_egl_buffer->bo_name;
/* Transfer the per-frame sync fd from buffer to feedback entry; it
 * is signaled/closed in the presented/discarded callbacks. */
2978 pst_feedback->pst_sync_fd = wl_egl_buffer->presentation_sync_fd;
2979 wl_egl_buffer->presentation_sync_fd = -1;
2981 wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
2982 &feedback_listener, pst_feedback);
2983 __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
2984 TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
2985 "[PRESENTATION_SYNC] bo(%d)",
2986 pst_feedback->bo_name);
/* Feedback creation failed: signal and close the fd anyway so any
 * waiter is not stuck forever. */
2988 TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
2990 _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
2991 close(wl_egl_buffer->presentation_sync_fd);
2992 wl_egl_buffer->presentation_sync_fd = -1;
2995 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Window-rotation transform is applied once per change via wl_tbm. */
2997 if (wl_egl_buffer->w_rotated == TPL_TRUE) {
2998 wayland_tbm_client_set_buffer_transform(
2999 wl_egl_display->wl_tbm_client,
3000 (void *)wl_egl_buffer->wl_buffer,
3001 wl_egl_buffer->w_transform);
3002 wl_egl_buffer->w_rotated = TPL_FALSE;
/* Only re-send the surface transform when it actually changed. */
3005 if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3006 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3007 wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
3010 if (wl_egl_window) {
3011 wl_egl_window->attached_width = wl_egl_buffer->width;
3012 wl_egl_window->attached_height = wl_egl_buffer->height;
3015 wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3016 wl_egl_buffer->dx, wl_egl_buffer->dy);
/* No damage rects provided: damage the whole buffer. */
3018 if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3020 wl_surface_damage(wl_surface,
3021 wl_egl_buffer->dx, wl_egl_buffer->dy,
3022 wl_egl_buffer->width, wl_egl_buffer->height);
3024 wl_surface_damage_buffer(wl_surface,
3026 wl_egl_buffer->width, wl_egl_buffer->height);
/* With rects: flip Y for wl_surface_damage (GL origin is bottom-left,
 * wayland's is top-left). */
3030 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3032 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3033 wl_egl_buffer->rects[i * 4 + 3]);
3035 wl_surface_damage(wl_surface,
3036 wl_egl_buffer->rects[i * 4 + 0],
3038 wl_egl_buffer->rects[i * 4 + 2],
3039 wl_egl_buffer->rects[i * 4 + 3]);
3041 wl_surface_damage_buffer(wl_surface,
3042 wl_egl_buffer->rects[i * 4 + 0],
3044 wl_egl_buffer->rects[i * 4 + 2],
3045 wl_egl_buffer->rects[i * 4 + 3]);
3050 wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3051 (void *)wl_egl_buffer->wl_buffer,
3052 wl_egl_buffer->serial);
/* Explicit sync: hand the acquire fence to the compositor and create a
 * one-shot release object whose callbacks recycle this buffer. */
3054 if (wl_egl_display->use_explicit_sync &&
3055 wl_egl_surface->surface_sync) {
3057 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3058 wl_egl_buffer->acquire_fence_fd);
3059 TPL_DEBUG("[SET_ACQUIRE_FENCE] wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3060 wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3061 close(wl_egl_buffer->acquire_fence_fd);
3062 wl_egl_buffer->acquire_fence_fd = -1;
3064 wl_egl_buffer->buffer_release =
3065 zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3066 if (!wl_egl_buffer->buffer_release) {
3067 TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3069 zwp_linux_buffer_release_v1_add_listener(
3070 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3071 TPL_DEBUG("add explicit_sync_release_listener.");
3075 wl_surface_commit(wl_surface);
3077 wl_display_flush(wl_egl_display->wl_display);
3079 TRACE_ASYNC_BEGIN((int)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3080 wl_egl_buffer->bo_name);
3082 wl_egl_buffer->need_to_commit = TPL_FALSE;
3083 wl_egl_buffer->status = COMMITTED;
3086 "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3087 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3088 wl_egl_buffer->bo_name);
/* Arm the next vblank so subsequent commits are paced. */
3090 if (wl_egl_display->use_wait_vblank &&
3091 _thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3092 TPL_ERR("Failed to set wait vblank.");
/* Signal the commit sync eventfd for this frame and close it. */
3094 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3096 if (wl_egl_buffer->commit_sync_fd != -1) {
3097 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3099 TPL_ERR("Failed to send commit_sync signal to fd(%d)", wl_egl_buffer->commit_sync_fd);
3102 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3103 wl_egl_buffer->bo_name);
3104 TPL_DEBUG("[COMMIT_SYNC][SEND] wl_egl_surface(%p) commit_sync_fd(%d)",
3105 wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3107 close(wl_egl_buffer->commit_sync_fd);
3108 wl_egl_buffer->commit_sync_fd = -1;
3111 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
/* Writes a 64-bit value to an eventfd to signal waiters (used for the
 * commit/presentation sync fds). Rejects fd == -1 and logs write
 * failures. NOTE(review): the value initialization and return
 * statements lie outside this excerpt. */
3115 _write_to_eventfd(int eventfd)
3120 if (eventfd == -1) {
3121 TPL_ERR("Invalid fd(-1)");
3125 ret = write(eventfd, &value, sizeof(uint64_t));
3127 TPL_ERR("failed to write to fd(%d)", eventfd);
/* Populates the display backend vtable with this file's wl_egl thread
 * implementations. Called by the tpl core during backend selection. */
3135 __tpl_display_init_backend_wl_egl_thread2(tpl_display_backend_t *backend)
3137 TPL_ASSERT(backend);
3139 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3140 backend->data = NULL;
3142 backend->init = __tpl_wl_egl_display_init;
3143 backend->fini = __tpl_wl_egl_display_fini;
3144 backend->query_config = __tpl_wl_egl_display_query_config;
3145 backend->filter_config = __tpl_wl_egl_display_filter_config;
3146 backend->get_window_info = __tpl_wl_egl_display_get_window_info;
3147 backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
3148 backend->get_buffer_from_native_pixmap =
3149 __tpl_wl_egl_display_get_buffer_from_native_pixmap;
/* Populates the surface backend vtable with this file's wl_egl thread
 * implementations (dequeue/enqueue/cancel and related hooks). */
3153 __tpl_surface_init_backend_wl_egl_thread2(tpl_surface_backend_t *backend)
3155 TPL_ASSERT(backend);
3157 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3158 backend->data = NULL;
3160 backend->init = __tpl_wl_egl_surface_init;
3161 backend->fini = __tpl_wl_egl_surface_fini;
3162 backend->validate = __tpl_wl_egl_surface_validate;
3163 backend->cancel_dequeued_buffer =
3164 __tpl_wl_egl_surface_cancel_dequeued_buffer;
3165 backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
3166 backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
3167 backend->set_rotation_capability =
3168 __tpl_wl_egl_surface_set_rotation_capability;
3169 backend->set_post_interval =
3170 __tpl_wl_egl_surface_set_post_interval;
3172 __tpl_wl_egl_surface_get_size;
/* Final destructor for a wl_egl_buffer (invoked when its tbm_surface is
 * freed). Unregisters it from the surface's buffer array, destroys the
 * wl_buffer proxy, signals+closes any still-pending commit/presentation
 * sync fds so waiters don't hang, frees damage rects, and frees the
 * struct itself. */
3176 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
3178 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3179 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3181 TPL_INFO("[BUFFER_FREE]", "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3182 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface);
/* Remove this buffer's slot from the surface's tracking array. */
3184 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3185 if (wl_egl_buffer->idx >= 0 && wl_egl_surface->buffers[wl_egl_buffer->idx]) {
3186 wl_egl_surface->buffers[wl_egl_buffer->idx] = NULL;
3187 wl_egl_surface->buffer_cnt--;
3189 wl_egl_buffer->idx = -1;
3191 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
3193 wl_display_flush(wl_egl_display->wl_display);
3195 if (wl_egl_buffer->wl_buffer)
3196 wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
3197 (void *)wl_egl_buffer->wl_buffer);
/* Signal then close leftover sync fds so blocked clients wake up. */
3199 if (wl_egl_buffer->commit_sync_fd != -1) {
3200 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3202 TPL_ERR("Failed to send commit_sync signal to fd(%d)",
3203 wl_egl_buffer->commit_sync_fd);
3204 close(wl_egl_buffer->commit_sync_fd);
3205 wl_egl_buffer->commit_sync_fd = -1;
3208 if (wl_egl_buffer->presentation_sync_fd != -1) {
3209 int ret = _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
3211 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3212 wl_egl_buffer->presentation_sync_fd);
3213 close(wl_egl_buffer->presentation_sync_fd);
3214 wl_egl_buffer->presentation_sync_fd = -1;
3217 if (wl_egl_buffer->rects) {
3218 free(wl_egl_buffer->rects);
3219 wl_egl_buffer->rects = NULL;
3220 wl_egl_buffer->num_rects = 0;
3223 wl_egl_buffer->tbm_surface = NULL;
3224 wl_egl_buffer->bo_name = -1;
3226 free(wl_egl_buffer);
/* Returns the exported (global) bo name of the tbm_surface's first bo;
 * used throughout this file as a stable id for logging/tracing. */
3230 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
3232 	return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
/* Debug helper: logs every tracked wl_egl_buffer of the surface (index,
 * tbm_surface, bo name, status) under buffers_mutex. */
3236 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
3240 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3241 TPL_INFO("[BUFFERS_INFO]", "wl_egl_surface(%p) buffer_cnt(%d)",
3242 wl_egl_surface, wl_egl_surface->buffer_cnt);
3243 for (idx = 0; idx < BUFFER_ARRAY_SIZE; idx++) {
3244 tpl_wl_egl_buffer_t *wl_egl_buffer = wl_egl_surface->buffers[idx];
3245 if (wl_egl_buffer) {
3247 "INDEX[%d] | wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%d)",
3248 idx, wl_egl_buffer, wl_egl_buffer->tbm_surface,
3249 wl_egl_buffer->bo_name,
3250 wl_egl_buffer->status);
3253 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);