2 #include "tpl_internal.h"
7 #include <sys/eventfd.h>
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
19 #include <tdm_client.h>
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
30 #include <presentation-time-client-protocol.h>
31 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
34 #include "tpl_utils_gthread.h"
36 static int wl_egl_buffer_key;
37 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
39 /* In wayland, application and compositor create its own drawing buffers. Recommend size is more than 2. */
40 #define BUFFER_ARRAY_SIZE 9
/* Forward typedefs for the backend's three core objects plus the per-surface
 * vblank bookkeeping record (all defined below in this file). */
42 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
43 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
44 typedef struct _tpl_wl_egl_buffer tpl_wl_egl_buffer_t;
45 typedef struct _surface_vblank tpl_surface_vblank_t;
/* Declaration-macro idiom used throughout this file:
 *   tpl_wl_egl_display_t wl_egl_display(ptr);
 * expands to
 *   tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)ptr;
 * i.e. each macro declares-and-casts a local pointer named after the macro.
 * Note the expansion supplies both the '*' and the trailing ';'. */
47 #define wl_egl_display(ptr) *wl_egl_display = (tpl_wl_egl_display_t *)ptr;
48 #define wl_egl_surface(ptr) *wl_egl_surface = (tpl_wl_egl_surface_t *)ptr;
49 #define wl_egl_buffer(ptr) *wl_egl_buffer = (tpl_wl_egl_buffer_t *)ptr;
50 #define tizen_private(ptr) *tizen_private = (struct tizen_private *)ptr;
52 struct _tpl_wl_egl_display {
53 tpl_gsource *disp_source;
55 tpl_gmutex wl_event_mutex;
57 struct wl_display *wl_display;
58 struct wl_event_queue *ev_queue;
59 struct wayland_tbm_client *wl_tbm_client;
60 int last_error; /* errno of the last wl_display error*/
62 tpl_bool_t wl_initialized;
64 tpl_bool_t use_wait_vblank;
65 tpl_bool_t use_explicit_sync;
68 /* To make sure that tpl_gsource has been successfully finalized. */
69 tpl_bool_t gsource_finalized;
70 tpl_gmutex disp_mutex;
73 tdm_client *tdm_client;
74 tpl_gsource *tdm_source;
76 tpl_bool_t tdm_initialized;
77 tpl_list_t *surface_vblanks;
79 /* To make sure that tpl_gsource has been successfully finalized. */
80 tpl_bool_t gsource_finalized;
85 #if TIZEN_FEATURE_ENABLE
86 struct tizen_surface_shm *tss; /* used for surface buffer_flush */
87 struct wp_presentation *presentation; /* for presentation feedback */
88 struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
/* NOTE(review): truncated fragments. The surf_message enum lost its members,
 * and _tpl_wl_egl_surface lost several fields (code below reads
 * wl_egl_surface->format, so at least width/height/format fields existed in
 * the dropped "surface information" section). Restore from upstream before
 * relying on offsets. */
92 typedef enum surf_message {
/* Per-window backend state; one instance per tpl_surface/wl_egl_window. */
98 struct _tpl_wl_egl_surface {
99 tpl_gsource *surf_source;
101 tbm_surface_queue_h tbm_queue;
104 struct wl_egl_window *wl_egl_window;
105 struct wl_surface *wl_surface;
107 #if TIZEN_FEATURE_ENABLE
108 struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
109 struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
112 tpl_surface_vblank_t *vblank;
114 /* surface information */
121 int latest_transform;
125 tpl_wl_egl_display_t *wl_egl_display;
126 tpl_surface_t *tpl_surface;
128 /* wl_egl_buffer list for buffer tracing */
130 int buffer_cnt; /* the number of using wl_egl_buffers */
131 tpl_gmutex buffers_mutex;
132 tbm_surface_h last_enq_buffer;
134 tpl_list_t *presentation_feedbacks; /* for tracing presentation feedbacks */
146 tpl_gmutex surf_mutex;
149 surf_message sent_message;
151 /* for waiting draw done */
152 tpl_bool_t use_render_done_fence;
153 tpl_bool_t is_activated;
154 tpl_bool_t reset; /* TRUE if queue reseted by external */
155 tpl_bool_t need_to_enqueue;
156 tpl_bool_t prerotation_capability;
157 tpl_bool_t vblank_done;
158 tpl_bool_t vblank_enable;
159 tpl_bool_t set_serial_is_used;
160 tpl_bool_t initialized_in_thread;
161 tpl_bool_t frontbuffer_activated;
163 /* To make sure that tpl_gsource has been successfully finalized. */
164 tpl_bool_t gsource_finalized;
/* Per-surface tdm vblank handle plus the buffers queued while waiting for
 * the next vblank. NOTE(review): closing brace and some members lost to
 * truncation. */
167 struct _surface_vblank {
168 tdm_client_vblank *tdm_vblank;
169 tpl_wl_egl_surface_t *wl_egl_surface;
170 tpl_list_t *waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
/* Buffer lifecycle states; truncated — status_to_string[7] below implies
 * seven states (indices 0..6), only 4 and 5 survived extraction. */
174 typedef enum buffer_status {
179 WAITING_SIGNALED, // 4
184 static const char *status_to_string[7] = {
189 "WAITING_SIGNALED", // 4
190 "WAITING_VBLANK", // 5
/* Per-buffer backend state; attached to the tbm_surface via user data
 * (KEY_WL_EGL_BUFFER). NOTE(review): truncated fragment — several fields
 * and the closing brace were lost; the pst_feedback struct at the end is
 * likewise incomplete. */
194 struct _tpl_wl_egl_buffer {
195 tbm_surface_h tbm_surface;
198 struct wl_proxy *wl_buffer;
199 int dx, dy; /* position to attach to wl_surface */
200 int width, height; /* size to attach to wl_surface */
202 buffer_status_t status; /* for tracing buffer status */
204 /* for damage region */
208 /* for wayland_tbm_client_set_buffer_transform */
210 tpl_bool_t w_rotated;
212 /* for wl_surface_set_buffer_transform */
215 /* for wayland_tbm_client_set_buffer_serial */
218 /* for checking need_to_commit (frontbuffer mode) */
219 tpl_bool_t need_to_commit;
221 /* for checking draw done */
222 tpl_bool_t draw_done;
224 #if TIZEN_FEATURE_ENABLE
225 /* to get release event via zwp_linux_buffer_release_v1 */
226 struct zwp_linux_buffer_release_v1 *buffer_release;
228 /* each buffers own its release_fence_fd, until it passes ownership
230 int32_t release_fence_fd;
232 /* each buffers own its acquire_fence_fd.
233 * If it use zwp_linux_buffer_release_v1 the ownership of this fd
234 * will be passed to display server
235 * Otherwise it will be used as a fence waiting for render done
237 int32_t acquire_fence_fd;
239 /* Fd to send a signal when wl_surface_commit with this buffer */
240 int32_t commit_sync_fd;
242 /* Fd to send a siganl when receive the
243 * presentation feedback from display server */
244 int32_t presentation_sync_fd;
246 tpl_gsource *waiting_source;
251 tpl_wl_egl_surface_t *wl_egl_surface;
254 #if TIZEN_FEATURE_ENABLE
255 struct pst_feedback {
256 /* to get presentation feedback from display server */
257 struct wp_presentation_feedback *presentation_feedback;
262 tpl_wl_egl_surface_t *wl_egl_surface;
/* Forward declarations for helpers defined later in this file.
 * NOTE(review): the return-type lines of most prototypes were dropped by
 * extraction (only _get_wl_egl_buffer kept its "static tpl_wl_egl_buffer_t *"
 * line); restore them from upstream. */
267 static const struct wl_buffer_listener wl_buffer_release_listener;
270 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
272 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
274 _check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface);
276 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
277 static tpl_wl_egl_buffer_t *
278 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
280 _write_to_eventfd(int eventfd, uint64_t value);
282 send_signal(int fd, const char *type);
284 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
286 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
288 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
289 tpl_wl_egl_buffer_t *wl_egl_buffer);
291 __cb_surface_vblank_free(void *data);
293 static struct tizen_private *
294 tizen_private_create()
296 struct tizen_private *private = calloc(1, sizeof(struct tizen_private));
298 private->magic = WL_EGL_TIZEN_MAGIC;
299 private->rotation = 0;
300 private->frontbuffer_mode = 0;
301 private->transform = 0;
302 private->window_transform = 0;
305 private->data = NULL;
306 private->rotate_callback = NULL;
307 private->get_rotation_capability = NULL;
308 private->set_window_serial_callback = NULL;
309 private->set_frontbuffer_callback = NULL;
310 private->create_commit_sync_fd = NULL;
311 private->create_presentation_sync_fd = NULL;
312 private->merge_sync_fds = NULL;
319 _check_native_handle_is_wl_display(tpl_handle_t display)
321 struct wl_interface *wl_egl_native_dpy = *(void **) display;
323 if (!wl_egl_native_dpy) {
324 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
328 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
329 is a memory address pointing the structure of wl_display_interface. */
330 if (wl_egl_native_dpy == &wl_display_interface)
333 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
334 strlen(wl_display_interface.name)) == 0) {
342 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
344 tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
345 tdm_error tdm_err = TDM_ERROR_NONE;
349 if (!wl_egl_display) {
350 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
351 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
355 tdm_err = tdm_client_handle_events(wl_egl_display->tdm.tdm_client);
357 /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
358 * When tdm_source is no longer available due to an unexpected situation,
359 * wl_egl_thread must remove it from the thread and destroy it.
360 * In that case, tdm_vblank can no longer be used for surfaces and displays
361 * that used this tdm_source. */
362 if (tdm_err != TDM_ERROR_NONE) {
363 TPL_ERR("Error occured in tdm_client_handle_events. tdm_err(%d)",
365 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
367 tpl_gsource_destroy(gsource, TPL_FALSE);
369 wl_egl_display->tdm.tdm_source = NULL;
378 __thread_func_tdm_finalize(tpl_gsource *gsource)
380 tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
382 tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
384 TPL_INFO("[TDM_CLIENT_FINI]",
385 "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
386 wl_egl_display, wl_egl_display->tdm.tdm_client,
387 wl_egl_display->tdm.tdm_display_fd);
389 if (wl_egl_display->tdm.tdm_client) {
391 if (wl_egl_display->tdm.surface_vblanks) {
392 __tpl_list_free(wl_egl_display->tdm.surface_vblanks,
393 __cb_surface_vblank_free);
394 wl_egl_display->tdm.surface_vblanks = NULL;
397 tdm_client_destroy(wl_egl_display->tdm.tdm_client);
398 wl_egl_display->tdm.tdm_client = NULL;
399 wl_egl_display->tdm.tdm_display_fd = -1;
400 wl_egl_display->tdm.tdm_source = NULL;
403 wl_egl_display->use_wait_vblank = TPL_FALSE;
404 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
405 wl_egl_display->tdm.gsource_finalized = TPL_TRUE;
407 tpl_gcond_signal(&wl_egl_display->tdm.tdm_cond);
408 tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
411 static tpl_gsource_functions tdm_funcs = {
414 .dispatch = __thread_func_tdm_dispatch,
415 .finalize = __thread_func_tdm_finalize,
419 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
421 tdm_client *tdm_client = NULL;
422 int tdm_display_fd = -1;
423 tdm_error tdm_err = TDM_ERROR_NONE;
425 tdm_client = tdm_client_create(&tdm_err);
426 if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
427 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
428 return TPL_ERROR_INVALID_OPERATION;
431 tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
432 if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
433 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
434 tdm_client_destroy(tdm_client);
435 return TPL_ERROR_INVALID_OPERATION;
438 wl_egl_display->tdm.tdm_display_fd = tdm_display_fd;
439 wl_egl_display->tdm.tdm_client = tdm_client;
440 wl_egl_display->tdm.tdm_source = NULL;
441 wl_egl_display->tdm.tdm_initialized = TPL_TRUE;
442 wl_egl_display->tdm.surface_vblanks = __tpl_list_alloc();
444 TPL_INFO("[TDM_CLIENT_INIT]",
445 "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
446 wl_egl_display, tdm_client, tdm_display_fd);
448 return TPL_ERROR_NONE;
451 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
455 __cb_wl_resistry_global_callback(void *data, struct wl_registry *wl_registry,
456 uint32_t name, const char *interface,
459 #if TIZEN_FEATURE_ENABLE
460 tpl_wl_egl_display_t wl_egl_display(data);
462 if (!strcmp(interface, "tizen_surface_shm")) {
463 wl_egl_display->tss =
464 wl_registry_bind(wl_registry,
466 &tizen_surface_shm_interface,
467 ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
468 version : IMPL_TIZEN_SURFACE_SHM_VERSION));
469 wl_egl_display->use_tss = TPL_TRUE;
470 } else if (!strcmp(interface, wp_presentation_interface.name)) {
471 wl_egl_display->presentation =
472 wl_registry_bind(wl_registry,
473 name, &wp_presentation_interface, 1);
474 TPL_LOG_D("[REGISTRY_BIND]",
475 "wl_egl_display(%p) bind wp_presentation_interface",
477 } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
478 char *env = tpl_getenv("TPL_EFS");
479 if (env && !atoi(env)) {
480 wl_egl_display->use_explicit_sync = TPL_FALSE;
482 wl_egl_display->explicit_sync =
483 wl_registry_bind(wl_registry, name,
484 &zwp_linux_explicit_synchronization_v1_interface, 1);
485 wl_egl_display->use_explicit_sync = TPL_TRUE;
486 TPL_LOG_D("[REGISTRY_BIND]",
487 "wl_egl_display(%p) bind zwp_linux_explicit_synchronization_v1_interface",
495 __cb_wl_resistry_global_remove_callback(void *data,
496 struct wl_registry *wl_registry,
501 static const struct wl_registry_listener registry_listener = {
502 __cb_wl_resistry_global_callback,
503 __cb_wl_resistry_global_remove_callback
507 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
508 const char *func_name)
512 strerror_r(errno, buf, sizeof(buf));
514 if (wl_egl_display->last_error == errno)
517 TPL_ERR("falied to %s. error:%d(%s)", func_name, errno, buf);
519 dpy_err = wl_display_get_error(wl_egl_display->wl_display);
520 if (dpy_err == EPROTO) {
521 const struct wl_interface *err_interface;
522 uint32_t err_proxy_id, err_code;
523 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
526 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
527 (err_interface ? err_interface->name : "UNKNOWN"),
528 err_code, err_proxy_id);
531 wl_egl_display->last_error = errno;
535 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
537 struct wl_registry *registry = NULL;
538 struct wl_event_queue *queue = NULL;
539 struct wl_display *display_wrapper = NULL;
540 struct wl_proxy *wl_tbm = NULL;
541 struct wayland_tbm_client *wl_tbm_client = NULL;
543 tpl_result_t result = TPL_ERROR_NONE;
545 queue = wl_display_create_queue(wl_egl_display->wl_display);
547 TPL_ERR("Failed to create wl_queue wl_display(%p)",
548 wl_egl_display->wl_display);
549 result = TPL_ERROR_INVALID_OPERATION;
553 wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
554 if (!wl_egl_display->ev_queue) {
555 TPL_ERR("Failed to create wl_queue wl_display(%p)",
556 wl_egl_display->wl_display);
557 result = TPL_ERROR_INVALID_OPERATION;
561 display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
562 if (!display_wrapper) {
563 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
564 wl_egl_display->wl_display);
565 result = TPL_ERROR_INVALID_OPERATION;
569 wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
571 registry = wl_display_get_registry(display_wrapper);
573 TPL_ERR("Failed to create wl_registry");
574 result = TPL_ERROR_INVALID_OPERATION;
578 wl_proxy_wrapper_destroy(display_wrapper);
579 display_wrapper = NULL;
581 wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
582 if (!wl_tbm_client) {
583 TPL_ERR("Failed to initialize wl_tbm_client.");
584 result = TPL_ERROR_INVALID_CONNECTION;
588 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
590 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
591 result = TPL_ERROR_INVALID_CONNECTION;
595 wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
596 wl_egl_display->wl_tbm_client = wl_tbm_client;
598 if (wl_registry_add_listener(registry, ®istry_listener,
600 TPL_ERR("Failed to wl_registry_add_listener");
601 result = TPL_ERROR_INVALID_OPERATION;
605 ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
607 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
608 result = TPL_ERROR_INVALID_OPERATION;
612 #if TIZEN_FEATURE_ENABLE
613 /* set tizen_surface_shm's queue as client's private queue */
614 if (wl_egl_display->tss) {
615 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
616 wl_egl_display->ev_queue);
617 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
620 if (wl_egl_display->presentation) {
621 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
622 wl_egl_display->ev_queue);
623 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
624 wl_egl_display->presentation);
627 if (wl_egl_display->explicit_sync) {
628 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
629 wl_egl_display->ev_queue);
630 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
631 wl_egl_display->explicit_sync);
634 wl_egl_display->wl_initialized = TPL_TRUE;
636 TPL_INFO("[WAYLAND_INIT]",
637 "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
638 wl_egl_display, wl_egl_display->wl_display,
639 wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
640 #if TIZEN_FEATURE_ENABLE
641 TPL_INFO("[WAYLAND_INIT]",
642 "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
643 wl_egl_display->tss, wl_egl_display->presentation,
644 wl_egl_display->explicit_sync);
648 wl_proxy_wrapper_destroy(display_wrapper);
650 wl_registry_destroy(registry);
652 wl_event_queue_destroy(queue);
658 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
660 /* If wl_egl_display is in prepared state, cancel it */
661 if (wl_egl_display->prepared) {
662 wl_display_cancel_read(wl_egl_display->wl_display);
663 wl_egl_display->prepared = TPL_FALSE;
666 if (wl_display_roundtrip_queue(wl_egl_display->wl_display,
667 wl_egl_display->ev_queue) == -1) {
668 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
671 #if TIZEN_FEATURE_ENABLE
672 if (wl_egl_display->tss) {
673 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
674 "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
675 wl_egl_display, wl_egl_display->tss);
676 tizen_surface_shm_destroy(wl_egl_display->tss);
677 wl_egl_display->tss = NULL;
680 if (wl_egl_display->presentation) {
681 TPL_INFO("[WP_PRESENTATION_DESTROY]",
682 "wl_egl_display(%p) wp_presentation(%p) fini.",
683 wl_egl_display, wl_egl_display->presentation);
684 wp_presentation_destroy(wl_egl_display->presentation);
685 wl_egl_display->presentation = NULL;
688 if (wl_egl_display->explicit_sync) {
689 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
690 "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
691 wl_egl_display, wl_egl_display->explicit_sync);
692 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
693 wl_egl_display->explicit_sync = NULL;
696 if (wl_egl_display->wl_tbm_client) {
697 struct wl_proxy *wl_tbm = NULL;
699 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
700 wl_egl_display->wl_tbm_client);
702 wl_proxy_set_queue(wl_tbm, NULL);
705 TPL_INFO("[WL_TBM_DEINIT]",
706 "wl_egl_display(%p) wl_tbm_client(%p)",
707 wl_egl_display, wl_egl_display->wl_tbm_client);
708 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
709 wl_egl_display->wl_tbm_client = NULL;
712 wl_event_queue_destroy(wl_egl_display->ev_queue);
714 wl_egl_display->ev_queue = NULL;
715 wl_egl_display->wl_initialized = TPL_FALSE;
717 TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
718 wl_egl_display, wl_egl_display->wl_display);
722 _thread_init(void *data)
724 tpl_wl_egl_display_t wl_egl_display(data);
726 if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
727 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
728 wl_egl_display, wl_egl_display->wl_display);
731 if (wl_egl_display->use_wait_vblank &&
732 _thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
733 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VLANK:DISABLED");
736 return wl_egl_display;
740 __thread_func_disp_prepare(tpl_gsource *gsource)
742 tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
744 /* If this wl_egl_display is already prepared,
745 * do nothing in this function. */
746 if (wl_egl_display->prepared)
749 /* If there is a last_error, there is no need to poll,
750 * so skip directly to dispatch.
751 * prepare -> dispatch */
752 if (wl_egl_display->last_error)
755 while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
756 wl_egl_display->ev_queue) != 0) {
757 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
758 wl_egl_display->ev_queue) == -1) {
759 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
763 wl_egl_display->prepared = TPL_TRUE;
765 wl_display_flush(wl_egl_display->wl_display);
771 __thread_func_disp_check(tpl_gsource *gsource)
773 tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
774 tpl_bool_t ret = TPL_FALSE;
776 if (!wl_egl_display->prepared)
779 /* If prepared, but last_error is set,
780 * cancel_read is executed and FALSE is returned.
781 * That can lead to G_SOURCE_REMOVE by calling disp_prepare again
782 * and skipping disp_check from prepare to disp_dispatch.
783 * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
784 if (wl_egl_display->prepared && wl_egl_display->last_error) {
785 wl_display_cancel_read(wl_egl_display->wl_display);
789 if (tpl_gsource_check_io_condition(gsource)) {
790 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
791 _wl_display_print_err(wl_egl_display, "read_event");
794 wl_display_cancel_read(wl_egl_display->wl_display);
798 wl_egl_display->prepared = TPL_FALSE;
804 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
806 tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
810 /* If there is last_error, SOURCE_REMOVE should be returned
811 * to remove the gsource from the main loop.
812 * This is because wl_egl_display is not valid since last_error was set.*/
813 if (wl_egl_display->last_error) {
817 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
818 if (tpl_gsource_check_io_condition(gsource)) {
819 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
820 wl_egl_display->ev_queue) == -1) {
821 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
825 wl_display_flush(wl_egl_display->wl_display);
826 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
832 __thread_func_disp_finalize(tpl_gsource *gsource)
834 tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
836 tpl_gmutex_lock(&wl_egl_display->disp_mutex);
837 TPL_LOG_D("[D_FINALIZE]", "wl_egl_display(%p) tpl_gsource(%p)",
838 wl_egl_display, gsource);
840 if (wl_egl_display->wl_initialized)
841 _thread_wl_display_fini(wl_egl_display);
843 wl_egl_display->gsource_finalized = TPL_TRUE;
845 tpl_gcond_signal(&wl_egl_display->disp_cond);
846 tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
852 static tpl_gsource_functions disp_funcs = {
853 .prepare = __thread_func_disp_prepare,
854 .check = __thread_func_disp_check,
855 .dispatch = __thread_func_disp_dispatch,
856 .finalize = __thread_func_disp_finalize,
860 __tpl_wl_egl_display_init(tpl_display_t *display)
862 tpl_wl_egl_display_t *wl_egl_display = NULL;
866 /* Do not allow default display in wayland. */
867 if (!display->native_handle) {
868 TPL_ERR("Invalid native handle for display.");
869 return TPL_ERROR_INVALID_PARAMETER;
872 if (!_check_native_handle_is_wl_display(display->native_handle)) {
873 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
874 return TPL_ERROR_INVALID_PARAMETER;
877 wl_egl_display = calloc(1, sizeof(tpl_wl_egl_display_t));
878 if (!wl_egl_display) {
879 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
880 return TPL_ERROR_OUT_OF_MEMORY;
883 display->backend.data = wl_egl_display;
884 display->bufmgr_fd = -1;
886 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
887 wl_egl_display->tdm.tdm_client = NULL;
888 wl_egl_display->tdm.tdm_display_fd = -1;
889 wl_egl_display->tdm.tdm_source = NULL;
891 wl_egl_display->wl_initialized = TPL_FALSE;
893 wl_egl_display->ev_queue = NULL;
894 wl_egl_display->wl_display = (struct wl_display *)display->native_handle;
895 wl_egl_display->last_error = 0;
896 wl_egl_display->use_tss = TPL_FALSE;
897 wl_egl_display->use_explicit_sync = TPL_FALSE; // default disabled
898 wl_egl_display->prepared = TPL_FALSE;
899 wl_egl_display->gsource_finalized = TPL_FALSE;
901 #if TIZEN_FEATURE_ENABLE
902 /* Wayland Interfaces */
903 wl_egl_display->tss = NULL;
904 wl_egl_display->presentation = NULL;
905 wl_egl_display->explicit_sync = NULL;
907 wl_egl_display->wl_tbm_client = NULL;
909 wl_egl_display->use_wait_vblank = TPL_TRUE; // default enabled
911 char *env = tpl_getenv("TPL_WAIT_VBLANK");
912 if (env && !atoi(env)) {
913 wl_egl_display->use_wait_vblank = TPL_FALSE;
917 tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
919 tpl_gmutex_init(&wl_egl_display->disp_mutex);
920 tpl_gcond_init(&wl_egl_display->disp_cond);
923 wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
924 (tpl_gthread_func)_thread_init,
925 (void *)wl_egl_display);
926 if (!wl_egl_display->thread) {
927 TPL_ERR("Failed to create wl_egl_thread");
931 wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
932 (void *)wl_egl_display,
933 wl_display_get_fd(wl_egl_display->wl_display),
935 &disp_funcs, SOURCE_TYPE_NORMAL);
936 if (!wl_egl_display->disp_source) {
937 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
938 display->native_handle,
939 wl_egl_display->thread);
943 if (wl_egl_display->use_wait_vblank &&
944 wl_egl_display->tdm.tdm_initialized) {
945 tpl_gmutex_init(&wl_egl_display->tdm.tdm_mutex);
946 tpl_gcond_init(&wl_egl_display->tdm.tdm_cond);
947 wl_egl_display->tdm.tdm_source = tpl_gsource_create(wl_egl_display->thread,
948 (void *)wl_egl_display,
949 wl_egl_display->tdm.tdm_display_fd,
951 &tdm_funcs, SOURCE_TYPE_NORMAL);
952 wl_egl_display->tdm.gsource_finalized = TPL_FALSE;
953 if (!wl_egl_display->tdm.tdm_source) {
954 TPL_ERR("Failed to create tdm_gsource\n");
959 wl_egl_display->use_wait_vblank = (wl_egl_display->tdm.tdm_initialized &&
960 (wl_egl_display->tdm.tdm_source != NULL));
962 TPL_INFO("[DISPLAY_INIT]",
963 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
965 wl_egl_display->thread,
966 wl_egl_display->wl_display);
968 TPL_INFO("[DISPLAY_INIT]",
969 "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
970 wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
971 wl_egl_display->use_tss ? "TRUE" : "FALSE",
972 wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
974 return TPL_ERROR_NONE;
977 if (wl_egl_display->tdm.tdm_source) {
978 tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
979 // Send destroy mesage to thread
980 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
981 while (!wl_egl_display->tdm.gsource_finalized) {
982 tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
984 tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
987 if (wl_egl_display->disp_source) {
988 tpl_gmutex_lock(&wl_egl_display->disp_mutex);
989 // Send destroy mesage to thread
990 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
991 while (!wl_egl_display->gsource_finalized) {
992 tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
994 tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
997 if (wl_egl_display->thread) {
998 tpl_gthread_destroy(wl_egl_display->thread);
1001 tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
1002 tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
1003 tpl_gcond_clear(&wl_egl_display->disp_cond);
1004 tpl_gmutex_clear(&wl_egl_display->disp_mutex);
1006 wl_egl_display->thread = NULL;
1007 free(wl_egl_display);
1009 display->backend.data = NULL;
1010 return TPL_ERROR_INVALID_OPERATION;
1014 __tpl_wl_egl_display_fini(tpl_display_t *display)
1016 tpl_wl_egl_display_t wl_egl_display(display->backend.data);
1017 if (wl_egl_display) {
1018 TPL_INFO("[DISPLAY_FINI]",
1019 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
1021 wl_egl_display->thread,
1022 wl_egl_display->wl_display);
1024 if (wl_egl_display->tdm.tdm_source && wl_egl_display->tdm.tdm_initialized) {
1025 /* This is a protection to prevent problems that arise in unexpected situations
1026 * that g_cond_wait cannot work normally.
1027 * When calling tpl_gsource_destroy() with destroy_in_thread is TPL_TRUE,
1028 * caller should use tpl_gcond_wait() in the loop with checking finalized flag
1030 tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
1031 // Send destroy mesage to thread
1032 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
1033 while (!wl_egl_display->tdm.gsource_finalized) {
1034 tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
1036 wl_egl_display->tdm.tdm_source = NULL;
1037 tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
1040 if (wl_egl_display->disp_source) {
1041 tpl_gmutex_lock(&wl_egl_display->disp_mutex);
1042 // Send destroy mesage to thread
1043 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
1044 /* This is a protection to prevent problems that arise in unexpected situations
1045 * that g_cond_wait cannot work normally.
1046 * When calling tpl_gsource_destroy() with destroy_in_thread is TPL_TRUE,
1047 * caller should use tpl_gcond_wait() in the loop with checking finalized flag
1049 while (!wl_egl_display->gsource_finalized) {
1050 tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
1052 wl_egl_display->disp_source = NULL;
1053 tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
1056 if (wl_egl_display->thread) {
1057 tpl_gthread_destroy(wl_egl_display->thread);
1058 wl_egl_display->thread = NULL;
1061 tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
1062 tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
1063 tpl_gcond_clear(&wl_egl_display->disp_cond);
1064 tpl_gmutex_clear(&wl_egl_display->disp_mutex);
1066 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
1068 free(wl_egl_display);
1071 display->backend.data = NULL;
1075 __tpl_wl_egl_display_query_config(tpl_display_t *display,
1076 tpl_surface_type_t surface_type,
1077 int red_size, int green_size,
1078 int blue_size, int alpha_size,
1079 int color_depth, int *native_visual_id,
1080 tpl_bool_t *is_slow)
1082 TPL_ASSERT(display);
1084 if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
1085 green_size == 8 && blue_size == 8 &&
1086 (color_depth == 32 || color_depth == 24)) {
1088 if (alpha_size == 8) {
1089 if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
1090 if (is_slow) *is_slow = TPL_FALSE;
1091 return TPL_ERROR_NONE;
1093 if (alpha_size == 0) {
1094 if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
1095 if (is_slow) *is_slow = TPL_FALSE;
1096 return TPL_ERROR_NONE;
1100 return TPL_ERROR_INVALID_PARAMETER;
1104 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
1107 TPL_IGNORE(display);
1108 TPL_IGNORE(visual_id);
1109 TPL_IGNORE(alpha_size);
1110 return TPL_ERROR_NONE;
1114 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
1115 tpl_handle_t window, int *width,
1116 int *height, tbm_format *format,
1117 int depth, int a_size)
1119 tpl_result_t ret = TPL_ERROR_NONE;
1120 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
1122 if (!wl_egl_window) {
1123 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
1124 return TPL_ERROR_INVALID_PARAMETER;
1127 if (width) *width = wl_egl_window->width;
1128 if (height) *height = wl_egl_window->height;
1130 struct tizen_private tizen_private(wl_egl_window->driver_private);
1131 if (tizen_private && tizen_private->data) {
1132 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1133 *format = wl_egl_surface->format;
1136 *format = TBM_FORMAT_ARGB8888;
1138 *format = TBM_FORMAT_XRGB8888;
1146 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
1147 tpl_handle_t pixmap, int *width,
1148 int *height, tbm_format *format)
1150 tbm_surface_h tbm_surface = NULL;
1153 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
1154 return TPL_ERROR_INVALID_PARAMETER;
1157 tbm_surface = wayland_tbm_server_get_surface(NULL,
1158 (struct wl_resource *)pixmap);
1160 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
1161 return TPL_ERROR_INVALID_PARAMETER;
1164 if (width) *width = tbm_surface_get_width(tbm_surface);
1165 if (height) *height = tbm_surface_get_height(tbm_surface);
1166 if (format) *format = tbm_surface_get_format(tbm_surface);
1168 return TPL_ERROR_NONE;
/* Resolves a native pixmap handle to its backing tbm_surface_h via
 * wayland-tbm-server. Logs and (presumably) returns NULL on failure —
 * the return statements are not visible in this fragment. */
1171 static tbm_surface_h
1172 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
1174 tbm_surface_h tbm_surface = NULL;
1178 tbm_surface = wayland_tbm_server_get_surface(NULL,
1179 (struct wl_resource *)pixmap);
1181 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
/* Decides whether this backend can handle the given native display.
 * A wl_display is recognized either by pointer identity (first word of
 * the handle points at wl_display_interface) or, failing that, by
 * comparing the interface name string. */
1189 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
1191 struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1193 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1195 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
1196 is a memory address pointing the structure of wl_display_interface. */
1197 if (wl_egl_native_dpy == &wl_display_interface)
/* Fallback: the interface struct may be a copy (e.g. across DSO
 * boundaries), so also accept a matching interface name. */
1200 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1201 strlen(wl_display_interface.name)) == 0) {
1208 /* -- BEGIN -- wl_egl_window callback functions */
/* wl_egl_window destroy callback.
 * Normally the native window outlives the EGL surface; if a live
 * wl_egl_surface is still attached here, the destruction order is wrong,
 * so warn loudly and detach everything under surf_mutex to avoid later
 * use-after-free through the window's callback pointers. */
1210 __cb_destroy_callback(void *private)
1212 struct tizen_private tizen_private(private);
1214 if (!tizen_private) {
1215 TPL_LOG_D("[WL_EGL_WINDOW_DESTROY_CALLBACK]", "Already destroyed surface");
1219 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1220 if (wl_egl_surface) {
1221 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1222 wl_egl_surface->wl_egl_window);
1223 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
/* Clear every cross-reference between window, tizen_private and the
 * surface while holding surf_mutex, then free tizen_private. */
1225 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1226 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1227 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1228 wl_egl_surface->wl_egl_window->driver_private = NULL;
1229 wl_egl_surface->wl_egl_window = NULL;
1230 wl_egl_surface->wl_surface = NULL;
1232 tizen_private->set_window_serial_callback = NULL;
1233 tizen_private->rotate_callback = NULL;
1234 tizen_private->get_rotation_capability = NULL;
1235 tizen_private->set_frontbuffer_callback = NULL;
1236 tizen_private->create_commit_sync_fd = NULL;
1237 tizen_private->create_presentation_sync_fd = NULL;
1238 tizen_private->data = NULL;
1240 free(tizen_private);
1241 tizen_private = NULL;
1242 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* wl_egl_window resize callback.
 * Resets the tbm_surface_queue to the window's requested size (keeping
 * the current format) so the next dequeued buffer has the new size. */
1247 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1249 TPL_ASSERT(private);
1251 struct tizen_private tizen_private(private);
1252 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1253 int cur_w, cur_h, req_w, req_h, format;
1255 if (!wl_egl_surface) {
1256 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
/* cur_* is the surface's current size, req_* the window's new size. */
1261 format = wl_egl_surface->format;
1262 cur_w = wl_egl_surface->width;
1263 cur_h = wl_egl_surface->height;
1264 req_w = wl_egl_window->width;
1265 req_h = wl_egl_window->height;
1267 TPL_INFO("[WINDOW_RESIZE]",
1268 "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1269 wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1271 if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1272 != TBM_SURFACE_QUEUE_ERROR_NONE) {
1273 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1277 /* -- END -- wl_egl_window callback functions */
1279 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1281 /* There is no usecase for using prerotation callback below */
/* wl_egl_window rotation callback: stores the rotation requested via
 * tizen_private on the wl_egl_surface for later use. */
1283 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1285 TPL_ASSERT(private);
1287 struct tizen_private tizen_private(private);
1288 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1289 int rotation = tizen_private->rotation;
1291 if (!wl_egl_surface) {
1292 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1297 TPL_INFO("[WINDOW_ROTATE]",
1298 "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1299 wl_egl_surface, wl_egl_window,
1300 wl_egl_surface->rotation, rotation);
1302 wl_egl_surface->rotation = rotation;
1305 /* There is no usecase for using prerotation callback below */
/* Reports whether this surface supports pre-rotation to the
 * wl_egl_window tizen extension. Returns CAPABILITY_NONE when the
 * surface pointer is missing, otherwise SUPPORTED/UNSUPPORTED based on
 * wl_egl_surface->prerotation_capability. */
1307 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1310 TPL_ASSERT(private);
1312 int rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1313 struct tizen_private tizen_private(private);
1314 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1316 if (!wl_egl_surface) {
1317 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1319 return rotation_capability;
1322 if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1323 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1325 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1328 return rotation_capability;
/* Stores an application-provided window serial on the surface and marks
 * that explicit serials are now in use (affects later commits —
 * consumer of `serial` is not visible in this fragment). */
1332 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1333 void *private, unsigned int serial)
1335 TPL_ASSERT(private);
1337 struct tizen_private tizen_private(private);
1338 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1340 if (!wl_egl_surface) {
1341 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1346 wl_egl_surface->set_serial_is_used = TPL_TRUE;
1347 wl_egl_surface->serial = serial;
/* Returns a dup()'d fd of the surface's commit-sync eventfd, creating
 * the eventfd lazily (EFD_CLOEXEC) on first call. The caller owns the
 * returned fd; -1 is returned on failure. All access to
 * commit_sync.fd is serialized by commit_sync.mutex. */
1351 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1353 TPL_ASSERT(private);
1354 TPL_ASSERT(wl_egl_window);
1356 int commit_sync_fd = -1;
1358 struct tizen_private tizen_private(private);
1359 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1361 if (!wl_egl_surface) {
1362 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
1366 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
/* Fast path: eventfd already exists — hand out another dup. */
1368 if (wl_egl_surface->commit_sync.fd != -1) {
1369 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1370 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1371 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1372 TPL_LOG_D("[COMMIT_SYNC][DUP]", "wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1373 wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1374 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1375 return commit_sync_fd;
/* Slow path: create the eventfd, then dup it for the caller. */
1378 wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1379 if (wl_egl_surface->commit_sync.fd == -1) {
1380 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)",
1382 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1386 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1388 TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1389 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1390 TPL_LOG_D("[COMMIT_SYNC][CREATE]", "wl_egl_surface(%p) commit_sync_fd(%d)",
1391 wl_egl_surface, commit_sync_fd);
1393 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1395 return commit_sync_fd;
/* Toggles frontbuffer mode on the owning tpl_surface.
 * No-op (early unlock + return) when the requested mode equals the
 * current mode. The tpl_surface object lock guards the flag. */
1399 __cb_client_window_set_frontbuffer_mode(struct wl_egl_window *wl_egl_window,
1400 void *private, int set)
1402 TPL_ASSERT(private);
1403 TPL_ASSERT(wl_egl_window);
1404 struct tizen_private tizen_private(private);
1405 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1406 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1408 tpl_surface_t *surface = wl_egl_surface->tpl_surface;
1409 TPL_CHECK_ON_NULL_RETURN(surface);
1411 tpl_bool_t is_frontbuffer_mode = set ? TPL_TRUE : TPL_FALSE;
1413 TPL_OBJECT_LOCK(surface);
1414 if (is_frontbuffer_mode == surface->is_frontbuffer_mode) {
1415 TPL_OBJECT_UNLOCK(surface);
1419 TPL_INFO("[FRONTBUFFER_MODE]",
1420 "[%s] wl_egl_surface(%p) wl_egl_window(%p)",
1421 is_frontbuffer_mode ? "ON" : "OFF",
1422 wl_egl_surface, wl_egl_window);
1424 surface->is_frontbuffer_mode = is_frontbuffer_mode;
1426 TPL_OBJECT_UNLOCK(surface);
1429 #if TIZEN_FEATURE_ENABLE
/* TIZEN_FEATURE_ENABLE only: returns a dup()'d fd of the surface's
 * presentation-sync eventfd, creating it lazily (EFD_CLOEXEC) on first
 * call. Mirrors __cb_create_commit_sync_fd but for presentation
 * feedback. Caller owns the returned fd; -1 on failure. Serialized by
 * presentation_sync.mutex. */
1431 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1433 TPL_ASSERT(private);
1434 TPL_ASSERT(wl_egl_window);
1436 int presentation_sync_fd = -1;
1438 struct tizen_private tizen_private(private);
1439 tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1441 if (!wl_egl_surface) {
1442 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
1446 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Fast path: eventfd already exists — hand out another dup. */
1447 if (wl_egl_surface->presentation_sync.fd != -1) {
1448 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1449 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1450 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1451 TPL_LOG_D("[PRESENTATION_SYNC][DUP]", "wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1452 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1453 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1454 return presentation_sync_fd;
/* Slow path: create the eventfd, then dup it for the caller. */
1457 wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1458 if (wl_egl_surface->presentation_sync.fd == -1) {
1459 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)",
1461 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1465 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1466 TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1467 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1468 TPL_LOG_D("[PRESENTATION_SYNC][CREATE]", "wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1469 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1471 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1473 return presentation_sync_fd;
1475 /* -- END -- wl_egl_window tizen private callback functions */
1477 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
/* tizen_surface_shm_flusher "flush" event: the compositor asked the
 * client to flush its buffers, so flush the surface's tbm_queue. */
1478 static void __cb_tss_flusher_flush_callback(void *data,
1479 struct tizen_surface_shm_flusher *tss_flusher)
1481 tpl_wl_egl_surface_t wl_egl_surface(data);
1482 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1484 TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1485 wl_egl_surface, wl_egl_surface->tbm_queue);
1487 tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1488 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1489 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* tizen_surface_shm_flusher "free_flush" event: flush only the free
 * (unused) buffers of the surface's tbm_queue. */
1494 static void __cb_tss_flusher_free_flush_callback(void *data,
1495 struct tizen_surface_shm_flusher *tss_flusher)
1497 tpl_wl_egl_surface_t wl_egl_surface(data);
1498 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1500 TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1501 wl_egl_surface, wl_egl_surface->tbm_queue);
1503 tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1504 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1505 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* Listener vtable wiring the two flusher events above; registered in
 * _thread_wl_egl_surface_init via tizen_surface_shm_flusher_add_listener. */
1510 static const struct tizen_surface_shm_flusher_listener
1511 tss_flusher_listener = {
1512 __cb_tss_flusher_flush_callback,
1513 __cb_tss_flusher_free_flush_callback
1515 /* -- END -- tizen_surface_shm_flusher_listener */
1518 /* -- BEGIN -- tbm_surface_queue callback funstions */
/* tbm_surface_queue reset callback.
 * Marks the surface for reset (wl_egl_surface->reset = TPL_TRUE) so the
 * next frame picks up a new size and/or activation state, then forwards
 * to the application's reset_cb if one is registered. */
1520 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1523 tpl_wl_egl_display_t *wl_egl_display = NULL;
1524 tpl_surface_t *surface = NULL;
1525 tpl_bool_t is_activated = TPL_FALSE;
1528 tpl_wl_egl_surface_t wl_egl_surface(data);
1529 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1531 wl_egl_display = wl_egl_surface->wl_egl_display;
1532 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1534 surface = wl_egl_surface->tpl_surface;
1535 TPL_CHECK_ON_NULL_RETURN(surface);
1537 /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
1538 * the changed window size at the next frame. */
1539 width = tbm_surface_queue_get_width(tbm_queue);
1540 height = tbm_surface_queue_get_height(tbm_queue);
1541 if (surface->width != width || surface->height != height) {
1542 TPL_INFO("[QUEUE_RESIZE]",
1543 "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1544 wl_egl_surface, tbm_queue,
1545 surface->width, surface->height, width, height);
1548 /* When queue_reset_callback is called, if is_activated is different from
1549 * its previous state change the reset flag to TPL_TRUE to get a new buffer
1550 * with the changed state(ACTIVATED/DEACTIVATED) at the next frame. */
1551 is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1552 wl_egl_surface->tbm_queue);
1553 if (wl_egl_surface->is_activated != is_activated) {
1555 TPL_INFO("[ACTIVATED]",
1556 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1557 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1559 TPL_INFO("[DEACTIVATED]",
1560 " wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1561 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1565 wl_egl_surface->reset = TPL_TRUE;
/* Notify the application layer, if it registered a reset callback. */
1567 if (surface->reset_cb)
1568 surface->reset_cb(surface->reset_data);
/* tbm_surface_queue acquirable callback: a buffer became ready to
 * acquire. Wakes the surface's worker gsource with an ACQUIRABLE
 * message, but only if no message is already pending (sent_message is
 * guarded by surf_mutex to avoid duplicate wakeups). */
1572 __cb_tbm_queue_acquirable_callback(tbm_surface_queue_h tbm_queue,
1575 TPL_IGNORE(tbm_queue);
1577 tpl_wl_egl_surface_t wl_egl_surface(data);
1578 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1580 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1581 if (wl_egl_surface->sent_message == NONE_MESSAGE) {
1582 wl_egl_surface->sent_message = ACQUIRABLE;
1583 tpl_gsource_send_message(wl_egl_surface->surf_source,
1584 wl_egl_surface->sent_message);
1586 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1588 /* -- END -- tbm_surface_queue callback funstions */
/* Thread-side teardown of a wl_egl_surface (runs on the worker thread
 * during gsource finalize). Order matters:
 *  1. drain + signal pending presentation feedbacks and the
 *     presentation sync fd (TIZEN_FEATURE_ENABLE only),
 *  2. destroy surface_sync / tss_flusher protocol objects,
 *  3. destroy the tbm_queue,
 *  4. free any vblank waiting_buffers and unregister the per-surface
 *     vblank from the display's list (which frees it via
 *     __cb_surface_vblank_free). */
1591 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1593 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1595 TPL_INFO("[SURFACE_FINI]",
1596 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1597 wl_egl_surface, wl_egl_surface->wl_egl_window,
1598 wl_egl_surface->wl_surface);
1599 #if TIZEN_FEATURE_ENABLE
1600 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Signal every pending feedback so waiters on the dup'd fds unblock,
 * then destroy the protocol objects. */
1602 if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1603 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1604 struct pst_feedback *pst_feedback =
1605 (struct pst_feedback *)__tpl_list_pop_front(
1606 wl_egl_surface->presentation_feedbacks, NULL);
1608 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
1609 pst_feedback->pst_sync_fd = -1;
1611 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1612 pst_feedback->presentation_feedback = NULL;
1618 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1619 wl_egl_surface->presentation_feedbacks = NULL;
1622 send_signal(wl_egl_surface->presentation_sync.fd, "PST_SYNC");
1623 wl_egl_surface->presentation_sync.fd = -1;
1625 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1627 if (wl_egl_surface->surface_sync) {
1628 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1629 "wl_egl_surface(%p) surface_sync(%p)",
1630 wl_egl_surface, wl_egl_surface->surface_sync);
1631 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1632 wl_egl_surface->surface_sync = NULL;
1635 if (wl_egl_surface->tss_flusher) {
1636 TPL_INFO("[FLUSHER_DESTROY]",
1637 "wl_egl_surface(%p) tss_flusher(%p)",
1638 wl_egl_surface, wl_egl_surface->tss_flusher);
1639 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1640 wl_egl_surface->tss_flusher = NULL;
1644 if (wl_egl_surface->tbm_queue) {
1645 TPL_INFO("[TBM_QUEUE_DESTROY]",
1646 "wl_egl_surface(%p) tbm_queue(%p)",
1647 wl_egl_surface, wl_egl_surface->tbm_queue);
1648 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1649 wl_egl_surface->tbm_queue = NULL;
/* Buffers still waiting for vblank are just dropped from the list;
 * their release is handled elsewhere (buffer clear / queue destroy). */
1652 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
1653 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
1654 __tpl_list_free(wl_egl_surface->vblank->waiting_buffers, NULL);
1655 wl_egl_surface->vblank->waiting_buffers = NULL;
1656 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
1659 if (wl_egl_surface->vblank) {
1660 __tpl_list_remove_data(wl_egl_display->tdm.surface_vblanks,
1661 (void *)wl_egl_surface->vblank,
1663 __cb_surface_vblank_free);
1664 wl_egl_surface->vblank = NULL;
/* Worker-thread message dispatcher for a surface gsource.
 * INIT_SURFACE: runs thread-side init and signals the waiting caller
 * (see __tpl_wl_egl_surface_init). ACQUIRABLE: acquires ready buffers
 * from the tbm_queue. sent_message is cleared so the acquirable
 * callback may post again. */
1669 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1671 tpl_wl_egl_surface_t wl_egl_surface(tpl_gsource_get_data(gsource));
1673 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1674 if (message == INIT_SURFACE) { /* Initialize surface */
1675 TPL_LOG_D("[MSG_RECEIVED]", "wl_egl_surface(%p) initialize message received!",
1677 _thread_wl_egl_surface_init(wl_egl_surface);
1678 wl_egl_surface->initialized_in_thread = TPL_TRUE;
1679 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1680 } else if (message == ACQUIRABLE) { /* Acquirable */
1681 TPL_LOG_D("[MSG_RECEIVED]", "wl_egl_surface(%p) acquirable message received!",
1683 _thread_surface_queue_acquire(wl_egl_surface);
1686 wl_egl_surface->sent_message = NONE_MESSAGE;
1688 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource finalize hook (worker thread): tears the surface down via
 * _thread_wl_egl_surface_fini, then sets gsource_finalized and signals
 * surf_cond so __tpl_wl_egl_surface_fini (waiting on the main thread)
 * can proceed. */
1694 __thread_func_surf_finalize(tpl_gsource *gsource)
1696 tpl_wl_egl_surface_t wl_egl_surface(tpl_gsource_get_data(gsource));
1697 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1699 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1700 TPL_LOG_D("[S_FINALIZE]", "wl_egl_surface(%p) tpl_gsource(%p)",
1701 wl_egl_surface, gsource);
1703 _thread_wl_egl_surface_fini(wl_egl_surface);
1705 wl_egl_surface->gsource_finalized = TPL_TRUE;
1707 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1708 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource vtable for per-surface worker sources (dispatch + finalize). */
1711 static tpl_gsource_functions surf_funcs = {
1714 .dispatch = __thread_func_surf_dispatch,
1715 .finalize = __thread_func_surf_finalize,
/* Backend init for a window-type tpl_surface.
 * Allocates the tpl_wl_egl_surface_t, creates its worker gsource,
 * wires the wl_egl_window tizen_private callbacks, initializes all
 * mutexes/conds, then sends INIT_SURFACE to the worker thread and
 * blocks until thread-side init (tbm_queue creation etc.) completes.
 * Returns TPL_ERROR_NONE, TPL_ERROR_OUT_OF_MEMORY on calloc failure,
 * or TPL_ERROR_INVALID_OPERATION if the gsource cannot be created. */
1719 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1721 tpl_wl_egl_display_t wl_egl_display(surface->display->backend.data);
1722 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1723 tpl_gsource *surf_source = NULL;
1725 struct wl_egl_window *wl_egl_window =
1726 (struct wl_egl_window *)surface->native_handle;
1728 TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1729 TPL_ASSERT(surface->native_handle);
1730 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_display, TPL_ERROR_INVALID_PARAMETER);
1732 wl_egl_surface = calloc(1, sizeof(tpl_wl_egl_surface_t));
1733 if (!wl_egl_surface) {
1734 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1735 return TPL_ERROR_OUT_OF_MEMORY;
/* The gsource attaches this surface to the display's worker thread. */
1738 surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1739 -1, FD_TYPE_NONE, &surf_funcs, SOURCE_TYPE_NORMAL);
1741 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1743 goto surf_source_create_fail;
1746 surface->backend.data = (void *)wl_egl_surface;
1747 surface->width = wl_egl_window->width;
1748 surface->height = wl_egl_window->height;
1749 surface->rotation = 0;
1751 wl_egl_surface->tpl_surface = surface;
1752 wl_egl_surface->width = wl_egl_window->width;
1753 wl_egl_surface->height = wl_egl_window->height;
1754 wl_egl_surface->format = surface->format;
1755 wl_egl_surface->num_buffers = surface->num_buffers;
1757 wl_egl_surface->surf_source = surf_source;
1758 wl_egl_surface->wl_egl_window = wl_egl_window;
1759 wl_egl_surface->wl_surface = wl_egl_window->surface;
1761 wl_egl_surface->wl_egl_display = wl_egl_display;
/* Explicit defaults for every flag/field the worker thread reads. */
1763 wl_egl_surface->reset = TPL_FALSE;
1764 wl_egl_surface->is_activated = TPL_FALSE;
1765 wl_egl_surface->need_to_enqueue = TPL_TRUE;
1766 wl_egl_surface->prerotation_capability = TPL_FALSE;
1767 wl_egl_surface->vblank_done = TPL_TRUE;
1768 wl_egl_surface->use_render_done_fence = TPL_FALSE;
1769 wl_egl_surface->set_serial_is_used = TPL_FALSE;
1770 wl_egl_surface->gsource_finalized = TPL_FALSE;
1771 wl_egl_surface->initialized_in_thread = TPL_FALSE;
1772 wl_egl_surface->frontbuffer_activated = TPL_FALSE;
1774 wl_egl_surface->latest_transform = -1;
1775 wl_egl_surface->render_done_cnt = 0;
1776 wl_egl_surface->serial = 0;
1778 wl_egl_surface->vblank = NULL;
1779 #if TIZEN_FEATURE_ENABLE
1780 wl_egl_surface->tss_flusher = NULL;
1781 wl_egl_surface->surface_sync = NULL;
1784 wl_egl_surface->post_interval = surface->post_interval;
1786 wl_egl_surface->vblank_enable = TPL_FALSE;
1788 wl_egl_surface->commit_sync.fd = -1;
1789 wl_egl_surface->presentation_sync.fd = -1;
1791 wl_egl_surface->sent_message = NONE_MESSAGE;
1792 wl_egl_surface->last_enq_buffer = NULL;
1794 wl_egl_surface->buffers = __tpl_list_alloc();
/* Reuse an existing tizen_private (window re-used across surfaces) or
 * create a fresh one, then install all extension callbacks on it. */
1797 struct tizen_private *tizen_private = NULL;
1799 if (wl_egl_window->driver_private)
1800 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1802 tizen_private = tizen_private_create();
1803 wl_egl_window->driver_private = (void *)tizen_private;
1806 if (tizen_private) {
1807 tizen_private->data = (void *)wl_egl_surface;
1808 tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1809 tizen_private->get_rotation_capability = (void *)
1810 __cb_get_rotation_capability;
1811 tizen_private->set_window_serial_callback = (void *)
1812 __cb_set_window_serial_callback;
1813 tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1814 tizen_private->set_frontbuffer_callback = (void *)__cb_client_window_set_frontbuffer_mode;
1815 #if TIZEN_FEATURE_ENABLE
1816 tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1818 tizen_private->create_presentation_sync_fd = NULL;
1821 wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1822 wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1826 tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1827 tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1829 tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1831 tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1832 tpl_gcond_init(&wl_egl_surface->surf_cond);
1834 /* Initialize in thread */
/* Block until the worker thread finishes _thread_wl_egl_surface_init;
 * initialized_in_thread is the condition-variable predicate. */
1835 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1836 wl_egl_surface->sent_message = INIT_SURFACE;
1837 tpl_gsource_send_message(wl_egl_surface->surf_source,
1838 wl_egl_surface->sent_message);
1839 while (!wl_egl_surface->initialized_in_thread)
1840 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1841 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1843 TPL_ASSERT(wl_egl_surface->tbm_queue);
1845 TPL_INFO("[SURFACE_INIT]",
1846 "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1847 surface, wl_egl_surface, wl_egl_surface->surf_source);
1849 return TPL_ERROR_NONE;
1851 surf_source_create_fail:
1852 free(wl_egl_surface);
1853 surface->backend.data = NULL;
1854 return TPL_ERROR_INVALID_OPERATION;
/* Creates the surface's tbm_surface_queue via wayland-tbm (worker
 * thread). Uses the tiled-memory queue variant when the buffer manager
 * advertises TBM_BUFMGR_CAPABILITY_TILED_MEMORY, sets
 * GUARANTEE_CYCLE mode, and registers the reset/acquirable callbacks.
 * Returns NULL on any failure (queue destroyed on partial setup). */
1857 static tbm_surface_queue_h
1858 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1859 struct wayland_tbm_client *wl_tbm_client,
1862 tbm_surface_queue_h tbm_queue = NULL;
1863 tbm_bufmgr bufmgr = NULL;
1864 unsigned int capability;
1866 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1867 int width = wl_egl_surface->width;
1868 int height = wl_egl_surface->height;
1869 int format = wl_egl_surface->format;
1871 if (!wl_tbm_client || !wl_surface) {
1872 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1873 wl_tbm_client, wl_surface);
/* Probe bufmgr capabilities only; the temporary init/deinit pair does
 * not keep a bufmgr reference. */
1877 bufmgr = tbm_bufmgr_init(-1);
1878 capability = tbm_bufmgr_get_capability(bufmgr);
1879 tbm_bufmgr_deinit(bufmgr);
1881 if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1882 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1890 tbm_queue = wayland_tbm_client_create_surface_queue(
1900 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
1905 if (tbm_surface_queue_set_modes(
1906 tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1907 TBM_SURFACE_QUEUE_ERROR_NONE) {
1908 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1910 tbm_surface_queue_destroy(tbm_queue);
1914 if (tbm_surface_queue_add_reset_cb(
1916 __cb_tbm_queue_reset_callback,
1917 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1918 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1920 tbm_surface_queue_destroy(tbm_queue);
1924 if (tbm_surface_queue_add_acquirable_cb(
1926 __cb_tbm_queue_acquirable_callback,
1927 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1928 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1930 tbm_surface_queue_destroy(tbm_queue);
/* Creates a tdm_client_vblank on the "primary" output (worker thread).
 * Fake vblank is enabled (works even with display off) and sync mode
 * disabled (vblank waits are asynchronous). Returns NULL on failure —
 * the return statements are not visible in this fragment. */
1937 static tdm_client_vblank*
1938 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1940 tdm_client_vblank *tdm_vblank = NULL;
1941 tdm_client_output *tdm_output = NULL;
1942 tdm_error tdm_err = TDM_ERROR_NONE;
1945 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1949 tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1950 if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1951 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1955 tdm_vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1956 if (!tdm_vblank || tdm_err != TDM_ERROR_NONE) {
1957 TPL_ERR("Failed to create tdm_vblank. tdm_err(%d)", tdm_err);
/* Drain any events queued during output/vblank creation. */
1961 tdm_err = tdm_client_handle_pending_events(tdm_client);
1962 if (tdm_err != TDM_ERROR_NONE) {
1963 TPL_ERR("Failed to handle pending events. tdm_err(%d)", tdm_err);
1966 tdm_client_vblank_set_enable_fake(tdm_vblank, 1);
1967 tdm_client_vblank_set_sync(tdm_vblank, 0);
/* Free callback for entries of wl_egl_display->tdm.surface_vblanks:
 * destroys the tdm_vblank, clears back-references and the per-vblank
 * mutex, and detaches the vblank from its owning surface. */
1973 __cb_surface_vblank_free(void *data)
1975 TPL_CHECK_ON_NULL_RETURN(data);
1977 tpl_surface_vblank_t *vblank = (tpl_surface_vblank_t *)data;
1978 tpl_wl_egl_surface_t *wl_egl_surface = vblank->wl_egl_surface;
1980 TPL_INFO("[VBLANK_DESTROY]",
1981 "wl_egl_surface(%p) surface_vblank(%p) tdm_vblank(%p)",
1982 wl_egl_surface, vblank,
1983 vblank->tdm_vblank);
1985 tdm_client_vblank_destroy(vblank->tdm_vblank);
1986 vblank->tdm_vblank = NULL;
1987 vblank->wl_egl_surface = NULL;
1988 tpl_gmutex_clear(&vblank->mutex);
1992 wl_egl_surface->vblank = NULL;
/* Thread-side surface initialization (runs on the worker thread in
 * response to INIT_SURFACE):
 *  1. create the tbm_queue,
 *  2. optionally set up a per-surface tdm vblank (when the display uses
 *     vblank waiting),
 *  3. TIZEN_FEATURE_ENABLE: attach the shm flusher, explicit-sync
 *     surface_sync, and allocate the presentation feedback list. */
1996 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1998 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1999 tpl_surface_vblank_t *vblank = NULL;
2001 wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
2003 wl_egl_display->wl_tbm_client,
2004 wl_egl_surface->num_buffers);
2005 if (!wl_egl_surface->tbm_queue) {
2006 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
2007 wl_egl_surface, wl_egl_display->wl_tbm_client);
2011 TPL_INFO("[QUEUE_CREATION][1/2]",
2012 "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
2013 wl_egl_surface, wl_egl_surface->wl_surface,
2014 wl_egl_display->wl_tbm_client);
2015 TPL_INFO("[QUEUE_CREATION][2/2]",
2016 "wl_egl_surface(%p) tbm_queue(%p) size(%d x %d) X %d format(%d)",
2018 wl_egl_surface->tbm_queue,
2019 wl_egl_surface->width,
2020 wl_egl_surface->height,
2021 wl_egl_surface->num_buffers,
2022 wl_egl_surface->format);
/* Vblank setup is best-effort; on partial failure the allocated pieces
 * are destroyed and the surface falls back to no-vblank operation. */
2024 if (wl_egl_display->use_wait_vblank) {
2025 vblank = (tpl_surface_vblank_t *)calloc(1, sizeof(tpl_surface_vblank_t));
2027 vblank->tdm_vblank = _thread_create_tdm_client_vblank(
2028 wl_egl_display->tdm.tdm_client);
2029 if (!vblank->tdm_vblank) {
2030 TPL_ERR("Failed to create tdm_vblank from tdm_client(%p)",
2031 wl_egl_display->tdm.tdm_client);
2035 vblank->waiting_buffers = __tpl_list_alloc();
2036 if (!vblank->waiting_buffers) {
2037 tdm_client_vblank_destroy(vblank->tdm_vblank);
2041 vblank->wl_egl_surface = wl_egl_surface;
2042 tpl_gmutex_init(&vblank->mutex);
2044 __tpl_list_push_back(wl_egl_display->tdm.surface_vblanks,
2047 TPL_INFO("[VBLANK_INIT]",
2048 "wl_egl_surface(%p) tdm_client(%p) tdm_vblank(%p)",
2049 wl_egl_surface, wl_egl_display->tdm.tdm_client,
2050 vblank->tdm_vblank);
/* vblank_enable also requires a positive post interval. */
2056 wl_egl_surface->vblank = vblank;
2057 wl_egl_surface->vblank_enable = (vblank != NULL &&
2058 wl_egl_surface->post_interval > 0);
2060 #if TIZEN_FEATURE_ENABLE
2061 if (wl_egl_display->tss) {
2062 wl_egl_surface->tss_flusher =
2063 tizen_surface_shm_get_flusher(wl_egl_display->tss,
2064 wl_egl_surface->wl_surface);
2067 if (wl_egl_surface->tss_flusher) {
2068 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
2069 &tss_flusher_listener,
2071 TPL_INFO("[FLUSHER_INIT]",
2072 "wl_egl_surface(%p) tss_flusher(%p)",
2073 wl_egl_surface, wl_egl_surface->tss_flusher);
2076 if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
2077 wl_egl_surface->surface_sync =
2078 zwp_linux_explicit_synchronization_v1_get_synchronization(
2079 wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
2080 if (wl_egl_surface->surface_sync) {
2081 TPL_INFO("[EXPLICIT_SYNC_INIT]",
2082 "wl_egl_surface(%p) surface_sync(%p)",
2083 wl_egl_surface, wl_egl_surface->surface_sync);
2085 TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
/* Failure disables explicit sync for the whole display, not just
 * this surface. */
2087 wl_egl_display->use_explicit_sync = TPL_FALSE;
2091 wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
/* Drains and reclaims every buffer tracked by the surface.
 * The worker thread is paused while the list is walked; for buffers
 * that are enqueued but not yet committed, the thread is briefly
 * resumed and we wait (with timeout) on the buffer's condvar until the
 * worker commits it. Each buffer is then released back to the queue
 * (ACQUIRED..COMMITTED) or cancel-dequeued (DEQUEUED), marked RELEASED,
 * and unref'd where this path holds the reference. */
2095 _tpl_wl_egl_surface_buffer_clear(tpl_wl_egl_surface_t *wl_egl_surface)
2097 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2098 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2099 tpl_bool_t need_to_release = TPL_FALSE;
2100 tpl_bool_t need_to_cancel = TPL_FALSE;
2101 buffer_status_t status = RELEASED;
2105 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2107 buffer_cnt = __tpl_list_get_count(wl_egl_surface->buffers);
2109 while (!__tpl_list_is_empty(wl_egl_surface->buffers)) {
2110 tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_pop_front(wl_egl_surface->buffers,
2113 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2115 status = wl_egl_buffer->status;
2117 TPL_INFO("[BUFFER_CLEAR]",
2118 "[%d/%d] wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) status(%s)",
2119 ++idx, buffer_cnt, wl_egl_surface, wl_egl_buffer,
2120 wl_egl_buffer->tbm_surface,
2121 status_to_string[status]);
/* In-flight buffer: let the worker run until it reaches COMMITTED,
 * re-pausing the thread around each timed wait. */
2123 if (status >= ENQUEUED) {
2124 tpl_result_t wait_result = TPL_ERROR_NONE;
2126 while (status < COMMITTED && wait_result != TPL_ERROR_TIME_OUT) {
2127 tpl_gthread_continue(wl_egl_display->thread);
2128 wait_result = tpl_gcond_timed_wait(&wl_egl_buffer->cond,
2129 &wl_egl_buffer->mutex,
2131 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2132 status = wl_egl_buffer->status; /* update status */
2134 if (wait_result == TPL_ERROR_TIME_OUT) {
2135 TPL_WARN("timeout occured waiting signaled. wl_egl_buffer(%p) status(%s)",
2136 wl_egl_buffer, status_to_string[status]);
2141 /* ACQUIRED, WAITING_SIGNALED, WAITING_VBLANK, COMMITTED */
2142 /* It has been acquired but has not yet been released, so this
2143 * buffer must be released. */
2144 need_to_release = (status >= ACQUIRED && status <= COMMITTED);
2146 /* After dequeue, it has not been enqueued yet
2147 * so cancel_dequeue must be performed. */
2148 need_to_cancel = (status == DEQUEUED);
2150 if (need_to_release) {
2151 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2152 wl_egl_buffer->tbm_surface);
2153 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2154 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2155 wl_egl_buffer->tbm_surface, tsq_err);
2158 if (need_to_cancel) {
2159 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2160 wl_egl_buffer->tbm_surface);
2161 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2162 TPL_ERR("Failed to release tbm_surface(%p) tsq_err(%d)",
2163 wl_egl_buffer->tbm_surface, tsq_err);
2166 wl_egl_buffer->status = RELEASED;
2168 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2170 if (need_to_release || need_to_cancel || status == ENQUEUED)
2171 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2174 tpl_gthread_continue(wl_egl_display->thread);
/* Backend finalizer for a window-type tpl_surface.
 * Destroys the per-surface thread gsource, detaches and neutralizes the
 * wl_egl_window callbacks, frees buffer bookkeeping and finally frees the
 * tpl_wl_egl_surface_t stored in surface->backend.data.
 * NOTE(review): several original lines are elided in this excerpt; the
 * comments below describe only the statements that are visible. */
2178 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
2180 tpl_wl_egl_display_t *wl_egl_display = NULL;
2182 TPL_ASSERT(surface);
2183 TPL_ASSERT(surface->display);
/* Only window surfaces own the resources released below. */
2185 TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
2187 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2188 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
2190 wl_egl_display = wl_egl_surface->wl_egl_display;
2191 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
2193 TPL_INFO("[SURFACE_FINI][BEGIN]",
2194 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
2196 wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
2198 _tpl_wl_egl_surface_buffer_clear(wl_egl_surface);
2200 if (wl_egl_surface->surf_source) {
2201 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2202 // Send destroy message to thread
2203 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
2204 /* This is a protection to prevent problems that arise in unexpected situations
2205 * that g_cond_wait cannot work normally.
2206 * When calling tpl_gsource_destroy() with destroy_in_thread is TPL_TRUE,
2207 * caller should use tpl_gcond_wait() in the loop with checking finalized flag
/* Loop re-checks the flag to tolerate spurious condition-variable wakeups. */
2209 while (!wl_egl_surface->gsource_finalized) {
2210 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
2212 wl_egl_surface->surf_source = NULL;
2213 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2216 if (wl_egl_surface->wl_egl_window) {
2217 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2218 struct tizen_private tizen_private(wl_egl_window->driver_private);
2219 TPL_INFO("[WL_EGL_WINDOW_FINI]",
2220 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
2221 wl_egl_surface, wl_egl_window,
2222 wl_egl_surface->wl_surface);
/* Null every callback installed on tizen_private before freeing it so a
 * late caller cannot invoke freed state through stale function pointers. */
2224 if (tizen_private) {
2225 tizen_private->set_window_serial_callback = NULL;
2226 tizen_private->rotate_callback = NULL;
2227 tizen_private->get_rotation_capability = NULL;
2228 tizen_private->create_presentation_sync_fd = NULL;
2229 tizen_private->create_commit_sync_fd = NULL;
2230 tizen_private->set_frontbuffer_callback = NULL;
2231 tizen_private->merge_sync_fds = NULL;
2232 tizen_private->data = NULL;
2233 free(tizen_private);
2235 wl_egl_window->driver_private = NULL;
2238 wl_egl_window->destroy_window_callback = NULL;
2239 wl_egl_window->resize_callback = NULL;
2241 wl_egl_surface->wl_egl_window = NULL;
2244 wl_egl_surface->last_enq_buffer = NULL;
2246 wl_egl_surface->wl_surface = NULL;
2247 wl_egl_surface->wl_egl_display = NULL;
2248 wl_egl_surface->tpl_surface = NULL;
/* Free the buffer-tracking list under its mutex, then destroy the mutex. */
2250 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2251 __tpl_list_free(wl_egl_surface->buffers, NULL);
2252 wl_egl_surface->buffers = NULL;
2253 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2254 tpl_gmutex_clear(&wl_egl_surface->buffers_mutex);
/* Lock/unlock pairs before each tpl_gmutex_clear() make sure no other
 * thread still holds the mutex at the moment it is destroyed. */
2256 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2257 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2258 tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
2260 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2261 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2262 tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
2264 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2265 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2266 tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
2267 tpl_gcond_clear(&wl_egl_surface->surf_cond);
2269 TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
2271 free(wl_egl_surface);
2272 surface->backend.data = NULL;
/* Records whether the client can render pre-rotated content for this
 * surface; the flag is only stored here, consumers read it elsewhere.
 * Returns TPL_ERROR_INVALID_PARAMETER on NULL surface/backend data. */
2276 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
2279 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2281 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2283 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2285 TPL_INFO("[SET_PREROTATION_CAPABILITY]",
2286 "wl_egl_surface(%p) prerotation capability set to [%s]",
2287 wl_egl_surface, (set ? "TRUE" : "FALSE"));
2289 wl_egl_surface->prerotation_capability = set;
2290 return TPL_ERROR_NONE;
/* Updates the surface's post interval under surf_mutex.
 * The interval is read by the commit/vblank path (see vblank callback),
 * so the write is serialized with that thread. */
2294 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
2297 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2299 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2301 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2303 TPL_INFO("[SET_POST_INTERVAL]",
2304 "wl_egl_surface(%p) post_interval(%d -> %d)",
2305 wl_egl_surface, wl_egl_surface->post_interval, post_interval);
2307 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2308 wl_egl_surface->post_interval = post_interval;
2309 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2311 return TPL_ERROR_NONE;
/* A surface is considered valid as long as its tbm_queue has not been
 * reset (e.g. by a resize); returns the negation of the reset flag. */
2315 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
2317 tpl_bool_t retval = TPL_TRUE;
2319 TPL_ASSERT(surface);
2320 TPL_ASSERT(surface->backend.data);
2322 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2324 retval = !(wl_egl_surface->reset);
/* Reports the current size of the backing tbm_surface_queue.
 * width/height out-parameters appear to be optional (guards elided in
 * this excerpt) — TODO confirm against full source. */
2330 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2332 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2335 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2337 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
/* Fence sync cannot be used while frontbuffer rendering is active,
 * so availability is simply the inverse of frontbuffer_activated. */
2341 __tpl_wl_egl_surface_fence_sync_is_available(tpl_surface_t *surface)
2343 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2345 return !wl_egl_surface->frontbuffer_activated;
2348 #define CAN_DEQUEUE_TIMEOUT_MS 10000
/* Last-resort recovery used after a CAN_DEQUEUE timeout: drops all
 * vblank-waiting buffers, flushes the tbm_surface_queue, and releases
 * every tracked buffer whose status is between ACQUIRED and COMMITTED.
 * Returns TPL_ERROR_INVALID_OPERATION if the queue flush itself fails. */
2351 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2353 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2355 _print_buffer_lists(wl_egl_surface);
/* Discard buffers still queued for a vblank commit; they will never be
 * committed once the queue is flushed. */
2357 if (wl_egl_surface->vblank) {
2358 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2360 if (wl_egl_surface->vblank->waiting_buffers)
2361 __tpl_list_fini(wl_egl_surface->vblank->waiting_buffers, NULL);
2363 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2366 if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2367 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2368 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2369 wl_egl_surface->tbm_queue, tsq_err);
2370 return TPL_ERROR_INVALID_OPERATION;
/* Drain the per-surface buffer list; buffers in-flight toward the
 * compositor (ACQUIRED..COMMITTED) are returned to the queue and the
 * extra reference taken at acquire time is dropped. */
2373 while (!__tpl_list_is_empty(wl_egl_surface->buffers)) {
2374 tpl_bool_t need_to_release = TPL_FALSE;
2375 tpl_wl_egl_buffer_t wl_egl_buffer(
2376 __tpl_list_pop_front(wl_egl_surface->buffers, NULL));
2377 need_to_release = (wl_egl_buffer->status >= ACQUIRED) &&
2378 (wl_egl_buffer->status <= COMMITTED);
2380 if (need_to_release) {
2381 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2382 wl_egl_buffer->tbm_surface);
2383 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2384 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2385 wl_egl_buffer->tbm_surface, tsq_err);
2386 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2390 TPL_INFO("[FORCE_FLUSH]",
2391 "wl_egl_surface(%p) tbm_queue(%p)",
2392 wl_egl_surface, wl_egl_surface->tbm_queue);
2394 _print_buffer_lists(wl_egl_surface);
2396 return TPL_ERROR_NONE;
/* Re-initializes per-frame state of a (possibly reused) wl_egl_buffer
 * at dequeue time: transform/serial snapshot from the window's
 * tizen_private, plus clearing of stale damage rects. */
2400 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2401 tpl_wl_egl_surface_t *wl_egl_surface)
2403 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2404 struct tizen_private tizen_private(wl_egl_window->driver_private);
2406 TPL_ASSERT(tizen_private);
2408 wl_egl_buffer->draw_done = TPL_FALSE;
2409 wl_egl_buffer->need_to_commit = TPL_TRUE;
2410 #if TIZEN_FEATURE_ENABLE
2411 wl_egl_buffer->buffer_release = NULL;
2413 wl_egl_buffer->transform = tizen_private->transform;
/* w_rotated flags that the window transform changed since the last
 * frame; consumers of this flag are outside this excerpt. */
2415 if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2416 wl_egl_buffer->w_transform = tizen_private->window_transform;
2417 wl_egl_buffer->w_rotated = TPL_TRUE;
/* Serial comes from the app (set_serial) when provided, otherwise a
 * monotonically increasing counter in tizen_private. */
2420 if (wl_egl_surface->set_serial_is_used) {
2421 wl_egl_buffer->serial = wl_egl_surface->serial;
2423 wl_egl_buffer->serial = ++tizen_private->serial;
/* Drop damage rects left over from the buffer's previous cycle. */
2426 if (wl_egl_buffer->rects) {
2427 free(wl_egl_buffer->rects);
2428 wl_egl_buffer->rects = NULL;
2429 wl_egl_buffer->num_rects = 0;
/* Looks up the tpl_wl_egl_buffer_t attached to a tbm_surface via tbm
 * user data (KEY_WL_EGL_BUFFER). Returns NULL if none was attached. */
2433 static tpl_wl_egl_buffer_t *
2434 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2436 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2437 tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2438 (void **)&wl_egl_buffer);
2439 return wl_egl_buffer;
/* Returns the wl_egl_buffer bound to tbm_surface, creating and
 * registering a new one (as tbm user data) on first sight.
 * Freshly created buffers start RELEASED with all sync fds at -1;
 * every buffer is (re)initialized via _wl_egl_buffer_init() before
 * being returned. Returns NULL only on allocation failure. */
2442 static tpl_wl_egl_buffer_t *
2443 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2444 tbm_surface_h tbm_surface)
2446 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2447 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2449 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2451 if (!wl_egl_buffer) {
2452 wl_egl_buffer = calloc(1, sizeof(tpl_wl_egl_buffer_t));
2453 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
/* Attach to the tbm_surface so _get_wl_egl_buffer() finds it and
 * __cb_wl_egl_buffer_free runs when the tbm_surface is destroyed. */
2455 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2456 (tbm_data_free)__cb_wl_egl_buffer_free);
2457 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2460 wl_egl_buffer->wl_buffer = NULL;
2461 wl_egl_buffer->tbm_surface = tbm_surface;
2462 wl_egl_buffer->bo_name = _get_tbm_surface_bo_name(tbm_surface);
2463 wl_egl_buffer->wl_egl_surface = wl_egl_surface;
2465 wl_egl_buffer->status = RELEASED;
/* -1 marks "no fd" for all sync file descriptors. */
2467 wl_egl_buffer->acquire_fence_fd = -1;
2468 wl_egl_buffer->commit_sync_fd = -1;
2469 wl_egl_buffer->presentation_sync_fd = -1;
2470 wl_egl_buffer->release_fence_fd = -1;
2472 wl_egl_buffer->dx = wl_egl_window->dx;
2473 wl_egl_buffer->dy = wl_egl_window->dy;
2474 wl_egl_buffer->width = tbm_surface_get_width(tbm_surface);
2475 wl_egl_buffer->height = tbm_surface_get_height(tbm_surface);
/* -1 forces the first _wl_egl_buffer_init() to record w_transform. */
2477 wl_egl_buffer->w_transform = -1;
2479 tpl_gmutex_init(&wl_egl_buffer->mutex);
2480 tpl_gcond_init(&wl_egl_buffer->cond);
2482 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2483 __tpl_list_push_back(wl_egl_surface->buffers, (void *)wl_egl_buffer);
2484 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2486 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2487 "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2488 wl_egl_surface, wl_egl_buffer, tbm_surface,
2489 wl_egl_buffer->bo_name);
2492 _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2494 return wl_egl_buffer;
/* Dequeues the next render target from the surface's tbm_queue.
 * Main phases visible here:
 *   1. After a queue reset, wait (bounded) for the previously enqueued
 *      buffer to reach COMMITTED before dequeuing again.
 *   2. Wait for can-dequeue with CAN_DEQUEUE_TIMEOUT_MS; on timeout the
 *      queue is force-flushed while the backend thread is paused.
 *   3. Frontbuffer mode: reuse/stop surface->frontbuffer as needed.
 *   4. Normal path: dequeue, ref, create/refresh wl_egl_buffer, and
 *      hand out the release fence when explicit sync is enabled.
 * Returns the dequeued tbm_surface (or surface->frontbuffer), NULL on
 * failure paths elided from this excerpt. */
2497 static tbm_surface_h
2498 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2499 int32_t *release_fence)
2501 TPL_ASSERT(surface->backend.data);
2502 TPL_ASSERT(surface->display);
2503 TPL_ASSERT(surface->display->backend.data);
2505 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2506 tpl_wl_egl_display_t wl_egl_display(surface->display->backend.data);
2507 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2509 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2511 tbm_surface_h tbm_surface = NULL;
/* Drop the TPL object lock while blocking so other TPL calls proceed. */
2513 TPL_OBJECT_UNLOCK(surface);
2514 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2515 if (wl_egl_surface->reset == TPL_TRUE) {
2516 if (_check_buffer_validate(wl_egl_surface, wl_egl_surface->last_enq_buffer) &&
2517 tbm_surface_internal_is_valid(wl_egl_surface->last_enq_buffer)) {
2518 tbm_surface_h last_enq_buffer = wl_egl_surface->last_enq_buffer;
2519 tpl_wl_egl_buffer_t *enqueued_buffer =
2520 _get_wl_egl_buffer(last_enq_buffer);
2522 if (enqueued_buffer) {
/* Keep the buffer alive while we wait outside surf_mutex. */
2523 tbm_surface_internal_ref(last_enq_buffer);
2524 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2525 tpl_gmutex_lock(&enqueued_buffer->mutex);
/* Wait for the previous frame to pass ENQUEUED..WAITING_VBLANK and be
 * committed; timed wait guards against a stuck backend thread. */
2526 while (enqueued_buffer->status >= ENQUEUED &&
2527 enqueued_buffer->status < COMMITTED) {
2528 tpl_result_t wait_result;
2529 TPL_INFO("[DEQ_AFTER_RESET]",
2530 "wl_egl_surface(%p) waiting for previous wl_egl_buffer(%p) commit",
2531 wl_egl_surface, enqueued_buffer);
2533 wait_result = tpl_gcond_timed_wait(&enqueued_buffer->cond,
2534 &enqueued_buffer->mutex,
2536 if (wait_result == TPL_ERROR_TIME_OUT) {
2537 TPL_WARN("timeout occured waiting signaled. wl_egl_buffer(%p)",
2542 tpl_gmutex_unlock(&enqueued_buffer->mutex);
2543 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2544 tbm_surface_internal_unref(last_enq_buffer);
2548 wl_egl_surface->last_enq_buffer = NULL;
2550 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2552 tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2553 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2554 TPL_OBJECT_LOCK(surface);
2557 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2558 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2559 wl_egl_surface->tbm_queue, surface);
2561 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2562 /* Locking wl_event_mutex is a secondary means of preparing for
2563 * the failure of tpl_gthread_pause_in_idle().
2564 * If tpl_gthread_pause_in_idle()is successful,
2565 * locking wl_event_mutex does not affect. */
2566 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2567 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2568 TPL_ERR("Failed to timeout reset. tbm_queue(%p) surface(%p)",
2569 wl_egl_surface->tbm_queue, surface);
2570 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2571 tpl_gthread_continue(wl_egl_display->thread);
2574 tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2577 wl_egl_surface->vblank_done = TPL_TRUE;
2579 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2580 tpl_gthread_continue(wl_egl_display->thread);
2583 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2584 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2585 wl_egl_surface->tbm_queue, surface);
2589 /* After the can dequeue state, lock the wl_event_mutex to prevent other
2590 * events from being processed in wayland_egl_thread
2591 * during below dequeue procedure. */
2592 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
/* Queue dimensions may have changed after a reset; resync them. */
2594 surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2595 surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2596 wl_egl_surface->width = surface->width;
2597 wl_egl_surface->height = surface->height;
2600 /* If surface->frontbuffer is not null, the frontbuffer rendering mode will be
2601 * maintained if the surface state meets the conditions below.
2602 * 1. surface->is_frontbuffer_mode == TPL_TRUE
2603 * - It may be changed to true or false by calling
2604 * tpl_surface_set_frontbuffer_mode(will be deprecated)
2606 * wl_egl_window_tizen_set_frontbuffer_mode (recommended)
2607 * 2. is_activated == TPL_TRUE
2608 * - To check whether direct display is possible.
2609 * 3. wl_egl_surface->reset == TPL_FALSE
2610 * - tbm_queue reset should not have occurred due to window resize.
2611 * If surface is not satisfied with any of above conditions,
2612 * frontbuffer rendering will be stopped and surface->frontbuffer becomes null.
2614 if (surface->frontbuffer) {
2615 if (!surface->is_frontbuffer_mode ||
2616 !wl_egl_surface->is_activated ||
2617 wl_egl_surface->reset) {
2618 surface->frontbuffer = NULL;
2619 wl_egl_surface->need_to_enqueue = TPL_TRUE;
2620 wl_egl_surface->frontbuffer_activated = TPL_FALSE;
2621 TPL_INFO("[FRONTBUFFER_RENDERING_STOP]",
2622 "wl_egl_surface(%p) wl_egl_window(%p)",
2623 wl_egl_surface, wl_egl_surface->wl_egl_window);
2625 bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2627 "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2628 surface->frontbuffer, bo_name);
2629 TRACE_ASYNC_BEGIN((intptr_t)surface->frontbuffer,
2630 "[DEQ]~[ENQ] BO_NAME:%d",
/* Frontbuffer still valid: hand the same buffer back without dequeue. */
2632 wl_egl_surface->frontbuffer_activated = TPL_TRUE;
2633 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2634 return surface->frontbuffer;
2638 tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2641 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p)| tsq_err = %d",
2642 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2643 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Extra ref pairs with the unref on release/cancel paths. */
2647 tbm_surface_internal_ref(tbm_surface);
2649 wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2650 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2652 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2653 wl_egl_buffer->status = DEQUEUED;
2655 /* If wl_egl_buffer->release_fence_fd is -1,
2656 * the tbm_surface can be used immediately.
2657 * If not, user(EGL) have to wait until signaled. */
2658 if (release_fence) {
2659 #if TIZEN_FEATURE_ENABLE
2660 if (wl_egl_display->use_explicit_sync) {
2661 *release_fence = wl_egl_buffer->release_fence_fd;
2662 TPL_LOG_D("[EXPLICIT_FENCE]", "wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2663 wl_egl_surface, wl_egl_buffer, *release_fence);
/* Ownership of the fd transfers to the caller; forget it here. */
2665 wl_egl_buffer->release_fence_fd = -1;
2669 *release_fence = -1;
2673 if (surface->is_frontbuffer_mode && wl_egl_surface->is_activated) {
2674 if (surface->frontbuffer == NULL) {
2675 TPL_INFO("[FRONTBUFFER_RENDERING_START]",
2676 "wl_egl_surface(%p) wl_egl_window(%p) bo(%d)",
2677 wl_egl_surface, wl_egl_surface->wl_egl_window,
2678 _get_tbm_surface_bo_name(tbm_surface));
2680 surface->frontbuffer = tbm_surface;
2683 wl_egl_surface->reset = TPL_FALSE;
2685 TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2686 TRACE_ASYNC_BEGIN((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d",
2687 wl_egl_buffer->bo_name);
2688 TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2689 wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name,
2690 release_fence ? *release_fence : -1);
2692 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2693 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Returns a previously dequeued (but not enqueued) tbm_surface to the
 * queue via tbm_surface_queue_cancel_dequeue(), marking its
 * wl_egl_buffer RELEASED and dropping the ref taken at dequeue time.
 * Fix: the cancel-failure log said "Failed to release", misreporting
 * the operation that actually failed; it now says "cancel dequeue".
 * Returns TPL_ERROR_NONE on success, INVALID_PARAMETER for an invalid
 * tbm_surface, INVALID_OPERATION if the queue rejects the cancel. */
2699 __tpl_wl_egl_surface_cancel_buffer(tpl_surface_t *surface,
2700 tbm_surface_h tbm_surface)
2702 TPL_ASSERT(surface);
2703 TPL_ASSERT(surface->backend.data);
2705 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2706 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2707 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2709 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2710 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2711 return TPL_ERROR_INVALID_PARAMETER;
/* Mark the buffer RELEASED under its own mutex before giving it back. */
2714 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2715 if (wl_egl_buffer) {
2716 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2717 wl_egl_buffer->status = RELEASED;
2718 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the reference added when this buffer was dequeued. */
2721 tbm_surface_internal_unref(tbm_surface);
2723 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2725 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2726 TPL_ERR("Failed to cancel dequeue tbm_surface(%p) surface(%p)",
2727 tbm_surface, surface);
2728 return TPL_ERROR_INVALID_OPERATION;
2731 TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2732 wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2734 return TPL_ERROR_NONE;
/* Hands a finished frame back to the backend: stores damage rects and
 * the acquire fence on the wl_egl_buffer, snapshots the pending
 * presentation/commit sync fds, marks the buffer ENQUEUED and pushes it
 * into the tbm_queue (which wakes the backend thread's acquire path).
 * In frontbuffer mode the enqueue is skipped once the frontbuffer is
 * already on screen; the acquire fence is closed in that case.
 * Fix: corrected the OOM error message typo ("memory fo" -> "memory for").
 * Returns TPL_ERROR_NONE, INVALID_PARAMETER, OUT_OF_MEMORY, or
 * INVALID_OPERATION on queue-enqueue failure. */
2738 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2739 tbm_surface_h tbm_surface,
2740 int num_rects, const int *rects, int32_t acquire_fence)
2742 TPL_ASSERT(surface);
2743 TPL_ASSERT(surface->display);
2744 TPL_ASSERT(surface->backend.data);
2745 TPL_ASSERT(tbm_surface);
2746 TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2748 tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2749 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2750 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2753 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2754 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2756 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2757 return TPL_ERROR_INVALID_PARAMETER;
2760 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2761 if (!wl_egl_buffer) {
2762 TPL_ERR("Failed to get wl_egl_buffer from tbm_surface(%p)", tbm_surface);
2763 return TPL_ERROR_INVALID_PARAMETER;
2766 bo_name = _get_tbm_surface_bo_name(tbm_surface);
2768 TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2770 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2772 /* If there are received region information, save it to wl_egl_buffer */
2773 if (num_rects && rects) {
/* Replace, never append: old rects belong to the previous frame. */
2774 if (wl_egl_buffer->rects != NULL) {
2775 free(wl_egl_buffer->rects);
2776 wl_egl_buffer->rects = NULL;
2777 wl_egl_buffer->num_rects = 0;
/* Each damage rect is 4 ints (x, y, w, h). */
2780 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2781 wl_egl_buffer->num_rects = num_rects;
2783 if (!wl_egl_buffer->rects) {
2784 TPL_ERR("Failed to allocate memory for damage rects info.");
2785 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2786 return TPL_ERROR_OUT_OF_MEMORY;
2789 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
/* Frontbuffer fast path: nothing new to show, so drop the fence and
 * skip the queue round-trip entirely. */
2792 if (!wl_egl_surface->need_to_enqueue ||
2793 !wl_egl_buffer->need_to_commit) {
2795 if (acquire_fence != -1) {
2796 close(acquire_fence);
2799 TPL_LOG_T("FRONTBUFFER_MODE", "[ENQ_SKIP] tbm_surface(%p) bo(%d) need not to enqueue",
2800 tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2801 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2802 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2803 return TPL_ERROR_NONE;
2806 /* In frontbuffer mode, will skip tbm_surface_queue_enqueue, acquire, and
2807 * commit if surface->frontbuffer that is already set and the tbm_surface
2808 * client want to enqueue are the same.
2810 if (surface->is_frontbuffer_mode) {
2811 /* The first buffer to be activated in frontbuffer mode must be
2812 * committed. Subsequence frames do not need to be committed because
2813 * the buffer is already displayed.
2815 if (surface->frontbuffer == tbm_surface)
2816 wl_egl_surface->need_to_enqueue = TPL_FALSE;
/* Take ownership of the new acquire fence, closing any stale one. */
2819 if (wl_egl_buffer->acquire_fence_fd != -1)
2820 close(wl_egl_buffer->acquire_fence_fd);
2822 wl_egl_buffer->acquire_fence_fd = acquire_fence;
/* Move pending per-surface sync fds onto this buffer so they are
 * signaled when exactly this frame is committed/presented. */
2824 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2825 if (wl_egl_surface->presentation_sync.fd != -1) {
2826 wl_egl_buffer->presentation_sync_fd = wl_egl_surface->presentation_sync.fd;
2827 wl_egl_surface->presentation_sync.fd = -1;
2829 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2831 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2832 if (wl_egl_surface->commit_sync.fd != -1) {
2833 wl_egl_buffer->commit_sync_fd = wl_egl_surface->commit_sync.fd;
2834 wl_egl_surface->commit_sync.fd = -1;
2835 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2836 _get_tbm_surface_bo_name(tbm_surface));
2838 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2840 wl_egl_buffer->status = ENQUEUED;
2842 "[ENQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2843 wl_egl_buffer, tbm_surface, bo_name, acquire_fence);
2845 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Remember the last enqueued buffer; dequeue-after-reset waits on it. */
2847 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2848 wl_egl_surface->last_enq_buffer = tbm_surface;
2849 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2851 tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
2853 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2854 tbm_surface_internal_unref(tbm_surface);
2855 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2856 tbm_surface, wl_egl_surface, tsq_err);
2857 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2858 return TPL_ERROR_INVALID_OPERATION;
/* Drop the ref taken at dequeue; the queue now owns the buffer. */
2861 tbm_surface_internal_unref(tbm_surface);
2863 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2865 return TPL_ERROR_NONE;
/* Dispatch callback of the disposable fence-wait gsource: runs on the
 * backend thread when a buffer's acquire fence signals (render done).
 * Moves the buffer to WAITING_VBLANK and either commits immediately or
 * queues it in the vblank waiting list. */
2869 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2871 tpl_wl_egl_buffer_t wl_egl_buffer(tpl_gsource_get_data(gsource));
2872 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, TPL_FALSE);
2874 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2875 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_FALSE);
2877 tbm_surface_h tbm_surface = wl_egl_buffer->tbm_surface;
2878 TPL_CHECK_ON_NULL_RETURN_VAL(tbm_surface, TPL_FALSE);
2879 TPL_CHECK_ON_FALSE_RETURN_VAL(tbm_surface_internal_is_valid(tbm_surface), TPL_FALSE);
2881 wl_egl_surface->render_done_cnt++;
2883 TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2884 wl_egl_buffer->acquire_fence_fd);
2886 TPL_LOG_D("[RENDER DONE]", "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p)",
2887 wl_egl_surface, wl_egl_buffer, tbm_surface);
2889 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2890 wl_egl_buffer->status = WAITING_VBLANK;
2892 TPL_LOG_D("[FINALIZE]", "wl_egl_surface(%p) wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2893 wl_egl_surface, wl_egl_buffer, wl_egl_buffer->waiting_source,
2894 wl_egl_buffer->acquire_fence_fd);
/* The disposable gsource owns/retires the fd; forget both here so the
 * acquire path does not try to destroy a dead waiting_source. */
2896 wl_egl_buffer->acquire_fence_fd = -1;
2897 wl_egl_buffer->waiting_source = NULL;
2899 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2901 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
/* Commit now unless a vblank tick is still pending; otherwise queue the
 * buffer for the next __cb_tdm_client_vblank. */
2903 if (!wl_egl_surface->vblank_enable || wl_egl_surface->vblank_done)
2904 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2906 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2907 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers,
2909 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2912 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* Finalize hook for the disposable fence-wait gsource; nothing to free
 * here (visible body only ignores the gsource). */
2918 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2920 TPL_IGNORE(gsource);
/* gsource vtable for per-buffer fence-wait sources (see
 * _thread_surface_queue_acquire, which creates them). */
2923 static tpl_gsource_functions buffer_funcs = {
2926 .dispatch = __thread_func_waiting_source_dispatch,
2927 .finalize = __thread_func_waiting_source_finalize,
/* Backend-thread consumer of the tbm_queue: acquires every ready
 * buffer and routes it toward commit.
 * Per buffer: ref it, mark ACQUIRED, then
 *  - with an acquire fence and explicit sync: commit path directly
 *    (compositor waits on the fence);
 *  - with an acquire fence, no explicit sync: create a disposable
 *    fence-wait gsource (buffer_funcs) and defer the commit;
 *  - no fence: ready to commit immediately;
 * and finally either commit or park the buffer on the vblank waiting
 * list when a vblank tick is still outstanding.
 * Returns TPL_ERROR_INVALID_OPERATION if the queue acquire fails. */
2931 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2933 tbm_surface_h tbm_surface = NULL;
2934 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2935 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2936 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2937 tpl_bool_t ready_to_commit = TPL_FALSE;
2939 while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2940 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2942 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2943 TPL_ERR("Failed to acquire from tbm_queue(%p)",
2944 wl_egl_surface->tbm_queue);
2945 return TPL_ERROR_INVALID_OPERATION;
/* Ref held until the buffer is released back to the queue. */
2948 tbm_surface_internal_ref(tbm_surface);
2950 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2951 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2952 "wl_egl_buffer sould be not NULL");
2954 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2956 wl_egl_buffer->status = ACQUIRED;
2958 TPL_LOG_T("WL_EGL", "[ACQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2959 wl_egl_buffer, tbm_surface,
2960 _get_tbm_surface_bo_name(tbm_surface));
2962 if (wl_egl_buffer->acquire_fence_fd != -1) {
2963 #if TIZEN_FEATURE_ENABLE
/* Explicit sync: compositor waits on the fence, commit right away. */
2964 if (wl_egl_display->use_explicit_sync)
2965 ready_to_commit = TPL_TRUE;
/* Replace any stale waiting source before creating a new one. */
2969 if (wl_egl_buffer->waiting_source) {
2970 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2971 wl_egl_buffer->waiting_source = NULL;
2974 wl_egl_buffer->waiting_source =
2975 tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2976 wl_egl_buffer->acquire_fence_fd,
2977 FD_TYPE_FENCE, &buffer_funcs,
2978 SOURCE_TYPE_DISPOSABLE);
2979 wl_egl_buffer->status = WAITING_SIGNALED;
2981 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2982 wl_egl_buffer->acquire_fence_fd);
/* Commit happens later in __thread_func_waiting_source_dispatch. */
2984 ready_to_commit = TPL_FALSE;
2987 ready_to_commit = TPL_TRUE;
2990 if (ready_to_commit) {
2991 if (!wl_egl_surface->vblank_enable || wl_egl_surface->vblank_done)
2992 ready_to_commit = TPL_TRUE;
2994 wl_egl_buffer->status = WAITING_VBLANK;
2995 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2996 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers, wl_egl_buffer);
2997 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2998 ready_to_commit = TPL_FALSE;
/* Commit outside wl_egl_buffer->mutex to avoid lock nesting. */
3002 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3004 if (ready_to_commit)
3005 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3008 return TPL_ERROR_NONE;
3011 /* -- BEGIN -- tdm_client vblank callback function */
/* tdm_client vblank handler (backend thread): marks the vblank done and
 * commits buffer(s) parked on the vblank waiting list.
 * Normally one buffer per tick; on TDM_ERROR_TIMEOUT (or non-positive
 * post_interval) the whole list is drained so nothing stalls. */
3013 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
3014 unsigned int sequence, unsigned int tv_sec,
3015 unsigned int tv_usec, void *user_data)
3017 tpl_wl_egl_surface_t wl_egl_surface(user_data);
3019 TRACE_ASYNC_END((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3020 TPL_LOG_D("[VBLANK_DONE]", "wl_egl_surface(%p)", wl_egl_surface);
3022 if (error == TDM_ERROR_TIMEOUT)
3023 TPL_WARN("[TDM_ERROR_TIMEOUT] It will keep going. wl_egl_surface(%p)",
3026 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
3027 wl_egl_surface->vblank_done = TPL_TRUE;
3029 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
3030 tpl_bool_t is_empty = TPL_TRUE;
/* Pop under the vblank mutex, commit outside it. */
3032 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
3033 tpl_wl_egl_buffer_t wl_egl_buffer(
3034 __tpl_list_pop_front( wl_egl_surface->vblank->waiting_buffers, NULL));
3035 is_empty = __tpl_list_is_empty(wl_egl_surface->vblank->waiting_buffers);
3036 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
3038 if (!wl_egl_buffer) break;
3040 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3042 /* If tdm error such as TIMEOUT occurred,
3043 * flush all vblank waiting buffers of its wl_egl_surface.
3044 * Otherwise, only one wl_egl_buffer will be committed per one vblank event.
3046 if (error == TDM_ERROR_NONE && wl_egl_surface->post_interval > 0)
3048 } while (!is_empty);
/* vblank throttling stays enabled only while post_interval > 0. */
3050 wl_egl_surface->vblank_enable = (wl_egl_surface->post_interval > 0);
3052 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
3054 /* -- END -- tdm_client vblank callback function */
3056 #if TIZEN_FEATURE_ENABLE
/* zwp_linux_buffer_release_v1 "fenced_release" handler: the compositor
 * is done with the buffer but GPU work may still be pending; store the
 * release fence fd on the buffer, mark it RELEASED, return it to the
 * tbm_queue and drop the commit-time reference. */
3058 __cb_buffer_fenced_release(void *data,
3059 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
3061 tpl_wl_egl_buffer_t wl_egl_buffer(data);
3062 tbm_surface_h tbm_surface = NULL;
3064 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3066 tbm_surface = wl_egl_buffer->tbm_surface;
3068 if (tbm_surface_internal_is_valid(tbm_surface)) {
3069 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
/* Only act if the buffer is still COMMITTED; otherwise this release is
 * stale and is ignored. */
3071 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3072 if (wl_egl_buffer->status == COMMITTED) {
3073 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3075 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3076 wl_egl_buffer->buffer_release = NULL;
/* Fence ownership moves to the buffer; dequeue hands it to EGL. */
3078 wl_egl_buffer->release_fence_fd = fence;
3079 wl_egl_buffer->status = RELEASED;
3081 TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
3082 _get_tbm_surface_bo_name(tbm_surface),
3084 TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3085 _get_tbm_surface_bo_name(tbm_surface));
3088 "[FENCED_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
3089 wl_egl_buffer, tbm_surface,
3090 _get_tbm_surface_bo_name(tbm_surface),
3093 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3095 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3096 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3099 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Unref pairs with the ref taken when the buffer was committed;
 * only done when the queue-release actually succeeded. */
3101 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3102 tbm_surface_internal_unref(tbm_surface);
3105 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* zwp_linux_buffer_release_v1 "immediate_release" handler: same flow as
 * __cb_buffer_fenced_release but with no fence — the buffer is reusable
 * right away, so release_fence_fd is reset to -1. */
3110 __cb_buffer_immediate_release(void *data,
3111 struct zwp_linux_buffer_release_v1 *release)
3113 tpl_wl_egl_buffer_t wl_egl_buffer(data);
3114 tbm_surface_h tbm_surface = NULL;
3116 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3118 tbm_surface = wl_egl_buffer->tbm_surface;
3120 if (tbm_surface_internal_is_valid(tbm_surface)) {
3121 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
/* Ignore the event unless the buffer is still COMMITTED. */
3123 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3124 if (wl_egl_buffer->status == COMMITTED) {
3125 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3127 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3128 wl_egl_buffer->buffer_release = NULL;
/* -1: no fence to wait on before reusing this buffer. */
3130 wl_egl_buffer->release_fence_fd = -1;
3131 wl_egl_buffer->status = RELEASED;
3133 TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
3134 _get_tbm_surface_bo_name(tbm_surface));
3135 TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3136 _get_tbm_surface_bo_name(tbm_surface));
3139 "[IMMEDIATE_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
3140 wl_egl_buffer, tbm_surface,
3141 _get_tbm_surface_bo_name(tbm_surface));
3143 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3145 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3146 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3149 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the commit-time ref only after a successful queue release. */
3151 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3152 tbm_surface_internal_unref(tbm_surface);
3155 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* Listener for zwp_linux_buffer_release_v1 (explicit sync release
 * events). Note: "listner" spelling kept — renaming would break other
 * references in the file. */
3159 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
3160 __cb_buffer_fenced_release,
3161 __cb_buffer_immediate_release,
/* Plain wl_buffer.release handler (no explicit sync): when a COMMITTED
 * buffer comes back from the compositor, return it to the tbm_queue,
 * mark it RELEASED and drop the commit-time reference. */
3166 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
3168 tpl_wl_egl_buffer_t wl_egl_buffer(data);
3169 tbm_surface_h tbm_surface = NULL;
3171 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer)
3173 tbm_surface = wl_egl_buffer->tbm_surface;
3175 if (tbm_surface_internal_is_valid(tbm_surface)) {
3176 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3177 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3179 tpl_gmutex_lock(&wl_egl_buffer->mutex);
/* Releases for buffers that are no longer COMMITTED are ignored. */
3181 if (wl_egl_buffer->status == COMMITTED) {
3183 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3185 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3186 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3188 wl_egl_buffer->status = RELEASED;
3190 TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
3191 TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3192 _get_tbm_surface_bo_name(tbm_surface));
3194 TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
3195 wl_egl_buffer->wl_buffer, tbm_surface,
3196 _get_tbm_surface_bo_name(tbm_surface));
3199 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Unref pairs with the ref taken at commit, gated on queue success. */
3201 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3202 tbm_surface_internal_unref(tbm_surface);
3204 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* Default wl_buffer listener. The cast adapts the wl_proxy-typed callback
 * to the wl_buffer_listener.release signature. */
3208 static const struct wl_buffer_listener wl_buffer_release_listener = {
3209 (void *)__cb_wl_buffer_release,
3211 #if TIZEN_FEATURE_ENABLE
/* presentation-time sync_output event: intentionally unused; all arguments
 * are ignored. */
3213 __cb_presentation_feedback_sync_output(void *data,
3214 struct wp_presentation_feedback *presentation_feedback,
3215 struct wl_output *output)
3218 TPL_IGNORE(presentation_feedback);
/* presentation-time "presented" event: the frame reached the screen.
 * Signals the per-frame presentation sync eventfd (unblocking any waiter),
 * destroys the feedback proxy, and removes the pst_feedback entry from the
 * surface's pending list. Timing fields (tv_sec/tv_nsec/refresh) are unused. */
3224 __cb_presentation_feedback_presented(void *data,
3225 struct wp_presentation_feedback *presentation_feedback,
3229 uint32_t refresh_nsec,
3234 TPL_IGNORE(tv_sec_hi);
3235 TPL_IGNORE(tv_sec_lo);
3236 TPL_IGNORE(tv_nsec);
3237 TPL_IGNORE(refresh_nsec);
3242 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3243 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
/* presentation_sync.mutex guards both the sync fd hand-off and the
 * presentation_feedbacks list. */
3245 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3247 TPL_LOG_D("[PRESENTED]", "pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3248 pst_feedback, presentation_feedback, pst_feedback->bo_name);
3250 if (pst_feedback->pst_sync_fd != -1) {
3251 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3252 "[PRESENTATION_SYNC] bo(%d)",
3253 pst_feedback->bo_name);
3254 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
3255 pst_feedback->pst_sync_fd = -1;
3258 wp_presentation_feedback_destroy(presentation_feedback);
3260 pst_feedback->presentation_feedback = NULL;
3261 pst_feedback->wl_egl_surface = NULL;
3262 pst_feedback->bo_name = 0;
3264 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3269 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* presentation-time "discarded" event: the frame was never shown.
 * Performs the same cleanup as the presented path — signal the sync fd so
 * waiters are not stuck, destroy the proxy, and drop the list entry. */
3273 __cb_presentation_feedback_discarded(void *data,
3274 struct wp_presentation_feedback *presentation_feedback)
3276 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3277 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
3279 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3281 TPL_LOG_D("[DISCARDED]", "pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3282 pst_feedback, presentation_feedback, pst_feedback->bo_name);
3284 if (pst_feedback->pst_sync_fd != -1) {
3285 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3286 "[PRESENTATION_SYNC] bo(%d)",
3287 pst_feedback->bo_name);
3288 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
3289 pst_feedback->pst_sync_fd = -1;
3292 wp_presentation_feedback_destroy(presentation_feedback);
3294 pst_feedback->presentation_feedback = NULL;
3295 pst_feedback->wl_egl_surface = NULL;
3296 pst_feedback->bo_name = 0;
3298 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3303 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* wp_presentation_feedback listener; entry order per the presentation-time
 * protocol: sync_output, presented, discarded. */
3306 static const struct wp_presentation_feedback_listener feedback_listener = {
3307 __cb_presentation_feedback_sync_output, /* sync_output feedback -*/
3308 __cb_presentation_feedback_presented,
3309 __cb_presentation_feedback_discarded
/* Requests an asynchronous tdm vblank wait of post_interval intervals;
 * __cb_tdm_client_vblank fires when it completes. On success clears
 * vblank_done (commits will wait for the callback); on failure returns
 * TPL_ERROR_INVALID_OPERATION. Runs on the backend thread. */
3314 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
3316 tdm_error tdm_err = TDM_ERROR_NONE;
3317 tpl_surface_vblank_t *vblank = wl_egl_surface->vblank;
3319 tdm_err = tdm_client_vblank_wait(vblank->tdm_vblank,
3320 wl_egl_surface->post_interval,
3321 __cb_tdm_client_vblank,
3322 (void *)wl_egl_surface);
3324 if (tdm_err == TDM_ERROR_NONE) {
3325 wl_egl_surface->vblank_done = TPL_FALSE;
3326 TRACE_ASYNC_BEGIN((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3328 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
3329 return TPL_ERROR_INVALID_OPERATION;
3332 return TPL_ERROR_NONE;
/* Commits one wl_egl_buffer to the compositor. Runs on the backend thread.
 * Sequence: lazily create the wl_buffer; set up presentation feedback and
 * buffer transforms; attach + damage; hand the acquire fence to explicit
 * sync (when enabled); wl_surface_commit + flush; mark the buffer COMMITTED;
 * kick a vblank wait; finally signal the commit sync fd. */
3336 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
3337 tpl_wl_egl_buffer_t *wl_egl_buffer)
3339 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3340 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
3341 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
3344 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
3345 "wl_egl_buffer sould be not NULL");
/* First commit of this tbm_surface: create its wl_buffer on demand. */
3347 if (wl_egl_buffer->wl_buffer == NULL) {
3348 wl_egl_buffer->wl_buffer =
3349 (struct wl_proxy *)wayland_tbm_client_create_buffer(
3350 wl_egl_display->wl_tbm_client,
3351 wl_egl_buffer->tbm_surface);
3353 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
3354 "[FATAL] Failed to create wl_buffer");
3356 TPL_INFO("[WL_BUFFER_CREATE]",
3357 "wl_egl_surface(%p) wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3358 wl_egl_surface, wl_egl_buffer, wl_egl_buffer->wl_buffer,
3359 wl_egl_buffer->tbm_surface);
/* The wl_buffer.release listener is only needed when explicit sync will
 * not deliver release events (no explicit sync, or no acquire fence). */
3361 #if TIZEN_FEATURE_ENABLE
3362 if (!wl_egl_display->use_explicit_sync ||
3363 wl_egl_buffer->acquire_fence_fd == -1)
3366 wl_buffer_add_listener((struct wl_buffer *)wl_egl_buffer->wl_buffer,
3367 &wl_buffer_release_listener,
3372 version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
3374 #if TIZEN_FEATURE_ENABLE
3375 /* create presentation feedback and add listener */
3376 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3377 if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
3378 struct pst_feedback *pst_feedback = NULL;
3380 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
3382 pst_feedback->presentation_feedback =
3383 wp_presentation_feedback(wl_egl_display->presentation,
/* Ownership of presentation_sync_fd moves into pst_feedback; the buffer's
 * copy is cleared so it is not signalled twice. */
3386 pst_feedback->wl_egl_surface = wl_egl_surface;
3387 pst_feedback->bo_name = wl_egl_buffer->bo_name;
3389 pst_feedback->pst_sync_fd = wl_egl_buffer->presentation_sync_fd;
3390 wl_egl_buffer->presentation_sync_fd = -1;
3392 wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
3393 &feedback_listener, pst_feedback);
3394 __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
3395 TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
3396 "[PRESENTATION_SYNC] bo(%d)",
3397 pst_feedback->bo_name);
/* Feedback creation failed: signal the fd anyway so no waiter blocks. */
3399 TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
3401 send_signal(wl_egl_buffer->presentation_sync_fd, "PST_SYNC");
3402 wl_egl_buffer->presentation_sync_fd = -1;
3405 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Window-rotation transform is applied through wayland-tbm per buffer. */
3408 if (wl_egl_buffer->w_rotated == TPL_TRUE) {
3410 wayland_tbm_client_set_buffer_transform(
3411 wl_egl_display->wl_tbm_client,
3412 (void *)wl_egl_buffer->wl_buffer,
3413 wl_egl_buffer->w_transform);
3414 TPL_INFO("[W_TRANSFORM]",
3415 "wl_egl_surface(%p) wl_egl_buffer(%p) w_transform(%d)",
3416 wl_egl_surface, wl_egl_buffer, wl_egl_buffer->w_transform);
3418 wl_egl_buffer->w_rotated = TPL_FALSE;
/* Only send wl_surface.set_buffer_transform when the value changed. */
3421 if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3423 wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
3424 TPL_INFO("[TRANSFORM]",
3425 "wl_egl_surface(%p) wl_egl_buffer(%p) transform(%d -> %d)",
3426 wl_egl_surface, wl_egl_buffer,
3427 wl_egl_surface->latest_transform, wl_egl_buffer->transform);
3429 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3432 if (wl_egl_window) {
3433 wl_egl_window->attached_width = wl_egl_buffer->width;
3434 wl_egl_window->attached_height = wl_egl_buffer->height;
3437 wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3438 wl_egl_buffer->dx, wl_egl_buffer->dy);
/* Damage: full-surface when no rects were provided, otherwise per-rect.
 * Rect y is flipped (EGL origin bottom-left vs wayland top-left). */
3440 if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3442 wl_surface_damage(wl_surface,
3443 wl_egl_buffer->dx, wl_egl_buffer->dy,
3444 wl_egl_buffer->width, wl_egl_buffer->height);
3446 wl_surface_damage_buffer(wl_surface,
3448 wl_egl_buffer->width, wl_egl_buffer->height);
3452 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3454 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3455 wl_egl_buffer->rects[i * 4 + 3]);
3457 wl_surface_damage(wl_surface,
3458 wl_egl_buffer->rects[i * 4 + 0],
3460 wl_egl_buffer->rects[i * 4 + 2],
3461 wl_egl_buffer->rects[i * 4 + 3]);
3463 wl_surface_damage_buffer(wl_surface,
3464 wl_egl_buffer->rects[i * 4 + 0],
3466 wl_egl_buffer->rects[i * 4 + 2],
3467 wl_egl_buffer->rects[i * 4 + 3]);
3472 wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3473 (void *)wl_egl_buffer->wl_buffer,
3474 wl_egl_buffer->serial);
/* Explicit sync: pass the acquire fence to the compositor, then create a
 * per-commit buffer_release object to receive the release fence. */
3475 #if TIZEN_FEATURE_ENABLE
3476 if (wl_egl_display->use_explicit_sync &&
3477 wl_egl_buffer->acquire_fence_fd != -1) {
3479 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3480 wl_egl_buffer->acquire_fence_fd);
3481 TPL_LOG_D("[SET_ACQUIRE_FENCE][1/2]", "wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3482 wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3483 close(wl_egl_buffer->acquire_fence_fd);
3484 wl_egl_buffer->acquire_fence_fd = -1;
3486 wl_egl_buffer->buffer_release =
3487 zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3488 if (!wl_egl_buffer->buffer_release) {
3489 TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3491 zwp_linux_buffer_release_v1_add_listener(
3492 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3493 TPL_LOG_D("[SET_ACQUIRE_FENCE][2/2]", "add explicit_sync_release_listener.");
3498 wl_surface_commit(wl_surface);
3500 wl_display_flush(wl_egl_display->wl_display);
3502 TRACE_ASYNC_BEGIN((intptr_t)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3503 wl_egl_buffer->bo_name);
/* State transition under the buffer mutex; wakes any thread waiting for
 * this buffer to be committed. */
3505 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3507 wl_egl_buffer->need_to_commit = TPL_FALSE;
3508 wl_egl_buffer->status = COMMITTED;
3509 if (wl_egl_surface->last_enq_buffer == wl_egl_buffer->tbm_surface)
3510 wl_egl_surface->last_enq_buffer = NULL;
3512 tpl_gcond_signal(&wl_egl_buffer->cond);
3514 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3517 "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3518 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3519 wl_egl_buffer->bo_name);
/* Start throttling for the next frame; failure is logged but non-fatal. */
3521 if (wl_egl_surface->post_interval > 0 && wl_egl_surface->vblank != NULL) {
3522 wl_egl_surface->vblank_enable = TPL_TRUE;
3523 if (_thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3524 TPL_ERR("Failed to set wait vblank.");
/* Signal commit_sync_fd last so waiters observe a fully committed buffer. */
3527 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3529 if (wl_egl_buffer->commit_sync_fd != -1) {
3530 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3531 wl_egl_buffer->bo_name);
3532 TPL_LOG_D("[COMMIT_SYNC][SEND]", "wl_egl_surface(%p) commit_sync_fd(%d)",
3533 wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3534 send_signal(wl_egl_buffer->commit_sync_fd, "COMMIT_SYNC");
3535 wl_egl_buffer->commit_sync_fd = -1;
3538 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
/* Writes an 8-byte value to an eventfd; logs an error on a failed write.
 * Used by send_signal() to wake eventfd waiters. */
3542 _write_to_eventfd(int eventfd, uint64_t value)
3546 ret = write(eventfd, &value, sizeof(uint64_t));
3548 TPL_ERR("failed to write to fd(%d)", eventfd);
/* Signals a waiter by writing 1 to the given eventfd.
 * 'type' only labels the error log. A negative fd is treated as "nothing to
 * signal" and returns early. */
3555 static int send_signal(int fd, const char *type)
3558 if (fd < 0) return ret;
3560 ret = _write_to_eventfd(fd, 1);
3562 TPL_ERR("Failed to send %s signal to fd(%d)", type, fd);
/* Populates the display backend vtable with the wl_egl_thread
 * implementations. Called once by the TPL core during backend selection. */
3570 __tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
3572 TPL_ASSERT(backend);
3574 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3575 backend->data = NULL;
3577 backend->init = __tpl_wl_egl_display_init;
3578 backend->fini = __tpl_wl_egl_display_fini;
3579 backend->query_config = __tpl_wl_egl_display_query_config;
3580 backend->filter_config = __tpl_wl_egl_display_filter_config;
3581 backend->get_window_info = __tpl_wl_egl_display_get_window_info;
3582 backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
3583 backend->get_buffer_from_native_pixmap =
3584 __tpl_wl_egl_display_get_buffer_from_native_pixmap;
/* Populates the surface backend vtable with the wl_egl_thread
 * implementations. Mirrors __tpl_display_init_backend_wl_egl_thread. */
3588 __tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
3590 TPL_ASSERT(backend);
3592 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3593 backend->data = NULL;
3595 backend->init = __tpl_wl_egl_surface_init;
3596 backend->fini = __tpl_wl_egl_surface_fini;
3597 backend->validate = __tpl_wl_egl_surface_validate;
3598 backend->cancel_dequeued_buffer =
3599 __tpl_wl_egl_surface_cancel_buffer;
3600 backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
3601 backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
3602 backend->set_rotation_capability =
3603 __tpl_wl_egl_surface_set_rotation_capability;
3604 backend->set_post_interval =
3605 __tpl_wl_egl_surface_set_post_interval;
3607 __tpl_wl_egl_surface_get_size;
3608 backend->fence_sync_is_available =
3609 __tpl_wl_egl_surface_fence_sync_is_available;
/* Final destructor for a wl_egl_buffer. Detaches it from the surface's
 * bookkeeping lists, destroys its wayland/explicit-sync resources, signals
 * any outstanding sync fds so no waiter blocks forever, then frees the
 * struct itself. Expected to run once per buffer at end of life. */
3613 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
3615 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3616 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3618 TPL_INFO("[BUFFER_FREE]", "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
3619 wl_egl_surface, wl_egl_buffer, wl_egl_buffer->tbm_surface, wl_egl_buffer->bo_name);
/* Remove from the surface's buffer list ... */
3621 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3622 if (wl_egl_surface->buffers) {
3623 __tpl_list_remove_data(wl_egl_surface->buffers, (void *)wl_egl_buffer,
3626 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
/* ... and from the vblank waiting list, if this surface throttles. */
3628 if (wl_egl_surface->vblank) {
3629 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
3630 if (wl_egl_surface->vblank->waiting_buffers)
3631 __tpl_list_remove_data(wl_egl_surface->vblank->waiting_buffers, (void *)wl_egl_buffer,
3633 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
3636 tpl_gmutex_lock(&wl_egl_buffer->mutex);
/* Destroy the wl_buffer proxy and flush so the compositor sees it now. */
3638 if (wl_egl_display) {
3639 if (wl_egl_display->wl_tbm_client && wl_egl_buffer->wl_buffer) {
3640 wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
3641 (void *)wl_egl_buffer->wl_buffer);
3642 wl_egl_buffer->wl_buffer = NULL;
3645 wl_display_flush(wl_egl_display->wl_display);
/* Explicit-sync leftovers: release proxy and release fence fd. */
3649 #if TIZEN_FEATURE_ENABLE
3650 if (wl_egl_buffer->buffer_release) {
3651 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3652 wl_egl_buffer->buffer_release = NULL;
3655 if (wl_egl_buffer->release_fence_fd != -1) {
3656 close(wl_egl_buffer->release_fence_fd);
3657 wl_egl_buffer->release_fence_fd = -1;
3661 if (wl_egl_buffer->waiting_source) {
3662 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
3663 wl_egl_buffer->waiting_source = NULL;
/* Signal pending sync fds so any waiter unblocks before the buffer dies;
 * send_signal() ignores fds that are already -1. */
3666 send_signal(wl_egl_buffer->commit_sync_fd, "COMMIT_SYNC");
3667 wl_egl_buffer->commit_sync_fd = -1;
3669 send_signal(wl_egl_buffer->presentation_sync_fd, "PST_SYNC");
3670 wl_egl_buffer->presentation_sync_fd = -1;
3672 if (wl_egl_buffer->rects) {
3673 free(wl_egl_buffer->rects);
3674 wl_egl_buffer->rects = NULL;
3675 wl_egl_buffer->num_rects = 0;
3678 wl_egl_buffer->wl_egl_surface = NULL;
3679 wl_egl_buffer->tbm_surface = NULL;
3680 wl_egl_buffer->bo_name = -1;
3681 wl_egl_buffer->status = RELEASED;
3683 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3684 tpl_gmutex_clear(&wl_egl_buffer->mutex);
3685 tpl_gcond_clear(&wl_egl_buffer->cond);
3686 free(wl_egl_buffer);
/* Returns the exported name of the tbm_surface's first bo — used as a
 * stable identifier in logs and traces. */
3690 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
3692 return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
/* Debug helper: logs every wl_egl_buffer tracked by the surface with its
 * tbm_surface, bo name and current status. Takes buffers_mutex while
 * walking the list. */
3696 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
3698 tpl_list_node_t *node = NULL;
3702 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3703 buffer_cnt = __tpl_list_get_count(wl_egl_surface->buffers);
3705 node = __tpl_list_get_front_node(wl_egl_surface->buffers);
3708 tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_node_get_data(node));
3709 TPL_INFO("[BUFFERS_INFO]",
3710 "[%d/%d] wl_egl_surface(%p), wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%s)",
3711 ++idx, buffer_cnt, wl_egl_surface, wl_egl_buffer,
3712 wl_egl_buffer->tbm_surface, wl_egl_buffer->bo_name,
3713 status_to_string[wl_egl_buffer->status]);
3714 } while ((node = __tpl_list_node_next(node)));
3715 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
3719 _check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface)
3721 tpl_list_node_t *node = NULL;
3722 tpl_bool_t ret = TPL_FALSE;
3725 if (!wl_egl_surface || !tbm_surface)
3728 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3729 node = __tpl_list_get_front_node(wl_egl_surface->buffers);
3732 tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_node_get_data(node));
3733 if (wl_egl_buffer->tbm_surface == tbm_surface) {
3737 } while ((node = __tpl_list_node_next(node)));
3739 if (ret == TPL_FALSE) {
3740 TPL_ERR("tbm_surface(%p) is not owned by wl_egl_surface(%p)",
3741 tbm_surface, wl_egl_surface);
3744 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);