2 #include "tpl_internal.h"
7 #include <sys/eventfd.h>
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
19 #include <tdm_client.h>
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
30 #include <presentation-time-client-protocol.h>
31 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
34 #include "tpl_utils_gthread.h"
36 static int wl_egl_buffer_key;
37 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
39 /* In wayland, application and compositor create its own drawing buffers. Recommend size is more than 2. */
40 #define BUFFER_ARRAY_SIZE 9
42 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
43 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
44 typedef struct _tpl_wl_egl_buffer tpl_wl_egl_buffer_t;
45 typedef struct _surface_vblank tpl_surface_vblank_t;
/* Per-display backend state owned by the dedicated wl_egl worker thread.
 * Holds the client wl_display, a private event queue, the wayland-tbm
 * client and (optionally) the tdm vblank client plus bound Tizen globals.
 * NOTE(review): this extract elides the closing brace and several members
 * that later code uses (a `tdm` sub-struct, use_tss, prepared, thread) —
 * confirm layout against the full source. */
47 struct _tpl_wl_egl_display {
48 tpl_gsource *disp_source;
50 tpl_gmutex wl_event_mutex;
52 struct wl_display *wl_display;
53 struct wl_event_queue *ev_queue;
54 struct wayland_tbm_client *wl_tbm_client;
55 int last_error; /* errno of the last wl_display error*/
57 tpl_bool_t wl_initialized;
59 tpl_bool_t use_wait_vblank;
60 tpl_bool_t use_explicit_sync;
65 tdm_client *tdm_client;
66 tpl_gsource *tdm_source;
68 tpl_bool_t tdm_initialized;
69 tpl_list_t *surface_vblanks;
72 #if TIZEN_FEATURE_ENABLE
73 struct tizen_surface_shm *tss; /* used for surface buffer_flush */
74 struct wp_presentation *presentation; /* for presentation feedback */
75 struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
79 typedef enum surf_message {
/* Per-window surface state: the wl_egl_window / wl_surface pair, its
 * tbm_surface_queue, buffer-tracking array, and state flags driven by the
 * worker thread. NOTE(review): closing brace and some members (width,
 * height, format, rotation, serial, ...) are elided in this extract. */
85 struct _tpl_wl_egl_surface {
86 tpl_gsource *surf_source;
88 tbm_surface_queue_h tbm_queue;
91 struct wl_egl_window *wl_egl_window;
92 struct wl_surface *wl_surface;
94 #if TIZEN_FEATURE_ENABLE
95 struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
96 struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
99 tpl_surface_vblank_t *vblank;
101 /* surface information */
108 int latest_transform;
112 tpl_wl_egl_display_t *wl_egl_display;
113 tpl_surface_t *tpl_surface;
115 /* wl_egl_buffer array for buffer tracing */
116 tpl_wl_egl_buffer_t *buffers[BUFFER_ARRAY_SIZE];
117 int buffer_cnt; /* the number of using wl_egl_buffers */
118 tpl_gmutex buffers_mutex;
119 tpl_wl_egl_buffer_t *last_deq_buffer;
121 tpl_list_t *presentation_feedbacks; /* for tracing presentation feedbacks */
133 tpl_gmutex surf_mutex;
136 surf_message sent_message;
138 /* for waiting draw done */
139 tpl_bool_t use_render_done_fence;
140 tpl_bool_t is_activated;
141 tpl_bool_t reset; /* TRUE if queue reseted by external */
142 tpl_bool_t need_to_enqueue;
143 tpl_bool_t prerotation_capability;
144 tpl_bool_t vblank_done;
145 tpl_bool_t set_serial_is_used;
/* Per-surface tdm vblank handle plus the buffers queued while waiting for
 * the next vblank; nodes live in wl_egl_display's surface_vblanks list and
 * are released via __cb_surface_vblank_free. */
148 struct _surface_vblank {
149 tdm_client_vblank *tdm_vblank;
150 tpl_wl_egl_surface_t *wl_egl_surface;
151 tpl_list_t *waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
154 typedef enum buffer_status {
159 WAITING_SIGNALED, // 4
164 static const char *status_to_string[7] = {
169 "WAITING_SIGNALED", // 4
170 "WAITING_VBLANK", // 5
/* Per-buffer bookkeeping attached to a tbm_surface via KEY_WL_EGL_BUFFER:
 * wl_buffer proxy, attach geometry, lifecycle status, and the various sync
 * fds (acquire/release/commit/presentation) used with explicit sync and
 * presentation feedback. NOTE(review): several members (serial, transform,
 * mutexes, ...) are elided in this extract. */
174 struct _tpl_wl_egl_buffer {
175 tbm_surface_h tbm_surface;
178 struct wl_proxy *wl_buffer;
179 int dx, dy; /* position to attach to wl_surface */
180 int width, height; /* size to attach to wl_surface */
182 buffer_status_t status; /* for tracing buffer status */
183 int idx; /* position index in buffers array of wl_egl_surface */
185 /* for damage region */
189 /* for wayland_tbm_client_set_buffer_transform */
191 tpl_bool_t w_rotated;
193 /* for wl_surface_set_buffer_transform */
196 /* for wayland_tbm_client_set_buffer_serial */
199 /* for checking need_to_commit (frontbuffer mode) */
200 tpl_bool_t need_to_commit;
202 /* for checking draw done */
203 tpl_bool_t draw_done;
205 #if TIZEN_FEATURE_ENABLE
206 /* to get release event via zwp_linux_buffer_release_v1 */
207 struct zwp_linux_buffer_release_v1 *buffer_release;
209 /* each buffers own its release_fence_fd, until it passes ownership
211 int32_t release_fence_fd;
213 /* each buffers own its acquire_fence_fd.
214 * If it use zwp_linux_buffer_release_v1 the ownership of this fd
215 * will be passed to display server
216 * Otherwise it will be used as a fence waiting for render done
218 int32_t acquire_fence_fd;
220 /* Fd to send a signal when wl_surface_commit with this buffer */
221 int32_t commit_sync_fd;
223 /* Fd to send a siganl when receive the
224 * presentation feedback from display server */
225 int32_t presentation_sync_fd;
227 tpl_gsource *waiting_source;
232 tpl_wl_egl_surface_t *wl_egl_surface;
/* Tracks one outstanding wp_presentation_feedback request; instances are
 * kept in wl_egl_surface->presentation_feedbacks until the compositor
 * replies. NOTE(review): interior members are elided in this extract. */
235 #if TIZEN_FEATURE_ENABLE
236 struct pst_feedback {
237 /* to get presentation feedback from display server */
238 struct wp_presentation_feedback *presentation_feedback;
243 tpl_wl_egl_surface_t *wl_egl_surface;
248 static const struct wl_buffer_listener wl_buffer_release_listener;
251 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
253 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
255 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
256 static tpl_wl_egl_buffer_t *
257 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
259 _write_to_eventfd(int eventfd);
261 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
263 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
265 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
266 tpl_wl_egl_buffer_t *wl_egl_buffer);
268 __cb_surface_vblank_free(void *data);
/* Allocate and initialize the tizen_private block stored in a
 * wl_egl_window's driver_private. All callback slots start NULL and are
 * wired up later by surface init. Ownership: freed in __cb_destroy_callback.
 * NOTE(review): the calloc NULL-check and return statement appear elided
 * in this extract — confirm against the full source. */
270 static struct tizen_private *
271 tizen_private_create()
273 struct tizen_private *private = NULL;
274 private = (struct tizen_private *)calloc(1, sizeof(struct tizen_private));
276 private->magic = WL_EGL_TIZEN_MAGIC;
277 private->rotation = 0;
278 private->frontbuffer_mode = 0;
279 private->transform = 0;
280 private->window_transform = 0;
283 private->data = NULL;
284 private->rotate_callback = NULL;
285 private->get_rotation_capability = NULL;
286 private->set_window_serial_callback = NULL;
287 private->set_frontbuffer_callback = NULL;
288 private->create_commit_sync_fd = NULL;
289 private->create_presentation_sync_fd = NULL;
290 private->merge_sync_fds = NULL;
/* Heuristic check that a native display handle is a wl_display: the first
 * pointer-sized value of a wl_display is its wl_display_interface pointer,
 * so compare by address first, then by interface name as a fallback.
 * NOTE(review): the return statements are elided in this extract. */
297 _check_native_handle_is_wl_display(tpl_handle_t display)
299 struct wl_interface *wl_egl_native_dpy = *(void **) display;
301 if (!wl_egl_native_dpy) {
302 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
306 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
307 is a memory address pointing the structure of wl_display_interface. */
308 if (wl_egl_native_dpy == &wl_display_interface)
311 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
312 strlen(wl_display_interface.name)) == 0) {
/* gsource dispatch handler for the tdm_client fd: pumps pending tdm events
 * on the worker thread. A tdm error is unrecoverable, so the source is
 * destroyed and detached from the display; vblank waiting is then disabled
 * for every surface that used it.
 * Fix: log-message typo "occured" -> "occurred". */
320 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
322 tpl_wl_egl_display_t *wl_egl_display = NULL;
323 tdm_error tdm_err = TDM_ERROR_NONE;
327 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
328 if (!wl_egl_display) {
329 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
330 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
334 tdm_err = tdm_client_handle_events(wl_egl_display->tdm.tdm_client);
336 /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
337 * When tdm_source is no longer available due to an unexpected situation,
338 * wl_egl_thread must remove it from the thread and destroy it.
339 * In that case, tdm_vblank can no longer be used for surfaces and displays
340 * that used this tdm_source. */
341 if (tdm_err != TDM_ERROR_NONE) {
342 TPL_ERR("Error occurred in tdm_client_handle_events. tdm_err(%d)",
344 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
346 tpl_gsource_destroy(gsource, TPL_FALSE);
348 wl_egl_display->tdm.tdm_source = NULL;
/* gsource finalize handler for the tdm source: frees the pending
 * surface-vblank list, destroys the tdm_client, and resets all tdm-related
 * display state so vblank waiting is cleanly disabled. */
357 __thread_func_tdm_finalize(tpl_gsource *gsource)
359 tpl_wl_egl_display_t *wl_egl_display = NULL;
361 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
364 "tdm_destroy| wl_egl_display(%p) tdm_client(%p) tpl_gsource(%p)",
365 wl_egl_display, wl_egl_display->tdm.tdm_client, gsource);
367 if (wl_egl_display->tdm.tdm_client) {
369 if (wl_egl_display->tdm.surface_vblanks) {
370 __tpl_list_free(wl_egl_display->tdm.surface_vblanks,
371 __cb_surface_vblank_free);
372 wl_egl_display->tdm.surface_vblanks = NULL;
375 tdm_client_destroy(wl_egl_display->tdm.tdm_client);
376 wl_egl_display->tdm.tdm_client = NULL;
377 wl_egl_display->tdm.tdm_display_fd = -1;
378 wl_egl_display->tdm.tdm_source = NULL;
381 wl_egl_display->use_wait_vblank = TPL_FALSE;
382 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
385 static tpl_gsource_functions tdm_funcs = {
388 .dispatch = __thread_func_tdm_dispatch,
389 .finalize = __thread_func_tdm_finalize,
/* Create a tdm_client and fetch its pollable fd; on success store both in
 * wl_egl_display->tdm and allocate the surface_vblanks list. The gsource
 * watching the fd is created later by __tpl_wl_egl_display_init.
 * Returns TPL_ERROR_NONE or TPL_ERROR_INVALID_OPERATION (client is
 * destroyed again if the fd cannot be obtained — no leak). */
393 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
395 tdm_client *tdm_client = NULL;
396 int tdm_display_fd = -1;
397 tdm_error tdm_err = TDM_ERROR_NONE;
399 tdm_client = tdm_client_create(&tdm_err);
400 if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
401 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
402 return TPL_ERROR_INVALID_OPERATION;
405 tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
406 if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
407 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
408 tdm_client_destroy(tdm_client);
409 return TPL_ERROR_INVALID_OPERATION;
412 wl_egl_display->tdm.tdm_display_fd = tdm_display_fd;
413 wl_egl_display->tdm.tdm_client = tdm_client;
414 wl_egl_display->tdm.tdm_source = NULL;
415 wl_egl_display->tdm.tdm_initialized = TPL_TRUE;
416 wl_egl_display->tdm.surface_vblanks = __tpl_list_alloc();
418 TPL_INFO("[TDM_CLIENT_INIT]",
419 "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
420 wl_egl_display, tdm_client, tdm_display_fd);
422 return TPL_ERROR_NONE;
425 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
/* wl_registry global announcement handler ("resistry" is a historical typo
 * kept for ABI/name stability with the listener table). Binds the Tizen
 * globals this backend uses: tizen_surface_shm (capped at version 2),
 * wp_presentation (v1), and zwp_linux_explicit_synchronization_v1 (v1,
 * opt-out via TPL_EFS=0).
 * NOTE(review): the `else` branch pairing L241 with the explicit-sync bind
 * at L242 appears elided in this extract — confirm against full source. */
429 __cb_wl_resistry_global_callback(void *data, struct wl_registry *wl_registry,
430 uint32_t name, const char *interface,
433 #if TIZEN_FEATURE_ENABLE
434 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
436 if (!strcmp(interface, "tizen_surface_shm")) {
437 wl_egl_display->tss =
438 wl_registry_bind(wl_registry,
440 &tizen_surface_shm_interface,
441 ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
442 version : IMPL_TIZEN_SURFACE_SHM_VERSION));
443 wl_egl_display->use_tss = TPL_TRUE;
444 } else if (!strcmp(interface, wp_presentation_interface.name)) {
445 wl_egl_display->presentation =
446 wl_registry_bind(wl_registry,
447 name, &wp_presentation_interface, 1);
448 TPL_DEBUG("bind wp_presentation_interface");
449 } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
450 char *env = tpl_getenv("TPL_EFS");
451 if (env && !atoi(env)) {
452 wl_egl_display->use_explicit_sync = TPL_FALSE;
454 wl_egl_display->explicit_sync =
455 wl_registry_bind(wl_registry, name,
456 &zwp_linux_explicit_synchronization_v1_interface, 1);
457 wl_egl_display->use_explicit_sync = TPL_TRUE;
458 TPL_DEBUG("bind zwp_linux_explicit_synchronization_v1_interface");
465 __cb_wl_resistry_global_remove_callback(void *data,
466 struct wl_registry *wl_registry,
471 static const struct wl_registry_listener registry_listener = {
472 __cb_wl_resistry_global_callback,
473 __cb_wl_resistry_global_remove_callback
/* Log a wl_display failure for `func_name`: prints strerror(errno), and if
 * the display error is EPROTO, also decodes the wayland protocol error
 * (interface/code/proxy id). last_error deduplicates repeated reports of
 * the same errno; it is recorded at the end so later callers can bail out.
 * Fix: error-message typo "falied" -> "failed". */
477 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
478 const char *func_name)
482 strerror_r(errno, buf, sizeof(buf));
484 if (wl_egl_display->last_error == errno)
487 TPL_ERR("failed to %s. error:%d(%s)", func_name, errno, buf);
489 dpy_err = wl_display_get_error(wl_egl_display->wl_display);
490 if (dpy_err == EPROTO) {
491 const struct wl_interface *err_interface;
492 uint32_t err_proxy_id, err_code;
493 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
496 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
497 err_interface->name, err_code, err_proxy_id);
500 wl_egl_display->last_error = errno;
/* Runs on the worker thread: set up all wayland-side state for the display.
 * Uses a temporary queue + display wrapper for the registry roundtrip so
 * global binding does not race the main thread's default queue, creates the
 * long-lived private ev_queue, initializes wayland-tbm, and re-queues every
 * bound Tizen global onto ev_queue. The trailing statements are the shared
 * cleanup path (goto targets elided in this extract).
 * Fix: `®istry_listener` at the wl_registry_add_listener call is mojibake
 * for `&registry_listener` (HTML entity `&reg` rendered as U+00AE); without
 * the fix the file does not compile. */
504 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
506 struct wl_registry *registry = NULL;
507 struct wl_event_queue *queue = NULL;
508 struct wl_display *display_wrapper = NULL;
509 struct wl_proxy *wl_tbm = NULL;
510 struct wayland_tbm_client *wl_tbm_client = NULL;
512 tpl_result_t result = TPL_ERROR_NONE;
514 queue = wl_display_create_queue(wl_egl_display->wl_display);
516 TPL_ERR("Failed to create wl_queue wl_display(%p)",
517 wl_egl_display->wl_display);
518 result = TPL_ERROR_INVALID_OPERATION;
522 wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
523 if (!wl_egl_display->ev_queue) {
524 TPL_ERR("Failed to create wl_queue wl_display(%p)",
525 wl_egl_display->wl_display);
526 result = TPL_ERROR_INVALID_OPERATION;
530 display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
531 if (!display_wrapper) {
532 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
533 wl_egl_display->wl_display);
534 result = TPL_ERROR_INVALID_OPERATION;
538 wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
540 registry = wl_display_get_registry(display_wrapper);
542 TPL_ERR("Failed to create wl_registry");
543 result = TPL_ERROR_INVALID_OPERATION;
547 wl_proxy_wrapper_destroy(display_wrapper);
548 display_wrapper = NULL;
550 wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
551 if (!wl_tbm_client) {
552 TPL_ERR("Failed to initialize wl_tbm_client.");
553 result = TPL_ERROR_INVALID_CONNECTION;
557 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
559 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
560 result = TPL_ERROR_INVALID_CONNECTION;
564 wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
565 wl_egl_display->wl_tbm_client = wl_tbm_client;
567 if (wl_registry_add_listener(registry, &registry_listener,
569 TPL_ERR("Failed to wl_registry_add_listener");
570 result = TPL_ERROR_INVALID_OPERATION;
574 ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
576 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
577 result = TPL_ERROR_INVALID_OPERATION;
581 #if TIZEN_FEATURE_ENABLE
582 /* set tizen_surface_shm's queue as client's private queue */
583 if (wl_egl_display->tss) {
584 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
585 wl_egl_display->ev_queue);
586 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
589 if (wl_egl_display->presentation) {
590 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
591 wl_egl_display->ev_queue);
592 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
593 wl_egl_display->presentation);
596 if (wl_egl_display->explicit_sync) {
597 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
598 wl_egl_display->ev_queue);
599 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
600 wl_egl_display->explicit_sync);
603 wl_egl_display->wl_initialized = TPL_TRUE;
605 TPL_INFO("[WAYLAND_INIT]",
606 "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
607 wl_egl_display, wl_egl_display->wl_display,
608 wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
609 #if TIZEN_FEATURE_ENABLE
610 TPL_INFO("[WAYLAND_INIT]",
611 "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
612 wl_egl_display->tss, wl_egl_display->presentation,
613 wl_egl_display->explicit_sync);
617 wl_proxy_wrapper_destroy(display_wrapper);
619 wl_registry_destroy(registry);
621 wl_event_queue_destroy(queue);
/* Runs on the worker thread: tear down everything _thread_wl_display_init
 * set up, in reverse — cancel an in-flight prepare_read, drain pending
 * events, destroy each bound Tizen global, detach + deinit wayland-tbm,
 * and finally destroy the private event queue. The wl_display itself is
 * owned by the application and is not closed here. */
627 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
629 /* If wl_egl_display is in prepared state, cancel it */
630 if (wl_egl_display->prepared) {
631 wl_display_cancel_read(wl_egl_display->wl_display);
632 wl_egl_display->prepared = TPL_FALSE;
635 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
636 wl_egl_display->ev_queue) == -1) {
637 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
639 #if TIZEN_FEATURE_ENABLE
640 if (wl_egl_display->tss) {
641 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
642 "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
643 wl_egl_display, wl_egl_display->tss);
644 tizen_surface_shm_destroy(wl_egl_display->tss);
645 wl_egl_display->tss = NULL;
648 if (wl_egl_display->presentation) {
649 TPL_INFO("[WP_PRESENTATION_DESTROY]",
650 "wl_egl_display(%p) wp_presentation(%p) fini.",
651 wl_egl_display, wl_egl_display->presentation);
652 wp_presentation_destroy(wl_egl_display->presentation);
653 wl_egl_display->presentation = NULL;
656 if (wl_egl_display->explicit_sync) {
657 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
658 "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
659 wl_egl_display, wl_egl_display->explicit_sync);
660 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
661 wl_egl_display->explicit_sync = NULL;
664 if (wl_egl_display->wl_tbm_client) {
665 struct wl_proxy *wl_tbm = NULL;
667 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
668 wl_egl_display->wl_tbm_client);
670 wl_proxy_set_queue(wl_tbm, NULL);
673 TPL_INFO("[WL_TBM_DEINIT]",
674 "wl_egl_display(%p) wl_tbm_client(%p)",
675 wl_egl_display, wl_egl_display->wl_tbm_client);
676 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
677 wl_egl_display->wl_tbm_client = NULL;
680 wl_event_queue_destroy(wl_egl_display->ev_queue);
682 wl_egl_display->wl_initialized = TPL_FALSE;
684 TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
685 wl_egl_display, wl_egl_display->wl_display);
/* Worker-thread entry point (tpl_gthread_func): initializes the wayland
 * side and, if TPL_WAIT_VBLANK is enabled, the tdm client. A tdm failure
 * is non-fatal — vblank waiting is simply disabled. Returns the display
 * pointer as the thread's data. */
689 _thread_init(void *data)
691 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
693 if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
694 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
695 wl_egl_display, wl_egl_display->wl_display);
698 if (wl_egl_display->use_wait_vblank &&
699 _thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
700 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VLANK:DISABLED");
703 return wl_egl_display;
/* gsource prepare for the wl_display fd: standard wayland-client multi-
 * thread read protocol. Loops prepare_read_queue/dispatch_pending until the
 * queue is empty, marks the display prepared, then flushes outgoing
 * requests. Skips straight to dispatch when already prepared or when a
 * fatal last_error was recorded. */
707 __thread_func_disp_prepare(tpl_gsource *gsource)
709 tpl_wl_egl_display_t *wl_egl_display =
710 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
712 /* If this wl_egl_display is already prepared,
713 * do nothing in this function. */
714 if (wl_egl_display->prepared)
717 /* If there is a last_error, there is no need to poll,
718 * so skip directly to dispatch.
719 * prepare -> dispatch */
720 if (wl_egl_display->last_error)
723 while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
724 wl_egl_display->ev_queue) != 0) {
725 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
726 wl_egl_display->ev_queue) == -1) {
727 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
731 wl_egl_display->prepared = TPL_TRUE;
733 wl_display_flush(wl_egl_display->wl_display);
/* gsource check for the wl_display fd: completes the prepare_read protocol.
 * Reads events when the fd is actually readable, otherwise cancels the
 * read; a recorded last_error also cancels and lets the next prepare pass
 * drive the source to removal. Always clears `prepared`. */
739 __thread_func_disp_check(tpl_gsource *gsource)
741 tpl_wl_egl_display_t *wl_egl_display =
742 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
743 tpl_bool_t ret = TPL_FALSE;
745 if (!wl_egl_display->prepared)
748 /* If prepared, but last_error is set,
749 * cancel_read is executed and FALSE is returned.
750 * That can lead to G_SOURCE_REMOVE by calling disp_prepare again
751 * and skipping disp_check from prepare to disp_dispatch.
752 * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
753 if (wl_egl_display->prepared && wl_egl_display->last_error) {
754 wl_display_cancel_read(wl_egl_display->wl_display);
758 if (tpl_gsource_check_io_condition(gsource)) {
759 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
760 _wl_display_print_err(wl_egl_display, "read_event");
763 wl_display_cancel_read(wl_egl_display->wl_display);
767 wl_egl_display->prepared = TPL_FALSE;
/* gsource dispatch for the wl_display fd: under wl_event_mutex, dispatches
 * events queued on the private ev_queue and flushes outgoing requests.
 * Returns SOURCE_REMOVE (in elided lines) when last_error is set, since
 * the display connection is no longer usable. */
773 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
775 tpl_wl_egl_display_t *wl_egl_display =
776 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
780 /* If there is last_error, SOURCE_REMOVE should be returned
781 * to remove the gsource from the main loop.
782 * This is because wl_egl_display is not valid since last_error was set.*/
783 if (wl_egl_display->last_error) {
787 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
788 if (tpl_gsource_check_io_condition(gsource)) {
789 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
790 wl_egl_display->ev_queue) == -1) {
791 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
795 wl_display_flush(wl_egl_display->wl_display);
796 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* gsource finalize for the wl_display fd: tears down the wayland state
 * (if it was initialized) on the worker thread before the source dies. */
802 __thread_func_disp_finalize(tpl_gsource *gsource)
804 tpl_wl_egl_display_t *wl_egl_display =
805 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
807 if (wl_egl_display->wl_initialized)
808 _thread_wl_display_fini(wl_egl_display);
810 TPL_LOG_T("WL_EGL", "finalize| wl_egl_display(%p) tpl_gsource(%p)",
811 wl_egl_display, gsource);
817 static tpl_gsource_functions disp_funcs = {
818 .prepare = __thread_func_disp_prepare,
819 .check = __thread_func_disp_check,
820 .dispatch = __thread_func_disp_dispatch,
821 .finalize = __thread_func_disp_finalize,
/* Backend entry: validate the native handle is a wl_display, allocate and
 * zero-init the backend state, spawn the dedicated "wl_egl_thread" (whose
 * _thread_init does the wayland/tdm setup), then attach gsources for the
 * display fd and, when tdm initialized, the tdm fd. use_wait_vblank is
 * re-derived from the actual tdm outcome. The trailing statements are the
 * shared error-cleanup path (goto labels elided in this extract).
 * Returns TPL_ERROR_NONE, TPL_ERROR_INVALID_PARAMETER,
 * TPL_ERROR_OUT_OF_MEMORY, or TPL_ERROR_INVALID_OPERATION. */
825 __tpl_wl_egl_display_init(tpl_display_t *display)
827 tpl_wl_egl_display_t *wl_egl_display = NULL;
831 /* Do not allow default display in wayland. */
832 if (!display->native_handle) {
833 TPL_ERR("Invalid native handle for display.");
834 return TPL_ERROR_INVALID_PARAMETER;
837 if (!_check_native_handle_is_wl_display(display->native_handle)) {
838 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
839 return TPL_ERROR_INVALID_PARAMETER;
842 wl_egl_display = (tpl_wl_egl_display_t *) calloc(1,
843 sizeof(tpl_wl_egl_display_t));
844 if (!wl_egl_display) {
845 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
846 return TPL_ERROR_OUT_OF_MEMORY;
849 display->backend.data = wl_egl_display;
850 display->bufmgr_fd = -1;
852 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
853 wl_egl_display->tdm.tdm_client = NULL;
854 wl_egl_display->tdm.tdm_display_fd = -1;
855 wl_egl_display->tdm.tdm_source = NULL;
857 wl_egl_display->wl_initialized = TPL_FALSE;
859 wl_egl_display->ev_queue = NULL;
860 wl_egl_display->wl_display = (struct wl_display *)display->native_handle;
861 wl_egl_display->last_error = 0;
862 wl_egl_display->use_tss = TPL_FALSE;
863 wl_egl_display->use_explicit_sync = TPL_FALSE; // default disabled
864 wl_egl_display->prepared = TPL_FALSE;
866 #if TIZEN_FEATURE_ENABLE
867 /* Wayland Interfaces */
868 wl_egl_display->tss = NULL;
869 wl_egl_display->presentation = NULL;
870 wl_egl_display->explicit_sync = NULL;
872 wl_egl_display->wl_tbm_client = NULL;
874 wl_egl_display->use_wait_vblank = TPL_TRUE; // default enabled
876 char *env = tpl_getenv("TPL_WAIT_VBLANK");
877 if (env && !atoi(env)) {
878 wl_egl_display->use_wait_vblank = TPL_FALSE;
882 tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
885 wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
886 (tpl_gthread_func)_thread_init,
887 (void *)wl_egl_display);
888 if (!wl_egl_display->thread) {
889 TPL_ERR("Failed to create wl_egl_thread");
893 wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
894 (void *)wl_egl_display,
895 wl_display_get_fd(wl_egl_display->wl_display),
896 &disp_funcs, SOURCE_TYPE_NORMAL);
897 if (!wl_egl_display->disp_source) {
898 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
899 display->native_handle,
900 wl_egl_display->thread);
904 if (wl_egl_display->use_wait_vblank &&
905 wl_egl_display->tdm.tdm_initialized) {
906 wl_egl_display->tdm.tdm_source = tpl_gsource_create(wl_egl_display->thread,
907 (void *)wl_egl_display,
908 wl_egl_display->tdm.tdm_display_fd,
909 &tdm_funcs, SOURCE_TYPE_NORMAL);
910 if (!wl_egl_display->tdm.tdm_source) {
911 TPL_ERR("Failed to create tdm_gsource\n");
916 wl_egl_display->use_wait_vblank = (wl_egl_display->tdm.tdm_initialized &&
917 (wl_egl_display->tdm.tdm_source != NULL));
919 TPL_INFO("[DISPLAY_INIT]",
920 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
922 wl_egl_display->thread,
923 wl_egl_display->wl_display);
925 TPL_INFO("[DISPLAY_INIT]",
926 "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
927 wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
928 wl_egl_display->use_tss ? "TRUE" : "FALSE",
929 wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
931 return TPL_ERROR_NONE;
934 if (wl_egl_display->thread) {
935 if (wl_egl_display->tdm.tdm_source)
936 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
937 if (wl_egl_display->disp_source)
938 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
940 tpl_gthread_destroy(wl_egl_display->thread);
943 wl_egl_display->thread = NULL;
944 free(wl_egl_display);
946 display->backend.data = NULL;
947 return TPL_ERROR_INVALID_OPERATION;
/* Backend exit: destroy the tdm and display gsources (TPL_TRUE -> wait for
 * the worker thread to finalize them), stop the thread, clear the event
 * mutex and free the backend state. Safe to call when backend.data is
 * already NULL. */
951 __tpl_wl_egl_display_fini(tpl_display_t *display)
953 tpl_wl_egl_display_t *wl_egl_display;
957 wl_egl_display = (tpl_wl_egl_display_t *)display->backend.data;
958 if (wl_egl_display) {
959 TPL_INFO("[DISPLAY_FINI]",
960 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
962 wl_egl_display->thread,
963 wl_egl_display->wl_display);
965 if (wl_egl_display->tdm.tdm_source && wl_egl_display->tdm.tdm_initialized) {
966 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
967 wl_egl_display->tdm.tdm_source = NULL;
970 if (wl_egl_display->disp_source) {
971 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
972 wl_egl_display->disp_source = NULL;
975 if (wl_egl_display->thread) {
976 tpl_gthread_destroy(wl_egl_display->thread);
977 wl_egl_display->thread = NULL;
980 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
982 free(wl_egl_display);
985 display->backend.data = NULL;
/* Accept only window-surface 8:8:8 configs at 24/32-bit depth; alpha 8
 * maps to ARGB8888, alpha 0 to XRGB8888. Everything else is rejected with
 * TPL_ERROR_INVALID_PARAMETER. Out-params are optional (NULL-checked). */
989 __tpl_wl_egl_display_query_config(tpl_display_t *display,
990 tpl_surface_type_t surface_type,
991 int red_size, int green_size,
992 int blue_size, int alpha_size,
993 int color_depth, int *native_visual_id,
998 if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
999 green_size == 8 && blue_size == 8 &&
1000 (color_depth == 32 || color_depth == 24)) {
1002 if (alpha_size == 8) {
1003 if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
1004 if (is_slow) *is_slow = TPL_FALSE;
1005 return TPL_ERROR_NONE;
1007 if (alpha_size == 0) {
1008 if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
1009 if (is_slow) *is_slow = TPL_FALSE;
1010 return TPL_ERROR_NONE;
1014 return TPL_ERROR_INVALID_PARAMETER;
/* No-op config filter for this backend: every config passes unchanged. */
1018 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
1021 TPL_IGNORE(display);
1022 TPL_IGNORE(visual_id);
1023 TPL_IGNORE(alpha_size);
1024 return TPL_ERROR_NONE;
/* Report a wl_egl_window's current width/height/format. If the window is
 * already bound to a tpl surface (tizen_private->data set), reuse that
 * surface's format; otherwise fall back to ARGB8888/XRGB8888 (the a_size
 * branch around L587-L588 is partially elided in this extract). */
1028 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
1029 tpl_handle_t window, int *width,
1030 int *height, tbm_format *format,
1031 int depth, int a_size)
1033 tpl_result_t ret = TPL_ERROR_NONE;
1034 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
1036 TPL_ASSERT(display);
1039 if (!wl_egl_window) {
1040 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
1041 return TPL_ERROR_INVALID_PARAMETER;
1044 if (width) *width = wl_egl_window->width;
1045 if (height) *height = wl_egl_window->height;
1047 struct tizen_private *tizen_private =
1048 (struct tizen_private *)wl_egl_window->driver_private;
1049 if (tizen_private && tizen_private->data) {
1050 tpl_wl_egl_surface_t *wl_egl_surface =
1051 (tpl_wl_egl_surface_t *)tizen_private->data;
1052 *format = wl_egl_surface->format;
1055 *format = TBM_FORMAT_ARGB8888;
1057 *format = TBM_FORMAT_XRGB8888;
/* Look up a native pixmap's backing tbm_surface through wayland-tbm and
 * report its width/height/format. Out-params are optional. */
1065 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
1066 tpl_handle_t pixmap, int *width,
1067 int *height, tbm_format *format)
1069 tbm_surface_h tbm_surface = NULL;
1072 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
1073 return TPL_ERROR_INVALID_PARAMETER;
1076 tbm_surface = wayland_tbm_server_get_surface(NULL,
1077 (struct wl_resource *)pixmap);
1079 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
1080 return TPL_ERROR_INVALID_PARAMETER;
1083 if (width) *width = tbm_surface_get_width(tbm_surface);
1084 if (height) *height = tbm_surface_get_height(tbm_surface);
1085 if (format) *format = tbm_surface_get_format(tbm_surface);
1087 return TPL_ERROR_NONE;
/* Resolve a native pixmap handle to its tbm_surface via wayland-tbm;
 * returns the surface (NULL handling/return elided in this extract). */
1090 static tbm_surface_h
1091 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
1093 tbm_surface_h tbm_surface = NULL;
1097 tbm_surface = wayland_tbm_server_get_surface(NULL,
1098 (struct wl_resource *)pixmap);
1100 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
/* Backend-selection probe: same wl_display detection logic as
 * _check_native_handle_is_wl_display (pointer-to-interface magic check,
 * then interface-name comparison). Return statements elided here. */
1108 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
1110 struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1112 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1114 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
1115 is a memory address pointing the structure of wl_display_interface. */
1116 if (wl_egl_native_dpy == &wl_display_interface)
1119 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1120 strlen(wl_display_interface.name)) == 0) {
/* -- BEGIN -- wl_egl_window callback functions */

/* Called when the application destroys its wl_egl_window. Destroying the
 * native window before eglDestroySurface is abnormal; in that case, detach
 * every link between the window, its tizen_private and the tpl surface
 * under surf_mutex so later surface teardown does not touch freed memory.
 * Note: tizen_private is freed here while holding the surface's mutex;
 * the mutex itself belongs to wl_egl_surface, not to tizen_private. */
1129 __cb_destroy_callback(void *private)
1131 struct tizen_private *tizen_private = (struct tizen_private *)private;
1132 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1134 if (!tizen_private) {
1135 TPL_LOG_B("WL_EGL", "[DESTROY_CB] Already destroyed surface");
1139 wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1140 if (wl_egl_surface) {
1141 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1142 wl_egl_surface->wl_egl_window);
1143 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
1145 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1146 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1147 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1148 wl_egl_surface->wl_egl_window->driver_private = NULL;
1149 wl_egl_surface->wl_egl_window = NULL;
1150 wl_egl_surface->wl_surface = NULL;
1152 tizen_private->set_window_serial_callback = NULL;
1153 tizen_private->rotate_callback = NULL;
1154 tizen_private->get_rotation_capability = NULL;
1155 tizen_private->set_frontbuffer_callback = NULL;
1156 tizen_private->create_commit_sync_fd = NULL;
1157 tizen_private->create_presentation_sync_fd = NULL;
1158 tizen_private->data = NULL;
1160 free(tizen_private);
1161 tizen_private = NULL;
1162 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* wl_egl_window_resize callback: propagate the window's new size into the
 * surface's tbm_surface_queue (keeping the current format) so subsequent
 * dequeues produce buffers of the requested size. */
1167 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1169 TPL_ASSERT(private);
1170 TPL_ASSERT(wl_egl_window);
1172 struct tizen_private *tizen_private = (struct tizen_private *)private;
1173 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1174 int cur_w, cur_h, req_w, req_h, format;
1176 if (!wl_egl_surface) {
1177 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1182 format = wl_egl_surface->format;
1183 cur_w = wl_egl_surface->width;
1184 cur_h = wl_egl_surface->height;
1185 req_w = wl_egl_window->width;
1186 req_h = wl_egl_window->height;
1188 TPL_INFO("[WINDOW_RESIZE]",
1189 "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1190 wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1192 if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1193 != TBM_SURFACE_QUEUE_ERROR_NONE) {
1194 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1200 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1202 /* There is no usecase for using prerotation callback below */
/* tizen_private rotate callback: records the rotation the application set
 * on the window (tizen_private->rotation) into the backend surface so it
 * can be applied to subsequently committed buffers. */
1204 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1206 TPL_ASSERT(private);
1207 TPL_ASSERT(wl_egl_window);
1209 struct tizen_private *tizen_private = (struct tizen_private *)private;
1210 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1211 int rotation = tizen_private->rotation;
/* Surface may already be destroyed; tizen_private->data is NULL then. */
1213 if (!wl_egl_surface) {
1214 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1219 TPL_INFO("[WINDOW_ROTATE]",
1220 "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1221 wl_egl_surface, wl_egl_window,
1222 wl_egl_surface->rotation, rotation);
/* Store the new rotation; consumed when buffers are committed. */
1224 wl_egl_surface->rotation = rotation;
1227 /* There is no use case for the prerotation callback below */
/* tizen_private callback: reports whether this surface supports
 * pre-rotation, as toggled by __tpl_wl_egl_surface_set_rotation_capability.
 * Returns WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE when the backend surface is
 * already gone. */
1229 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1232 TPL_ASSERT(private);
1233 TPL_ASSERT(wl_egl_window);
1235 int rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1236 struct tizen_private *tizen_private = (struct tizen_private *)private;
1237 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1239 if (!wl_egl_surface) {
1240 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1242 return rotation_capability;
/* Map the backend flag onto the public capability enum. */
1245 if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1246 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1248 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1251 return rotation_capability;
/* tizen_private callback: stores an application-provided serial on the
 * backend surface. Once used, set_serial_is_used stays TPL_TRUE and
 * newly initialized buffers take this serial instead of the
 * auto-incremented tizen_private one (see _wl_egl_buffer_init). */
1255 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1256 void *private, unsigned int serial)
1258 TPL_ASSERT(private);
1259 TPL_ASSERT(wl_egl_window);
1261 struct tizen_private *tizen_private = (struct tizen_private *)private;
1262 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1264 if (!wl_egl_surface) {
1265 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1270 wl_egl_surface->set_serial_is_used = TPL_TRUE;
1271 wl_egl_surface->serial = serial;
/* tizen_private callback: hands the application a commit-sync fd.
 * Lazily creates a backing eventfd (EFD_CLOEXEC) on first use and
 * returns a dup() of it; later calls only dup() the existing fd.
 * The caller owns the returned fd and must close it. Returns -1 on
 * failure. Guarded by commit_sync.mutex. */
1275 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1277 TPL_ASSERT(private);
1278 TPL_ASSERT(wl_egl_window);
1280 int commit_sync_fd = -1;
1282 struct tizen_private *tizen_private = (struct tizen_private *)private;
1283 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1285 if (!wl_egl_surface) {
1286 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
1290 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
/* Fast path: the eventfd already exists, just hand out a duplicate. */
1292 if (wl_egl_surface->commit_sync.fd != -1) {
1293 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1294 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1295 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1296 TPL_DEBUG("[DUP_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1297 wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1298 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1299 return commit_sync_fd;
/* First call: create the backing eventfd, then dup it for the caller. */
1302 wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1303 if (wl_egl_surface->commit_sync.fd == -1) {
1304 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)",
1306 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1310 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1312 TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1313 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1314 TPL_DEBUG("[CREATE_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d)",
1315 wl_egl_surface, commit_sync_fd);
1317 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1319 return commit_sync_fd;
1322 #if TIZEN_FEATURE_ENABLE
/* tizen_private callback (TIZEN_FEATURE_ENABLE only): hands the
 * application a presentation-sync fd. Same scheme as
 * __cb_create_commit_sync_fd: a backing eventfd (EFD_CLOEXEC) is lazily
 * created and every call returns a dup() owned by the caller.
 * Returns -1 on failure. Guarded by presentation_sync.mutex. */
1324 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1326 TPL_ASSERT(private);
1327 TPL_ASSERT(wl_egl_window);
1329 int presentation_sync_fd = -1;
1331 struct tizen_private *tizen_private = (struct tizen_private *)private;
1332 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1334 if (!wl_egl_surface) {
1335 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
1339 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Fast path: eventfd already exists, only dup it. */
1340 if (wl_egl_surface->presentation_sync.fd != -1) {
1341 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1342 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1343 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1344 TPL_DEBUG("[DUP_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1345 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1346 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1347 return presentation_sync_fd;
/* First call: create the backing eventfd, then dup it for the caller. */
1350 wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1351 if (wl_egl_surface->presentation_sync.fd == -1) {
1352 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)",
1354 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1358 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1359 TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1360 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1361 TPL_DEBUG("[CREATE_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1362 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1364 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1366 return presentation_sync_fd;
1368 /* -- END -- wl_egl_window tizen private callback functions */
1370 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
/* tizen_surface_shm_flusher "flush" event: the compositor asks the client
 * to flush its buffers; flush the whole tbm_surface_queue. 'data' is the
 * tpl_wl_egl_surface_t registered with the listener. */
1371 static void __cb_tss_flusher_flush_callback(void *data,
1372 struct tizen_surface_shm_flusher *tss_flusher)
1374 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1375 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1377 TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1378 wl_egl_surface, wl_egl_surface->tbm_queue);
1380 _print_buffer_lists(wl_egl_surface);
1382 tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1383 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1384 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* tizen_surface_shm_flusher "free_flush" event: like the flush event above,
 * but only free (unused) buffers are flushed via
 * tbm_surface_queue_free_flush. */
1389 static void __cb_tss_flusher_free_flush_callback(void *data,
1390 struct tizen_surface_shm_flusher *tss_flusher)
1392 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1393 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1395 TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1396 wl_egl_surface, wl_egl_surface->tbm_queue);
1398 _print_buffer_lists(wl_egl_surface);
1400 tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1401 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1402 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* Listener vtable binding the tizen_surface_shm_flusher protocol events
 * (flush, free_flush) to the handlers above. Registered per-surface in
 * _thread_wl_egl_surface_init. */
1407 static const struct tizen_surface_shm_flusher_listener
1408 tss_flusher_listener = {
1409 __cb_tss_flusher_flush_callback,
1410 __cb_tss_flusher_free_flush_callback
1412 /* -- END -- tizen_surface_shm_flusher_listener */
1415 /* -- BEGIN -- tbm_surface_queue callback functions */
/* tbm_surface_queue reset callback: fired when the queue is reset
 * (e.g. resize or activation change). Marks the surface with
 * reset = TPL_TRUE so the next frame picks up the new size and/or
 * activation state, then notifies the frontend via surface->reset_cb. */
1417 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1420 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1421 tpl_wl_egl_display_t *wl_egl_display = NULL;
1422 tpl_surface_t *surface = NULL;
1423 tpl_bool_t is_activated = TPL_FALSE;
1426 wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1427 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1429 wl_egl_display = wl_egl_surface->wl_egl_display;
1430 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1432 surface = wl_egl_surface->tpl_surface;
1433 TPL_CHECK_ON_NULL_RETURN(surface);
1435 /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
1436 * the changed window size at the next frame. */
1437 width = tbm_surface_queue_get_width(tbm_queue);
1438 height = tbm_surface_queue_get_height(tbm_queue);
1439 if (surface->width != width || surface->height != height) {
1440 TPL_INFO("[QUEUE_RESIZE]",
1441 "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1442 wl_egl_surface, tbm_queue,
1443 surface->width, surface->height, width, height);
1446 /* When queue_reset_callback is called, if is_activated is different from
1447 * its previous state change the reset flag to TPL_TRUE to get a new buffer
1448 * with the changed state(ACTIVATED/DEACTIVATED) at the next frame. */
1449 is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1450 wl_egl_surface->tbm_queue);
1451 if (wl_egl_surface->is_activated != is_activated) {
1453 TPL_INFO("[ACTIVATED]",
1454 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1455 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1457 TPL_LOG_T("[DEACTIVATED]",
1458 " wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1459 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
/* Always flag the reset; the frontend callback lets the EGL layer react. */
1463 wl_egl_surface->reset = TPL_TRUE;
1465 if (surface->reset_cb)
1466 surface->reset_cb(surface->reset_data);
/* tbm_surface_queue acquirable callback: a buffer became acquirable
 * (application enqueued a frame). Wakes the surface worker thread with an
 * ACQUIRABLE message so it acquires and commits the buffer. Sends only if
 * no message is already pending, to avoid flooding the gsource. */
1473 TPL_IGNORE(tbm_queue);
1475 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1476 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1478 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1479 if (wl_egl_surface->sent_message == NONE_MESSAGE) {
1480 wl_egl_surface->sent_message = ACQUIRABLE;
1481 tpl_gsource_send_message(wl_egl_surface->surf_source,
1482 wl_egl_surface->sent_message);
1484 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1486 /* -- END -- tbm_surface_queue callback functions */
/* Per-surface teardown executed on the worker thread when the surface
 * gsource is finalized. Under surf_mutex it: drains and signals any
 * pending presentation feedbacks and the presentation sync eventfd,
 * destroys the explicit-sync object, the shm flusher, the per-surface
 * vblank (including its waiting_buffers list), and finally the
 * tbm_surface_queue. */
1489 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1491 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1493 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1495 TPL_INFO("[SURFACE_FINI]",
1496 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1497 wl_egl_surface, wl_egl_surface->wl_egl_window,
1498 wl_egl_surface->wl_surface);
1499 #if TIZEN_FEATURE_ENABLE
1500 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Wake any waiter on each pending presentation feedback's sync fd before
 * destroying the wl proxy, so clients blocked on the fd are not stuck. */
1502 if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1503 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1504 struct pst_feedback *pst_feedback =
1505 (struct pst_feedback *)__tpl_list_pop_front(
1506 wl_egl_surface->presentation_feedbacks, NULL);
1508 _write_to_eventfd(pst_feedback->pst_sync_fd);
1509 close(pst_feedback->pst_sync_fd);
1510 pst_feedback->pst_sync_fd = -1;
1512 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1513 pst_feedback->presentation_feedback = NULL;
1519 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1520 wl_egl_surface->presentation_feedbacks = NULL;
/* Signal and close the surface-level presentation sync eventfd. */
1523 if (wl_egl_surface->presentation_sync.fd != -1) {
1524 _write_to_eventfd(wl_egl_surface->presentation_sync.fd);
1525 close(wl_egl_surface->presentation_sync.fd);
1526 wl_egl_surface->presentation_sync.fd = -1;
1529 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1531 if (wl_egl_surface->surface_sync) {
1532 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1533 "wl_egl_surface(%p) surface_sync(%p)",
1534 wl_egl_surface, wl_egl_surface->surface_sync);
1535 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1536 wl_egl_surface->surface_sync = NULL;
1539 if (wl_egl_surface->tss_flusher) {
1540 TPL_INFO("[FLUSHER_DESTROY]",
1541 "wl_egl_surface(%p) tss_flusher(%p)",
1542 wl_egl_surface, wl_egl_surface->tss_flusher);
1543 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1544 wl_egl_surface->tss_flusher = NULL;
/* Drop buffers still waiting for vblank, then remove (and free, via
 * __cb_surface_vblank_free) this surface's vblank from the display list. */
1547 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
1548 __tpl_list_free(wl_egl_surface->vblank->waiting_buffers, NULL);
1549 wl_egl_surface->vblank->waiting_buffers = NULL;
1552 if (wl_egl_surface->vblank) {
1553 __tpl_list_remove_data(wl_egl_display->tdm.surface_vblanks,
1554 (void *)wl_egl_surface->vblank,
1556 __cb_surface_vblank_free);
1557 wl_egl_surface->vblank = NULL;
1560 if (wl_egl_surface->tbm_queue) {
1561 TPL_INFO("[TBM_QUEUE_DESTROY]",
1562 "wl_egl_surface(%p) tbm_queue(%p)",
1563 wl_egl_surface, wl_egl_surface->tbm_queue);
1564 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1565 wl_egl_surface->tbm_queue = NULL;
1568 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource dispatch for the surface worker thread. Handles the two
 * messages sent via tpl_gsource_send_message: INIT_SURFACE (create the
 * tbm_queue etc., then signal the waiting caller via surf_cond) and
 * ACQUIRABLE (acquire/commit newly enqueued buffers). Clears
 * sent_message so the next message can be posted. */
1572 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1574 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1576 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1578 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1579 if (message == INIT_SURFACE) { /* Initialize surface */
1580 TPL_DEBUG("wl_egl_surface(%p) initialize message received!",
1582 _thread_wl_egl_surface_init(wl_egl_surface);
/* __tpl_wl_egl_surface_init blocks on surf_cond until init completes. */
1583 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1584 } else if (message == ACQUIRABLE) { /* Acquirable */
1585 TPL_DEBUG("wl_egl_surface(%p) acquirable message received!",
1587 _thread_surface_queue_acquire(wl_egl_surface);
1590 wl_egl_surface->sent_message = NONE_MESSAGE;
1592 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource finalize for the surface worker thread: runs the full
 * per-surface teardown (_thread_wl_egl_surface_fini) when the gsource is
 * destroyed from __tpl_wl_egl_surface_fini. */
1598 __thread_func_surf_finalize(tpl_gsource *gsource)
1600 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1602 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1603 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1605 _thread_wl_egl_surface_fini(wl_egl_surface);
1607 TPL_DEBUG("[FINALIZE] wl_egl_surface(%p) tpl_gsource(%p)",
1608 wl_egl_surface, gsource);
/* gsource vtable for per-surface sources created in
 * __tpl_wl_egl_surface_init. */
1611 static tpl_gsource_functions surf_funcs = {
1614 .dispatch = __thread_func_surf_dispatch,
1615 .finalize = __thread_func_surf_finalize,
/* Backend surface-init entry point (called from the TPL frontend).
 * Allocates the tpl_wl_egl_surface_t, creates its worker gsource, wires
 * the wl_egl_window / tizen_private callbacks, initializes the sync
 * mutexes, then sends INIT_SURFACE to the worker thread and blocks on
 * surf_cond until the thread-side init (tbm_queue creation etc.) is done.
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_OUT_OF_MEMORY /
 * TPL_ERROR_INVALID_PARAMETER / TPL_ERROR_INVALID_OPERATION on failure. */
1619 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1621 tpl_wl_egl_display_t *wl_egl_display = NULL;
1622 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1623 tpl_gsource *surf_source = NULL;
1625 struct wl_egl_window *wl_egl_window =
1626 (struct wl_egl_window *)surface->native_handle;
1628 TPL_ASSERT(surface);
1629 TPL_ASSERT(surface->display);
1630 TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1631 TPL_ASSERT(surface->native_handle);
1634 (tpl_wl_egl_display_t *)surface->display->backend.data;
1635 if (!wl_egl_display) {
1636 TPL_ERR("Invalid parameter. wl_egl_display(%p)",
1638 return TPL_ERROR_INVALID_PARAMETER;
1641 wl_egl_surface = (tpl_wl_egl_surface_t *) calloc(1,
1642 sizeof(tpl_wl_egl_surface_t));
1643 if (!wl_egl_surface) {
1644 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1645 return TPL_ERROR_OUT_OF_MEMORY;
/* The surface gets its own gsource on the display's worker thread;
 * dispatch/finalize are surf_funcs above. */
1648 surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1649 -1, &surf_funcs, SOURCE_TYPE_NORMAL);
1651 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1653 goto surf_source_create_fail;
1656 surface->backend.data = (void *)wl_egl_surface;
1657 surface->width = wl_egl_window->width;
1658 surface->height = wl_egl_window->height;
1659 surface->rotation = 0;
1661 wl_egl_surface->tpl_surface = surface;
1662 wl_egl_surface->width = wl_egl_window->width;
1663 wl_egl_surface->height = wl_egl_window->height;
1664 wl_egl_surface->format = surface->format;
1665 wl_egl_surface->num_buffers = surface->num_buffers;
1667 wl_egl_surface->surf_source = surf_source;
1668 wl_egl_surface->wl_egl_window = wl_egl_window;
1669 wl_egl_surface->wl_surface = wl_egl_window->surface;
1671 wl_egl_surface->wl_egl_display = wl_egl_display;
/* Default state flags; see the corresponding fields for their meaning. */
1673 wl_egl_surface->reset = TPL_FALSE;
1674 wl_egl_surface->is_activated = TPL_FALSE;
1675 wl_egl_surface->need_to_enqueue = TPL_TRUE;
1676 wl_egl_surface->prerotation_capability = TPL_FALSE;
1677 wl_egl_surface->vblank_done = TPL_TRUE;
1678 wl_egl_surface->use_render_done_fence = TPL_FALSE;
1679 wl_egl_surface->set_serial_is_used = TPL_FALSE;
1681 wl_egl_surface->latest_transform = -1;
1682 wl_egl_surface->render_done_cnt = 0;
1683 wl_egl_surface->serial = 0;
1685 wl_egl_surface->vblank = NULL;
1686 #if TIZEN_FEATURE_ENABLE
1687 wl_egl_surface->tss_flusher = NULL;
1688 wl_egl_surface->surface_sync = NULL;
1691 wl_egl_surface->post_interval = surface->post_interval;
/* Sync fds are created lazily by the tizen_private callbacks. */
1693 wl_egl_surface->commit_sync.fd = -1;
1694 wl_egl_surface->presentation_sync.fd = -1;
1696 wl_egl_surface->sent_message = NONE_MESSAGE;
1700 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
1701 wl_egl_surface->buffers[i] = NULL;
1702 wl_egl_surface->buffer_cnt = 0;
1705 wl_egl_surface->last_deq_buffer = NULL;
/* Reuse an existing tizen_private (window previously attached) or create
 * a fresh one and attach it to the wl_egl_window. */
1708 struct tizen_private *tizen_private = NULL;
1710 if (wl_egl_window->driver_private)
1711 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1713 tizen_private = tizen_private_create();
1714 wl_egl_window->driver_private = (void *)tizen_private;
1717 if (tizen_private) {
1718 tizen_private->data = (void *)wl_egl_surface;
1719 tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1720 tizen_private->get_rotation_capability = (void *)
1721 __cb_get_rotation_capability;
1722 tizen_private->set_window_serial_callback = (void *)
1723 __cb_set_window_serial_callback;
1724 tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1725 #if TIZEN_FEATURE_ENABLE
1726 tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1728 tizen_private->create_presentation_sync_fd = NULL;
1731 wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1732 wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1736 tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1737 tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1739 tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1741 tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1742 tpl_gcond_init(&wl_egl_surface->surf_cond);
1744 /* Initialize in thread */
/* Post INIT_SURFACE to the worker and wait; the worker signals surf_cond
 * from __thread_func_surf_dispatch once _thread_wl_egl_surface_init ran. */
1745 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1746 wl_egl_surface->sent_message = INIT_SURFACE;
1747 tpl_gsource_send_message(wl_egl_surface->surf_source,
1748 wl_egl_surface->sent_message);
1749 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1750 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1752 TPL_ASSERT(wl_egl_surface->tbm_queue);
1754 TPL_INFO("[SURFACE_INIT]",
1755 "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1756 surface, wl_egl_surface, wl_egl_surface->surf_source);
1758 return TPL_ERROR_NONE;
1760 surf_source_create_fail:
1761 free(wl_egl_surface);
1762 surface->backend.data = NULL;
1763 return TPL_ERROR_INVALID_OPERATION;
/* Creates the tbm_surface_queue for a surface on the worker thread.
 * Picks the tiled queue variant when the buffer manager reports
 * TBM_BUFMGR_CAPABILITY_TILED_MEMORY, enables GUARANTEE_CYCLE mode, and
 * registers the reset/acquirable callbacks. Returns the queue, or NULL on
 * any failure (queue is destroyed again if a later step fails). */
1766 static tbm_surface_queue_h
1767 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1768 struct wayland_tbm_client *wl_tbm_client,
1771 tbm_surface_queue_h tbm_queue = NULL;
1772 tbm_bufmgr bufmgr = NULL;
1773 unsigned int capability;
1775 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1776 int width = wl_egl_surface->width;
1777 int height = wl_egl_surface->height;
1778 int format = wl_egl_surface->format;
1780 if (!wl_tbm_client || !wl_surface) {
1781 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1782 wl_tbm_client, wl_surface);
/* Probe bufmgr capabilities only; init/deinit immediately afterwards. */
1786 bufmgr = tbm_bufmgr_init(-1);
1787 capability = tbm_bufmgr_get_capability(bufmgr);
1788 tbm_bufmgr_deinit(bufmgr);
1790 if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1791 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1799 tbm_queue = wayland_tbm_client_create_surface_queue(
1809 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
/* GUARANTEE_CYCLE keeps buffers cycling in dequeue order. */
1814 if (tbm_surface_queue_set_modes(
1815 tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1816 TBM_SURFACE_QUEUE_ERROR_NONE) {
1817 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1819 tbm_surface_queue_destroy(tbm_queue);
1823 if (tbm_surface_queue_add_reset_cb(
1825 __cb_tbm_queue_reset_callback,
1826 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1827 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1829 tbm_surface_queue_destroy(tbm_queue);
1833 if (tbm_surface_queue_add_acquirable_cb(
1835 __cb_tbm_queue_acquirable_callback,
1836 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1837 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1839 tbm_surface_queue_destroy(tbm_queue);
/* Creates a tdm_client_vblank on the "primary" output. Fake vblank is
 * enabled (events still delivered when the display is off) and sync mode
 * disabled (asynchronous wait). Returns NULL on failure. */
1846 static tdm_client_vblank*
1847 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1849 tdm_client_vblank *tdm_vblank = NULL;
1850 tdm_client_output *tdm_output = NULL;
1851 tdm_error tdm_err = TDM_ERROR_NONE;
1854 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1858 tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1859 if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1860 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1864 tdm_vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1865 if (!tdm_vblank || tdm_err != TDM_ERROR_NONE) {
1866 TPL_ERR("Failed to create tdm_vblank. tdm_err(%d)", tdm_err);
1870 tdm_client_vblank_set_enable_fake(tdm_vblank, 1);
1871 tdm_client_vblank_set_sync(tdm_vblank, 0);
/* Free callback used when a tpl_surface_vblank_t is removed from
 * wl_egl_display->tdm.surface_vblanks (see __tpl_list_remove_data in
 * _thread_wl_egl_surface_fini): destroys the tdm_vblank and clears the
 * owning surface's back-pointer. */
1877 __cb_surface_vblank_free(void *data)
1879 TPL_CHECK_ON_NULL_RETURN(data);
1881 tpl_surface_vblank_t *vblank = (tpl_surface_vblank_t *)data;
1882 tpl_wl_egl_surface_t *wl_egl_surface = vblank->wl_egl_surface;
1884 TPL_INFO("[VBLANK_DESTROY]",
1885 "wl_egl_surface(%p) surface_vblank(%p) tdm_vblank(%p)",
1886 wl_egl_surface, vblank,
1887 vblank->tdm_vblank);
1889 tdm_client_vblank_destroy(vblank->tdm_vblank);
1890 vblank->tdm_vblank = NULL;
1891 vblank->wl_egl_surface = NULL;
1895 wl_egl_surface->vblank = NULL;
/* Thread-side surface initialization, run in the worker via the
 * INIT_SURFACE message. Creates the tbm_surface_queue, optionally sets up
 * the per-surface tdm vblank (when the display waits for vblank), and —
 * with TIZEN_FEATURE_ENABLE — the shm flusher, explicit-sync object and
 * the presentation feedback list. */
1899 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1901 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1902 tpl_surface_vblank_t *vblank = NULL;
1904 wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
1906 wl_egl_display->wl_tbm_client,
1907 wl_egl_surface->num_buffers);
1908 if (!wl_egl_surface->tbm_queue) {
1909 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
1910 wl_egl_surface, wl_egl_display->wl_tbm_client);
1914 TPL_INFO("[QUEUE_CREATION]",
1915 "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
1916 wl_egl_surface, wl_egl_surface->wl_surface,
1917 wl_egl_display->wl_tbm_client);
1918 TPL_INFO("[QUEUE_CREATION]",
1919 "tbm_queue(%p) size(%d x %d) X %d format(%d)",
1920 wl_egl_surface->tbm_queue,
1921 wl_egl_surface->width,
1922 wl_egl_surface->height,
1923 wl_egl_surface->num_buffers,
1924 wl_egl_surface->format);
/* Per-surface vblank object, tracked on the display's vblank list so
 * _thread_wl_egl_surface_fini can remove and free it. */
1926 if (wl_egl_display->use_wait_vblank) {
1927 vblank = (tpl_surface_vblank_t *)calloc(1, sizeof(tpl_surface_vblank_t));
1929 vblank->tdm_vblank = _thread_create_tdm_client_vblank(
1930 wl_egl_display->tdm.tdm_client);
1931 if (!vblank->tdm_vblank) {
1932 TPL_ERR("Failed to create tdm_vblank from tdm_client(%p)",
1933 wl_egl_display->tdm.tdm_client);
1937 vblank->waiting_buffers = __tpl_list_alloc();
1938 vblank->wl_egl_surface = wl_egl_surface;
1940 __tpl_list_push_back(wl_egl_display->tdm.surface_vblanks,
1943 TPL_INFO("[VBLANK_INIT]",
1944 "wl_egl_surface(%p) tdm_client(%p) tdm_vblank(%p)",
1945 wl_egl_surface, wl_egl_display->tdm.tdm_client,
1946 vblank->tdm_vblank);
1951 wl_egl_surface->vblank = vblank;
1952 #if TIZEN_FEATURE_ENABLE
/* shm flusher lets the compositor ask us to drop buffers (see the
 * tss_flusher_listener handlers above). */
1953 if (wl_egl_display->tss) {
1954 wl_egl_surface->tss_flusher =
1955 tizen_surface_shm_get_flusher(wl_egl_display->tss,
1956 wl_egl_surface->wl_surface);
1959 if (wl_egl_surface->tss_flusher) {
1960 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
1961 &tss_flusher_listener,
1963 TPL_INFO("[FLUSHER_INIT]",
1964 "wl_egl_surface(%p) tss_flusher(%p)",
1965 wl_egl_surface, wl_egl_surface->tss_flusher);
/* Explicit fence sync: failure is non-fatal — disable it display-wide. */
1968 if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
1969 wl_egl_surface->surface_sync =
1970 zwp_linux_explicit_synchronization_v1_get_synchronization(
1971 wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
1972 if (wl_egl_surface->surface_sync) {
1973 TPL_INFO("[EXPLICIT_SYNC_INIT]",
1974 "wl_egl_surface(%p) surface_sync(%p)",
1975 wl_egl_surface, wl_egl_surface->surface_sync);
1977 TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
1979 wl_egl_display->use_explicit_sync = TPL_FALSE;
1983 wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
/* Drains every buffer tracked in wl_egl_surface->buffers[] before surface
 * destruction. For each buffer: waits (with timeout) for in-flight
 * buffers (ENQUEUED..just-before-COMMITTED) to reach a settled state,
 * then releases acquired-but-unreleased buffers back to the queue and
 * cancels still-dequeued ones, finally dropping the internal tbm ref.
 * Locks: wl_event_mutex -> buffers_mutex -> per-buffer mutex. */
1987 _tpl_wl_egl_surface_buffer_clear(tpl_wl_egl_surface_t *wl_egl_surface)
1989 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1990 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1991 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
1992 tpl_bool_t need_to_release = TPL_FALSE;
1993 tpl_bool_t need_to_cancel = TPL_FALSE;
1994 buffer_status_t status = RELEASED;
1997 while (wl_egl_surface->buffer_cnt) {
1998 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
1999 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2000 wl_egl_buffer = wl_egl_surface->buffers[idx];
/* Detach the slot under buffers_mutex before touching the buffer. */
2002 if (wl_egl_buffer) {
2003 wl_egl_surface->buffers[idx] = NULL;
2004 wl_egl_surface->buffer_cnt--;
2006 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2007 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2012 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2014 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2016 status = wl_egl_buffer->status;
2018 TPL_DEBUG("[idx:%d] wl_egl_buffer(%p) tbm_surface(%p) status(%s)",
2020 wl_egl_buffer->tbm_surface,
2021 status_to_string[status]);
/* Buffers between ENQUEUED and COMMITTED are still in flight on the
 * worker thread; wait for their state signal (bounded wait). */
2023 if (status >= ENQUEUED) {
2024 tpl_bool_t need_to_wait = TPL_FALSE;
2025 tpl_result_t wait_result = TPL_ERROR_NONE;
2027 need_to_wait = (status < COMMITTED);
2030 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2031 wait_result = tpl_cond_timed_wait(&wl_egl_buffer->cond,
2032 &wl_egl_buffer->mutex,
2034 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2036 if (wait_result == TPL_ERROR_TIME_OUT)
2037 TPL_WARN("timeout occured waiting signaled. wl_egl_buffer(%p)",
2042 status = wl_egl_buffer->status; /* update status */
2044 /* ACQUIRED, WAITING_SIGNALED, WAITING_VBLANK, COMMITTED */
2045 /* It has been acquired but has not yet been released, so this
2046 * buffer must be released. */
2047 need_to_release = (status >= ACQUIRED && status <= COMMITTED);
2049 /* After dequeue, it has not been enqueued yet
2050 * so cancel_dequeue must be performed. */
2051 need_to_cancel = (status == DEQUEUED);
2053 if (need_to_release) {
2054 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2055 wl_egl_buffer->tbm_surface);
2056 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2057 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2058 wl_egl_buffer->tbm_surface, tsq_err);
2061 if (need_to_cancel) {
2062 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2063 wl_egl_buffer->tbm_surface);
2064 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2065 TPL_ERR("Failed to release tbm_surface(%p) tsq_err(%d)",
2066 wl_egl_buffer->tbm_surface, tsq_err);
2069 wl_egl_buffer->status = RELEASED;
2071 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the extra ref taken when the buffer entered the tracked array. */
2073 if (need_to_release || need_to_cancel)
2074 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2076 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Backend surface-fini entry point. Clears all tracked buffers, destroys
 * the worker gsource (which runs _thread_wl_egl_surface_fini on the
 * thread), unhooks all wl_egl_window / tizen_private callbacks, frees
 * tizen_private, clears the sync mutexes/cond and finally frees the
 * backend surface struct. */
2083 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
2085 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2086 tpl_wl_egl_display_t *wl_egl_display = NULL;
2088 TPL_ASSERT(surface);
2089 TPL_ASSERT(surface->display);
2091 TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
2093 wl_egl_surface = (tpl_wl_egl_surface_t *) surface->backend.data;
2094 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
2096 wl_egl_display = wl_egl_surface->wl_egl_display;
2097 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
2099 TPL_INFO("[SURFACE_FINI][BEGIN]",
2100 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
2102 wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
2104 _tpl_wl_egl_surface_buffer_clear(wl_egl_surface);
/* Destroying the gsource triggers __thread_func_surf_finalize on the
 * worker thread (TPL_TRUE presumably waits for it — confirm against
 * tpl_gsource_destroy). */
2106 if (wl_egl_surface->surf_source)
2107 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
2108 wl_egl_surface->surf_source = NULL;
2110 _print_buffer_lists(wl_egl_surface);
/* Unhook every callback so a still-live wl_egl_window cannot call back
 * into the freed surface; then free the tizen_private we attached. */
2112 if (wl_egl_surface->wl_egl_window) {
2113 struct tizen_private *tizen_private = NULL;
2114 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2115 TPL_INFO("[WL_EGL_WINDOW_FINI]",
2116 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
2117 wl_egl_surface, wl_egl_window,
2118 wl_egl_surface->wl_surface);
2119 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
2120 if (tizen_private) {
2121 tizen_private->set_window_serial_callback = NULL;
2122 tizen_private->rotate_callback = NULL;
2123 tizen_private->get_rotation_capability = NULL;
2124 tizen_private->create_presentation_sync_fd = NULL;
2125 tizen_private->create_commit_sync_fd = NULL;
2126 tizen_private->set_frontbuffer_callback = NULL;
2127 tizen_private->merge_sync_fds = NULL;
2128 tizen_private->data = NULL;
2129 free(tizen_private);
2131 wl_egl_window->driver_private = NULL;
2134 wl_egl_window->destroy_window_callback = NULL;
2135 wl_egl_window->resize_callback = NULL;
2137 wl_egl_surface->wl_egl_window = NULL;
2140 wl_egl_surface->last_deq_buffer = NULL;
2142 wl_egl_surface->wl_surface = NULL;
2143 wl_egl_surface->wl_egl_display = NULL;
2144 wl_egl_surface->tpl_surface = NULL;
/* Lock/unlock before clearing each mutex to ensure no other thread still
 * holds it. */
2146 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2147 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2148 tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
2150 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2151 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2152 tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
2154 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2155 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2156 tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
2157 tpl_gcond_clear(&wl_egl_surface->surf_cond);
2159 TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
2161 free(wl_egl_surface);
2162 surface->backend.data = NULL;
/* Frontend entry point: records whether the surface supports
 * pre-rotation; reported back to the window system via
 * __cb_get_rotation_capability. Returns TPL_ERROR_NONE, or
 * TPL_ERROR_INVALID_PARAMETER for a NULL surface/backend. */
2166 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
2169 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2171 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2173 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2175 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2177 TPL_INFO("[SET_PREROTATION_CAPABILITY]",
2178 "wl_egl_surface(%p) prerotation capability set to [%s]",
2179 wl_egl_surface, (set ? "TRUE" : "FALSE"));
2181 wl_egl_surface->prerotation_capability = set;
2182 return TPL_ERROR_NONE;
/* Frontend entry point: updates the surface's post (swap) interval used
 * when committing frames. Returns TPL_ERROR_NONE, or
 * TPL_ERROR_INVALID_PARAMETER for a NULL surface/backend. */
2186 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
2189 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2191 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2193 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2195 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2197 TPL_INFO("[SET_POST_INTERVAL]",
2198 "wl_egl_surface(%p) post_interval(%d -> %d)",
2199 wl_egl_surface, wl_egl_surface->post_interval, post_interval);
2201 wl_egl_surface->post_interval = post_interval;
2203 return TPL_ERROR_NONE;
/* Frontend entry point: a surface is valid as long as its queue has not
 * been reset since the last frame (reset flag set by
 * __cb_tbm_queue_reset_callback). */
2207 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
2209 tpl_bool_t retval = TPL_TRUE;
2211 TPL_ASSERT(surface);
2212 TPL_ASSERT(surface->backend.data);
2214 tpl_wl_egl_surface_t *wl_egl_surface =
2215 (tpl_wl_egl_surface_t *)surface->backend.data;
2217 retval = !(wl_egl_surface->reset);
/* Frontend entry point: reports the current tbm_surface_queue dimensions.
 * Width/height pointers are written only when non-NULL (the guards are on
 * lines not visible in this view — TODO confirm). */
2223 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2225 tpl_wl_egl_surface_t *wl_egl_surface =
2226 (tpl_wl_egl_surface_t *)surface->backend.data;
2229 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2231 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2234 #define CAN_DEQUEUE_TIMEOUT_MS 10000
/* Recovery path when can-dequeue times out: force-flush the
 * tbm_surface_queue, then release every buffer stuck in COMMITTED state
 * (the compositor never released it) and drop its internal tbm ref.
 * Returns TPL_ERROR_NONE or TPL_ERROR_INVALID_OPERATION. */
2237 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2239 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2241 _print_buffer_lists(wl_egl_surface);
2243 if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2244 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2245 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2246 wl_egl_surface->tbm_queue, tsq_err);
2247 return TPL_ERROR_INVALID_OPERATION;
2252 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2253 for (i = 0; i < BUFFER_ARRAY_SIZE; i++) {
2254 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2255 wl_egl_buffer = wl_egl_surface->buffers[i];
2256 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
/* COMMITTED buffers are manually returned to the queue; the unref pairs
 * with the ref taken when the buffer was tracked. */
2257 if (wl_egl_buffer && wl_egl_buffer->status == COMMITTED) {
2258 wl_egl_buffer->status = RELEASED;
2259 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2260 wl_egl_buffer->tbm_surface);
2261 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2262 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2263 wl_egl_buffer->tbm_surface, tsq_err);
2264 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2269 TPL_INFO("[FORCE_FLUSH]",
2270 "wl_egl_surface(%p) tbm_queue(%p)",
2271 wl_egl_surface, wl_egl_surface->tbm_queue);
2273 return TPL_ERROR_NONE;
/* Reset the per-frame state of a (possibly reused) wl_egl_buffer at
 * dequeue time, refreshing transform/serial information from the
 * wl_egl_window's tizen_private data. */
2277 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2278 tpl_wl_egl_surface_t *wl_egl_surface)
2280 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2281 struct tizen_private *tizen_private =
2282 (struct tizen_private *)wl_egl_window->driver_private;
2284 TPL_ASSERT(tizen_private);
/* Fresh frame: rendering not finished yet, and it must be committed. */
2286 wl_egl_buffer->draw_done = TPL_FALSE;
2287 wl_egl_buffer->need_to_commit = TPL_TRUE;
2288 #if TIZEN_FEATURE_ENABLE
2289 wl_egl_buffer->buffer_release = NULL;
2291 wl_egl_buffer->transform = tizen_private->transform;
/* Latch a window_transform change; w_rotated tells the commit path to
 * re-send the buffer transform to the compositor. */
2293 if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2294 wl_egl_buffer->w_transform = tizen_private->window_transform;
2295 wl_egl_buffer->w_rotated = TPL_TRUE;
/* Buffer serial: either the value explicitly set on the surface, or
 * (presumably the else branch — lost in extraction) the next
 * auto-incremented serial from tizen_private. */
2298 if (wl_egl_surface->set_serial_is_used) {
2299 wl_egl_buffer->serial = wl_egl_surface->serial;
2301 wl_egl_buffer->serial = ++tizen_private->serial;
/* Drop stale damage rects carried over from the previous frame. */
2304 if (wl_egl_buffer->rects) {
2305 free(wl_egl_buffer->rects);
2306 wl_egl_buffer->rects = NULL;
2307 wl_egl_buffer->num_rects = 0;
2311 static tpl_wl_egl_buffer_t *
2312 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2314 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2315 tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2316 (void **)&wl_egl_buffer);
2317 return wl_egl_buffer;
/* Find-or-create the wl_egl_buffer bookkeeping object for a dequeued
 * tbm_surface. A new object is registered on the tbm_surface as user data
 * (freed via __cb_wl_egl_buffer_free), inserted into the surface's fixed
 * buffers[] array (evicting slot 0 if the array is unexpectedly full),
 * and then re-initialized for the new frame via _wl_egl_buffer_init().
 * Returns NULL only on allocation failure. */
2320 static tpl_wl_egl_buffer_t *
2321 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2322 tbm_surface_h tbm_surface)
2324 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2325 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
/* Reuse the buffer object already attached to this tbm_surface, if any. */
2327 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2329 if (!wl_egl_buffer) {
2330 wl_egl_buffer = (tpl_wl_egl_buffer_t *)calloc(1, sizeof(tpl_wl_egl_buffer_t));
2331 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
/* Attach to the tbm_surface so later lookups (and eventual destruction)
 * find this object. */
2333 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2334 (tbm_data_free)__cb_wl_egl_buffer_free);
2335 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2338 wl_egl_buffer->wl_buffer = NULL;
2339 wl_egl_buffer->tbm_surface = tbm_surface;
2340 wl_egl_buffer->bo_name = _get_tbm_surface_bo_name(tbm_surface);
2341 wl_egl_buffer->wl_egl_surface = wl_egl_surface;
2343 wl_egl_buffer->status = RELEASED;
/* All sync fds start invalid. */
2345 wl_egl_buffer->acquire_fence_fd = -1;
2346 wl_egl_buffer->commit_sync_fd = -1;
2347 wl_egl_buffer->presentation_sync_fd = -1;
2348 wl_egl_buffer->release_fence_fd = -1;
2350 wl_egl_buffer->dx = wl_egl_window->dx;
2351 wl_egl_buffer->dy = wl_egl_window->dy;
2352 wl_egl_buffer->width = tbm_surface_get_width(tbm_surface);
2353 wl_egl_buffer->height = tbm_surface_get_height(tbm_surface);
/* -1 forces the first _wl_egl_buffer_init() to treat the window
 * transform as changed. */
2355 wl_egl_buffer->w_transform = -1;
2357 tpl_gmutex_init(&wl_egl_buffer->mutex);
2358 tpl_gcond_init(&wl_egl_buffer->cond);
2360 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
/* Find the first free slot in the fixed-size tracking array. */
2363 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
2364 if (wl_egl_surface->buffers[i] == NULL) break;
2366 /* If this exception is reached,
2367 * it may be a critical memory leak problem. */
2368 if (i == BUFFER_ARRAY_SIZE) {
2369 tpl_wl_egl_buffer_t *evicted_buffer = NULL;
2370 int evicted_idx = 0; /* evict the frontmost buffer */
2372 evicted_buffer = wl_egl_surface->buffers[evicted_idx];
2374 TPL_WARN("wl_egl_surface(%p) buffers array is full. evict one.",
2376 TPL_WARN("evicted buffer (%p) tbm_surface(%p) status(%s)",
2377 evicted_buffer, evicted_buffer->tbm_surface,
2378 status_to_string[evicted_buffer->status]);
2380 /* [TODO] need to think about whether there will be
2381 * better modifications */
2382 wl_egl_surface->buffer_cnt--;
2383 wl_egl_surface->buffers[evicted_idx] = NULL;
2388 wl_egl_surface->buffer_cnt++;
2389 wl_egl_surface->buffers[i] = wl_egl_buffer;
2390 wl_egl_buffer->idx = i;
2392 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2394 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2395 "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2396 wl_egl_surface, wl_egl_buffer, tbm_surface,
2397 wl_egl_buffer->bo_name);
/* Reset per-frame state whether the object is new or reused. */
2400 _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2402 return wl_egl_buffer;
/* Backend dequeue entry point. Waits (up to CAN_DEQUEUE_TIMEOUT_MS) until
 * the tbm_queue can hand out a buffer — force-flushing the queue on
 * timeout — then dequeues a tbm_surface, wraps it in a wl_egl_buffer, and
 * reports the explicit-sync release fence through *release_fence.
 * In frontbuffer mode the already-set frontbuffer is returned directly
 * while it stays activated. Returns NULL on failure.
 * NOTE(review): timeout_ns appears to feed the tpl_cond_timed_wait below
 * (its argument line was lost in extraction) — confirm. */
2405 static tbm_surface_h
2406 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2407 int32_t *release_fence)
2409 TPL_ASSERT(surface);
2410 TPL_ASSERT(surface->backend.data);
2411 TPL_ASSERT(surface->display);
2412 TPL_ASSERT(surface->display->backend.data);
2413 TPL_OBJECT_CHECK_RETURN(surface, NULL);
2415 tpl_wl_egl_surface_t *wl_egl_surface =
2416 (tpl_wl_egl_surface_t *)surface->backend.data;
2417 tpl_wl_egl_display_t *wl_egl_display =
2418 (tpl_wl_egl_display_t *)surface->display->backend.data;
2419 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2421 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2422 tpl_bool_t is_activated = 0;
2424 tbm_surface_h tbm_surface = NULL;
/* Drop the TPL object lock while potentially blocking on the queue. */
2426 TPL_OBJECT_UNLOCK(surface);
/* After a queue reset, wait until the previously dequeued buffer has
 * finished its in-flight pipeline (DEQUEUED..WAITING_VBLANK) before
 * dequeuing again. */
2427 if (wl_egl_surface->reset == TPL_TRUE && wl_egl_surface->last_deq_buffer) {
2428 tpl_wl_egl_buffer_t *last_deq_buffer = wl_egl_surface->last_deq_buffer;
2430 tpl_gmutex_lock(&last_deq_buffer->mutex);
2431 if (last_deq_buffer->status > RELEASED &&
2432 last_deq_buffer->status < COMMITTED) {
2433 tpl_result_t wait_result;
2434 wait_result = tpl_cond_timed_wait(&last_deq_buffer->cond,
2435 &last_deq_buffer->mutex,
2438 if (wait_result == TPL_ERROR_TIME_OUT)
2439 TPL_WARN("timeout occured waiting signaled. wl_egl_buffer(%p)",
2442 tpl_gmutex_unlock(&last_deq_buffer->mutex);
2444 wl_egl_surface->last_deq_buffer = NULL;
2446 tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2447 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2448 TPL_OBJECT_LOCK(surface);
2450 /* After the can dequeue state, lock the wl_event_mutex to prevent other
2451 * events from being processed in wayland_egl_thread
2452 * during below dequeue procedure. */
2453 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
/* Timeout: assume the queue is wedged and force-flush it, then retry the
 * dequeue path below as if can_dequeue succeeded. */
2455 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2456 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2457 wl_egl_surface->tbm_queue, surface);
2458 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2459 TPL_ERR("Failed to timeout reset. tbm_queue(%p) surface(%p)",
2460 wl_egl_surface->tbm_queue, surface);
2461 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2464 tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2468 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2469 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2470 wl_egl_surface->tbm_queue, surface);
2471 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2475 /* wayland client can check their states (ACTIVATED or DEACTIVATED) with
2476 * below function [wayland_tbm_client_queue_check_activate()].
2477 * This function has to be called before tbm_surface_queue_dequeue()
2478 * in order to know what state the buffer will be dequeued next.
2480 * ACTIVATED state means non-composite mode. Client can get buffers which
2481 can be displayed directly(without compositing).
2482 * DEACTIVATED state means composite mode. Client's buffer will be displayed
2483 by compositor(E20) with compositing.
2485 is_activated = wayland_tbm_client_queue_check_activate(
2486 wl_egl_display->wl_tbm_client,
2487 wl_egl_surface->tbm_queue);
2489 wl_egl_surface->is_activated = is_activated;
/* Queue dimensions may have changed (resize); mirror them to the surface. */
2491 surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2492 surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2493 wl_egl_surface->width = surface->width;
2494 wl_egl_surface->height = surface->height;
2496 if (surface->is_frontbuffer_mode && surface->frontbuffer != NULL) {
2497 /* If surface->frontbuffer is already set in frontbuffer mode,
2498 * it will return that frontbuffer if it is still activated,
2499 * otherwise dequeue the new buffer after initializing
2500 * surface->frontbuffer to NULL. */
2501 if (is_activated && !wl_egl_surface->reset) {
2502 bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2505 "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2506 surface->frontbuffer, bo_name);
2507 TRACE_ASYNC_BEGIN((int)surface->frontbuffer,
2508 "[DEQ]~[ENQ] BO_NAME:%d",
2510 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2511 return surface->frontbuffer;
2513 surface->frontbuffer = NULL;
2514 wl_egl_surface->need_to_enqueue = TPL_TRUE;
2517 surface->frontbuffer = NULL;
2520 tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2523 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p)| tsq_err = %d",
2524 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2525 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Hold a reference for the DEQUEUED..ENQUEUED lifetime; dropped in the
 * enqueue/cancel paths. */
2529 tbm_surface_internal_ref(tbm_surface);
2531 wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2532 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2534 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2535 wl_egl_buffer->status = DEQUEUED;
2537 wl_egl_surface->last_deq_buffer = wl_egl_buffer;
2539 /* If wl_egl_buffer->release_fence_fd is -1,
2540 * the tbm_surface can be used immediately.
2541 * If not, user(EGL) have to wait until signaled. */
2542 if (release_fence) {
2543 #if TIZEN_FEATURE_ENABLE
2544 if (wl_egl_surface->surface_sync) {
/* Hand the explicit-sync release fence to the caller; ownership of the
 * fd transfers, so clear it on the buffer. */
2545 *release_fence = wl_egl_buffer->release_fence_fd;
2546 TPL_DEBUG("wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2547 wl_egl_surface, wl_egl_buffer, *release_fence);
2549 wl_egl_buffer->release_fence_fd = -1;
2553 *release_fence = -1;
2557 if (surface->is_frontbuffer_mode && is_activated)
2558 surface->frontbuffer = tbm_surface;
2560 wl_egl_surface->reset = TPL_FALSE;
2562 TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2563 TRACE_ASYNC_BEGIN((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d",
2564 wl_egl_buffer->bo_name);
2565 TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2566 wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name,
2567 release_fence ? *release_fence : -1);
2569 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2570 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Backend hook: return a dequeued-but-unused tbm_surface to the queue
 * without rendering/committing it. Marks the tracking object RELEASED,
 * drops the reference taken at dequeue, and cancels the dequeue on the
 * tbm_queue. Returns TPL_ERROR_NONE on success. */
2576 __tpl_wl_egl_surface_cancel_buffer(tpl_surface_t *surface,
2577 tbm_surface_h tbm_surface)
2579 TPL_ASSERT(surface);
2580 TPL_ASSERT(surface->backend.data);
2582 tpl_wl_egl_surface_t *wl_egl_surface =
2583 (tpl_wl_egl_surface_t *)surface->backend.data;
2584 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2585 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2587 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2588 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2589 return TPL_ERROR_INVALID_PARAMETER;
2592 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2593 if (wl_egl_buffer) {
2594 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2595 wl_egl_buffer->status = RELEASED;
2596 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* This buffer is no longer the one the reset path must wait for. */
2599 if (wl_egl_buffer == wl_egl_surface->last_deq_buffer)
2600 wl_egl_surface->last_deq_buffer = NULL;
/* Drop the reference taken by dequeue_buffer. */
2602 tbm_surface_internal_unref(tbm_surface);
2604 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2606 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2607 TPL_ERR("Failed to release tbm_surface(%p) surface(%p)",
2608 tbm_surface, surface);
2609 return TPL_ERROR_INVALID_OPERATION;
2612 TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2613 wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2615 return TPL_ERROR_NONE;
/* Backend hook: hand a rendered tbm_surface back to the queue for commit.
 * Stores optional damage rects (num_rects x4 ints: x,y,w,h) and the
 * acquire fence on the wl_egl_buffer, claims any pending presentation /
 * commit sync eventfds from the surface, then enqueues to the tbm_queue
 * (which triggers acquire+commit on the wayland-egl thread).
 * Takes ownership of @acquire_fence. Returns TPL_ERROR_NONE on success. */
2619 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2620 tbm_surface_h tbm_surface,
2621 int num_rects, const int *rects, int32_t acquire_fence)
2623 TPL_ASSERT(surface);
2624 TPL_ASSERT(surface->display);
2625 TPL_ASSERT(surface->backend.data);
2626 TPL_ASSERT(tbm_surface);
2627 TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2629 tpl_wl_egl_surface_t *wl_egl_surface =
2630 (tpl_wl_egl_surface_t *) surface->backend.data;
2631 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2632 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2635 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2636 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2638 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2639 return TPL_ERROR_INVALID_PARAMETER;
2642 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2643 if (!wl_egl_buffer) {
2644 TPL_ERR("Failed to get wl_egl_buffer from tbm_surface(%p)", tbm_surface);
2645 return TPL_ERROR_INVALID_PARAMETER;
2648 bo_name = _get_tbm_surface_bo_name(tbm_surface);
2650 TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2652 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2654 /* If there are received region information, save it to wl_egl_buffer */
2655 if (num_rects && rects) {
/* Replace, never append to, damage info from a previous enqueue. */
2656 if (wl_egl_buffer->rects != NULL) {
2657 free(wl_egl_buffer->rects);
2658 wl_egl_buffer->rects = NULL;
2659 wl_egl_buffer->num_rects = 0;
2662 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2663 wl_egl_buffer->num_rects = num_rects;
2665 if (!wl_egl_buffer->rects) {
2666 TPL_ERR("Failed to allocate memory fo damage rects info.");
2667 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2668 return TPL_ERROR_OUT_OF_MEMORY;
2671 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
/* Frontbuffer fast path: the same buffer is already on screen, so skip
 * queue enqueue entirely; the caller's acquire fence is not needed. */
2674 if (!wl_egl_surface->need_to_enqueue ||
2675 !wl_egl_buffer->need_to_commit) {
2676 TPL_WARN("[ENQ_SKIP][Frontbuffer:%s] tbm_surface(%p) need not to enqueue",
2677 ((surface->frontbuffer == tbm_surface) ? "ON" : "OFF"), tbm_surface);
2678 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2679 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2680 return TPL_ERROR_NONE;
2683 /* In frontbuffer mode, will skip tbm_surface_queue_enqueue, acquire, and
2684 * commit if surface->frontbuffer that is already set and the tbm_surface
2685 * client want to enqueue are the same.
2687 if (surface->is_frontbuffer_mode) {
2688 /* The first buffer to be activated in frontbuffer mode must be
2689 * committed. Subsequence frames do not need to be committed because
2690 * the buffer is already displayed.
2692 if (surface->frontbuffer == tbm_surface)
2693 wl_egl_surface->need_to_enqueue = TPL_FALSE;
2695 if (acquire_fence != -1) {
2696 close(acquire_fence);
/* Take ownership of the new acquire fence, closing any stale one. */
2701 if (wl_egl_buffer->acquire_fence_fd != -1)
2702 close(wl_egl_buffer->acquire_fence_fd);
2704 wl_egl_buffer->acquire_fence_fd = acquire_fence;
/* Move the surface's pending presentation-sync eventfd onto this buffer;
 * the commit path will attach presentation feedback to it. */
2706 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2707 if (wl_egl_surface->presentation_sync.fd != -1) {
2708 wl_egl_buffer->presentation_sync_fd = wl_egl_surface->presentation_sync.fd;
2709 wl_egl_surface->presentation_sync.fd = -1;
2711 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Same transfer for the commit-sync eventfd. */
2713 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2714 if (wl_egl_surface->commit_sync.fd != -1) {
2715 wl_egl_buffer->commit_sync_fd = wl_egl_surface->commit_sync.fd;
2716 wl_egl_surface->commit_sync.fd = -1;
2717 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2718 _get_tbm_surface_bo_name(tbm_surface));
2720 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2722 wl_egl_buffer->status = ENQUEUED;
2724 "[ENQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2725 wl_egl_buffer, tbm_surface, bo_name, acquire_fence);
2727 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2729 tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
/* On enqueue failure, drop the dequeue-time reference here since the
 * acquire path will never run for this buffer. */
2731 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2732 tbm_surface_internal_unref(tbm_surface);
2733 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2734 tbm_surface, wl_egl_surface, tsq_err);
2735 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2736 return TPL_ERROR_INVALID_OPERATION;
2739 tbm_surface_internal_unref(tbm_surface);
2741 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2743 return TPL_ERROR_NONE;
/* Dispatch handler of the disposable gsource created around a buffer's
 * acquire fence fd: runs on the wayland-egl thread once the fence
 * signals (rendering finished). Closes the fence, then either commits
 * the buffer immediately or parks it on the vblank waiting list. */
2747 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2749 tpl_wl_egl_buffer_t *wl_egl_buffer =
2750 (tpl_wl_egl_buffer_t *)tpl_gsource_get_data(gsource);
2751 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2752 tbm_surface_h tbm_surface = wl_egl_buffer->tbm_surface;
2754 wl_egl_surface->render_done_cnt++;
2756 TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2757 wl_egl_buffer->acquire_fence_fd);
2759 TPL_DEBUG("[RENDER DONE] wl_egl_buffer(%p) tbm_surface(%p)",
2760 wl_egl_buffer, tbm_surface);
2762 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2763 wl_egl_buffer->status = WAITING_VBLANK;
2765 TPL_DEBUG("[FINALIZE] wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2766 wl_egl_buffer, wl_egl_buffer->waiting_source,
2767 wl_egl_buffer->acquire_fence_fd);
/* The fence has served its purpose; the disposable source also goes away
 * after this dispatch, so drop our pointer to it. */
2769 close(wl_egl_buffer->acquire_fence_fd);
2770 wl_egl_buffer->acquire_fence_fd = -1;
2771 wl_egl_buffer->waiting_source = NULL;
2773 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2775 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
/* No vblank throttling (or vblank already elapsed): commit right away;
 * otherwise queue until __cb_tdm_client_vblank fires. */
2777 if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2778 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2780 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers,
2783 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2789 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2791 TPL_IGNORE(gsource);
2794 static tpl_gsource_functions buffer_funcs = {
2797 .dispatch = __thread_func_waiting_source_dispatch,
2798 .finalize = __thread_func_waiting_source_finalize,
/* Runs on the wayland-egl thread when the tbm_queue has acquirable
 * buffers: drains them all, and for each one either commits immediately,
 * defers until its acquire fence signals (via a disposable gsource), or
 * parks it on the vblank waiting list.
 * Returns TPL_ERROR_NONE, or TPL_ERROR_INVALID_OPERATION on acquire
 * failure. */
2802 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2804 tbm_surface_h tbm_surface = NULL;
2805 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2806 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2807 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2808 tpl_bool_t ready_to_commit = TPL_FALSE;
2810 while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2811 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2813 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2814 TPL_ERR("Failed to acquire from tbm_queue(%p)",
2815 wl_egl_surface->tbm_queue);
2816 return TPL_ERROR_INVALID_OPERATION;
/* Hold a reference for the ACQUIRED..RELEASED lifetime; dropped when the
 * compositor releases the buffer. */
2819 tbm_surface_internal_ref(tbm_surface);
2821 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2822 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2823 "wl_egl_buffer sould be not NULL");
2825 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2827 wl_egl_buffer->status = ACQUIRED;
2829 TPL_LOG_T("WL_EGL", "[ACQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2830 wl_egl_buffer, tbm_surface,
2831 _get_tbm_surface_bo_name(tbm_surface));
/* Buffer carries an acquire fence: with explicit sync the compositor
 * waits on the fence itself, so commit now; otherwise wait locally via a
 * disposable gsource on the fence fd before committing. */
2833 if (wl_egl_buffer->acquire_fence_fd != -1) {
2834 #if TIZEN_FEATURE_ENABLE
2835 if (wl_egl_surface->surface_sync)
2836 ready_to_commit = TPL_TRUE;
/* Replace any stale wait source left from a previous cycle. */
2840 if (wl_egl_buffer->waiting_source) {
2841 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2842 wl_egl_buffer->waiting_source = NULL;
2845 wl_egl_buffer->waiting_source =
2846 tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2847 wl_egl_buffer->acquire_fence_fd, &buffer_funcs,
2848 SOURCE_TYPE_DISPOSABLE);
2849 wl_egl_buffer->status = WAITING_SIGNALED;
2851 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2852 wl_egl_buffer->acquire_fence_fd);
2854 ready_to_commit = TPL_FALSE;
2857 ready_to_commit = TPL_TRUE;
/* Even when ready, honor vblank throttling: commit only if no vblank
 * object exists or the last requested vblank already arrived. */
2860 if (ready_to_commit) {
2861 if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2862 ready_to_commit = TPL_TRUE;
2864 wl_egl_buffer->status = WAITING_VBLANK;
2865 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers, wl_egl_buffer);
2866 ready_to_commit = TPL_FALSE;
2870 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2872 if (ready_to_commit)
2873 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2876 return TPL_ERROR_NONE;
2879 /* -- BEGIN -- tdm_client vblank callback function */
/* tdm vblank-arrived callback: marks the surface's vblank as done and
 * commits buffer(s) parked on the vblank waiting list. Normally commits
 * exactly one buffer per vblank; on a tdm error (e.g. timeout) it drains
 * the whole list so nothing stays stuck. */
2881 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
2882 unsigned int sequence, unsigned int tv_sec,
2883 unsigned int tv_usec, void *user_data)
2885 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)user_data;
2886 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2888 TRACE_ASYNC_END((int)wl_egl_surface, "WAIT_VBLANK");
2889 TPL_DEBUG("[VBLANK] wl_egl_surface(%p)", wl_egl_surface);
/* A timeout is tolerated — commits proceed as if the vblank arrived. */
2891 if (error == TDM_ERROR_TIMEOUT)
2892 TPL_WARN("[TDM_ERROR_TIMEOUT] It will keep going. wl_egl_surface(%p)",
2895 wl_egl_surface->vblank_done = TPL_TRUE;
2897 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2898 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
2899 while (!__tpl_list_is_empty(wl_egl_surface->vblank->waiting_buffers)) {
2900 wl_egl_buffer = (tpl_wl_egl_buffer_t *)__tpl_list_pop_front(
2901 wl_egl_surface->vblank->waiting_buffers,
2904 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2906 /* If a tdm error such as TIMEOUT occurred,
2907 * flush all vblank waiting buffers of its wl_egl_surface.
2908 * Otherwise, only one wl_egl_buffer will be committed per vblank event.
2910 if (error == TDM_ERROR_NONE) break;
2913 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2915 /* -- END -- tdm_client vblank callback function */
2917 #if TIZEN_FEATURE_ENABLE
/* zwp_linux_buffer_release_v1 "fenced_release" event: the compositor is
 * done with the buffer but supplies a fence fd the client must wait on
 * before reusing it. Stores the fence as release_fence_fd, returns the
 * buffer to the tbm_queue, and drops the acquire-time reference. Only
 * acts on buffers still in COMMITTED state. */
2919 __cb_buffer_fenced_release(void *data,
2920 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
2922 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2923 tbm_surface_h tbm_surface = NULL;
2925 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2927 tbm_surface = wl_egl_buffer->tbm_surface;
2929 if (tbm_surface_internal_is_valid(tbm_surface)) {
2931 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2932 if (wl_egl_buffer->status == COMMITTED) {
2933 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2934 tbm_surface_queue_error_e tsq_err;
/* The release object is single-shot: destroy it now that it fired. */
2936 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
2937 wl_egl_buffer->buffer_release = NULL;
/* Fence ownership moves to the buffer; handed to the client on the next
 * dequeue of this buffer. */
2939 wl_egl_buffer->release_fence_fd = fence;
2940 wl_egl_buffer->status = RELEASED;
2942 TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
2943 _get_tbm_surface_bo_name(tbm_surface),
2945 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2946 _get_tbm_surface_bo_name(tbm_surface));
2949 "[FENCED_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2950 wl_egl_buffer, tbm_surface,
2951 _get_tbm_surface_bo_name(tbm_surface),
2954 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2956 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2957 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
/* Drop the reference taken when the buffer was acquired. */
2959 tbm_surface_internal_unref(tbm_surface);
2962 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2965 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* zwp_linux_buffer_release_v1 "immediate_release" event: the compositor
 * is done with the buffer and no fence wait is needed (release_fence_fd
 * is cleared to -1). Otherwise identical to the fenced-release path:
 * return the buffer to the tbm_queue and drop the acquire-time ref. */
2970 __cb_buffer_immediate_release(void *data,
2971 struct zwp_linux_buffer_release_v1 *release)
2973 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2974 tbm_surface_h tbm_surface = NULL;
2976 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2978 tbm_surface = wl_egl_buffer->tbm_surface;
2980 if (tbm_surface_internal_is_valid(tbm_surface)) {
2982 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2983 if (wl_egl_buffer->status == COMMITTED) {
2984 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2985 tbm_surface_queue_error_e tsq_err;
/* Single-shot release object; destroy after it fired. */
2987 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
2988 wl_egl_buffer->buffer_release = NULL;
/* No fence: the buffer is immediately reusable. */
2990 wl_egl_buffer->release_fence_fd = -1;
2991 wl_egl_buffer->status = RELEASED;
2993 TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
2994 _get_tbm_surface_bo_name(tbm_surface));
2995 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2996 _get_tbm_surface_bo_name(tbm_surface));
2999 "[IMMEDIATE_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
3000 wl_egl_buffer, tbm_surface,
3001 _get_tbm_surface_bo_name(tbm_surface));
3003 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3005 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3006 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
/* Drop the reference taken when the buffer was acquired. */
3008 tbm_surface_internal_unref(tbm_surface);
3011 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3014 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3018 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
3019 __cb_buffer_fenced_release,
3020 __cb_buffer_immediate_release,
/* Classic wl_buffer.release handler (used when explicit sync is not in
 * effect): returns a COMMITTED buffer to the tbm_queue, marks it
 * RELEASED, and drops the acquire-time reference — but only when the
 * queue release actually succeeded (tsq_err checked after unlocking). */
3025 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
3027 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
3028 tbm_surface_h tbm_surface = NULL;
3030 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer)
3032 tbm_surface = wl_egl_buffer->tbm_surface;
3034 if (tbm_surface_internal_is_valid(tbm_surface)) {
3035 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
/* Default to an error value so the unref below is skipped when the
 * buffer was not in COMMITTED state. */
3036 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3038 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3040 if (wl_egl_buffer->status == COMMITTED) {
3042 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3044 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3045 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3047 wl_egl_buffer->status = RELEASED;
3049 TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
3050 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3051 _get_tbm_surface_bo_name(tbm_surface));
3053 TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
3054 wl_egl_buffer->wl_buffer, tbm_surface,
3055 _get_tbm_surface_bo_name(tbm_surface));
3058 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the acquire-time reference only on a successful queue release. */
3060 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3061 tbm_surface_internal_unref(tbm_surface);
3063 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3067 static const struct wl_buffer_listener wl_buffer_release_listener = {
3068 (void *)__cb_wl_buffer_release,
3070 #if TIZEN_FEATURE_ENABLE
/* wp_presentation_feedback "sync_output" event: informational only for
 * this backend; all arguments are ignored. */
static void
__cb_presentation_feedback_sync_output(void *data,
		struct wp_presentation_feedback *presentation_feedback,
		struct wl_output *output)
{
	TPL_IGNORE(data);
	TPL_IGNORE(presentation_feedback);
	TPL_IGNORE(output);
}
/* wp_presentation_feedback "presented" event: the committed frame was
 * shown on screen. The timing arguments are ignored; the handler only
 * signals the frame's presentation-sync eventfd, closes it, destroys the
 * feedback object, and unlinks the pst_feedback record from the
 * surface's presentation_feedbacks list. */
3083 __cb_presentation_feedback_presented(void *data,
3084 struct wp_presentation_feedback *presentation_feedback,
3088 uint32_t refresh_nsec,
3093 TPL_IGNORE(tv_sec_hi);
3094 TPL_IGNORE(tv_sec_lo);
3095 TPL_IGNORE(tv_nsec);
3096 TPL_IGNORE(refresh_nsec);
3101 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3102 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
3104 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3106 TPL_DEBUG("[FEEDBACK][PRESENTED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3107 pst_feedback, presentation_feedback, pst_feedback->bo_name);
/* Wake anyone waiting on the presentation-sync eventfd, then close it. */
3109 if (pst_feedback->pst_sync_fd != -1) {
3110 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3112 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3113 pst_feedback->pst_sync_fd);
3116 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3117 "[PRESENTATION_SYNC] bo(%d)",
3118 pst_feedback->bo_name);
3120 close(pst_feedback->pst_sync_fd);
3121 pst_feedback->pst_sync_fd = -1;
/* Feedback objects are single-shot; destroy and clear the record. */
3124 wp_presentation_feedback_destroy(presentation_feedback);
3126 pst_feedback->presentation_feedback = NULL;
3127 pst_feedback->wl_egl_surface = NULL;
3128 pst_feedback->bo_name = 0;
3130 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3135 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* wp_presentation_feedback "discarded" event: the frame was never shown.
 * Handled identically to "presented" — the presentation-sync eventfd is
 * still signaled and closed so waiters are not left blocked, and the
 * pst_feedback record is destroyed and unlinked. */
3139 __cb_presentation_feedback_discarded(void *data,
3140 struct wp_presentation_feedback *presentation_feedback)
3142 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3143 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
3145 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3147 TPL_DEBUG("[FEEDBACK][DISCARDED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3148 pst_feedback, presentation_feedback, pst_feedback->bo_name);
/* Wake waiters on the presentation-sync eventfd, then close it. */
3150 if (pst_feedback->pst_sync_fd != -1) {
3151 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3153 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3154 pst_feedback->pst_sync_fd);
3157 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3158 "[PRESENTATION_SYNC] bo(%d)",
3159 pst_feedback->bo_name);
3161 close(pst_feedback->pst_sync_fd);
3162 pst_feedback->pst_sync_fd = -1;
/* Single-shot feedback object; destroy and clear the record. */
3165 wp_presentation_feedback_destroy(presentation_feedback);
3167 pst_feedback->presentation_feedback = NULL;
3168 pst_feedback->wl_egl_surface = NULL;
3169 pst_feedback->bo_name = 0;
3171 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3176 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3179 static const struct wp_presentation_feedback_listener feedback_listener = {
3180 __cb_presentation_feedback_sync_output, /* sync_output feedback -*/
3181 __cb_presentation_feedback_presented,
3182 __cb_presentation_feedback_discarded
3187 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
3189 tdm_error tdm_err = TDM_ERROR_NONE;
3190 tpl_surface_vblank_t *vblank = wl_egl_surface->vblank;
3192 tdm_err = tdm_client_vblank_wait(vblank->tdm_vblank,
3193 wl_egl_surface->post_interval,
3194 __cb_tdm_client_vblank,
3195 (void *)wl_egl_surface);
3197 if (tdm_err == TDM_ERROR_NONE) {
3198 wl_egl_surface->vblank_done = TPL_FALSE;
3199 TRACE_ASYNC_BEGIN((int)wl_egl_surface, "WAIT_VBLANK");
3201 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
3202 return TPL_ERROR_INVALID_OPERATION;
3205 return TPL_ERROR_NONE;
3209 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
3210 tpl_wl_egl_buffer_t *wl_egl_buffer)
3212 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3213 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
3214 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
3217 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
3218 "wl_egl_buffer sould be not NULL");
3220 if (wl_egl_buffer->wl_buffer == NULL) {
3221 wl_egl_buffer->wl_buffer =
3222 (struct wl_proxy *)wayland_tbm_client_create_buffer(
3223 wl_egl_display->wl_tbm_client,
3224 wl_egl_buffer->tbm_surface);
3226 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
3227 "[FATAL] Failed to create wl_buffer");
3229 TPL_INFO("[WL_BUFFER_CREATE]",
3230 "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3231 wl_egl_buffer, wl_egl_buffer->wl_buffer,
3232 wl_egl_buffer->tbm_surface);
3234 #if TIZEN_FEATURE_ENABLE
3235 if (!wl_egl_display->use_explicit_sync ||
3236 !wl_egl_surface->surface_sync)
3239 wl_buffer_add_listener((struct wl_buffer *)wl_egl_buffer->wl_buffer,
3240 &wl_buffer_release_listener,
3245 version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
3247 #if TIZEN_FEATURE_ENABLE
3248 /* create presentation feedback and add listener */
3249 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3250 if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
3252 struct pst_feedback *pst_feedback = NULL;
3253 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
3255 pst_feedback->presentation_feedback =
3256 wp_presentation_feedback(wl_egl_display->presentation,
3259 pst_feedback->wl_egl_surface = wl_egl_surface;
3260 pst_feedback->bo_name = wl_egl_buffer->bo_name;
3262 pst_feedback->pst_sync_fd = wl_egl_buffer->presentation_sync_fd;
3263 wl_egl_buffer->presentation_sync_fd = -1;
3265 wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
3266 &feedback_listener, pst_feedback);
3267 __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
3268 TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
3269 "[PRESENTATION_SYNC] bo(%d)",
3270 pst_feedback->bo_name);
3272 TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
3274 _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
3275 close(wl_egl_buffer->presentation_sync_fd);
3276 wl_egl_buffer->presentation_sync_fd = -1;
3279 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3282 if (wl_egl_buffer->w_rotated == TPL_TRUE) {
3284 wayland_tbm_client_set_buffer_transform(
3285 wl_egl_display->wl_tbm_client,
3286 (void *)wl_egl_buffer->wl_buffer,
3287 wl_egl_buffer->w_transform);
3289 wl_egl_buffer->w_rotated = TPL_FALSE;
3292 if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3293 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3295 wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
3298 if (wl_egl_window) {
3299 wl_egl_window->attached_width = wl_egl_buffer->width;
3300 wl_egl_window->attached_height = wl_egl_buffer->height;
3303 wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3304 wl_egl_buffer->dx, wl_egl_buffer->dy);
3306 if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3308 wl_surface_damage(wl_surface,
3309 wl_egl_buffer->dx, wl_egl_buffer->dy,
3310 wl_egl_buffer->width, wl_egl_buffer->height);
3312 wl_surface_damage_buffer(wl_surface,
3314 wl_egl_buffer->width, wl_egl_buffer->height);
3318 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3320 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3321 wl_egl_buffer->rects[i * 4 + 3]);
3323 wl_surface_damage(wl_surface,
3324 wl_egl_buffer->rects[i * 4 + 0],
3326 wl_egl_buffer->rects[i * 4 + 2],
3327 wl_egl_buffer->rects[i * 4 + 3]);
3329 wl_surface_damage_buffer(wl_surface,
3330 wl_egl_buffer->rects[i * 4 + 0],
3332 wl_egl_buffer->rects[i * 4 + 2],
3333 wl_egl_buffer->rects[i * 4 + 3]);
3338 wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3339 (void *)wl_egl_buffer->wl_buffer,
3340 wl_egl_buffer->serial);
3341 #if TIZEN_FEATURE_ENABLE
3342 if (wl_egl_display->use_explicit_sync &&
3343 wl_egl_surface->surface_sync) {
3345 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3346 wl_egl_buffer->acquire_fence_fd);
3347 TPL_DEBUG("[SET_ACQUIRE_FENCE] wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3348 wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3349 close(wl_egl_buffer->acquire_fence_fd);
3350 wl_egl_buffer->acquire_fence_fd = -1;
3352 wl_egl_buffer->buffer_release =
3353 zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3354 if (!wl_egl_buffer->buffer_release) {
3355 TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3357 zwp_linux_buffer_release_v1_add_listener(
3358 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3359 TPL_DEBUG("add explicit_sync_release_listener.");
3364 wl_surface_commit(wl_surface);
3366 wl_display_flush(wl_egl_display->wl_display);
3368 TRACE_ASYNC_BEGIN((int)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3369 wl_egl_buffer->bo_name);
3371 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3373 wl_egl_buffer->need_to_commit = TPL_FALSE;
3374 wl_egl_buffer->status = COMMITTED;
3376 tpl_gcond_signal(&wl_egl_buffer->cond);
3378 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3381 "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3382 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3383 wl_egl_buffer->bo_name);
3385 if (wl_egl_surface->vblank != NULL &&
3386 _thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3387 TPL_ERR("Failed to set wait vblank.");
3389 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3391 if (wl_egl_buffer->commit_sync_fd != -1) {
3392 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3394 TPL_ERR("Failed to send commit_sync signal to fd(%d)", wl_egl_buffer->commit_sync_fd);
3397 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3398 wl_egl_buffer->bo_name);
3399 TPL_DEBUG("[COMMIT_SYNC][SEND] wl_egl_surface(%p) commit_sync_fd(%d)",
3400 wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3402 close(wl_egl_buffer->commit_sync_fd);
3403 wl_egl_buffer->commit_sync_fd = -1;
3406 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
/* Signal an eventfd by writing the value 1 to it (adds 1 to the counter,
 * waking any reader blocked on the fd).
 *
 * @param eventfd  eventfd file descriptor to signal; -1 is rejected.
 * @return number of bytes written (sizeof(uint64_t)) on success, -1 on error.
 */
static int
_write_to_eventfd(int eventfd)
{
	uint64_t value = 1;
	/* write() returns ssize_t; keep the full-width result until after the
	 * error check instead of narrowing it to int first. */
	ssize_t ret;

	if (eventfd == -1) {
		TPL_ERR("Invalid fd(-1)");
		return -1;
	}

	ret = write(eventfd, &value, sizeof(uint64_t));
	if (ret == -1) {
		TPL_ERR("failed to write to fd(%d)", eventfd);
		return -1;
	}

	return (int)ret;
}
3430 __tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
3432 TPL_ASSERT(backend);
3434 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3435 backend->data = NULL;
3437 backend->init = __tpl_wl_egl_display_init;
3438 backend->fini = __tpl_wl_egl_display_fini;
3439 backend->query_config = __tpl_wl_egl_display_query_config;
3440 backend->filter_config = __tpl_wl_egl_display_filter_config;
3441 backend->get_window_info = __tpl_wl_egl_display_get_window_info;
3442 backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
3443 backend->get_buffer_from_native_pixmap =
3444 __tpl_wl_egl_display_get_buffer_from_native_pixmap;
3448 __tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
3450 TPL_ASSERT(backend);
3452 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3453 backend->data = NULL;
3455 backend->init = __tpl_wl_egl_surface_init;
3456 backend->fini = __tpl_wl_egl_surface_fini;
3457 backend->validate = __tpl_wl_egl_surface_validate;
3458 backend->cancel_dequeued_buffer =
3459 __tpl_wl_egl_surface_cancel_buffer;
3460 backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
3461 backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
3462 backend->set_rotation_capability =
3463 __tpl_wl_egl_surface_set_rotation_capability;
3464 backend->set_post_interval =
3465 __tpl_wl_egl_surface_set_post_interval;
3467 __tpl_wl_egl_surface_get_size;
/* Final destructor for a wl_egl_buffer.
 * Unregisters the buffer from its owning surface's buffers[] tracking array,
 * destroys the wl_buffer proxy, signals and closes any still-pending sync
 * eventfds so no waiter stays blocked, releases ancillary resources, and
 * finally frees the struct itself. */
__cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
	tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
	tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
	TPL_INFO("[BUFFER_FREE]", "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
		wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface);
	/* Drop this buffer from the surface's tracking array under buffers_mutex
	 * and keep buffer_cnt consistent with the array contents. */
	tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
	if (wl_egl_buffer->idx >= 0 && wl_egl_surface->buffers[wl_egl_buffer->idx]) {
		wl_egl_surface->buffers[wl_egl_buffer->idx] = NULL;
		wl_egl_surface->buffer_cnt--;
		/* -1 marks the buffer as no longer tracked by any slot. */
		wl_egl_buffer->idx = -1;
	tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
	if (wl_egl_display) {
		if (wl_egl_buffer->wl_buffer) {
			/* Destroy the wl_buffer proxy through the same wayland-tbm
			 * client that created it. */
			wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
				(void *)wl_egl_buffer->wl_buffer);
			wl_egl_buffer->wl_buffer = NULL;
		/* Push the destroy request out to the compositor promptly. */
		wl_display_flush(wl_egl_display->wl_display);
#if TIZEN_FEATURE_ENABLE
	/* Explicit sync: release the per-buffer buffer_release object and the
	 * release fence fd received from the compositor, if any. */
	if (wl_egl_buffer->buffer_release) {
		zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
		wl_egl_buffer->buffer_release = NULL;
	if (wl_egl_buffer->release_fence_fd != -1) {
		close(wl_egl_buffer->release_fence_fd);
		wl_egl_buffer->release_fence_fd = -1;
	/* Tear down the gsource that was waiting on this buffer, if any.
	 * TPL_FALSE: destroy without the "destroy in thread" path — NOTE(review):
	 * exact flag semantics come from tpl_utils_gthread.h; confirm there. */
	if (wl_egl_buffer->waiting_source) {
		tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
		wl_egl_buffer->waiting_source = NULL;
	/* Signal, then close, any sync eventfds that were never delivered so
	 * that threads waiting on commit/presentation sync are woken. */
	if (wl_egl_buffer->commit_sync_fd != -1) {
		int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
		/* Error log below fires when the eventfd write failed. */
		TPL_ERR("Failed to send commit_sync signal to fd(%d)",
			wl_egl_buffer->commit_sync_fd);
		close(wl_egl_buffer->commit_sync_fd);
		wl_egl_buffer->commit_sync_fd = -1;
	if (wl_egl_buffer->presentation_sync_fd != -1) {
		int ret = _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
		TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
			wl_egl_buffer->presentation_sync_fd);
		close(wl_egl_buffer->presentation_sync_fd);
		wl_egl_buffer->presentation_sync_fd = -1;
	/* Free the damage-rect array attached at enqueue time. */
	if (wl_egl_buffer->rects) {
		free(wl_egl_buffer->rects);
		wl_egl_buffer->rects = NULL;
		wl_egl_buffer->num_rects = 0;
	/* Scrub identifying fields before freeing to aid use-after-free triage. */
	wl_egl_buffer->tbm_surface = NULL;
	wl_egl_buffer->bo_name = -1;
	wl_egl_buffer->status = RELEASED;
	free(wl_egl_buffer);
3547 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
3549 return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
/* Debug helper: log every live wl_egl_buffer tracked by the surface —
 * slot index, tbm_surface, bo name, and lifecycle status.
 * Holds buffers_mutex so the buffers[] array cannot change mid-dump. */
_print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
	tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
	TPL_INFO("[BUFFERS_INFO]", "wl_egl_surface(%p) buffer_cnt(%d)",
		wl_egl_surface, wl_egl_surface->buffer_cnt);
	/* BUFFER_ARRAY_SIZE is the fixed capacity of buffers[] (defined above). */
	for (idx = 0; idx < BUFFER_ARRAY_SIZE; idx++) {
		tpl_wl_egl_buffer_t *wl_egl_buffer = wl_egl_surface->buffers[idx];
		if (wl_egl_buffer) {
			/* status_to_string maps the buffer status enum to readable text. */
			"INDEX[%d] | wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%s)",
			idx, wl_egl_buffer, wl_egl_buffer->tbm_surface,
			wl_egl_buffer->bo_name,
			status_to_string[wl_egl_buffer->status]);
	tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);