2 #include "tpl_internal.h"
7 #include <sys/eventfd.h>
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
19 #include <tdm_client.h>
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
30 #include <presentation-time-client-protocol.h>
31 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
34 #include "tpl_utils_gthread.h"
/* The *address* of this object is used as a unique key for tbm_surface
 * user-data, mapping a tbm_surface back to its tpl_wl_egl_buffer_t
 * (see _get_wl_egl_buffer). The value itself is never read. */
36 static int wl_egl_buffer_key;
37 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
39 /* In wayland, application and compositor create its own drawing buffers. Recommend size is more than 2. */
/* Capacity of tpl_wl_egl_surface_t::buffers used for buffer tracing. */
40 #define BUFFER_ARRAY_SIZE 9
/* Forward typedefs for the backend's main state objects. */
42 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
43 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
44 typedef struct _tpl_wl_egl_buffer tpl_wl_egl_buffer_t;
45 typedef struct _surface_vblank tpl_surface_vblank_t;
/* Per-display backend state: wayland connection/queue, wayland-tbm client,
 * optional tdm-client for vblank waiting, and Tizen protocol globals.
 * NOTE(review): listing is incomplete — later code accesses these tdm
 * members through a nested `tdm` struct (wl_egl_display->tdm.tdm_client etc.)
 * and also uses `thread`, `use_tss` and `prepared` members that are not
 * visible in this excerpt; the missing lines presumably declare them. */
47 struct _tpl_wl_egl_display {
48 tpl_gsource *disp_source;
/* Serializes wl_display event dispatching against API-thread access. */
50 tpl_gmutex wl_event_mutex;
52 struct wl_display *wl_display;
/* Private event queue for this backend's wayland proxies. */
53 struct wl_event_queue *ev_queue;
54 struct wayland_tbm_client *wl_tbm_client;
55 int last_error; /* errno of the last wl_display error*/
57 tpl_bool_t wl_initialized;
59 tpl_bool_t use_wait_vblank;
60 tpl_bool_t use_explicit_sync;
65 tdm_client *tdm_client;
66 tpl_gsource *tdm_source;
68 tpl_bool_t tdm_initialized;
/* List of tpl_surface_vblank_t, freed in __thread_func_tdm_finalize. */
69 tpl_list_t *surface_vblanks;
72 #if TIZEN_FEATURE_ENABLE
73 struct tizen_surface_shm *tss; /* used for surface buffer_flush */
74 struct wp_presentation *presentation; /* for presentation feedback */
75 struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
79 typedef enum surf_message {
/* Per-window backend state: tbm queue, native window handles, vblank
 * bookkeeping and the in-flight buffer tracing array.
 * NOTE(review): excerpt is incomplete; members referenced elsewhere
 * (width/height, format, rotation, serial, ...) are in the elided lines. */
85 struct _tpl_wl_egl_surface {
86 tpl_gsource *surf_source;
88 tbm_surface_queue_h tbm_queue;
91 struct wl_egl_window *wl_egl_window;
92 struct wl_surface *wl_surface;
94 #if TIZEN_FEATURE_ENABLE
95 struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
96 struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
/* Per-surface tdm vblank context; entry of display's surface_vblanks list. */
99 tpl_surface_vblank_t *vblank;
101 /* surface information */
108 int latest_transform;
112 tpl_wl_egl_display_t *wl_egl_display;
113 tpl_surface_t *tpl_surface;
115 /* wl_egl_buffer array for buffer tracing */
116 tpl_wl_egl_buffer_t *buffers[BUFFER_ARRAY_SIZE];
117 int buffer_cnt; /* the number of using wl_egl_buffers */
/* Protects buffers[] and buffer_cnt. */
118 tpl_gmutex buffers_mutex;
119 tpl_wl_egl_buffer_t *last_deq_buffer;
121 tpl_list_t *presentation_feedbacks; /* for tracing presentation feedbacks */
133 tpl_gmutex surf_mutex;
136 surf_message sent_message;
138 /* for waiting draw done */
139 tpl_bool_t use_render_done_fence;
140 tpl_bool_t is_activated;
141 tpl_bool_t reset; /* TRUE if queue reseted by external */
142 tpl_bool_t need_to_enqueue;
143 tpl_bool_t prerotation_capability;
144 tpl_bool_t vblank_done;
145 tpl_bool_t set_serial_is_used;
/* tdm vblank context owned by one wl_egl_surface; freed via
 * __cb_surface_vblank_free when the display's vblank list is destroyed. */
148 struct _surface_vblank {
149 tdm_client_vblank *tdm_vblank;
150 tpl_wl_egl_surface_t *wl_egl_surface;
151 tpl_list_t *waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
/* Lifecycle states of a wl_egl_buffer; status_to_string must stay index-
 * aligned with this enum (7 entries).
 * NOTE(review): most enumerators/strings are elided in this excerpt. */
155 typedef enum buffer_status {
160 WAITING_SIGNALED, // 4
165 static const char *status_to_string[7] = {
170 "WAITING_SIGNALED", // 4
171 "WAITING_VBLANK", // 5
/* Per-buffer state attached to a tbm_surface via KEY_WL_EGL_BUFFER:
 * the wl_buffer proxy, attach geometry, tracing status, and the various
 * sync fds (explicit fence, commit, presentation). */
175 struct _tpl_wl_egl_buffer {
176 tbm_surface_h tbm_surface;
179 struct wl_proxy *wl_buffer;
180 int dx, dy; /* position to attach to wl_surface */
181 int width, height; /* size to attach to wl_surface */
183 buffer_status_t status; /* for tracing buffer status */
184 int idx; /* position index in buffers array of wl_egl_surface */
186 /* for damage region */
190 /* for wayland_tbm_client_set_buffer_transform */
192 tpl_bool_t w_rotated;
194 /* for wl_surface_set_buffer_transform */
197 /* for wayland_tbm_client_set_buffer_serial */
200 /* for checking need_to_commit (frontbuffer mode) */
201 tpl_bool_t need_to_commit;
203 /* for checking draw done */
204 tpl_bool_t draw_done;
206 #if TIZEN_FEATURE_ENABLE
207 /* to get release event via zwp_linux_buffer_release_v1 */
208 struct zwp_linux_buffer_release_v1 *buffer_release;
210 /* each buffers own its release_fence_fd, until it passes ownership
212 int32_t release_fence_fd;
214 /* each buffers own its acquire_fence_fd.
215 * If it use zwp_linux_buffer_release_v1 the ownership of this fd
216 * will be passed to display server
217 * Otherwise it will be used as a fence waiting for render done
219 int32_t acquire_fence_fd;
221 /* Fd to send a signal when wl_surface_commit with this buffer */
222 int32_t commit_sync_fd;
224 /* Fd to send a signal when receive the
225 * presentation feedback from display server */
226 int32_t presentation_sync_fd;
/* gsource polling acquire_fence_fd for render-done. */
228 tpl_gsource *waiting_source;
233 tpl_wl_egl_surface_t *wl_egl_surface;
236 #if TIZEN_FEATURE_ENABLE
/* Tracks one outstanding wp_presentation_feedback request; entries live
 * in wl_egl_surface->presentation_feedbacks. */
237 struct pst_feedback {
238 /* to get presentation feedback from display server */
239 struct wp_presentation_feedback *presentation_feedback;
244 tpl_wl_egl_surface_t *wl_egl_surface;
/* Forward declarations for helpers defined later in this file. */
249 static const struct wl_buffer_listener wl_buffer_release_listener;
252 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
254 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
256 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
257 static tpl_wl_egl_buffer_t *
258 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
260 _write_to_eventfd(int eventfd);
262 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
264 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
266 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
267 tpl_wl_egl_buffer_t *wl_egl_buffer);
269 __cb_surface_vblank_free(void *data);
/* Allocate and zero-initialize the tizen_private block that is stored in
 * wl_egl_window->driver_private; all callback slots start NULL and are
 * wired up when the tpl surface is created.
 * NOTE(review): the line after calloc is elided here — presumably a NULL
 * check returning NULL on allocation failure; confirm against full source. */
271 static struct tizen_private *
272 tizen_private_create()
274 struct tizen_private *private = NULL;
275 private = (struct tizen_private *)calloc(1, sizeof(struct tizen_private));
/* Magic lets consumers validate the driver_private pointer. */
277 private->magic = WL_EGL_TIZEN_MAGIC;
278 private->rotation = 0;
279 private->frontbuffer_mode = 0;
280 private->transform = 0;
281 private->window_transform = 0;
284 private->data = NULL;
285 private->rotate_callback = NULL;
286 private->get_rotation_capability = NULL;
287 private->set_window_serial_callback = NULL;
288 private->set_frontbuffer_callback = NULL;
289 private->create_commit_sync_fd = NULL;
290 private->create_presentation_sync_fd = NULL;
291 private->merge_sync_fds = NULL;
/* Heuristically verify that a native display handle is a wl_display:
 * the first pointer-sized value of a wl_display points at
 * wl_display_interface (or an interface with the same name when the
 * process links a second copy of libwayland). */
298 _check_native_handle_is_wl_display(tpl_handle_t display)
300 struct wl_interface *wl_egl_native_dpy = *(void **) display;
302 if (!wl_egl_native_dpy) {
303 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
307 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
308 is a memory address pointing the structure of wl_display_interface. */
309 if (wl_egl_native_dpy == &wl_display_interface)
/* Fallback: compare interface names when addresses differ. */
312 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
313 strlen(wl_display_interface.name)) == 0) {
/* gsource dispatch for the tdm-client fd on wl_egl_thread: handle pending
 * tdm events (vblank callbacks). On unrecoverable errors the tdm gsource
 * removes itself from the thread. */
321 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
323 tpl_wl_egl_display_t *wl_egl_display = NULL;
324 tdm_error tdm_err = TDM_ERROR_NONE;
328 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
329 if (!wl_egl_display) {
330 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
331 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
335 tdm_err = tdm_client_handle_events(wl_egl_display->tdm.tdm_client);
337 /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
338 * When tdm_source is no longer available due to an unexpected situation,
339 * wl_egl_thread must remove it from the thread and destroy it.
340 * In that case, tdm_vblank can no longer be used for surfaces and displays
341 * that used this tdm_source. */
342 if (tdm_err != TDM_ERROR_NONE) {
343 TPL_ERR("Error occured in tdm_client_handle_events. tdm_err(%d)",
345 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
/* Destroy without waiting; finalize runs on this same thread. */
347 tpl_gsource_destroy(gsource, TPL_FALSE);
349 wl_egl_display->tdm.tdm_source = NULL;
/* gsource finalize for the tdm source: free the per-surface vblank list,
 * destroy the tdm_client and reset all tdm-related display state. */
358 __thread_func_tdm_finalize(tpl_gsource *gsource)
360 tpl_wl_egl_display_t *wl_egl_display = NULL;
362 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
365 "tdm_destroy| wl_egl_display(%p) tdm_client(%p) tpl_gsource(%p)",
366 wl_egl_display, wl_egl_display->tdm.tdm_client, gsource);
368 if (wl_egl_display->tdm.tdm_client) {
/* Free vblank contexts before destroying the client they belong to. */
370 if (wl_egl_display->tdm.surface_vblanks) {
371 __tpl_list_free(wl_egl_display->tdm.surface_vblanks,
372 __cb_surface_vblank_free);
373 wl_egl_display->tdm.surface_vblanks = NULL;
376 tdm_client_destroy(wl_egl_display->tdm.tdm_client);
377 wl_egl_display->tdm.tdm_client = NULL;
378 wl_egl_display->tdm.tdm_display_fd = -1;
379 wl_egl_display->tdm.tdm_source = NULL;
/* With no tdm source, vblank waiting is no longer possible. */
382 wl_egl_display->use_wait_vblank = TPL_FALSE;
383 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
/* gsource vtable for the tdm-client fd (no prepare/check hooks visible
 * in this excerpt — the elided lines presumably leave them NULL). */
386 static tpl_gsource_functions tdm_funcs = {
389 .dispatch = __thread_func_tdm_dispatch,
390 .finalize = __thread_func_tdm_finalize,
/* Runs on wl_egl_thread: create a tdm_client, fetch its poll fd and record
 * both in wl_egl_display->tdm. The gsource for the fd is created later by
 * __tpl_wl_egl_display_init. Returns TPL_ERROR_NONE on success. */
394 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
396 tdm_client *tdm_client = NULL;
397 int tdm_display_fd = -1;
398 tdm_error tdm_err = TDM_ERROR_NONE;
400 tdm_client = tdm_client_create(&tdm_err);
401 if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
402 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
403 return TPL_ERROR_INVALID_OPERATION;
406 tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
407 if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
408 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
/* Roll back the client created above before failing. */
409 tdm_client_destroy(tdm_client);
410 return TPL_ERROR_INVALID_OPERATION;
413 wl_egl_display->tdm.tdm_display_fd = tdm_display_fd;
414 wl_egl_display->tdm.tdm_client = tdm_client;
415 wl_egl_display->tdm.tdm_source = NULL;
416 wl_egl_display->tdm.tdm_initialized = TPL_TRUE;
417 wl_egl_display->tdm.surface_vblanks = __tpl_list_alloc();
419 TPL_INFO("[TDM_CLIENT_INIT]",
420 "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
421 wl_egl_display, tdm_client, tdm_display_fd);
423 return TPL_ERROR_NONE;
/* Highest tizen_surface_shm protocol version this client implements. */
426 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
/* wl_registry global handler: bind the Tizen globals this backend uses —
 * tizen_surface_shm (capped at v2), wp_presentation (v1) and
 * zwp_linux_explicit_synchronization_v1 (v1). The TPL_EFS env var set to
 * "0" disables explicit sync even when the compositor advertises it.
 * NOTE(review): "resistry" in the name is a typo for "registry"; kept
 * because the identifier is referenced by registry_listener below. */
430 __cb_wl_resistry_global_callback(void *data, struct wl_registry *wl_registry,
431 uint32_t name, const char *interface,
434 #if TIZEN_FEATURE_ENABLE
435 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
437 if (!strcmp(interface, "tizen_surface_shm")) {
438 wl_egl_display->tss =
439 wl_registry_bind(wl_registry,
441 &tizen_surface_shm_interface,
/* Bind the lower of the advertised and implemented versions. */
442 ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
443 version : IMPL_TIZEN_SURFACE_SHM_VERSION));
444 wl_egl_display->use_tss = TPL_TRUE;
445 } else if (!strcmp(interface, wp_presentation_interface.name)) {
446 wl_egl_display->presentation =
447 wl_registry_bind(wl_registry,
448 name, &wp_presentation_interface, 1);
449 TPL_DEBUG("bind wp_presentation_interface");
450 } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
451 char *env = tpl_getenv("TPL_EFS");
452 if (env && !atoi(env)) {
453 wl_egl_display->use_explicit_sync = TPL_FALSE;
455 wl_egl_display->explicit_sync =
456 wl_registry_bind(wl_registry, name,
457 &zwp_linux_explicit_synchronization_v1_interface, 1);
458 wl_egl_display->use_explicit_sync = TPL_TRUE;
459 TPL_DEBUG("bind zwp_linux_explicit_synchronization_v1_interface");
/* wl_registry global_remove handler — intentionally a no-op (body elided). */
466 __cb_wl_resistry_global_remove_callback(void *data,
467 struct wl_registry *wl_registry,
/* Listener wiring for wl_display_get_registry in _thread_wl_display_init. */
472 static const struct wl_registry_listener registry_listener = {
473 __cb_wl_resistry_global_callback,
474 __cb_wl_resistry_global_remove_callback
/* Log a wayland display error once per distinct errno (deduplicated via
 * last_error). For EPROTO, also query and log the protocol error detail.
 * Side effect: records errno in wl_egl_display->last_error, which makes
 * the display gsource bail out on subsequent iterations. */
478 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
479 const char *func_name)
483 strerror_r(errno, buf, sizeof(buf));
/* Suppress repeated logging of the same error. */
485 if (wl_egl_display->last_error == errno)
488 TPL_ERR("falied to %s. error:%d(%s)", func_name, errno, buf);
490 dpy_err = wl_display_get_error(wl_egl_display->wl_display);
491 if (dpy_err == EPROTO) {
492 const struct wl_interface *err_interface;
493 uint32_t err_proxy_id, err_code;
494 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
497 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
498 err_interface->name, err_code, err_proxy_id);
501 wl_egl_display->last_error = errno;
/* Runs on wl_egl_thread: set up the backend's wayland plumbing —
 * a temporary queue for registry binding, the persistent ev_queue, the
 * wayland-tbm client, and (TIZEN_FEATURE_ENABLE) moving the bound Tizen
 * globals onto ev_queue. On any failure the partially-created wrapper,
 * registry and queue are torn down (labels in elided lines). */
505 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
507 struct wl_registry *registry = NULL;
508 struct wl_event_queue *queue = NULL;
509 struct wl_display *display_wrapper = NULL;
510 struct wl_proxy *wl_tbm = NULL;
511 struct wayland_tbm_client *wl_tbm_client = NULL;
513 tpl_result_t result = TPL_ERROR_NONE;
/* Temporary queue used only for the registry roundtrip below. */
515 queue = wl_display_create_queue(wl_egl_display->wl_display);
517 TPL_ERR("Failed to create wl_queue wl_display(%p)",
518 wl_egl_display->wl_display);
519 result = TPL_ERROR_INVALID_OPERATION;
/* Persistent private queue for all of this backend's proxies. */
523 wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
524 if (!wl_egl_display->ev_queue) {
525 TPL_ERR("Failed to create wl_queue wl_display(%p)",
526 wl_egl_display->wl_display);
527 result = TPL_ERROR_INVALID_OPERATION;
/* Wrapper lets us retarget the registry to our queue without touching
 * the application's wl_display queue. */
531 display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
532 if (!display_wrapper) {
533 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
534 wl_egl_display->wl_display);
535 result = TPL_ERROR_INVALID_OPERATION;
539 wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
541 registry = wl_display_get_registry(display_wrapper);
543 TPL_ERR("Failed to create wl_registry");
544 result = TPL_ERROR_INVALID_OPERATION;
/* Wrapper no longer needed once the registry proxy exists. */
548 wl_proxy_wrapper_destroy(display_wrapper);
549 display_wrapper = NULL;
551 wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
552 if (!wl_tbm_client) {
553 TPL_ERR("Failed to initialize wl_tbm_client.");
554 result = TPL_ERROR_INVALID_CONNECTION;
558 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
560 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
561 result = TPL_ERROR_INVALID_CONNECTION;
565 wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
566 wl_egl_display->wl_tbm_client = wl_tbm_client;
/* NOTE(review): "®istry_listener" is mis-encoded text for
 * "&registry_listener" — restore the '&reg' sequence in the real source. */
568 if (wl_registry_add_listener(registry, &registry_listener,
570 TPL_ERR("Failed to wl_registry_add_listener");
571 result = TPL_ERROR_INVALID_OPERATION;
/* Roundtrip so all globals are announced and bound before continuing. */
575 ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
577 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
578 result = TPL_ERROR_INVALID_OPERATION;
582 #if TIZEN_FEATURE_ENABLE
583 /* set tizen_surface_shm's queue as client's private queue */
584 if (wl_egl_display->tss) {
585 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
586 wl_egl_display->ev_queue);
587 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
590 if (wl_egl_display->presentation) {
591 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
592 wl_egl_display->ev_queue);
593 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
594 wl_egl_display->presentation);
597 if (wl_egl_display->explicit_sync) {
598 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
599 wl_egl_display->ev_queue);
600 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
601 wl_egl_display->explicit_sync);
604 wl_egl_display->wl_initialized = TPL_TRUE;
606 TPL_INFO("[WAYLAND_INIT]",
607 "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
608 wl_egl_display, wl_egl_display->wl_display,
609 wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
610 #if TIZEN_FEATURE_ENABLE
611 TPL_INFO("[WAYLAND_INIT]",
612 "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
613 wl_egl_display->tss, wl_egl_display->presentation,
614 wl_egl_display->explicit_sync);
/* Cleanup path (labels elided): drop whatever was created above. */
618 wl_proxy_wrapper_destroy(display_wrapper);
620 wl_registry_destroy(registry);
622 wl_event_queue_destroy(queue);
/* Runs on wl_egl_thread: tear down everything _thread_wl_display_init
 * created — cancel a pending read, flush remaining events, destroy the
 * Tizen protocol globals, deinit wayland-tbm and destroy ev_queue. */
628 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
630 /* If wl_egl_display is in prepared state, cancel it */
631 if (wl_egl_display->prepared) {
632 wl_display_cancel_read(wl_egl_display->wl_display);
633 wl_egl_display->prepared = TPL_FALSE;
/* Final roundtrip so server-side destruction requests are processed. */
636 if (wl_display_roundtrip_queue(wl_egl_display->wl_display,
637 wl_egl_display->ev_queue) == -1) {
638 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
641 #if TIZEN_FEATURE_ENABLE
642 if (wl_egl_display->tss) {
643 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
644 "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
645 wl_egl_display, wl_egl_display->tss);
646 tizen_surface_shm_destroy(wl_egl_display->tss);
647 wl_egl_display->tss = NULL;
650 if (wl_egl_display->presentation) {
651 TPL_INFO("[WP_PRESENTATION_DESTROY]",
652 "wl_egl_display(%p) wp_presentation(%p) fini.",
653 wl_egl_display, wl_egl_display->presentation);
654 wp_presentation_destroy(wl_egl_display->presentation);
655 wl_egl_display->presentation = NULL;
658 if (wl_egl_display->explicit_sync) {
659 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
660 "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
661 wl_egl_display, wl_egl_display->explicit_sync);
662 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
663 wl_egl_display->explicit_sync = NULL;
666 if (wl_egl_display->wl_tbm_client) {
667 struct wl_proxy *wl_tbm = NULL;
669 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
670 wl_egl_display->wl_tbm_client);
/* Detach wl_tbm from ev_queue before the queue is destroyed below. */
672 wl_proxy_set_queue(wl_tbm, NULL);
675 TPL_INFO("[WL_TBM_DEINIT]",
676 "wl_egl_display(%p) wl_tbm_client(%p)",
677 wl_egl_display, wl_egl_display->wl_tbm_client);
678 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
679 wl_egl_display->wl_tbm_client = NULL;
682 wl_event_queue_destroy(wl_egl_display->ev_queue);
684 wl_egl_display->ev_queue = NULL;
685 wl_egl_display->wl_initialized = TPL_FALSE;
687 TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
688 wl_egl_display, wl_egl_display->wl_display);
/* Thread-start hook for wl_egl_thread: initialize the wayland side and,
 * when vblank waiting is enabled, the tdm client. A tdm failure is only a
 * warning — the backend continues without vblank waiting. Returns the
 * wl_egl_display as the thread's user data. */
692 _thread_init(void *data)
694 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
696 if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
697 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
698 wl_egl_display, wl_egl_display->wl_display);
701 if (wl_egl_display->use_wait_vblank &&
702 _thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
703 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VLANK:DISABLED");
706 return wl_egl_display;
/* gsource prepare for the wl_display fd (glib prepare/check/dispatch
 * pattern): dispatch already-queued events until we can enter read
 * intent via wl_display_prepare_read_queue, then flush requests.
 * Skips straight to dispatch when last_error is set. */
710 __thread_func_disp_prepare(tpl_gsource *gsource)
712 tpl_wl_egl_display_t *wl_egl_display =
713 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
715 /* If this wl_egl_display is already prepared,
716 * do nothing in this function. */
717 if (wl_egl_display->prepared)
720 /* If there is a last_error, there is no need to poll,
721 * so skip directly to dispatch.
722 * prepare -> dispatch */
723 if (wl_egl_display->last_error)
726 while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
727 wl_egl_display->ev_queue) != 0) {
728 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
729 wl_egl_display->ev_queue) == -1) {
730 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
/* Read intent acquired; must be paired with read_events/cancel_read. */
734 wl_egl_display->prepared = TPL_TRUE;
736 wl_display_flush(wl_egl_display->wl_display);
/* gsource check for the wl_display fd: complete the prepared read —
 * read_events when the fd is readable, cancel_read otherwise or when a
 * last_error has been recorded. Clears the prepared flag either way. */
742 __thread_func_disp_check(tpl_gsource *gsource)
744 tpl_wl_egl_display_t *wl_egl_display =
745 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
746 tpl_bool_t ret = TPL_FALSE;
748 if (!wl_egl_display->prepared)
751 /* If prepared, but last_error is set,
752 * cancel_read is executed and FALSE is returned.
753 * That can lead to G_SOURCE_REMOVE by calling disp_prepare again
754 * and skipping disp_check from prepare to disp_dispatch.
755 * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
756 if (wl_egl_display->prepared && wl_egl_display->last_error) {
757 wl_display_cancel_read(wl_egl_display->wl_display);
761 if (tpl_gsource_check_io_condition(gsource)) {
762 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
763 _wl_display_print_err(wl_egl_display, "read_event");
/* Not readable: release the read intent taken in prepare. */
766 wl_display_cancel_read(wl_egl_display->wl_display);
770 wl_egl_display->prepared = TPL_FALSE;
/* gsource dispatch for the wl_display fd: under wl_event_mutex, dispatch
 * the events read in check and flush outgoing requests. Returns
 * SOURCE_REMOVE (in elided code) when last_error invalidated the display. */
776 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
778 tpl_wl_egl_display_t *wl_egl_display =
779 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
783 /* If there is last_error, SOURCE_REMOVE should be returned
784 * to remove the gsource from the main loop.
785 * This is because wl_egl_display is not valid since last_error was set.*/
786 if (wl_egl_display->last_error) {
/* Serialize dispatching against API-thread users of ev_queue. */
790 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
791 if (tpl_gsource_check_io_condition(gsource)) {
792 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
793 wl_egl_display->ev_queue) == -1) {
794 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
798 wl_display_flush(wl_egl_display->wl_display);
799 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* gsource finalize for the display source: undo wayland initialization
 * if it completed (runs on wl_egl_thread). */
805 __thread_func_disp_finalize(tpl_gsource *gsource)
807 tpl_wl_egl_display_t *wl_egl_display =
808 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
810 if (wl_egl_display->wl_initialized)
811 _thread_wl_display_fini(wl_egl_display);
813 TPL_LOG_T("WL_EGL", "finalize| wl_egl_display(%p) tpl_gsource(%p)",
814 wl_egl_display, gsource);
/* gsource vtable for the wl_display fd source. */
820 static tpl_gsource_functions disp_funcs = {
821 .prepare = __thread_func_disp_prepare,
822 .check = __thread_func_disp_check,
823 .dispatch = __thread_func_disp_dispatch,
824 .finalize = __thread_func_disp_finalize,
/* Backend entry: validate the native wl_display, allocate the backend
 * state, spawn wl_egl_thread (which runs _thread_init), attach the
 * display gsource and, when tdm init succeeded, the tdm gsource.
 * TPL_WAIT_VBLANK=0 in the environment disables vblank waiting.
 * On failure, tears down sources/thread and frees the allocation. */
828 __tpl_wl_egl_display_init(tpl_display_t *display)
830 tpl_wl_egl_display_t *wl_egl_display = NULL;
834 /* Do not allow default display in wayland. */
835 if (!display->native_handle) {
836 TPL_ERR("Invalid native handle for display.");
837 return TPL_ERROR_INVALID_PARAMETER;
840 if (!_check_native_handle_is_wl_display(display->native_handle)) {
841 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
842 return TPL_ERROR_INVALID_PARAMETER;
845 wl_egl_display = (tpl_wl_egl_display_t *) calloc(1,
846 sizeof(tpl_wl_egl_display_t));
847 if (!wl_egl_display) {
848 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
849 return TPL_ERROR_OUT_OF_MEMORY;
852 display->backend.data = wl_egl_display;
853 display->bufmgr_fd = -1;
/* Explicit defaults (calloc already zeroed, but kept for clarity). */
855 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
856 wl_egl_display->tdm.tdm_client = NULL;
857 wl_egl_display->tdm.tdm_display_fd = -1;
858 wl_egl_display->tdm.tdm_source = NULL;
860 wl_egl_display->wl_initialized = TPL_FALSE;
862 wl_egl_display->ev_queue = NULL;
863 wl_egl_display->wl_display = (struct wl_display *)display->native_handle;
864 wl_egl_display->last_error = 0;
865 wl_egl_display->use_tss = TPL_FALSE;
866 wl_egl_display->use_explicit_sync = TPL_FALSE; // default disabled
867 wl_egl_display->prepared = TPL_FALSE;
869 #if TIZEN_FEATURE_ENABLE
870 /* Wayland Interfaces */
871 wl_egl_display->tss = NULL;
872 wl_egl_display->presentation = NULL;
873 wl_egl_display->explicit_sync = NULL;
875 wl_egl_display->wl_tbm_client = NULL;
877 wl_egl_display->use_wait_vblank = TPL_TRUE; // default enabled
/* Allow opt-out of vblank waiting via environment. */
879 char *env = tpl_getenv("TPL_WAIT_VBLANK");
880 if (env && !atoi(env)) {
881 wl_egl_display->use_wait_vblank = TPL_FALSE;
885 tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
/* _thread_init runs on the new thread before this call returns. */
888 wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
889 (tpl_gthread_func)_thread_init,
890 (void *)wl_egl_display);
891 if (!wl_egl_display->thread) {
892 TPL_ERR("Failed to create wl_egl_thread");
896 wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
897 (void *)wl_egl_display,
898 wl_display_get_fd(wl_egl_display->wl_display),
899 &disp_funcs, SOURCE_TYPE_NORMAL);
900 if (!wl_egl_display->disp_source) {
901 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
902 display->native_handle,
903 wl_egl_display->thread);
907 if (wl_egl_display->use_wait_vblank &&
908 wl_egl_display->tdm.tdm_initialized) {
909 wl_egl_display->tdm.tdm_source = tpl_gsource_create(wl_egl_display->thread,
910 (void *)wl_egl_display,
911 wl_egl_display->tdm.tdm_display_fd,
912 &tdm_funcs, SOURCE_TYPE_NORMAL);
913 if (!wl_egl_display->tdm.tdm_source) {
914 TPL_ERR("Failed to create tdm_gsource\n");
/* Vblank waiting is usable only when both init and source exist. */
919 wl_egl_display->use_wait_vblank = (wl_egl_display->tdm.tdm_initialized &&
920 (wl_egl_display->tdm.tdm_source != NULL));
922 TPL_INFO("[DISPLAY_INIT]",
923 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
925 wl_egl_display->thread,
926 wl_egl_display->wl_display);
928 TPL_INFO("[DISPLAY_INIT]",
929 "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
930 wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
931 wl_egl_display->use_tss ? "TRUE" : "FALSE",
932 wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
934 return TPL_ERROR_NONE;
/* Error path (label elided): destroy sources, thread, then free. */
937 if (wl_egl_display->thread) {
938 if (wl_egl_display->tdm.tdm_source)
939 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
940 if (wl_egl_display->disp_source)
941 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
943 tpl_gthread_destroy(wl_egl_display->thread);
946 wl_egl_display->thread = NULL;
947 free(wl_egl_display);
949 display->backend.data = NULL;
950 return TPL_ERROR_INVALID_OPERATION;
/* Backend display teardown: destroy the tdm and display gsources (waiting
 * for finalize — TPL_TRUE), stop wl_egl_thread, then release the mutex
 * and the backend allocation. */
954 __tpl_wl_egl_display_fini(tpl_display_t *display)
956 tpl_wl_egl_display_t *wl_egl_display;
960 wl_egl_display = (tpl_wl_egl_display_t *)display->backend.data;
961 if (wl_egl_display) {
962 TPL_INFO("[DISPLAY_FINI]",
963 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
965 wl_egl_display->thread,
966 wl_egl_display->wl_display);
968 if (wl_egl_display->tdm.tdm_source && wl_egl_display->tdm.tdm_initialized) {
969 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
970 wl_egl_display->tdm.tdm_source = NULL;
973 if (wl_egl_display->disp_source) {
974 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
975 wl_egl_display->disp_source = NULL;
978 if (wl_egl_display->thread) {
979 tpl_gthread_destroy(wl_egl_display->thread);
980 wl_egl_display->thread = NULL;
983 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
985 free(wl_egl_display);
988 display->backend.data = NULL;
/* Report supported window configs: 8/8/8 RGB at 24- or 32-bit depth,
 * mapping alpha 8 -> TBM_FORMAT_ARGB8888 and alpha 0 -> TBM_FORMAT_XRGB8888;
 * anything else is rejected with TPL_ERROR_INVALID_PARAMETER. */
992 __tpl_wl_egl_display_query_config(tpl_display_t *display,
993 tpl_surface_type_t surface_type,
994 int red_size, int green_size,
995 int blue_size, int alpha_size,
996 int color_depth, int *native_visual_id,
1001 if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
1002 green_size == 8 && blue_size == 8 &&
1003 (color_depth == 32 || color_depth == 24)) {
1005 if (alpha_size == 8) {
1006 if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
1007 if (is_slow) *is_slow = TPL_FALSE;
1008 return TPL_ERROR_NONE;
1010 if (alpha_size == 0) {
1011 if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
1012 if (is_slow) *is_slow = TPL_FALSE;
1013 return TPL_ERROR_NONE;
1017 return TPL_ERROR_INVALID_PARAMETER;
/* Config filtering hook — intentionally a pass-through for this backend;
 * all parameters are ignored and every config is accepted. */
1021 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
1024 TPL_IGNORE(display);
1025 TPL_IGNORE(visual_id);
1026 TPL_IGNORE(alpha_size);
1027 return TPL_ERROR_NONE;
/* Query size/format of a native wl_egl_window. Size comes straight from
 * the window; format comes from the bound wl_egl_surface when one exists,
 * otherwise it is derived (elided lines) — presumably from a_size:
 * ARGB8888 when alpha is requested, XRGB8888 otherwise. TODO confirm. */
1031 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
1032 tpl_handle_t window, int *width,
1033 int *height, tbm_format *format,
1034 int depth, int a_size)
1036 tpl_result_t ret = TPL_ERROR_NONE;
1037 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
1039 TPL_ASSERT(display);
1042 if (!wl_egl_window) {
1043 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
1044 return TPL_ERROR_INVALID_PARAMETER;
1047 if (width) *width = wl_egl_window->width;
1048 if (height) *height = wl_egl_window->height;
1050 struct tizen_private *tizen_private =
1051 (struct tizen_private *)wl_egl_window->driver_private;
1052 if (tizen_private && tizen_private->data) {
1053 tpl_wl_egl_surface_t *wl_egl_surface =
1054 (tpl_wl_egl_surface_t *)tizen_private->data;
1055 *format = wl_egl_surface->format;
1058 *format = TBM_FORMAT_ARGB8888;
1060 *format = TBM_FORMAT_XRGB8888;
/* Query size/format of a native pixmap by resolving its tbm_surface via
 * wayland-tbm-server; fails when the resource is not a tbm surface. */
1068 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
1069 tpl_handle_t pixmap, int *width,
1070 int *height, tbm_format *format)
1072 tbm_surface_h tbm_surface = NULL;
1075 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
1076 return TPL_ERROR_INVALID_PARAMETER;
1079 tbm_surface = wayland_tbm_server_get_surface(NULL,
1080 (struct wl_resource *)pixmap);
1082 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
1083 return TPL_ERROR_INVALID_PARAMETER;
1086 if (width) *width = tbm_surface_get_width(tbm_surface);
1087 if (height) *height = tbm_surface_get_height(tbm_surface);
1088 if (format) *format = tbm_surface_get_format(tbm_surface);
1090 return TPL_ERROR_NONE;
/* Resolve a native pixmap resource to its backing tbm_surface
 * (returned without an extra reference; NULL path elided). */
1093 static tbm_surface_h
1094 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
1096 tbm_surface_h tbm_surface = NULL;
1100 tbm_surface = wayland_tbm_server_get_surface(NULL,
1101 (struct wl_resource *)pixmap);
1103 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
/* Backend selector: same wl_display magic check as
 * _check_native_handle_is_wl_display — pointer identity with
 * wl_display_interface, falling back to an interface-name compare. */
1111 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
1113 struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1115 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1117 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
1118 is a memory address pointing the structure of wl_display_interface. */
1119 if (wl_egl_native_dpy == &wl_display_interface)
1122 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1123 strlen(wl_display_interface.name)) == 0) {
1130 /* -- BEGIN -- wl_egl_window callback functions */
/* wl_egl_window destroy hook. Normally the window outlives the EGL
 * surface; if a surface is still attached this is an abnormal teardown,
 * so under surf_mutex we detach all window/tizen_private callbacks and
 * pointers before freeing tizen_private. */
1132 __cb_destroy_callback(void *private)
1134 struct tizen_private *tizen_private = (struct tizen_private *)private;
1135 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1137 if (!tizen_private) {
1138 TPL_LOG_B("WL_EGL", "[DESTROY_CB] Already destroyed surface");
1142 wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1143 if (wl_egl_surface) {
1144 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1145 wl_egl_surface->wl_egl_window);
1146 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
1148 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1149 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1150 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1151 wl_egl_surface->wl_egl_window->driver_private = NULL;
1152 wl_egl_surface->wl_egl_window = NULL;
1153 wl_egl_surface->wl_surface = NULL;
1155 tizen_private->set_window_serial_callback = NULL;
1156 tizen_private->rotate_callback = NULL;
1157 tizen_private->get_rotation_capability = NULL;
1158 tizen_private->set_frontbuffer_callback = NULL;
1159 tizen_private->create_commit_sync_fd = NULL;
1160 tizen_private->create_presentation_sync_fd = NULL;
1161 tizen_private->data = NULL;
1163 free(tizen_private);
1164 tizen_private = NULL;
1165 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* wl_egl_window resize hook: reset the surface's tbm_surface_queue to the
 * window's new dimensions (format unchanged). */
1170 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1172 TPL_ASSERT(private);
1173 TPL_ASSERT(wl_egl_window);
1175 struct tizen_private *tizen_private = (struct tizen_private *)private;
1176 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1177 int cur_w, cur_h, req_w, req_h, format;
1179 if (!wl_egl_surface) {
1180 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1185 format = wl_egl_surface->format;
1186 cur_w = wl_egl_surface->width;
1187 cur_h = wl_egl_surface->height;
1188 req_w = wl_egl_window->width;
1189 req_h = wl_egl_window->height;
1191 TPL_INFO("[WINDOW_RESIZE]",
1192 "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1193 wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1195 if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1196 != TBM_SURFACE_QUEUE_ERROR_NONE) {
1197 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1201 /* -- END -- wl_egl_window callback functions */
1203 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1205 /* There is no usecase for using prerotation callback below */
/* wl_egl_window rotation hook: records the rotation value pushed via
 * tizen_private into the tpl surface state.
 * NOTE(review): partial listing — early-return body lines are elided. */
1207 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1209 TPL_ASSERT(private);
1210 TPL_ASSERT(wl_egl_window);
1212 struct tizen_private *tizen_private = (struct tizen_private *)private;
1213 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1214 int rotation = tizen_private->rotation;
1216 if (!wl_egl_surface) {
1217 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1222 TPL_INFO("[WINDOW_ROTATE]",
1223 "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1224 wl_egl_surface, wl_egl_window,
1225 wl_egl_surface->rotation, rotation);
/* Latch the new rotation; consumers read it when committing buffers. */
1227 wl_egl_surface->rotation = rotation;
1230 /* There is no usecase for using prerotation callback below */
/* Reports whether this surface supports pre-rotation, as one of the
 * WL_EGL_WINDOW_TIZEN_CAPABILITY_* values. Returns NONE if the surface
 * backpointer is gone. */
1232 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1235 TPL_ASSERT(private);
1236 TPL_ASSERT(wl_egl_window);
1238 int rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1239 struct tizen_private *tizen_private = (struct tizen_private *)private;
1240 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1242 if (!wl_egl_surface) {
1243 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1245 return rotation_capability;
/* prerotation_capability is toggled by
 * __tpl_wl_egl_surface_set_rotation_capability(). */
1248 if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1249 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1251 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1254 return rotation_capability;
/* Stores an application-provided serial on the surface; once set,
 * buffers use this serial instead of the auto-incrementing one
 * (see _wl_egl_buffer_init). */
1258 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1259 void *private, unsigned int serial)
1261 TPL_ASSERT(private);
1262 TPL_ASSERT(wl_egl_window);
1264 struct tizen_private *tizen_private = (struct tizen_private *)private;
1265 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1267 if (!wl_egl_surface) {
1268 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
/* Flag makes buffer init prefer this fixed serial over ++serial. */
1273 wl_egl_surface->set_serial_is_used = TPL_TRUE;
1274 wl_egl_surface->serial = serial;
/* Returns a dup'd eventfd the app can poll to learn when commits
 * happen. Creates the surface's commit_sync eventfd lazily on first
 * call; subsequent calls only dup the existing fd.
 * Caller owns the returned fd (it is a dup); -1 on failure.
 * NOTE(review): elided lines likely hold the error-path returns. */
1278 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1280 TPL_ASSERT(private);
1281 TPL_ASSERT(wl_egl_window);
1283 int commit_sync_fd = -1;
1285 struct tizen_private *tizen_private = (struct tizen_private *)private;
1286 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1288 if (!wl_egl_surface) {
1289 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
/* commit_sync.mutex guards fd creation/dup against the worker thread. */
1293 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
/* Fast path: fd already exists, just hand out a duplicate. */
1295 if (wl_egl_surface->commit_sync.fd != -1) {
1296 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1297 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1298 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1299 TPL_DEBUG("[DUP_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1300 wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1301 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1302 return commit_sync_fd;
/* Slow path: lazily create the eventfd (CLOEXEC so it doesn't leak
 * across exec), then dup it for the caller. */
1305 wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1306 if (wl_egl_surface->commit_sync.fd == -1) {
1307 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)",
1309 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1313 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1315 TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1316 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1317 TPL_DEBUG("[CREATE_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d)",
1318 wl_egl_surface, commit_sync_fd);
1320 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1322 return commit_sync_fd;
1325 #if TIZEN_FEATURE_ENABLE
/* TIZEN_FEATURE_ENABLE only: same pattern as __cb_create_commit_sync_fd
 * but for presentation feedback — lazily creates presentation_sync.fd
 * (an eventfd) and returns a dup the caller owns; -1 on failure. */
1327 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1329 TPL_ASSERT(private);
1330 TPL_ASSERT(wl_egl_window);
1332 int presentation_sync_fd = -1;
1334 struct tizen_private *tizen_private = (struct tizen_private *)private;
1335 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1337 if (!wl_egl_surface) {
1338 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
/* Serialize against the thread that signals/closes this fd on fini. */
1342 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Fast path: already created — dup and return. */
1343 if (wl_egl_surface->presentation_sync.fd != -1) {
1344 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1345 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1346 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1347 TPL_DEBUG("[DUP_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1348 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1349 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1350 return presentation_sync_fd;
/* Slow path: create the eventfd, then dup for the caller. */
1353 wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1354 if (wl_egl_surface->presentation_sync.fd == -1) {
1355 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)",
1357 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1361 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1362 TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1363 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1364 TPL_DEBUG("[CREATE_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1365 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1367 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1369 return presentation_sync_fd;
1371 /* -- END -- wl_egl_window tizen private callback functions */
1373 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
/* tizen_surface_shm_flusher "flush" event: compositor asked us to
 * flush; drop queued buffers via tbm_surface_queue_flush.
 * NOTE(review): no null check on data/tbm_queue visible here — the
 * elided lines may or may not contain one; confirm before relying. */
1374 static void __cb_tss_flusher_flush_callback(void *data,
1375 struct tizen_surface_shm_flusher *tss_flusher)
1377 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1378 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1380 TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1381 wl_egl_surface, wl_egl_surface->tbm_queue);
1383 _print_buffer_lists(wl_egl_surface);
1385 tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1386 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1387 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* tizen_surface_shm_flusher "free_flush" event: like flush, but only
 * frees the queue's unused buffers (tbm_surface_queue_free_flush). */
1392 static void __cb_tss_flusher_free_flush_callback(void *data,
1393 struct tizen_surface_shm_flusher *tss_flusher)
1395 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1396 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1398 TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1399 wl_egl_surface, wl_egl_surface->tbm_queue);
1401 _print_buffer_lists(wl_egl_surface);
1403 tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1404 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1405 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* Listener vtable for tizen_surface_shm_flusher events (order must
 * match the protocol: flush, then free_flush). */
1410 static const struct tizen_surface_shm_flusher_listener
1411 tss_flusher_listener = {
1412 __cb_tss_flusher_flush_callback,
1413 __cb_tss_flusher_free_flush_callback
1415 /* -- END -- tizen_surface_shm_flusher_listener */
1418 /* -- BEGIN -- tbm_surface_queue callback funstions */
/* tbm_surface_queue reset callback: runs when the queue is reset
 * (resize or activation change). Marks the surface "reset" so the next
 * frame picks up the new size/activation state, and forwards to the
 * app's reset_cb if registered. */
1420 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1423 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1424 tpl_wl_egl_display_t *wl_egl_display = NULL;
1425 tpl_surface_t *surface = NULL;
1426 tpl_bool_t is_activated = TPL_FALSE;
1429 wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1430 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1432 wl_egl_display = wl_egl_surface->wl_egl_display;
1433 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1435 surface = wl_egl_surface->tpl_surface;
1436 TPL_CHECK_ON_NULL_RETURN(surface);
1438 /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
1439 * the changed window size at the next frame. */
1440 width = tbm_surface_queue_get_width(tbm_queue);
1441 height = tbm_surface_queue_get_height(tbm_queue);
1442 if (surface->width != width || surface->height != height) {
1443 TPL_INFO("[QUEUE_RESIZE]",
1444 "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1445 wl_egl_surface, tbm_queue,
1446 surface->width, surface->height, width, height);
1449 /* When queue_reset_callback is called, if is_activated is different from
1450 * its previous state change the reset flag to TPL_TRUE to get a new buffer
1451 * with the changed state(ACTIVATED/DEACTIVATED) at the next frame. */
1452 is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1453 wl_egl_surface->tbm_queue);
1454 if (wl_egl_surface->is_activated != is_activated) {
1456 TPL_INFO("[ACTIVATED]",
1457 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1458 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1460 TPL_LOG_T("[DEACTIVATED]",
1461 " wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1462 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
/* reset flag is consumed by __tpl_wl_egl_surface_validate(). */
1466 wl_egl_surface->reset = TPL_TRUE;
1468 if (surface->reset_cb)
1469 surface->reset_cb(surface->reset_data);
/* tbm_surface_queue acquirable callback: a buffer became acquirable;
 * wake the surface worker thread with an ACQUIRABLE message so it can
 * acquire and commit. Skips sending if a message is already pending
 * (sent_message != NONE_MESSAGE) to avoid duplicate wakeups. */
1473 __cb_tbm_queue_acquirable_callback(tbm_surface_queue_h tbm_queue,
1476 TPL_IGNORE(tbm_queue);
1478 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1479 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
/* surf_mutex guards sent_message against the dispatch thread. */
1481 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1482 if (wl_egl_surface->sent_message == NONE_MESSAGE) {
1483 wl_egl_surface->sent_message = ACQUIRABLE;
1484 tpl_gsource_send_message(wl_egl_surface->surf_source,
1485 wl_egl_surface->sent_message);
1487 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1489 /* -- END -- tbm_surface_queue callback funstions */
/* Worker-thread-side teardown of a wl_egl_surface. Runs under
 * surf_mutex and releases, in order: pending presentation feedbacks
 * and their sync fds, the explicit-sync surface_sync, the shm flusher,
 * the tbm_queue, and the per-surface vblank bookkeeping. */
1492 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1494 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1496 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1498 TPL_INFO("[SURFACE_FINI]",
1499 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1500 wl_egl_surface, wl_egl_surface->wl_egl_window,
1501 wl_egl_surface->wl_surface);
1502 #if TIZEN_FEATURE_ENABLE
1503 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Drain outstanding presentation feedbacks: signal each waiter's
 * eventfd so pollers unblock, then destroy the wl proxy. */
1505 if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1506 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1507 struct pst_feedback *pst_feedback =
1508 (struct pst_feedback *)__tpl_list_pop_front(
1509 wl_egl_surface->presentation_feedbacks, NULL);
1511 _write_to_eventfd(pst_feedback->pst_sync_fd);
1512 close(pst_feedback->pst_sync_fd);
1513 pst_feedback->pst_sync_fd = -1;
1515 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1516 pst_feedback->presentation_feedback = NULL;
1522 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1523 wl_egl_surface->presentation_feedbacks = NULL;
/* Signal-then-close the shared presentation_sync eventfd itself. */
1526 if (wl_egl_surface->presentation_sync.fd != -1) {
1527 _write_to_eventfd(wl_egl_surface->presentation_sync.fd);
1528 close(wl_egl_surface->presentation_sync.fd);
1529 wl_egl_surface->presentation_sync.fd = -1;
1532 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1534 if (wl_egl_surface->surface_sync) {
1535 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1536 "wl_egl_surface(%p) surface_sync(%p)",
1537 wl_egl_surface, wl_egl_surface->surface_sync);
1538 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1539 wl_egl_surface->surface_sync = NULL;
1542 if (wl_egl_surface->tss_flusher) {
1543 TPL_INFO("[FLUSHER_DESTROY]",
1544 "wl_egl_surface(%p) tss_flusher(%p)",
1545 wl_egl_surface, wl_egl_surface->tss_flusher);
1546 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1547 wl_egl_surface->tss_flusher = NULL;
1551 if (wl_egl_surface->tbm_queue) {
1552 TPL_INFO("[TBM_QUEUE_DESTROY]",
1553 "wl_egl_surface(%p) tbm_queue(%p)",
1554 wl_egl_surface, wl_egl_surface->tbm_queue);
1555 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1556 wl_egl_surface->tbm_queue = NULL;
/* Free buffers still waiting on vblank, then detach this surface's
 * vblank entry from the display-wide list (entry itself is destroyed
 * by __cb_surface_vblank_free via __tpl_list_remove_data). */
1559 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
1560 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
1561 __tpl_list_free(wl_egl_surface->vblank->waiting_buffers, NULL);
1562 wl_egl_surface->vblank->waiting_buffers = NULL;
1563 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
1566 if (wl_egl_surface->vblank) {
1567 __tpl_list_remove_data(wl_egl_display->tdm.surface_vblanks,
1568 (void *)wl_egl_surface->vblank,
1570 __cb_surface_vblank_free);
1571 wl_egl_surface->vblank = NULL;
1574 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource dispatch for the surface worker: handles INIT_SURFACE
 * (init in-thread, then signal the waiting caller) and ACQUIRABLE
 * (acquire/commit queued buffers). Clears sent_message when done so
 * the acquirable callback can send again. */
1578 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1580 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1582 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1584 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1585 if (message == INIT_SURFACE) { /* Initialize surface */
1586 TPL_DEBUG("wl_egl_surface(%p) initialize message received!",
1588 _thread_wl_egl_surface_init(wl_egl_surface);
/* Wakes __tpl_wl_egl_surface_init() blocked in tpl_gcond_wait. */
1589 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1590 } else if (message == ACQUIRABLE) { /* Acquirable */
1591 TPL_DEBUG("wl_egl_surface(%p) acquirable message received!",
1593 _thread_surface_queue_acquire(wl_egl_surface);
1596 wl_egl_surface->sent_message = NONE_MESSAGE;
1598 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource finalize for the surface worker: runs the in-thread surface
 * teardown when the gsource is destroyed. */
1604 __thread_func_surf_finalize(tpl_gsource *gsource)
1606 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1608 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1609 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1611 _thread_wl_egl_surface_fini(wl_egl_surface);
1613 TPL_DEBUG("[FINALIZE] wl_egl_surface(%p) tpl_gsource(%p)",
1614 wl_egl_surface, gsource);
/* gsource vtable for per-surface worker sources. */
1617 static tpl_gsource_functions surf_funcs = {
1620 .dispatch = __thread_func_surf_dispatch,
1621 .finalize = __thread_func_surf_finalize,
/* Backend entry: initialize a TPL window surface over a wl_egl_window.
 * Allocates the backend state, creates the worker gsource, wires the
 * wl_egl_window/tizen_private callbacks, then sends INIT_SURFACE to the
 * worker thread and blocks until the in-thread init (tbm_queue etc.)
 * completes. Returns TPL_ERROR_NONE on success.
 * NOTE(review): wl_egl_window is dereferenced from surface->native_handle
 * before the TPL_ASSERTs run in this listing — line ordering as shown. */
1625 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1627 tpl_wl_egl_display_t *wl_egl_display = NULL;
1628 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1629 tpl_gsource *surf_source = NULL;
1631 struct wl_egl_window *wl_egl_window =
1632 (struct wl_egl_window *)surface->native_handle;
1634 TPL_ASSERT(surface);
1635 TPL_ASSERT(surface->display);
1636 TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1637 TPL_ASSERT(surface->native_handle);
1640 (tpl_wl_egl_display_t *)surface->display->backend.data;
1641 if (!wl_egl_display) {
1642 TPL_ERR("Invalid parameter. wl_egl_display(%p)",
1644 return TPL_ERROR_INVALID_PARAMETER;
1647 wl_egl_surface = (tpl_wl_egl_surface_t *) calloc(1,
1648 sizeof(tpl_wl_egl_surface_t));
1649 if (!wl_egl_surface) {
1650 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1651 return TPL_ERROR_OUT_OF_MEMORY;
/* Per-surface worker source on the display's thread; messages are
 * dispatched by __thread_func_surf_dispatch. */
1654 surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1655 -1, &surf_funcs, SOURCE_TYPE_NORMAL);
1657 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1659 goto surf_source_create_fail;
1662 surface->backend.data = (void *)wl_egl_surface;
1663 surface->width = wl_egl_window->width;
1664 surface->height = wl_egl_window->height;
1665 surface->rotation = 0;
1667 wl_egl_surface->tpl_surface = surface;
1668 wl_egl_surface->width = wl_egl_window->width;
1669 wl_egl_surface->height = wl_egl_window->height;
1670 wl_egl_surface->format = surface->format;
1671 wl_egl_surface->num_buffers = surface->num_buffers;
1673 wl_egl_surface->surf_source = surf_source;
1674 wl_egl_surface->wl_egl_window = wl_egl_window;
1675 wl_egl_surface->wl_surface = wl_egl_window->surface;
1677 wl_egl_surface->wl_egl_display = wl_egl_display;
/* Default flags/state; see struct field comments for semantics. */
1679 wl_egl_surface->reset = TPL_FALSE;
1680 wl_egl_surface->is_activated = TPL_FALSE;
1681 wl_egl_surface->need_to_enqueue = TPL_TRUE;
1682 wl_egl_surface->prerotation_capability = TPL_FALSE;
1683 wl_egl_surface->vblank_done = TPL_TRUE;
1684 wl_egl_surface->use_render_done_fence = TPL_FALSE;
1685 wl_egl_surface->set_serial_is_used = TPL_FALSE;
1687 wl_egl_surface->latest_transform = -1;
1688 wl_egl_surface->render_done_cnt = 0;
1689 wl_egl_surface->serial = 0;
1691 wl_egl_surface->vblank = NULL;
1692 #if TIZEN_FEATURE_ENABLE
1693 wl_egl_surface->tss_flusher = NULL;
1694 wl_egl_surface->surface_sync = NULL;
1697 wl_egl_surface->post_interval = surface->post_interval;
1699 wl_egl_surface->commit_sync.fd = -1;
1700 wl_egl_surface->presentation_sync.fd = -1;
1702 wl_egl_surface->sent_message = NONE_MESSAGE;
1706 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
1707 wl_egl_surface->buffers[i] = NULL;
1708 wl_egl_surface->buffer_cnt = 0;
1711 wl_egl_surface->last_deq_buffer = NULL;
/* Reuse an existing tizen_private (window may have been used before)
 * or create a fresh one, then install our callbacks on it. */
1714 struct tizen_private *tizen_private = NULL;
1716 if (wl_egl_window->driver_private)
1717 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1719 tizen_private = tizen_private_create();
1720 wl_egl_window->driver_private = (void *)tizen_private;
1723 if (tizen_private) {
1724 tizen_private->data = (void *)wl_egl_surface;
1725 tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1726 tizen_private->get_rotation_capability = (void *)
1727 __cb_get_rotation_capability;
1728 tizen_private->set_window_serial_callback = (void *)
1729 __cb_set_window_serial_callback;
1730 tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1731 #if TIZEN_FEATURE_ENABLE
1732 tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1734 tizen_private->create_presentation_sync_fd = NULL;
1737 wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1738 wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1742 tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1743 tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1745 tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1747 tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1748 tpl_gcond_init(&wl_egl_surface->surf_cond);
1750 /* Initialize in thread */
/* Hand off to the worker thread and block until it signals surf_cond
 * from __thread_func_surf_dispatch(INIT_SURFACE). */
1751 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1752 wl_egl_surface->sent_message = INIT_SURFACE;
1753 tpl_gsource_send_message(wl_egl_surface->surf_source,
1754 wl_egl_surface->sent_message);
1755 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1756 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1758 TPL_ASSERT(wl_egl_surface->tbm_queue);
1760 TPL_INFO("[SURFACE_INIT]",
1761 "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1762 surface, wl_egl_surface, wl_egl_surface->surf_source);
1764 return TPL_ERROR_NONE;
1766 surf_source_create_fail:
1767 free(wl_egl_surface);
1768 surface->backend.data = NULL;
1769 return TPL_ERROR_INVALID_OPERATION;
/* Worker-thread helper: create the surface's tbm_surface_queue via
 * wayland-tbm, choosing the tiled variant when the buffer manager
 * reports TBM_BUFMGR_CAPABILITY_TILED_MEMORY. Also sets
 * GUARANTEE_CYCLE mode and registers the reset/acquirable callbacks.
 * Returns the queue, or NULL on failure (elided lines carry the
 * NULL returns). */
1772 static tbm_surface_queue_h
1773 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1774 struct wayland_tbm_client *wl_tbm_client,
1777 tbm_surface_queue_h tbm_queue = NULL;
1778 tbm_bufmgr bufmgr = NULL;
1779 unsigned int capability;
1781 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1782 int width = wl_egl_surface->width;
1783 int height = wl_egl_surface->height;
1784 int format = wl_egl_surface->format;
1786 if (!wl_tbm_client || !wl_surface) {
1787 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1788 wl_tbm_client, wl_surface);
/* Probe bufmgr capability once just to pick the queue flavor. */
1792 bufmgr = tbm_bufmgr_init(-1);
1793 capability = tbm_bufmgr_get_capability(bufmgr);
1794 tbm_bufmgr_deinit(bufmgr);
1796 if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1797 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1805 tbm_queue = wayland_tbm_client_create_surface_queue(
1815 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
/* GUARANTEE_CYCLE keeps buffers rotating in a fixed cycle. */
1820 if (tbm_surface_queue_set_modes(
1821 tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1822 TBM_SURFACE_QUEUE_ERROR_NONE) {
1823 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1825 tbm_surface_queue_destroy(tbm_queue);
1829 if (tbm_surface_queue_add_reset_cb(
1831 __cb_tbm_queue_reset_callback,
1832 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1833 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1835 tbm_surface_queue_destroy(tbm_queue);
1839 if (tbm_surface_queue_add_acquirable_cb(
1841 __cb_tbm_queue_acquirable_callback,
1842 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1843 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1845 tbm_surface_queue_destroy(tbm_queue);
/* Creates a tdm_client_vblank on the "primary" output, with fake
 * vblank enabled (events still fire when display is off) and async
 * (non-sync) delivery. Returns NULL on failure. */
1852 static tdm_client_vblank*
1853 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1855 tdm_client_vblank *tdm_vblank = NULL;
1856 tdm_client_output *tdm_output = NULL;
1857 tdm_error tdm_err = TDM_ERROR_NONE;
1860 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1864 tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1865 if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1866 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1870 tdm_vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1871 if (!tdm_vblank || tdm_err != TDM_ERROR_NONE) {
1872 TPL_ERR("Failed to create tdm_vblank. tdm_err(%d)", tdm_err);
1876 tdm_client_vblank_set_enable_fake(tdm_vblank, 1);
1877 tdm_client_vblank_set_sync(tdm_vblank, 0);
/* List destructor for a tpl_surface_vblank_t entry (used by
 * __tpl_list_remove_data/free): destroys the tdm_vblank, clears the
 * entry's mutex and detaches it from its owning surface. */
1883 __cb_surface_vblank_free(void *data)
1885 TPL_CHECK_ON_NULL_RETURN(data);
1887 tpl_surface_vblank_t *vblank = (tpl_surface_vblank_t *)data;
1888 tpl_wl_egl_surface_t *wl_egl_surface = vblank->wl_egl_surface;
1890 TPL_INFO("[VBLANK_DESTROY]",
1891 "wl_egl_surface(%p) surface_vblank(%p) tdm_vblank(%p)",
1892 wl_egl_surface, vblank,
1893 vblank->tdm_vblank);
1895 tdm_client_vblank_destroy(vblank->tdm_vblank);
1896 vblank->tdm_vblank = NULL;
1897 vblank->wl_egl_surface = NULL;
1898 tpl_gmutex_clear(&vblank->mutex);
/* Also clear the surface's backpointer so it won't touch freed state. */
1902 wl_egl_surface->vblank = NULL;
/* Worker-thread-side surface init (runs from INIT_SURFACE dispatch):
 * creates the tbm_queue, optional per-surface vblank bookkeeping, the
 * shm flusher, explicit-sync object, and the presentation-feedback
 * list. Callers are unblocked via surf_cond after this returns.
 * NOTE(review): elided line 1935 may hold the calloc NULL check for
 * vblank — confirm in the full source. */
1906 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1908 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1909 tpl_surface_vblank_t *vblank = NULL;
1911 wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
1913 wl_egl_display->wl_tbm_client,
1914 wl_egl_surface->num_buffers);
1915 if (!wl_egl_surface->tbm_queue) {
1916 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
1917 wl_egl_surface, wl_egl_display->wl_tbm_client);
1921 TPL_INFO("[QUEUE_CREATION]",
1922 "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
1923 wl_egl_surface, wl_egl_surface->wl_surface,
1924 wl_egl_display->wl_tbm_client);
1925 TPL_INFO("[QUEUE_CREATION]",
1926 "tbm_queue(%p) size(%d x %d) X %d format(%d)",
1927 wl_egl_surface->tbm_queue,
1928 wl_egl_surface->width,
1929 wl_egl_surface->height,
1930 wl_egl_surface->num_buffers,
1931 wl_egl_surface->format);
/* Optional vblank throttling: one tdm_vblank per surface, tracked on
 * the display-wide surface_vblanks list for tdm event dispatch. */
1933 if (wl_egl_display->use_wait_vblank) {
1934 vblank = (tpl_surface_vblank_t *)calloc(1, sizeof(tpl_surface_vblank_t));
1936 vblank->tdm_vblank = _thread_create_tdm_client_vblank(
1937 wl_egl_display->tdm.tdm_client);
1938 if (!vblank->tdm_vblank) {
1939 TPL_ERR("Failed to create tdm_vblank from tdm_client(%p)",
1940 wl_egl_display->tdm.tdm_client);
1944 vblank->waiting_buffers = __tpl_list_alloc();
1945 vblank->wl_egl_surface = wl_egl_surface;
1946 tpl_gmutex_init(&vblank->mutex);
1948 __tpl_list_push_back(wl_egl_display->tdm.surface_vblanks,
1951 TPL_INFO("[VBLANK_INIT]",
1952 "wl_egl_surface(%p) tdm_client(%p) tdm_vblank(%p)",
1953 wl_egl_surface, wl_egl_display->tdm.tdm_client,
1954 vblank->tdm_vblank);
1959 wl_egl_surface->vblank = vblank;
1960 #if TIZEN_FEATURE_ENABLE
/* Compositor-driven buffer flushing (tizen_surface_shm). */
1961 if (wl_egl_display->tss) {
1962 wl_egl_surface->tss_flusher =
1963 tizen_surface_shm_get_flusher(wl_egl_display->tss,
1964 wl_egl_surface->wl_surface);
1967 if (wl_egl_surface->tss_flusher) {
1968 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
1969 &tss_flusher_listener,
1971 TPL_INFO("[FLUSHER_INIT]",
1972 "wl_egl_surface(%p) tss_flusher(%p)",
1973 wl_egl_surface, wl_egl_surface->tss_flusher);
/* Explicit fence sync; on failure the feature is disabled display-wide. */
1976 if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
1977 wl_egl_surface->surface_sync =
1978 zwp_linux_explicit_synchronization_v1_get_synchronization(
1979 wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
1980 if (wl_egl_surface->surface_sync) {
1981 TPL_INFO("[EXPLICIT_SYNC_INIT]",
1982 "wl_egl_surface(%p) surface_sync(%p)",
1983 wl_egl_surface, wl_egl_surface->surface_sync);
1985 TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
1987 wl_egl_display->use_explicit_sync = TPL_FALSE;
1991 wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
/* Drains every tracked wl_egl_buffer before surface teardown. For each
 * slot: if the buffer is in-flight (ENQUEUED..pre-COMMITTED) wait for
 * its state change (with timeout), then release acquired buffers back
 * to the queue and cancel-dequeue DEQUEUED ones, dropping the tbm ref.
 * NOTE(review): lock ordering is wl_event_mutex -> buffers_mutex ->
 * per-buffer mutex; keep any edits consistent with that. */
1995 _tpl_wl_egl_surface_buffer_clear(tpl_wl_egl_surface_t *wl_egl_surface)
1997 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1998 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1999 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2000 tpl_bool_t need_to_release = TPL_FALSE;
2001 tpl_bool_t need_to_cancel = TPL_FALSE;
2002 buffer_status_t status = RELEASED;
2005 while (wl_egl_surface->buffer_cnt) {
2006 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2007 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2008 wl_egl_buffer = wl_egl_surface->buffers[idx];
/* Claim the slot under buffers_mutex before inspecting the buffer. */
2010 if (wl_egl_buffer) {
2011 wl_egl_surface->buffers[idx] = NULL;
2012 wl_egl_surface->buffer_cnt--;
2014 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2015 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2020 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2022 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2024 status = wl_egl_buffer->status;
2026 TPL_DEBUG("[idx:%d] wl_egl_buffer(%p) tbm_surface(%p) status(%s)",
2028 wl_egl_buffer->tbm_surface,
2029 status_to_string[status]);
/* In-flight (enqueued but not yet committed) buffers must reach a
 * stable state before they can be released; wait with timeout,
 * dropping wl_event_mutex so the worker thread can progress. */
2031 if (status >= ENQUEUED) {
2032 tpl_bool_t need_to_wait = TPL_FALSE;
2033 tpl_result_t wait_result = TPL_ERROR_NONE;
2035 need_to_wait = (status < COMMITTED);
2038 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2039 wait_result = tpl_cond_timed_wait(&wl_egl_buffer->cond,
2040 &wl_egl_buffer->mutex,
2042 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2044 if (wait_result == TPL_ERROR_TIME_OUT)
2045 TPL_WARN("timeout occured waiting signaled. wl_egl_buffer(%p)",
2050 status = wl_egl_buffer->status; /* update status */
2052 /* ACQUIRED, WAITING_SIGNALED, WAITING_VBLANK, COMMITTED */
2053 /* It has been acquired but has not yet been released, so this
2054 * buffer must be released. */
2055 need_to_release = (status >= ACQUIRED && status <= COMMITTED);
2057 /* After dequeue, it has not been enqueued yet
2058 * so cancel_dequeue must be performed. */
2059 need_to_cancel = (status == DEQUEUED);
2061 if (need_to_release) {
2062 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2063 wl_egl_buffer->tbm_surface);
2064 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2065 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2066 wl_egl_buffer->tbm_surface, tsq_err);
2069 if (need_to_cancel) {
2070 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2071 wl_egl_buffer->tbm_surface);
2072 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2073 TPL_ERR("Failed to release tbm_surface(%p) tsq_err(%d)",
2074 wl_egl_buffer->tbm_surface, tsq_err);
2077 wl_egl_buffer->status = RELEASED;
2079 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Both release and cancel paths drop the internal tbm reference. */
2081 if (need_to_release || need_to_cancel)
2082 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2084 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Backend entry: tear down a TPL window surface. Clears all tracked
 * buffers, destroys the worker gsource (which runs the in-thread fini),
 * unhooks and frees the wl_egl_window's tizen_private, then clears the
 * surface's mutexes/cond and frees the backend struct. */
2091 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
2093 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2094 tpl_wl_egl_display_t *wl_egl_display = NULL;
2096 TPL_ASSERT(surface);
2097 TPL_ASSERT(surface->display);
2099 TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
2101 wl_egl_surface = (tpl_wl_egl_surface_t *) surface->backend.data;
2102 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
2104 wl_egl_display = wl_egl_surface->wl_egl_display;
2105 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
2107 TPL_INFO("[SURFACE_FINI][BEGIN]",
2108 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
2110 wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
2112 _tpl_wl_egl_surface_buffer_clear(wl_egl_surface);
/* Destroying the gsource triggers __thread_func_surf_finalize ->
 * _thread_wl_egl_surface_fini on the worker thread (TPL_TRUE = wait). */
2114 if (wl_egl_surface->surf_source)
2115 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
2116 wl_egl_surface->surf_source = NULL;
2118 _print_buffer_lists(wl_egl_surface);
2120 if (wl_egl_surface->wl_egl_window) {
2121 struct tizen_private *tizen_private = NULL;
2122 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2123 TPL_INFO("[WL_EGL_WINDOW_FINI]",
2124 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
2125 wl_egl_surface, wl_egl_window,
2126 wl_egl_surface->wl_surface);
/* Null every callback before freeing tizen_private so a late caller
 * through wl_egl_window cannot reach freed state. */
2127 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
2128 if (tizen_private) {
2129 tizen_private->set_window_serial_callback = NULL;
2130 tizen_private->rotate_callback = NULL;
2131 tizen_private->get_rotation_capability = NULL;
2132 tizen_private->create_presentation_sync_fd = NULL;
2133 tizen_private->create_commit_sync_fd = NULL;
2134 tizen_private->set_frontbuffer_callback = NULL;
2135 tizen_private->merge_sync_fds = NULL;
2136 tizen_private->data = NULL;
2137 free(tizen_private);
2139 wl_egl_window->driver_private = NULL;
2142 wl_egl_window->destroy_window_callback = NULL;
2143 wl_egl_window->resize_callback = NULL;
2145 wl_egl_surface->wl_egl_window = NULL;
2148 wl_egl_surface->last_deq_buffer = NULL;
2150 wl_egl_surface->wl_surface = NULL;
2151 wl_egl_surface->wl_egl_display = NULL;
2152 wl_egl_surface->tpl_surface = NULL;
/* lock/unlock before clear: ensures no other thread still holds the
 * mutex when it is destroyed. */
2154 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2155 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2156 tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
2158 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2159 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2160 tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
2162 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2163 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2164 tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
2165 tpl_gcond_clear(&wl_egl_surface->surf_cond);
2167 TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
2169 free(wl_egl_surface);
2170 surface->backend.data = NULL;
/* Backend entry: toggle pre-rotation support for this surface; the
 * value is reported back via __cb_get_rotation_capability. */
2174 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
2177 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2179 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2181 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2183 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2185 TPL_INFO("[SET_PREROTATION_CAPABILITY]",
2186 "wl_egl_surface(%p) prerotation capability set to [%s]",
2187 wl_egl_surface, (set ? "TRUE" : "FALSE"));
2189 wl_egl_surface->prerotation_capability = set;
2190 return TPL_ERROR_NONE;
/* Backend entry: set the swap/post interval used when committing
 * frames for this surface. */
2194 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
2197 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2199 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2201 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2203 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2205 TPL_INFO("[SET_POST_INTERVAL]",
2206 "wl_egl_surface(%p) post_interval(%d -> %d)",
2207 wl_egl_surface, wl_egl_surface->post_interval, post_interval);
2209 wl_egl_surface->post_interval = post_interval;
2211 return TPL_ERROR_NONE;
/* Backend entry: a surface is valid when it has NOT been reset since
 * the last frame (reset flag set by __cb_tbm_queue_reset_callback). */
2215 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
2217 tpl_bool_t retval = TPL_TRUE;
2219 TPL_ASSERT(surface);
2220 TPL_ASSERT(surface->backend.data);
2222 tpl_wl_egl_surface_t *wl_egl_surface =
2223 (tpl_wl_egl_surface_t *)surface->backend.data;
2225 retval = !(wl_egl_surface->reset);
/* Backend entry: report the current tbm_queue dimensions. Out params
 * appear individually guarded (elided lines likely hold `if (width)`
 * style checks — confirm in full source). */
2231 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2233 tpl_wl_egl_surface_t *wl_egl_surface =
2234 (tpl_wl_egl_surface_t *)surface->backend.data;
2237 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2239 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2242 #define CAN_DEQUEUE_TIMEOUT_MS 10000
/* Recovery path (used when can-dequeue times out): force-flush the
 * tbm_queue, then release any buffers the backend still tracks as
 * COMMITTED so the queue has free slots again.
 * Returns TPL_ERROR_NONE or TPL_ERROR_INVALID_OPERATION. */
2245 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2247 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2249 _print_buffer_lists(wl_egl_surface);
2251 if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2252 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2253 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2254 wl_egl_surface->tbm_queue, tsq_err);
2255 return TPL_ERROR_INVALID_OPERATION;
2260 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2261 for (i = 0; i < BUFFER_ARRAY_SIZE; i++) {
/* Snapshot the slot under buffers_mutex; status is then read without
 * the buffer mutex — NOTE(review): verify this race is acceptable in
 * the full source (may be protected by calling context). */
2262 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2263 wl_egl_buffer = wl_egl_surface->buffers[i];
2264 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2265 if (wl_egl_buffer && wl_egl_buffer->status == COMMITTED) {
2266 wl_egl_buffer->status = RELEASED;
2267 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2268 wl_egl_buffer->tbm_surface);
2269 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2270 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2271 wl_egl_buffer->tbm_surface, tsq_err);
2272 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2277 TPL_INFO("[FORCE_FLUSH]",
2278 "wl_egl_surface(%p) tbm_queue(%p)",
2279 wl_egl_surface, wl_egl_surface->tbm_queue);
2281 return TPL_ERROR_NONE;
/* Re-initialize the per-frame state of a (new or recycled) wl_egl_buffer
 * from the current wl_egl_window / tizen_private state: transforms,
 * serial, and any leftover damage rects from a previous frame. Called on
 * every dequeue via _wl_egl_buffer_create(). */
2285 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2286 tpl_wl_egl_surface_t *wl_egl_surface)
2288 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2289 struct tizen_private *tizen_private =
2290 (struct tizen_private *)wl_egl_window->driver_private;
2292 TPL_ASSERT(tizen_private);
2294 wl_egl_buffer->draw_done = TPL_FALSE;
2295 wl_egl_buffer->need_to_commit = TPL_TRUE;
2296 #if TIZEN_FEATURE_ENABLE
2297 wl_egl_buffer->buffer_release = NULL;
2299 wl_egl_buffer->transform = tizen_private->transform;
/* Window transform changed since last frame -> flag so commit sends the
 * new buffer transform to wayland-tbm (see _thread_wl_surface_commit). */
2301 if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2302 wl_egl_buffer->w_transform = tizen_private->window_transform;
2303 wl_egl_buffer->w_rotated = TPL_TRUE;
/* Serial: either the app-provided one or a fresh monotonically
 * increasing value from tizen_private. */
2306 if (wl_egl_surface->set_serial_is_used) {
2307 wl_egl_buffer->serial = wl_egl_surface->serial;
2309 wl_egl_buffer->serial = ++tizen_private->serial;
/* Discard stale damage-rect info carried over from the previous use. */
2312 if (wl_egl_buffer->rects) {
2313 free(wl_egl_buffer->rects);
2314 wl_egl_buffer->rects = NULL;
2315 wl_egl_buffer->num_rects = 0;
/* Look up the wl_egl_buffer attached to a tbm_surface via the
 * KEY_WL_EGL_BUFFER user-data slot. Returns NULL when no wl_egl_buffer
 * has been attached yet (see _wl_egl_buffer_create). */
2319 static tpl_wl_egl_buffer_t *
2320 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2322 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2323 tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2324 (void **)&wl_egl_buffer);
2325 return wl_egl_buffer;
/* Get-or-create the wl_egl_buffer wrapper for a freshly dequeued
 * tbm_surface. On first sight of a tbm_surface, allocates the wrapper,
 * attaches it as tbm user data (freed via __cb_wl_egl_buffer_free), and
 * registers it in wl_egl_surface->buffers[]. In all cases the per-frame
 * state is refreshed through _wl_egl_buffer_init().
 * Returns NULL only on allocation failure. */
2328 static tpl_wl_egl_buffer_t *
2329 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2330 tbm_surface_h tbm_surface)
2332 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2333 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2335 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2337 if (!wl_egl_buffer) {
2338 wl_egl_buffer = (tpl_wl_egl_buffer_t *)calloc(1, sizeof(tpl_wl_egl_buffer_t));
2339 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
/* Attach wrapper to the tbm_surface so later acquire/release callbacks
 * can find it via _get_wl_egl_buffer(). */
2341 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2342 (tbm_data_free)__cb_wl_egl_buffer_free);
2343 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2346 wl_egl_buffer->wl_buffer = NULL;
2347 wl_egl_buffer->tbm_surface = tbm_surface;
2348 wl_egl_buffer->bo_name = _get_tbm_surface_bo_name(tbm_surface);
2349 wl_egl_buffer->wl_egl_surface = wl_egl_surface;
2351 wl_egl_buffer->status = RELEASED;
/* All fence/sync fds start unset (-1). */
2353 wl_egl_buffer->acquire_fence_fd = -1;
2354 wl_egl_buffer->commit_sync_fd = -1;
2355 wl_egl_buffer->presentation_sync_fd = -1;
2356 wl_egl_buffer->release_fence_fd = -1;
2358 wl_egl_buffer->dx = wl_egl_window->dx;
2359 wl_egl_buffer->dy = wl_egl_window->dy;
2360 wl_egl_buffer->width = tbm_surface_get_width(tbm_surface);
2361 wl_egl_buffer->height = tbm_surface_get_height(tbm_surface);
/* -1 forces the first _wl_egl_buffer_init() to treat the window
 * transform as changed. */
2363 wl_egl_buffer->w_transform = -1;
2365 tpl_gmutex_init(&wl_egl_buffer->mutex);
2366 tpl_gcond_init(&wl_egl_buffer->cond);
2368 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
/* Find the first free slot in the fixed-size tracking array. */
2371 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
2372 if (wl_egl_surface->buffers[i] == NULL) break;
2374 /* If this exception is reached,
2375 * it may be a critical memory leak problem. */
2376 if (i == BUFFER_ARRAY_SIZE) {
2377 tpl_wl_egl_buffer_t *evicted_buffer = NULL;
2378 int evicted_idx = 0; /* evict the frontmost buffer */
2380 evicted_buffer = wl_egl_surface->buffers[evicted_idx];
2382 TPL_WARN("wl_egl_surface(%p) buffers array is full. evict one.",
2384 TPL_WARN("evicted buffer (%p) tbm_surface(%p) status(%s)",
2385 evicted_buffer, evicted_buffer->tbm_surface,
2386 status_to_string[evicted_buffer->status]);
2388 /* [TODO] need to think about whether there will be
2389 * better modifications */
2390 wl_egl_surface->buffer_cnt--;
2391 wl_egl_surface->buffers[evicted_idx] = NULL;
2396 wl_egl_surface->buffer_cnt++;
2397 wl_egl_surface->buffers[i] = wl_egl_buffer;
2398 wl_egl_buffer->idx = i;
2400 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2402 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2403 "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2404 wl_egl_surface, wl_egl_buffer, tbm_surface,
2405 wl_egl_buffer->bo_name);
/* Refresh per-frame state for both new and recycled wrappers. */
2408 _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2410 return wl_egl_buffer;
/* Backend dequeue entry point (called from EGL when the client needs a
 * new render target).
 * Flow:
 *   1. If the queue was reset while a buffer was in flight, wait (with
 *      timeout) for that last dequeued buffer to settle.
 *   2. Wait until the tbm queue is dequeue-able (CAN_DEQUEUE_TIMEOUT_MS);
 *      on timeout, force-flush the queue and retry once.
 *   3. Query ACTIVATED/DEACTIVATED state, refresh cached sizes, handle
 *      the frontbuffer fast path, then dequeue and wrap the tbm_surface.
 * Returns the dequeued tbm_surface (referenced), or NULL on failure.
 * If release_fence is non-NULL it receives the explicit-sync release
 * fence fd (or -1 when the buffer is immediately usable).
 * NOTE(review): several original lines (timeout argument, some braces,
 * returns) are elided from this extraction; code kept byte-identical. */
2413 static tbm_surface_h
2414 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2415 int32_t *release_fence)
2417 TPL_ASSERT(surface);
2418 TPL_ASSERT(surface->backend.data);
2419 TPL_ASSERT(surface->display);
2420 TPL_ASSERT(surface->display->backend.data);
2421 TPL_OBJECT_CHECK_RETURN(surface, NULL);
2423 tpl_wl_egl_surface_t *wl_egl_surface =
2424 (tpl_wl_egl_surface_t *)surface->backend.data;
2425 tpl_wl_egl_display_t *wl_egl_display =
2426 (tpl_wl_egl_display_t *)surface->display->backend.data;
2427 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2429 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2430 tpl_bool_t is_activated = 0;
2432 tbm_surface_h tbm_surface = NULL;
/* Drop the TPL object lock while blocking on queue/fence waits. */
2434 TPL_OBJECT_UNLOCK(surface);
/* After a queue reset, make sure the previously dequeued buffer has
 * progressed past the in-flight states before dequeuing a new one. */
2435 if (wl_egl_surface->reset == TPL_TRUE && wl_egl_surface->last_deq_buffer) {
2436 tpl_wl_egl_buffer_t *last_deq_buffer = wl_egl_surface->last_deq_buffer;
2438 tpl_gmutex_lock(&last_deq_buffer->mutex);
2439 if (last_deq_buffer->status > ENQUEUED &&
2440 last_deq_buffer->status < COMMITTED) {
2441 tpl_result_t wait_result;
2442 wait_result = tpl_cond_timed_wait(&last_deq_buffer->cond,
2443 &last_deq_buffer->mutex,
2446 if (wait_result == TPL_ERROR_TIME_OUT)
2447 TPL_WARN("timeout occured waiting signaled. wl_egl_buffer(%p)",
2450 tpl_gmutex_unlock(&last_deq_buffer->mutex);
2452 wl_egl_surface->last_deq_buffer = NULL;
2454 tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2455 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2456 TPL_OBJECT_LOCK(surface);
2458 /* After the can dequeue state, lock the wl_event_mutex to prevent other
2459 * events from being processed in wayland_egl_thread
2460 * during below dequeue procedure. */
2461 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2463 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2464 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2465 wl_egl_surface->tbm_queue, surface);
2466 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2467 TPL_ERR("Failed to timeout reset. tbm_queue(%p) surface(%p)",
2468 wl_egl_surface->tbm_queue, surface);
2469 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2472 tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2476 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2477 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2478 wl_egl_surface->tbm_queue, surface);
2479 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2483 /* wayland client can check their states (ACTIVATED or DEACTIVATED) with
2484 * below function [wayland_tbm_client_queue_check_activate()].
2485 * This function has to be called before tbm_surface_queue_dequeue()
2486 * in order to know what state the buffer will be dequeued next.
2488 * ACTIVATED state means non-composite mode. Client can get buffers which
2489 can be displayed directly(without compositing).
2490 * DEACTIVATED state means composite mode. Client's buffer will be displayed
2491 by compositor(E20) with compositing.
2493 is_activated = wayland_tbm_client_queue_check_activate(
2494 wl_egl_display->wl_tbm_client,
2495 wl_egl_surface->tbm_queue);
2497 wl_egl_surface->is_activated = is_activated;
/* Pick up any size change applied to the queue (e.g. after resize). */
2499 surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2500 surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2501 wl_egl_surface->width = surface->width;
2502 wl_egl_surface->height = surface->height;
2504 if (surface->is_frontbuffer_mode && surface->frontbuffer != NULL) {
2505 /* If surface->frontbuffer is already set in frontbuffer mode,
2506 * it will return that frontbuffer if it is still activated,
2507 * otherwise dequeue the new buffer after initializing
2508 * surface->frontbuffer to NULL. */
2509 if (is_activated && !wl_egl_surface->reset) {
2510 bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2513 "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2514 surface->frontbuffer, bo_name);
2515 TRACE_ASYNC_BEGIN((int)surface->frontbuffer,
2516 "[DEQ]~[ENQ] BO_NAME:%d",
2518 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2519 return surface->frontbuffer;
2521 surface->frontbuffer = NULL;
2522 wl_egl_surface->need_to_enqueue = TPL_TRUE;
2525 surface->frontbuffer = NULL;
2528 tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2531 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p)| tsq_err = %d",
2532 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2533 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Hold a ref for the dequeued buffer; dropped on enqueue/cancel/release. */
2537 tbm_surface_internal_ref(tbm_surface);
2539 wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2540 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2542 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2543 wl_egl_buffer->status = DEQUEUED;
2545 wl_egl_surface->last_deq_buffer = wl_egl_buffer;
2547 /* If wl_egl_buffer->release_fence_fd is -1,
2548 * the tbm_surface can be used immediately.
2549 * If not, user(EGL) have to wait until signaled. */
2550 if (release_fence) {
2551 #if TIZEN_FEATURE_ENABLE
2552 if (wl_egl_surface->surface_sync) {
2553 *release_fence = wl_egl_buffer->release_fence_fd;
2554 TPL_DEBUG("wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2555 wl_egl_surface, wl_egl_buffer, *release_fence);
/* Ownership of the fence fd transfers to the caller. */
2557 wl_egl_buffer->release_fence_fd = -1;
2561 *release_fence = -1;
2565 if (surface->is_frontbuffer_mode && is_activated)
2566 surface->frontbuffer = tbm_surface;
2568 wl_egl_surface->reset = TPL_FALSE;
2570 TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2571 TRACE_ASYNC_BEGIN((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d",
2572 wl_egl_buffer->bo_name);
2573 TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2574 wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name,
2575 release_fence ? *release_fence : -1);
2577 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2578 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Backend cancel-dequeue entry point: return a dequeued (but never
 * enqueued) tbm_surface to the queue. Marks the wrapper RELEASED, clears
 * last_deq_buffer if this was it, drops the dequeue-time ref, then
 * cancels the dequeue on the tbm queue.
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_PARAMETER for an
 * invalid tbm_surface, TPL_ERROR_INVALID_OPERATION when cancel fails. */
2584 __tpl_wl_egl_surface_cancel_buffer(tpl_surface_t *surface,
2585 tbm_surface_h tbm_surface)
2587 TPL_ASSERT(surface);
2588 TPL_ASSERT(surface->backend.data);
2590 tpl_wl_egl_surface_t *wl_egl_surface =
2591 (tpl_wl_egl_surface_t *)surface->backend.data;
2592 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2593 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2595 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2596 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2597 return TPL_ERROR_INVALID_PARAMETER;
2600 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2601 if (wl_egl_buffer) {
2602 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2603 wl_egl_buffer->status = RELEASED;
2604 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2607 if (wl_egl_buffer == wl_egl_surface->last_deq_buffer)
2608 wl_egl_surface->last_deq_buffer = NULL;
/* Drop the ref taken in __tpl_wl_egl_surface_dequeue_buffer(). */
2610 tbm_surface_internal_unref(tbm_surface);
2612 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2614 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2615 TPL_ERR("Failed to release tbm_surface(%p) surface(%p)",
2616 tbm_surface, surface);
2617 return TPL_ERROR_INVALID_OPERATION;
2620 TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2621 wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2623 return TPL_ERROR_NONE;
/* Backend enqueue entry point (eglSwapBuffers path): hand a rendered
 * tbm_surface back to the queue for the worker thread to acquire and
 * commit. Stores damage rects and the acquire fence on the wl_egl_buffer,
 * transfers any pending presentation/commit sync fds, then enqueues.
 * In frontbuffer mode the enqueue may be skipped entirely (the buffer is
 * already on screen).
 * Returns TPL_ERROR_NONE, TPL_ERROR_INVALID_PARAMETER,
 * TPL_ERROR_OUT_OF_MEMORY, or TPL_ERROR_INVALID_OPERATION. */
2627 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2628 tbm_surface_h tbm_surface,
2629 int num_rects, const int *rects, int32_t acquire_fence)
2631 TPL_ASSERT(surface);
2632 TPL_ASSERT(surface->display);
2633 TPL_ASSERT(surface->backend.data);
2634 TPL_ASSERT(tbm_surface);
2635 TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2637 tpl_wl_egl_surface_t *wl_egl_surface =
2638 (tpl_wl_egl_surface_t *) surface->backend.data;
2639 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2640 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2643 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2644 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2646 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2647 return TPL_ERROR_INVALID_PARAMETER;
2650 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2651 if (!wl_egl_buffer) {
2652 TPL_ERR("Failed to get wl_egl_buffer from tbm_surface(%p)", tbm_surface);
2653 return TPL_ERROR_INVALID_PARAMETER;
2656 bo_name = _get_tbm_surface_bo_name(tbm_surface);
2658 TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2660 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2662 /* If there are received region information, save it to wl_egl_buffer */
2663 if (num_rects && rects) {
/* Replace (not append to) any rects from a previous enqueue. */
2664 if (wl_egl_buffer->rects != NULL) {
2665 free(wl_egl_buffer->rects);
2666 wl_egl_buffer->rects = NULL;
2667 wl_egl_buffer->num_rects = 0;
/* Each rect is 4 ints: x, y, w, h (see commit-side damage loop). */
2670 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2671 wl_egl_buffer->num_rects = num_rects;
2673 if (!wl_egl_buffer->rects) {
2674 TPL_ERR("Failed to allocate memory fo damage rects info.");
2675 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2676 return TPL_ERROR_OUT_OF_MEMORY;
2679 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
2682 if (!wl_egl_surface->need_to_enqueue ||
2683 !wl_egl_buffer->need_to_commit) {
2684 TPL_WARN("[ENQ_SKIP][Frontbuffer:%s] tbm_surface(%p) need not to enqueue",
2685 ((surface->frontbuffer == tbm_surface) ? "ON" : "OFF"), tbm_surface);
2686 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2687 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2688 return TPL_ERROR_NONE;
2691 /* In frontbuffer mode, will skip tbm_surface_queue_enqueue, acquire, and
2692 * commit if surface->frontbuffer that is already set and the tbm_surface
2693 * client want to enqueue are the same.
2695 if (surface->is_frontbuffer_mode) {
2696 /* The first buffer to be activated in frontbuffer mode must be
2697 * committed. Subsequence frames do not need to be committed because
2698 * the buffer is already displayed.
2700 if (surface->frontbuffer == tbm_surface)
2701 wl_egl_surface->need_to_enqueue = TPL_FALSE;
/* Fence not needed on the skip path — close it to avoid an fd leak. */
2703 if (acquire_fence != -1) {
2704 close(acquire_fence);
/* Take ownership of the new acquire fence, replacing any stale one. */
2709 if (wl_egl_buffer->acquire_fence_fd != -1)
2710 close(wl_egl_buffer->acquire_fence_fd);
2712 wl_egl_buffer->acquire_fence_fd = acquire_fence;
/* Move pending presentation-sync fd onto this buffer. */
2714 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2715 if (wl_egl_surface->presentation_sync.fd != -1) {
2716 wl_egl_buffer->presentation_sync_fd = wl_egl_surface->presentation_sync.fd;
2717 wl_egl_surface->presentation_sync.fd = -1;
2719 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Likewise for the commit-sync fd. */
2721 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2722 if (wl_egl_surface->commit_sync.fd != -1) {
2723 wl_egl_buffer->commit_sync_fd = wl_egl_surface->commit_sync.fd;
2724 wl_egl_surface->commit_sync.fd = -1;
2725 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2726 _get_tbm_surface_bo_name(tbm_surface));
2728 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2730 wl_egl_buffer->status = ENQUEUED;
2732 "[ENQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2733 wl_egl_buffer, tbm_surface, bo_name, acquire_fence);
2735 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2737 tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
2739 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2740 tbm_surface_internal_unref(tbm_surface);
2741 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2742 tbm_surface, wl_egl_surface, tsq_err);
2743 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2744 return TPL_ERROR_INVALID_OPERATION;
/* Drop the dequeue-time ref; the acquire path takes its own. */
2747 tbm_surface_internal_unref(tbm_surface);
2749 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2751 return TPL_ERROR_NONE;
/* Dispatch callback of the disposable gsource created for a buffer's
 * acquire fence (see _thread_surface_queue_acquire). Runs on the worker
 * thread once the fence fd signals: closes the fence, then either commits
 * the buffer immediately or parks it on the vblank waiting list. */
2755 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2757 tpl_wl_egl_buffer_t *wl_egl_buffer =
2758 (tpl_wl_egl_buffer_t *)tpl_gsource_get_data(gsource);
2759 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2760 tbm_surface_h tbm_surface = wl_egl_buffer->tbm_surface;
2762 wl_egl_surface->render_done_cnt++;
2764 TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2765 wl_egl_buffer->acquire_fence_fd);
2767 TPL_DEBUG("[RENDER DONE] wl_egl_buffer(%p) tbm_surface(%p)",
2768 wl_egl_buffer, tbm_surface);
2770 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2771 wl_egl_buffer->status = WAITING_VBLANK;
2773 TPL_DEBUG("[FINALIZE] wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2774 wl_egl_buffer, wl_egl_buffer->waiting_source,
2775 wl_egl_buffer->acquire_fence_fd);
/* Fence has signaled — fd is no longer needed. */
2777 close(wl_egl_buffer->acquire_fence_fd);
2778 wl_egl_buffer->acquire_fence_fd = -1;
/* The disposable gsource destroys itself after dispatch. */
2779 wl_egl_buffer->waiting_source = NULL;
2781 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2783 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
/* Commit now if no vblank throttling is active, otherwise queue the
 * buffer for the next vblank callback to commit. */
2785 if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2786 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2788 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2789 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers,
2791 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2794 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* Finalize hook for the fence-wait gsource. Nothing to clean up: all
 * per-buffer state is handled in the dispatch callback. */
2800 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2802 TPL_IGNORE(gsource);
/* gsource vtable for per-buffer acquire-fence wait sources
 * (used with SOURCE_TYPE_DISPOSABLE in _thread_surface_queue_acquire). */
2805 static tpl_gsource_functions buffer_funcs = {
2808 .dispatch = __thread_func_waiting_source_dispatch,
2809 .finalize = __thread_func_waiting_source_finalize,
/* Worker-thread side of enqueue: drain all acquirable buffers from the
 * tbm queue. For each buffer:
 *   - with explicit surface_sync, commit immediately (compositor waits on
 *     the fence);
 *   - otherwise, if an acquire fence is set, create a disposable gsource
 *     that commits once the fence signals;
 *   - otherwise commit now, or park on the vblank waiting list when
 *     vblank throttling is in progress.
 * Returns TPL_ERROR_NONE, or TPL_ERROR_INVALID_OPERATION on acquire
 * failure. */
2813 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2815 tbm_surface_h tbm_surface = NULL;
2816 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2817 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2818 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2819 tpl_bool_t ready_to_commit = TPL_FALSE;
2821 while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2822 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2824 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2825 TPL_ERR("Failed to acquire from tbm_queue(%p)",
2826 wl_egl_surface->tbm_queue);
2827 return TPL_ERROR_INVALID_OPERATION;
/* Hold a ref for the acquired buffer until it is released. */
2830 tbm_surface_internal_ref(tbm_surface);
2832 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2833 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2834 "wl_egl_buffer sould be not NULL");
2836 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2838 wl_egl_buffer->status = ACQUIRED;
2840 TPL_LOG_T("WL_EGL", "[ACQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2841 wl_egl_buffer, tbm_surface,
2842 _get_tbm_surface_bo_name(tbm_surface));
2844 if (wl_egl_buffer->acquire_fence_fd != -1) {
2845 #if TIZEN_FEATURE_ENABLE
/* Explicit sync: commit right away, fence travels with the buffer. */
2846 if (wl_egl_surface->surface_sync)
2847 ready_to_commit = TPL_TRUE;
/* Replace any stale wait source before creating a new one. */
2851 if (wl_egl_buffer->waiting_source) {
2852 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2853 wl_egl_buffer->waiting_source = NULL;
2856 wl_egl_buffer->waiting_source =
2857 tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2858 wl_egl_buffer->acquire_fence_fd, &buffer_funcs,
2859 SOURCE_TYPE_DISPOSABLE);
2860 wl_egl_buffer->status = WAITING_SIGNALED;
2862 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2863 wl_egl_buffer->acquire_fence_fd);
/* Commit deferred until the fence-wait source dispatches. */
2865 ready_to_commit = TPL_FALSE;
2868 ready_to_commit = TPL_TRUE;
2871 if (ready_to_commit) {
2872 if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2873 ready_to_commit = TPL_TRUE;
/* Vblank pending: queue the buffer; __cb_tdm_client_vblank commits it. */
2875 wl_egl_buffer->status = WAITING_VBLANK;
2876 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2877 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers, wl_egl_buffer);
2878 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2879 ready_to_commit = TPL_FALSE;
2883 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2885 if (ready_to_commit)
2886 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2889 return TPL_ERROR_NONE;
2892 /* -- BEGIN -- tdm_client vblank callback function */
/* tdm vblank callback (worker thread): marks vblank done and commits
 * buffer(s) parked on the vblank waiting list. Normally commits exactly
 * one buffer per vblank; on a tdm error (e.g. TDM_ERROR_TIMEOUT) it
 * drains the whole list so no buffer is stranded. */
2894 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
2895 unsigned int sequence, unsigned int tv_sec,
2896 unsigned int tv_usec, void *user_data)
2898 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)user_data;
2899 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2901 TRACE_ASYNC_END((int)wl_egl_surface, "WAIT_VBLANK");
2902 TPL_DEBUG("[VBLANK] wl_egl_surface(%p)", wl_egl_surface);
2904 if (error == TDM_ERROR_TIMEOUT)
2905 TPL_WARN("[TDM_ERROR_TIMEOUT] It will keep going. wl_egl_surface(%p)",
2908 wl_egl_surface->vblank_done = TPL_TRUE;
2910 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2911 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
2912 tpl_bool_t is_empty = TPL_TRUE;
2914 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2915 wl_egl_buffer = (tpl_wl_egl_buffer_t *)__tpl_list_pop_front(
2916 wl_egl_surface->vblank->waiting_buffers,
2918 is_empty = __tpl_list_is_empty(wl_egl_surface->vblank->waiting_buffers);
2919 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2921 if (!wl_egl_buffer) break;
2923 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2925 /* If tdm error such as TIMEOUT occured,
2926 * flush all vblank waiting buffers of its wl_egl_surface.
2927 * Otherwise, only one wl_egl_buffer will be commited per one vblank event.
2929 if (error == TDM_ERROR_NONE) break;
2930 } while (!is_empty);
2932 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2936 #if TIZEN_FEATURE_ENABLE
/* zwp_linux_buffer_release_v1 "fenced_release" event: the compositor has
 * released the buffer and supplied a release fence fd. Stores the fence
 * on the wl_egl_buffer (handed to EGL at the next dequeue), marks the
 * buffer RELEASED, returns it to the tbm queue, and drops the
 * acquire-time ref. Only acts on buffers currently in COMMITTED state. */
2938 __cb_buffer_fenced_release(void *data,
2939 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
2941 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2942 tbm_surface_h tbm_surface = NULL;
2944 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2946 tbm_surface = wl_egl_buffer->tbm_surface;
2948 if (tbm_surface_internal_is_valid(tbm_surface)) {
2950 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2951 if (wl_egl_buffer->status == COMMITTED) {
2952 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2953 tbm_surface_queue_error_e tsq_err;
/* The release object is one-shot: destroy it after the event. */
2955 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
2956 wl_egl_buffer->buffer_release = NULL;
/* Take ownership of the fence fd from the protocol event. */
2958 wl_egl_buffer->release_fence_fd = fence;
2959 wl_egl_buffer->status = RELEASED;
2961 TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
2962 _get_tbm_surface_bo_name(tbm_surface),
2964 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2965 _get_tbm_surface_bo_name(tbm_surface));
2968 "[FENCED_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2969 wl_egl_buffer, tbm_surface,
2970 _get_tbm_surface_bo_name(tbm_surface),
2973 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2975 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2976 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
/* Drop the ref taken at acquire time. */
2978 tbm_surface_internal_unref(tbm_surface);
2981 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2984 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* zwp_linux_buffer_release_v1 "immediate_release" event: same handling as
 * __cb_buffer_fenced_release but with no fence — the buffer is reusable
 * right away (release_fence_fd is set to -1). */
2989 __cb_buffer_immediate_release(void *data,
2990 struct zwp_linux_buffer_release_v1 *release)
2992 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2993 tbm_surface_h tbm_surface = NULL;
2995 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2997 tbm_surface = wl_egl_buffer->tbm_surface;
2999 if (tbm_surface_internal_is_valid(tbm_surface)) {
3001 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3002 if (wl_egl_buffer->status == COMMITTED) {
3003 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3004 tbm_surface_queue_error_e tsq_err;
/* One-shot release object — destroy after handling. */
3006 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3007 wl_egl_buffer->buffer_release = NULL;
/* No fence: buffer is immediately reusable. */
3009 wl_egl_buffer->release_fence_fd = -1;
3010 wl_egl_buffer->status = RELEASED;
3012 TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
3013 _get_tbm_surface_bo_name(tbm_surface));
3014 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3015 _get_tbm_surface_bo_name(tbm_surface));
3018 "[IMMEDIATE_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
3019 wl_egl_buffer, tbm_surface,
3020 _get_tbm_surface_bo_name(tbm_surface));
3022 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3024 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3025 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
/* Drop the ref taken at acquire time. */
3027 tbm_surface_internal_unref(tbm_surface);
3030 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3033 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* Listener for explicit-sync buffer release events (order per protocol:
 * fenced_release, immediate_release).
 * NOTE(review): "listner" is a pre-existing typo in the identifier; kept
 * as-is since other code references this name. */
3037 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
3038 __cb_buffer_fenced_release,
3039 __cb_buffer_immediate_release,
/* wl_buffer "release" event (non-explicit-sync path): the compositor no
 * longer uses the buffer. If the buffer was COMMITTED, return it to the
 * tbm queue, mark it RELEASED, and drop the acquire-time ref. */
3044 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
3046 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
3047 tbm_surface_h tbm_surface = NULL;
3049 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer)
3051 tbm_surface = wl_egl_buffer->tbm_surface;
3053 if (tbm_surface_internal_is_valid(tbm_surface)) {
3054 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
/* Default INVALID_SURFACE ensures the unref below only happens when the
 * queue release actually succeeded. */
3055 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3057 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3059 if (wl_egl_buffer->status == COMMITTED) {
3061 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3063 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3064 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3066 wl_egl_buffer->status = RELEASED;
3068 TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
3069 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3070 _get_tbm_surface_bo_name(tbm_surface));
3072 TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
3073 wl_egl_buffer->wl_buffer, tbm_surface,
3074 _get_tbm_surface_bo_name(tbm_surface));
3077 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Unref outside the buffer mutex; only if the release succeeded. */
3079 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3080 tbm_surface_internal_unref(tbm_surface);
3082 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* wl_buffer listener; the cast adapts __cb_wl_buffer_release's
 * wl_proxy-typed second parameter to the wl_buffer-typed slot. */
3086 static const struct wl_buffer_listener wl_buffer_release_listener = {
3087 (void *)__cb_wl_buffer_release,
3089 #if TIZEN_FEATURE_ENABLE
/* wp_presentation_feedback "sync_output" event — intentionally unused;
 * only presented/discarded are acted upon. */
3091 __cb_presentation_feedback_sync_output(void *data,
3092 struct wp_presentation_feedback *presentation_feedback,
3093 struct wl_output *output)
3096 TPL_IGNORE(presentation_feedback);
/* wp_presentation_feedback "presented" event: the frame reached screen.
 * Signals the waiting client through the pst_sync eventfd, closes it,
 * destroys the feedback object, and removes the pst_feedback entry from
 * the surface's pending list. Timing payload (tv_sec/tv_nsec/refresh/…)
 * is deliberately ignored. */
3102 __cb_presentation_feedback_presented(void *data,
3103 struct wp_presentation_feedback *presentation_feedback,
3107 uint32_t refresh_nsec,
3112 TPL_IGNORE(tv_sec_hi);
3113 TPL_IGNORE(tv_sec_lo);
3114 TPL_IGNORE(tv_nsec);
3115 TPL_IGNORE(refresh_nsec);
3120 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3121 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
3123 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3125 TPL_DEBUG("[FEEDBACK][PRESENTED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3126 pst_feedback, presentation_feedback, pst_feedback->bo_name);
3128 if (pst_feedback->pst_sync_fd != -1) {
/* Wake whoever is poll()ing the presentation-sync eventfd. */
3129 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3131 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3132 pst_feedback->pst_sync_fd);
3135 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3136 "[PRESENTATION_SYNC] bo(%d)",
3137 pst_feedback->bo_name);
3139 close(pst_feedback->pst_sync_fd);
3140 pst_feedback->pst_sync_fd = -1;
3143 wp_presentation_feedback_destroy(presentation_feedback);
3145 pst_feedback->presentation_feedback = NULL;
3146 pst_feedback->wl_egl_surface = NULL;
3147 pst_feedback->bo_name = 0;
3149 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3154 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* wp_presentation_feedback "discarded" event: the frame never reached
 * screen. Handled identically to "presented" — the sync eventfd is still
 * signaled so waiters are not blocked forever — then the feedback object
 * and pending-list entry are cleaned up. */
3158 __cb_presentation_feedback_discarded(void *data,
3159 struct wp_presentation_feedback *presentation_feedback)
3161 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3162 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
3164 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3166 TPL_DEBUG("[FEEDBACK][DISCARDED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3167 pst_feedback, presentation_feedback, pst_feedback->bo_name);
3169 if (pst_feedback->pst_sync_fd != -1) {
/* Signal waiters even on discard, so nobody waits forever. */
3170 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3172 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3173 pst_feedback->pst_sync_fd);
3176 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3177 "[PRESENTATION_SYNC] bo(%d)",
3178 pst_feedback->bo_name);
3180 close(pst_feedback->pst_sync_fd);
3181 pst_feedback->pst_sync_fd = -1;
3184 wp_presentation_feedback_destroy(presentation_feedback);
3186 pst_feedback->presentation_feedback = NULL;
3187 pst_feedback->wl_egl_surface = NULL;
3188 pst_feedback->bo_name = 0;
3190 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3195 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* wp_presentation_feedback listener (sync_output / presented / discarded,
 * in protocol order). */
3198 static const struct wp_presentation_feedback_listener feedback_listener = {
3199 __cb_presentation_feedback_sync_output, /* sync_output feedback -*/
3200 __cb_presentation_feedback_presented,
3201 __cb_presentation_feedback_discarded
/* Arm a tdm vblank wait for this surface using the current post_interval;
 * __cb_tdm_client_vblank fires when it completes. On success clears
 * vblank_done so subsequent commits are throttled until the callback.
 * Returns TPL_ERROR_NONE, or TPL_ERROR_INVALID_OPERATION when
 * tdm_client_vblank_wait fails. */
3206 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
3208 tdm_error tdm_err = TDM_ERROR_NONE;
3209 tpl_surface_vblank_t *vblank = wl_egl_surface->vblank;
3211 tdm_err = tdm_client_vblank_wait(vblank->tdm_vblank,
3212 wl_egl_surface->post_interval,
3213 __cb_tdm_client_vblank,
3214 (void *)wl_egl_surface);
3216 if (tdm_err == TDM_ERROR_NONE) {
3217 wl_egl_surface->vblank_done = TPL_FALSE;
3218 TRACE_ASYNC_BEGIN((int)wl_egl_surface, "WAIT_VBLANK");
3220 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
3221 return TPL_ERROR_INVALID_OPERATION;
3224 return TPL_ERROR_NONE;
/*
 * Commit |wl_egl_buffer| to the compositor for |wl_egl_surface|.
 *
 * Runs on the tpl worker thread. Sequence: lazily create the wl_buffer
 * proxy, register presentation feedback, set buffer transform, attach,
 * post damage, set the tbm serial, hand over the acquire fence (explicit
 * sync), commit + flush, then wake everyone waiting on this buffer
 * (buffer cond, commit_sync eventfd) and arm the vblank wait.
 *
 * NOTE(review): several lines of this function are elided in this view;
 * the comments below describe only the visible statements.
 */
3228 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
3229 tpl_wl_egl_buffer_t *wl_egl_buffer)
3231 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3232 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
3233 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
3236 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
3237 "wl_egl_buffer sould be not NULL");
/* First commit of this tbm_surface: create its wl_buffer proxy once and
 * cache it on the wl_egl_buffer for subsequent commits. */
3239 if (wl_egl_buffer->wl_buffer == NULL) {
3240 wl_egl_buffer->wl_buffer =
3241 (struct wl_proxy *)wayland_tbm_client_create_buffer(
3242 wl_egl_display->wl_tbm_client,
3243 wl_egl_buffer->tbm_surface);
3245 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
3246 "[FATAL] Failed to create wl_buffer");
3248 TPL_INFO("[WL_BUFFER_CREATE]",
3249 "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3250 wl_egl_buffer, wl_egl_buffer->wl_buffer,
3251 wl_egl_buffer->tbm_surface);
3253 #if TIZEN_FEATURE_ENABLE
/* Without explicit sync, buffer reuse is driven by wl_buffer.release
 * events, so a release listener is attached to the new proxy. */
3254 if (!wl_egl_display->use_explicit_sync ||
3255 !wl_egl_surface->surface_sync)
3258 wl_buffer_add_listener((struct wl_buffer *)wl_egl_buffer->wl_buffer,
3259 &wl_buffer_release_listener,
/* wl_surface interface version gates which damage request is legal
 * (wl_surface_damage_buffer requires version >= 4). */
3264 version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
3266 #if TIZEN_FEATURE_ENABLE
3267 /* create presentation feedback and add listener */
/* Ownership of presentation_sync_fd moves to the pst_feedback here; the
 * feedback listener signals/closes it when the compositor reports
 * presentation. On failure the fd is signaled and closed immediately so
 * no waiter blocks forever. */
3268 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3269 if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
3271 struct pst_feedback *pst_feedback = NULL;
3272 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
3274 pst_feedback->presentation_feedback =
3275 wp_presentation_feedback(wl_egl_display->presentation,
3278 pst_feedback->wl_egl_surface = wl_egl_surface;
3279 pst_feedback->bo_name = wl_egl_buffer->bo_name;
3281 pst_feedback->pst_sync_fd = wl_egl_buffer->presentation_sync_fd;
3282 wl_egl_buffer->presentation_sync_fd = -1;
3284 wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
3285 &feedback_listener, pst_feedback);
3286 __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
3287 TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
3288 "[PRESENTATION_SYNC] bo(%d)",
3289 pst_feedback->bo_name);
3291 TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
3293 _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
3294 close(wl_egl_buffer->presentation_sync_fd);
3295 wl_egl_buffer->presentation_sync_fd = -1;
3298 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Apply a pending per-buffer transform via wayland-tbm, then clear the
 * "rotated" flag so it is sent only once per change. */
3301 if (wl_egl_buffer->w_rotated == TPL_TRUE) {
3303 wayland_tbm_client_set_buffer_transform(
3304 wl_egl_display->wl_tbm_client,
3305 (void *)wl_egl_buffer->wl_buffer,
3306 wl_egl_buffer->w_transform);
3308 wl_egl_buffer->w_rotated = TPL_FALSE;
/* Send wl_surface.set_buffer_transform only when it actually changed. */
3311 if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3312 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3314 wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
/* Record the size actually attached, for wl_egl_window resize logic. */
3317 if (wl_egl_window) {
3318 wl_egl_window->attached_width = wl_egl_buffer->width;
3319 wl_egl_window->attached_height = wl_egl_buffer->height;
3322 wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3323 wl_egl_buffer->dx, wl_egl_buffer->dy);
/* Damage: no rects means "whole buffer"; otherwise post each rect.
 * The y coordinate is flipped (height - (y + h)) — presumably because
 * client rects use a bottom-left origin; verify against callers. */
3325 if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3327 wl_surface_damage(wl_surface,
3328 wl_egl_buffer->dx, wl_egl_buffer->dy,
3329 wl_egl_buffer->width, wl_egl_buffer->height);
3331 wl_surface_damage_buffer(wl_surface,
3333 wl_egl_buffer->width, wl_egl_buffer->height);
3337 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3339 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3340 wl_egl_buffer->rects[i * 4 + 3]);
3342 wl_surface_damage(wl_surface,
3343 wl_egl_buffer->rects[i * 4 + 0],
3345 wl_egl_buffer->rects[i * 4 + 2],
3346 wl_egl_buffer->rects[i * 4 + 3]);
3348 wl_surface_damage_buffer(wl_surface,
3349 wl_egl_buffer->rects[i * 4 + 0],
3351 wl_egl_buffer->rects[i * 4 + 2],
3352 wl_egl_buffer->rects[i * 4 + 3]);
3357 wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3358 (void *)wl_egl_buffer->wl_buffer,
3359 wl_egl_buffer->serial);
3360 #if TIZEN_FEATURE_ENABLE
/* Explicit sync: hand the acquire fence to the compositor (the protocol
 * dups it, so our fd is closed right after), then obtain a per-commit
 * buffer_release object whose listener supplies the release fence. */
3361 if (wl_egl_display->use_explicit_sync &&
3362 wl_egl_surface->surface_sync) {
3364 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3365 wl_egl_buffer->acquire_fence_fd);
3366 TPL_DEBUG("[SET_ACQUIRE_FENCE] wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3367 wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3368 close(wl_egl_buffer->acquire_fence_fd);
3369 wl_egl_buffer->acquire_fence_fd = -1;
3371 wl_egl_buffer->buffer_release =
3372 zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3373 if (!wl_egl_buffer->buffer_release) {
3374 TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3376 zwp_linux_buffer_release_v1_add_listener(
3377 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3378 TPL_DEBUG("add explicit_sync_release_listener.");
/* Atomically apply the pending state, then flush so the compositor sees
 * the commit without waiting for the next dispatch. */
3383 wl_surface_commit(wl_surface);
3385 wl_display_flush(wl_egl_display->wl_display);
3387 TRACE_ASYNC_BEGIN((int)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3388 wl_egl_buffer->bo_name);
/* Mark the buffer COMMITTED and wake any thread blocked on its cond. */
3390 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3392 wl_egl_buffer->need_to_commit = TPL_FALSE;
3393 wl_egl_buffer->status = COMMITTED;
3395 tpl_gcond_signal(&wl_egl_buffer->cond);
3397 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3400 "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3401 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3402 wl_egl_buffer->bo_name);
/* Pace the next commit: request a tdm vblank wait if enabled. */
3404 if (wl_egl_surface->vblank != NULL &&
3405 _thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3406 TPL_ERR("Failed to set wait vblank.");
/* Signal the commit_sync eventfd (waiters in the client thread), then
 * close it; -1 marks it consumed. */
3408 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3410 if (wl_egl_buffer->commit_sync_fd != -1) {
3411 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3413 TPL_ERR("Failed to send commit_sync signal to fd(%d)", wl_egl_buffer->commit_sync_fd);
3416 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3417 wl_egl_buffer->bo_name);
3418 TPL_DEBUG("[COMMIT_SYNC][SEND] wl_egl_surface(%p) commit_sync_fd(%d)",
3419 wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3421 close(wl_egl_buffer->commit_sync_fd);
3422 wl_egl_buffer->commit_sync_fd = -1;
3425 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
/*
 * Write a 64-bit value to |eventfd| to wake any reader blocked on it.
 * Rejects fd == -1; logs when the write fails. (The value's
 * initialization and the return statements are on elided lines.)
 */
3429 _write_to_eventfd(int eventfd)
/* Guard against an already-consumed/closed fd (callers set -1). */
3434 if (eventfd == -1) {
3435 TPL_ERR("Invalid fd(-1)");
/* eventfd(2) requires exactly an 8-byte counter value per write. */
3439 ret = write(eventfd, &value, sizeof(uint64_t));
3441 TPL_ERR("failed to write to fd(%d)", eventfd);
/*
 * Populate the display backend vtable with the threaded Wayland-EGL
 * implementations. Called by the tpl core when this backend is selected.
 */
3449 __tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
3451 TPL_ASSERT(backend);
3453 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3454 backend->data = NULL;
3456 backend->init = __tpl_wl_egl_display_init;
3457 backend->fini = __tpl_wl_egl_display_fini;
3458 backend->query_config = __tpl_wl_egl_display_query_config;
3459 backend->filter_config = __tpl_wl_egl_display_filter_config;
3460 backend->get_window_info = __tpl_wl_egl_display_get_window_info;
3461 backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
3462 backend->get_buffer_from_native_pixmap =
3463 __tpl_wl_egl_display_get_buffer_from_native_pixmap;
/*
 * Populate the surface backend vtable with the threaded Wayland-EGL
 * implementations (dequeue/enqueue/cancel, rotation, post interval, size).
 */
3467 __tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
3469 TPL_ASSERT(backend);
3471 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3472 backend->data = NULL;
3474 backend->init = __tpl_wl_egl_surface_init;
3475 backend->fini = __tpl_wl_egl_surface_fini;
3476 backend->validate = __tpl_wl_egl_surface_validate;
3477 backend->cancel_dequeued_buffer =
3478 __tpl_wl_egl_surface_cancel_buffer;
3479 backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
3480 backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
3481 backend->set_rotation_capability =
3482 __tpl_wl_egl_surface_set_rotation_capability;
3483 backend->set_post_interval =
3484 __tpl_wl_egl_surface_set_post_interval;
3486 __tpl_wl_egl_surface_get_size;
/*
 * Destroy a wl_egl_buffer and every resource hanging off it.
 *
 * Unlinks the buffer from the surface's tracking array and vblank wait
 * list, destroys its Wayland/tbm proxies and fds, signals any pending
 * sync eventfds so waiters are not left blocked, and finally frees the
 * struct. NOTE(review): some lines are elided in this view; comments
 * describe the visible statements only.
 */
3490 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
3492 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3493 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3495 TPL_INFO("[BUFFER_FREE]", "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3496 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface);
/* Remove the buffer from the surface's slot array and drop the count. */
3498 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3499 if (wl_egl_buffer->idx >= 0 && wl_egl_surface->buffers[wl_egl_buffer->idx]) {
3500 wl_egl_surface->buffers[wl_egl_buffer->idx] = NULL;
3501 wl_egl_surface->buffer_cnt--;
3503 wl_egl_buffer->idx = -1;
/* Do not leave a dangling pointer to the last dequeued buffer. */
3506 if (wl_egl_surface->last_deq_buffer == wl_egl_buffer)
3507 wl_egl_surface->last_deq_buffer = NULL;
3509 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
/* If the buffer is still queued for a vblank-paced commit, unlink it. */
3511 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
3512 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
3513 __tpl_list_remove_data(wl_egl_surface->vblank->waiting_buffers,
3514 (void *)wl_egl_buffer,
3517 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
/* Destroy the wl_buffer proxy and flush so the server drops it too. */
3520 if (wl_egl_display) {
3521 if (wl_egl_buffer->wl_buffer) {
3522 wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
3523 (void *)wl_egl_buffer->wl_buffer);
3524 wl_egl_buffer->wl_buffer = NULL;
3527 wl_display_flush(wl_egl_display->wl_display);
3530 #if TIZEN_FEATURE_ENABLE
/* Explicit-sync leftovers: the per-commit release object and any
 * release fence fd the compositor handed back. */
3531 if (wl_egl_buffer->buffer_release) {
3532 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3533 wl_egl_buffer->buffer_release = NULL;
3536 if (wl_egl_buffer->release_fence_fd != -1) {
3537 close(wl_egl_buffer->release_fence_fd);
3538 wl_egl_buffer->release_fence_fd = -1;
3542 if (wl_egl_buffer->waiting_source) {
3543 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
3544 wl_egl_buffer->waiting_source = NULL;
/* Signal pending sync eventfds before closing them so any thread
 * blocked on commit/presentation sync wakes up instead of hanging. */
3547 if (wl_egl_buffer->commit_sync_fd != -1) {
3548 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3550 TPL_ERR("Failed to send commit_sync signal to fd(%d)",
3551 wl_egl_buffer->commit_sync_fd);
3552 close(wl_egl_buffer->commit_sync_fd);
3553 wl_egl_buffer->commit_sync_fd = -1;
3556 if (wl_egl_buffer->presentation_sync_fd != -1) {
3557 int ret = _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
3559 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3560 wl_egl_buffer->presentation_sync_fd);
3561 close(wl_egl_buffer->presentation_sync_fd);
3562 wl_egl_buffer->presentation_sync_fd = -1;
/* Free the caller-provided damage rect array. */
3565 if (wl_egl_buffer->rects) {
3566 free(wl_egl_buffer->rects);
3567 wl_egl_buffer->rects = NULL;
3568 wl_egl_buffer->num_rects = 0;
/* Scrub identifying fields before freeing (aids use-after-free triage). */
3571 wl_egl_buffer->tbm_surface = NULL;
3572 wl_egl_buffer->bo_name = -1;
3573 wl_egl_buffer->status = RELEASED;
3575 free(wl_egl_buffer);
/*
 * Return the exported (global) name of the first bo backing |tbm_surface|,
 * used throughout this file as a stable id in logs and traces.
 */
3579 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
3581 return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
/*
 * Debug helper: dump every tracked buffer of |wl_egl_surface| (slot index,
 * pointers, bo name, lifecycle status) under buffers_mutex.
 */
3585 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
3589 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3590 TPL_INFO("[BUFFERS_INFO]", "wl_egl_surface(%p) buffer_cnt(%d)",
3591 wl_egl_surface, wl_egl_surface->buffer_cnt);
/* Walk all fixed slots; empty slots are NULL and skipped. */
3592 for (idx = 0; idx < BUFFER_ARRAY_SIZE; idx++) {
3593 tpl_wl_egl_buffer_t *wl_egl_buffer = wl_egl_surface->buffers[idx];
3594 if (wl_egl_buffer) {
3596 "INDEX[%d] | wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%s)",
3597 idx, wl_egl_buffer, wl_egl_buffer->tbm_surface,
3598 wl_egl_buffer->bo_name,
3599 status_to_string[wl_egl_buffer->status]);
3602 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);