2 #include "tpl_internal.h"
7 #include <sys/eventfd.h>
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
19 #include <tdm_client.h>
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
30 #include <presentation-time-client-protocol.h>
31 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
34 #include "tpl_utils_gthread.h"
36 static int wl_egl_buffer_key;
37 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
39 /* In wayland, application and compositor create its own drawing buffers. Recommend size is more than 2. */
40 #define BUFFER_ARRAY_SIZE 9
42 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
43 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
44 typedef struct _tpl_wl_egl_buffer tpl_wl_egl_buffer_t;
45 typedef struct _surface_vblank tpl_surface_vblank_t;
/* Per-display backend state, owned by the dedicated wl_egl_thread that
 * __tpl_wl_egl_display_init() creates. All wayland event handling for this
 * display happens on that thread, on the private 'ev_queue'.
 * NOTE(review): this listing elides lines (the embedded line numbers skip);
 * members referenced elsewhere in the file — e.g. the 'tdm' sub-struct,
 * 'use_tss', 'prepared' and 'thread' — are not visible here. */
47 struct _tpl_wl_egl_display {
48 tpl_gsource *disp_source;
50 tpl_gmutex wl_event_mutex;
52 struct wl_display *wl_display;
53 struct wl_event_queue *ev_queue;
54 struct wayland_tbm_client *wl_tbm_client;
55 int last_error; /* errno of the last wl_display error*/
57 tpl_bool_t wl_initialized;
59 tpl_bool_t use_wait_vblank;
60 tpl_bool_t use_explicit_sync;
/* tdm client used to wait for vblank (see _thread_tdm_init). */
65 tdm_client *tdm_client;
66 tpl_gsource *tdm_source;
68 tpl_bool_t tdm_initialized;
/* list of tpl_surface_vblank_t, freed in __thread_func_tdm_finalize. */
69 tpl_list_t *surface_vblanks;
72 #if TIZEN_FEATURE_ENABLE
73 struct tizen_surface_shm *tss; /* used for surface buffer_flush */
74 struct wp_presentation *presentation; /* for presentation feedback */
75 struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
/* Messages posted to the surface gsource (see sent_message in
 * _tpl_wl_egl_surface). NOTE(review): enumerators are elided in this
 * listing — not visible from here. */
79 typedef enum surf_message {
/* Per-EGL-window surface state. Associated 1:1 with a wl_egl_window via
 * tizen_private->data, and with a tbm_surface_queue for buffer management.
 * NOTE(review): several members are elided in this listing (embedded line
 * numbers skip), e.g. width/height/format/rotation/serial used by the
 * callbacks later in the file. */
85 struct _tpl_wl_egl_surface {
86 tpl_gsource *surf_source;
88 tbm_surface_queue_h tbm_queue;
91 struct wl_egl_window *wl_egl_window;
92 struct wl_surface *wl_surface;
94 #if TIZEN_FEATURE_ENABLE
95 struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
96 struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
/* vblank helper; entry also appears in wl_egl_display's surface_vblanks. */
99 tpl_surface_vblank_t *vblank;
101 /* surface information */
108 int latest_transform;
112 tpl_wl_egl_display_t *wl_egl_display;
113 tpl_surface_t *tpl_surface;
115 /* wl_egl_buffer array for buffer tracing */
116 tpl_wl_egl_buffer_t *buffers[BUFFER_ARRAY_SIZE];
117 int buffer_cnt; /* the number of using wl_egl_buffers */
/* guards 'buffers' and 'buffer_cnt' above. */
118 tpl_gmutex buffers_mutex;
120 tpl_list_t *presentation_feedbacks; /* for tracing presentation feedbacks */
132 tpl_gmutex surf_mutex;
135 surf_message sent_message;
137 /* for waiting draw done */
138 tpl_bool_t use_render_done_fence;
139 tpl_bool_t is_activated;
140 tpl_bool_t reset; /* TRUE if queue reseted by external */
141 tpl_bool_t need_to_enqueue;
142 tpl_bool_t prerotation_capability;
143 tpl_bool_t vblank_done;
144 tpl_bool_t set_serial_is_used;
/* Per-surface vblank context (typedef'd as tpl_surface_vblank_t above).
 * Tracked in wl_egl_display's surface_vblanks list and released via
 * __cb_surface_vblank_free(). */
147 struct _surface_vblank {
148 tdm_client_vblank *tdm_vblank;
149 tpl_wl_egl_surface_t *wl_egl_surface;
150 tpl_list_t *waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
/* Buffer life-cycle states and their printable names (index == enum value).
 * NOTE(review): most enumerators/strings are elided in this listing; only
 * states 4 and 5 are visible. The array is sized for 7 states total. */
153 typedef enum buffer_status {
158 WAITING_SIGNALED, // 4
163 static const char *status_to_string[7] = {
168 "WAITING_SIGNALED", // 4
169 "WAITING_VBLANK", // 5
/* Backend bookkeeping attached to each tbm_surface via user-data key
 * KEY_WL_EGL_BUFFER (see _get_wl_egl_buffer / __cb_wl_egl_buffer_free).
 * NOTE(review): several members are elided in this listing (embedded line
 * numbers skip). */
173 struct _tpl_wl_egl_buffer {
174 tbm_surface_h tbm_surface;
177 struct wl_proxy *wl_buffer;
178 int dx, dy; /* position to attach to wl_surface */
179 int width, height; /* size to attach to wl_surface */
181 buffer_status_t status; /* for tracing buffer status */
182 int idx; /* position index in buffers array of wl_egl_surface */
184 /* for damage region */
188 /* for wayland_tbm_client_set_buffer_transform */
190 tpl_bool_t w_rotated;
192 /* for wl_surface_set_buffer_transform */
195 /* for wayland_tbm_client_set_buffer_serial */
198 /* for checking need_to_commit (frontbuffer mode) */
199 tpl_bool_t need_to_commit;
201 /* for checking draw done */
202 tpl_bool_t draw_done;
204 #if TIZEN_FEATURE_ENABLE
205 /* to get release event via zwp_linux_buffer_release_v1 */
206 struct zwp_linux_buffer_release_v1 *buffer_release;
208 /* each buffers own its release_fence_fd, until it passes ownership
210 int32_t release_fence_fd;
212 /* each buffers own its acquire_fence_fd.
213 * If it use zwp_linux_buffer_release_v1 the ownership of this fd
214 * will be passed to display server
215 * Otherwise it will be used as a fence waiting for render done
217 int32_t acquire_fence_fd;
219 /* Fd to send a signal when wl_surface_commit with this buffer */
220 int32_t commit_sync_fd;
222 /* Fd to send a siganl when receive the
223 * presentation feedback from display server */
224 int32_t presentation_sync_fd;
/* gsource waiting on acquire_fence_fd for render-done (draw wait). */
226 tpl_gsource *waiting_source;
/* back-pointer to the owning surface. */
231 tpl_wl_egl_surface_t *wl_egl_surface;
/* One outstanding wp_presentation feedback request; instances are tracked
 * in wl_egl_surface->presentation_feedbacks. NOTE(review): some members
 * (e.g. the bo name / sync fd) are elided in this listing. */
234 #if TIZEN_FEATURE_ENABLE
235 struct pst_feedback {
236 /* to get presentation feedback from display server */
237 struct wp_presentation_feedback *presentation_feedback;
242 tpl_wl_egl_surface_t *wl_egl_surface;
247 static const struct wl_buffer_listener wl_buffer_release_listener;
250 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
252 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
254 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
255 static tpl_wl_egl_buffer_t *
256 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
258 _write_to_eventfd(int eventfd);
260 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
262 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
264 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
265 tpl_wl_egl_buffer_t *wl_egl_buffer);
267 __cb_surface_vblank_free(void *data);
269 static struct tizen_private *
270 tizen_private_create()
272 struct tizen_private *private = NULL;
273 private = (struct tizen_private *)calloc(1, sizeof(struct tizen_private));
275 private->magic = WL_EGL_TIZEN_MAGIC;
276 private->rotation = 0;
277 private->frontbuffer_mode = 0;
278 private->transform = 0;
279 private->window_transform = 0;
282 private->data = NULL;
283 private->rotate_callback = NULL;
284 private->get_rotation_capability = NULL;
285 private->set_window_serial_callback = NULL;
286 private->set_frontbuffer_callback = NULL;
287 private->create_commit_sync_fd = NULL;
288 private->create_presentation_sync_fd = NULL;
289 private->merge_sync_fds = NULL;
296 _check_native_handle_is_wl_display(tpl_handle_t display)
298 struct wl_interface *wl_egl_native_dpy = *(void **) display;
300 if (!wl_egl_native_dpy) {
301 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
305 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
306 is a memory address pointing the structure of wl_display_interface. */
307 if (wl_egl_native_dpy == &wl_display_interface)
310 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
311 strlen(wl_display_interface.name)) == 0) {
/* gsource dispatch for the tdm fd; runs on wl_egl_thread and drains pending
 * tdm events (vblank callbacks). On unrecoverable failure the source
 * destroys itself so the thread stops polling a dead fd.
 * NOTE(review): return statements (gsource continue/remove) are elided in
 * this listing. */
319 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
321 tpl_wl_egl_display_t *wl_egl_display = NULL;
322 tdm_error tdm_err = TDM_ERROR_NONE;
326 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
327 if (!wl_egl_display) {
328 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
329 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
333 tdm_err = tdm_client_handle_events(wl_egl_display->tdm.tdm_client);
335 /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
336 * When tdm_source is no longer available due to an unexpected situation,
337 * wl_egl_thread must remove it from the thread and destroy it.
338 * In that case, tdm_vblank can no longer be used for surfaces and displays
339 * that used this tdm_source. */
340 if (tdm_err != TDM_ERROR_NONE) {
341 TPL_ERR("Error occured in tdm_client_handle_events. tdm_err(%d)",
343 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
/* second arg TPL_FALSE — presumably a non-blocking destroy; confirm
 * against tpl_utils_gthread. */
345 tpl_gsource_destroy(gsource, TPL_FALSE);
347 wl_egl_display->tdm.tdm_source = NULL;
/* gsource finalize for the tdm fd: frees all per-surface vblank contexts,
 * destroys the tdm client, and resets the display's tdm/vblank state so
 * no further vblank waits are attempted. Runs on wl_egl_thread. */
356 __thread_func_tdm_finalize(tpl_gsource *gsource)
358 tpl_wl_egl_display_t *wl_egl_display = NULL;
360 wl_egl_display = (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
363 "tdm_destroy| wl_egl_display(%p) tdm_client(%p) tpl_gsource(%p)",
364 wl_egl_display, wl_egl_display->tdm.tdm_client, gsource);
366 if (wl_egl_display->tdm.tdm_client) {
/* free the vblank list first; each entry is released by
 * __cb_surface_vblank_free. */
368 if (wl_egl_display->tdm.surface_vblanks) {
369 __tpl_list_free(wl_egl_display->tdm.surface_vblanks,
370 __cb_surface_vblank_free);
371 wl_egl_display->tdm.surface_vblanks = NULL;
374 tdm_client_destroy(wl_egl_display->tdm.tdm_client);
375 wl_egl_display->tdm.tdm_client = NULL;
376 wl_egl_display->tdm.tdm_display_fd = -1;
377 wl_egl_display->tdm.tdm_source = NULL;
380 wl_egl_display->use_wait_vblank = TPL_FALSE;
381 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
/* gsource vtable for the tdm fd source (prepare/check slots elided in
 * this listing; dispatch/finalize run on wl_egl_thread). */
384 static tpl_gsource_functions tdm_funcs = {
387 .dispatch = __thread_func_tdm_dispatch,
388 .finalize = __thread_func_tdm_finalize,
/* Create the tdm client used for vblank waiting and record its fd on the
 * display. Called from _thread_init() on wl_egl_thread when
 * use_wait_vblank is enabled.
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_OPERATION on any
 * tdm failure (the client is destroyed before returning). */
392 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
394 tdm_client *tdm_client = NULL;
395 int tdm_display_fd = -1;
396 tdm_error tdm_err = TDM_ERROR_NONE;
398 tdm_client = tdm_client_create(&tdm_err);
399 if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
400 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
401 return TPL_ERROR_INVALID_OPERATION;
404 tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
405 if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
406 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
407 tdm_client_destroy(tdm_client);
408 return TPL_ERROR_INVALID_OPERATION;
411 wl_egl_display->tdm.tdm_display_fd = tdm_display_fd;
412 wl_egl_display->tdm.tdm_client = tdm_client;
413 wl_egl_display->tdm.tdm_source = NULL;
414 wl_egl_display->tdm.tdm_initialized = TPL_TRUE;
/* NOTE(review): __tpl_list_alloc() result is not NULL-checked here. */
415 wl_egl_display->tdm.surface_vblanks = __tpl_list_alloc();
417 TPL_INFO("[TDM_CLIENT_INIT]",
418 "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
419 wl_egl_display, tdm_client, tdm_display_fd);
421 return TPL_ERROR_NONE;
424 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
/* wl_registry global handler: binds the Tizen-specific protocol objects
 * this backend uses (tizen_surface_shm, wp_presentation, explicit sync).
 * Name keeps the historical "resistry" typo — it is referenced by the
 * registry_listener table below, so it cannot be renamed in isolation. */
428 __cb_wl_resistry_global_callback(void *data, struct wl_registry *wl_registry,
429 uint32_t name, const char *interface,
432 #if TIZEN_FEATURE_ENABLE
433 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
435 if (!strcmp(interface, "tizen_surface_shm")) {
/* bind at min(server version, IMPL_TIZEN_SURFACE_SHM_VERSION). */
436 wl_egl_display->tss =
437 wl_registry_bind(wl_registry,
439 &tizen_surface_shm_interface,
440 ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
441 version : IMPL_TIZEN_SURFACE_SHM_VERSION));
442 wl_egl_display->use_tss = TPL_TRUE;
443 } else if (!strcmp(interface, wp_presentation_interface.name)) {
444 wl_egl_display->presentation =
445 wl_registry_bind(wl_registry,
446 name, &wp_presentation_interface, 1);
447 TPL_DEBUG("bind wp_presentation_interface");
448 } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
/* TPL_EFS=0 disables explicit fence sync; the else-branch binding the
 * interface is partially elided in this listing. */
449 char *env = tpl_getenv("TPL_EFS");
450 if (env && !atoi(env)) {
451 wl_egl_display->use_explicit_sync = TPL_FALSE;
453 wl_egl_display->explicit_sync =
454 wl_registry_bind(wl_registry, name,
455 &zwp_linux_explicit_synchronization_v1_interface, 1);
456 wl_egl_display->use_explicit_sync = TPL_TRUE;
457 TPL_DEBUG("bind zwp_linux_explicit_synchronization_v1_interface");
/* wl_registry global_remove handler: intentionally a no-op (body elided
 * in this listing), the bound globals live as long as the display. */
464 __cb_wl_resistry_global_remove_callback(void *data,
465 struct wl_registry *wl_registry,
/* listener table passed to wl_registry_add_listener in
 * _thread_wl_display_init(). */
470 static const struct wl_registry_listener registry_listener = {
471 __cb_wl_resistry_global_callback,
472 __cb_wl_resistry_global_remove_callback
/* Log a wayland display error once per distinct errno: prints the failing
 * call, and for EPROTO also the offending interface/code/proxy id.
 * Stores errno in last_error, which later short-circuits the prepare/
 * check/dispatch gsource callbacks.
 * NOTE(review): the declarations of 'buf' and 'dpy_err' are elided in
 * this listing. */
476 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
477 const char *func_name)
481 strerror_r(errno, buf, sizeof(buf));
/* suppress duplicate reports of the same errno. */
483 if (wl_egl_display->last_error == errno)
486 TPL_ERR("falied to %s. error:%d(%s)", func_name, errno, buf);
488 dpy_err = wl_display_get_error(wl_egl_display->wl_display);
489 if (dpy_err == EPROTO) {
490 const struct wl_interface *err_interface;
491 uint32_t err_proxy_id, err_code;
492 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
495 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
496 err_interface->name, err_code, err_proxy_id);
499 wl_egl_display->last_error = errno;
/* One-time wayland setup, run on wl_egl_thread from _thread_init():
 * creates a private event queue, binds the Tizen globals through a
 * display wrapper + temporary registry queue, initializes wayland-tbm,
 * then moves every bound proxy onto ev_queue.
 * Returns TPL_ERROR_NONE or an error code; the cleanup labels for the
 * error paths are elided in this listing (see the trailing destroy calls).
 * NOTE(review): line 566 shows "®istry_listener" — mojibake for
 * "&registry_listener" (HTML entity &reg;); confirm against upstream. */
503 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
505 struct wl_registry *registry = NULL;
506 struct wl_event_queue *queue = NULL;
507 struct wl_display *display_wrapper = NULL;
508 struct wl_proxy *wl_tbm = NULL;
509 struct wayland_tbm_client *wl_tbm_client = NULL;
511 tpl_result_t result = TPL_ERROR_NONE;
/* temporary queue used only for the registry roundtrip below. */
513 queue = wl_display_create_queue(wl_egl_display->wl_display);
515 TPL_ERR("Failed to create wl_queue wl_display(%p)",
516 wl_egl_display->wl_display);
517 result = TPL_ERROR_INVALID_OPERATION;
/* long-lived private queue for all backend proxies. */
521 wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
522 if (!wl_egl_display->ev_queue) {
523 TPL_ERR("Failed to create wl_queue wl_display(%p)",
524 wl_egl_display->wl_display);
525 result = TPL_ERROR_INVALID_OPERATION;
529 display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
530 if (!display_wrapper) {
531 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
532 wl_egl_display->wl_display);
533 result = TPL_ERROR_INVALID_OPERATION;
537 wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
539 registry = wl_display_get_registry(display_wrapper);
541 TPL_ERR("Failed to create wl_registry");
542 result = TPL_ERROR_INVALID_OPERATION;
/* wrapper no longer needed once the registry proxy exists. */
546 wl_proxy_wrapper_destroy(display_wrapper);
547 display_wrapper = NULL;
549 wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
550 if (!wl_tbm_client) {
551 TPL_ERR("Failed to initialize wl_tbm_client.");
552 result = TPL_ERROR_INVALID_CONNECTION;
556 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
558 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
559 result = TPL_ERROR_INVALID_CONNECTION;
563 wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
564 wl_egl_display->wl_tbm_client = wl_tbm_client;
566 if (wl_registry_add_listener(registry, ®istry_listener,
568 TPL_ERR("Failed to wl_registry_add_listener");
569 result = TPL_ERROR_INVALID_OPERATION;
/* pump the registry queue so the global callbacks above run now. */
573 ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
575 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
576 result = TPL_ERROR_INVALID_OPERATION;
580 #if TIZEN_FEATURE_ENABLE
581 /* set tizen_surface_shm's queue as client's private queue */
582 if (wl_egl_display->tss) {
583 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
584 wl_egl_display->ev_queue);
585 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
588 if (wl_egl_display->presentation) {
589 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
590 wl_egl_display->ev_queue);
591 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
592 wl_egl_display->presentation);
595 if (wl_egl_display->explicit_sync) {
596 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
597 wl_egl_display->ev_queue);
598 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
599 wl_egl_display->explicit_sync);
602 wl_egl_display->wl_initialized = TPL_TRUE;
604 TPL_INFO("[WAYLAND_INIT]",
605 "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
606 wl_egl_display, wl_egl_display->wl_display,
607 wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
608 #if TIZEN_FEATURE_ENABLE
609 TPL_INFO("[WAYLAND_INIT]",
610 "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
611 wl_egl_display->tss, wl_egl_display->presentation,
612 wl_egl_display->explicit_sync);
/* cleanup (reached via elided labels): temporaries only — ev_queue and
 * wl_tbm_client survive on success. */
616 wl_proxy_wrapper_destroy(display_wrapper);
618 wl_registry_destroy(registry);
620 wl_event_queue_destroy(queue);
/* Tear down everything _thread_wl_display_init() created, in reverse:
 * cancel a pending read, flush pending events, destroy the bound Tizen
 * globals, deinit wayland-tbm, and destroy the private event queue.
 * Runs on wl_egl_thread from __thread_func_disp_finalize(). */
626 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
628 /* If wl_egl_display is in prepared state, cancel it */
629 if (wl_egl_display->prepared) {
630 wl_display_cancel_read(wl_egl_display->wl_display);
631 wl_egl_display->prepared = TPL_FALSE;
/* drain anything already queued so callbacks don't fire after fini. */
634 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
635 wl_egl_display->ev_queue) == -1) {
636 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
638 #if TIZEN_FEATURE_ENABLE
639 if (wl_egl_display->tss) {
640 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
641 "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
642 wl_egl_display, wl_egl_display->tss);
643 tizen_surface_shm_destroy(wl_egl_display->tss);
644 wl_egl_display->tss = NULL;
647 if (wl_egl_display->presentation) {
648 TPL_INFO("[WP_PRESENTATION_DESTROY]",
649 "wl_egl_display(%p) wp_presentation(%p) fini.",
650 wl_egl_display, wl_egl_display->presentation);
651 wp_presentation_destroy(wl_egl_display->presentation);
652 wl_egl_display->presentation = NULL;
655 if (wl_egl_display->explicit_sync) {
656 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
657 "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
658 wl_egl_display, wl_egl_display->explicit_sync);
659 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
660 wl_egl_display->explicit_sync = NULL;
663 if (wl_egl_display->wl_tbm_client) {
664 struct wl_proxy *wl_tbm = NULL;
666 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
667 wl_egl_display->wl_tbm_client);
/* detach wl_tbm from ev_queue before the queue is destroyed below. */
669 wl_proxy_set_queue(wl_tbm, NULL);
672 TPL_INFO("[WL_TBM_DEINIT]",
673 "wl_egl_display(%p) wl_tbm_client(%p)",
674 wl_egl_display, wl_egl_display->wl_tbm_client);
675 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
676 wl_egl_display->wl_tbm_client = NULL;
679 wl_event_queue_destroy(wl_egl_display->ev_queue);
681 wl_egl_display->wl_initialized = TPL_FALSE;
683 TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
684 wl_egl_display, wl_egl_display->wl_display);
/* Thread entry hook for wl_egl_thread (passed to tpl_gthread_create):
 * initializes the wayland side and, when vblank waiting is enabled, the
 * tdm client. tdm failure only disables vblank waiting; wayland failure
 * is fatal (the early-return path is elided in this listing).
 * Returns its wl_egl_display argument on success. */
688 _thread_init(void *data)
690 tpl_wl_egl_display_t *wl_egl_display = (tpl_wl_egl_display_t *)data;
692 if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
693 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
694 wl_egl_display, wl_egl_display->wl_display);
697 if (wl_egl_display->use_wait_vblank &&
698 _thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
699 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VLANK:DISABLED");
702 return wl_egl_display;
/* gsource prepare for the display fd: follows the standard wayland
 * prepare-read protocol — dispatch anything already queued until
 * prepare_read succeeds, then flush outgoing requests before poll().
 * Skips straight to dispatch when a fatal last_error is recorded. */
706 __thread_func_disp_prepare(tpl_gsource *gsource)
708 tpl_wl_egl_display_t *wl_egl_display =
709 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
711 /* If this wl_egl_display is already prepared,
712 * do nothing in this function. */
713 if (wl_egl_display->prepared)
716 /* If there is a last_error, there is no need to poll,
717 * so skip directly to dispatch.
718 * prepare -> dispatch */
719 if (wl_egl_display->last_error)
722 while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
723 wl_egl_display->ev_queue) != 0) {
724 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
725 wl_egl_display->ev_queue) == -1) {
726 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
730 wl_egl_display->prepared = TPL_TRUE;
732 wl_display_flush(wl_egl_display->wl_display);
/* gsource check for the display fd: pairs with disp_prepare — either
 * read_events (fd became readable) or cancel_read, always clearing the
 * 'prepared' flag. The return-value statements are elided in this
 * listing. */
738 __thread_func_disp_check(tpl_gsource *gsource)
740 tpl_wl_egl_display_t *wl_egl_display =
741 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
742 tpl_bool_t ret = TPL_FALSE;
744 if (!wl_egl_display->prepared)
747 /* If prepared, but last_error is set,
748 * cancel_read is executed and FALSE is returned.
749 * That can lead to G_SOURCE_REMOVE by calling disp_prepare again
750 * and skipping disp_check from prepare to disp_dispatch.
751 * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
752 if (wl_egl_display->prepared && wl_egl_display->last_error) {
753 wl_display_cancel_read(wl_egl_display->wl_display);
757 if (tpl_gsource_check_io_condition(gsource)) {
758 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
759 _wl_display_print_err(wl_egl_display, "read_event");
762 wl_display_cancel_read(wl_egl_display->wl_display);
766 wl_egl_display->prepared = TPL_FALSE;
/* gsource dispatch for the display fd: under wl_event_mutex, dispatches
 * the events read in disp_check and flushes outgoing requests. Returns
 * G_SOURCE_REMOVE (statement elided) once last_error is set, detaching
 * the source from the loop. */
772 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
774 tpl_wl_egl_display_t *wl_egl_display =
775 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
779 /* If there is last_error, SOURCE_REMOVE should be returned
780 * to remove the gsource from the main loop.
781 * This is because wl_egl_display is not valid since last_error was set.*/
782 if (wl_egl_display->last_error) {
/* serialize with API-thread users of ev_queue. */
786 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
787 if (tpl_gsource_check_io_condition(gsource)) {
788 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
789 wl_egl_display->ev_queue) == -1) {
790 _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
794 wl_display_flush(wl_egl_display->wl_display);
795 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* gsource finalize for the display fd: tears down the wayland state
 * created by _thread_wl_display_init() (only if it was completed). */
801 __thread_func_disp_finalize(tpl_gsource *gsource)
803 tpl_wl_egl_display_t *wl_egl_display =
804 (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
806 if (wl_egl_display->wl_initialized)
807 _thread_wl_display_fini(wl_egl_display);
809 TPL_LOG_T("WL_EGL", "finalize| wl_egl_display(%p) tpl_gsource(%p)",
810 wl_egl_display, gsource);
/* gsource vtable for the wl_display fd source; implements the full
 * wayland prepare/check/dispatch read protocol on wl_egl_thread. */
816 static tpl_gsource_functions disp_funcs = {
817 .prepare = __thread_func_disp_prepare,
818 .check = __thread_func_disp_check,
819 .dispatch = __thread_func_disp_dispatch,
820 .finalize = __thread_func_disp_finalize,
/* Backend entry point: validate the native wl_display, allocate the
 * backend state, spawn wl_egl_thread (whose init hook performs wayland
 * and tdm setup), then attach the display fd — and optionally the tdm
 * fd — as gsources on that thread.
 * Returns TPL_ERROR_NONE, TPL_ERROR_INVALID_PARAMETER,
 * TPL_ERROR_OUT_OF_MEMORY, or TPL_ERROR_INVALID_OPERATION. The goto
 * labels for the failure path are elided in this listing; the trailing
 * block starting at 'if (wl_egl_display->thread)' is that cleanup. */
824 __tpl_wl_egl_display_init(tpl_display_t *display)
826 tpl_wl_egl_display_t *wl_egl_display = NULL;
830 /* Do not allow default display in wayland. */
831 if (!display->native_handle) {
832 TPL_ERR("Invalid native handle for display.");
833 return TPL_ERROR_INVALID_PARAMETER;
836 if (!_check_native_handle_is_wl_display(display->native_handle)) {
837 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
838 return TPL_ERROR_INVALID_PARAMETER;
841 wl_egl_display = (tpl_wl_egl_display_t *) calloc(1,
842 sizeof(tpl_wl_egl_display_t));
843 if (!wl_egl_display) {
844 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
845 return TPL_ERROR_OUT_OF_MEMORY;
848 display->backend.data = wl_egl_display;
849 display->bufmgr_fd = -1;
851 wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
852 wl_egl_display->tdm.tdm_client = NULL;
853 wl_egl_display->tdm.tdm_display_fd = -1;
854 wl_egl_display->tdm.tdm_source = NULL;
856 wl_egl_display->wl_initialized = TPL_FALSE;
858 wl_egl_display->ev_queue = NULL;
859 wl_egl_display->wl_display = (struct wl_display *)display->native_handle;
860 wl_egl_display->last_error = 0;
861 wl_egl_display->use_tss = TPL_FALSE;
862 wl_egl_display->use_explicit_sync = TPL_FALSE; // default disabled
863 wl_egl_display->prepared = TPL_FALSE;
865 #if TIZEN_FEATURE_ENABLE
866 /* Wayland Interfaces */
867 wl_egl_display->tss = NULL;
868 wl_egl_display->presentation = NULL;
869 wl_egl_display->explicit_sync = NULL;
871 wl_egl_display->wl_tbm_client = NULL;
873 wl_egl_display->use_wait_vblank = TPL_TRUE; // default enabled
/* TPL_WAIT_VBLANK=0 opts out of vblank-synchronized commits. */
875 char *env = tpl_getenv("TPL_WAIT_VBLANK");
876 if (env && !atoi(env)) {
877 wl_egl_display->use_wait_vblank = TPL_FALSE;
881 tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
/* _thread_init runs on the new thread and performs wayland/tdm setup
 * before this call returns. */
884 wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
885 (tpl_gthread_func)_thread_init,
886 (void *)wl_egl_display);
887 if (!wl_egl_display->thread) {
888 TPL_ERR("Failed to create wl_egl_thread");
892 wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
893 (void *)wl_egl_display,
894 wl_display_get_fd(wl_egl_display->wl_display),
895 &disp_funcs, SOURCE_TYPE_NORMAL);
896 if (!wl_egl_display->disp_source) {
897 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
898 display->native_handle,
899 wl_egl_display->thread);
903 if (wl_egl_display->use_wait_vblank &&
904 wl_egl_display->tdm.tdm_initialized) {
905 wl_egl_display->tdm.tdm_source = tpl_gsource_create(wl_egl_display->thread,
906 (void *)wl_egl_display,
907 wl_egl_display->tdm.tdm_display_fd,
908 &tdm_funcs, SOURCE_TYPE_NORMAL);
909 if (!wl_egl_display->tdm.tdm_source) {
910 TPL_ERR("Failed to create tdm_gsource\n");
/* vblank waiting stays enabled only if tdm fully came up. */
915 wl_egl_display->use_wait_vblank = (wl_egl_display->tdm.tdm_initialized &&
916 (wl_egl_display->tdm.tdm_source != NULL));
918 TPL_INFO("[DISPLAY_INIT]",
919 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
921 wl_egl_display->thread,
922 wl_egl_display->wl_display);
924 TPL_INFO("[DISPLAY_INIT]",
925 "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
926 wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
927 wl_egl_display->use_tss ? "TRUE" : "FALSE",
928 wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
930 return TPL_ERROR_NONE;
/* failure path (reached via elided goto labels): destroy sources first
 * (TPL_TRUE — presumably blocking until finalized; confirm in
 * tpl_utils_gthread), then the thread, then the allocation. */
933 if (wl_egl_display->thread) {
934 if (wl_egl_display->tdm.tdm_source)
935 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
936 if (wl_egl_display->disp_source)
937 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
939 tpl_gthread_destroy(wl_egl_display->thread);
942 wl_egl_display->thread = NULL;
943 free(wl_egl_display);
945 display->backend.data = NULL;
946 return TPL_ERROR_INVALID_OPERATION;
/* Backend display teardown: destroy the tdm and display gsources (which
 * run their finalize hooks on wl_egl_thread), stop the thread, clear the
 * mutex, and free the backend state. Safe to call when backend.data is
 * already NULL. */
950 __tpl_wl_egl_display_fini(tpl_display_t *display)
952 tpl_wl_egl_display_t *wl_egl_display;
956 wl_egl_display = (tpl_wl_egl_display_t *)display->backend.data;
957 if (wl_egl_display) {
958 TPL_INFO("[DISPLAY_FINI]",
959 "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
961 wl_egl_display->thread,
962 wl_egl_display->wl_display);
964 if (wl_egl_display->tdm.tdm_source && wl_egl_display->tdm.tdm_initialized) {
965 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
966 wl_egl_display->tdm.tdm_source = NULL;
969 if (wl_egl_display->disp_source) {
970 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
971 wl_egl_display->disp_source = NULL;
974 if (wl_egl_display->thread) {
975 tpl_gthread_destroy(wl_egl_display->thread);
976 wl_egl_display->thread = NULL;
979 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
981 free(wl_egl_display);
984 display->backend.data = NULL;
/* EGL config query: this backend supports only 8/8/8 window surfaces at
 * 24 or 32 bpp — alpha 8 maps to ARGB8888, alpha 0 to XRGB8888. Output
 * pointers are optional. Returns TPL_ERROR_NONE on a supported combo,
 * TPL_ERROR_INVALID_PARAMETER otherwise. (The final 'is_slow' parameter
 * of the signature is elided in this listing.) */
988 __tpl_wl_egl_display_query_config(tpl_display_t *display,
989 tpl_surface_type_t surface_type,
990 int red_size, int green_size,
991 int blue_size, int alpha_size,
992 int color_depth, int *native_visual_id,
997 if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
998 green_size == 8 && blue_size == 8 &&
999 (color_depth == 32 || color_depth == 24)) {
1001 if (alpha_size == 8) {
1002 if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
1003 if (is_slow) *is_slow = TPL_FALSE;
1004 return TPL_ERROR_NONE;
1006 if (alpha_size == 0) {
1007 if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
1008 if (is_slow) *is_slow = TPL_FALSE;
1009 return TPL_ERROR_NONE;
1013 return TPL_ERROR_INVALID_PARAMETER;
/* Config filter hook: intentionally a pass-through — no config is
 * rejected by this backend; always returns TPL_ERROR_NONE. */
1017 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
1020 TPL_IGNORE(display);
1021 TPL_IGNORE(visual_id);
1022 TPL_IGNORE(alpha_size);
1023 return TPL_ERROR_NONE;
/* Report a wl_egl_window's size and pixel format. If a backend surface is
 * already attached (tizen_private->data), its format wins; otherwise the
 * format is derived from a_size (ARGB8888 for alpha 8, XRGB8888
 * otherwise — the branch structure around lines 1052-1056 is elided in
 * this listing). Output pointers are optional. */
1027 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
1028 tpl_handle_t window, int *width,
1029 int *height, tbm_format *format,
1030 int depth, int a_size)
1032 tpl_result_t ret = TPL_ERROR_NONE;
1033 struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
1035 TPL_ASSERT(display);
1038 if (!wl_egl_window) {
1039 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
1040 return TPL_ERROR_INVALID_PARAMETER;
1043 if (width) *width = wl_egl_window->width;
1044 if (height) *height = wl_egl_window->height;
1046 struct tizen_private *tizen_private =
1047 (struct tizen_private *)wl_egl_window->driver_private;
1048 if (tizen_private && tizen_private->data) {
1049 tpl_wl_egl_surface_t *wl_egl_surface =
1050 (tpl_wl_egl_surface_t *)tizen_private->data;
1051 *format = wl_egl_surface->format;
1054 *format = TBM_FORMAT_ARGB8888;
1056 *format = TBM_FORMAT_XRGB8888;
/* Report a native pixmap's size and format by resolving its backing
 * tbm_surface through wayland-tbm. Output pointers are optional.
 * Returns TPL_ERROR_NONE or TPL_ERROR_INVALID_PARAMETER. */
1064 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
1065 tpl_handle_t pixmap, int *width,
1066 int *height, tbm_format *format)
1068 tbm_surface_h tbm_surface = NULL;
1071 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
1072 return TPL_ERROR_INVALID_PARAMETER;
1075 tbm_surface = wayland_tbm_server_get_surface(NULL,
1076 (struct wl_resource *)pixmap);
1078 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
1079 return TPL_ERROR_INVALID_PARAMETER;
1082 if (width) *width = tbm_surface_get_width(tbm_surface);
1083 if (height) *height = tbm_surface_get_height(tbm_surface);
1084 if (format) *format = tbm_surface_get_format(tbm_surface);
1086 return TPL_ERROR_NONE;
/* Resolve the tbm_surface backing a native pixmap via wayland-tbm.
 * Returns the tbm_surface handle, or NULL on failure (return statements
 * elided in this listing). */
1089 static tbm_surface_h
1090 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
1092 tbm_surface_h tbm_surface = NULL;
1096 tbm_surface = wayland_tbm_server_get_surface(NULL,
1097 (struct wl_resource *)pixmap);
1099 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
/* Backend selector: TRUE when the native display is a wl_display — same
 * magic check as _check_native_handle_is_wl_display() (pointer identity
 * against wl_display_interface, with a name-compare fallback; returns
 * elided in this listing). */
1107 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
1109 struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1111 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1113 /* MAGIC CHECK: A native display handle is a wl_display if the de-referenced first value
1114 is a memory address pointing the structure of wl_display_interface. */
1115 if (wl_egl_native_dpy == &wl_display_interface)
1118 if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1119 strlen(wl_display_interface.name)) == 0) {
1126 /* -- BEGIN -- wl_egl_window callback functions */
/* wl_egl_window destroy hook: normally the EGL surface is destroyed
 * first and 'data' is already NULL. If the app destroys the native
 * window while the surface is alive (abnormal), detach every
 * window/tizen_private link under surf_mutex and free tizen_private so
 * later backend code sees NULL instead of dangling pointers. */
1128 __cb_destroy_callback(void *private)
1130 struct tizen_private *tizen_private = (struct tizen_private *)private;
1131 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1133 if (!tizen_private) {
1134 TPL_LOG_B("WL_EGL", "[DESTROY_CB] Already destroyed surface");
1138 wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1139 if (wl_egl_surface) {
1140 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1141 wl_egl_surface->wl_egl_window);
1142 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
1144 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1145 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1146 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1147 wl_egl_surface->wl_egl_window->driver_private = NULL;
1148 wl_egl_surface->wl_egl_window = NULL;
1149 wl_egl_surface->wl_surface = NULL;
1151 tizen_private->set_window_serial_callback = NULL;
1152 tizen_private->rotate_callback = NULL;
1153 tizen_private->get_rotation_capability = NULL;
1154 tizen_private->set_frontbuffer_callback = NULL;
1155 tizen_private->create_commit_sync_fd = NULL;
1156 tizen_private->create_presentation_sync_fd = NULL;
1157 tizen_private->data = NULL;
/* tizen_private was allocated in tizen_private_create(); the surface
 * itself stays alive and is torn down by the EGL path. */
1159 free(tizen_private);
1160 tizen_private = NULL;
1161 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* wl_egl_window resize hook (called from the app thread): resets the
 * tbm_surface_queue to the window's requested size so subsequently
 * dequeued buffers match. The surface's own width/height are updated
 * elsewhere when the queue-reset callback fires. */
1166 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1168 TPL_ASSERT(private);
1169 TPL_ASSERT(wl_egl_window);
1171 struct tizen_private *tizen_private = (struct tizen_private *)private;
1172 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1173 int cur_w, cur_h, req_w, req_h, format;
1175 if (!wl_egl_surface) {
1176 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1181 format = wl_egl_surface->format;
1182 cur_w = wl_egl_surface->width;
1183 cur_h = wl_egl_surface->height;
1184 req_w = wl_egl_window->width;
1185 req_h = wl_egl_window->height;
1187 TPL_INFO("[WINDOW_RESIZE]",
1188 "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1189 wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1191 if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1192 != TBM_SURFACE_QUEUE_ERROR_NONE) {
1193 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1197 /* -- END -- wl_egl_window callback functions */
1199 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1201 /* There is no usecase for using prerotation callback below */
/* tizen_private rotate hook: records the window's new rotation on the
 * surface; the value is applied to buffers at commit time. */
1203 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1205 TPL_ASSERT(private);
1206 TPL_ASSERT(wl_egl_window);
1208 struct tizen_private *tizen_private = (struct tizen_private *)private;
1209 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1210 int rotation = tizen_private->rotation;
1212 if (!wl_egl_surface) {
1213 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1218 TPL_INFO("[WINDOW_ROTATE]",
1219 "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1220 wl_egl_surface, wl_egl_window,
1221 wl_egl_surface->rotation, rotation);
1223 wl_egl_surface->rotation = rotation;
1226 /* There is no usecase for using prerotation callback below */
/* Reports whether this surface supports pre-rotation.
 * Returns WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED when the
 * surface's prerotation_capability flag is set, _UNSUPPORTED otherwise,
 * and _NONE if the surface pointer is missing. */
1228 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1231 TPL_ASSERT(private);
1232 TPL_ASSERT(wl_egl_window);
1234 int rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1235 struct tizen_private *tizen_private = (struct tizen_private *)private;
1236 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1238 if (!wl_egl_surface) {
1239 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
/* Surface already gone: answer with the neutral NONE capability. */
1241 return rotation_capability;
1244 if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1245 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1247 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1250 return rotation_capability;
/* Stores an application-provided window serial on the surface and marks
 * that an explicit serial is in use (buffers then inherit this serial
 * instead of the auto-incremented one; see _wl_egl_buffer_init). */
1254 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1255 void *private, unsigned int serial)
1257 TPL_ASSERT(private);
1258 TPL_ASSERT(wl_egl_window);
1260 struct tizen_private *tizen_private = (struct tizen_private *)private;
1261 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1263 if (!wl_egl_surface) {
1264 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1269 wl_egl_surface->set_serial_is_used = TPL_TRUE;
1270 wl_egl_surface->serial = serial;
/* Returns a dup()'d fd of the surface's commit-sync eventfd, creating the
 * eventfd lazily (EFD_CLOEXEC) on first call. The caller owns the returned
 * fd and must close it; -1 is returned on failure. All access to
 * commit_sync.fd is serialized by commit_sync.mutex. */
1274 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1276 TPL_ASSERT(private);
1277 TPL_ASSERT(wl_egl_window);
1279 int commit_sync_fd = -1;
1281 struct tizen_private *tizen_private = (struct tizen_private *)private;
1282 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1284 if (!wl_egl_surface) {
1285 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
1289 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
/* Fast path: eventfd already exists, just hand out another dup. */
1291 if (wl_egl_surface->commit_sync.fd != -1) {
1292 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1293 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1294 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1295 TPL_DEBUG("[DUP_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1296 wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1297 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1298 return commit_sync_fd;
/* Slow path: create the backing eventfd, then dup it for the caller. */
1301 wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1302 if (wl_egl_surface->commit_sync.fd == -1) {
1303 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)",
1305 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1309 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1311 TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1312 wl_egl_surface->commit_sync.fd, commit_sync_fd);
1313 TPL_DEBUG("[CREATE_COMMIT_SYNC] wl_egl_surface(%p) commit_sync_fd(%d)",
1314 wl_egl_surface, commit_sync_fd);
1316 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1318 return commit_sync_fd;
1321 #if TIZEN_FEATURE_ENABLE
/* Presentation-sync counterpart of __cb_create_commit_sync_fd: lazily
 * creates presentation_sync.fd as an eventfd (EFD_CLOEXEC) and returns a
 * dup() owned by the caller, or -1 on failure. Guarded by
 * presentation_sync.mutex. Only built when TIZEN_FEATURE_ENABLE is set. */
1323 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1325 TPL_ASSERT(private);
1326 TPL_ASSERT(wl_egl_window);
1328 int presentation_sync_fd = -1;
1330 struct tizen_private *tizen_private = (struct tizen_private *)private;
1331 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)tizen_private->data;
1333 if (!wl_egl_surface) {
1334 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
1338 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Fast path: the eventfd already exists, dup it for this caller. */
1339 if (wl_egl_surface->presentation_sync.fd != -1) {
1340 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1341 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1342 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1343 TPL_DEBUG("[DUP_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1344 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1345 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1346 return presentation_sync_fd;
/* Slow path: create the backing eventfd, then dup it for the caller. */
1349 wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1350 if (wl_egl_surface->presentation_sync.fd == -1) {
1351 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)",
1353 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1357 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1358 TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1359 wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1360 TPL_DEBUG("[CREATE_PRESENTATION_SYNC] wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1361 wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1363 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1365 return presentation_sync_fd;
1367 /* -- END -- wl_egl_window tizen private callback functions */
1369 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
/* tizen_surface_shm_flusher "flush" event: the compositor asked this
 * client to flush its buffers; forward the request to the tbm queue. */
1370 static void __cb_tss_flusher_flush_callback(void *data,
1371 struct tizen_surface_shm_flusher *tss_flusher)
1373 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1374 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1376 TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1377 wl_egl_surface, wl_egl_surface->tbm_queue);
1379 _print_buffer_lists(wl_egl_surface);
1381 tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1382 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1383 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* tizen_surface_shm_flusher "free_flush" event: like the flush event but
 * only free (unused) buffers are flushed via tbm_surface_queue_free_flush. */
1388 static void __cb_tss_flusher_free_flush_callback(void *data,
1389 struct tizen_surface_shm_flusher *tss_flusher)
1391 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1392 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1394 TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1395 wl_egl_surface, wl_egl_surface->tbm_queue);
1397 _print_buffer_lists(wl_egl_surface);
1399 tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1400 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1401 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
/* Listener vtable wiring the tizen_surface_shm_flusher protocol events to
 * the flush callbacks above (registered in _thread_wl_egl_surface_init). */
1406 static const struct tizen_surface_shm_flusher_listener
1407 tss_flusher_listener = {
1408 __cb_tss_flusher_flush_callback,
1409 __cb_tss_flusher_free_flush_callback
1411 /* -- END -- tizen_surface_shm_flusher_listener */
1414 /* -- BEGIN -- tbm_surface_queue callback funstions */
/* tbm_surface_queue reset callback.
 * Fired when the queue is reset (e.g. after a resize or an
 * activate/deactivate transition from the compositor). Sets
 * wl_egl_surface->reset so the next frame picks up the new size/state,
 * and notifies the tpl_surface owner through its reset_cb. */
1416 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1419 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1420 tpl_wl_egl_display_t *wl_egl_display = NULL;
1421 tpl_surface_t *surface = NULL;
1422 tpl_bool_t is_activated = TPL_FALSE;
1425 wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1426 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1428 wl_egl_display = wl_egl_surface->wl_egl_display;
1429 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1431 surface = wl_egl_surface->tpl_surface;
1432 TPL_CHECK_ON_NULL_RETURN(surface);
1434 /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
1435 * the changed window size at the next frame. */
1436 width = tbm_surface_queue_get_width(tbm_queue);
1437 height = tbm_surface_queue_get_height(tbm_queue);
1438 if (surface->width != width || surface->height != height) {
1439 TPL_INFO("[QUEUE_RESIZE]",
1440 "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1441 wl_egl_surface, tbm_queue,
1442 surface->width, surface->height, width, height);
1445 /* When queue_reset_callback is called, if is_activated is different from
1446 * its previous state change the reset flag to TPL_TRUE to get a new buffer
1447 * with the changed state(ACTIVATED/DEACTIVATED) at the next frame. */
1448 is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1449 wl_egl_surface->tbm_queue);
1450 if (wl_egl_surface->is_activated != is_activated) {
1452 TPL_INFO("[ACTIVATED]",
1453 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1454 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1456 TPL_LOG_T("[DEACTIVATED]",
1457 " wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1458 wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
/* Mark the surface dirty and let the owner react (e.g. recreate buffers). */
1462 wl_egl_surface->reset = TPL_TRUE;
1464 if (surface->reset_cb)
1465 surface->reset_cb(surface->reset_data);
/* tbm_surface_queue acquirable callback.
 * A buffer became acquirable; wake the surface worker thread with an
 * ACQUIRABLE message. sent_message acts as a pending-flag so only one
 * message is in flight at a time (guarded by surf_mutex). */
1469 __cb_tbm_queue_acquirable_callback(tbm_surface_queue_h tbm_queue,
1472 TPL_IGNORE(tbm_queue);
1474 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)data;
1475 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1477 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1478 if (wl_egl_surface->sent_message == NONE_MESSAGE) {
1479 wl_egl_surface->sent_message = ACQUIRABLE;
1480 tpl_gsource_send_message(wl_egl_surface->surf_source,
1481 wl_egl_surface->sent_message);
1483 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1485 /* -- END -- tbm_surface_queue callback funstions */
/* Surface teardown executed on the worker thread (from the gsource
 * finalize path). Under surf_mutex it:
 *  - signals and closes any pending presentation feedbacks/sync fds
 *    (TIZEN_FEATURE_ENABLE only),
 *  - destroys the explicit-sync surface_sync and the shm tss_flusher,
 *  - frees the vblank waiting list and detaches this surface's vblank
 *    entry from the display's surface_vblanks list,
 *  - destroys the tbm_surface_queue. */
1488 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1490 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1492 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1494 TPL_INFO("[SURFACE_FINI]",
1495 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1496 wl_egl_surface, wl_egl_surface->wl_egl_window,
1497 wl_egl_surface->wl_surface);
1498 #if TIZEN_FEATURE_ENABLE
1499 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
/* Drain outstanding presentation feedbacks: unblock their waiters by
 * writing to each eventfd before closing and destroying the protocol
 * object. */
1501 if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1502 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1503 struct pst_feedback *pst_feedback =
1504 (struct pst_feedback *)__tpl_list_pop_front(
1505 wl_egl_surface->presentation_feedbacks, NULL);
1507 _write_to_eventfd(pst_feedback->pst_sync_fd);
1508 close(pst_feedback->pst_sync_fd);
1509 pst_feedback->pst_sync_fd = -1;
1511 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1512 pst_feedback->presentation_feedback = NULL;
1518 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1519 wl_egl_surface->presentation_feedbacks = NULL;
/* Wake anyone waiting on the surface-level presentation sync fd, too. */
1522 if (wl_egl_surface->presentation_sync.fd != -1) {
1523 _write_to_eventfd(wl_egl_surface->presentation_sync.fd);
1524 close(wl_egl_surface->presentation_sync.fd);
1525 wl_egl_surface->presentation_sync.fd = -1;
1528 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1530 if (wl_egl_surface->surface_sync) {
1531 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1532 "wl_egl_surface(%p) surface_sync(%p)",
1533 wl_egl_surface, wl_egl_surface->surface_sync);
1534 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1535 wl_egl_surface->surface_sync = NULL;
1538 if (wl_egl_surface->tss_flusher) {
1539 TPL_INFO("[FLUSHER_DESTROY]",
1540 "wl_egl_surface(%p) tss_flusher(%p)",
1541 wl_egl_surface, wl_egl_surface->tss_flusher);
1542 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1543 wl_egl_surface->tss_flusher = NULL;
/* Drop buffers still waiting for vblank, then remove (and free, via
 * __cb_surface_vblank_free) this surface's vblank entry. */
1546 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
1547 __tpl_list_free(wl_egl_surface->vblank->waiting_buffers, NULL);
1548 wl_egl_surface->vblank->waiting_buffers = NULL;
1551 if (wl_egl_surface->vblank) {
1552 __tpl_list_remove_data(wl_egl_display->tdm.surface_vblanks,
1553 (void *)wl_egl_surface->vblank,
1555 __cb_surface_vblank_free);
1556 wl_egl_surface->vblank = NULL;
1559 if (wl_egl_surface->tbm_queue) {
1560 TPL_INFO("[TBM_QUEUE_DESTROY]",
1561 "wl_egl_surface(%p) tbm_queue(%p)",
1562 wl_egl_surface, wl_egl_surface->tbm_queue);
1563 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1564 wl_egl_surface->tbm_queue = NULL;
1567 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource dispatch for the per-surface worker: handles the two messages
 * posted via tpl_gsource_send_message. INIT_SURFACE performs thread-side
 * init and signals the waiting creator (see __tpl_wl_egl_surface_init);
 * ACQUIRABLE drains acquirable buffers from the tbm queue. */
1571 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1573 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1575 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1577 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1578 if (message == INIT_SURFACE) { /* Initialize surface */
1579 TPL_DEBUG("wl_egl_surface(%p) initialize message received!",
1581 _thread_wl_egl_surface_init(wl_egl_surface);
1582 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1583 } else if (message == ACQUIRABLE) { /* Acquirable */
1584 TPL_DEBUG("wl_egl_surface(%p) acquirable message received!",
1586 _thread_surface_queue_acquire(wl_egl_surface);
/* Clear the pending flag so the next acquirable event sends again. */
1589 wl_egl_surface->sent_message = NONE_MESSAGE;
1591 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
/* gsource finalize hook for the surface worker: runs the thread-side
 * teardown when the surf_source is destroyed. */
1597 __thread_func_surf_finalize(tpl_gsource *gsource)
1599 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1601 wl_egl_surface = (tpl_wl_egl_surface_t *)tpl_gsource_get_data(gsource);
1602 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1604 _thread_wl_egl_surface_fini(wl_egl_surface);
1606 TPL_DEBUG("[FINALIZE] wl_egl_surface(%p) tpl_gsource(%p)",
1607 wl_egl_surface, gsource);
/* gsource vtable for the per-surface worker source. */
1610 static tpl_gsource_functions surf_funcs = {
1613 .dispatch = __thread_func_surf_dispatch,
1614 .finalize = __thread_func_surf_finalize,
/* Backend entry point: initializes the wl_egl backend for a window-type
 * tpl_surface. Allocates a tpl_wl_egl_surface_t, creates its worker
 * gsource on the display thread, wires the wl_egl_window / tizen_private
 * callbacks, initializes the sync mutexes, then posts INIT_SURFACE to the
 * worker and blocks on surf_cond until the thread-side init (tbm queue
 * creation etc.) has completed.
 * Returns TPL_ERROR_NONE on success, TPL_ERROR_INVALID_PARAMETER /
 * TPL_ERROR_OUT_OF_MEMORY / TPL_ERROR_INVALID_OPERATION on failure. */
1618 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1620 tpl_wl_egl_display_t *wl_egl_display = NULL;
1621 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
1622 tpl_gsource *surf_source = NULL;
1624 struct wl_egl_window *wl_egl_window =
1625 (struct wl_egl_window *)surface->native_handle;
1627 TPL_ASSERT(surface);
1628 TPL_ASSERT(surface->display);
1629 TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1630 TPL_ASSERT(surface->native_handle);
1633 (tpl_wl_egl_display_t *)surface->display->backend.data;
1634 if (!wl_egl_display) {
1635 TPL_ERR("Invalid parameter. wl_egl_display(%p)",
1637 return TPL_ERROR_INVALID_PARAMETER;
1640 wl_egl_surface = (tpl_wl_egl_surface_t *) calloc(1,
1641 sizeof(tpl_wl_egl_surface_t));
1642 if (!wl_egl_surface) {
1643 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1644 return TPL_ERROR_OUT_OF_MEMORY;
/* The worker source lives on the shared display thread. */
1647 surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1648 -1, &surf_funcs, SOURCE_TYPE_NORMAL);
1650 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1652 goto surf_source_create_fail;
1655 surface->backend.data = (void *)wl_egl_surface;
1656 surface->width = wl_egl_window->width;
1657 surface->height = wl_egl_window->height;
1658 surface->rotation = 0;
/* Cache window geometry and surface parameters on the backend object. */
1660 wl_egl_surface->tpl_surface = surface;
1661 wl_egl_surface->width = wl_egl_window->width;
1662 wl_egl_surface->height = wl_egl_window->height;
1663 wl_egl_surface->format = surface->format;
1664 wl_egl_surface->num_buffers = surface->num_buffers;
1666 wl_egl_surface->surf_source = surf_source;
1667 wl_egl_surface->wl_egl_window = wl_egl_window;
1668 wl_egl_surface->wl_surface = wl_egl_window->surface;
1670 wl_egl_surface->wl_egl_display = wl_egl_display;
/* Default state flags before the thread-side init runs. */
1672 wl_egl_surface->reset = TPL_FALSE;
1673 wl_egl_surface->is_activated = TPL_FALSE;
1674 wl_egl_surface->need_to_enqueue = TPL_TRUE;
1675 wl_egl_surface->prerotation_capability = TPL_FALSE;
1676 wl_egl_surface->vblank_done = TPL_TRUE;
1677 wl_egl_surface->use_render_done_fence = TPL_FALSE;
1678 wl_egl_surface->set_serial_is_used = TPL_FALSE;
1680 wl_egl_surface->latest_transform = -1;
1681 wl_egl_surface->render_done_cnt = 0;
1682 wl_egl_surface->serial = 0;
1684 wl_egl_surface->vblank = NULL;
1685 #if TIZEN_FEATURE_ENABLE
1686 wl_egl_surface->tss_flusher = NULL;
1687 wl_egl_surface->surface_sync = NULL;
1690 wl_egl_surface->post_interval = surface->post_interval;
/* Sync fds are created lazily; -1 marks "not created yet". */
1692 wl_egl_surface->commit_sync.fd = -1;
1693 wl_egl_surface->presentation_sync.fd = -1;
1695 wl_egl_surface->sent_message = NONE_MESSAGE;
1699 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
1700 wl_egl_surface->buffers[i] = NULL;
1701 wl_egl_surface->buffer_cnt = 0;
/* Attach (or reuse) the window's tizen_private and hook the callbacks
 * through which wayland-egl-tizen talks back to this backend. */
1705 struct tizen_private *tizen_private = NULL;
1707 if (wl_egl_window->driver_private)
1708 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1710 tizen_private = tizen_private_create();
1711 wl_egl_window->driver_private = (void *)tizen_private;
1714 if (tizen_private) {
1715 tizen_private->data = (void *)wl_egl_surface;
1716 tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1717 tizen_private->get_rotation_capability = (void *)
1718 __cb_get_rotation_capability;
1719 tizen_private->set_window_serial_callback = (void *)
1720 __cb_set_window_serial_callback;
1721 tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1722 #if TIZEN_FEATURE_ENABLE
1723 tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1725 tizen_private->create_presentation_sync_fd = NULL;
1728 wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1729 wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1733 tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1734 tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1736 tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1738 tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1739 tpl_gcond_init(&wl_egl_surface->surf_cond);
1741 /* Initialize in thread */
/* Hand off to the worker thread and wait for it to finish
 * _thread_wl_egl_surface_init (it signals surf_cond when done). */
1742 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1743 wl_egl_surface->sent_message = INIT_SURFACE;
1744 tpl_gsource_send_message(wl_egl_surface->surf_source,
1745 wl_egl_surface->sent_message);
1746 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1747 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1749 TPL_ASSERT(wl_egl_surface->tbm_queue);
1751 TPL_INFO("[SURFACE_INIT]",
1752 "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1753 surface, wl_egl_surface, wl_egl_surface->surf_source);
1755 return TPL_ERROR_NONE;
1757 surf_source_create_fail:
1758 free(wl_egl_surface);
1759 surface->backend.data = NULL;
1760 return TPL_ERROR_INVALID_OPERATION;
/* Creates the surface's tbm_surface_queue via wayland-tbm (worker thread).
 * Uses the tiled variant when the buffer manager reports
 * TBM_BUFMGR_CAPABILITY_TILED_MEMORY, enables GUARANTEE_CYCLE mode, and
 * registers the reset/acquirable callbacks. Returns NULL on any failure
 * (the queue is destroyed again if a later step fails). */
1763 static tbm_surface_queue_h
1764 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1765 struct wayland_tbm_client *wl_tbm_client,
1768 tbm_surface_queue_h tbm_queue = NULL;
1769 tbm_bufmgr bufmgr = NULL;
1770 unsigned int capability;
1772 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1773 int width = wl_egl_surface->width;
1774 int height = wl_egl_surface->height;
1775 int format = wl_egl_surface->format;
1777 if (!wl_tbm_client || !wl_surface) {
1778 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1779 wl_tbm_client, wl_surface);
/* Probe bufmgr capabilities only; init/deinit immediately. */
1783 bufmgr = tbm_bufmgr_init(-1);
1784 capability = tbm_bufmgr_get_capability(bufmgr);
1785 tbm_bufmgr_deinit(bufmgr);
1787 if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1788 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1796 tbm_queue = wayland_tbm_client_create_surface_queue(
1806 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
/* GUARANTEE_CYCLE makes the queue hand out buffers in a fixed cycle. */
1811 if (tbm_surface_queue_set_modes(
1812 tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1813 TBM_SURFACE_QUEUE_ERROR_NONE) {
1814 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1816 tbm_surface_queue_destroy(tbm_queue);
1820 if (tbm_surface_queue_add_reset_cb(
1822 __cb_tbm_queue_reset_callback,
1823 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1824 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1826 tbm_surface_queue_destroy(tbm_queue);
1830 if (tbm_surface_queue_add_acquirable_cb(
1832 __cb_tbm_queue_acquirable_callback,
1833 (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1834 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1836 tbm_surface_queue_destroy(tbm_queue);
/* Creates a tdm_client_vblank on the "primary" output.
 * Fake vblank is enabled (events still fire when the output is off) and
 * sync mode is disabled (0 = asynchronous delivery). Returns NULL on
 * failure. */
1843 static tdm_client_vblank*
1844 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1846 tdm_client_vblank *tdm_vblank = NULL;
1847 tdm_client_output *tdm_output = NULL;
1848 tdm_error tdm_err = TDM_ERROR_NONE;
1851 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1855 tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1856 if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1857 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1861 tdm_vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1862 if (!tdm_vblank || tdm_err != TDM_ERROR_NONE) {
1863 TPL_ERR("Failed to create tdm_vblank. tdm_err(%d)", tdm_err);
1867 tdm_client_vblank_set_enable_fake(tdm_vblank, 1);
1868 tdm_client_vblank_set_sync(tdm_vblank, 0);
/* Free-callback for entries of the display's surface_vblanks list
 * (passed to __tpl_list_remove_data). Destroys the tdm vblank object and
 * detaches the entry from its owning surface. */
1874 __cb_surface_vblank_free(void *data)
1876 TPL_CHECK_ON_NULL_RETURN(data);
1878 tpl_surface_vblank_t *vblank = (tpl_surface_vblank_t *)data;
1879 tpl_wl_egl_surface_t *wl_egl_surface = vblank->wl_egl_surface;
1881 TPL_INFO("[VBLANK_DESTROY]",
1882 "wl_egl_surface(%p) surface_vblank(%p) tdm_vblank(%p)",
1883 wl_egl_surface, vblank,
1884 vblank->tdm_vblank);
1886 tdm_client_vblank_destroy(vblank->tdm_vblank);
1887 vblank->tdm_vblank = NULL;
1888 vblank->wl_egl_surface = NULL;
/* Clear the back-pointer so the surface does not use a freed vblank. */
1892 wl_egl_surface->vblank = NULL;
/* Thread-side half of surface initialization (runs on the worker thread
 * in response to the INIT_SURFACE message). Creates the tbm queue, the
 * per-surface tdm vblank (when the display waits for vblank), and — with
 * TIZEN_FEATURE_ENABLE — the shm flusher, explicit-sync object and the
 * presentation-feedback list. */
1896 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1898 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1899 tpl_surface_vblank_t *vblank = NULL;
1901 wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
1903 wl_egl_display->wl_tbm_client,
1904 wl_egl_surface->num_buffers);
1905 if (!wl_egl_surface->tbm_queue) {
1906 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
1907 wl_egl_surface, wl_egl_display->wl_tbm_client);
1911 TPL_INFO("[QUEUE_CREATION]",
1912 "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
1913 wl_egl_surface, wl_egl_surface->wl_surface,
1914 wl_egl_display->wl_tbm_client);
1915 TPL_INFO("[QUEUE_CREATION]",
1916 "tbm_queue(%p) size(%d x %d) X %d format(%d)",
1917 wl_egl_surface->tbm_queue,
1918 wl_egl_surface->width,
1919 wl_egl_surface->height,
1920 wl_egl_surface->num_buffers,
1921 wl_egl_surface->format);
/* Optional per-surface vblank: created only when the display is
 * configured to throttle commits on vblank. */
1923 if (wl_egl_display->use_wait_vblank) {
1924 vblank = (tpl_surface_vblank_t *)calloc(1, sizeof(tpl_surface_vblank_t));
1926 vblank->tdm_vblank = _thread_create_tdm_client_vblank(
1927 wl_egl_display->tdm.tdm_client);
1928 if (!vblank->tdm_vblank) {
1929 TPL_ERR("Failed to create tdm_vblank from tdm_client(%p)",
1930 wl_egl_display->tdm.tdm_client);
1934 vblank->waiting_buffers = __tpl_list_alloc();
1935 vblank->wl_egl_surface = wl_egl_surface;
/* Registered on the display so the tdm event path can find it. */
1937 __tpl_list_push_back(wl_egl_display->tdm.surface_vblanks,
1940 TPL_INFO("[VBLANK_INIT]",
1941 "wl_egl_surface(%p) tdm_client(%p) tdm_vblank(%p)",
1942 wl_egl_surface, wl_egl_display->tdm.tdm_client,
1943 vblank->tdm_vblank);
1948 wl_egl_surface->vblank = vblank;
1949 #if TIZEN_FEATURE_ENABLE
/* Buffer-flush protocol: lets the compositor reclaim buffers. */
1950 if (wl_egl_display->tss) {
1951 wl_egl_surface->tss_flusher =
1952 tizen_surface_shm_get_flusher(wl_egl_display->tss,
1953 wl_egl_surface->wl_surface);
1956 if (wl_egl_surface->tss_flusher) {
1957 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
1958 &tss_flusher_listener,
1960 TPL_INFO("[FLUSHER_INIT]",
1961 "wl_egl_surface(%p) tss_flusher(%p)",
1962 wl_egl_surface, wl_egl_surface->tss_flusher);
/* Explicit fence sync: failure downgrades the whole display to
 * implicit sync (use_explicit_sync cleared). */
1965 if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
1966 wl_egl_surface->surface_sync =
1967 zwp_linux_explicit_synchronization_v1_get_synchronization(
1968 wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
1969 if (wl_egl_surface->surface_sync) {
1970 TPL_INFO("[EXPLICIT_SYNC_INIT]",
1971 "wl_egl_surface(%p) surface_sync(%p)",
1972 wl_egl_surface, wl_egl_surface->surface_sync);
1974 TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
1976 wl_egl_display->use_explicit_sync = TPL_FALSE;
1980 wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
/* Releases every buffer still tracked in wl_egl_surface->buffers[].
 * For each buffer, depending on its lifecycle status:
 *  - ENQUEUED..(<COMMITTED): wait (with timeout) for it to reach a stable
 *    state before deciding,
 *  - ACQUIRED..COMMITTED: release it back to the tbm queue,
 *  - DEQUEUED: cancel the dequeue instead.
 * Locking order is display->wl_event_mutex, then buffers_mutex, then the
 * per-buffer mutex; buffers_mutex is dropped before touching the buffer
 * itself. Called from __tpl_wl_egl_surface_fini. */
1984 _tpl_wl_egl_surface_buffer_clear(tpl_wl_egl_surface_t *wl_egl_surface)
1986 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1987 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1988 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
1989 tpl_bool_t need_to_release = TPL_FALSE;
1990 tpl_bool_t need_to_cancel = TPL_FALSE;
1991 buffer_status_t status = RELEASED;
1994 while (wl_egl_surface->buffer_cnt) {
1995 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
1996 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
1997 wl_egl_buffer = wl_egl_surface->buffers[idx];
/* Detach the slot from the array before processing the buffer. */
1999 if (wl_egl_buffer) {
2000 wl_egl_surface->buffers[idx] = NULL;
2001 wl_egl_surface->buffer_cnt--;
2003 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2004 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2009 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2011 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2013 status = wl_egl_buffer->status;
2015 TPL_DEBUG("[idx:%d] wl_egl_buffer(%p) tbm_surface(%p) status(%s)",
2017 wl_egl_buffer->tbm_surface,
2018 status_to_string[status]);
2020 if (status >= ENQUEUED) {
2021 tpl_bool_t need_to_wait = TPL_FALSE;
2022 tpl_result_t wait_result = TPL_ERROR_NONE;
/* In flight (enqueued but not yet committed): wait for the buffer
 * to settle. wl_event_mutex is dropped during the wait so the
 * event thread can make progress. */
2024 need_to_wait = (status < COMMITTED);
2027 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2028 wait_result = tpl_cond_timed_wait(&wl_egl_buffer->cond,
2029 &wl_egl_buffer->mutex,
2031 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2033 if (wait_result == TPL_ERROR_TIME_OUT)
2034 TPL_WARN("timeout occured waiting signaled. wl_egl_buffer(%p)",
2039 status = wl_egl_buffer->status; /* update status */
2041 /* ACQUIRED, WAITING_SIGNALED, WAITING_VBLANK, COMMITTED */
2042 /* It has been acquired but has not yet been released, so this
2043 * buffer must be released. */
2044 need_to_release = (status >= ACQUIRED && status <= COMMITTED);
2046 /* After dequeue, it has not been enqueued yet
2047 * so cancel_dequeue must be performed. */
2048 need_to_cancel = (status == DEQUEUED);
2050 if (need_to_release) {
2051 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2052 wl_egl_buffer->tbm_surface);
2053 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2054 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2055 wl_egl_buffer->tbm_surface, tsq_err);
2058 if (need_to_cancel) {
2059 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2060 wl_egl_buffer->tbm_surface);
2061 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2062 TPL_ERR("Failed to release tbm_surface(%p) tsq_err(%d)",
2063 wl_egl_buffer->tbm_surface, tsq_err);
2066 wl_egl_buffer->status = RELEASED;
2068 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the ref taken when the buffer was handed to the queue. */
2070 if (need_to_release || need_to_cancel)
2071 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2073 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Backend teardown for a window tpl_surface (caller-thread side).
 * Clears all tracked buffers, destroys the worker gsource (which runs
 * _thread_wl_egl_surface_fini on the worker thread), unhooks all
 * wl_egl_window / tizen_private callbacks, frees tizen_private, and
 * finally clears the sync primitives and frees the backend object. */
2080 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
2082 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2083 tpl_wl_egl_display_t *wl_egl_display = NULL;
2085 TPL_ASSERT(surface);
2086 TPL_ASSERT(surface->display);
2088 TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
2090 wl_egl_surface = (tpl_wl_egl_surface_t *) surface->backend.data;
2091 TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
2093 wl_egl_display = wl_egl_surface->wl_egl_display;
2094 TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
2096 TPL_INFO("[SURFACE_FINI][BEGIN]",
2097 "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
2099 wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
2101 _tpl_wl_egl_surface_buffer_clear(wl_egl_surface);
/* Destroying the gsource triggers __thread_func_surf_finalize on the
 * worker thread (TPL_TRUE = wait for it). */
2103 if (wl_egl_surface->surf_source)
2104 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
2105 wl_egl_surface->surf_source = NULL;
2107 _print_buffer_lists(wl_egl_surface);
/* Unhook every callback installed in __tpl_wl_egl_surface_init so the
 * window cannot call back into a freed surface. */
2109 if (wl_egl_surface->wl_egl_window) {
2110 struct tizen_private *tizen_private = NULL;
2111 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2112 TPL_INFO("[WL_EGL_WINDOW_FINI]",
2113 "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
2114 wl_egl_surface, wl_egl_window,
2115 wl_egl_surface->wl_surface);
2116 tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
2117 if (tizen_private) {
2118 tizen_private->set_window_serial_callback = NULL;
2119 tizen_private->rotate_callback = NULL;
2120 tizen_private->get_rotation_capability = NULL;
2121 tizen_private->create_presentation_sync_fd = NULL;
2122 tizen_private->create_commit_sync_fd = NULL;
2123 tizen_private->set_frontbuffer_callback = NULL;
2124 tizen_private->merge_sync_fds = NULL;
2125 tizen_private->data = NULL;
2126 free(tizen_private);
2128 wl_egl_window->driver_private = NULL;
2131 wl_egl_window->destroy_window_callback = NULL;
2132 wl_egl_window->resize_callback = NULL;
2134 wl_egl_surface->wl_egl_window = NULL;
2137 wl_egl_surface->wl_surface = NULL;
2138 wl_egl_surface->wl_egl_display = NULL;
2139 wl_egl_surface->tpl_surface = NULL;
/* Lock/unlock each mutex once before clearing to ensure no thread is
 * still inside a critical section. */
2141 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2142 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2143 tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
2145 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2146 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2147 tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
2149 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2150 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2151 tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
2152 tpl_gcond_clear(&wl_egl_surface->surf_cond);
2154 TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
2156 free(wl_egl_surface);
2157 surface->backend.data = NULL;
/* Records whether the client supports pre-rotation for this surface
 * (later reported back through __cb_get_rotation_capability). */
2161 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
2164 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2166 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2168 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2170 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2172 TPL_INFO("[SET_PREROTATION_CAPABILITY]",
2173 "wl_egl_surface(%p) prerotation capability set to [%s]",
2174 wl_egl_surface, (set ? "TRUE" : "FALSE"));
2176 wl_egl_surface->prerotation_capability = set;
2177 return TPL_ERROR_NONE;
/* Sets the swap/post interval used when committing frames for this
 * surface. */
2181 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
2184 tpl_wl_egl_surface_t *wl_egl_surface = NULL;
2186 TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2188 wl_egl_surface = (tpl_wl_egl_surface_t *)surface->backend.data;
2190 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2192 TPL_INFO("[SET_POST_INTERVAL]",
2193 "wl_egl_surface(%p) post_interval(%d -> %d)",
2194 wl_egl_surface, wl_egl_surface->post_interval, post_interval);
2196 wl_egl_surface->post_interval = post_interval;
2198 return TPL_ERROR_NONE;
/* Returns TPL_FALSE when the surface was reset (resize / activation
 * change) and the caller must re-acquire a buffer, TPL_TRUE otherwise. */
2202 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
2204 tpl_bool_t retval = TPL_TRUE;
2206 TPL_ASSERT(surface);
2207 TPL_ASSERT(surface->backend.data);
2209 tpl_wl_egl_surface_t *wl_egl_surface =
2210 (tpl_wl_egl_surface_t *)surface->backend.data;
2212 retval = !(wl_egl_surface->reset);
/* Reports the current size of the surface's tbm queue; width/height are
 * each optional outputs. */
2218 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2220 tpl_wl_egl_surface_t *wl_egl_surface =
2221 (tpl_wl_egl_surface_t *)surface->backend.data;
2224 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2226 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2229 #define CAN_DEQUEUE_TIMEOUT_MS 10000
/* Last-resort recovery when can_dequeue times out: flushes the tbm queue,
 * then force-releases every buffer still in COMMITTED state (the
 * compositor never released them) so dequeue can proceed.
 * Returns TPL_ERROR_NONE or TPL_ERROR_INVALID_OPERATION. */
2232 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2234 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2236 _print_buffer_lists(wl_egl_surface);
2238 if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2239 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2240 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2241 wl_egl_surface->tbm_queue, tsq_err);
2242 return TPL_ERROR_INVALID_OPERATION;
/* Reclaim committed-but-never-released buffers. */
2247 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2248 for (i = 0; i < BUFFER_ARRAY_SIZE; i++) {
2249 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2250 wl_egl_buffer = wl_egl_surface->buffers[i];
2251 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2252 if (wl_egl_buffer && wl_egl_buffer->status == COMMITTED) {
2253 wl_egl_buffer->status = RELEASED;
2254 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2255 wl_egl_buffer->tbm_surface);
2256 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2257 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2258 wl_egl_buffer->tbm_surface, tsq_err);
2259 tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2264 TPL_INFO("[FORCE_FLUSH]",
2265 "wl_egl_surface(%p) tbm_queue(%p)",
2266 wl_egl_surface, wl_egl_surface->tbm_queue);
2268 return TPL_ERROR_NONE;
/* Re-initialize per-frame state of a (possibly recycled) wl_egl_buffer
 * at dequeue time: reset draw/commit flags, pick up the current
 * transform / window-transform from the wl_egl_window's tizen_private,
 * choose the commit serial, and clear stale damage rects. */
2272 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2273 tpl_wl_egl_surface_t *wl_egl_surface)
2275 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2276 struct tizen_private *tizen_private =
2277 (struct tizen_private *)wl_egl_window->driver_private;
2279 TPL_ASSERT(tizen_private);
/* Fresh frame: not yet rendered, and it must be committed once. */
2281 wl_egl_buffer->draw_done = TPL_FALSE;
2282 wl_egl_buffer->need_to_commit = TPL_TRUE;
2283 #if TIZEN_FEATURE_ENABLE
/* Any old explicit-sync release object belongs to the previous cycle. */
2284 wl_egl_buffer->buffer_release = NULL;
2286 wl_egl_buffer->transform = tizen_private->transform;
/* Window transform changed since last use -> flag so commit path sends
 * the new buffer transform to the compositor. */
2288 if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2289 wl_egl_buffer->w_transform = tizen_private->window_transform;
2290 wl_egl_buffer->w_rotated = TPL_TRUE;
/* Serial: caller-provided (set_serial) wins; otherwise take the next
 * monotonically increasing serial from tizen_private. */
2293 if (wl_egl_surface->set_serial_is_used) {
2294 wl_egl_buffer->serial = wl_egl_surface->serial;
2296 wl_egl_buffer->serial = ++tizen_private->serial;
/* Drop damage rects left over from the buffer's previous frame. */
2299 if (wl_egl_buffer->rects) {
2300 free(wl_egl_buffer->rects);
2301 wl_egl_buffer->rects = NULL;
2302 wl_egl_buffer->num_rects = 0;
2306 static tpl_wl_egl_buffer_t *
2307 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2309 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2310 tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2311 (void **)&wl_egl_buffer);
2312 return wl_egl_buffer;
/* Get-or-create the wl_egl_buffer wrapper for a freshly dequeued
 * tbm_surface. On first sight of a tbm_surface this allocates the
 * wrapper, attaches it as tbm user data (freed via
 * __cb_wl_egl_buffer_free), registers it in the surface's fixed-size
 * tracking array, and then (re)initializes per-frame state. */
2315 static tpl_wl_egl_buffer_t *
2316 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2317 tbm_surface_h tbm_surface)
2319 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2320 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2322 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2324 if (!wl_egl_buffer) {
2325 wl_egl_buffer = (tpl_wl_egl_buffer_t *)calloc(1, sizeof(tpl_wl_egl_buffer_t));
2326 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
/* Tie the wrapper's lifetime to the tbm_surface via user data. */
2328 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2329 (tbm_data_free)__cb_wl_egl_buffer_free);
2330 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2333 wl_egl_buffer->wl_buffer = NULL;
2334 wl_egl_buffer->tbm_surface = tbm_surface;
2335 wl_egl_buffer->bo_name = _get_tbm_surface_bo_name(tbm_surface);
2336 wl_egl_buffer->wl_egl_surface = wl_egl_surface;
2338 wl_egl_buffer->status = RELEASED;
/* All sync fds start invalid (-1 = "not in use"). */
2340 wl_egl_buffer->acquire_fence_fd = -1;
2341 wl_egl_buffer->commit_sync_fd = -1;
2342 wl_egl_buffer->presentation_sync_fd = -1;
2343 wl_egl_buffer->release_fence_fd = -1;
2345 wl_egl_buffer->dx = wl_egl_window->dx;
2346 wl_egl_buffer->dy = wl_egl_window->dy;
2347 wl_egl_buffer->width = tbm_surface_get_width(tbm_surface);
2348 wl_egl_buffer->height = tbm_surface_get_height(tbm_surface);
/* -1 forces the first _wl_egl_buffer_init to treat the window
 * transform as changed and mark the buffer rotated. */
2350 wl_egl_buffer->w_transform = -1;
2352 tpl_gmutex_init(&wl_egl_buffer->mutex);
2353 tpl_gcond_init(&wl_egl_buffer->cond);
2355 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
/* Find a free slot in the surface's tracking array. */
2358 for (i = 0; i < BUFFER_ARRAY_SIZE; i++)
2359 if (wl_egl_surface->buffers[i] == NULL) break;
2361 /* If this exception is reached,
2362 * it may be a critical memory leak problem. */
2363 if (i == BUFFER_ARRAY_SIZE) {
2364 tpl_wl_egl_buffer_t *evicted_buffer = NULL;
2365 int evicted_idx = 0; /* evict the frontmost buffer */
2367 evicted_buffer = wl_egl_surface->buffers[evicted_idx];
2369 TPL_WARN("wl_egl_surface(%p) buffers array is full. evict one.",
2371 TPL_WARN("evicted buffer (%p) tbm_surface(%p) status(%s)",
2372 evicted_buffer, evicted_buffer->tbm_surface,
2373 status_to_string[evicted_buffer->status]);
2375 /* [TODO] need to think about whether there will be
2376 * better modifications */
/* NOTE(review): slot 0 is dropped from tracking only — the evicted
 * wl_egl_buffer itself is not freed here; its tbm user-data free
 * callback presumably owns that. Verify no leak. */
2377 wl_egl_surface->buffer_cnt--;
2378 wl_egl_surface->buffers[evicted_idx] = NULL;
2383 wl_egl_surface->buffer_cnt++;
2384 wl_egl_surface->buffers[i] = wl_egl_buffer;
2385 wl_egl_buffer->idx = i;
2387 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2389 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2390 "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2391 wl_egl_surface, wl_egl_buffer, tbm_surface,
2392 wl_egl_buffer->bo_name);
/* Always refresh per-frame state, whether new or recycled. */
2395 _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2397 return wl_egl_buffer;
/* Dequeue the next drawable tbm_surface for the client (EGL).
 * Waits (up to CAN_DEQUEUE_TIMEOUT_MS) until the queue can dequeue,
 * force-flushing the queue on timeout; checks the activation state
 * (direct scanout vs. composited); honors frontbuffer mode; and hands
 * back an optional explicit-sync release fence via *release_fence.
 * Returns NULL on failure. */
2400 static tbm_surface_h
2401 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2402 int32_t *release_fence)
2404 TPL_ASSERT(surface);
2405 TPL_ASSERT(surface->backend.data);
2406 TPL_ASSERT(surface->display);
2407 TPL_ASSERT(surface->display->backend.data);
2408 TPL_OBJECT_CHECK_RETURN(surface, NULL);
2410 tpl_wl_egl_surface_t *wl_egl_surface =
2411 (tpl_wl_egl_surface_t *)surface->backend.data;
2412 tpl_wl_egl_display_t *wl_egl_display =
2413 (tpl_wl_egl_display_t *)surface->display->backend.data;
2414 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2416 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2417 tpl_bool_t is_activated = 0;
2419 tbm_surface_h tbm_surface = NULL;
/* Drop the TPL object lock while blocking so other threads can use
 * the surface object during the (potentially long) wait. */
2421 TPL_OBJECT_UNLOCK(surface);
2422 tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2423 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2424 TPL_OBJECT_LOCK(surface);
2426 /* After the can dequeue state, lock the wl_event_mutex to prevent other
2427 * events from being processed in wayland_egl_thread
2428 * during below dequeue procedure. */
2429 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
/* Timeout: the compositor is not releasing buffers. Reset the queue
 * so the client can make progress instead of hanging forever. */
2431 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2432 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2433 wl_egl_surface->tbm_queue, surface);
2434 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2435 TPL_ERR("Failed to timeout reset. tbm_queue(%p) surface(%p)",
2436 wl_egl_surface->tbm_queue, surface);
2437 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2440 tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2444 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2445 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2446 wl_egl_surface->tbm_queue, surface);
2447 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2451 /* wayland client can check their states (ACTIVATED or DEACTIVATED) with
2452 * below function [wayland_tbm_client_queue_check_activate()].
2453 * This function has to be called before tbm_surface_queue_dequeue()
2454 * in order to know what state the buffer will be dequeued next.
2456 * ACTIVATED state means non-composite mode. Client can get buffers which
2457 can be displayed directly(without compositing).
2458 * DEACTIVATED state means composite mode. Client's buffer will be displayed
2459 by compositor(E20) with compositing.
2461 is_activated = wayland_tbm_client_queue_check_activate(
2462 wl_egl_display->wl_tbm_client,
2463 wl_egl_surface->tbm_queue);
2465 wl_egl_surface->is_activated = is_activated;
/* The queue dimensions may have changed (resize/reset); mirror them
 * into the tpl_surface and backend surface. */
2467 surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2468 surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2469 wl_egl_surface->width = surface->width;
2470 wl_egl_surface->height = surface->height;
2472 if (surface->is_frontbuffer_mode && surface->frontbuffer != NULL) {
2473 /* If surface->frontbuffer is already set in frontbuffer mode,
2474 * it will return that frontbuffer if it is still activated,
2475 * otherwise dequeue the new buffer after initializing
2476 * surface->frontbuffer to NULL. */
2477 if (is_activated && !wl_egl_surface->reset) {
2478 bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2481 "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2482 surface->frontbuffer, bo_name);
2483 TRACE_ASYNC_BEGIN((int)surface->frontbuffer,
2484 "[DEQ]~[ENQ] BO_NAME:%d",
2486 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2487 return surface->frontbuffer;
2489 surface->frontbuffer = NULL;
2490 wl_egl_surface->need_to_enqueue = TPL_TRUE;
2493 surface->frontbuffer = NULL;
2496 tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2499 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p)| tsq_err = %d",
2500 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2501 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Hold a ref for the dequeue->enqueue window. */
2505 tbm_surface_internal_ref(tbm_surface);
2507 wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2508 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2510 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2511 wl_egl_buffer->status = DEQUEUED;
2514 /* If wl_egl_buffer->release_fence_fd is -1,
2515 * the tbm_surface can be used immediately.
2516 * If not, user(EGL) have to wait until signaled. */
2517 if (release_fence) {
2518 #if TIZEN_FEATURE_ENABLE
2519 if (wl_egl_surface->surface_sync) {
/* Ownership of the release fence fd transfers to the caller. */
2520 *release_fence = wl_egl_buffer->release_fence_fd;
2521 TPL_DEBUG("wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2522 wl_egl_surface, wl_egl_buffer, *release_fence);
2524 wl_egl_buffer->release_fence_fd = -1;
2528 *release_fence = -1;
2532 if (surface->is_frontbuffer_mode && is_activated)
2533 surface->frontbuffer = tbm_surface;
2535 wl_egl_surface->reset = TPL_FALSE;
2537 TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2538 TRACE_ASYNC_BEGIN((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d",
2539 wl_egl_buffer->bo_name);
2540 TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2541 wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name,
2542 release_fence ? *release_fence : -1);
2544 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2545 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
/* Return a previously dequeued (but undrawn) tbm_surface to the queue
 * without presenting it: mark the wrapper RELEASED, drop the dequeue
 * ref, and cancel the dequeue on the tbm_queue. */
2551 __tpl_wl_egl_surface_cancel_buffer(tpl_surface_t *surface,
2552 tbm_surface_h tbm_surface)
2554 TPL_ASSERT(surface);
2555 TPL_ASSERT(surface->backend.data);
2557 tpl_wl_egl_surface_t *wl_egl_surface =
2558 (tpl_wl_egl_surface_t *)surface->backend.data;
2559 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2560 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2562 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2563 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2564 return TPL_ERROR_INVALID_PARAMETER;
2567 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2568 if (wl_egl_buffer) {
2569 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2570 wl_egl_buffer->status = RELEASED;
2571 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the ref taken at dequeue time. */
2574 tbm_surface_internal_unref(tbm_surface);
2576 tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2578 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2579 TPL_ERR("Failed to release tbm_surface(%p) surface(%p)",
2580 tbm_surface, surface);
2581 return TPL_ERROR_INVALID_OPERATION;
2584 TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2585 wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2587 return TPL_ERROR_NONE;
/* Enqueue a rendered tbm_surface for presentation. Stores damage
 * rects and the acquire fence on the wl_egl_buffer, moves per-frame
 * presentation/commit sync fds from the surface to the buffer, and
 * pushes the buffer into the tbm_queue (the thread-side acquire path
 * performs the actual wl_surface commit). Handles frontbuffer-mode
 * enqueue skipping. */
2591 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2592 tbm_surface_h tbm_surface,
2593 int num_rects, const int *rects, int32_t acquire_fence)
2595 TPL_ASSERT(surface);
2596 TPL_ASSERT(surface->display);
2597 TPL_ASSERT(surface->backend.data);
2598 TPL_ASSERT(tbm_surface);
2599 TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2601 tpl_wl_egl_surface_t *wl_egl_surface =
2602 (tpl_wl_egl_surface_t *) surface->backend.data;
2603 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2604 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2607 if (!tbm_surface_internal_is_valid(tbm_surface)) {
2608 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2610 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2611 return TPL_ERROR_INVALID_PARAMETER;
2614 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2615 if (!wl_egl_buffer) {
2616 TPL_ERR("Failed to get wl_egl_buffer from tbm_surface(%p)", tbm_surface);
2617 return TPL_ERROR_INVALID_PARAMETER;
2620 bo_name = _get_tbm_surface_bo_name(tbm_surface);
2622 TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2624 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2626 /* If there are received region information, save it to wl_egl_buffer */
2627 if (num_rects && rects) {
/* Replace (not merge) any rects from a previous frame. */
2628 if (wl_egl_buffer->rects != NULL) {
2629 free(wl_egl_buffer->rects);
2630 wl_egl_buffer->rects = NULL;
2631 wl_egl_buffer->num_rects = 0;
/* Each rect is 4 ints: x, y, w, h. */
2634 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2635 wl_egl_buffer->num_rects = num_rects;
2637 if (!wl_egl_buffer->rects) {
2638 TPL_ERR("Failed to allocate memory fo damage rects info.");
2639 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2640 return TPL_ERROR_OUT_OF_MEMORY;
2643 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
/* Frontbuffer mode: a buffer that is already on screen does not need
 * to be enqueued/committed again. */
2646 if (!wl_egl_surface->need_to_enqueue ||
2647 !wl_egl_buffer->need_to_commit) {
2648 TPL_WARN("[ENQ_SKIP][Frontbuffer:%s] tbm_surface(%p) need not to enqueue",
2649 ((surface->frontbuffer == tbm_surface) ? "ON" : "OFF"), tbm_surface);
2650 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2651 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2652 return TPL_ERROR_NONE;
2655 /* In frontbuffer mode, will skip tbm_surface_queue_enqueue, acquire, and
2656 * commit if surface->frontbuffer that is already set and the tbm_surface
2657 * client want to enqueue are the same.
2659 if (surface->is_frontbuffer_mode) {
2660 /* The first buffer to be activated in frontbuffer mode must be
2661 * committed. Subsequence frames do not need to be committed because
2662 * the buffer is already displayed.
2664 if (surface->frontbuffer == tbm_surface)
2665 wl_egl_surface->need_to_enqueue = TPL_FALSE;
/* Frontbuffer path never forwards the fence; close it here. */
2667 if (acquire_fence != -1) {
2668 close(acquire_fence);
/* Take ownership of the caller's acquire fence, replacing any stale
 * one still held from a previous cycle. */
2673 if (wl_egl_buffer->acquire_fence_fd != -1)
2674 close(wl_egl_buffer->acquire_fence_fd);
2676 wl_egl_buffer->acquire_fence_fd = acquire_fence;
/* Move the per-frame presentation sync fd onto this buffer. */
2678 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2679 if (wl_egl_surface->presentation_sync.fd != -1) {
2680 wl_egl_buffer->presentation_sync_fd = wl_egl_surface->presentation_sync.fd;
2681 wl_egl_surface->presentation_sync.fd = -1;
2683 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* Likewise for the commit sync fd. */
2685 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2686 if (wl_egl_surface->commit_sync.fd != -1) {
2687 wl_egl_buffer->commit_sync_fd = wl_egl_surface->commit_sync.fd;
2688 wl_egl_surface->commit_sync.fd = -1;
2689 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2690 _get_tbm_surface_bo_name(tbm_surface));
2692 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2694 wl_egl_buffer->status = ENQUEUED;
2696 "[ENQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2697 wl_egl_buffer, tbm_surface, bo_name, acquire_fence);
2699 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2701 tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
2703 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2704 tbm_surface_internal_unref(tbm_surface);
2705 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2706 tbm_surface, wl_egl_surface, tsq_err);
2707 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2708 return TPL_ERROR_INVALID_OPERATION;
/* Balance the ref taken at dequeue. */
2711 tbm_surface_internal_unref(tbm_surface);
2713 TRACE_ASYNC_END((int)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2715 return TPL_ERROR_NONE;
/* Dispatched on the wayland-egl thread when a buffer's acquire fence
 * fd signals (rendering finished). Closes the fence, then either
 * commits the buffer immediately or parks it on the vblank waiting
 * list when a vblank is still pending. */
2719 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2721 tpl_wl_egl_buffer_t *wl_egl_buffer =
2722 (tpl_wl_egl_buffer_t *)tpl_gsource_get_data(gsource);
2723 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2724 tbm_surface_h tbm_surface = wl_egl_buffer->tbm_surface;
2726 wl_egl_surface->render_done_cnt++;
2728 TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2729 wl_egl_buffer->acquire_fence_fd);
2731 TPL_DEBUG("[RENDER DONE] wl_egl_buffer(%p) tbm_surface(%p)",
2732 wl_egl_buffer, tbm_surface);
2734 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2735 wl_egl_buffer->status = WAITING_VBLANK;
2737 TPL_DEBUG("[FINALIZE] wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2738 wl_egl_buffer, wl_egl_buffer->waiting_source,
2739 wl_egl_buffer->acquire_fence_fd);
/* The fence has signaled; its fd is no longer needed. The gsource is
 * SOURCE_TYPE_DISPOSABLE, so clearing waiting_source drops our ref. */
2741 close(wl_egl_buffer->acquire_fence_fd);
2742 wl_egl_buffer->acquire_fence_fd = -1;
2743 wl_egl_buffer->waiting_source = NULL;
2745 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2747 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
/* No vblank throttling (or last vblank already done) -> commit now;
 * otherwise queue behind the pending vblank. */
2749 if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2750 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2752 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers,
2755 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2761 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2763 TPL_IGNORE(gsource);
/* gsource vtable for the per-buffer acquire-fence wait sources created
 * with SOURCE_TYPE_DISPOSABLE in _thread_surface_queue_acquire(). */
2766 static tpl_gsource_functions buffer_funcs = {
2769 .dispatch = __thread_func_waiting_source_dispatch,
2770 .finalize = __thread_func_waiting_source_finalize,
/* Thread-side acquire loop: drain every acquirable buffer from the
 * tbm_queue. For each buffer, either commit it right away, attach a
 * fence-wait gsource (when an acquire fence must signal first and no
 * explicit surface_sync handles it), or park it for the next vblank.
 * Returns TPL_ERROR_NONE, or TPL_ERROR_INVALID_OPERATION when a
 * tbm_surface_queue_acquire fails. */
2774 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2776 tbm_surface_h tbm_surface = NULL;
2777 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2778 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2779 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2780 tpl_bool_t ready_to_commit = TPL_FALSE;
2782 while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2783 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2785 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2786 TPL_ERR("Failed to acquire from tbm_queue(%p)",
2787 wl_egl_surface->tbm_queue);
2788 return TPL_ERROR_INVALID_OPERATION;
/* Hold a ref for the acquire->release window. */
2791 tbm_surface_internal_ref(tbm_surface);
2793 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2794 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2795 "wl_egl_buffer sould be not NULL");
2797 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2799 wl_egl_buffer->status = ACQUIRED;
2801 TPL_LOG_T("WL_EGL", "[ACQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2802 wl_egl_buffer, tbm_surface,
2803 _get_tbm_surface_bo_name(tbm_surface));
2805 if (wl_egl_buffer->acquire_fence_fd != -1) {
2806 #if TIZEN_FEATURE_ENABLE
/* With explicit sync the compositor waits on the fence itself;
 * the commit can proceed without a local wait. */
2807 if (wl_egl_surface->surface_sync)
2808 ready_to_commit = TPL_TRUE;
/* Otherwise wait locally: replace any stale wait source, then
 * create a disposable gsource that fires when the fence signals. */
2812 if (wl_egl_buffer->waiting_source) {
2813 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2814 wl_egl_buffer->waiting_source = NULL;
2817 wl_egl_buffer->waiting_source =
2818 tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2819 wl_egl_buffer->acquire_fence_fd, &buffer_funcs,
2820 SOURCE_TYPE_DISPOSABLE);
2821 wl_egl_buffer->status = WAITING_SIGNALED;
2823 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2824 wl_egl_buffer->acquire_fence_fd);
2826 ready_to_commit = TPL_FALSE;
/* No acquire fence at all -> rendering already complete. */
2829 ready_to_commit = TPL_TRUE;
/* Throttle to vblank when one is pending. */
2832 if (ready_to_commit) {
2833 if (wl_egl_surface->vblank == NULL || wl_egl_surface->vblank_done)
2834 ready_to_commit = TPL_TRUE;
2836 wl_egl_buffer->status = WAITING_VBLANK;
2837 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers, wl_egl_buffer);
2838 ready_to_commit = TPL_FALSE;
2842 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2844 if (ready_to_commit)
2845 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2848 return TPL_ERROR_NONE;
2851 /* -- BEGIN -- tdm_client vblank callback function */
2853 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
2854 unsigned int sequence, unsigned int tv_sec,
2855 unsigned int tv_usec, void *user_data)
2857 tpl_wl_egl_surface_t *wl_egl_surface = (tpl_wl_egl_surface_t *)user_data;
2858 tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2860 TRACE_ASYNC_END((int)wl_egl_surface, "WAIT_VBLANK");
2861 TPL_DEBUG("[VBLANK] wl_egl_surface(%p)", wl_egl_surface);
2863 if (error == TDM_ERROR_TIMEOUT)
2864 TPL_WARN("[TDM_ERROR_TIMEOUT] It will keep going. wl_egl_surface(%p)",
2867 wl_egl_surface->vblank_done = TPL_TRUE;
2869 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2870 if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
2871 while (!__tpl_list_is_empty(wl_egl_surface->vblank->waiting_buffers)) {
2872 wl_egl_buffer = (tpl_wl_egl_buffer_t *)__tpl_list_pop_front(
2873 wl_egl_surface->vblank->waiting_buffers,
2876 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2878 /* If tdm error such as TIEMOUT occured,
2879 * flush all vblank waiting buffers of its wl_egl_surface.
2880 * Otherwise, only one wl_egl_buffer will be commited per one vblank event.
2882 if (error == TDM_ERROR_NONE) break;
2885 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2887 /* -- END -- tdm_client vblank callback function */
#if TIZEN_FEATURE_ENABLE
/* Explicit-sync "fenced_release" event: the compositor is done with
 * the buffer and hands back a release fence fd. Store the fence for
 * the next dequeue of this buffer, return the buffer to the
 * tbm_queue, and drop the acquire-time ref. The wrapper takes
 * ownership of `fence`. */
2891 __cb_buffer_fenced_release(void *data,
2892 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
2894 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2895 tbm_surface_h tbm_surface = NULL;
2897 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2899 tbm_surface = wl_egl_buffer->tbm_surface;
2901 if (tbm_surface_internal_is_valid(tbm_surface)) {
2903 tpl_gmutex_lock(&wl_egl_buffer->mutex);
/* Only a COMMITTED buffer can transition to RELEASED here; any other
 * status means the release is stale and is ignored. */
2904 if (wl_egl_buffer->status == COMMITTED) {
2905 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2906 tbm_surface_queue_error_e tsq_err;
/* The release object is one-shot; destroy it now. */
2908 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
2909 wl_egl_buffer->buffer_release = NULL;
2911 wl_egl_buffer->release_fence_fd = fence;
2912 wl_egl_buffer->status = RELEASED;
2914 TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
2915 _get_tbm_surface_bo_name(tbm_surface),
2917 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2918 _get_tbm_surface_bo_name(tbm_surface));
2921 "[FENCED_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2922 wl_egl_buffer, tbm_surface,
2923 _get_tbm_surface_bo_name(tbm_surface),
2926 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2928 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2929 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
/* Balance the ref taken in _thread_surface_queue_acquire. */
2931 tbm_surface_internal_unref(tbm_surface);
2934 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2937 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* Explicit-sync "immediate_release" event: like fenced_release, but
 * the compositor provides no fence — the buffer is reusable right
 * away (release_fence_fd stays -1). Returns the buffer to the
 * tbm_queue and drops the acquire-time ref. */
2942 __cb_buffer_immediate_release(void *data,
2943 struct zwp_linux_buffer_release_v1 *release)
2945 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
2946 tbm_surface_h tbm_surface = NULL;
2948 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
2950 tbm_surface = wl_egl_buffer->tbm_surface;
2952 if (tbm_surface_internal_is_valid(tbm_surface)) {
2954 tpl_gmutex_lock(&wl_egl_buffer->mutex);
/* Ignore stale releases: only COMMITTED -> RELEASED is valid. */
2955 if (wl_egl_buffer->status == COMMITTED) {
2956 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
2957 tbm_surface_queue_error_e tsq_err;
/* The release object is one-shot; destroy it now. */
2959 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
2960 wl_egl_buffer->buffer_release = NULL;
/* No fence -> buffer is immediately reusable. */
2962 wl_egl_buffer->release_fence_fd = -1;
2963 wl_egl_buffer->status = RELEASED;
2965 TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
2966 _get_tbm_surface_bo_name(tbm_surface));
2967 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
2968 _get_tbm_surface_bo_name(tbm_surface));
2971 "[IMMEDIATE_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2972 wl_egl_buffer, tbm_surface,
2973 _get_tbm_surface_bo_name(tbm_surface));
2975 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2977 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2978 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
/* Balance the ref taken in _thread_surface_queue_acquire. */
2980 tbm_surface_internal_unref(tbm_surface);
2983 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2986 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* Listener for per-buffer explicit-sync release events.
 * NOTE(review): identifier spells "listner" (sic) — keep the name as
 * is, since it is referenced from the commit path. */
2990 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
2991 __cb_buffer_fenced_release,
2992 __cb_buffer_immediate_release,
/* Classic wl_buffer.release handler (used when explicit sync is not
 * active): return a COMMITTED buffer to the tbm_queue and drop the
 * acquire-time ref. */
2997 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
2999 tpl_wl_egl_buffer_t *wl_egl_buffer = (tpl_wl_egl_buffer_t *)data;
3000 tbm_surface_h tbm_surface = NULL;
3002 TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer)
3004 tbm_surface = wl_egl_buffer->tbm_surface;
3006 if (tbm_surface_internal_is_valid(tbm_surface)) {
3007 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
/* Initialized to an error value so the unref below only happens
 * when the queue release actually succeeded. */
3008 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3010 tpl_gmutex_lock(&wl_egl_buffer->mutex);
/* Ignore releases for buffers not currently committed. */
3012 if (wl_egl_buffer->status == COMMITTED) {
3014 tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3016 if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3017 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3019 wl_egl_buffer->status = RELEASED;
3021 TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
3022 TRACE_ASYNC_END((int)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3023 _get_tbm_surface_bo_name(tbm_surface));
3025 TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
3026 wl_egl_buffer->wl_buffer, tbm_surface,
3027 _get_tbm_surface_bo_name(tbm_surface));
3030 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
/* Drop the acquire-time ref only on a successful queue release. */
3032 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3033 tbm_surface_internal_unref(tbm_surface);
3035 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
/* wl_buffer listener used on the non-explicit-sync commit path. The
 * cast adapts __cb_wl_buffer_release's wl_proxy* parameter to the
 * wl_buffer* the listener expects. */
3039 static const struct wl_buffer_listener wl_buffer_release_listener = {
3040 (void *)__cb_wl_buffer_release,
3042 #if TIZEN_FEATURE_ENABLE
/* wp_presentation_feedback "sync_output" event — intentionally
 * unused; this backend only acts on presented/discarded. */
static void
__cb_presentation_feedback_sync_output(void *data,
		struct wp_presentation_feedback *presentation_feedback,
		struct wl_output *output)
{
	TPL_IGNORE(data);
	TPL_IGNORE(presentation_feedback);
	TPL_IGNORE(output);
}
/* wp_presentation_feedback "presented" event: signal the pending
 * presentation sync eventfd, close it, destroy the feedback proxy,
 * and remove the pst_feedback entry from the surface's list. The
 * timing fields are intentionally ignored. */
3055 __cb_presentation_feedback_presented(void *data,
3056 struct wp_presentation_feedback *presentation_feedback,
3060 uint32_t refresh_nsec,
3065 TPL_IGNORE(tv_sec_hi);
3066 TPL_IGNORE(tv_sec_lo);
3067 TPL_IGNORE(tv_nsec);
3068 TPL_IGNORE(refresh_nsec);
3073 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3074 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
3076 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3078 TPL_DEBUG("[FEEDBACK][PRESENTED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3079 pst_feedback, presentation_feedback, pst_feedback->bo_name);
/* Wake any waiter blocked on the presentation sync eventfd. */
3081 if (pst_feedback->pst_sync_fd != -1) {
3082 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3084 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3085 pst_feedback->pst_sync_fd);
3088 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3089 "[PRESENTATION_SYNC] bo(%d)",
3090 pst_feedback->bo_name);
3092 close(pst_feedback->pst_sync_fd);
3093 pst_feedback->pst_sync_fd = -1;
/* Feedback objects are one-shot: destroy the proxy and clear the
 * entry before dropping it from the surface's tracking list. */
3096 wp_presentation_feedback_destroy(presentation_feedback);
3098 pst_feedback->presentation_feedback = NULL;
3099 pst_feedback->wl_egl_surface = NULL;
3100 pst_feedback->bo_name = 0;
3102 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3107 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* wp_presentation_feedback "discarded" event: the frame was never
 * shown. Handling is identical to "presented" — the sync eventfd is
 * still signaled so waiters do not block forever.
 * NOTE(review): near-duplicate of __cb_presentation_feedback_presented;
 * a shared helper would remove the duplication. */
3111 __cb_presentation_feedback_discarded(void *data,
3112 struct wp_presentation_feedback *presentation_feedback)
3114 struct pst_feedback *pst_feedback = (struct pst_feedback *)data;
3115 tpl_wl_egl_surface_t *wl_egl_surface = pst_feedback->wl_egl_surface;
3117 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3119 TPL_DEBUG("[FEEDBACK][DISCARDED] pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3120 pst_feedback, presentation_feedback, pst_feedback->bo_name);
/* Wake any waiter blocked on the presentation sync eventfd. */
3122 if (pst_feedback->pst_sync_fd != -1) {
3123 int ret = _write_to_eventfd(pst_feedback->pst_sync_fd);
3125 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3126 pst_feedback->pst_sync_fd);
3129 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3130 "[PRESENTATION_SYNC] bo(%d)",
3131 pst_feedback->bo_name);
3133 close(pst_feedback->pst_sync_fd);
3134 pst_feedback->pst_sync_fd = -1;
/* One-shot feedback object: destroy, clear, and untrack. */
3137 wp_presentation_feedback_destroy(presentation_feedback);
3139 pst_feedback->presentation_feedback = NULL;
3140 pst_feedback->wl_egl_surface = NULL;
3141 pst_feedback->bo_name = 0;
3143 __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3148 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
/* wp_presentation_feedback listener installed per committed frame in
 * _thread_wl_surface_commit(). */
3151 static const struct wp_presentation_feedback_listener feedback_listener = {
3152 __cb_presentation_feedback_sync_output, /* sync_output feedback -*/
3153 __cb_presentation_feedback_presented,
3154 __cb_presentation_feedback_discarded
3159 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
3161 tdm_error tdm_err = TDM_ERROR_NONE;
3162 tpl_surface_vblank_t *vblank = wl_egl_surface->vblank;
3164 tdm_err = tdm_client_vblank_wait(vblank->tdm_vblank,
3165 wl_egl_surface->post_interval,
3166 __cb_tdm_client_vblank,
3167 (void *)wl_egl_surface);
3169 if (tdm_err == TDM_ERROR_NONE) {
3170 wl_egl_surface->vblank_done = TPL_FALSE;
3171 TRACE_ASYNC_BEGIN((int)wl_egl_surface, "WAIT_VBLANK");
3173 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
3174 return TPL_ERROR_INVALID_OPERATION;
3177 return TPL_ERROR_NONE;
3181 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
3182 tpl_wl_egl_buffer_t *wl_egl_buffer)
3184 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3185 struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
3186 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
3189 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
3190 "wl_egl_buffer sould be not NULL");
3192 if (wl_egl_buffer->wl_buffer == NULL) {
3193 wl_egl_buffer->wl_buffer =
3194 (struct wl_proxy *)wayland_tbm_client_create_buffer(
3195 wl_egl_display->wl_tbm_client,
3196 wl_egl_buffer->tbm_surface);
3198 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
3199 "[FATAL] Failed to create wl_buffer");
3201 TPL_INFO("[WL_BUFFER_CREATE]",
3202 "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3203 wl_egl_buffer, wl_egl_buffer->wl_buffer,
3204 wl_egl_buffer->tbm_surface);
3206 #if TIZEN_FEATURE_ENABLE
3207 if (!wl_egl_display->use_explicit_sync ||
3208 !wl_egl_surface->surface_sync)
3211 wl_buffer_add_listener((struct wl_buffer *)wl_egl_buffer->wl_buffer,
3212 &wl_buffer_release_listener,
3217 version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
3219 #if TIZEN_FEATURE_ENABLE
3220 /* create presentation feedback and add listener */
3221 tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3222 if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
3224 struct pst_feedback *pst_feedback = NULL;
3225 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
3227 pst_feedback->presentation_feedback =
3228 wp_presentation_feedback(wl_egl_display->presentation,
3231 pst_feedback->wl_egl_surface = wl_egl_surface;
3232 pst_feedback->bo_name = wl_egl_buffer->bo_name;
3234 pst_feedback->pst_sync_fd = wl_egl_buffer->presentation_sync_fd;
3235 wl_egl_buffer->presentation_sync_fd = -1;
3237 wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
3238 &feedback_listener, pst_feedback);
3239 __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
3240 TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
3241 "[PRESENTATION_SYNC] bo(%d)",
3242 pst_feedback->bo_name);
3244 TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
3246 _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
3247 close(wl_egl_buffer->presentation_sync_fd);
3248 wl_egl_buffer->presentation_sync_fd = -1;
3251 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3254 if (wl_egl_buffer->w_rotated == TPL_TRUE) {
3256 wayland_tbm_client_set_buffer_transform(
3257 wl_egl_display->wl_tbm_client,
3258 (void *)wl_egl_buffer->wl_buffer,
3259 wl_egl_buffer->w_transform);
3261 wl_egl_buffer->w_rotated = TPL_FALSE;
3264 if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3265 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3267 wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
3270 if (wl_egl_window) {
3271 wl_egl_window->attached_width = wl_egl_buffer->width;
3272 wl_egl_window->attached_height = wl_egl_buffer->height;
3275 wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3276 wl_egl_buffer->dx, wl_egl_buffer->dy);
3278 if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3280 wl_surface_damage(wl_surface,
3281 wl_egl_buffer->dx, wl_egl_buffer->dy,
3282 wl_egl_buffer->width, wl_egl_buffer->height);
3284 wl_surface_damage_buffer(wl_surface,
3286 wl_egl_buffer->width, wl_egl_buffer->height);
3290 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3292 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3293 wl_egl_buffer->rects[i * 4 + 3]);
3295 wl_surface_damage(wl_surface,
3296 wl_egl_buffer->rects[i * 4 + 0],
3298 wl_egl_buffer->rects[i * 4 + 2],
3299 wl_egl_buffer->rects[i * 4 + 3]);
3301 wl_surface_damage_buffer(wl_surface,
3302 wl_egl_buffer->rects[i * 4 + 0],
3304 wl_egl_buffer->rects[i * 4 + 2],
3305 wl_egl_buffer->rects[i * 4 + 3]);
3310 wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3311 (void *)wl_egl_buffer->wl_buffer,
3312 wl_egl_buffer->serial);
3313 #if TIZEN_FEATURE_ENABLE
3314 if (wl_egl_display->use_explicit_sync &&
3315 wl_egl_surface->surface_sync) {
3317 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3318 wl_egl_buffer->acquire_fence_fd);
3319 TPL_DEBUG("[SET_ACQUIRE_FENCE] wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3320 wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3321 close(wl_egl_buffer->acquire_fence_fd);
3322 wl_egl_buffer->acquire_fence_fd = -1;
3324 wl_egl_buffer->buffer_release =
3325 zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3326 if (!wl_egl_buffer->buffer_release) {
3327 TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3329 zwp_linux_buffer_release_v1_add_listener(
3330 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3331 TPL_DEBUG("add explicit_sync_release_listener.");
3336 wl_surface_commit(wl_surface);
3338 wl_display_flush(wl_egl_display->wl_display);
3340 TRACE_ASYNC_BEGIN((int)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3341 wl_egl_buffer->bo_name);
3343 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3345 wl_egl_buffer->need_to_commit = TPL_FALSE;
3346 wl_egl_buffer->status = COMMITTED;
3348 tpl_gcond_signal(&wl_egl_buffer->cond);
3350 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3353 "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3354 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3355 wl_egl_buffer->bo_name);
3357 if (wl_egl_surface->vblank != NULL &&
3358 _thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3359 TPL_ERR("Failed to set wait vblank.");
3361 tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3363 if (wl_egl_buffer->commit_sync_fd != -1) {
3364 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3366 TPL_ERR("Failed to send commit_sync signal to fd(%d)", wl_egl_buffer->commit_sync_fd);
3369 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3370 wl_egl_buffer->bo_name);
3371 TPL_DEBUG("[COMMIT_SYNC][SEND] wl_egl_surface(%p) commit_sync_fd(%d)",
3372 wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3374 close(wl_egl_buffer->commit_sync_fd);
3375 wl_egl_buffer->commit_sync_fd = -1;
3378 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
3382 _write_to_eventfd(int eventfd)
3387 if (eventfd == -1) {
3388 TPL_ERR("Invalid fd(-1)");
3392 ret = write(eventfd, &value, sizeof(uint64_t));
3394 TPL_ERR("failed to write to fd(%d)", eventfd);
3402 __tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
3404 TPL_ASSERT(backend);
3406 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3407 backend->data = NULL;
3409 backend->init = __tpl_wl_egl_display_init;
3410 backend->fini = __tpl_wl_egl_display_fini;
3411 backend->query_config = __tpl_wl_egl_display_query_config;
3412 backend->filter_config = __tpl_wl_egl_display_filter_config;
3413 backend->get_window_info = __tpl_wl_egl_display_get_window_info;
3414 backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
3415 backend->get_buffer_from_native_pixmap =
3416 __tpl_wl_egl_display_get_buffer_from_native_pixmap;
3420 __tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
3422 TPL_ASSERT(backend);
3424 backend->type = TPL_BACKEND_WAYLAND_THREAD;
3425 backend->data = NULL;
3427 backend->init = __tpl_wl_egl_surface_init;
3428 backend->fini = __tpl_wl_egl_surface_fini;
3429 backend->validate = __tpl_wl_egl_surface_validate;
3430 backend->cancel_dequeued_buffer =
3431 __tpl_wl_egl_surface_cancel_buffer;
3432 backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
3433 backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
3434 backend->set_rotation_capability =
3435 __tpl_wl_egl_surface_set_rotation_capability;
3436 backend->set_post_interval =
3437 __tpl_wl_egl_surface_set_post_interval;
3439 __tpl_wl_egl_surface_get_size;
3443 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
3445 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3446 tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
3448 TPL_INFO("[BUFFER_FREE]", "wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3449 wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface);
3451 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3452 if (wl_egl_buffer->idx >= 0 && wl_egl_surface->buffers[wl_egl_buffer->idx]) {
3453 wl_egl_surface->buffers[wl_egl_buffer->idx] = NULL;
3454 wl_egl_surface->buffer_cnt--;
3456 wl_egl_buffer->idx = -1;
3458 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
3460 if (wl_egl_display) {
3461 if (wl_egl_buffer->wl_buffer) {
3462 wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
3463 (void *)wl_egl_buffer->wl_buffer);
3464 wl_egl_buffer->wl_buffer = NULL;
3467 wl_display_flush(wl_egl_display->wl_display);
3470 #if TIZEN_FEATURE_ENABLE
3471 if (wl_egl_buffer->buffer_release) {
3472 zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3473 wl_egl_buffer->buffer_release = NULL;
3476 if (wl_egl_buffer->release_fence_fd != -1) {
3477 close(wl_egl_buffer->release_fence_fd);
3478 wl_egl_buffer->release_fence_fd = -1;
3482 if (wl_egl_buffer->waiting_source) {
3483 tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
3484 wl_egl_buffer->waiting_source = NULL;
3487 if (wl_egl_buffer->commit_sync_fd != -1) {
3488 int ret = _write_to_eventfd(wl_egl_buffer->commit_sync_fd);
3490 TPL_ERR("Failed to send commit_sync signal to fd(%d)",
3491 wl_egl_buffer->commit_sync_fd);
3492 close(wl_egl_buffer->commit_sync_fd);
3493 wl_egl_buffer->commit_sync_fd = -1;
3496 if (wl_egl_buffer->presentation_sync_fd != -1) {
3497 int ret = _write_to_eventfd(wl_egl_buffer->presentation_sync_fd);
3499 TPL_ERR("Failed to send presentation_sync signal to fd(%d)",
3500 wl_egl_buffer->presentation_sync_fd);
3501 close(wl_egl_buffer->presentation_sync_fd);
3502 wl_egl_buffer->presentation_sync_fd = -1;
3505 if (wl_egl_buffer->rects) {
3506 free(wl_egl_buffer->rects);
3507 wl_egl_buffer->rects = NULL;
3508 wl_egl_buffer->num_rects = 0;
3511 wl_egl_buffer->tbm_surface = NULL;
3512 wl_egl_buffer->bo_name = -1;
3514 free(wl_egl_buffer);
3518 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
3520 return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
3524 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
3528 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
3529 TPL_INFO("[BUFFERS_INFO]", "wl_egl_surface(%p) buffer_cnt(%d)",
3530 wl_egl_surface, wl_egl_surface->buffer_cnt);
3531 for (idx = 0; idx < BUFFER_ARRAY_SIZE; idx++) {
3532 tpl_wl_egl_buffer_t *wl_egl_buffer = wl_egl_surface->buffers[idx];
3533 if (wl_egl_buffer) {
3535 "INDEX[%d] | wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%s)",
3536 idx, wl_egl_buffer, wl_egl_buffer->tbm_surface,
3537 wl_egl_buffer->bo_name,
3538 status_to_string[wl_egl_buffer->status]);
3541 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);