Modify the locking of buffer->mutex to include surface_commit
[platform/core/uifw/libtpl-egl.git] / src / tpl_wl_egl_thread.c
1
2 #include "tpl_internal.h"
3
4 #include <string.h>
5 #include <fcntl.h>
6 #include <unistd.h>
7 #include <sys/eventfd.h>
8
9 #include <tbm_bufmgr.h>
10 #include <tbm_surface.h>
11 #include <tbm_surface_internal.h>
12 #include <tbm_surface_queue.h>
13
14 #include <wayland-client.h>
15 #include <wayland-tbm-server.h>
16 #include <wayland-tbm-client.h>
17 #include <wayland-egl-backend.h>
18
19 #include <tdm_client.h>
20
21 #include "wayland-egl-tizen/wayland-egl-tizen.h"
22 #include "wayland-egl-tizen/wayland-egl-tizen-priv.h"
23
24 #ifndef TIZEN_FEATURE_ENABLE
25 #define TIZEN_FEATURE_ENABLE 1
26 #endif
27
28 #if TIZEN_FEATURE_ENABLE
29 #include <tizen-surface-client-protocol.h>
30 #include <presentation-time-client-protocol.h>
31 #include <linux-explicit-synchronization-unstable-v1-client-protocol.h>
32 #endif
33
34 #include "tpl_utils_gthread.h"
35
36 static int wl_egl_buffer_key;
37 #define KEY_WL_EGL_BUFFER (unsigned long)(&wl_egl_buffer_key)
38
39 /* In wayland, the application and the compositor each create their own drawing buffers. The recommended size is more than 2. */
40 #define BUFFER_ARRAY_SIZE 9
41
42 typedef struct _tpl_wl_egl_display tpl_wl_egl_display_t;
43 typedef struct _tpl_wl_egl_surface tpl_wl_egl_surface_t;
44 typedef struct _tpl_wl_egl_buffer  tpl_wl_egl_buffer_t;
45 typedef struct _surface_vblank     tpl_surface_vblank_t;
46
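/* Declaration-style helper macros used throughout this file. For example,
 *     tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
 * roughly expands to
 *     tpl_wl_egl_display_t *wl_egl_display =
 *             (tpl_wl_egl_display_t *)tpl_gsource_get_data(gsource);
 */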
47 #define wl_egl_display(ptr) *wl_egl_display = (tpl_wl_egl_display_t *)ptr;
48 #define wl_egl_surface(ptr) *wl_egl_surface = (tpl_wl_egl_surface_t *)ptr;
49 #define wl_egl_buffer(ptr) *wl_egl_buffer = (tpl_wl_egl_buffer_t *)ptr;
50 #define tizen_private(ptr) *tizen_private = (struct tizen_private *)ptr;
51
52 struct _tpl_wl_egl_display {
53         tpl_gsource                  *disp_source;
54         tpl_gthread                  *thread;
55         tpl_gmutex                    wl_event_mutex;
56
57         struct wl_display            *wl_display;
58         struct wl_event_queue        *ev_queue;
59         struct wayland_tbm_client    *wl_tbm_client;
60         int                           last_error; /* errno of the last wl_display error */
61
62         tpl_bool_t                    wl_initialized;
63
64         tpl_bool_t                    use_wait_vblank;
65         tpl_bool_t                    use_explicit_sync;
66         tpl_bool_t                    use_tss;
67         tpl_bool_t                    prepared;
68         /* To make sure that tpl_gsource has been successfully finalized. */
69         tpl_bool_t                    gsource_finalized;
70         tpl_gmutex                    disp_mutex;
71         tpl_gcond                     disp_cond;
72         struct {
73                 tdm_client               *tdm_client;
74                 tpl_gsource              *tdm_source;
75                 int                       tdm_display_fd;
76                 tpl_bool_t                tdm_initialized;
77                 tpl_list_t               *surface_vblanks;
78
79                 /* To make sure that tpl_gsource has been successfully finalized. */
80                 tpl_bool_t                gsource_finalized;
81                 tpl_gmutex                tdm_mutex;
82                 tpl_gcond                 tdm_cond;
83         } tdm;
84
85 #if TIZEN_FEATURE_ENABLE
86         struct tizen_surface_shm     *tss; /* used for surface buffer_flush */
87         struct wp_presentation       *presentation; /* for presentation feedback */
88         struct zwp_linux_explicit_synchronization_v1 *explicit_sync; /* for explicit fence sync */
89 #endif
90 };
91
92 typedef enum surf_message {
93         NONE_MESSAGE = 0,
94         INIT_SURFACE,
95         ACQUIRABLE,
96 } surf_message;
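/* Messages delivered to the surface's tpl_gsource through sent_message
 * (guarded by surf_mutex / surf_cond below). Presumably INIT_SURFACE requests
 * in-thread surface initialization and ACQUIRABLE signals that the
 * tbm_surface_queue has a buffer ready to be acquired. */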
97
98 struct _tpl_wl_egl_surface {
99         tpl_gsource                  *surf_source;
100
101         tbm_surface_queue_h           tbm_queue;
102         int                           num_buffers;
103
104         struct wl_egl_window         *wl_egl_window;
105         struct wl_surface            *wl_surface;
106
107 #if TIZEN_FEATURE_ENABLE
108         struct zwp_linux_surface_synchronization_v1 *surface_sync; /* for explicit fence sync */
109         struct tizen_surface_shm_flusher *tss_flusher; /* used for surface buffer_flush */
110 #endif
111
112         tpl_surface_vblank_t         *vblank;
113
114         /* surface information */
115         unsigned int                  serial;
116
117         int                           width;
118         int                           height;
119         int                           format;
120         int                           latest_transform;
121         int                           rotation;
122         int                           post_interval;
123
124         tpl_wl_egl_display_t         *wl_egl_display;
125         tpl_surface_t                *tpl_surface;
126
127         /* wl_egl_buffer list for buffer tracing */
128         tpl_list_t                   *buffers;
129         int                           buffer_cnt; /* the number of wl_egl_buffers in use */
130         tpl_gmutex                    buffers_mutex;
131         tbm_surface_h                 last_enq_buffer;
132
133         tpl_list_t                   *presentation_feedbacks; /* for tracing presentation feedbacks */
134
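        /* eventfd/mutex pairs presumably backing the create_commit_sync_fd and
         * create_presentation_sync_fd callbacks of wl_egl_window_tizen: the fd
         * is signaled when wl_surface_commit is done for a buffer and when the
         * corresponding presentation feedback arrives (see the per-buffer
         * commit_sync_fd / presentation_sync_fd fields below). */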
135         struct {
136                 tpl_gmutex                mutex;
137                 int                       fd;
138         } commit_sync;
139
140         struct {
141                 tpl_gmutex                mutex;
142                 int                       fd;
143         } presentation_sync;
144
145         tpl_gmutex                    surf_mutex;
146         tpl_gcond                     surf_cond;
147
148         surf_message                  sent_message;
149
150         /* for waiting draw done */
151         tpl_bool_t                    use_render_done_fence;
152         tpl_bool_t                    is_activated;
153         tpl_bool_t                    reset; /* TRUE if the queue was reset externally */
154         tpl_bool_t                    need_to_enqueue;
155         tpl_bool_t                    prerotation_capability;
156         tpl_bool_t                    vblank_done;
157         tpl_bool_t                    vblank_enable;
158         tpl_bool_t                    set_serial_is_used;
159         tpl_bool_t                    initialized_in_thread;
160         tpl_bool_t                    frontbuffer_activated;
161
162         /* To make sure that tpl_gsource has been successfully finalized. */
163         tpl_bool_t                    gsource_finalized;
164 };
165
166 struct _surface_vblank {
167         tdm_client_vblank            *tdm_vblank;
168         tpl_wl_egl_surface_t         *wl_egl_surface;
169         tpl_list_t                   *waiting_buffers; /* for FIFO/FIFO_RELAXED modes */
170         tpl_gmutex                    mutex;
171 };
172
173 typedef enum buffer_status {
174         RELEASED = 0,             // 0
175         DEQUEUED,                 // 1
176         ENQUEUED,                 // 2
177         ACQUIRED,                 // 3
178         WAITING_SIGNALED,         // 4
179         WAITING_VBLANK,           // 5
180         COMMITTED,                // 6
181 } buffer_status_t;
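/* Typical lifecycle of a wl_egl_buffer, as suggested by the names above:
 * RELEASED -> DEQUEUED (handed to EGL for rendering) -> ENQUEUED (render done)
 * -> ACQUIRED (taken from the tbm_surface_queue by the tpl thread)
 * -> WAITING_SIGNALED (waiting for the acquire fence) -> WAITING_VBLANK
 * -> COMMITTED (wl_surface_commit sent) -> back to RELEASED on wl_buffer release. */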
182
183 static const char *status_to_string[7] = {
184         "RELEASED",                 // 0
185         "DEQUEUED",                 // 1
186         "ENQUEUED",                 // 2
187         "ACQUIRED",                 // 3
188         "WAITING_SIGNALED",         // 4
189         "WAITING_VBLANK",           // 5
190         "COMMITTED",                // 6
191 };
192
193 struct _tpl_wl_egl_buffer {
194         tbm_surface_h                 tbm_surface;
195         int                           bo_name;
196
197         struct wl_proxy              *wl_buffer;
198         int                           dx, dy; /* position to attach to wl_surface */
199         int                           width, height; /* size to attach to wl_surface */
200
201         buffer_status_t               status; /* for tracing buffer status */
202
203         /* for damage region */
204         int                           num_rects;
205         int                          *rects;
206
207         /* for wayland_tbm_client_set_buffer_transform */
208         int                           w_transform;
209         tpl_bool_t                    w_rotated;
210
211         /* for wl_surface_set_buffer_transform */
212         int                           transform;
213
214         /* for wayland_tbm_client_set_buffer_serial */
215         unsigned int                  serial;
216
217         /* for checking need_to_commit (frontbuffer mode) */
218         tpl_bool_t                    need_to_commit;
219
220         /* for checking draw done */
221         tpl_bool_t                    draw_done;
222
223 #if TIZEN_FEATURE_ENABLE
224         /* to get release event via zwp_linux_buffer_release_v1 */
225         struct zwp_linux_buffer_release_v1 *buffer_release;
226 #endif
227         /* each buffer owns its release_fence_fd until it passes
228          * ownership of it to EGL */
229         int32_t                       release_fence_fd;
230
231         /* each buffer owns its acquire_fence_fd.
232          * If zwp_linux_buffer_release_v1 is used, the ownership of this fd
233          * will be passed to the display server.
234          * Otherwise it will be used as a fence to wait for render done
235          * on the tpl thread */
236         int32_t                       acquire_fence_fd;
237
238         /* Fd used to send a signal when wl_surface_commit is called with this buffer */
239         int32_t                       commit_sync_fd;
240
241         /* Fd used to send a signal when the presentation feedback
242          * is received from the display server */
243         int32_t                       presentation_sync_fd;
244
245         tpl_gsource                  *waiting_source;
246
247         tpl_gmutex                    mutex;
248         tpl_gcond                     cond;
249
250         tpl_wl_egl_surface_t         *wl_egl_surface;
251 };
252
253 #if TIZEN_FEATURE_ENABLE
254 struct pst_feedback {
255         /* to get presentation feedback from display server */
256         struct wp_presentation_feedback *presentation_feedback;
257
258         int32_t                          pst_sync_fd;
259
260         int                              bo_name;
261         tpl_wl_egl_surface_t            *wl_egl_surface;
262
263 };
264 #endif
265
266 static const struct wl_buffer_listener wl_buffer_release_listener;
267
268 static int
269 _get_tbm_surface_bo_name(tbm_surface_h tbm_surface);
270 static void
271 _print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface);
272 static tpl_bool_t
273 _check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface);
274 static void
275 __cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer);
276 static tpl_wl_egl_buffer_t *
277 _get_wl_egl_buffer(tbm_surface_h tbm_surface);
278 static int
279 _write_to_eventfd(int eventfd, uint64_t value);
280 static int
281 send_signal(int fd, const char *type);
282 static void
283 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface);
284 static tpl_result_t
285 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface);
286 static void
287 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
288                                                   tpl_wl_egl_buffer_t *wl_egl_buffer);
289 static void
290 __cb_surface_vblank_free(void *data);
291
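/* Allocates the tizen_private block attached to a wl_egl_window
 * (driver_private) with all callbacks cleared; the callbacks are expected to
 * be filled in later when the tpl surface is created for that window. */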
292 static struct tizen_private *
293 tizen_private_create()
294 {
295         struct tizen_private *private = calloc(1, sizeof(struct tizen_private));
296         if (private) {
297                 private->magic = WL_EGL_TIZEN_MAGIC;
298                 private->rotation = 0;
299                 private->frontbuffer_mode = 0;
300                 private->transform = 0;
301                 private->window_transform = 0;
302                 private->serial = 0;
303
304                 private->data = NULL;
305                 private->rotate_callback = NULL;
306                 private->get_rotation_capability = NULL;
307                 private->set_window_serial_callback = NULL;
308                 private->set_frontbuffer_callback = NULL;
309                 private->create_commit_sync_fd = NULL;
310                 private->create_presentation_sync_fd = NULL;
311                 private->merge_sync_fds = NULL;
312         }
313
314         return private;
315 }
316
317 static tpl_bool_t
318 _check_native_handle_is_wl_display(tpl_handle_t display)
319 {
320         struct wl_interface *wl_egl_native_dpy = *(void **) display;
321
322         if (!wl_egl_native_dpy) {
323                 TPL_ERR("Invalid parameter. native_display(%p)", wl_egl_native_dpy);
324                 return TPL_FALSE;
325         }
326
327         /* MAGIC CHECK: A native display handle is a wl_display if the dereferenced first value
328            is a memory address pointing to the wl_display_interface structure. */
329         if (wl_egl_native_dpy == &wl_display_interface)
330                 return TPL_TRUE;
331
332         if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
333                                 strlen(wl_display_interface.name)) == 0) {
334                 return TPL_TRUE;
335         }
336
337         return TPL_FALSE;
338 }
339
340 static tpl_bool_t
341 __thread_func_tdm_dispatch(tpl_gsource *gsource, uint64_t message)
342 {
343         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
344         tdm_error                   tdm_err = TDM_ERROR_NONE;
345
346         TPL_IGNORE(message);
347
348         if (!wl_egl_display) {
349                 TPL_ERR("Failed to get wl_egl_display from gsource(%p)", gsource);
350                 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
351                 return TPL_FALSE;
352         }
353
354         tdm_err = tdm_client_handle_events(wl_egl_display->tdm.tdm_client);
355
356         /* If an error occurs in tdm_client_handle_events, it cannot be recovered.
357          * When tdm_source is no longer available due to an unexpected situation,
358          * wl_egl_thread must remove it from the thread and destroy it.
359          * In that case, tdm_vblank can no longer be used for surfaces and displays
360          * that used this tdm_source. */
361         if (tdm_err != TDM_ERROR_NONE) {
362                 TPL_ERR("Error occurred in tdm_client_handle_events. tdm_err(%d)",
363                                 tdm_err);
364                 TPL_WARN("tdm_source(%p) will be removed from thread.", gsource);
365
366                 tpl_gsource_destroy(gsource, TPL_FALSE);
367
368                 wl_egl_display->tdm.tdm_source = NULL;
369
370                 return TPL_FALSE;
371         }
372
373         return TPL_TRUE;
374 }
375
376 static void
377 __thread_func_tdm_finalize(tpl_gsource *gsource)
378 {
379         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
380
381         tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
382
383         TPL_INFO("[TDM_CLIENT_FINI]",
384                          "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
385                          wl_egl_display, wl_egl_display->tdm.tdm_client,
386                          wl_egl_display->tdm.tdm_display_fd);
387
388         if (wl_egl_display->tdm.tdm_client) {
389
390                 if (wl_egl_display->tdm.surface_vblanks) {
391                         __tpl_list_free(wl_egl_display->tdm.surface_vblanks,
392                                     __cb_surface_vblank_free);
393                         wl_egl_display->tdm.surface_vblanks = NULL;
394                 }
395
396                 tdm_client_destroy(wl_egl_display->tdm.tdm_client);
397                 wl_egl_display->tdm.tdm_client = NULL;
398                 wl_egl_display->tdm.tdm_display_fd = -1;
399                 wl_egl_display->tdm.tdm_source = NULL;
400         }
401
402         wl_egl_display->use_wait_vblank = TPL_FALSE;
403         wl_egl_display->tdm.tdm_initialized = TPL_FALSE;
404         wl_egl_display->tdm.gsource_finalized = TPL_TRUE;
405
406         tpl_gcond_signal(&wl_egl_display->tdm.tdm_cond);
407         tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
408 }
409
410 static tpl_gsource_functions tdm_funcs = {
411         .prepare  = NULL,
412         .check    = NULL,
413         .dispatch = __thread_func_tdm_dispatch,
414         .finalize = __thread_func_tdm_finalize,
415 };
416
417 tpl_result_t
418 _thread_tdm_init(tpl_wl_egl_display_t *wl_egl_display)
419 {
420         tdm_client       *tdm_client = NULL;
421         int               tdm_display_fd = -1;
422         tdm_error         tdm_err = TDM_ERROR_NONE;
423
424         tdm_client = tdm_client_create(&tdm_err);
425         if (!tdm_client || tdm_err != TDM_ERROR_NONE) {
426                 TPL_ERR("TDM_ERROR:%d Failed to create tdm_client\n", tdm_err);
427                 return TPL_ERROR_INVALID_OPERATION;
428         }
429
430         tdm_err = tdm_client_get_fd(tdm_client, &tdm_display_fd);
431         if (tdm_display_fd < 0 || tdm_err != TDM_ERROR_NONE) {
432                 TPL_ERR("TDM_ERROR:%d Failed to get tdm_client fd\n", tdm_err);
433                 tdm_client_destroy(tdm_client);
434                 return TPL_ERROR_INVALID_OPERATION;
435         }
436
437         wl_egl_display->tdm.tdm_display_fd  = tdm_display_fd;
438         wl_egl_display->tdm.tdm_client      = tdm_client;
439         wl_egl_display->tdm.tdm_source      = NULL;
440         wl_egl_display->tdm.tdm_initialized = TPL_TRUE;
441         wl_egl_display->tdm.surface_vblanks = __tpl_list_alloc();
442
443         TPL_INFO("[TDM_CLIENT_INIT]",
444                          "wl_egl_display(%p) tdm_client(%p) tdm_display_fd(%d)",
445                          wl_egl_display, tdm_client, tdm_display_fd);
446
447         return TPL_ERROR_NONE;
448 }
449
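/* Highest tizen_surface_shm protocol version implemented by this client;
 * the registry bind below clamps the advertised version to this value. */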
450 #define IMPL_TIZEN_SURFACE_SHM_VERSION 2
451
452
453 static void
454 __cb_wl_registry_global_callback(void *data, struct wl_registry *wl_registry,
455                                                           uint32_t name, const char *interface,
456                                                           uint32_t version)
457 {
458 #if TIZEN_FEATURE_ENABLE
459         tpl_wl_egl_display_t wl_egl_display(data);
460
461         if (!strcmp(interface, "tizen_surface_shm")) {
462                 wl_egl_display->tss =
463                         wl_registry_bind(wl_registry,
464                                                          name,
465                                                          &tizen_surface_shm_interface,
466                                                          ((version < IMPL_TIZEN_SURFACE_SHM_VERSION) ?
467                                                          version : IMPL_TIZEN_SURFACE_SHM_VERSION));
468                 wl_egl_display->use_tss = TPL_TRUE;
469         } else if (!strcmp(interface, wp_presentation_interface.name)) {
470                 wl_egl_display->presentation =
471                                         wl_registry_bind(wl_registry,
472                                                                          name, &wp_presentation_interface, 1);
473                 TPL_LOG_D("[REGISTRY_BIND]",
474                                   "wl_egl_display(%p) bind wp_presentation_interface",
475                                   wl_egl_display);
476         } else if (strcmp(interface, "zwp_linux_explicit_synchronization_v1") == 0) {
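                /* The TPL_EFS environment variable can be set to 0 to disable
                 * explicit fence sync even when the compositor advertises
                 * zwp_linux_explicit_synchronization_v1. */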
477                 char *env = tpl_getenv("TPL_EFS");
478                 if (env && !atoi(env)) {
479                         wl_egl_display->use_explicit_sync = TPL_FALSE;
480                 } else {
481                         wl_egl_display->explicit_sync =
482                                         wl_registry_bind(wl_registry, name,
483                                                                          &zwp_linux_explicit_synchronization_v1_interface, 1);
484                         wl_egl_display->use_explicit_sync = TPL_TRUE;
485                         TPL_LOG_D("[REGISTRY_BIND]",
486                                           "wl_egl_display(%p) bind zwp_linux_explicit_synchronization_v1_interface",
487                                           wl_egl_display);
488                 }
489         }
490 #endif
491 }
492
493 static void
494 __cb_wl_registry_global_remove_callback(void *data,
495                                                                          struct wl_registry *wl_registry,
496                                                                          uint32_t name)
497 {
498 }
499
500 static const struct wl_registry_listener registry_listener = {
501         __cb_wl_registry_global_callback,
502         __cb_wl_registry_global_remove_callback
503 };
504
505 static void
506 _wl_display_print_err(tpl_wl_egl_display_t *wl_egl_display,
507                                           const char *func_name)
508 {
509         int dpy_err;
510         char buf[1024];
511         strerror_r(errno, buf, sizeof(buf));
512
513         if (wl_egl_display->last_error == errno)
514                 return;
515
516         TPL_ERR("failed to %s. error:%d(%s)", func_name, errno, buf);
517
518         dpy_err = wl_display_get_error(wl_egl_display->wl_display);
519         if (dpy_err == EPROTO) {
520                 const struct wl_interface *err_interface;
521                 uint32_t err_proxy_id, err_code;
522                 err_code = wl_display_get_protocol_error(wl_egl_display->wl_display,
523                                                                                                  &err_interface,
524                                                                                                  &err_proxy_id);
525                 TPL_ERR("[Protocol Error] interface: %s, error_code: %d, proxy_id: %d",
526                                 (err_interface ? err_interface->name : "UNKNOWN"),
527                                 err_code, err_proxy_id);
528         }
529
530         wl_egl_display->last_error = errno;
531 }
532
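/* Runs on the wl_egl_thread: creates a temporary event queue for the initial
 * registry roundtrip, binds the optional globals (tizen_surface_shm,
 * wp_presentation, explicit sync), initializes the wayland-tbm client and
 * then moves the bound proxies onto the thread's private ev_queue. */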
533 tpl_result_t
534 _thread_wl_display_init(tpl_wl_egl_display_t *wl_egl_display)
535 {
536         struct wl_registry *registry                = NULL;
537         struct wl_event_queue *queue                = NULL;
538         struct wl_display *display_wrapper          = NULL;
539         struct wl_proxy *wl_tbm                     = NULL;
540         struct wayland_tbm_client *wl_tbm_client    = NULL;
541         int ret;
542         tpl_result_t result = TPL_ERROR_NONE;
543
544         queue = wl_display_create_queue(wl_egl_display->wl_display);
545         if (!queue) {
546                 TPL_ERR("Failed to create wl_queue wl_display(%p)",
547                                 wl_egl_display->wl_display);
548                 result = TPL_ERROR_INVALID_OPERATION;
549                 goto fini;
550         }
551
552         wl_egl_display->ev_queue = wl_display_create_queue(wl_egl_display->wl_display);
553         if (!wl_egl_display->ev_queue) {
554                 TPL_ERR("Failed to create wl_queue wl_display(%p)",
555                                 wl_egl_display->wl_display);
556                 result = TPL_ERROR_INVALID_OPERATION;
557                 goto fini;
558         }
559
560         display_wrapper = wl_proxy_create_wrapper(wl_egl_display->wl_display);
561         if (!display_wrapper) {
562                 TPL_ERR("Failed to create a proxy wrapper of wl_display(%p)",
563                                 wl_egl_display->wl_display);
564                 result = TPL_ERROR_INVALID_OPERATION;
565                 goto fini;
566         }
567
568         wl_proxy_set_queue((struct wl_proxy *)display_wrapper, queue);
569
570         registry = wl_display_get_registry(display_wrapper);
571         if (!registry) {
572                 TPL_ERR("Failed to create wl_registry");
573                 result = TPL_ERROR_INVALID_OPERATION;
574                 goto fini;
575         }
576
577         wl_proxy_wrapper_destroy(display_wrapper);
578         display_wrapper = NULL;
579
580         wl_tbm_client = wayland_tbm_client_init(wl_egl_display->wl_display);
581         if (!wl_tbm_client) {
582                 TPL_ERR("Failed to initialize wl_tbm_client.");
583                 result = TPL_ERROR_INVALID_CONNECTION;
584                 goto fini;
585         }
586
587         wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(wl_tbm_client);
588         if (!wl_tbm) {
589                 TPL_ERR("Failed to get wl_tbm from wl_tbm_client(%p)", wl_tbm_client);
590                 result = TPL_ERROR_INVALID_CONNECTION;
591                 goto fini;
592         }
593
594         wl_proxy_set_queue(wl_tbm, wl_egl_display->ev_queue);
595         wl_egl_display->wl_tbm_client = wl_tbm_client;
596
597         if (wl_registry_add_listener(registry, &registry_listener,
598                                                                  wl_egl_display)) {
599                 TPL_ERR("Failed to wl_registry_add_listener");
600                 result = TPL_ERROR_INVALID_OPERATION;
601                 goto fini;
602         }
603
604         ret = wl_display_roundtrip_queue(wl_egl_display->wl_display, queue);
605         if (ret == -1) {
606                 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
607                 result = TPL_ERROR_INVALID_OPERATION;
608                 goto fini;
609         }
610
611 #if TIZEN_FEATURE_ENABLE
612         /* set tizen_surface_shm's queue as client's private queue */
613         if (wl_egl_display->tss) {
614                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->tss,
615                                                    wl_egl_display->ev_queue);
616                 TPL_LOG_T("WL_EGL", "tizen_surface_shm(%p) init.", wl_egl_display->tss);
617         }
618
619         if (wl_egl_display->presentation) {
620                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->presentation,
621                                                    wl_egl_display->ev_queue);
622                 TPL_LOG_T("WL_EGL", "wp_presentation(%p) init.",
623                                   wl_egl_display->presentation);
624         }
625
626         if (wl_egl_display->explicit_sync) {
627                 wl_proxy_set_queue((struct wl_proxy *)wl_egl_display->explicit_sync,
628                                                    wl_egl_display->ev_queue);
629                 TPL_LOG_T("WL_EGL", "zwp_linux_explicit_synchronization_v1(%p) init.",
630                                   wl_egl_display->explicit_sync);
631         }
632 #endif
633         wl_egl_display->wl_initialized = TPL_TRUE;
634
635         TPL_INFO("[WAYLAND_INIT]",
636                          "wl_egl_display(%p) wl_display(%p) wl_tbm_client(%p) event_queue(%p)",
637                          wl_egl_display, wl_egl_display->wl_display,
638                          wl_egl_display->wl_tbm_client, wl_egl_display->ev_queue);
639 #if TIZEN_FEATURE_ENABLE
640         TPL_INFO("[WAYLAND_INIT]",
641                          "tizen_surface_shm(%p) wp_presentation(%p) explicit_sync(%p)",
642                          wl_egl_display->tss, wl_egl_display->presentation,
643                          wl_egl_display->explicit_sync);
644 #endif
645 fini:
646         if (display_wrapper)
647                 wl_proxy_wrapper_destroy(display_wrapper);
648         if (registry)
649                 wl_registry_destroy(registry);
650         if (queue)
651                 wl_event_queue_destroy(queue);
652
653         return result;
654 }
655
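/* Counterpart of _thread_wl_display_init: cancels any pending read, flushes
 * remaining events with a roundtrip on ev_queue, destroys the bound globals,
 * deinitializes the wayland-tbm client and destroys the private event queue. */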
656 void
657 _thread_wl_display_fini(tpl_wl_egl_display_t *wl_egl_display)
658 {
659         /* If wl_egl_display is in prepared state, cancel it */
660         if (wl_egl_display->prepared) {
661                 wl_display_cancel_read(wl_egl_display->wl_display);
662                 wl_egl_display->prepared = TPL_FALSE;
663         }
664
665         if (wl_display_roundtrip_queue(wl_egl_display->wl_display,
666                                                                    wl_egl_display->ev_queue) == -1) {
667                 _wl_display_print_err(wl_egl_display, "roundtrip_queue");
668         }
669
670 #if TIZEN_FEATURE_ENABLE
671         if (wl_egl_display->tss) {
672                 TPL_INFO("[TIZEN_SURFACE_SHM_DESTROY]",
673                                  "wl_egl_display(%p) tizen_surface_shm(%p) fini.",
674                                  wl_egl_display, wl_egl_display->tss);
675                 tizen_surface_shm_destroy(wl_egl_display->tss);
676                 wl_egl_display->tss = NULL;
677         }
678
679         if (wl_egl_display->presentation) {
680                 TPL_INFO("[WP_PRESENTATION_DESTROY]",
681                                  "wl_egl_display(%p) wp_presentation(%p) fini.",
682                                  wl_egl_display, wl_egl_display->presentation);
683                 wp_presentation_destroy(wl_egl_display->presentation);
684                 wl_egl_display->presentation = NULL;
685         }
686
687         if (wl_egl_display->explicit_sync) {
688                 TPL_INFO("[EXPLICIT_SYNC_DESTROY]",
689                                  "wl_egl_display(%p) zwp_linux_explicit_synchronization_v1(%p) fini.",
690                                  wl_egl_display, wl_egl_display->explicit_sync);
691                 zwp_linux_explicit_synchronization_v1_destroy(wl_egl_display->explicit_sync);
692                 wl_egl_display->explicit_sync = NULL;
693         }
694 #endif
695         if (wl_egl_display->wl_tbm_client) {
696                 struct wl_proxy *wl_tbm = NULL;
697
698                 wl_tbm = (struct wl_proxy *)wayland_tbm_client_get_wl_tbm(
699                                                                                 wl_egl_display->wl_tbm_client);
700                 if (wl_tbm) {
701                         wl_proxy_set_queue(wl_tbm, NULL);
702                 }
703
704                 TPL_INFO("[WL_TBM_DEINIT]",
705                                  "wl_egl_display(%p) wl_tbm_client(%p)",
706                                  wl_egl_display, wl_egl_display->wl_tbm_client);
707                 wayland_tbm_client_deinit(wl_egl_display->wl_tbm_client);
708                 wl_egl_display->wl_tbm_client = NULL;
709         }
710
711         wl_event_queue_destroy(wl_egl_display->ev_queue);
712
713         wl_egl_display->ev_queue = NULL;
714         wl_egl_display->wl_initialized = TPL_FALSE;
715
716         TPL_INFO("[DISPLAY_FINI]", "wl_egl_display(%p) wl_display(%p)",
717                          wl_egl_display, wl_egl_display->wl_display);
718 }
719
720 static void*
721 _thread_init(void *data)
722 {
723         tpl_wl_egl_display_t wl_egl_display(data);
724
725         if (_thread_wl_display_init(wl_egl_display) != TPL_ERROR_NONE) {
726                 TPL_ERR("Failed to initialize wl_egl_display(%p) with wl_display(%p)",
727                                 wl_egl_display, wl_egl_display->wl_display);
728         }
729
730         if (wl_egl_display->use_wait_vblank &&
731                 _thread_tdm_init(wl_egl_display) != TPL_ERROR_NONE) {
732                 TPL_WARN("Failed to initialize tdm-client. TPL_WAIT_VBLANK:DISABLED");
733         }
734
735         return wl_egl_display;
736 }
737
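/* The three callbacks below implement the standard libwayland-client pattern
 * for reading events from a dedicated thread:
 * prepare:  wl_display_prepare_read_queue() (dispatching pending events until
 *           it succeeds), then wl_display_flush()
 * check:    wl_display_read_events() when the fd is readable, otherwise
 *           wl_display_cancel_read()
 * dispatch: wl_display_dispatch_queue_pending() under wl_event_mutex */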
738 static tpl_bool_t
739 __thread_func_disp_prepare(tpl_gsource *gsource)
740 {
741         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
742
743         /* If this wl_egl_display is already prepared,
744          * do nothing in this function. */
745         if (wl_egl_display->prepared)
746                 return TPL_FALSE;
747
748         /* If there is a last_error, there is no need to poll,
749          * so skip directly to dispatch.
750          * prepare -> dispatch */
751         if (wl_egl_display->last_error)
752                 return TPL_TRUE;
753
754         while (wl_display_prepare_read_queue(wl_egl_display->wl_display,
755                                                                                  wl_egl_display->ev_queue) != 0) {
756                 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
757                                                                                           wl_egl_display->ev_queue) == -1) {
758                         _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
759                 }
760         }
761
762         wl_egl_display->prepared = TPL_TRUE;
763
764         wl_display_flush(wl_egl_display->wl_display);
765
766         return TPL_FALSE;
767 }
768
769 static tpl_bool_t
770 __thread_func_disp_check(tpl_gsource *gsource)
771 {
772         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
773         tpl_bool_t ret = TPL_FALSE;
774
775         if (!wl_egl_display->prepared)
776                 return ret;
777
778         /* If prepared but last_error is set,
779          * cancel_read is executed and FALSE is returned.
780          * That can lead to G_SOURCE_REMOVE: disp_prepare is called again
781          * and skips from prepare straight to disp_dispatch (bypassing disp_check).
782          * check -> prepare -> dispatch -> G_SOURCE_REMOVE */
783         if (wl_egl_display->prepared && wl_egl_display->last_error) {
784                 wl_display_cancel_read(wl_egl_display->wl_display);
785                 return ret;
786         }
787
788         if (tpl_gsource_check_io_condition(gsource)) {
789                 if (wl_display_read_events(wl_egl_display->wl_display) == -1)
790                         _wl_display_print_err(wl_egl_display, "read_event");
791                 ret = TPL_TRUE;
792         } else {
793                 wl_display_cancel_read(wl_egl_display->wl_display);
794                 ret = TPL_FALSE;
795         }
796
797         wl_egl_display->prepared = TPL_FALSE;
798
799         return ret;
800 }
801
802 static tpl_bool_t
803 __thread_func_disp_dispatch(tpl_gsource *gsource, uint64_t message)
804 {
805         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
806
807         TPL_IGNORE(message);
808
809         /* If there is a last_error, SOURCE_REMOVE should be returned
810          * to remove the gsource from the main loop.
811          * This is because wl_egl_display is no longer valid once last_error has been set. */
812         if (wl_egl_display->last_error) {
813                 return TPL_FALSE;
814         }
815
816         tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
817         if (tpl_gsource_check_io_condition(gsource)) {
818                 if (wl_display_dispatch_queue_pending(wl_egl_display->wl_display,
819                                                                                           wl_egl_display->ev_queue) == -1) {
820                         _wl_display_print_err(wl_egl_display, "dispatch_queue_pending");
821                 }
822         }
823
824         wl_display_flush(wl_egl_display->wl_display);
825         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
826
827         return TPL_TRUE;
828 }
829
830 static void
831 __thread_func_disp_finalize(tpl_gsource *gsource)
832 {
833         tpl_wl_egl_display_t wl_egl_display(tpl_gsource_get_data(gsource));
834
835         tpl_gmutex_lock(&wl_egl_display->disp_mutex);
836         TPL_LOG_D("[D_FINALIZE]", "wl_egl_display(%p) tpl_gsource(%p)",
837                           wl_egl_display, gsource);
838
839         if (wl_egl_display->wl_initialized)
840                 _thread_wl_display_fini(wl_egl_display);
841
842         wl_egl_display->gsource_finalized = TPL_TRUE;
843
844         tpl_gcond_signal(&wl_egl_display->disp_cond);
845         tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
846
847         return;
848 }
849
850
851 static tpl_gsource_functions disp_funcs = {
852         .prepare  = __thread_func_disp_prepare,
853         .check    = __thread_func_disp_check,
854         .dispatch = __thread_func_disp_dispatch,
855         .finalize = __thread_func_disp_finalize,
856 };
857
858 static tpl_result_t
859 __tpl_wl_egl_display_init(tpl_display_t *display)
860 {
861         tpl_wl_egl_display_t *wl_egl_display    = NULL;
862
863         TPL_ASSERT(display);
864
865         /* Do not allow default display in wayland. */
866         if (!display->native_handle) {
867                 TPL_ERR("Invalid native handle for display.");
868                 return TPL_ERROR_INVALID_PARAMETER;
869         }
870
871         if (!_check_native_handle_is_wl_display(display->native_handle)) {
872                 TPL_ERR("native_handle(%p) is not wl_display", display->native_handle);
873                 return TPL_ERROR_INVALID_PARAMETER;
874         }
875
876         wl_egl_display = calloc(1, sizeof(tpl_wl_egl_display_t));
877         if (!wl_egl_display) {
878                 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_display_t.");
879                 return TPL_ERROR_OUT_OF_MEMORY;
880         }
881
882         display->backend.data             = wl_egl_display;
883         display->bufmgr_fd                = -1;
884
885         wl_egl_display->tdm.tdm_initialized   = TPL_FALSE;
886         wl_egl_display->tdm.tdm_client        = NULL;
887         wl_egl_display->tdm.tdm_display_fd    = -1;
888         wl_egl_display->tdm.tdm_source        = NULL;
889
890         wl_egl_display->wl_initialized    = TPL_FALSE;
891
892         wl_egl_display->ev_queue          = NULL;
893         wl_egl_display->wl_display        = (struct wl_display *)display->native_handle;
894         wl_egl_display->last_error        = 0;
895         wl_egl_display->use_tss           = TPL_FALSE;
896         wl_egl_display->use_explicit_sync = TPL_FALSE;   // default disabled
897         wl_egl_display->prepared          = TPL_FALSE;
898         wl_egl_display->gsource_finalized = TPL_FALSE;
899
900 #if TIZEN_FEATURE_ENABLE
901         /* Wayland Interfaces */
902         wl_egl_display->tss               = NULL;
903         wl_egl_display->presentation      = NULL;
904         wl_egl_display->explicit_sync     = NULL;
905 #endif
906         wl_egl_display->wl_tbm_client     = NULL;
907
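        /* The TPL_WAIT_VBLANK environment variable can be set to 0 to disable
         * waiting for tdm vblank (tdm-client is then not initialized). */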
908         wl_egl_display->use_wait_vblank   = TPL_TRUE;   // default enabled
909         {
910                 char *env = tpl_getenv("TPL_WAIT_VBLANK");
911                 if (env && !atoi(env)) {
912                         wl_egl_display->use_wait_vblank = TPL_FALSE;
913                 }
914         }
915
916         tpl_gmutex_init(&wl_egl_display->wl_event_mutex);
917
918         tpl_gmutex_init(&wl_egl_display->disp_mutex);
919         tpl_gcond_init(&wl_egl_display->disp_cond);
920
921         /* Create gthread */
922         wl_egl_display->thread = tpl_gthread_create("wl_egl_thread",
923                                                                                                 (tpl_gthread_func)_thread_init,
924                                                                                                 (void *)wl_egl_display);
925         if (!wl_egl_display->thread) {
926                 TPL_ERR("Failed to create wl_egl_thread");
927                 goto free_display;
928         }
929
930         wl_egl_display->disp_source = tpl_gsource_create(wl_egl_display->thread,
931                                                                                                          (void *)wl_egl_display,
932                                                                                                          wl_display_get_fd(wl_egl_display->wl_display),
933                                                                                                          FD_TYPE_SOCKET,
934                                                                                                          &disp_funcs, SOURCE_TYPE_NORMAL);
935         if (!wl_egl_display->disp_source) {
936                 TPL_ERR("Failed to add native_display(%p) to thread(%p)",
937                                 display->native_handle,
938                                 wl_egl_display->thread);
939                 goto free_display;
940         }
941
942         if (wl_egl_display->use_wait_vblank &&
943                 wl_egl_display->tdm.tdm_initialized) {
944                 tpl_gmutex_init(&wl_egl_display->tdm.tdm_mutex);
945                 tpl_gcond_init(&wl_egl_display->tdm.tdm_cond);
946                 wl_egl_display->tdm.tdm_source = tpl_gsource_create(wl_egl_display->thread,
947                                                                                                                 (void *)wl_egl_display,
948                                                                                                                 wl_egl_display->tdm.tdm_display_fd,
949                                                                                                                 FD_TYPE_SOCKET,
950                                                                                                                 &tdm_funcs, SOURCE_TYPE_NORMAL);
951                 wl_egl_display->tdm.gsource_finalized = TPL_FALSE;
952                 if (!wl_egl_display->tdm.tdm_source) {
953                         TPL_ERR("Failed to create tdm_gsource\n");
954                         goto free_display;
955                 }
956         }
957
958         wl_egl_display->use_wait_vblank = (wl_egl_display->tdm.tdm_initialized &&
959                                                                            (wl_egl_display->tdm.tdm_source != NULL));
960
961         TPL_INFO("[DISPLAY_INIT]",
962                          "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
963                          wl_egl_display,
964                          wl_egl_display->thread,
965                          wl_egl_display->wl_display);
966
967         TPL_INFO("[DISPLAY_INIT]",
968                          "USE_WAIT_VBLANK(%s) TIZEN_SURFACE_SHM(%s) USE_EXPLICIT_SYNC(%s)",
969                          wl_egl_display->use_wait_vblank ? "TRUE" : "FALSE",
970                          wl_egl_display->use_tss ? "TRUE" : "FALSE",
971                          wl_egl_display->use_explicit_sync ? "TRUE" : "FALSE");
972
973         return TPL_ERROR_NONE;
974
975 free_display:
976         if (wl_egl_display->tdm.tdm_source) {
977                 tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
978                 // Send destroy message to thread
979                 tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
980                 while (!wl_egl_display->tdm.gsource_finalized) {
981                         tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
982                 }
983                 tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
984         }
985
986         if (wl_egl_display->disp_source) {
987                 tpl_gmutex_lock(&wl_egl_display->disp_mutex);
988                 // Send destroy message to thread
989                 tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
990                 while (!wl_egl_display->gsource_finalized) {
991                         tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
992                 }
993                 tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
994         }
995
996         if (wl_egl_display->thread) {
997                 tpl_gthread_destroy(wl_egl_display->thread);
998         }
999
1000         tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
1001         tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
1002         tpl_gcond_clear(&wl_egl_display->disp_cond);
1003         tpl_gmutex_clear(&wl_egl_display->disp_mutex);
1004
1005         wl_egl_display->thread = NULL;
1006         free(wl_egl_display);
1007
1008         display->backend.data = NULL;
1009         return TPL_ERROR_INVALID_OPERATION;
1010 }
1011
1012 static void
1013 __tpl_wl_egl_display_fini(tpl_display_t *display)
1014 {
1015         tpl_wl_egl_display_t wl_egl_display(display->backend.data);
1016         if (wl_egl_display) {
1017                 TPL_INFO("[DISPLAY_FINI]",
1018                                   "wl_egl_display(%p) tpl_gthread(%p) wl_display(%p)",
1019                                   wl_egl_display,
1020                                   wl_egl_display->thread,
1021                                   wl_egl_display->wl_display);
1022
1023                 if (wl_egl_display->tdm.tdm_source && wl_egl_display->tdm.tdm_initialized) {
1024                         /* This is a protection against unexpected situations in which
1025                          * g_cond_wait cannot work normally.
1026                          * When calling tpl_gsource_destroy() with destroy_in_thread set to TPL_TRUE,
1027                          * the caller should wait with tpl_gcond_wait() in a loop that checks the
1028                          * finalized flag. */
1029                         tpl_gmutex_lock(&wl_egl_display->tdm.tdm_mutex);
1030                         // Send destroy message to thread
1031                         tpl_gsource_destroy(wl_egl_display->tdm.tdm_source, TPL_TRUE);
1032                         while (!wl_egl_display->tdm.gsource_finalized) {
1033                                 tpl_gcond_wait(&wl_egl_display->tdm.tdm_cond, &wl_egl_display->tdm.tdm_mutex);
1034                         }
1035                         wl_egl_display->tdm.tdm_source = NULL;
1036                         tpl_gmutex_unlock(&wl_egl_display->tdm.tdm_mutex);
1037                 }
1038
1039                 if (wl_egl_display->disp_source) {
1040                         tpl_gmutex_lock(&wl_egl_display->disp_mutex);
1041                         // Send destroy message to thread
1042                         tpl_gsource_destroy(wl_egl_display->disp_source, TPL_TRUE);
1043                         /* This is a protection against unexpected situations in which
1044                          * g_cond_wait cannot work normally.
1045                          * When calling tpl_gsource_destroy() with destroy_in_thread set to TPL_TRUE,
1046                          * the caller should wait with tpl_gcond_wait() in a loop that checks the
1047                          * finalized flag. */
1048                         while (!wl_egl_display->gsource_finalized) {
1049                                 tpl_gcond_wait(&wl_egl_display->disp_cond, &wl_egl_display->disp_mutex);
1050                         }
1051                         wl_egl_display->disp_source = NULL;
1052                         tpl_gmutex_unlock(&wl_egl_display->disp_mutex);
1053                 }
1054
1055                 if (wl_egl_display->thread) {
1056                         tpl_gthread_destroy(wl_egl_display->thread);
1057                         wl_egl_display->thread = NULL;
1058                 }
1059
1060                 tpl_gcond_clear(&wl_egl_display->tdm.tdm_cond);
1061                 tpl_gmutex_clear(&wl_egl_display->tdm.tdm_mutex);
1062                 tpl_gcond_clear(&wl_egl_display->disp_cond);
1063                 tpl_gmutex_clear(&wl_egl_display->disp_mutex);
1064
1065                 tpl_gmutex_clear(&wl_egl_display->wl_event_mutex);
1066
1067                 free(wl_egl_display);
1068         }
1069
1070         display->backend.data = NULL;
1071 }
1072
1073 static tpl_result_t
1074 __tpl_wl_egl_display_query_config(tpl_display_t *display,
1075                                                                   tpl_surface_type_t surface_type,
1076                                                                   int red_size, int green_size,
1077                                                                   int blue_size, int alpha_size,
1078                                                                   int color_depth, int *native_visual_id,
1079                                                                   tpl_bool_t *is_slow)
1080 {
1081         TPL_ASSERT(display);
1082
1083         if (surface_type == TPL_SURFACE_TYPE_WINDOW && red_size == 8 &&
1084                         green_size == 8 && blue_size == 8 &&
1085                         (color_depth == 32 || color_depth == 24)) {
1086
1087                 if (alpha_size == 8) {
1088                         if (native_visual_id) *native_visual_id = TBM_FORMAT_ARGB8888;
1089                         if (is_slow) *is_slow = TPL_FALSE;
1090                         return TPL_ERROR_NONE;
1091                 }
1092                 if (alpha_size == 0) {
1093                         if (native_visual_id) *native_visual_id = TBM_FORMAT_XRGB8888;
1094                         if (is_slow) *is_slow = TPL_FALSE;
1095                         return TPL_ERROR_NONE;
1096                 }
1097         }
1098
1099         return TPL_ERROR_INVALID_PARAMETER;
1100 }
1101
1102 static tpl_result_t
1103 __tpl_wl_egl_display_filter_config(tpl_display_t *display, int *visual_id,
1104                                                                    int alpha_size)
1105 {
1106         TPL_IGNORE(display);
1107         TPL_IGNORE(visual_id);
1108         TPL_IGNORE(alpha_size);
1109         return TPL_ERROR_NONE;
1110 }
1111
1112 static tpl_result_t
1113 __tpl_wl_egl_display_get_window_info(tpl_display_t *display,
1114                                                                          tpl_handle_t window, int *width,
1115                                                                          int *height, tbm_format *format,
1116                                                                          int depth, int a_size)
1117 {
1118         tpl_result_t ret = TPL_ERROR_NONE;
1119         struct wl_egl_window *wl_egl_window = (struct wl_egl_window *)window;
1120
1121         if (!wl_egl_window) {
1122                 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", window);
1123                 return TPL_ERROR_INVALID_PARAMETER;
1124         }
1125
1126         if (width) *width = wl_egl_window->width;
1127         if (height) *height = wl_egl_window->height;
1128         if (format) {
1129                 struct tizen_private tizen_private(wl_egl_window->driver_private);
1130                 if (tizen_private && tizen_private->data) {
1131                         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1132                         *format = wl_egl_surface->format;
1133                 } else {
1134                         if (a_size == 8)
1135                                 *format = TBM_FORMAT_ARGB8888;
1136                         else
1137                                 *format = TBM_FORMAT_XRGB8888;
1138                 }
1139         }
1140
1141         return ret;
1142 }
1143
1144 static tpl_result_t
1145 __tpl_wl_egl_display_get_pixmap_info(tpl_display_t *display,
1146                                                                          tpl_handle_t pixmap, int *width,
1147                                                                          int *height, tbm_format *format)
1148 {
1149         tbm_surface_h   tbm_surface = NULL;
1150
1151         if (!pixmap) {
1152                 TPL_ERR("Invalid parameter. tpl_handle_t(%p)", pixmap);
1153                 return TPL_ERROR_INVALID_PARAMETER;
1154         }
1155
1156         tbm_surface = wayland_tbm_server_get_surface(NULL,
1157                                                                                                  (struct wl_resource *)pixmap);
1158         if (!tbm_surface) {
1159                 TPL_ERR("Failed to get tbm_surface from wayland_tbm.");
1160                 return TPL_ERROR_INVALID_PARAMETER;
1161         }
1162
1163         if (width) *width = tbm_surface_get_width(tbm_surface);
1164         if (height) *height = tbm_surface_get_height(tbm_surface);
1165         if (format) *format = tbm_surface_get_format(tbm_surface);
1166
1167         return TPL_ERROR_NONE;
1168 }
1169
1170 static tbm_surface_h
1171 __tpl_wl_egl_display_get_buffer_from_native_pixmap(tpl_handle_t pixmap)
1172 {
1173         tbm_surface_h tbm_surface = NULL;
1174
1175         TPL_ASSERT(pixmap);
1176
1177         tbm_surface = wayland_tbm_server_get_surface(NULL,
1178                                                                                                  (struct wl_resource *)pixmap);
1179         if (!tbm_surface) {
1180                 TPL_ERR("Failed to get tbm_surface_h from wayland_tbm.");
1181                 return NULL;
1182         }
1183
1184         return tbm_surface;
1185 }
1186
1187 tpl_bool_t
1188 __tpl_display_choose_backend_wl_egl_thread(tpl_handle_t native_dpy)
1189 {
1190         struct wl_interface *wl_egl_native_dpy = *(void **) native_dpy;
1191
1192         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_native_dpy, TPL_FALSE);
1193
1194         /* MAGIC CHECK: A native display handle is a wl_display if the dereferenced first value
1195            is a memory address pointing to the wl_display_interface structure. */
1196         if (wl_egl_native_dpy == &wl_display_interface)
1197                 return TPL_TRUE;
1198
1199         if (strncmp(wl_egl_native_dpy->name, wl_display_interface.name,
1200                                 strlen(wl_display_interface.name)) == 0) {
1201                 return TPL_TRUE;
1202         }
1203
1204         return TPL_FALSE;
1205 }
1206
1207 /* -- BEGIN -- wl_egl_window callback functions */
1208 static void
1209 __cb_destroy_callback(void *private)
1210 {
1211         struct tizen_private tizen_private(private);
1212
1213         if (!tizen_private) {
1214                 TPL_LOG_D("[WL_EGL_WINDOW_DESTROY_CALLBACK]", "Already destroyed surface");
1215                 return;
1216         }
1217
1218         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1219         if (wl_egl_surface) {
1220                 TPL_WARN("[DESTROY_CB][!!!ABNORMAL BEHAVIOR!!!] wl_egl_window(%p) is destroyed.",
1221                                  wl_egl_surface->wl_egl_window);
1222                 TPL_WARN("[DESTROY_CB] native window should be destroyed after eglDestroySurface.");
1223
1224                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1225                 wl_egl_surface->wl_egl_window->destroy_window_callback = NULL;
1226                 wl_egl_surface->wl_egl_window->resize_callback = NULL;
1227                 wl_egl_surface->wl_egl_window->driver_private = NULL;
1228                 wl_egl_surface->wl_egl_window = NULL;
1229                 wl_egl_surface->wl_surface = NULL;
1230
1231                 tizen_private->set_window_serial_callback = NULL;
1232                 tizen_private->rotate_callback = NULL;
1233                 tizen_private->get_rotation_capability = NULL;
1234                 tizen_private->set_frontbuffer_callback = NULL;
1235                 tizen_private->create_commit_sync_fd = NULL;
1236                 tizen_private->create_presentation_sync_fd = NULL;
1237                 tizen_private->data = NULL;
1238
1239                 free(tizen_private);
1240                 tizen_private = NULL;
1241                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1242         }
1243 }
1244
1245 static void
1246 __cb_resize_callback(struct wl_egl_window *wl_egl_window, void *private)
1247 {
1248         TPL_ASSERT(private);
1249
1250         struct tizen_private tizen_private(private);
1251         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1252         int cur_w, cur_h, req_w, req_h, format;
1253
1254         if (!wl_egl_surface) {
1255                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1256                                 wl_egl_window);
1257                 return;
1258         }
1259
1260         format = wl_egl_surface->format;
1261         cur_w = wl_egl_surface->width;
1262         cur_h = wl_egl_surface->height;
1263         req_w = wl_egl_window->width;
1264         req_h = wl_egl_window->height;
1265
1266         TPL_INFO("[WINDOW_RESIZE]",
1267                          "wl_egl_surface(%p) wl_egl_window(%p) (%dx%d) -> (%dx%d)",
1268                          wl_egl_surface, wl_egl_window, cur_w, cur_h, req_w, req_h);
1269
1270         if (tbm_surface_queue_reset(wl_egl_surface->tbm_queue, req_w, req_h, format)
1271                         != TBM_SURFACE_QUEUE_ERROR_NONE) {
1272                 TPL_ERR("Failed to reset tbm_surface_queue(%p)", wl_egl_surface->tbm_queue);
1273                 return;
1274         }
1275 }
1276 /* -- END -- wl_egl_window callback functions */
1277
1278 /* -- BEGIN -- wl_egl_window tizen private callback functions */
1279
1280 /* There is no use case for the prerotation callback below */
1281 static void
1282 __cb_rotate_callback(struct wl_egl_window *wl_egl_window, void *private)
1283 {
1284         TPL_ASSERT(private);
1285
1286         struct tizen_private tizen_private(private);
1287         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1288         int rotation = tizen_private->rotation;
1289
1290         if (!wl_egl_surface) {
1291                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1292                                 wl_egl_window);
1293                 return;
1294         }
1295
1296         TPL_INFO("[WINDOW_ROTATE]",
1297                          "wl_egl_surface(%p) wl_egl_window(%p) (%d) -> (%d)",
1298                          wl_egl_surface, wl_egl_window,
1299                          wl_egl_surface->rotation, rotation);
1300
1301         wl_egl_surface->rotation = rotation;
1302 }
1303
1304 /* There is currently no use case for the prerotation callback below. */
1305 static int
1306 __cb_get_rotation_capability(struct wl_egl_window *wl_egl_window,
1307                                                          void *private)
1308 {
1309         TPL_ASSERT(private);
1310
1311         int rotation_capability              = WL_EGL_WINDOW_TIZEN_CAPABILITY_NONE;
1312         struct tizen_private tizen_private(private);
1313         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1314
1315         if (!wl_egl_surface) {
1316                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1317                                 wl_egl_window);
1318                 return rotation_capability;
1319         }
1320
1321         if (wl_egl_surface->prerotation_capability == TPL_TRUE)
1322                 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_SUPPORTED;
1323         else
1324                 rotation_capability = WL_EGL_WINDOW_TIZEN_CAPABILITY_ROTATION_UNSUPPORTED;
1325
1326
1327         return rotation_capability;
1328 }
1329
1330 static void
1331 __cb_set_window_serial_callback(struct wl_egl_window *wl_egl_window,
1332                                                                 void *private, unsigned int serial)
1333 {
1334         TPL_ASSERT(private);
1335
1336         struct tizen_private tizen_private(private);
1337         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1338
1339         if (!wl_egl_surface) {
1340                 TPL_ERR("Invalid wl_egl_window(%p) tizen_private->data is null.",
1341                                 wl_egl_window);
1342                 return;
1343         }
1344
1345         wl_egl_surface->set_serial_is_used = TPL_TRUE;
1346         wl_egl_surface->serial = serial;
1347 }
1348
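/* Returns a dup of the surface's commit_sync eventfd, creating the eventfd on
 * first use. The caller owns (and must close) the returned fd; it is expected
 * to be signaled from the commit path once the corresponding commit is done. */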
1349 static int
1350 __cb_create_commit_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1351 {
1352         TPL_ASSERT(private);
1353         TPL_ASSERT(wl_egl_window);
1354
1355         int commit_sync_fd = -1;
1356
1357         struct tizen_private tizen_private(private);
1358         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1359
1360         if (!wl_egl_surface) {
1361                 TPL_ERR("Invalid parameter. wl_egl_surface(%p) is NULL", wl_egl_surface);
1362                 return -1;
1363         }
1364
1365         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
1366
1367         if (wl_egl_surface->commit_sync.fd != -1) {
1368                 commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1369                 TRACE_MARK("[ONLY_DUP] commit_sync_fd(%d) dup(%d)",
1370                                    wl_egl_surface->commit_sync.fd, commit_sync_fd);
1371                 TPL_LOG_D("[COMMIT_SYNC][DUP]", "wl_egl_surface(%p) commit_sync_fd(%d) dup(%d)",
1372                                   wl_egl_surface, wl_egl_surface->commit_sync.fd, commit_sync_fd);
1373                 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1374                 return commit_sync_fd;
1375         }
1376
1377         wl_egl_surface->commit_sync.fd = eventfd(0, EFD_CLOEXEC);
1378         if (wl_egl_surface->commit_sync.fd == -1) {
1379                 TPL_ERR("Failed to create commit_sync_fd. wl_egl_surface(%p)",
1380                                 wl_egl_surface);
1381                 tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1382                 return -1;
1383         }
1384
1385         commit_sync_fd = dup(wl_egl_surface->commit_sync.fd);
1386
1387         TRACE_MARK("[CREATE] commit_sync_fd(%d) dup(%d)",
1388                            wl_egl_surface->commit_sync.fd, commit_sync_fd);
1389         TPL_LOG_D("[COMMIT_SYNC][CREATE]", "wl_egl_surface(%p) commit_sync_fd(%d)",
1390                           wl_egl_surface, commit_sync_fd);
1391
1392         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
1393
1394         return commit_sync_fd;
1395 }
1396
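/* Enables or disables frontbuffer mode on the owning tpl_surface under its
 * object lock. Returns early if the requested mode equals the current one. */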
1397 static void
1398 __cb_client_window_set_frontbuffer_mode(struct wl_egl_window *wl_egl_window,
1399                                                                                 void *private, int set)
1400 {
1401         TPL_ASSERT(private);
1402         TPL_ASSERT(wl_egl_window);
1403         struct tizen_private tizen_private(private);
1404         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1405         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1406
1407         tpl_surface_t *surface = wl_egl_surface->tpl_surface;
1408         TPL_CHECK_ON_NULL_RETURN(surface);
1409
1410         tpl_bool_t is_frontbuffer_mode = set ? TPL_TRUE : TPL_FALSE;
1411
1412         TPL_OBJECT_LOCK(surface);
1413         if (is_frontbuffer_mode == surface->is_frontbuffer_mode) {
1414                 TPL_OBJECT_UNLOCK(surface);
1415                 return;
1416         }
1417
1418         TPL_INFO("[FRONTBUFFER_MODE]",
1419                          "[%s] wl_egl_surface(%p) wl_egl_window(%p)",
1420                          is_frontbuffer_mode ? "ON" : "OFF",
1421                          wl_egl_surface, wl_egl_window);
1422
1423         surface->is_frontbuffer_mode = is_frontbuffer_mode;
1424
1425         TPL_OBJECT_UNLOCK(surface);
1426 }
1427
1428 #if TIZEN_FEATURE_ENABLE
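/* Same pattern as the commit_sync fd above, but for presentation feedback:
 * returns a dup of the surface's presentation_sync eventfd, creating it on
 * first use. The caller owns the returned fd. */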
1429 static int
1430 __cb_create_presentation_sync_fd(struct wl_egl_window *wl_egl_window, void *private)
1431 {
1432         TPL_ASSERT(private);
1433         TPL_ASSERT(wl_egl_window);
1434
1435         int presentation_sync_fd = -1;
1436
1437         struct tizen_private tizen_private(private);
1438         tpl_wl_egl_surface_t wl_egl_surface(tizen_private->data);
1439
1440         if (!wl_egl_surface) {
1441                 TPL_ERR("Invalid parameter. wl_egl_surface is NULL");
1442                 return -1;
1443         }
1444
1445         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
1446         if (wl_egl_surface->presentation_sync.fd != -1) {
1447                 presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1448                 TRACE_MARK("[ONLY_DUP] presentation_sync_fd(%d) dup(%d)",
1449                                    wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1450                 TPL_LOG_D("[PRESENTATION_SYNC][DUP]", "wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1451                                   wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1452                 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1453                 return presentation_sync_fd;
1454         }
1455
1456         wl_egl_surface->presentation_sync.fd = eventfd(0, EFD_CLOEXEC);
1457         if (wl_egl_surface->presentation_sync.fd == -1) {
1458                 TPL_ERR("Failed to create presentation_sync_fd. wl_egl_surface(%p)",
1459                                 wl_egl_surface);
1460                 tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1461                 return -1;
1462         }
1463
1464         presentation_sync_fd = dup(wl_egl_surface->presentation_sync.fd);
1465         TRACE_MARK("[CREATE] presentation_sync_fd(%d) dup(%d)",
1466                            wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1467         TPL_LOG_D("[PRESENTATION_SYNC][CREATE]", "wl_egl_surface(%p) presentation_sync_fd(%d) dup(%d)",
1468                           wl_egl_surface, wl_egl_surface->presentation_sync.fd, presentation_sync_fd);
1469
1470         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1471
1472         return presentation_sync_fd;
1473 }
1474 /* -- END -- wl_egl_window tizen private callback functions */
1475
1476 /* -- BEGIN -- tizen_surface_shm_flusher_listener */
1477 static void __cb_tss_flusher_flush_callback(void *data,
1478                 struct tizen_surface_shm_flusher *tss_flusher)
1479 {
1480         tpl_wl_egl_surface_t wl_egl_surface(data);
1481         tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
1482
1483         TPL_INFO("[BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1484                          wl_egl_surface, wl_egl_surface->tbm_queue);
1485
1486         tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue);
1487         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1488                 TPL_ERR("Failed to flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
1489                 return;
1490         }
1491 }
1492
1493 static void __cb_tss_flusher_free_flush_callback(void *data,
1494                 struct tizen_surface_shm_flusher *tss_flusher)
1495 {
1496         tpl_wl_egl_surface_t wl_egl_surface(data);
1497         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
1498
1499         TPL_INFO("[FREE_BUFFER_FLUSH]", "wl_egl_surface(%p) tbm_queue(%p)",
1500                          wl_egl_surface, wl_egl_surface->tbm_queue);
1501
1502         tsq_err = tbm_surface_queue_free_flush(wl_egl_surface->tbm_queue);
1503         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
1504                 TPL_ERR("Failed to free flush tbm_queue(%p)", wl_egl_surface->tbm_queue);
1505                 return;
1506         }
1507 }
1508
1509 static const struct tizen_surface_shm_flusher_listener
1510 tss_flusher_listener = {
1511         __cb_tss_flusher_flush_callback,
1512         __cb_tss_flusher_free_flush_callback
1513 };
1514 /* -- END -- tizen_surface_shm_flusher_listener */
1515 #endif
1516
1517 /* -- BEGIN -- tbm_surface_queue callback functions */
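/* Called when the tbm_surface_queue is reset (e.g. resize or activation state
 * change). Logs the size/activation change, marks the surface with
 * reset = TPL_TRUE so the next frame uses a buffer matching the new state,
 * and invokes the surface's reset_cb if one is registered. */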
1518 static void
1519 __cb_tbm_queue_reset_callback(tbm_surface_queue_h tbm_queue,
1520                                                                           void *data)
1521 {
1522         tpl_wl_egl_display_t *wl_egl_display = NULL;
1523         tpl_surface_t *surface = NULL;
1524         tpl_bool_t is_activated = TPL_FALSE;
1525         int width, height;
1526
1527         tpl_wl_egl_surface_t wl_egl_surface(data);
1528         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1529
1530         wl_egl_display = wl_egl_surface->wl_egl_display;
1531         TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
1532
1533         surface = wl_egl_surface->tpl_surface;
1534         TPL_CHECK_ON_NULL_RETURN(surface);
1535
1536         /* When the queue is resized, change the reset flag to TPL_TRUE to reflect
1537          * the changed window size at the next frame. */
1538         width = tbm_surface_queue_get_width(tbm_queue);
1539         height = tbm_surface_queue_get_height(tbm_queue);
1540         if (surface->width != width || surface->height != height) {
1541                 TPL_INFO("[QUEUE_RESIZE]",
1542                                  "wl_egl_surface(%p) tbm_queue(%p) (%dx%d) -> (%dx%d)",
1543                                  wl_egl_surface, tbm_queue,
1544                                  surface->width, surface->height, width, height);
1545         }
1546
1547         /* When queue_reset_callback is called and is_activated differs from its
1548          * previous state, set the reset flag to TPL_TRUE so that the next frame
1549          * gets a new buffer with the changed state (ACTIVATED/DEACTIVATED). */
1550         is_activated = wayland_tbm_client_queue_check_activate(wl_egl_display->wl_tbm_client,
1551                                                                                                                    wl_egl_surface->tbm_queue);
1552         if (wl_egl_surface->is_activated != is_activated) {
1553                 if (is_activated) {
1554                         TPL_INFO("[ACTIVATED]",
1555                                           "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1556                                           wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1557                 } else {
1558                         TPL_INFO("[DEACTIVATED]",
1559                                          " wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
1560                                          wl_egl_surface, wl_egl_surface->wl_surface, tbm_queue);
1561                 }
1562         }
1563
1564         wl_egl_surface->reset = TPL_TRUE;
1565
1566         if (surface->reset_cb)
1567                 surface->reset_cb(surface->reset_data);
1568 }
1569
1570 static void
1571 __cb_tbm_queue_acquirable_callback(tbm_surface_queue_h tbm_queue,
1572                                                                    void *data)
1573 {
1574         TPL_IGNORE(tbm_queue);
1575
1576         tpl_wl_egl_surface_t wl_egl_surface(data);
1577         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1578
1579         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1580         if (wl_egl_surface->sent_message == NONE_MESSAGE) {
1581                 wl_egl_surface->sent_message = ACQUIRABLE;
1582                 tpl_gsource_send_message(wl_egl_surface->surf_source,
1583                                                          wl_egl_surface->sent_message);
1584         }
1585         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1586 }
1587 /* -- END -- tbm_surface_queue callback functions */
1588
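/* Runs on the worker thread when the surface gsource is finalized.
 * Flushes pending presentation feedbacks and signals/clears the presentation
 * sync fd, destroys the explicit-sync and buffer-flusher protocol objects
 * (TIZEN_FEATURE_ENABLE), destroys the tbm_queue, and detaches the surface's
 * vblank from the display's vblank list. */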
1589 static void
1590 _thread_wl_egl_surface_fini(tpl_wl_egl_surface_t *wl_egl_surface)
1591 {
1592         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1593
1594         TPL_INFO("[SURFACE_FINI]",
1595                           "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
1596                           wl_egl_surface, wl_egl_surface->wl_egl_window,
1597                           wl_egl_surface->wl_surface);
1598 #if TIZEN_FEATURE_ENABLE
1599         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
1600
1601         if (wl_egl_display->presentation && wl_egl_surface->presentation_feedbacks) {
1602                 while (!__tpl_list_is_empty(wl_egl_surface->presentation_feedbacks)) {
1603                         struct pst_feedback *pst_feedback =
1604                                 (struct pst_feedback *)__tpl_list_pop_front(
1605                                                 wl_egl_surface->presentation_feedbacks, NULL);
1606                         if (pst_feedback) {
1607                                 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
1608                                 pst_feedback->pst_sync_fd = -1;
1609
1610                                 wp_presentation_feedback_destroy(pst_feedback->presentation_feedback);
1611                                 pst_feedback->presentation_feedback = NULL;
1612
1613                                 free(pst_feedback);
1614                         }
1615                 }
1616
1617                 __tpl_list_free(wl_egl_surface->presentation_feedbacks, NULL);
1618                 wl_egl_surface->presentation_feedbacks = NULL;
1619         }
1620
1621         send_signal(wl_egl_surface->presentation_sync.fd, "PST_SYNC");
1622         wl_egl_surface->presentation_sync.fd = -1;
1623
1624         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
1625
1626         if (wl_egl_surface->surface_sync) {
1627                 TPL_INFO("[SURFACE_SYNC_DESTROY]",
1628                                  "wl_egl_surface(%p) surface_sync(%p)",
1629                                   wl_egl_surface, wl_egl_surface->surface_sync);
1630                 zwp_linux_surface_synchronization_v1_destroy(wl_egl_surface->surface_sync);
1631                 wl_egl_surface->surface_sync = NULL;
1632         }
1633
1634         if (wl_egl_surface->tss_flusher) {
1635                 TPL_INFO("[FLUSHER_DESTROY]",
1636                                   "wl_egl_surface(%p) tss_flusher(%p)",
1637                                   wl_egl_surface, wl_egl_surface->tss_flusher);
1638                 tizen_surface_shm_flusher_destroy(wl_egl_surface->tss_flusher);
1639                 wl_egl_surface->tss_flusher = NULL;
1640         }
1641 #endif
1642
1643         if (wl_egl_surface->tbm_queue) {
1644                 TPL_INFO("[TBM_QUEUE_DESTROY]",
1645                                  "wl_egl_surface(%p) tbm_queue(%p)",
1646                                  wl_egl_surface, wl_egl_surface->tbm_queue);
1647                 tbm_surface_queue_destroy(wl_egl_surface->tbm_queue);
1648                 wl_egl_surface->tbm_queue = NULL;
1649         }
1650
1651         if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
1652                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
1653                 __tpl_list_free(wl_egl_surface->vblank->waiting_buffers, NULL);
1654                 wl_egl_surface->vblank->waiting_buffers = NULL;
1655                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
1656         }
1657
1658         if (wl_egl_surface->vblank) {
1659                 __tpl_list_remove_data(wl_egl_display->tdm.surface_vblanks,
1660                                                            (void *)wl_egl_surface->vblank,
1661                                                            TPL_FIRST,
1662                                                            __cb_surface_vblank_free);
1663                 wl_egl_surface->vblank = NULL;
1664         }
1665 }
1666
1667 static tpl_bool_t
1668 __thread_func_surf_dispatch(tpl_gsource *gsource, uint64_t message)
1669 {
1670         tpl_wl_egl_surface_t wl_egl_surface(tpl_gsource_get_data(gsource));
1671
1672         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1673         if (message == INIT_SURFACE) { /* Initialize surface */
1674                 TPL_LOG_D("[MSG_RECEIVED]", "wl_egl_surface(%p) initialize message received!",
1675                                   wl_egl_surface);
1676                 _thread_wl_egl_surface_init(wl_egl_surface);
1677                 wl_egl_surface->initialized_in_thread = TPL_TRUE;
1678                 tpl_gcond_signal(&wl_egl_surface->surf_cond);
1679         } else if (message == ACQUIRABLE) { /* Acquirable */
1680                 TPL_LOG_D("[MSG_RECEIVED]", "wl_egl_surface(%p) acquirable message received!",
1681                                   wl_egl_surface);
1682                 _thread_surface_queue_acquire(wl_egl_surface);
1683         }
1684
1685         wl_egl_surface->sent_message = NONE_MESSAGE;
1686
1687         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1688
1689         return TPL_TRUE;
1690 }
1691
1692 static void
1693 __thread_func_surf_finalize(tpl_gsource *gsource)
1694 {
1695         tpl_wl_egl_surface_t wl_egl_surface(tpl_gsource_get_data(gsource));
1696         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
1697
1698         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1699         TPL_LOG_D("[S_FINALIZE]", "wl_egl_surface(%p) tpl_gsource(%p)",
1700                           wl_egl_surface, gsource);
1701
1702         _thread_wl_egl_surface_fini(wl_egl_surface);
1703
1704         wl_egl_surface->gsource_finalized = TPL_TRUE;
1705
1706         tpl_gcond_signal(&wl_egl_surface->surf_cond);
1707         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1708 }
1709
1710 static tpl_gsource_functions surf_funcs = {
1711         .prepare = NULL,
1712         .check = NULL,
1713         .dispatch = __thread_func_surf_dispatch,
1714         .finalize = __thread_func_surf_finalize,
1715 };
1716
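/* Backend entry point for surface creation.
 * Allocates the tpl_wl_egl_surface_t, creates its gsource on the display
 * thread, hooks the wl_egl_window / tizen_private callbacks defined above,
 * initializes the per-surface mutexes and conds, then sends INIT_SURFACE to
 * the thread and waits until the in-thread initialization has finished. */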
1717 static tpl_result_t
1718 __tpl_wl_egl_surface_init(tpl_surface_t *surface)
1719 {
1720         tpl_wl_egl_display_t wl_egl_display(surface->display->backend.data);
1721         tpl_wl_egl_surface_t *wl_egl_surface    = NULL;
1722         tpl_gsource *surf_source                = NULL;
1723
1724         struct wl_egl_window *wl_egl_window =
1725                 (struct wl_egl_window *)surface->native_handle;
1726
1727         TPL_ASSERT(surface->type == TPL_SURFACE_TYPE_WINDOW);
1728         TPL_ASSERT(surface->native_handle);
1729         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_display, TPL_ERROR_INVALID_PARAMETER);
1730
1731         wl_egl_surface = calloc(1, sizeof(tpl_wl_egl_surface_t));
1732         if (!wl_egl_surface) {
1733                 TPL_ERR("Failed to allocate memory for new tpl_wl_egl_surface_t.");
1734                 return TPL_ERROR_OUT_OF_MEMORY;
1735         }
1736
1737         surf_source = tpl_gsource_create(wl_egl_display->thread, (void *)wl_egl_surface,
1738                                                                          -1, FD_TYPE_NONE, &surf_funcs, SOURCE_TYPE_NORMAL);
1739         if (!surf_source) {
1740                 TPL_ERR("Failed to create surf_source with wl_egl_surface(%p)",
1741                                 wl_egl_surface);
1742                 goto surf_source_create_fail;
1743         }
1744
1745         surface->backend.data = (void *)wl_egl_surface;
1746         surface->width        = wl_egl_window->width;
1747         surface->height       = wl_egl_window->height;
1748         surface->rotation     = 0;
1749
1750         wl_egl_surface->tpl_surface            = surface;
1751         wl_egl_surface->width                  = wl_egl_window->width;
1752         wl_egl_surface->height                 = wl_egl_window->height;
1753         wl_egl_surface->format                 = surface->format;
1754         wl_egl_surface->num_buffers            = surface->num_buffers;
1755
1756         wl_egl_surface->surf_source            = surf_source;
1757         wl_egl_surface->wl_egl_window          = wl_egl_window;
1758         wl_egl_surface->wl_surface             = wl_egl_window->surface;
1759
1760         wl_egl_surface->wl_egl_display         = wl_egl_display;
1761
1762         wl_egl_surface->reset                  = TPL_FALSE;
1763         wl_egl_surface->is_activated           = TPL_FALSE;
1764         wl_egl_surface->need_to_enqueue        = TPL_TRUE;
1765         wl_egl_surface->prerotation_capability = TPL_FALSE;
1766         wl_egl_surface->vblank_done            = TPL_TRUE;
1767         wl_egl_surface->use_render_done_fence  = TPL_FALSE;
1768         wl_egl_surface->set_serial_is_used     = TPL_FALSE;
1769         wl_egl_surface->gsource_finalized      = TPL_FALSE;
1770         wl_egl_surface->initialized_in_thread  = TPL_FALSE;
1771         wl_egl_surface->frontbuffer_activated  = TPL_FALSE;
1772
1773         wl_egl_surface->latest_transform       = -1;
1774         wl_egl_surface->serial                 = 0;
1775
1776         wl_egl_surface->vblank                 = NULL;
1777 #if TIZEN_FEATURE_ENABLE
1778         wl_egl_surface->tss_flusher            = NULL;
1779         wl_egl_surface->surface_sync           = NULL;
1780 #endif
1781
1782         wl_egl_surface->post_interval          = surface->post_interval;
1783
1784         wl_egl_surface->vblank_enable          = TPL_FALSE;
1785
1786         wl_egl_surface->commit_sync.fd         = -1;
1787         wl_egl_surface->presentation_sync.fd   = -1;
1788
1789         wl_egl_surface->sent_message           = NONE_MESSAGE;
1790         wl_egl_surface->last_enq_buffer        = NULL;
1791
1792         wl_egl_surface->buffers = __tpl_list_alloc();
1793
1794         {
1795                 struct tizen_private *tizen_private = NULL;
1796
1797                 if (wl_egl_window->driver_private)
1798                         tizen_private = (struct tizen_private *)wl_egl_window->driver_private;
1799                 else {
1800                         tizen_private = tizen_private_create();
1801                         wl_egl_window->driver_private = (void *)tizen_private;
1802                 }
1803
1804                 if (tizen_private) {
1805                         tizen_private->data = (void *)wl_egl_surface;
1806                         tizen_private->rotate_callback = (void *)__cb_rotate_callback;
1807                         tizen_private->get_rotation_capability = (void *)
1808                                 __cb_get_rotation_capability;
1809                         tizen_private->set_window_serial_callback = (void *)
1810                                 __cb_set_window_serial_callback;
1811                         tizen_private->create_commit_sync_fd = (void *)__cb_create_commit_sync_fd;
1812                         tizen_private->set_frontbuffer_callback = (void *)__cb_client_window_set_frontbuffer_mode;
1813 #if TIZEN_FEATURE_ENABLE
1814                         tizen_private->create_presentation_sync_fd = (void *)__cb_create_presentation_sync_fd;
1815 #else
1816                         tizen_private->create_presentation_sync_fd = NULL;
1817 #endif
1818
1819                         wl_egl_window->destroy_window_callback = (void *)__cb_destroy_callback;
1820                         wl_egl_window->resize_callback = (void *)__cb_resize_callback;
1821                 }
1822         }
1823
1824         tpl_gmutex_init(&wl_egl_surface->commit_sync.mutex);
1825         tpl_gmutex_init(&wl_egl_surface->presentation_sync.mutex);
1826
1827         tpl_gmutex_init(&wl_egl_surface->buffers_mutex);
1828
1829         tpl_gmutex_init(&wl_egl_surface->surf_mutex);
1830         tpl_gcond_init(&wl_egl_surface->surf_cond);
1831
1832         /* Initialize in thread */
1833         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
1834         wl_egl_surface->sent_message = INIT_SURFACE;
1835         tpl_gsource_send_message(wl_egl_surface->surf_source,
1836                                                          wl_egl_surface->sent_message);
1837         while (!wl_egl_surface->initialized_in_thread)
1838                 tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
1839         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
1840
1841         TPL_ASSERT(wl_egl_surface->tbm_queue);
1842
1843         TPL_INFO("[SURFACE_INIT]",
1844                           "tpl_surface(%p) wl_egl_surface(%p) gsource(%p)",
1845                           surface, wl_egl_surface, wl_egl_surface->surf_source);
1846
1847         return TPL_ERROR_NONE;
1848
1849 surf_source_create_fail:
1850         free(wl_egl_surface);
1851         surface->backend.data = NULL;
1852         return TPL_ERROR_INVALID_OPERATION;
1853 }
1854
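/* Creates the wayland-tbm client surface queue (the tiled variant when the
 * buffer manager reports TBM_BUFMGR_CAPABILITY_TILED_MEMORY), enables
 * GUARANTEE_CYCLE mode and registers the reset/acquirable callbacks above. */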
1855 static tbm_surface_queue_h
1856 _thread_create_tbm_queue(tpl_wl_egl_surface_t *wl_egl_surface,
1857                                                  struct wayland_tbm_client *wl_tbm_client,
1858                                                  int num_buffers)
1859 {
1860         tbm_surface_queue_h tbm_queue = NULL;
1861         tbm_bufmgr bufmgr             = NULL;
1862         unsigned int capability;
1863
1864         struct wl_surface *wl_surface = wl_egl_surface->wl_surface;
1865         int width = wl_egl_surface->width;
1866         int height = wl_egl_surface->height;
1867         int format = wl_egl_surface->format;
1868
1869         if (!wl_tbm_client || !wl_surface) {
1870                 TPL_ERR("Invalid parameters. wl_tbm_client(%p) wl_surface(%p)",
1871                                 wl_tbm_client, wl_surface);
1872                 return NULL;
1873         }
1874
1875         bufmgr = tbm_bufmgr_init(-1);
1876         capability = tbm_bufmgr_get_capability(bufmgr);
1877         tbm_bufmgr_deinit(bufmgr);
1878
1879         if (capability & TBM_BUFMGR_CAPABILITY_TILED_MEMORY) {
1880                 tbm_queue = wayland_tbm_client_create_surface_queue_tiled(
1881                                                 wl_tbm_client,
1882                                                 wl_surface,
1883                                                 num_buffers,
1884                                                 width,
1885                                                 height,
1886                                                 format);
1887         } else {
1888                 tbm_queue = wayland_tbm_client_create_surface_queue(
1889                                                 wl_tbm_client,
1890                                                 wl_surface,
1891                                                 num_buffers,
1892                                                 width,
1893                                                 height,
1894                                                 format);
1895         }
1896
1897         if (!tbm_queue) {
1898                 TPL_ERR("Failed to create tbm_queue. wl_tbm_client(%p)",
1899                                 wl_tbm_client);
1900                 return NULL;
1901         }
1902
1903         if (tbm_surface_queue_set_modes(
1904                         tbm_queue, TBM_SURFACE_QUEUE_MODE_GUARANTEE_CYCLE) !=
1905                                 TBM_SURFACE_QUEUE_ERROR_NONE) {
1906                 TPL_ERR("Failed to set queue mode to tbm_surface_queue(%p)",
1907                                 tbm_queue);
1908                 tbm_surface_queue_destroy(tbm_queue);
1909                 return NULL;
1910         }
1911
1912         if (tbm_surface_queue_add_reset_cb(
1913                         tbm_queue,
1914                         __cb_tbm_queue_reset_callback,
1915                         (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1916                 TPL_ERR("Failed to register reset callback to tbm_surface_queue(%p)",
1917                                 tbm_queue);
1918                 tbm_surface_queue_destroy(tbm_queue);
1919                 return NULL;
1920         }
1921
1922         if (tbm_surface_queue_add_acquirable_cb(
1923                         tbm_queue,
1924                         __cb_tbm_queue_acquirable_callback,
1925                         (void *)wl_egl_surface) != TBM_SURFACE_QUEUE_ERROR_NONE) {
1926                 TPL_ERR("Failed to register acquirable callback to tbm_surface_queue(%p)",
1927                                 tbm_queue);
1928                 tbm_surface_queue_destroy(tbm_queue);
1929                 return NULL;
1930         }
1931
1932         return tbm_queue;
1933 }
1934
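/* Creates a tdm_client_vblank object on the "primary" output with fake
 * vblank enabled and sync mode disabled, used for per-surface vsync waits. */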
1935 static tdm_client_vblank*
1936 _thread_create_tdm_client_vblank(tdm_client *tdm_client)
1937 {
1938         tdm_client_vblank *tdm_vblank = NULL;
1939         tdm_client_output *tdm_output = NULL;
1940         tdm_error tdm_err = TDM_ERROR_NONE;
1941
1942         if (!tdm_client) {
1943                 TPL_ERR("Invalid parameter. tdm_client(%p)", tdm_client);
1944                 return NULL;
1945         }
1946
1947         tdm_output = tdm_client_get_output(tdm_client, "primary", &tdm_err);
1948         if (!tdm_output || tdm_err != TDM_ERROR_NONE) {
1949                 TPL_ERR("Failed to get tdm_client_output. tdm_err(%d)", tdm_err);
1950                 return NULL;
1951         }
1952
1953         tdm_vblank = tdm_client_output_create_vblank(tdm_output, &tdm_err);
1954         if (!tdm_vblank || tdm_err != TDM_ERROR_NONE) {
1955                 TPL_ERR("Failed to create tdm_vblank. tdm_err(%d)", tdm_err);
1956                 return NULL;
1957         }
1958
1959         tdm_err = tdm_client_handle_pending_events(tdm_client);
1960         if (tdm_err != TDM_ERROR_NONE) {
1961                 TPL_ERR("Failed to handle pending events. tdm_err(%d)", tdm_err);
1962         }
1963
1964         tdm_client_vblank_set_enable_fake(tdm_vblank, 1);
1965         tdm_client_vblank_set_sync(tdm_vblank, 0);
1966
1967         return tdm_vblank;
1968 }
1969
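/* List free-callback for tpl_surface_vblank_t: destroys its tdm_vblank,
 * clears its mutex, frees the structure and resets the owning surface's
 * vblank pointer. */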
1970 static void
1971 __cb_surface_vblank_free(void *data)
1972 {
1973         TPL_CHECK_ON_NULL_RETURN(data);
1974
1975         tpl_surface_vblank_t *vblank = (tpl_surface_vblank_t *)data;
1976         tpl_wl_egl_surface_t *wl_egl_surface = vblank->wl_egl_surface;
1977
1978         TPL_INFO("[VBLANK_DESTROY]",
1979                          "wl_egl_surface(%p) surface_vblank(%p) tdm_vblank(%p)",
1980                          wl_egl_surface, vblank,
1981                          vblank->tdm_vblank);
1982
1983         tdm_client_vblank_destroy(vblank->tdm_vblank);
1984         vblank->tdm_vblank = NULL;
1985         vblank->wl_egl_surface = NULL;
1986         tpl_gmutex_clear(&vblank->mutex);
1987
1988         free(vblank);
1989
1990         wl_egl_surface->vblank = NULL;
1991 }
1992
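/* Runs on the worker thread in response to INIT_SURFACE.
 * Creates the tbm_queue, optionally sets up the per-surface tdm vblank when
 * the display uses vblank waiting, and (with TIZEN_FEATURE_ENABLE) acquires
 * the buffer-flush and explicit-sync protocol objects before allocating the
 * presentation feedback list. */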
1993 static void
1994 _thread_wl_egl_surface_init(tpl_wl_egl_surface_t *wl_egl_surface)
1995 {
1996         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
1997         tpl_surface_vblank_t *vblank         = NULL;
1998
1999         wl_egl_surface->tbm_queue = _thread_create_tbm_queue(
2000                                                                         wl_egl_surface,
2001                                                                         wl_egl_display->wl_tbm_client,
2002                                                                         wl_egl_surface->num_buffers);
2003         if (!wl_egl_surface->tbm_queue) {
2004                 TPL_ERR("Failed to create tbm_queue. wl_egl_surface(%p) wl_tbm_client(%p)",
2005                                 wl_egl_surface, wl_egl_display->wl_tbm_client);
2006                 return;
2007         }
2008
2009         TPL_INFO("[QUEUE_CREATION][1/2]",
2010                          "wl_egl_surface(%p) wl_surface(%p) wl_tbm_client(%p)",
2011                          wl_egl_surface, wl_egl_surface->wl_surface,
2012                          wl_egl_display->wl_tbm_client);
2013         TPL_INFO("[QUEUE_CREATION][2/2]",
2014                          "wl_egl_surface(%p) tbm_queue(%p) size(%d x %d) X %d format(%d)",
2015                          wl_egl_surface,
2016                          wl_egl_surface->tbm_queue,
2017                          wl_egl_surface->width,
2018                          wl_egl_surface->height,
2019                          wl_egl_surface->num_buffers,
2020                          wl_egl_surface->format);
2021
2022         if (wl_egl_display->use_wait_vblank) {
2023                 vblank = (tpl_surface_vblank_t *)calloc(1, sizeof(tpl_surface_vblank_t));
2024                 if (vblank) {
2025                         vblank->tdm_vblank = _thread_create_tdm_client_vblank(
2026                                                                         wl_egl_display->tdm.tdm_client);
2027                         if (!vblank->tdm_vblank) {
2028                                 TPL_ERR("Failed to create tdm_vblank from tdm_client(%p)",
2029                                                 wl_egl_display->tdm.tdm_client);
2030                                 free(vblank);
2031                                 vblank = NULL;
2032                         } else {
2033                                 vblank->waiting_buffers = __tpl_list_alloc();
2034                                 if (!vblank->waiting_buffers) {
2035                                         tdm_client_vblank_destroy(vblank->tdm_vblank);
2036                                         free(vblank);
2037                                         vblank = NULL;
2038                                 } else {
2039                                         vblank->wl_egl_surface = wl_egl_surface;
2040                                         tpl_gmutex_init(&vblank->mutex);
2041
2042                                         __tpl_list_push_back(wl_egl_display->tdm.surface_vblanks,
2043                                                                                 (void *)vblank);
2044
2045                                         TPL_INFO("[VBLANK_INIT]",
2046                                                         "wl_egl_surface(%p) tdm_client(%p) tdm_vblank(%p)",
2047                                                         wl_egl_surface, wl_egl_display->tdm.tdm_client,
2048                                                         vblank->tdm_vblank);
2049                                 }
2050                         }
2051                 }
2052         }
2053
2054         wl_egl_surface->vblank = vblank;
2055         wl_egl_surface->vblank_enable = (vblank != NULL &&
2056                                                                         wl_egl_surface->post_interval > 0);
2057
2058 #if TIZEN_FEATURE_ENABLE
2059         if (wl_egl_display->tss) {
2060                 wl_egl_surface->tss_flusher =
2061                         tizen_surface_shm_get_flusher(wl_egl_display->tss,
2062                                                                                   wl_egl_surface->wl_surface);
2063         }
2064
2065         if (wl_egl_surface->tss_flusher) {
2066                 tizen_surface_shm_flusher_add_listener(wl_egl_surface->tss_flusher,
2067                                                                                            &tss_flusher_listener,
2068                                                                                            wl_egl_surface);
2069                 TPL_INFO("[FLUSHER_INIT]",
2070                                  "wl_egl_surface(%p) tss_flusher(%p)",
2071                                  wl_egl_surface, wl_egl_surface->tss_flusher);
2072         }
2073
2074         if (wl_egl_display->explicit_sync && wl_egl_display->use_explicit_sync) {
2075                 wl_egl_surface->surface_sync =
2076                         zwp_linux_explicit_synchronization_v1_get_synchronization(
2077                                         wl_egl_display->explicit_sync, wl_egl_surface->wl_surface);
2078                 if (wl_egl_surface->surface_sync) {
2079                         TPL_INFO("[EXPLICIT_SYNC_INIT]",
2080                                          "wl_egl_surface(%p) surface_sync(%p)",
2081                                          wl_egl_surface, wl_egl_surface->surface_sync);
2082                 } else {
2083                         TPL_WARN("Failed to create surface_sync. | wl_egl_surface(%p)",
2084                                          wl_egl_surface);
2085                         wl_egl_display->use_explicit_sync = TPL_FALSE;
2086                 }
2087         }
2088 #endif
2089         wl_egl_surface->presentation_feedbacks = __tpl_list_alloc();
2090 }
2091
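/* Cleans up every buffer tracked by the surface before finalization.
 * The worker thread is paused while the list is walked; buffers that are
 * enqueued but not yet committed are waited for (500ms timed waits), then
 * acquired-but-unreleased buffers are released, dequeued ones are
 * cancel-dequeued, and each buffer is marked RELEASED and unreferenced as
 * needed. */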
2092 static void
2093 _tpl_wl_egl_surface_buffer_clear(tpl_wl_egl_surface_t *wl_egl_surface)
2094 {
2095         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2096         tpl_wl_egl_display_t *wl_egl_display    = wl_egl_surface->wl_egl_display;
2097         tpl_bool_t need_to_release              = TPL_FALSE;
2098         tpl_bool_t need_to_cancel               = TPL_FALSE;
2099         buffer_status_t status                  = RELEASED;
2100         int buffer_cnt                          = 0;
2101         int idx                                 = 0;
2102
2103         tpl_gthread_pause_in_idle(wl_egl_display->thread);
2104
2105         buffer_cnt = __tpl_list_get_count(wl_egl_surface->buffers);
2106
2107         while (!__tpl_list_is_empty(wl_egl_surface->buffers)) {
2108                 tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_pop_front(wl_egl_surface->buffers,
2109                                                                                                                            NULL));
2110
2111                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2112
2113                 status = wl_egl_buffer->status;
2114
2115                 TPL_INFO("[BUFFER_CLEAR]",
2116                                  "[%d/%d] wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) status(%s)",
2117                                  ++idx, buffer_cnt, wl_egl_surface, wl_egl_buffer,
2118                                  wl_egl_buffer->tbm_surface,
2119                                  status_to_string[status]);
2120
2121                 if (status >= ENQUEUED) {
2122                         tpl_result_t wait_result = TPL_ERROR_NONE;
2123
2124                         while (status < COMMITTED && wait_result != TPL_ERROR_TIME_OUT) {
2125                                 tpl_gthread_continue(wl_egl_display->thread);
2126                                 wait_result = tpl_gcond_timed_wait(&wl_egl_buffer->cond,
2127                                                                                                    &wl_egl_buffer->mutex,
2128                                                                                                    500); /* 500ms */
2129                                 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2130                                 status = wl_egl_buffer->status; /* update status */
2131
2132                                 if (wait_result == TPL_ERROR_TIME_OUT) {
2133                                         TPL_WARN("timeout occurred while waiting to be signaled. wl_egl_buffer(%p) status(%s)",
2134                                                          wl_egl_buffer, status_to_string[status]);
2135                                 }
2136                         }
2137                 }
2138
2139                 /* ACQUIRED, WAITING_SIGNALED, WAITING_VBLANK, COMMITTED */
2140                 /* It has been acquired but has not yet been released, so this
2141                  * buffer must be released. */
2142                 need_to_release = (status >= ACQUIRED && status <= COMMITTED);
2143
2144                 /* After dequeue, it has not been enqueued yet,
2145                  * so cancel_dequeue must be performed. */
2146                 need_to_cancel = (status == DEQUEUED);
2147
2148                 if (need_to_release) {
2149                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2150                                                                                                 wl_egl_buffer->tbm_surface);
2151                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2152                                 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2153                                                 wl_egl_buffer->tbm_surface, tsq_err);
2154                 }
2155
2156                 if (need_to_cancel) {
2157                         tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2158                                                                                                            wl_egl_buffer->tbm_surface);
2159                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2160                                 TPL_ERR("Failed to cancel dequeue. tbm_surface(%p) tsq_err(%d)",
2161                                                 wl_egl_buffer->tbm_surface, tsq_err);
2162                 }
2163
2164                 wl_egl_buffer->status = RELEASED;
2165
2166                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2167
2168                 if (need_to_release || need_to_cancel || status == ENQUEUED)
2169                         tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2170         }
2171
2172         tpl_gthread_continue(wl_egl_display->thread);
2173 }
2174
2175 static void
2176 __tpl_wl_egl_surface_fini(tpl_surface_t *surface)
2177 {
2178         tpl_wl_egl_display_t *wl_egl_display = NULL;
2179
2180         TPL_ASSERT(surface);
2181         TPL_ASSERT(surface->display);
2182
2183         TPL_CHECK_ON_FALSE_RETURN(surface->type == TPL_SURFACE_TYPE_WINDOW);
2184
2185         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2186         TPL_CHECK_ON_NULL_RETURN(wl_egl_surface);
2187
2188         wl_egl_display = wl_egl_surface->wl_egl_display;
2189         TPL_CHECK_ON_NULL_RETURN(wl_egl_display);
2190
2191         TPL_INFO("[SURFACE_FINI][BEGIN]",
2192                          "wl_egl_surface(%p) wl_surface(%p) tbm_queue(%p)",
2193                          wl_egl_surface,
2194                          wl_egl_surface->wl_surface, wl_egl_surface->tbm_queue);
2195
2196         _tpl_wl_egl_surface_buffer_clear(wl_egl_surface);
2197
2198         if (wl_egl_surface->surf_source) {
2199                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2200                 // Send destroy message to the thread
2201                 tpl_gsource_destroy(wl_egl_surface->surf_source, TPL_TRUE);
2202                 /* This is a safeguard against unexpected situations in which
2203                  * g_cond_wait cannot work normally.
2204                  * When calling tpl_gsource_destroy() with destroy_in_thread set to
2205                  * TPL_TRUE, the caller should call tpl_gcond_wait() in a loop while
2206                  * checking the finalized flag. */
2207                 while (!wl_egl_surface->gsource_finalized) {
2208                         tpl_gcond_wait(&wl_egl_surface->surf_cond, &wl_egl_surface->surf_mutex);
2209                 }
2210                 wl_egl_surface->surf_source = NULL;
2211                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2212         }
2213
2214         if (wl_egl_surface->wl_egl_window) {
2215                 struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2216                 struct tizen_private tizen_private(wl_egl_window->driver_private);
2217                 TPL_INFO("[WL_EGL_WINDOW_FINI]",
2218                                  "wl_egl_surface(%p) wl_egl_window(%p) wl_surface(%p)",
2219                                  wl_egl_surface, wl_egl_window,
2220                                  wl_egl_surface->wl_surface);
2221
2222                 if (tizen_private) {
2223                         tizen_private->set_window_serial_callback = NULL;
2224                         tizen_private->rotate_callback = NULL;
2225                         tizen_private->get_rotation_capability = NULL;
2226                         tizen_private->create_presentation_sync_fd = NULL;
2227                         tizen_private->create_commit_sync_fd = NULL;
2228                         tizen_private->set_frontbuffer_callback = NULL;
2229                         tizen_private->merge_sync_fds = NULL;
2230                         tizen_private->data = NULL;
2231                         free(tizen_private);
2232
2233                         wl_egl_window->driver_private = NULL;
2234                 }
2235
2236                 wl_egl_window->destroy_window_callback = NULL;
2237                 wl_egl_window->resize_callback = NULL;
2238
2239                 wl_egl_surface->wl_egl_window = NULL;
2240         }
2241
2242         wl_egl_surface->last_enq_buffer = NULL;
2243
2244         wl_egl_surface->wl_surface = NULL;
2245         wl_egl_surface->wl_egl_display = NULL;
2246         wl_egl_surface->tpl_surface = NULL;
2247
2248         tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2249         __tpl_list_free(wl_egl_surface->buffers, NULL);
2250         wl_egl_surface->buffers = NULL;
2251         tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2252         tpl_gmutex_clear(&wl_egl_surface->buffers_mutex);
2253
2254         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2255         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2256         tpl_gmutex_clear(&wl_egl_surface->commit_sync.mutex);
2257
2258         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2259         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2260         tpl_gmutex_clear(&wl_egl_surface->presentation_sync.mutex);
2261
2262         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2263         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2264         tpl_gmutex_clear(&wl_egl_surface->surf_mutex);
2265         tpl_gcond_clear(&wl_egl_surface->surf_cond);
2266
2267         TPL_INFO("[SURFACE_FINI][END]", "wl_egl_surface(%p)", wl_egl_surface);
2268
2269         free(wl_egl_surface);
2270         surface->backend.data = NULL;
2271 }
2272
2273 static tpl_result_t
2274 __tpl_wl_egl_surface_set_rotation_capability(tpl_surface_t *surface,
2275                                                                                          tpl_bool_t set)
2276 {
2277         TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2278
2279         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2280
2281         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2282
2283         TPL_INFO("[SET_PREROTATION_CAPABILITY]",
2284                          "wl_egl_surface(%p) prerotation capability set to [%s]",
2285                          wl_egl_surface, (set ? "TRUE" : "FALSE"));
2286
2287         wl_egl_surface->prerotation_capability = set;
2288         return TPL_ERROR_NONE;
2289 }
2290
2291 static tpl_result_t
2292 __tpl_wl_egl_surface_set_post_interval(tpl_surface_t *surface,
2293                                                                            int post_interval)
2294 {
2295         TPL_CHECK_ON_NULL_RETURN_VAL(surface, TPL_ERROR_INVALID_PARAMETER);
2296
2297         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2298
2299         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_ERROR_INVALID_PARAMETER);
2300
2301         TPL_INFO("[SET_POST_INTERVAL]",
2302                          "wl_egl_surface(%p) post_interval(%d -> %d)",
2303                          wl_egl_surface, wl_egl_surface->post_interval, post_interval);
2304
2305         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2306         wl_egl_surface->post_interval = post_interval;
2307         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2308
2309         return TPL_ERROR_NONE;
2310 }
2311
2312 static tpl_bool_t
2313 __tpl_wl_egl_surface_validate(tpl_surface_t *surface)
2314 {
2315         tpl_bool_t retval = TPL_TRUE;
2316
2317         TPL_ASSERT(surface);
2318         TPL_ASSERT(surface->backend.data);
2319
2320         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2321
2322         retval = !(wl_egl_surface->reset);
2323
2324         return retval;
2325 }
2326
2327 static void
2328 __tpl_wl_egl_surface_get_size(tpl_surface_t *surface, int *width, int *height)
2329 {
2330         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2331
2332         if (width)
2333                 *width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2334         if (height)
2335                 *height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2336 }
2337
2338 static tpl_bool_t
2339 __tpl_wl_egl_surface_fence_sync_is_available(tpl_surface_t *surface)
2340 {
2341         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2342
2343         return !wl_egl_surface->frontbuffer_activated;
2344 }
2345
2346 #define CAN_DEQUEUE_TIMEOUT_MS 10000
2347
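/* Force-flushes the tbm_surface_queue, presumably invoked when waiting
 * CAN_DEQUEUE_TIMEOUT_MS for a dequeueable buffer fails: drops buffers still
 * waiting for vblank, flushes the queue, and releases every tracked buffer
 * that was acquired but not yet released. */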
2348 tpl_result_t
2349 _tbm_queue_force_flush(tpl_wl_egl_surface_t *wl_egl_surface)
2350 {
2351         tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2352
2353         _print_buffer_lists(wl_egl_surface);
2354
2355         if (wl_egl_surface->vblank) {
2356                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2357
2358                 if (wl_egl_surface->vblank->waiting_buffers)
2359                         __tpl_list_fini(wl_egl_surface->vblank->waiting_buffers, NULL);
2360
2361                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2362         }
2363
2364         if ((tsq_err = tbm_surface_queue_flush(wl_egl_surface->tbm_queue))
2365                 != TBM_SURFACE_QUEUE_ERROR_NONE) {
2366                 TPL_ERR("Failed to flush tbm_surface_queue(%p) tsq_err(%d)",
2367                                 wl_egl_surface->tbm_queue, tsq_err);
2368                 return TPL_ERROR_INVALID_OPERATION;
2369         }
2370
2371         while (!__tpl_list_is_empty(wl_egl_surface->buffers)) {
2372                 tpl_bool_t need_to_release = TPL_FALSE;
2373                 tpl_wl_egl_buffer_t wl_egl_buffer(
2374                         __tpl_list_pop_front(wl_egl_surface->buffers, NULL));
2375                 need_to_release = (wl_egl_buffer->status >= ACQUIRED) &&
2376                                                         (wl_egl_buffer->status <= COMMITTED);
2377
2378                 if (need_to_release) {
2379                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
2380                                                                                                 wl_egl_buffer->tbm_surface);
2381                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
2382                                 TPL_ERR("Failed to release. tbm_surface(%p) tsq_err(%d)",
2383                                                 wl_egl_buffer->tbm_surface, tsq_err);
2384                         tbm_surface_internal_unref(wl_egl_buffer->tbm_surface);
2385                 }
2386         }
2387
2388         TPL_INFO("[FORCE_FLUSH]",
2389                          "wl_egl_surface(%p) tbm_queue(%p)",
2390                          wl_egl_surface, wl_egl_surface->tbm_queue);
2391
2392         _print_buffer_lists(wl_egl_surface);
2393
2394         return TPL_ERROR_NONE;
2395 }
2396
2397 static void
2398 _wl_egl_buffer_init(tpl_wl_egl_buffer_t *wl_egl_buffer,
2399                                         tpl_wl_egl_surface_t *wl_egl_surface)
2400 {
2401         struct wl_egl_window *wl_egl_window = wl_egl_surface->wl_egl_window;
2402         struct tizen_private tizen_private(wl_egl_window->driver_private);
2403
2404         TPL_ASSERT(tizen_private);
2405
2406         wl_egl_buffer->draw_done                = TPL_FALSE;
2407         wl_egl_buffer->need_to_commit           = TPL_TRUE;
2408 #if TIZEN_FEATURE_ENABLE
2409         wl_egl_buffer->buffer_release           = NULL;
2410 #endif
2411         wl_egl_buffer->transform                = tizen_private->transform;
2412
2413         if (wl_egl_buffer->w_transform != tizen_private->window_transform) {
2414                 wl_egl_buffer->w_transform          = tizen_private->window_transform;
2415                 wl_egl_buffer->w_rotated            = TPL_TRUE;
2416         }
2417
2418         if (wl_egl_surface->set_serial_is_used) {
2419                 wl_egl_buffer->serial               = wl_egl_surface->serial;
2420         } else {
2421                 wl_egl_buffer->serial               = ++tizen_private->serial;
2422         }
2423
2424         if (wl_egl_buffer->rects) {
2425                 free(wl_egl_buffer->rects);
2426                 wl_egl_buffer->rects                = NULL;
2427                 wl_egl_buffer->num_rects            = 0;
2428         }
2429 }
2430
2431 static tpl_wl_egl_buffer_t *
2432 _get_wl_egl_buffer(tbm_surface_h tbm_surface)
2433 {
2434         tpl_wl_egl_buffer_t *wl_egl_buffer = NULL;
2435         tbm_surface_internal_get_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2436                                                                            (void **)&wl_egl_buffer);
2437         return wl_egl_buffer;
2438 }
2439
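/* Returns the wl_egl_buffer bound to the given tbm_surface, allocating and
 * registering a new one (via tbm user data, appended to the surface's buffer
 * list) on first use, then re-initializes its per-frame state. */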
2440 static tpl_wl_egl_buffer_t *
2441 _wl_egl_buffer_create(tpl_wl_egl_surface_t *wl_egl_surface,
2442                                           tbm_surface_h tbm_surface)
2443 {
2444         tpl_wl_egl_buffer_t  *wl_egl_buffer  = NULL;
2445         struct wl_egl_window *wl_egl_window  = wl_egl_surface->wl_egl_window;
2446
2447         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2448
2449         if (!wl_egl_buffer) {
2450                 wl_egl_buffer = calloc(1, sizeof(tpl_wl_egl_buffer_t));
2451                 TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, NULL);
2452
2453                 tbm_surface_internal_add_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2454                                                                                    (tbm_data_free)__cb_wl_egl_buffer_free);
2455                 tbm_surface_internal_set_user_data(tbm_surface, KEY_WL_EGL_BUFFER,
2456                                                                                    wl_egl_buffer);
2457
2458                 wl_egl_buffer->wl_buffer                = NULL;
2459                 wl_egl_buffer->tbm_surface              = tbm_surface;
2460                 wl_egl_buffer->bo_name                  = _get_tbm_surface_bo_name(tbm_surface);
2461                 wl_egl_buffer->wl_egl_surface           = wl_egl_surface;
2462
2463                 wl_egl_buffer->status                   = RELEASED;
2464
2465                 wl_egl_buffer->acquire_fence_fd         = -1;
2466                 wl_egl_buffer->commit_sync_fd           = -1;
2467                 wl_egl_buffer->presentation_sync_fd     = -1;
2468                 wl_egl_buffer->release_fence_fd         = -1;
2469
2470                 wl_egl_buffer->dx                       = wl_egl_window->dx;
2471                 wl_egl_buffer->dy                       = wl_egl_window->dy;
2472                 wl_egl_buffer->width                    = tbm_surface_get_width(tbm_surface);
2473                 wl_egl_buffer->height                   = tbm_surface_get_height(tbm_surface);
2474
2475                 wl_egl_buffer->w_transform              = -1;
2476
2477                 tpl_gmutex_init(&wl_egl_buffer->mutex);
2478                 tpl_gcond_init(&wl_egl_buffer->cond);
2479
2480                 tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
2481                 __tpl_list_push_back(wl_egl_surface->buffers, (void *)wl_egl_buffer);
2482                 tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
2483
2484                 TPL_INFO("[WL_EGL_BUFFER_CREATE]",
2485                                  "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2486                                  wl_egl_surface, wl_egl_buffer, tbm_surface,
2487                                  wl_egl_buffer->bo_name);
2488         }
2489
2490         _wl_egl_buffer_init(wl_egl_buffer, wl_egl_surface);
2491
2492         return wl_egl_buffer;
2493 }
2494
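/* Buffer status transitions as used in this file:
 *   RELEASED  -> DEQUEUED          (__tpl_wl_egl_surface_dequeue_buffer)
 *   DEQUEUED  -> ENQUEUED          (__tpl_wl_egl_surface_enqueue_buffer)
 *   ENQUEUED  -> ACQUIRED          (_thread_surface_queue_acquire)
 *   ACQUIRED  -> WAITING_SIGNALED  (acquire fence not signaled yet)
 *             -> WAITING_VBLANK    (commit deferred to the next vblank)
 *             -> COMMITTED         (_thread_wl_surface_commit)
 *   COMMITTED -> RELEASED          (wl_buffer / explicit-sync release callbacks) */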
2495 static tbm_surface_h
2496 __tpl_wl_egl_surface_dequeue_buffer(tpl_surface_t *surface, uint64_t timeout_ns,
2497                                                                         int32_t *release_fence)
2498 {
2499         TPL_ASSERT(surface->backend.data);
2500         TPL_ASSERT(surface->display);
2501         TPL_ASSERT(surface->display->backend.data);
2502
2503         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2504         tpl_wl_egl_display_t wl_egl_display(surface->display->backend.data);
2505         tpl_wl_egl_buffer_t *wl_egl_buffer   = NULL;
2506
2507         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
2508         int             bo_name              = 0;
2509         tbm_surface_h   tbm_surface          = NULL;
2510
2511         TPL_OBJECT_UNLOCK(surface);
2512         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2513         if (wl_egl_surface->reset == TPL_TRUE) {
2514                 if (_check_buffer_validate(wl_egl_surface, wl_egl_surface->last_enq_buffer) &&
2515                         tbm_surface_internal_is_valid(wl_egl_surface->last_enq_buffer)) {
2516                         tbm_surface_h last_enq_buffer = wl_egl_surface->last_enq_buffer;
2517                         tpl_wl_egl_buffer_t *enqueued_buffer =
2518                                 _get_wl_egl_buffer(last_enq_buffer);
2519
2520                         if (enqueued_buffer) {
2521                                 tbm_surface_internal_ref(last_enq_buffer);
2522                                 tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2523                                 tpl_gmutex_lock(&enqueued_buffer->mutex);
2524                                 while (enqueued_buffer->status >= ENQUEUED &&
2525                                            enqueued_buffer->status < COMMITTED) {
2526                                         tpl_result_t wait_result;
2527                                         TPL_INFO("[DEQ_AFTER_RESET]",
2528                                                          "wl_egl_surface(%p) waiting for previous wl_egl_buffer(%p) commit",
2529                                                          wl_egl_surface, enqueued_buffer);
2530
2531                                         wait_result = tpl_gcond_timed_wait(&enqueued_buffer->cond,
2532                                                                                                           &enqueued_buffer->mutex,
2533                                                                                                           200); /* 200ms */
2534                                         if (wait_result == TPL_ERROR_TIME_OUT) {
2535                                                 TPL_WARN("timeout occurred while waiting for commit. wl_egl_buffer(%p)",
2536                                                                  enqueued_buffer);
2537                                                 break;
2538                                         }
2539                                 }
2540                                 tpl_gmutex_unlock(&enqueued_buffer->mutex);
2541                                 tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2542                                 tbm_surface_internal_unref(last_enq_buffer);
2543                         }
2544                 }
2545
2546                 wl_egl_surface->last_enq_buffer = NULL;
2547         }
2548         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2549
2550         tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(
2551                                 wl_egl_surface->tbm_queue, CAN_DEQUEUE_TIMEOUT_MS);
2552         TPL_OBJECT_LOCK(surface);
2553
2554
2555         if (tsq_err == TBM_SURFACE_QUEUE_ERROR_TIMEOUT) {
2556                 TPL_WARN("[CAN_DEQUEUE_TIMEOUT] queue(%p) will be reset. surface(%p)",
2557                                  wl_egl_surface->tbm_queue, surface);
2558
2559                 tpl_gthread_pause_in_idle(wl_egl_display->thread);
2560                 /* Locking wl_event_mutex is a fallback in case
2561                  * tpl_gthread_pause_in_idle() fails.
2562                  * If tpl_gthread_pause_in_idle() is successful,
2563                  * locking wl_event_mutex has no additional effect. */
2564                 tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2565                 if (_tbm_queue_force_flush(wl_egl_surface) != TPL_ERROR_NONE) {
2566                         TPL_ERR("Failed to reset tbm_queue after timeout. tbm_queue(%p) surface(%p)",
2567                                         wl_egl_surface->tbm_queue, surface);
2568                         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2569                         tpl_gthread_continue(wl_egl_display->thread);
2570                         return NULL;
2571                 } else {
2572                         tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE;
2573                 }
2574
2575                 wl_egl_surface->vblank_done = TPL_TRUE;
2576
2577                 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2578                 tpl_gthread_continue(wl_egl_display->thread);
2579         }
2580
2581         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2582                 TPL_ERR("Failed to query can_dequeue. tbm_queue(%p) surface(%p)",
2583                                 wl_egl_surface->tbm_queue, surface);
2584                 return NULL;
2585         }
2586
2587         /* Once the queue is ready to dequeue, lock the wl_event_mutex to prevent
2588          * other events from being processed in wayland_egl_thread
2589          * during the dequeue procedure below. */
2590         tpl_gmutex_lock(&wl_egl_display->wl_event_mutex);
2591
2592         surface->width = tbm_surface_queue_get_width(wl_egl_surface->tbm_queue);
2593         surface->height = tbm_surface_queue_get_height(wl_egl_surface->tbm_queue);
2594         wl_egl_surface->width = surface->width;
2595         wl_egl_surface->height = surface->height;
2596
2597
2598         /* If surface->frontbuffer is not NULL, frontbuffer rendering mode will be
2599          * maintained as long as the surface state meets all of the conditions below.
2600          *  1. surface->is_frontbuffer_mode == TPL_TRUE
2601          *   - It may be changed to true or false by calling
2602          *         tpl_surface_set_frontbuffer_mode (will be deprecated)
2603          *      or
2604          *         wl_egl_window_tizen_set_frontbuffer_mode (recommended)
2605          *  2. is_activated == TPL_TRUE
2606          *   - To check whether direct display is possible.
2607          *  3. wl_egl_surface->reset == TPL_FALSE
2608          *   - No tbm_queue reset should have occurred due to a window resize.
2609          * If the surface fails to satisfy any of the above conditions,
2610          *  frontbuffer rendering is stopped and surface->frontbuffer becomes NULL.
2611          */
2612         if (surface->frontbuffer) {
2613                 if (!surface->is_frontbuffer_mode ||
2614                         !wl_egl_surface->is_activated ||
2615                         wl_egl_surface->reset) {
2616                         surface->frontbuffer = NULL;
2617                         wl_egl_surface->need_to_enqueue = TPL_TRUE;
2618                         wl_egl_surface->frontbuffer_activated = TPL_FALSE;
2619                         TPL_INFO("[FRONTBUFFER_RENDERING_STOP]",
2620                                          "wl_egl_surface(%p) wl_egl_window(%p)",
2621                                          wl_egl_surface, wl_egl_surface->wl_egl_window);
2622                 } else {
2623                         bo_name = _get_tbm_surface_bo_name(surface->frontbuffer);
2624                         TPL_LOG_T("WL_EGL",
2625                                           "[DEQ][F] surface->frontbuffer(%p) BO_NAME(%d)",
2626                                           surface->frontbuffer, bo_name);
2627                         TRACE_ASYNC_BEGIN((intptr_t)surface->frontbuffer,
2628                                                           "[DEQ]~[ENQ] BO_NAME:%d",
2629                                                           bo_name);
2630                         wl_egl_surface->frontbuffer_activated = TPL_TRUE;
2631                         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2632                         return surface->frontbuffer;
2633                 }
2634         }
2635
2636         tsq_err = tbm_surface_queue_dequeue(wl_egl_surface->tbm_queue,
2637                                                                                 &tbm_surface);
2638         if (!tbm_surface) {
2639                 TPL_ERR("Failed to dequeue from tbm_queue(%p) wl_egl_surface(%p)| tsq_err = %d",
2640                                 wl_egl_surface->tbm_queue, wl_egl_surface, tsq_err);
2641                 tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2642                 return NULL;
2643         }
2644
2645         tbm_surface_internal_ref(tbm_surface);
2646
2647         wl_egl_buffer = _wl_egl_buffer_create(wl_egl_surface, tbm_surface);
2648         TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer, "Failed to create/get wl_egl_buffer.");
2649
2650         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2651         wl_egl_buffer->status = DEQUEUED;
2652
2653         /* If wl_egl_buffer->release_fence_fd is -1,
2654          * the tbm_surface can be used immediately.
2655          * If not, the user (EGL) has to wait until it is signaled. */
2656         if (release_fence) {
2657 #if TIZEN_FEATURE_ENABLE
2658                 if (wl_egl_display->use_explicit_sync) {
2659                         *release_fence = wl_egl_buffer->release_fence_fd;
2660                         TPL_LOG_D("[EXPLICIT_FENCE]", "wl_egl_surface(%p) wl_egl_buffer(%p) release_fence_fd(%d)",
2661                                           wl_egl_surface, wl_egl_buffer, *release_fence);
2662
2663                         wl_egl_buffer->release_fence_fd = -1;
2664                 } else
2665 #endif
2666                 {
2667                         *release_fence = -1;
2668                 }
2669         }
2670
2671         if (surface->is_frontbuffer_mode && wl_egl_surface->is_activated) {
2672                 if (surface->frontbuffer == NULL) {
2673                         TPL_INFO("[FRONTBUFFER_RENDERING_START]",
2674                                          "wl_egl_surface(%p) wl_egl_window(%p) bo(%d)",
2675                                          wl_egl_surface, wl_egl_surface->wl_egl_window,
2676                                          _get_tbm_surface_bo_name(tbm_surface));
2677                 }
2678                 surface->frontbuffer = tbm_surface;
2679         }
2680
2681         wl_egl_surface->reset = TPL_FALSE;
2682
2683         TRACE_MARK("[DEQ][NEW]BO_NAME:%d", wl_egl_buffer->bo_name);
2684         TRACE_ASYNC_BEGIN((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d",
2685                                           wl_egl_buffer->bo_name);
2686         TPL_LOG_T("WL_EGL", "[DEQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2687                           wl_egl_buffer, tbm_surface, wl_egl_buffer->bo_name,
2688                           release_fence ? *release_fence : -1);
2689
2690         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2691         tpl_gmutex_unlock(&wl_egl_display->wl_event_mutex);
2692
2693         return tbm_surface;
2694 }
2695
2696 static tpl_result_t
2697 __tpl_wl_egl_surface_cancel_buffer(tpl_surface_t *surface,
2698                                                                    tbm_surface_h tbm_surface)
2699 {
2700         TPL_ASSERT(surface);
2701         TPL_ASSERT(surface->backend.data);
2702
2703         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2704         tpl_wl_egl_buffer_t *wl_egl_buffer      = NULL;
2705         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2706
2707         if (!tbm_surface_internal_is_valid(tbm_surface)) {
2708                 TPL_ERR("Invalid buffer. tbm_surface(%p)", tbm_surface);
2709                 return TPL_ERROR_INVALID_PARAMETER;
2710         }
2711
2712         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2713         if (wl_egl_buffer) {
2714                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2715                 wl_egl_buffer->status = RELEASED;
2716                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2717         }
2718
2719         tbm_surface_internal_unref(tbm_surface);
2720
2721         tsq_err = tbm_surface_queue_cancel_dequeue(wl_egl_surface->tbm_queue,
2722                                                                                            tbm_surface);
2723         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2724                 TPL_ERR("Failed to release tbm_surface(%p) surface(%p)",
2725                                 tbm_surface, surface);
2726                 return TPL_ERROR_INVALID_OPERATION;
2727         }
2728
2729         TPL_INFO("[CANCEL_BUFFER]", "wl_egl_surface(%p) tbm_surface(%p) bo(%d)",
2730                           wl_egl_surface, tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2731
2732         return TPL_ERROR_NONE;
2733 }
2734
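/* Enqueue path: damage rects (if any) are copied into the wl_egl_buffer,
 * pending presentation/commit sync fds are handed over from the surface,
 * the buffer is marked ENQUEUED, and tbm_surface_queue_enqueue() makes it
 * acquirable by _thread_surface_queue_acquire() on the wl_egl thread. */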
2735 static tpl_result_t
2736 __tpl_wl_egl_surface_enqueue_buffer(tpl_surface_t *surface,
2737                 tbm_surface_h tbm_surface,
2738                 int num_rects, const int *rects, int32_t acquire_fence)
2739 {
2740         TPL_ASSERT(surface);
2741         TPL_ASSERT(surface->display);
2742         TPL_ASSERT(surface->backend.data);
2743         TPL_ASSERT(tbm_surface);
2744         TPL_OBJECT_CHECK_RETURN(surface, TPL_ERROR_INVALID_PARAMETER);
2745
2746         tpl_wl_egl_surface_t wl_egl_surface(surface->backend.data);
2747         tpl_wl_egl_buffer_t *wl_egl_buffer      = NULL;
2748         tbm_surface_queue_error_e tsq_err       = TBM_SURFACE_QUEUE_ERROR_NONE;
2749         int bo_name                             = -1;
2750
2751         if (!tbm_surface_internal_is_valid(tbm_surface)) {
2752                 TPL_ERR("Failed to enqueue tbm_surface(%p) Invalid value.",
2753                                 tbm_surface);
2754                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2755                 return TPL_ERROR_INVALID_PARAMETER;
2756         }
2757
2758         wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2759         if (!wl_egl_buffer) {
2760                 TPL_ERR("Failed to get wl_egl_buffer from tbm_surface(%p)", tbm_surface);
2761                 return TPL_ERROR_INVALID_PARAMETER;
2762         }
2763
2764         bo_name = _get_tbm_surface_bo_name(tbm_surface);
2765
2766         TRACE_MARK("[ENQ] BO_NAME:%d", bo_name);
2767
2768         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2769
2770         /* If damage region information was received, save it to wl_egl_buffer */
2771         if (num_rects && rects) {
2772                 if (wl_egl_buffer->rects != NULL) {
2773                         free(wl_egl_buffer->rects);
2774                         wl_egl_buffer->rects = NULL;
2775                         wl_egl_buffer->num_rects = 0;
2776                 }
2777
2778                 wl_egl_buffer->rects = (int *)calloc(1, (sizeof(int) * 4 * num_rects));
2779                 if (!wl_egl_buffer->rects) {
2780                         TPL_ERR("Failed to allocate memory for damage rects info.");
2781                         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2782                         return TPL_ERROR_OUT_OF_MEMORY;
2783                 }
2784
2785                 wl_egl_buffer->num_rects = num_rects;
2786
2787                 memcpy((char *)wl_egl_buffer->rects, (char *)rects, sizeof(int) * 4 * num_rects);
2788         }
2789
2790         if (!wl_egl_surface->need_to_enqueue ||
2791                 !wl_egl_buffer->need_to_commit) {
2792
2793                 if (acquire_fence != -1) {
2794                         close(acquire_fence);
2795                         acquire_fence = -1;
2796                 }
2797                 TPL_LOG_T("FRONTBUFFER_MODE", "[ENQ_SKIP] tbm_surface(%p) bo(%d) no need to enqueue",
2798                                   tbm_surface, _get_tbm_surface_bo_name(tbm_surface));
2799                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2800                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2801                 return TPL_ERROR_NONE;
2802         }
2803
2804         /* In frontbuffer mode, tbm_surface_queue_enqueue, acquire, and commit are
2805          * skipped if the tbm_surface the client wants to enqueue is the same as
2806          * the surface->frontbuffer that is already set.
2807          */
2808         if (surface->is_frontbuffer_mode) {
2809                 /* The first buffer to be activated in frontbuffer mode must be
2810                  * committed. Subsequent frames do not need to be committed because
2811                  * the buffer is already displayed.
2812                  */
2813                 if (surface->frontbuffer == tbm_surface)
2814                         wl_egl_surface->need_to_enqueue = TPL_FALSE;
2815         }
2816
2817         if (wl_egl_buffer->acquire_fence_fd != -1)
2818                 close(wl_egl_buffer->acquire_fence_fd);
2819
2820         wl_egl_buffer->acquire_fence_fd = acquire_fence;
2821
2822         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
2823         if (wl_egl_surface->presentation_sync.fd != -1) {
2824                 wl_egl_buffer->presentation_sync_fd  = wl_egl_surface->presentation_sync.fd;
2825                 wl_egl_surface->presentation_sync.fd = -1;
2826         }
2827         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
2828
2829         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
2830         if (wl_egl_surface->commit_sync.fd != -1) {
2831                 wl_egl_buffer->commit_sync_fd  = wl_egl_surface->commit_sync.fd;
2832                 wl_egl_surface->commit_sync.fd = -1;
2833                 TRACE_ASYNC_BEGIN(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
2834                                                   _get_tbm_surface_bo_name(tbm_surface));
2835         }
2836         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
2837
2838         wl_egl_buffer->status = ENQUEUED;
2839         TPL_LOG_T("WL_EGL",
2840                           "[ENQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
2841                           wl_egl_buffer, tbm_surface, bo_name, acquire_fence);
2842
2843         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2844
2845         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2846         wl_egl_surface->last_enq_buffer = tbm_surface;
2847         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2848
2849         tsq_err = tbm_surface_queue_enqueue(wl_egl_surface->tbm_queue,
2850                                                                                 tbm_surface);
2851         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2852                 tbm_surface_internal_unref(tbm_surface);
2853                 TPL_ERR("Failed to enqueue tbm_surface(%p). wl_egl_surface(%p) tsq_err=%d",
2854                                 tbm_surface, wl_egl_surface, tsq_err);
2855                 TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2856                 return TPL_ERROR_INVALID_OPERATION;
2857         }
2858
2859         tbm_surface_internal_unref(tbm_surface);
2860
2861         TRACE_ASYNC_END((intptr_t)tbm_surface, "[DEQ]~[ENQ] BO_NAME:%d", bo_name);
2862
2863         return TPL_ERROR_NONE;
2864 }
2865
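/* Dispatched on the wl_egl thread when the acquire fence fd of a
 * WAITING_SIGNALED buffer signals. The buffer is then committed right away,
 * or pushed onto vblank->waiting_buffers while vblank waiting is active. */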
2866 static tpl_bool_t
2867 __thread_func_waiting_source_dispatch(tpl_gsource *gsource, uint64_t message)
2868 {
2869         tpl_wl_egl_buffer_t wl_egl_buffer(tpl_gsource_get_data(gsource));
2870         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_buffer, TPL_FALSE);
2871
2872         tpl_wl_egl_surface_t *wl_egl_surface    = wl_egl_buffer->wl_egl_surface;
2873         TPL_CHECK_ON_NULL_RETURN_VAL(wl_egl_surface, TPL_FALSE);
2874
2875         tbm_surface_h tbm_surface               = wl_egl_buffer->tbm_surface;
2876         TPL_CHECK_ON_NULL_RETURN_VAL(tbm_surface, TPL_FALSE);
2877         TPL_CHECK_ON_FALSE_RETURN_VAL(tbm_surface_internal_is_valid(tbm_surface), TPL_FALSE);
2878
2879         TRACE_ASYNC_END(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2880                                         wl_egl_buffer->acquire_fence_fd);
2881
2882         TPL_LOG_D("[RENDER DONE]", "wl_egl_surface(%p) wl_egl_buffer(%p) tbm_surface(%p)",
2883                           wl_egl_surface, wl_egl_buffer, tbm_surface);
2884
2885         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
2886         tpl_gmutex_lock(&wl_egl_buffer->mutex);
2887
2888         wl_egl_buffer->status = WAITING_VBLANK;
2889
2890         TPL_LOG_D("[FINALIZE]", "wl_egl_surface(%p) wl_egl_buffer(%p) wait_source(%p) fence_fd(%d)",
2891                           wl_egl_surface, wl_egl_buffer, wl_egl_buffer->waiting_source,
2892                           wl_egl_buffer->acquire_fence_fd);
2893
2894         wl_egl_buffer->acquire_fence_fd = -1;
2895         wl_egl_buffer->waiting_source = NULL;
2896
2897         if (!wl_egl_surface->vblank_enable || wl_egl_surface->vblank_done) {
2898                 _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
2899                 tpl_gcond_signal(&wl_egl_buffer->cond);
2900         } else {
2901                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2902                 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers,
2903                                                          wl_egl_buffer);
2904                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2905         }
2906
2907         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
2908         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
2909
2910         return TPL_FALSE;
2911 }
2912
2913 static void
2914 __thread_func_waiting_source_finalize(tpl_gsource *gsource)
2915 {
2916         TPL_IGNORE(gsource);
2917 }
2918
2919 static tpl_gsource_functions buffer_funcs = {
2920         .prepare = NULL,
2921         .check = NULL,
2922         .dispatch = __thread_func_waiting_source_dispatch,
2923         .finalize = __thread_func_waiting_source_finalize,
2924 };
2925
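/* Drains all buffers that can currently be acquired from the tbm_queue.
 * Each acquired buffer is committed immediately, or deferred either to a
 * fence waiting source (WAITING_SIGNALED) or to the vblank waiting list
 * (WAITING_VBLANK) when it cannot be committed yet. */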
2926 static tpl_result_t
2927 _thread_surface_queue_acquire(tpl_wl_egl_surface_t *wl_egl_surface)
2928 {
2929         tbm_surface_h tbm_surface            = NULL;
2930         tbm_surface_queue_error_e tsq_err    = TBM_SURFACE_QUEUE_ERROR_NONE;
2931         tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;
2932         tpl_wl_egl_buffer_t *wl_egl_buffer   = NULL;
2933         tpl_bool_t ready_to_commit           = TPL_FALSE;
2934
2935         while (tbm_surface_queue_can_acquire(wl_egl_surface->tbm_queue, 0)) {
2936                 tsq_err = tbm_surface_queue_acquire(wl_egl_surface->tbm_queue,
2937                                                                                         &tbm_surface);
2938                 if (!tbm_surface || tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) {
2939                         TPL_ERR("Failed to acquire from tbm_queue(%p)",
2940                                         wl_egl_surface->tbm_queue);
2941                         return TPL_ERROR_INVALID_OPERATION;
2942                 }
2943
2944                 tbm_surface_internal_ref(tbm_surface);
2945
2946                 wl_egl_buffer = _get_wl_egl_buffer(tbm_surface);
2947                 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
2948                                                                            "wl_egl_buffer should not be NULL");
2949
2950                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
2951
2952                 wl_egl_buffer->status = ACQUIRED;
2953
2954                 TPL_LOG_T("WL_EGL", "[ACQ] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
2955                                   wl_egl_buffer, tbm_surface,
2956                                   _get_tbm_surface_bo_name(tbm_surface));
2957
2958                 if (wl_egl_buffer->acquire_fence_fd != -1) {
2959 #if TIZEN_FEATURE_ENABLE
2960                         if (wl_egl_display->use_explicit_sync)
2961                                 ready_to_commit = TPL_TRUE;
2962                         else
2963 #endif
2964                         {
2965                                 if (wl_egl_buffer->waiting_source) {
2966                                         tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
2967                                         wl_egl_buffer->waiting_source = NULL;
2968                                 }
2969
2970                                 wl_egl_buffer->waiting_source =
2971                                         tpl_gsource_create(wl_egl_display->thread, wl_egl_buffer,
2972                                                                            wl_egl_buffer->acquire_fence_fd,
2973                                                                            FD_TYPE_FENCE, &buffer_funcs,
2974                                                                            SOURCE_TYPE_DISPOSABLE);
2975                                 wl_egl_buffer->status = WAITING_SIGNALED;
2976
2977                                 TRACE_ASYNC_BEGIN(wl_egl_buffer->acquire_fence_fd, "FENCE WAIT fd(%d)",
2978                                                                   wl_egl_buffer->acquire_fence_fd);
2979
2980                                 ready_to_commit = TPL_FALSE;
2981                         }
2982                 } else {
2983                         ready_to_commit = TPL_TRUE;
2984                 }
2985
2986                 if (ready_to_commit) {
2987                         if (!wl_egl_surface->vblank_enable || wl_egl_surface->vblank_done)
2988                                 ready_to_commit = TPL_TRUE;
2989                         else {
2990                                 wl_egl_buffer->status = WAITING_VBLANK;
2991                                 tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
2992                                 __tpl_list_push_back(wl_egl_surface->vblank->waiting_buffers, wl_egl_buffer);
2993                                 tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
2994                                 ready_to_commit = TPL_FALSE;
2995                         }
2996                 }
2997
2998                 if (ready_to_commit) {
2999                         _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3000                         tpl_gcond_signal(&wl_egl_buffer->cond);
3001                 }
3002
3003                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3004         }
3005
3006         return TPL_ERROR_NONE;
3007 }
3008
3009 /* -- BEGIN -- tdm_client vblank callback function */
3010 static void
3011 __cb_tdm_client_vblank(tdm_client_vblank *vblank, tdm_error error,
3012                                            unsigned int sequence, unsigned int tv_sec,
3013                                            unsigned int tv_usec, void *user_data)
3014 {
3015         tpl_wl_egl_surface_t wl_egl_surface(user_data);
3016
3017         TRACE_ASYNC_END((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3018         TPL_LOG_D("[VBLANK_DONE]", "wl_egl_surface(%p)", wl_egl_surface);
3019
3020         if (error == TDM_ERROR_TIMEOUT)
3021                 TPL_WARN("[TDM_ERROR_TIMEOUT] It will keep going. wl_egl_surface(%p)",
3022                                  wl_egl_surface);
3023
3024         tpl_gmutex_lock(&wl_egl_surface->surf_mutex);
3025         wl_egl_surface->vblank_done = TPL_TRUE;
3026
3027         if (wl_egl_surface->vblank && wl_egl_surface->vblank->waiting_buffers) {
3028                 tpl_bool_t is_empty = TPL_TRUE;
3029                 do {
3030                         tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
3031                         tpl_wl_egl_buffer_t wl_egl_buffer(
3032                                 __tpl_list_pop_front( wl_egl_surface->vblank->waiting_buffers, NULL));
3033                         is_empty = __tpl_list_is_empty(wl_egl_surface->vblank->waiting_buffers);
3034                         tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
3035
3036                         if (!wl_egl_buffer) break;
3037
3038                         tpl_gmutex_lock(&wl_egl_buffer->mutex);
3039                         _thread_wl_surface_commit(wl_egl_surface, wl_egl_buffer);
3040                         tpl_gcond_signal(&wl_egl_buffer->cond);
3041                         tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3042
3043                         /* If a tdm error such as TIMEOUT occurred,
3044                          * flush all vblank-waiting buffers of this wl_egl_surface.
3045                          * Otherwise, only one wl_egl_buffer is committed per vblank event.
3046                          */
3047                         if (error == TDM_ERROR_NONE && wl_egl_surface->post_interval > 0)
3048                                 break;
3049                 } while (!is_empty);
3050
3051                 wl_egl_surface->vblank_enable = (wl_egl_surface->post_interval > 0);
3052         }
3053         tpl_gmutex_unlock(&wl_egl_surface->surf_mutex);
3054 }
3055 /* -- END -- tdm_client vblank callback function */
3056
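/* Explicit-sync release path: the compositor signals buffer release through
 * zwp_linux_buffer_release_v1, either with a release fence (fenced_release)
 * or without one (immediate_release). In both cases the buffer goes back to
 * RELEASED and is released to the tbm_queue. */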
3057 #if TIZEN_FEATURE_ENABLE
3058 static void
3059 __cb_buffer_fenced_release(void *data,
3060                                 struct zwp_linux_buffer_release_v1 *release, int32_t fence)
3061 {
3062         tpl_wl_egl_buffer_t wl_egl_buffer(data);
3063         tbm_surface_h tbm_surface           = NULL;
3064
3065         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3066
3067         tbm_surface = wl_egl_buffer->tbm_surface;
3068
3069         if (tbm_surface_internal_is_valid(tbm_surface)) {
3070                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3071
3072                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3073                 if (wl_egl_buffer->status == COMMITTED) {
3074                         tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3075
3076                         zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3077                         wl_egl_buffer->buffer_release = NULL;
3078
3079                         wl_egl_buffer->release_fence_fd = fence;
3080                         wl_egl_buffer->status = RELEASED;
3081
3082                         TRACE_MARK("[FENCED_RELEASE] BO(%d) fence(%d)",
3083                                            _get_tbm_surface_bo_name(tbm_surface),
3084                                            fence);
3085                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3086                                                         _get_tbm_surface_bo_name(tbm_surface));
3087
3088                         TPL_LOG_T("WL_EGL",
3089                                           "[FENCED_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d) fence(%d)",
3090                                           wl_egl_buffer, tbm_surface,
3091                                           _get_tbm_surface_bo_name(tbm_surface),
3092                                           fence);
3093
3094                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3095                                                                                                 tbm_surface);
3096                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3097                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3098                 }
3099
3100                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3101
3102                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3103                         tbm_surface_internal_unref(tbm_surface);
3104
3105         } else {
3106                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3107         }
3108 }
3109
3110 static void
3111 __cb_buffer_immediate_release(void *data,
3112                                                           struct zwp_linux_buffer_release_v1 *release)
3113 {
3114         tpl_wl_egl_buffer_t wl_egl_buffer(data);
3115         tbm_surface_h tbm_surface           = NULL;
3116
3117         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer);
3118
3119         tbm_surface = wl_egl_buffer->tbm_surface;
3120
3121         if (tbm_surface_internal_is_valid(tbm_surface)) {
3122                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3123
3124                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3125                 if (wl_egl_buffer->status == COMMITTED) {
3126                         tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3127
3128                         zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
3129                         wl_egl_buffer->buffer_release = NULL;
3130
3131                         wl_egl_buffer->release_fence_fd = -1;
3132                         wl_egl_buffer->status = RELEASED;
3133
3134                         TRACE_MARK("[IMMEDIATE_RELEASE] BO(%d)",
3135                                            _get_tbm_surface_bo_name(tbm_surface));
3136                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3137                                                         _get_tbm_surface_bo_name(tbm_surface));
3138
3139                         TPL_LOG_T("WL_EGL",
3140                                           "[IMMEDIATE_RELEASE] wl_egl_buffer(%p) tbm_surface(%p) bo(%d)",
3141                                           wl_egl_buffer, tbm_surface,
3142                                           _get_tbm_surface_bo_name(tbm_surface));
3143
3144                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3145                                                                                                 tbm_surface);
3146                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3147                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3148                 }
3149
3150                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3151
3152                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3153                         tbm_surface_internal_unref(tbm_surface);
3154
3155         } else {
3156                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3157         }
3158 }
3159
3160 static const struct zwp_linux_buffer_release_v1_listener zwp_release_listner = {
3161         __cb_buffer_fenced_release,
3162         __cb_buffer_immediate_release,
3163 };
3164 #endif
3165
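/* wl_buffer.release path, used when the explicit-sync release listener was
 * not registered (see _thread_wl_surface_commit): the buffer goes back to
 * RELEASED and is released to the tbm_queue. */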
3166 static void
3167 __cb_wl_buffer_release(void *data, struct wl_proxy *wl_buffer)
3168 {
3169         tpl_wl_egl_buffer_t wl_egl_buffer(data);
3170         tbm_surface_h tbm_surface = NULL;
3171
3172         TPL_CHECK_ON_NULL_RETURN(wl_egl_buffer)
3173
3174         tbm_surface = wl_egl_buffer->tbm_surface;
3175
3176         if (tbm_surface_internal_is_valid(tbm_surface)) {
3177                 tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
3178                 tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_INVALID_SURFACE;
3179
3180                 tpl_gmutex_lock(&wl_egl_buffer->mutex);
3181
3182                 if (wl_egl_buffer->status == COMMITTED) {
3183
3184                         tsq_err = tbm_surface_queue_release(wl_egl_surface->tbm_queue,
3185                                                                                                 tbm_surface);
3186                         if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE)
3187                                 TPL_ERR("tbm_surface(%p) tsq_err(%d)", tbm_surface, tsq_err);
3188
3189                         wl_egl_buffer->status = RELEASED;
3190
3191                         TRACE_MARK("[RELEASE] BO(%d)", _get_tbm_surface_bo_name(tbm_surface));
3192                         TRACE_ASYNC_END((intptr_t)tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3193                                                         _get_tbm_surface_bo_name(tbm_surface));
3194
3195                         TPL_LOG_T("WL_EGL", "[REL] wl_buffer(%p) tbm_surface(%p) bo(%d)",
3196                                           wl_egl_buffer->wl_buffer, tbm_surface,
3197                                           _get_tbm_surface_bo_name(tbm_surface));
3198                 }
3199
3200                 tpl_gmutex_unlock(&wl_egl_buffer->mutex);
3201
3202                 if (tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE)
3203                         tbm_surface_internal_unref(tbm_surface);
3204         } else {
3205                 TPL_ERR("Invalid parameter | tbm_surface(%p)", tbm_surface);
3206         }
3207 }
3208
3209 static const struct wl_buffer_listener wl_buffer_release_listener = {
3210         (void *)__cb_wl_buffer_release,
3211 };
3212 #if TIZEN_FEATURE_ENABLE
3213 static void
3214 __cb_presentation_feedback_sync_output(void *data,
3215                         struct wp_presentation_feedback *presentation_feedback,
3216                         struct wl_output *output)
3217 {
3218         TPL_IGNORE(data);
3219         TPL_IGNORE(presentation_feedback);
3220         TPL_IGNORE(output);
3221         /* Nothing to do */
3222 }
3223
3224 static void
3225 __cb_presentation_feedback_presented(void *data,
3226                         struct wp_presentation_feedback *presentation_feedback,
3227                         uint32_t tv_sec_hi,
3228                         uint32_t tv_sec_lo,
3229                         uint32_t tv_nsec,
3230                         uint32_t refresh_nsec,
3231                         uint32_t seq_hi,
3232                         uint32_t seq_lo,
3233                         uint32_t flags)
3234 {
3235         TPL_IGNORE(tv_sec_hi);
3236         TPL_IGNORE(tv_sec_lo);
3237         TPL_IGNORE(tv_nsec);
3238         TPL_IGNORE(refresh_nsec);
3239         TPL_IGNORE(seq_hi);
3240         TPL_IGNORE(seq_lo);
3241         TPL_IGNORE(flags);
3242
3243         struct pst_feedback *pst_feedback       = (struct pst_feedback *)data;
3244         tpl_wl_egl_surface_t *wl_egl_surface    = pst_feedback->wl_egl_surface;
3245
3246         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3247
3248         TPL_LOG_D("[PRESENTED]", "pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3249                           pst_feedback, presentation_feedback, pst_feedback->bo_name);
3250
3251         if (pst_feedback->pst_sync_fd != -1) {
3252                 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3253                                                 "[PRESENTATION_SYNC] bo(%d)",
3254                                                 pst_feedback->bo_name);
3255                 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
3256                 pst_feedback->pst_sync_fd = -1;
3257         }
3258
3259         wp_presentation_feedback_destroy(presentation_feedback);
3260
3261         pst_feedback->presentation_feedback = NULL;
3262         pst_feedback->wl_egl_surface        = NULL;
3263         pst_feedback->bo_name               = 0;
3264
3265         __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3266                                                    TPL_FIRST, NULL);
3267
3268         free(pst_feedback);
3269
3270         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3271 }
3272
3273 static void
3274 __cb_presentation_feedback_discarded(void *data,
3275                         struct wp_presentation_feedback *presentation_feedback)
3276 {
3277         struct pst_feedback *pst_feedback       = (struct pst_feedback *)data;
3278         tpl_wl_egl_surface_t *wl_egl_surface    = pst_feedback->wl_egl_surface;
3279
3280         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3281
3282         TPL_LOG_D("[DISCARDED]", "pst_feedback(%p) presentation_feedback(%p) bo(%d)",
3283                           pst_feedback, presentation_feedback, pst_feedback->bo_name);
3284
3285         if (pst_feedback->pst_sync_fd != -1) {
3286                 TRACE_ASYNC_END(pst_feedback->pst_sync_fd,
3287                                                 "[PRESENTATION_SYNC] bo(%d)",
3288                                                 pst_feedback->bo_name);
3289                 send_signal(pst_feedback->pst_sync_fd, "PST_FEEDBACK");
3290                 pst_feedback->pst_sync_fd = -1;
3291         }
3292
3293         wp_presentation_feedback_destroy(presentation_feedback);
3294
3295         pst_feedback->presentation_feedback = NULL;
3296         pst_feedback->wl_egl_surface        = NULL;
3297         pst_feedback->bo_name               = 0;
3298
3299         __tpl_list_remove_data(wl_egl_surface->presentation_feedbacks, pst_feedback,
3300                                                    TPL_FIRST, NULL);
3301
3302         free(pst_feedback);
3303
3304         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3305 }
3306
3307 static const struct wp_presentation_feedback_listener feedback_listener = {
3308         __cb_presentation_feedback_sync_output, /* sync_output feedback */
3309         __cb_presentation_feedback_presented,
3310         __cb_presentation_feedback_discarded
3311 };
3312 #endif
3313
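/* Requests a single tdm vblank event using the surface's post_interval.
 * __cb_tdm_client_vblank() above commits the buffers queued on
 * vblank->waiting_buffers once the event arrives. */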
3314 static tpl_result_t
3315 _thread_surface_vblank_wait(tpl_wl_egl_surface_t *wl_egl_surface)
3316 {
3317         tdm_error tdm_err                       = TDM_ERROR_NONE;
3318         tpl_surface_vblank_t *vblank            = wl_egl_surface->vblank;
3319
3320         tdm_err = tdm_client_vblank_wait(vblank->tdm_vblank,
3321                         wl_egl_surface->post_interval,
3322                         __cb_tdm_client_vblank,
3323                         (void *)wl_egl_surface);
3324
3325         if (tdm_err == TDM_ERROR_NONE) {
3326                 wl_egl_surface->vblank_done = TPL_FALSE;
3327                 TRACE_ASYNC_BEGIN((intptr_t)wl_egl_surface, "WAIT_VBLANK");
3328         } else {
3329                 TPL_ERR("Failed to tdm_client_vblank_wait. tdm_err(%d)", tdm_err);
3330                 return TPL_ERROR_INVALID_OPERATION;
3331         }
3332
3333         return TPL_ERROR_NONE;
3334 }
3335
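/* Attaches the wl_egl_buffer to the wl_surface and commits it: creates the
 * wl_buffer lazily, sets up presentation feedback and buffer transforms,
 * posts damage (surface coordinates before wl_surface v4, buffer coordinates
 * from v4 on), forwards the acquire fence when explicit sync is in use, then
 * commits, flushes, marks the buffer COMMITTED, schedules a vblank wait if
 * needed and signals the commit_sync fd.
 * Called with wl_egl_buffer->mutex held by all callers in this file. */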
3336 static void
3337 _thread_wl_surface_commit(tpl_wl_egl_surface_t *wl_egl_surface,
3338                                                   tpl_wl_egl_buffer_t *wl_egl_buffer)
3339 {
3340         tpl_wl_egl_display_t *wl_egl_display    = wl_egl_surface->wl_egl_display;
3341         struct wl_surface *wl_surface           = wl_egl_surface->wl_surface;
3342         struct wl_egl_window *wl_egl_window     = wl_egl_surface->wl_egl_window;
3343         uint32_t version;
3344
3345         TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer != NULL,
3346                                                                    "wl_egl_buffer should not be NULL");
3347
3348         if (wl_egl_buffer->wl_buffer == NULL) {
3349                 wl_egl_buffer->wl_buffer =
3350                         (struct wl_proxy *)wayland_tbm_client_create_buffer(
3351                                                 wl_egl_display->wl_tbm_client,
3352                                                 wl_egl_buffer->tbm_surface);
3353
3354                 TPL_CHECK_ON_FALSE_ASSERT_FAIL(wl_egl_buffer->wl_buffer != NULL,
3355                                                                            "[FATAL] Failed to create wl_buffer");
3356
3357                 TPL_INFO("[WL_BUFFER_CREATE]",
3358                                  "wl_egl_surface(%p) wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p)",
3359                                  wl_egl_surface, wl_egl_buffer, wl_egl_buffer->wl_buffer,
3360                                  wl_egl_buffer->tbm_surface);
3361
3362 #if TIZEN_FEATURE_ENABLE
3363                 if (!wl_egl_display->use_explicit_sync ||
3364                         wl_egl_buffer->acquire_fence_fd == -1)
3365 #endif
3366                 {
3367                         wl_buffer_add_listener((struct wl_buffer *)wl_egl_buffer->wl_buffer,
3368                                                                    &wl_buffer_release_listener,
3369                                                                    wl_egl_buffer);
3370                 }
3371         }
3372
3373         version = wl_proxy_get_version((struct wl_proxy *)wl_surface);
3374
3375 #if TIZEN_FEATURE_ENABLE
3376         /* create presentation feedback and add listener */
3377         tpl_gmutex_lock(&wl_egl_surface->presentation_sync.mutex);
3378         if (wl_egl_display->presentation && wl_egl_buffer->presentation_sync_fd != -1) {
3379
3380                 struct pst_feedback *pst_feedback = NULL;
3381                 pst_feedback = (struct pst_feedback *) calloc(1, sizeof(struct pst_feedback));
3382                 if (pst_feedback) {
3383                         pst_feedback->presentation_feedback =
3384                                 wp_presentation_feedback(wl_egl_display->presentation,
3385                                                                                  wl_surface);
3386
3387                         pst_feedback->wl_egl_surface        = wl_egl_surface;
3388                         pst_feedback->bo_name               = wl_egl_buffer->bo_name;
3389
3390                         pst_feedback->pst_sync_fd           = wl_egl_buffer->presentation_sync_fd;
3391                         wl_egl_buffer->presentation_sync_fd = -1;
3392
3393                         wp_presentation_feedback_add_listener(pst_feedback->presentation_feedback,
3394                                                                                                   &feedback_listener, pst_feedback);
3395                         __tpl_list_push_back(wl_egl_surface->presentation_feedbacks, pst_feedback);
3396                         TRACE_ASYNC_BEGIN(pst_feedback->pst_sync_fd,
3397                                                           "[PRESENTATION_SYNC] bo(%d)",
3398                                                           pst_feedback->bo_name);
3399                 } else {
3400                         TPL_ERR("Failed to create presentation feedback. wl_egl_buffer(%p)",
3401                                         wl_egl_buffer);
3402                         send_signal(wl_egl_buffer->presentation_sync_fd, "PST_SYNC");
3403                         wl_egl_buffer->presentation_sync_fd = -1;
3404                 }
3405         }
3406         tpl_gmutex_unlock(&wl_egl_surface->presentation_sync.mutex);
3407 #endif
3408
3409         if (wl_egl_buffer->w_rotated == TPL_TRUE) {
3410                 if (version > 1) {
3411                         wayland_tbm_client_set_buffer_transform(
3412                                         wl_egl_display->wl_tbm_client,
3413                                         (void *)wl_egl_buffer->wl_buffer,
3414                                         wl_egl_buffer->w_transform);
3415                         TPL_INFO("[W_TRANSFORM]",
3416                                          "wl_egl_surface(%p) wl_egl_buffer(%p) w_transform(%d)",
3417                                          wl_egl_surface, wl_egl_buffer, wl_egl_buffer->w_transform);
3418                 }
3419                 wl_egl_buffer->w_rotated = TPL_FALSE;
3420         }
3421
3422         if (wl_egl_surface->latest_transform != wl_egl_buffer->transform) {
3423                 if (version > 1) {
3424                         wl_surface_set_buffer_transform(wl_surface, wl_egl_buffer->transform);
3425                         TPL_INFO("[TRANSFORM]",
3426                                          "wl_egl_surface(%p) wl_egl_buffer(%p) transform(%d -> %d)",
3427                                          wl_egl_surface, wl_egl_buffer,
3428                                          wl_egl_surface->latest_transform, wl_egl_buffer->transform);
3429                 }
3430                 wl_egl_surface->latest_transform = wl_egl_buffer->transform;
3431         }
3432
3433         if (wl_egl_window) {
3434                 wl_egl_window->attached_width = wl_egl_buffer->width;
3435                 wl_egl_window->attached_height = wl_egl_buffer->height;
3436         }
3437
3438         wl_surface_attach(wl_surface, (void *)wl_egl_buffer->wl_buffer,
3439                                           wl_egl_buffer->dx, wl_egl_buffer->dy);
3440
3441         if (wl_egl_buffer->num_rects < 1 || wl_egl_buffer->rects == NULL) {
3442                 if (version < 4) {
3443                         wl_surface_damage(wl_surface,
3444                                                           wl_egl_buffer->dx, wl_egl_buffer->dy,
3445                                                           wl_egl_buffer->width, wl_egl_buffer->height);
3446                 } else {
3447                         wl_surface_damage_buffer(wl_surface,
3448                                                                          0, 0,
3449                                                                          wl_egl_buffer->width, wl_egl_buffer->height);
3450                 }
3451         } else {
3452                 int i;
3453                 for (i = 0; i < wl_egl_buffer->num_rects; i++) {
3454                         int inverted_y =
3455                                 wl_egl_buffer->height - (wl_egl_buffer->rects[i * 4 + 1] +
3456                                                 wl_egl_buffer->rects[i * 4 + 3]);
3457                         if (version < 4) {
3458                                 wl_surface_damage(wl_surface,
3459                                                                   wl_egl_buffer->rects[i * 4 + 0],
3460                                                                   inverted_y,
3461                                                                   wl_egl_buffer->rects[i * 4 + 2],
3462                                                                   wl_egl_buffer->rects[i * 4 + 3]);
3463                         } else {
3464                                 wl_surface_damage_buffer(wl_surface,
3465                                                                                  wl_egl_buffer->rects[i * 4 + 0],
3466                                                                                  inverted_y,
3467                                                                                  wl_egl_buffer->rects[i * 4 + 2],
3468                                                                                  wl_egl_buffer->rects[i * 4 + 3]);
3469                         }
3470                 }
3471         }
3472
3473         wayland_tbm_client_set_buffer_serial(wl_egl_display->wl_tbm_client,
3474                                                 (void *)wl_egl_buffer->wl_buffer,
3475                                                 wl_egl_buffer->serial);
3476 #if TIZEN_FEATURE_ENABLE
3477         if (wl_egl_display->use_explicit_sync &&
3478                 wl_egl_buffer->acquire_fence_fd != -1) {
3479
3480                 zwp_linux_surface_synchronization_v1_set_acquire_fence(wl_egl_surface->surface_sync,
3481                                                                                                                            wl_egl_buffer->acquire_fence_fd);
3482                 TPL_LOG_D("[SET_ACQUIRE_FENCE][1/2]", "wl_egl_surface(%p) tbm_surface(%p) acquire_fence(%d)",
3483                                   wl_egl_surface, wl_egl_buffer->tbm_surface, wl_egl_buffer->acquire_fence_fd);
3484                 close(wl_egl_buffer->acquire_fence_fd);
3485                 wl_egl_buffer->acquire_fence_fd = -1;
3486
3487                 wl_egl_buffer->buffer_release =
3488                         zwp_linux_surface_synchronization_v1_get_release(wl_egl_surface->surface_sync);
3489                 if (!wl_egl_buffer->buffer_release) {
3490                         TPL_ERR("Failed to get buffer_release. wl_egl_surface(%p)", wl_egl_surface);
3491                 } else {
3492                         zwp_linux_buffer_release_v1_add_listener(
3493                                 wl_egl_buffer->buffer_release, &zwp_release_listner, wl_egl_buffer);
3494                         TPL_LOG_D("[SET_ACQUIRE_FENCE][2/2]", "add explicit_sync_release_listener.");
3495                 }
3496         }
3497 #endif
3498
3499         wl_surface_commit(wl_surface);
3500
3501         wl_display_flush(wl_egl_display->wl_display);
3502
3503         TRACE_ASYNC_BEGIN((intptr_t)wl_egl_buffer->tbm_surface, "[COMMIT ~ RELEASE] BO(%d)",
3504                                           wl_egl_buffer->bo_name);
3505
3506         wl_egl_buffer->need_to_commit   = TPL_FALSE;
3507         wl_egl_buffer->status           = COMMITTED;
3508         if (wl_egl_surface->last_enq_buffer == wl_egl_buffer->tbm_surface)
3509                 wl_egl_surface->last_enq_buffer = NULL;
3510
3511         TPL_LOG_T("WL_EGL",
3512                           "[COMMIT] wl_egl_buffer(%p) wl_buffer(%p) tbm_surface(%p) bo(%d)",
3513                           wl_egl_buffer, wl_egl_buffer->wl_buffer, wl_egl_buffer->tbm_surface,
3514                           wl_egl_buffer->bo_name);
3515
3516         if (wl_egl_surface->post_interval > 0 && wl_egl_surface->vblank != NULL) {
3517                 wl_egl_surface->vblank_enable = TPL_TRUE;
3518                 if (_thread_surface_vblank_wait(wl_egl_surface) != TPL_ERROR_NONE)
3519                         TPL_ERR("Failed to set wait vblank.");
3520         }
3521
3522         tpl_gmutex_lock(&wl_egl_surface->commit_sync.mutex);
3523
3524         if (wl_egl_buffer->commit_sync_fd != -1) {
3525                 TRACE_ASYNC_END(wl_egl_buffer->commit_sync_fd, "[COMMIT_SYNC] bo(%d)",
3526                                                 wl_egl_buffer->bo_name);
3527                 TPL_LOG_D("[COMMIT_SYNC][SEND]", "wl_egl_surface(%p) commit_sync_fd(%d)",
3528                                   wl_egl_surface, wl_egl_buffer->commit_sync_fd);
3529                 send_signal(wl_egl_buffer->commit_sync_fd, "COMMIT_SYNC");
3530                 wl_egl_buffer->commit_sync_fd = -1;
3531         }
3532
3533         tpl_gmutex_unlock(&wl_egl_surface->commit_sync.mutex);
3534 }
3535
3536 static int
3537 _write_to_eventfd(int eventfd, uint64_t value)
3538 {
3539         int ret;
3540
3541         ret = write(eventfd, &value, sizeof(uint64_t));
3542         if (ret == -1) {
3543                 TPL_ERR("failed to write to fd(%d)", eventfd);
3544                 return ret;
3545         }
3546
3547         return ret;
3548 }
3549
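/* One-shot eventfd signalling: writes 1 to wake the waiter, then closes the
 * fd since each commit/presentation sync fd is used for a single event. */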
3550 static int send_signal(int fd, const char *type)
3551 {
3552         int ret = 0;
3553         if (fd < 0) return ret;
3554
3555         ret = _write_to_eventfd(fd, 1);
3556         if (ret < 0)
3557                 TPL_ERR("Failed to send %s signal to fd(%d)", type, fd);
3558
3559         close(fd);
3560
3561         return ret;
3562 }
3563
void
__tpl_display_init_backend_wl_egl_thread(tpl_display_backend_t *backend)
{
	TPL_ASSERT(backend);

	backend->type = TPL_BACKEND_WAYLAND_THREAD;
	backend->data = NULL;

	backend->init = __tpl_wl_egl_display_init;
	backend->fini = __tpl_wl_egl_display_fini;
	backend->query_config = __tpl_wl_egl_display_query_config;
	backend->filter_config = __tpl_wl_egl_display_filter_config;
	backend->get_window_info = __tpl_wl_egl_display_get_window_info;
	backend->get_pixmap_info = __tpl_wl_egl_display_get_pixmap_info;
	backend->get_buffer_from_native_pixmap =
		__tpl_wl_egl_display_get_buffer_from_native_pixmap;
}

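/* Fill in the surface backend function table for the wl_egl_thread backend. */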
void
__tpl_surface_init_backend_wl_egl_thread(tpl_surface_backend_t *backend)
{
	TPL_ASSERT(backend);

	backend->type = TPL_BACKEND_WAYLAND_THREAD;
	backend->data = NULL;

	backend->init = __tpl_wl_egl_surface_init;
	backend->fini = __tpl_wl_egl_surface_fini;
	backend->validate = __tpl_wl_egl_surface_validate;
	backend->cancel_dequeued_buffer =
		__tpl_wl_egl_surface_cancel_buffer;
	backend->dequeue_buffer = __tpl_wl_egl_surface_dequeue_buffer;
	backend->enqueue_buffer = __tpl_wl_egl_surface_enqueue_buffer;
	backend->set_rotation_capability =
		__tpl_wl_egl_surface_set_rotation_capability;
	backend->set_post_interval =
		__tpl_wl_egl_surface_set_post_interval;
	backend->get_size =
		__tpl_wl_egl_surface_get_size;
	backend->fence_sync_is_available =
		__tpl_wl_egl_surface_fence_sync_is_available;
}

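/* Final destructor for a wl_egl_buffer.
 * Removes the buffer from the surface's buffer list and vblank waiting list,
 * destroys the wl_buffer and explicit-sync objects, signals any pending
 * sync fds, and frees the wl_egl_buffer itself. */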
static void
__cb_wl_egl_buffer_free(tpl_wl_egl_buffer_t *wl_egl_buffer)
{
	tpl_wl_egl_surface_t *wl_egl_surface = wl_egl_buffer->wl_egl_surface;
	tpl_wl_egl_display_t *wl_egl_display = wl_egl_surface->wl_egl_display;

	TPL_INFO("[BUFFER_FREE]", "wl_egl_surface(%p) wl_egl_buffer(%p)",
		 wl_egl_surface, wl_egl_buffer);
	TPL_INFO("[BUFFER_FREE]", "tbm_surface(%p) bo(%d)",
		 wl_egl_buffer->tbm_surface, wl_egl_buffer->bo_name);

	tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
	if (wl_egl_surface->buffers) {
		__tpl_list_remove_data(wl_egl_surface->buffers, (void *)wl_egl_buffer,
				       TPL_FIRST, NULL);
	}
	tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);

	if (wl_egl_surface->vblank) {
		tpl_gmutex_lock(&wl_egl_surface->vblank->mutex);
		if (wl_egl_surface->vblank->waiting_buffers)
			__tpl_list_remove_data(wl_egl_surface->vblank->waiting_buffers, (void *)wl_egl_buffer,
					       TPL_FIRST, NULL);
		tpl_gmutex_unlock(&wl_egl_surface->vblank->mutex);
	}

	tpl_gmutex_lock(&wl_egl_buffer->mutex);

	if (wl_egl_display) {
		if (wl_egl_display->wl_tbm_client && wl_egl_buffer->wl_buffer) {
			wayland_tbm_client_destroy_buffer(wl_egl_display->wl_tbm_client,
							  (void *)wl_egl_buffer->wl_buffer);
			wl_egl_buffer->wl_buffer = NULL;
		}

		wl_display_flush(wl_egl_display->wl_display);
	}

#if TIZEN_FEATURE_ENABLE
	if (wl_egl_buffer->buffer_release) {
		zwp_linux_buffer_release_v1_destroy(wl_egl_buffer->buffer_release);
		wl_egl_buffer->buffer_release = NULL;
	}

	if (wl_egl_buffer->release_fence_fd != -1) {
		close(wl_egl_buffer->release_fence_fd);
		wl_egl_buffer->release_fence_fd = -1;
	}
#endif

	if (wl_egl_buffer->waiting_source) {
		tpl_gsource_destroy(wl_egl_buffer->waiting_source, TPL_FALSE);
		wl_egl_buffer->waiting_source = NULL;
	}

	send_signal(wl_egl_buffer->commit_sync_fd, "COMMIT_SYNC");
	wl_egl_buffer->commit_sync_fd = -1;

	send_signal(wl_egl_buffer->presentation_sync_fd, "PST_SYNC");
	wl_egl_buffer->presentation_sync_fd = -1;

	if (wl_egl_buffer->rects) {
		free(wl_egl_buffer->rects);
		wl_egl_buffer->rects = NULL;
		wl_egl_buffer->num_rects = 0;
	}

	wl_egl_buffer->wl_egl_surface = NULL;
	wl_egl_buffer->tbm_surface = NULL;
	wl_egl_buffer->bo_name = -1;
	wl_egl_buffer->status = RELEASED;

	tpl_gmutex_unlock(&wl_egl_buffer->mutex);
	tpl_gmutex_clear(&wl_egl_buffer->mutex);
	tpl_gcond_clear(&wl_egl_buffer->cond);
	free(wl_egl_buffer);
}

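/* Export the first bo of the tbm_surface as a global bo name,
 * used to identify buffers in logs and traces. */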
static int
_get_tbm_surface_bo_name(tbm_surface_h tbm_surface)
{
	return tbm_bo_export(tbm_surface_internal_get_bo(tbm_surface, 0));
}

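/* Dump the status of every wl_egl_buffer currently tracked by the surface. */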
static void
_print_buffer_lists(tpl_wl_egl_surface_t *wl_egl_surface)
{
	tpl_list_node_t *node = NULL;
	int buffer_cnt = 0;
	int idx = 0;

	tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
	buffer_cnt = __tpl_list_get_count(wl_egl_surface->buffers);

	node = __tpl_list_get_front_node(wl_egl_surface->buffers);
	do {
		if (!node) break;
		tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_node_get_data(node));
		TPL_INFO("[BUFFERS_INFO]",
			 "[%d/%d] wl_egl_surface(%p), wl_egl_buffer(%p) tbm_surface(%p) bo(%d) | status(%s)",
			 ++idx, buffer_cnt, wl_egl_surface, wl_egl_buffer,
			 wl_egl_buffer->tbm_surface, wl_egl_buffer->bo_name,
			 status_to_string[wl_egl_buffer->status]);
	} while ((node = __tpl_list_node_next(node)));
	tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);
}

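/* Check whether the given tbm_surface is one of the buffers owned by
 * wl_egl_surface. Returns TPL_TRUE if found, TPL_FALSE otherwise. */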
static tpl_bool_t
_check_buffer_validate(tpl_wl_egl_surface_t *wl_egl_surface, tbm_surface_h tbm_surface)
{
	tpl_list_node_t *node = NULL;
	tpl_bool_t ret = TPL_FALSE;

	/* silent return */
	if (!wl_egl_surface || !tbm_surface)
		return ret;

	tpl_gmutex_lock(&wl_egl_surface->buffers_mutex);
	node = __tpl_list_get_front_node(wl_egl_surface->buffers);
	do {
		if (!node) break;
		tpl_wl_egl_buffer_t wl_egl_buffer(__tpl_list_node_get_data(node));
		if (wl_egl_buffer->tbm_surface == tbm_surface) {
			ret = TPL_TRUE;
			break;
		}
	} while ((node = __tpl_list_node_next(node)));

	if (ret == TPL_FALSE) {
		TPL_ERR("tbm_surface(%p) is not owned by wl_egl_surface(%p)",
			tbm_surface, wl_egl_surface);
	}

	tpl_gmutex_unlock(&wl_egl_surface->buffers_mutex);

	return ret;
}